From e8d6f1d9e9764f90128938ce1386a5b767607c60 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 20 Feb 2022 22:02:56 +0100 Subject: [PATCH 0001/2550] Pull Fusion integration from `colorbleed` --- openpype/hosts/fusion/__init__.py | 5 + openpype/hosts/fusion/api/menu.py | 57 ++++++--- openpype/hosts/fusion/api/menu_style.qss | 29 ----- .../fusion/deploy/Config/openpype_menu.fu | 60 +++++++++ .../hosts/fusion/deploy/MenuScripts/README.md | 6 + .../deploy/MenuScripts/install_pyside2.py | 29 +++++ .../MenuScripts/openpype_menu.py} | 10 ++ .../32bit/backgrounds_selected_to32bit.py | 0 .../Comp}/32bit/backgrounds_to32bit.py | 0 .../Comp}/32bit/loaders_selected_to32bit.py | 0 .../Scripts/Comp}/32bit/loaders_to32bit.py | 0 .../Scripts/Comp}/switch_ui.py | 0 .../Scripts/Comp}/update_loader_ranges.py | 0 .../hosts/fusion/deploy/fusion_shared.prefs | 19 +++ .../hosts/fusion/hooks/pre_fusion_setup.py | 114 ++++-------------- openpype/hosts/fusion/plugins/load/actions.py | 2 + .../fusion/scripts/fusion_switch_shot.py | 2 +- 17 files changed, 197 insertions(+), 136 deletions(-) delete mode 100644 openpype/hosts/fusion/api/menu_style.qss create mode 100644 openpype/hosts/fusion/deploy/Config/openpype_menu.fu create mode 100644 openpype/hosts/fusion/deploy/MenuScripts/README.md create mode 100644 openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py rename openpype/hosts/fusion/{utility_scripts/__OpenPype_Menu__.py => deploy/MenuScripts/openpype_menu.py} (54%) rename openpype/hosts/fusion/{utility_scripts => deploy/Scripts/Comp}/32bit/backgrounds_selected_to32bit.py (100%) rename openpype/hosts/fusion/{utility_scripts => deploy/Scripts/Comp}/32bit/backgrounds_to32bit.py (100%) rename openpype/hosts/fusion/{utility_scripts => deploy/Scripts/Comp}/32bit/loaders_selected_to32bit.py (100%) rename openpype/hosts/fusion/{utility_scripts => deploy/Scripts/Comp}/32bit/loaders_to32bit.py (100%) rename openpype/hosts/fusion/{utility_scripts => deploy/Scripts/Comp}/switch_ui.py (100%) rename openpype/hosts/fusion/{utility_scripts => deploy/Scripts/Comp}/update_loader_ranges.py (100%) create mode 100644 openpype/hosts/fusion/deploy/fusion_shared.prefs diff --git a/openpype/hosts/fusion/__init__.py b/openpype/hosts/fusion/__init__.py index e69de29bb2..02befa76e2 100644 --- a/openpype/hosts/fusion/__init__.py +++ b/openpype/hosts/fusion/__init__.py @@ -0,0 +1,5 @@ +import os + +HOST_DIR = os.path.dirname( + os.path.abspath(__file__) +) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 6234322d7f..7799528462 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -3,26 +3,16 @@ import sys from Qt import QtWidgets, QtCore -from openpype import style +from avalon import api from openpype.tools.utils import host_tools +from openpype.style import load_stylesheet from openpype.hosts.fusion.scripts import ( set_rendermode, duplicate_with_inputs ) -def load_stylesheet(): - path = os.path.join(os.path.dirname(__file__), "menu_style.qss") - if not os.path.exists(path): - print("Unable to load stylesheet, file not found in resources") - return "" - - with open(path, "r") as file_stream: - stylesheet = file_stream.read() - return stylesheet - - class Spacer(QtWidgets.QWidget): def __init__(self, height, *args, **kwargs): super(Spacer, self).__init__(*args, **kwargs) @@ -55,6 +45,15 @@ class OpenPypeMenu(QtWidgets.QWidget): ) self.render_mode_widget = None self.setWindowTitle("OpenPype") + + asset_label = QtWidgets.QLabel("Context", self) + 
asset_label.setStyleSheet("""QLabel { + font-size: 14px; + font-weight: 600; + color: #5f9fb8; + }""") + asset_label.setAlignment(QtCore.Qt.AlignHCenter) + workfiles_btn = QtWidgets.QPushButton("Workfiles...", self) create_btn = QtWidgets.QPushButton("Create...", self) publish_btn = QtWidgets.QPushButton("Publish...", self) @@ -72,10 +71,17 @@ class OpenPypeMenu(QtWidgets.QWidget): layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(10, 20, 10, 20) + layout.addWidget(asset_label) + + layout.addWidget(Spacer(15, self)) + layout.addWidget(workfiles_btn) + + layout.addWidget(Spacer(15, self)) + layout.addWidget(create_btn) - layout.addWidget(publish_btn) layout.addWidget(load_btn) + layout.addWidget(publish_btn) layout.addWidget(manager_btn) layout.addWidget(Spacer(15, self)) @@ -93,6 +99,9 @@ class OpenPypeMenu(QtWidgets.QWidget): self.setLayout(layout) + # Store reference so we can update the label + self.asset_label = asset_label + workfiles_btn.clicked.connect(self.on_workfile_clicked) create_btn.clicked.connect(self.on_create_clicked) publish_btn.clicked.connect(self.on_publish_clicked) @@ -104,6 +113,26 @@ class OpenPypeMenu(QtWidgets.QWidget): self.on_duplicate_with_inputs_clicked) reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked) + self._callbacks = [] + self.register_callback("taskChanged", self.on_task_changed) + self.on_task_changed() + + def on_task_changed(self): + # Update current context label + label = api.Session["AVALON_ASSET"] + self.asset_label.setText(label) + + def register_callback(self, name, fn): + + # Create a wrapper callback that we only store + # for as long as we want it to persist as callback + callback = lambda *args: fn() + self._callbacks.append(callback) + api.on(name, callback) + + def deregister_all_callbacks(self): + self._callbacks[:] = [] + def on_workfile_clicked(self): print("Clicked Workfile") host_tools.show_workfiles() @@ -132,7 +161,7 @@ class OpenPypeMenu(QtWidgets.QWidget): print("Clicked Set Render Mode") if self.render_mode_widget is None: window = set_rendermode.SetRenderMode() - window.setStyleSheet(style.load_stylesheet()) + window.setStyleSheet(load_stylesheet()) window.show() self.render_mode_widget = window else: diff --git a/openpype/hosts/fusion/api/menu_style.qss b/openpype/hosts/fusion/api/menu_style.qss deleted file mode 100644 index 12c474b070..0000000000 --- a/openpype/hosts/fusion/api/menu_style.qss +++ /dev/null @@ -1,29 +0,0 @@ -QWidget { - background-color: #282828; - border-radius: 3; -} - -QPushButton { - border: 1px solid #090909; - background-color: #201f1f; - color: #ffffff; - padding: 5; -} - -QPushButton:focus { - background-color: "#171717"; - color: #d0d0d0; -} - -QPushButton:hover { - background-color: "#171717"; - color: #e64b3d; -} - -#OpenPypeMenu { - border: 1px solid #fef9ef; -} - -#Spacer { - background-color: #282828; -} diff --git a/openpype/hosts/fusion/deploy/Config/openpype_menu.fu b/openpype/hosts/fusion/deploy/Config/openpype_menu.fu new file mode 100644 index 0000000000..8b8d448259 --- /dev/null +++ b/openpype/hosts/fusion/deploy/Config/openpype_menu.fu @@ -0,0 +1,60 @@ +{ + Action + { + ID = "OpenPype_Menu", + Category = "OpenPype", + Name = "OpenPype Menu", + + Targets = + { + Composition = + { + Execute = _Lua [=[ + local scriptPath = app:MapPath("OpenPype:MenuScripts/openpype_menu.py") + if bmd.fileexists(scriptPath) == false then + print("[OpenPype Error] Can't run file: " .. 
scriptPath) + else + target:RunScript(scriptPath) + end + ]=], + }, + }, + }, + Action + { + ID = "OpenPype_Install_PySide2", + Category = "OpenPype", + Name = "Install PySide2", + + Targets = + { + Composition = + { + Execute = _Lua [=[ + local scriptPath = app:MapPath("OpenPype:MenuScripts/install_pyside2.py") + if bmd.fileexists(scriptPath) == false then + print("[OpenPype Error] Can't run file: " .. scriptPath) + else + target:RunScript(scriptPath) + end + ]=], + }, + }, + }, + Menus + { + Target = "ChildFrame", + + Before "Help" + { + Sub "OpenPype" + { + "OpenPype_Menu{}", + "_", + Sub "Admin" { + "OpenPype_Install_PySide2{}" + } + } + }, + }, +} diff --git a/openpype/hosts/fusion/deploy/MenuScripts/README.md b/openpype/hosts/fusion/deploy/MenuScripts/README.md new file mode 100644 index 0000000000..f87eaea4a2 --- /dev/null +++ b/openpype/hosts/fusion/deploy/MenuScripts/README.md @@ -0,0 +1,6 @@ +### OpenPype deploy MenuScripts + +Note that this `MenuScripts` is not an official Fusion folder. +OpenPype only uses this folder in `{fusion}/deploy/` to trigger the OpenPype menu actions. + +They are used in the actions defined in `.fu` files in `{fusion}/deploy/Config`. \ No newline at end of file diff --git a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py new file mode 100644 index 0000000000..4fcdb7658f --- /dev/null +++ b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py @@ -0,0 +1,29 @@ +# This is just a quick hack for users running Py3 locally but having no +# Qt library installed +import os +import subprocess +import importlib + + +try: + from Qt import QtWidgets + from Qt import __binding__ + print(f"Qt binding: {__binding__}") + mod = importlib.import_module(__binding__) + print(f"Qt path: {mod.__file__}") + print("Qt library found, nothing to do..") + +except ImportError as exc: + print("Assuming no Qt library is installed..") + print('Installing PySide2 for Python 3.6: ' + f'{os.environ["FUSION16_PYTHON36_HOME"]}') + + # Get full path to python executable + exe = "python.exe" if os.name == 'nt' else "python" + python = os.path.join(os.environ["FUSION16_PYTHON36_HOME"], exe) + assert os.path.exists(python), f"Python doesn't exist: {python}" + + # Do python -m pip install PySide2 + args = [python, "-m", "pip", "install", "PySide2"] + print(f"Args: {args}") + subprocess.Popen(args) diff --git a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py b/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py similarity index 54% rename from openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py rename to openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py index 4b5e8f91a0..20547b21f2 100644 --- a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py +++ b/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py @@ -8,6 +8,11 @@ log = Logger().get_logger(__name__) def main(env): + # This script working directory starts in Fusion application folder. + # However the contents of that folder can conflict with Qt library dlls + # so we make sure to move out of it to avoid DLL Load Failed errors. + os.chdir("..") + import avalon.api from openpype.hosts.fusion import api from openpype.hosts.fusion.api import menu @@ -22,6 +27,11 @@ def main(env): menu.launch_openpype_menu() + # Initiate a QTimer to check if Fusion is still alive every X interval + # If Fusion is not found - kill itself + # todo(roy): Implement timer that ensures UI doesn't remain when e.g. 
+ # Fusion closes down + if __name__ == "__main__": result = main(os.environ) diff --git a/openpype/hosts/fusion/utility_scripts/32bit/backgrounds_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_selected_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/backgrounds_selected_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_selected_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/32bit/backgrounds_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/backgrounds_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/32bit/loaders_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_selected_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/loaders_selected_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_selected_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/32bit/loaders_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_to32bit.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/32bit/loaders_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_to32bit.py diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/deploy/Scripts/Comp/switch_ui.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/switch_ui.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/switch_ui.py diff --git a/openpype/hosts/fusion/utility_scripts/update_loader_ranges.py b/openpype/hosts/fusion/deploy/Scripts/Comp/update_loader_ranges.py similarity index 100% rename from openpype/hosts/fusion/utility_scripts/update_loader_ranges.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/update_loader_ranges.py diff --git a/openpype/hosts/fusion/deploy/fusion_shared.prefs b/openpype/hosts/fusion/deploy/fusion_shared.prefs new file mode 100644 index 0000000000..998c6a6d66 --- /dev/null +++ b/openpype/hosts/fusion/deploy/fusion_shared.prefs @@ -0,0 +1,19 @@ +{ +Locked = true, +Global = { + Paths = { + Map = { + ["OpenPype:"] = "$(OPENPYPE_FUSION)/deploy", + ["Reactor:"] = "$(REACTOR)", + + ["Config:"] = "UserPaths:Config;OpenPype:Config", + ["Scripts:"] = "UserPaths:Scripts;Reactor:System/Scripts;OpenPype:Scripts", + ["UserPaths:"] = "UserData:;AllData:;Fusion:;Reactor:Deploy" + }, + }, + Script = { + PythonVersion = 3, + Python3Forced = true + }, + }, +} \ No newline at end of file diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index e635a0ea74..c78d433e5c 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,8 +1,6 @@ import os -import shutil - -import openpype.hosts.fusion from openpype.lib import PreLaunchHook, ApplicationLaunchFailed +from openpype.hosts.fusion import HOST_DIR class FusionPrelaunch(PreLaunchHook): @@ -14,101 +12,33 @@ class FusionPrelaunch(PreLaunchHook): def execute(self): # making sure python 3.6 is installed at provided path - py36_dir = self.launch_context.env.get("PYTHON36") - if not py36_dir: - raise ApplicationLaunchFailed( - "Required environment variable \"PYTHON36\" is not set." 
- "\n\nFusion implementation requires to have" - " installed Python 3.6" - ) + py36_var = "FUSION16_PYTHON36_HOME" + fusion_python36_home = self.launch_context.env.get(py36_var, "") - py36_dir = os.path.normpath(py36_dir) - if not os.path.isdir(py36_dir): + self.log.info(f"Looking for Python 3.6 in: {fusion_python36_home}") + for path in fusion_python36_home.split(os.pathsep): + # Allow defining multiple paths to allow "fallback" to other + # path. But make to set only a single path as final variable. + py36_dir = os.path.normpath(path) + if os.path.isdir(py36_dir): + break + else: raise ApplicationLaunchFailed( "Python 3.6 is not installed at the provided path.\n" "Either make sure the environments in fusion settings has" " 'PYTHON36' set corectly or make sure Python 3.6 is installed" - f" in the given path.\n\nPYTHON36: {py36_dir}" - ) - self.log.info(f"Path to Fusion Python folder: '{py36_dir}'...") - self.launch_context.env["PYTHON36"] = py36_dir - - utility_dir = self.launch_context.env.get("FUSION_UTILITY_SCRIPTS_DIR") - if not utility_dir: - raise ApplicationLaunchFailed( - "Required Fusion utility script dir environment variable" - " \"FUSION_UTILITY_SCRIPTS_DIR\" is not set." + f" in the given path.\n\nPYTHON36: {fusion_python36_home}" ) - # setting utility scripts dir for scripts syncing - utility_dir = os.path.normpath(utility_dir) - if not os.path.isdir(utility_dir): - raise ApplicationLaunchFailed( - "Fusion utility script dir does not exist. Either make sure " - "the environments in fusion settings has" - " 'FUSION_UTILITY_SCRIPTS_DIR' set correctly or reinstall " - f"Fusion.\n\nFUSION_UTILITY_SCRIPTS_DIR: '{utility_dir}'" - ) + self.log.info(f"Setting {py36_var}: '{py36_dir}'...") + self.launch_context.env[py36_var] = py36_dir - self._sync_utility_scripts(self.launch_context.env) - self.log.info("Fusion Pype wrapper has been installed") + # Add our Fusion Master Prefs which is the only way to customize + # Fusion to define where it can read custom scripts and tools from + self.log.info(f"Setting OPENPYPE_FUSION: {HOST_DIR}") + self.launch_context.env["OPENPYPE_FUSION"] = HOST_DIR - def _sync_utility_scripts(self, env): - """ Synchronizing basic utlility scripts for resolve. - - To be able to run scripts from inside `Fusion/Workspace/Scripts` menu - all scripts has to be accessible from defined folder. 
- """ - if not env: - env = {k: v for k, v in os.environ.items()} - - # initiate inputs - scripts = {} - us_env = env.get("FUSION_UTILITY_SCRIPTS_SOURCE_DIR") - us_dir = env.get("FUSION_UTILITY_SCRIPTS_DIR", "") - us_paths = [os.path.join( - os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)), - "utility_scripts" - )] - - # collect script dirs - if us_env: - self.log.info(f"Utility Scripts Env: `{us_env}`") - us_paths = us_env.split( - os.pathsep) + us_paths - - # collect scripts from dirs - for path in us_paths: - scripts.update({path: os.listdir(path)}) - - self.log.info(f"Utility Scripts Dir: `{us_paths}`") - self.log.info(f"Utility Scripts: `{scripts}`") - - # make sure no script file is in folder - if next((s for s in os.listdir(us_dir)), None): - for s in os.listdir(us_dir): - path = os.path.normpath( - os.path.join(us_dir, s)) - self.log.info(f"Removing `{path}`...") - - # remove file or directory if not in our folders - if not os.path.isdir(path): - os.remove(path) - else: - shutil.rmtree(path) - - # copy scripts into Resolve's utility scripts dir - for d, sl in scripts.items(): - # directory and scripts list - for s in sl: - # script in script list - src = os.path.normpath(os.path.join(d, s)) - dst = os.path.normpath(os.path.join(us_dir, s)) - - self.log.info(f"Copying `{src}` to `{dst}`...") - - # copy file or directory from our folders to fusion's folder - if not os.path.isdir(src): - shutil.copy2(src, dst) - else: - shutil.copytree(src, dst) + pref_var = "FUSION16_MasterPrefs" # used by both Fu16 and Fu17 + prefs = os.path.join(HOST_DIR, "deploy", "fusion_shared.prefs") + self.log.info(f"Setting {pref_var}: {prefs}") + self.launch_context.env[pref_var] = prefs diff --git a/openpype/hosts/fusion/plugins/load/actions.py b/openpype/hosts/fusion/plugins/load/actions.py index 6af99e4c56..84b66fc69a 100644 --- a/openpype/hosts/fusion/plugins/load/actions.py +++ b/openpype/hosts/fusion/plugins/load/actions.py @@ -11,6 +11,7 @@ class FusionSetFrameRangeLoader(api.Loader): families = ["animation", "camera", "imagesequence", + "render", "yeticache", "pointcache", "render"] @@ -45,6 +46,7 @@ class FusionSetFrameRangeWithHandlesLoader(api.Loader): families = ["animation", "camera", "imagesequence", + "render", "yeticache", "pointcache", "render"] diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index 041b53f6c9..9dd8a351e4 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -176,7 +176,7 @@ def update_frame_range(comp, representations): versions = list(versions) versions = [v for v in versions - if v["data"].get("startFrame", None) is not None] + if v["data"].get("frameStart", None) is not None] if not versions: log.warning("No versions loaded to match frame range to.\n") From e72c7680684172c399b1a07f346c9897ba75d508 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 17 Feb 2022 12:30:10 +0100 Subject: [PATCH 0002/2550] fusion: adding reset resolution (cherry picked from commit 209511ee3d938e21c20cff03edcdbbde4a4791f0) --- openpype/hosts/fusion/api/__init__.py | 4 +++- openpype/hosts/fusion/api/lib.py | 29 +++++++++++++++++++++++++-- openpype/hosts/fusion/api/menu.py | 23 +++++++++++++-------- 3 files changed, 45 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/fusion/api/__init__.py b/openpype/hosts/fusion/api/__init__.py index 19d1e092fe..78afabdb45 100644 --- a/openpype/hosts/fusion/api/__init__.py +++ 
b/openpype/hosts/fusion/api/__init__.py @@ -23,7 +23,8 @@ from .workio import ( from .lib import ( maintained_selection, get_additional_data, - update_frame_range + update_frame_range, + set_framerange ) from .menu import launch_openpype_menu @@ -53,6 +54,7 @@ __all__ = [ "maintained_selection", "get_additional_data", "update_frame_range", + "set_framerange", # menu "launch_openpype_menu", diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 5d97f83032..37a13e4a10 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -8,12 +8,14 @@ from Qt import QtGui import avalon.api from avalon import io from .pipeline import get_current_comp, comp_lock_and_undo_chunk - +from openpype.api import ( + get_asset +) self = sys.modules[__name__] self._project = None -def update_frame_range(start, end, comp=None, set_render_range=True): +def update_frame_range(start, end, comp=None, set_render_range=True, **kwargs): """Set Fusion comp's start and end frame range Args: @@ -22,6 +24,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True): comp (object, Optional): comp object from fusion set_render_range (bool, Optional): When True this will also set the composition's render start and end frame. + kwargs (dict): additional kwargs Returns: None @@ -36,6 +39,16 @@ def update_frame_range(start, end, comp=None, set_render_range=True): "COMPN_GlobalEnd": end } + # exclude handles if any found in kwargs + if kwargs.get("handle_start"): + handle_start = kwargs.get("handle_start") + attrs["COMPN_GlobalStart"] = int(start - handle_start) + + if kwargs.get("handle_end"): + handle_end = kwargs.get("handle_end") + attrs["COMPN_GlobalEnd"] = int(end + handle_end) + + # set frame range if set_render_range: attrs.update({ "COMPN_RenderStart": start, @@ -46,6 +59,18 @@ def update_frame_range(start, end, comp=None, set_render_range=True): comp.SetAttrs(attrs) +def set_framerange(): + asset_doc = get_asset() + start = asset_doc["data"]["frameStart"] + end = asset_doc["data"]["frameEnd"] + + data = { + "handle_start": asset_doc["data"]["handleStart"], + "handle_end": asset_doc["data"]["handleEnd"] + } + update_frame_range(start, end, set_render_range=True, **data) + + def get_additional_data(container): """Get Fusion related data for the container diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 7799528462..31a3b5b88c 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -1,4 +1,3 @@ -import os import sys from Qt import QtWidgets, QtCore @@ -11,7 +10,9 @@ from openpype.hosts.fusion.scripts import ( set_rendermode, duplicate_with_inputs ) - +from openpype.hosts.fusion.api import ( + set_framerange +) class Spacer(QtWidgets.QWidget): def __init__(self, height, *args, **kwargs): @@ -61,12 +62,11 @@ class OpenPypeMenu(QtWidgets.QWidget): manager_btn = QtWidgets.QPushButton("Manage...", self) libload_btn = QtWidgets.QPushButton("Library...", self) rendermode_btn = QtWidgets.QPushButton("Set render mode...", self) + set_framerange_btn = QtWidgets.QPushButton("Set Frame Range", self) + set_resolution_btn = QtWidgets.QPushButton("Set Resolution", self) duplicate_with_inputs_btn = QtWidgets.QPushButton( "Duplicate with input connections", self ) - reset_resolution_btn = QtWidgets.QPushButton( - "Reset Resolution from project", self - ) layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(10, 20, 10, 20) @@ -90,12 +90,13 @@ class OpenPypeMenu(QtWidgets.QWidget): 
layout.addWidget(Spacer(15, self)) + layout.addWidget(set_framerange_btn) + layout.addWidget(set_resolution_btn) layout.addWidget(rendermode_btn) layout.addWidget(Spacer(15, self)) layout.addWidget(duplicate_with_inputs_btn) - layout.addWidget(reset_resolution_btn) self.setLayout(layout) @@ -111,7 +112,8 @@ class OpenPypeMenu(QtWidgets.QWidget): rendermode_btn.clicked.connect(self.on_rendernode_clicked) duplicate_with_inputs_btn.clicked.connect( self.on_duplicate_with_inputs_clicked) - reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked) + set_resolution_btn.clicked.connect(self.on_set_resolution_clicked) + set_framerange_btn.clicked.connect(self.on_set_framerange_clicked) self._callbacks = [] self.register_callback("taskChanged", self.on_task_changed) @@ -171,9 +173,14 @@ class OpenPypeMenu(QtWidgets.QWidget): duplicate_with_inputs.duplicate_with_input_connections() print("Clicked Set Colorspace") - def on_reset_resolution_clicked(self): + def on_set_resolution_clicked(self): print("Clicked Reset Resolution") + def on_set_framerange_clicked(self): + print("Clicked Reset Framerange") + set_framerange() + + def launch_openpype_menu(): app = QtWidgets.QApplication(sys.argv) From 1b40d5102fac1f726a03a6f5d08fcd3a519f16c2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 20 Feb 2022 22:22:50 +0100 Subject: [PATCH 0003/2550] Fix hound issues --- openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py index 4fcdb7658f..ab9f13ce05 100644 --- a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py +++ b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py @@ -6,14 +6,14 @@ import importlib try: - from Qt import QtWidgets + from Qt import QtWidgets # noqa: F401 from Qt import __binding__ print(f"Qt binding: {__binding__}") mod = importlib.import_module(__binding__) print(f"Qt path: {mod.__file__}") print("Qt library found, nothing to do..") -except ImportError as exc: +except ImportError: print("Assuming no Qt library is installed..") print('Installing PySide2 for Python 3.6: ' f'{os.environ["FUSION16_PYTHON36_HOME"]}') From 43be2004c7e14a272aa512df9bcc4916c5dac8a4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 20 Feb 2022 22:24:42 +0100 Subject: [PATCH 0004/2550] Avoid assigning a lambda expression --- openpype/hosts/fusion/api/menu.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 31a3b5b88c..1d7e1092da 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -14,6 +14,7 @@ from openpype.hosts.fusion.api import ( set_framerange ) + class Spacer(QtWidgets.QWidget): def __init__(self, height, *args, **kwargs): super(Spacer, self).__init__(*args, **kwargs) @@ -128,9 +129,11 @@ class OpenPypeMenu(QtWidgets.QWidget): # Create a wrapper callback that we only store # for as long as we want it to persist as callback - callback = lambda *args: fn() - self._callbacks.append(callback) - api.on(name, callback) + def _callback(*args): + fn() + + self._callbacks.append(_callback) + api.on(name, _callback) def deregister_all_callbacks(self): self._callbacks[:] = [] From 860f159ec369fedd423181ce0342d9b19c91db8f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 20 Feb 2022 22:25:59 +0100 Subject: [PATCH 0005/2550] Fix double "render" 
family --- openpype/hosts/fusion/plugins/load/actions.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/fusion/plugins/load/actions.py b/openpype/hosts/fusion/plugins/load/actions.py index 84b66fc69a..6af99e4c56 100644 --- a/openpype/hosts/fusion/plugins/load/actions.py +++ b/openpype/hosts/fusion/plugins/load/actions.py @@ -11,7 +11,6 @@ class FusionSetFrameRangeLoader(api.Loader): families = ["animation", "camera", "imagesequence", - "render", "yeticache", "pointcache", "render"] @@ -46,7 +45,6 @@ class FusionSetFrameRangeWithHandlesLoader(api.Loader): families = ["animation", "camera", "imagesequence", - "render", "yeticache", "pointcache", "render"] From cdca18e93f53ba1e222d52d90fecebf0e7feff01 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 22 Feb 2022 20:58:34 +0100 Subject: [PATCH 0006/2550] Fusion: Add support for open last workfile --- openpype/hooks/pre_add_last_workfile_arg.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hooks/pre_add_last_workfile_arg.py b/openpype/hooks/pre_add_last_workfile_arg.py index 653f97b3dd..eb9e6a6b1c 100644 --- a/openpype/hooks/pre_add_last_workfile_arg.py +++ b/openpype/hooks/pre_add_last_workfile_arg.py @@ -18,6 +18,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook): "nukex", "hiero", "nukestudio", + "fusion", "blender", "photoshop", "tvpaint", From 4e614c8337be7ea35ce4a063385d844d90c1e896 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 22 Feb 2022 21:02:43 +0100 Subject: [PATCH 0007/2550] Fix typo + wrong label --- openpype/hosts/fusion/api/menu.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 1d7e1092da..7b23e2bc2b 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -110,7 +110,7 @@ class OpenPypeMenu(QtWidgets.QWidget): load_btn.clicked.connect(self.on_load_clicked) manager_btn.clicked.connect(self.on_manager_clicked) libload_btn.clicked.connect(self.on_libload_clicked) - rendermode_btn.clicked.connect(self.on_rendernode_clicked) + rendermode_btn.clicked.connect(self.on_rendermode_clicked) duplicate_with_inputs_btn.clicked.connect( self.on_duplicate_with_inputs_clicked) set_resolution_btn.clicked.connect(self.on_set_resolution_clicked) @@ -162,7 +162,7 @@ class OpenPypeMenu(QtWidgets.QWidget): print("Clicked Library") host_tools.show_library_loader() - def on_rendernode_clicked(self): + def on_rendermode_clicked(self): print("Clicked Set Render Mode") if self.render_mode_widget is None: window = set_rendermode.SetRenderMode() @@ -173,8 +173,8 @@ class OpenPypeMenu(QtWidgets.QWidget): self.render_mode_widget.show() def on_duplicate_with_inputs_clicked(self): + print("Clicked Duplicate with input connections") duplicate_with_inputs.duplicate_with_input_connections() - print("Clicked Set Colorspace") def on_set_resolution_clicked(self): print("Clicked Reset Resolution") From 959aa2a47974075d97be41dc5c49fb6ee2641875 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 22 Feb 2022 21:10:30 +0100 Subject: [PATCH 0008/2550] Remove duplicate script that has more recent version in openpype/hosts/fusion/scripts --- openpype/scripts/fusion_switch_shot.py | 248 ------------------------- 1 file changed, 248 deletions(-) delete mode 100644 openpype/scripts/fusion_switch_shot.py diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py deleted file mode 100644 index 26f5356336..0000000000 --- a/openpype/scripts/fusion_switch_shot.py +++ 
/dev/null @@ -1,248 +0,0 @@ -import os -import re -import sys -import logging - -# Pipeline imports -from avalon import api, io, pipeline -import avalon.fusion - -# Config imports -import openpype.lib as pype -import openpype.hosts.fusion.lib as fusion_lib - -log = logging.getLogger("Update Slap Comp") - -self = sys.modules[__name__] -self._project = None - - -def _format_version_folder(folder): - """Format a version folder based on the filepath - - Assumption here is made that, if the path does not exists the folder - will be "v001" - - Args: - folder: file path to a folder - - Returns: - str: new version folder name - """ - - new_version = 1 - if os.path.isdir(folder): - re_version = re.compile("v\d+$") - versions = [i for i in os.listdir(folder) if os.path.isdir(i) - and re_version.match(i)] - if versions: - # ensure the "v" is not included - new_version = int(max(versions)[1:]) + 1 - - version_folder = "v{:03d}".format(new_version) - - return version_folder - - -def _get_work_folder(session): - """Convenience function to get the work folder path of the current asset""" - - # Get new filename, create path based on asset and work template - template_work = self._project["config"]["template"]["work"] - work_path = pipeline._format_work_template(template_work, session) - - return os.path.normpath(work_path) - - -def _get_fusion_instance(): - fusion = getattr(sys.modules["__main__"], "fusion", None) - if fusion is None: - try: - # Support for FuScript.exe, BlackmagicFusion module for py2 only - import BlackmagicFusion as bmf - fusion = bmf.scriptapp("Fusion") - except ImportError: - raise RuntimeError("Could not find a Fusion instance") - return fusion - - -def _format_filepath(session): - - project = session["AVALON_PROJECT"] - asset = session["AVALON_ASSET"] - - # Save updated slap comp - work_path = _get_work_folder(session) - walk_to_dir = os.path.join(work_path, "scenes", "slapcomp") - slapcomp_dir = os.path.abspath(walk_to_dir) - - # Ensure destination exists - if not os.path.isdir(slapcomp_dir): - log.warning("Folder did not exist, creating folder structure") - os.makedirs(slapcomp_dir) - - # Compute output path - new_filename = "{}_{}_slapcomp_v001.comp".format(project, asset) - new_filepath = os.path.join(slapcomp_dir, new_filename) - - # Create new unqiue filepath - if os.path.exists(new_filepath): - new_filepath = pype.version_up(new_filepath) - - return new_filepath - - -def _update_savers(comp, session): - """Update all savers of the current comp to ensure the output is correct - - Args: - comp (object): current comp instance - session (dict): the current Avalon session - - Returns: - None - """ - - new_work = _get_work_folder(session) - renders = os.path.join(new_work, "renders") - version_folder = _format_version_folder(renders) - renders_version = os.path.join(renders, version_folder) - - comp.Print("New renders to: %s\n" % renders) - - with avalon.fusion.comp_lock_and_undo_chunk(comp): - savers = comp.GetToolList(False, "Saver").values() - for saver in savers: - filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0] - filename = os.path.basename(filepath) - new_path = os.path.join(renders_version, filename) - saver["Clip"] = new_path - - -def update_frame_range(comp, representations): - """Update the frame range of the comp and render length - - The start and end frame are based on the lowest start frame and the highest - end frame - - Args: - comp (object): current focused comp - representations (list) collection of dicts - - Returns: - None - - """ - - version_ids = 
[r["parent"] for r in representations] - versions = io.find({"type": "version", "_id": {"$in": version_ids}}) - versions = list(versions) - - start = min(v["data"]["frameStart"] for v in versions) - end = max(v["data"]["frameEnd"] for v in versions) - - fusion_lib.update_frame_range(start, end, comp=comp) - - -def switch(asset_name, filepath=None, new=True): - """Switch the current containers of the file to the other asset (shot) - - Args: - filepath (str): file path of the comp file - asset_name (str): name of the asset (shot) - new (bool): Save updated comp under a different name - - Returns: - comp path (str): new filepath of the updated comp - - """ - - # If filepath provided, ensure it is valid absolute path - if filepath is not None: - if not os.path.isabs(filepath): - filepath = os.path.abspath(filepath) - - assert os.path.exists(filepath), "%s must exist " % filepath - - # Assert asset name exists - # It is better to do this here then to wait till switch_shot does it - asset = io.find_one({"type": "asset", "name": asset_name}) - assert asset, "Could not find '%s' in the database" % asset_name - - # Get current project - self._project = io.find_one({ - "type": "project", - "name": api.Session["AVALON_PROJECT"] - }) - - # Go to comp - if not filepath: - current_comp = avalon.fusion.get_current_comp() - assert current_comp is not None, "Could not find current comp" - else: - fusion = _get_fusion_instance() - current_comp = fusion.LoadComp(filepath, quiet=True) - assert current_comp is not None, "Fusion could not load '%s'" % filepath - - host = api.registered_host() - containers = list(host.ls()) - assert containers, "Nothing to update" - - representations = [] - for container in containers: - try: - representation = fusion_lib.switch_item(container, - asset_name=asset_name) - representations.append(representation) - except Exception as e: - current_comp.Print("Error in switching! 
%s\n" % e.message) - - message = "Switched %i Loaders of the %i\n" % (len(representations), - len(containers)) - current_comp.Print(message) - - # Build the session to switch to - switch_to_session = api.Session.copy() - switch_to_session["AVALON_ASSET"] = asset['name'] - - if new: - comp_path = _format_filepath(switch_to_session) - - # Update savers output based on new session - _update_savers(current_comp, switch_to_session) - else: - comp_path = pype.version_up(filepath) - - current_comp.Print(comp_path) - - current_comp.Print("\nUpdating frame range") - update_frame_range(current_comp, representations) - - current_comp.Save(comp_path) - - return comp_path - - -if __name__ == '__main__': - - import argparse - - parser = argparse.ArgumentParser(description="Switch to a shot within an" - "existing comp file") - - parser.add_argument("--file_path", - type=str, - default=True, - help="File path of the comp to use") - - parser.add_argument("--asset_name", - type=str, - default=True, - help="Name of the asset (shot) to switch") - - args, unknown = parser.parse_args() - - api.install(avalon.fusion) - switch(args.asset_name, args.file_path) - - sys.exit(0) From 51ef4ce4e5de954bde31ac634ecb1553b7ceee62 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 22 Feb 2022 21:11:40 +0100 Subject: [PATCH 0009/2550] Move Comp scripts to an OpenPype submenu to clarify those are OpenPype scripts --- .../Comp/{ => OpenPype}/32bit/backgrounds_selected_to32bit.py | 0 .../Scripts/Comp/{ => OpenPype}/32bit/backgrounds_to32bit.py | 0 .../Scripts/Comp/{ => OpenPype}/32bit/loaders_selected_to32bit.py | 0 .../deploy/Scripts/Comp/{ => OpenPype}/32bit/loaders_to32bit.py | 0 .../hosts/fusion/deploy/Scripts/Comp/{ => OpenPype}/switch_ui.py | 0 .../deploy/Scripts/Comp/{ => OpenPype}/update_loader_ranges.py | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename openpype/hosts/fusion/deploy/Scripts/Comp/{ => OpenPype}/32bit/backgrounds_selected_to32bit.py (100%) rename openpype/hosts/fusion/deploy/Scripts/Comp/{ => OpenPype}/32bit/backgrounds_to32bit.py (100%) rename openpype/hosts/fusion/deploy/Scripts/Comp/{ => OpenPype}/32bit/loaders_selected_to32bit.py (100%) rename openpype/hosts/fusion/deploy/Scripts/Comp/{ => OpenPype}/32bit/loaders_to32bit.py (100%) rename openpype/hosts/fusion/deploy/Scripts/Comp/{ => OpenPype}/switch_ui.py (100%) rename openpype/hosts/fusion/deploy/Scripts/Comp/{ => OpenPype}/update_loader_ranges.py (100%) diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_selected_to32bit.py similarity index 100% rename from openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_selected_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_selected_to32bit.py diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_to32bit.py similarity index 100% rename from openpype/hosts/fusion/deploy/Scripts/Comp/32bit/backgrounds_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_to32bit.py diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_selected_to32bit.py similarity index 100% rename from openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_selected_to32bit.py rename to 
openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_selected_to32bit.py diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_to32bit.py similarity index 100% rename from openpype/hosts/fusion/deploy/Scripts/Comp/32bit/loaders_to32bit.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_to32bit.py diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/switch_ui.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py similarity index 100% rename from openpype/hosts/fusion/deploy/Scripts/Comp/switch_ui.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/update_loader_ranges.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/update_loader_ranges.py similarity index 100% rename from openpype/hosts/fusion/deploy/Scripts/Comp/update_loader_ranges.py rename to openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/update_loader_ranges.py From 2f9eb8fa640226aafbe2729b5417d1db016cc56d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 22 Feb 2022 21:32:07 +0100 Subject: [PATCH 0010/2550] Fix on_pyblish_instance_toggled arguments --- openpype/hosts/fusion/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 64dda0bc8a..0c1c5a4362 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -97,7 +97,7 @@ def uninstall(): ) -def on_pyblish_instance_toggled(instance, new_value, old_value): +def on_pyblish_instance_toggled(instance, old_value, new_value): """Toggle saver tool passthrough states on instance toggles.""" comp = instance.context.data.get("currentComp") if not comp: From 45391662f7a6acc33b4d618f8a7453c1186cdd89 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 17 Mar 2022 12:21:12 +0100 Subject: [PATCH 0011/2550] Fix merge conflict and remove fusion_switch_shot.py again --- .../fusion/scripts/fusion_switch_shot.py | 285 ------------------ 1 file changed, 285 deletions(-) delete mode 100644 openpype/hosts/fusion/scripts/fusion_switch_shot.py diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py deleted file mode 100644 index ca7efb9136..0000000000 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ /dev/null @@ -1,285 +0,0 @@ -import os -import re -import sys -import logging - -# Pipeline imports -import avalon.api -from avalon import io - -from openpype.lib import version_up -from openpype.hosts.fusion import api -from openpype.hosts.fusion.api import lib -from openpype.lib.avalon_context import get_workdir_from_session - -log = logging.getLogger("Update Slap Comp") - -self = sys.modules[__name__] -self._project = None - - -def _format_version_folder(folder): - """Format a version folder based on the filepath - - Assumption here is made that, if the path does not exists the folder - will be "v001" - - Args: - folder: file path to a folder - - Returns: - str: new version folder name - """ - - new_version = 1 - if os.path.isdir(folder): - re_version = re.compile(r"v\d+$") - versions = [i for i in os.listdir(folder) if os.path.isdir(i) - and re_version.match(i)] - if versions: - # ensure the "v" is not included - new_version = int(max(versions)[1:]) + 1 - - version_folder = "v{:03d}".format(new_version) - - return version_folder - - -def 
_get_fusion_instance(): - fusion = getattr(sys.modules["__main__"], "fusion", None) - if fusion is None: - try: - # Support for FuScript.exe, BlackmagicFusion module for py2 only - import BlackmagicFusion as bmf - fusion = bmf.scriptapp("Fusion") - except ImportError: - raise RuntimeError("Could not find a Fusion instance") - return fusion - - -def _format_filepath(session): - - project = session["AVALON_PROJECT"] - asset = session["AVALON_ASSET"] - - # Save updated slap comp - work_path = get_workdir_from_session(session) - walk_to_dir = os.path.join(work_path, "scenes", "slapcomp") - slapcomp_dir = os.path.abspath(walk_to_dir) - - # Ensure destination exists - if not os.path.isdir(slapcomp_dir): - log.warning("Folder did not exist, creating folder structure") - os.makedirs(slapcomp_dir) - - # Compute output path - new_filename = "{}_{}_slapcomp_v001.comp".format(project, asset) - new_filepath = os.path.join(slapcomp_dir, new_filename) - - # Create new unique filepath - if os.path.exists(new_filepath): - new_filepath = version_up(new_filepath) - - return new_filepath - - -def _update_savers(comp, session): - """Update all savers of the current comp to ensure the output is correct - - This will refactor the Saver file outputs to the renders of the new session - that is provided. - - In the case the original saver path had a path set relative to a /fusion/ - folder then that relative path will be matched with the exception of all - "version" (e.g. v010) references will be reset to v001. Otherwise only a - version folder will be computed in the new session's work "render" folder - to dump the files in and keeping the original filenames. - - Args: - comp (object): current comp instance - session (dict): the current Avalon session - - Returns: - None - """ - - new_work = get_workdir_from_session(session) - renders = os.path.join(new_work, "renders") - version_folder = _format_version_folder(renders) - renders_version = os.path.join(renders, version_folder) - - comp.Print("New renders to: %s\n" % renders) - - with api.comp_lock_and_undo_chunk(comp): - savers = comp.GetToolList(False, "Saver").values() - for saver in savers: - filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0] - - # Get old relative path to the "fusion" app folder so we can apply - # the same relative path afterwards. If not found fall back to - # using just a version folder with the filename in it. - # todo: can we make this less magical? - relpath = filepath.replace("\\", "/").rsplit("/fusion/", 1)[-1] - - if os.path.isabs(relpath): - # If not relative to a "/fusion/" folder then just use filename - filename = os.path.basename(filepath) - log.warning("Can't parse relative path, refactoring to only" - "filename in a version folder: %s" % filename) - new_path = os.path.join(renders_version, filename) - - else: - # Else reuse the relative path - # Reset version in folder and filename in the relative path - # to v001. 
The version should be is only detected when prefixed - # with either `_v` (underscore) or `/v` (folder) - version_pattern = r"(/|_)v[0-9]+" - if re.search(version_pattern, relpath): - new_relpath = re.sub(version_pattern, - r"\1v001", - relpath) - log.info("Resetting version folders to v001: " - "%s -> %s" % (relpath, new_relpath)) - relpath = new_relpath - - new_path = os.path.join(new_work, relpath) - - saver["Clip"] = new_path - - -def update_frame_range(comp, representations): - """Update the frame range of the comp and render length - - The start and end frame are based on the lowest start frame and the highest - end frame - - Args: - comp (object): current focused comp - representations (list) collection of dicts - - Returns: - None - - """ - - version_ids = [r["parent"] for r in representations] - versions = io.find({"type": "version", "_id": {"$in": version_ids}}) - versions = list(versions) - - versions = [v for v in versions - if v["data"].get("frameStart", None) is not None] - - if not versions: - log.warning("No versions loaded to match frame range to.\n") - return - - start = min(v["data"]["frameStart"] for v in versions) - end = max(v["data"]["frameEnd"] for v in versions) - - lib.update_frame_range(start, end, comp=comp) - - -def switch(asset_name, filepath=None, new=True): - """Switch the current containers of the file to the other asset (shot) - - Args: - filepath (str): file path of the comp file - asset_name (str): name of the asset (shot) - new (bool): Save updated comp under a different name - - Returns: - comp path (str): new filepath of the updated comp - - """ - - # If filepath provided, ensure it is valid absolute path - if filepath is not None: - if not os.path.isabs(filepath): - filepath = os.path.abspath(filepath) - - assert os.path.exists(filepath), "%s must exist " % filepath - - # Assert asset name exists - # It is better to do this here then to wait till switch_shot does it - asset = io.find_one({"type": "asset", "name": asset_name}) - assert asset, "Could not find '%s' in the database" % asset_name - - # Get current project - self._project = io.find_one({"type": "project", - "name": avalon.api.Session["AVALON_PROJECT"]}) - - # Go to comp - if not filepath: - current_comp = api.get_current_comp() - assert current_comp is not None, "Could not find current comp" - else: - fusion = _get_fusion_instance() - current_comp = fusion.LoadComp(filepath, quiet=True) - assert current_comp is not None, ( - "Fusion could not load '{}'").format(filepath) - - host = avalon.api.registered_host() - containers = list(host.ls()) - assert containers, "Nothing to update" - - representations = [] - for container in containers: - try: - representation = lib.switch_item( - container, - asset_name=asset_name) - representations.append(representation) - except Exception as e: - current_comp.Print("Error in switching! 
%s\n" % e.message) - - message = "Switched %i Loaders of the %i\n" % (len(representations), - len(containers)) - current_comp.Print(message) - - # Build the session to switch to - switch_to_session = avalon.api.Session.copy() - switch_to_session["AVALON_ASSET"] = asset['name'] - - if new: - comp_path = _format_filepath(switch_to_session) - - # Update savers output based on new session - _update_savers(current_comp, switch_to_session) - else: - comp_path = version_up(filepath) - - current_comp.Print(comp_path) - - current_comp.Print("\nUpdating frame range") - update_frame_range(current_comp, representations) - - current_comp.Save(comp_path) - - return comp_path - - -if __name__ == '__main__': - - # QUESTION: can we convert this to gui rather then standalone script? - # TODO: convert to gui tool - import argparse - - parser = argparse.ArgumentParser(description="Switch to a shot within an" - "existing comp file") - - parser.add_argument("--file_path", - type=str, - default=True, - help="File path of the comp to use") - - parser.add_argument("--asset_name", - type=str, - default=True, - help="Name of the asset (shot) to switch") - - args, unknown = parser.parse_args() - - avalon.api.install(api) - switch(args.asset_name, args.file_path) - - sys.exit(0) From 0941c186dffb75759f59fa2ca46bdde0fd2dc9c3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 29 Mar 2022 19:44:42 +0200 Subject: [PATCH 0012/2550] Use new OpenPype Event System implemented with #2846 --- openpype/hosts/fusion/api/menu.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 7b23e2bc2b..b3d8e203c3 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -6,6 +6,7 @@ from avalon import api from openpype.tools.utils import host_tools from openpype.style import load_stylesheet +from openpype.lib import register_event_callback from openpype.hosts.fusion.scripts import ( set_rendermode, duplicate_with_inputs @@ -133,7 +134,7 @@ class OpenPypeMenu(QtWidgets.QWidget): fn() self._callbacks.append(_callback) - api.on(name, _callback) + register_event_callback(name, _callback) def deregister_all_callbacks(self): self._callbacks[:] = [] From 56892878c97b3244ef313cbd644241679773ecd3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 12:21:58 +0200 Subject: [PATCH 0013/2550] Cosmetics --- openpype/hosts/fusion/api/menu.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index b3d8e203c3..42dacfa0c0 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -185,7 +185,6 @@ class OpenPypeMenu(QtWidgets.QWidget): set_framerange() - def launch_openpype_menu(): app = QtWidgets.QApplication(sys.argv) app.setQuitOnLastWindowClosed(False) From 2d4061771d809c6d240e13965e845f04696ac0f9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 14:45:11 +0200 Subject: [PATCH 0014/2550] Shut down fusionscript process when OpenPype menu gets closed along with other Qt windows --- openpype/hosts/fusion/api/menu.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 42dacfa0c0..4a646c5e8f 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -187,7 +187,6 @@ class OpenPypeMenu(QtWidgets.QWidget): def launch_openpype_menu(): app = QtWidgets.QApplication(sys.argv) - 
app.setQuitOnLastWindowClosed(False) pype_menu = OpenPypeMenu() @@ -196,4 +195,6 @@ def launch_openpype_menu(): pype_menu.show() - sys.exit(app.exec_()) + result = app.exec_() + print("Shutting down..") + sys.exit(result) From 53382a1960e23dbc80235f9d0f25c79e8dff6d06 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 16:02:43 +0200 Subject: [PATCH 0015/2550] Draft implementation of Fusion heartbeat/pulse so UI closes after Fusion is closed --- openpype/hosts/fusion/api/menu.py | 6 +++ openpype/hosts/fusion/api/pulse.py | 62 ++++++++++++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 openpype/hosts/fusion/api/pulse.py diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 4a646c5e8f..823670b9cf 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -15,6 +15,8 @@ from openpype.hosts.fusion.api import ( set_framerange ) +from .pulse import FusionPulse + class Spacer(QtWidgets.QWidget): def __init__(self, height, *args, **kwargs): @@ -121,6 +123,10 @@ class OpenPypeMenu(QtWidgets.QWidget): self.register_callback("taskChanged", self.on_task_changed) self.on_task_changed() + # Force close current process if Fusion is closed + self._pulse = FusionPulse(parent=self) + self._pulse.start() + def on_task_changed(self): # Update current context label label = api.Session["AVALON_ASSET"] diff --git a/openpype/hosts/fusion/api/pulse.py b/openpype/hosts/fusion/api/pulse.py new file mode 100644 index 0000000000..cad1c74e13 --- /dev/null +++ b/openpype/hosts/fusion/api/pulse.py @@ -0,0 +1,62 @@ +import os +import sys + +from Qt import QtCore, QtWidgets + + +class PulseThread(QtCore.QThread): + no_response = QtCore.Signal() + + def __init__(self, parent=None): + super(PulseThread, self).__init__(parent=parent) + + # Interval in milliseconds + self._interval = os.environ.get("OPENPYPE_FUSION_PULSE_INTERVAL", 1000) + + def run(self): + app = getattr(sys.modules["__main__"], "app", None) + + while True: + if self.isInterruptionRequested(): + return + try: + app.Test() + except Exception: + self.no_response.emit() + + self.msleep(self._interval) + + +class FusionPulse(QtCore.QObject): + """A Timer that checks whether host app is still alive. + + This checks whether the Fusion process is still active at a certain + interval. This is useful due to how Fusion runs its scripts. Each script + runs in its own environment and process (a `fusionscript` process each). + If Fusion would go down and we have a UI process running at the same time + then it can happen that the `fusionscript.exe` will remain running in the + background in limbo due to e.g. a Qt interface's QApplication that keeps + running infinitely. + + Warning: + When the host is not detected this will automatically exit + the current process. 
+ + """ + + def __init__(self, parent=None): + super(FusionPulse, self).__init__(parent=parent) + self._thread = PulseThread(parent=self) + self._thread.no_response.connect(self.on_no_response) + + def on_no_response(self): + print("Pulse detected no response from Fusion..") + sys.exit(1) + + def start(self): + self._thread.start() + + def stop(self): + self._thread.requestInterruption() + + From 84ab53664f794314513fd6530890c4a1f5fca34b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 16:11:35 +0200 Subject: [PATCH 0016/2550] Take the `interval` variable into the run thread - not sure if really better --- openpype/hosts/fusion/api/pulse.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/pulse.py b/openpype/hosts/fusion/api/pulse.py index cad1c74e13..6d448a31c9 100644 --- a/openpype/hosts/fusion/api/pulse.py +++ b/openpype/hosts/fusion/api/pulse.py @@ -10,12 +10,12 @@ class PulseThread(QtCore.QThread): def __init__(self, parent=None): super(PulseThread, self).__init__(parent=parent) - # Interval in milliseconds - self._interval = os.environ.get("OPENPYPE_FUSION_PULSE_INTERVAL", 1000) - def run(self): app = getattr(sys.modules["__main__"], "app", None) + # Interval in milliseconds + interval = os.environ.get("OPENPYPE_FUSION_PULSE_INTERVAL", 1000) + while True: if self.isInterruptionRequested(): return From 0e9dd34b3d015a6d2ea96f32820aad378ecf09dd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 30 Mar 2022 21:32:51 +0200 Subject: [PATCH 0017/2550] Fix code --- openpype/hosts/fusion/api/pulse.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/fusion/api/pulse.py b/openpype/hosts/fusion/api/pulse.py index 6d448a31c9..5b61f3bd63 100644 --- a/openpype/hosts/fusion/api/pulse.py +++ b/openpype/hosts/fusion/api/pulse.py @@ -1,7 +1,7 @@ import os import sys -from Qt import QtCore, QtWidgets +from Qt import QtCore class PulseThread(QtCore.QThread): @@ -24,7 +24,7 @@ class PulseThread(QtCore.QThread): except Exception: self.no_response.emit() - self.msleep(self._interval) + self.msleep(interval) class FusionPulse(QtCore.QObject): @@ -58,5 +58,3 @@ class FusionPulse(QtCore.QObject): def stop(self): self._thread.requestInterruption() - - From bff817afd999ddf2536e48340d0bae0ce049b1cf Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 8 Apr 2022 18:59:33 +0200 Subject: [PATCH 0018/2550] wip on new publisher conversion --- openpype/hosts/houdini/api/lib.py | 18 +++++++++++++ openpype/hosts/houdini/api/plugin.py | 24 +++++++++++++++-- .../hosts/houdini/hooks/set_operators_path.py | 25 ++++++++++++++++++ openpype/hosts/houdini/otls/OpenPype.hda | Bin 0 -> 8238 bytes .../plugins/create/create_pointcache.py | 5 +++- 5 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/houdini/hooks/set_operators_path.py create mode 100644 openpype/hosts/houdini/otls/OpenPype.hda diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index bd41618856..911df31714 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -453,3 +453,21 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) + + +def load_creator_code_to_asset( + otl_file_path, node_type_name, source_file_path): + # type: (str, str, str) -> None + # Load the Python source code. 
+ with open(source_file_path, "rb") as src: + source = src.read() + + # Find the asset definition in the otl file. + definitions = [definition + for definition in hou.hda.definitionsInFile(otl_file_path) + if definition.nodeTypeName() == node_type_name] + assert(len(definitions) == 1) + definition = definitions[0] + + # Store the source code into the PythonCook section of the asset. + definition.addSection("PythonCook", source) \ No newline at end of file diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2bbb65aa05..64abfe9ef9 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -2,11 +2,17 @@ """Houdini specific Avalon/Pyblish plugin definitions.""" import sys import six - +from abc import ( + ABCMeta, + abstractmethod, + abstractproperty +) +import six import hou from openpype.pipeline import ( CreatorError, - LegacyCreator + LegacyCreator, + Creator as NewCreator ) from .lib import imprint @@ -84,3 +90,17 @@ class Creator(LegacyCreator): OpenPypeCreatorError, OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator): + _nodes = [] + + def collect_instances(self): + pass + + def update_instances(self, update_list): + pass + + def remove_instances(self, instances): + pass \ No newline at end of file diff --git a/openpype/hosts/houdini/hooks/set_operators_path.py b/openpype/hosts/houdini/hooks/set_operators_path.py new file mode 100644 index 0000000000..6f26baaa78 --- /dev/null +++ b/openpype/hosts/houdini/hooks/set_operators_path.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +from openpype.lib import PreLaunchHook +import os + + +class SetOperatorsPath(PreLaunchHook): + """Set path to OpenPype assets folder.""" + + app_groups = ["houdini"] + + def execute(self): + hou_path = self.launch_context.env.get("HOUDINIPATH") + + openpype_assets = os.path.join( + os.getenv("OPENPYPE_REPOS_ROOT"), + "openpype", "hosts", "houdini", "hda" + ) + + if not hou_path: + self.launch_context.env["HOUDINIPATH"] = openpype_assets + return + + self.launch_context.env["HOUDINIPATH"] = "{}{}{}".format( + hou_path, os.pathsep, openpype_assets + ) diff --git a/openpype/hosts/houdini/otls/OpenPype.hda b/openpype/hosts/houdini/otls/OpenPype.hda new file mode 100644 index 0000000000000000000000000000000000000000..b34418d422b69282353dc134b1c4855e377c1039 GIT binary patch literal 8238 zcmcgx?`{)E5O)fq!ceJHszg;pmjj7bs$&9tK*0%eY=@*xYzsR92_frzx3(9bcTc-} zi38#R`WF4rZ@d7{)fZ@Ib}x?4PMkQ{wiLyloj<>s{W~+;<>H&v$>$1u{cgKlEWK&e zN`?A%r5ulaX;wS`!uKCKBJvq$%N^ehSW~+42&i9>E9SUeX}+hP&U%u%nl?hgxb|GH zLoMIk|6;x+__-ghnQ!maM0DCufw`+w) zc%(am!_VW-H7j!bbM(Ijy#%2d4%fH9cC*ObK(uR~WTCcVw~Mil>8deP5Tct(-7ey2 zJn~~5oU2L^QmGkLl~6Omm1SC5j+w4*(I8Bvev(6iH|jzJYFTw?(6U2Ut^xaJV7a*& zaS!#B-5x~yP9JEuVpZRl`dYf1ET98Zcm7JHmj1@^`^5S{lyQQzgd}6LLflA;o~xPX z2Eh?&Q%)sJu%AwUOcVHUFnWDV$_!bxXAA~zlLptF2@~$5jTZ1YBp=h)9mo9qWT|Z_ zA|xXO{2&bc?)~GATMRpOAeI3!l@zP7rB76jB2a!RcV&)8N}A$Z|@^ zuY`tKJ`FDy#;<`@^z?Kez5=dJ#^?g!4FGOZXy%|sCT=n)8^AduQc3-j5!GM^&pSln zG=Qq?Kxkri$UV;R1S1QXP)Qs8IY;ZG2O2ZmyJcaql~%1PCglxxP@$z?qAlf>(=!1qLNs_jxhAyNP- z$`xG5g3lWzJWb&B0534r7&SI|0hG84_bFf&(sE~To=lU^Hdao64wB1S)Z}z% z`UkUj;dIuh8d81!18f=?C+@aZKY_;f)4wa-)-xJT1KES@GZSpI`GhLbVta>{(sensI#L>jgxJV2%i zWW@;COnf}oJ3XRbfiW((0Z4d|O_j3k+d>^NsSu=UNhfCxG-O_PEE$}9@a!{sXo_?- z8i02oO>8nWQZSqgR$FjQ24yl_ixMgcSjA2X&Kx0j1E!3oDg4LJ;)N~GNYMr)=fP;I za9$)edCeqk;rkCV-!bu-$8&m&v&9E5mrvUU!7F>vn08w%jPtF_Y1;DMfF; 
zWe_}io`%X(i}j1pX9=l~+NVdAz2H6rUC^3+186b-;s=PkBIJ2;)c+9^Grqq zddfmmu+ecf(T8Fb1oA5kDV%_apFpULL1-X}L(s{1nz%%P4R8hhStgmxI<{VNh}-m8 z)|>}h#eAb!+RX3m)Eo6mWyi4%m3U+)zfl4bgHXhj?LwvOV6b96yOc*}@$_}9@&FEJ zXunt<;KDFMkEKjCuFv(##vi%t2+gX?BCa8Q6Rp5&{7}g5n3+mwtQf!Q`Hh`YBVR5y z%K6>Wz-r8Lu2FdNLu8}%B5N}Z`!25()hcIT9*GTL;+4TOHR|k$?yN9l9!}5A)+(9QE{`T(OdM;}%nKf(Rz-rEp zEa$OqAv7$%N&SLXCzV^UBm)W+NxWCn~DxWr^b_1jAtUekt0V_L%HcEnK0m%mAKJ%z@ yR^PsZqcL^YdQ`(s+F1_$gAOU=NcdwZ33n_h;f*Cta^_fQ#1~6WxME4Cd-6X`t literal 0 HcmV?d00001 diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index feb683edf6..27112260ad 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,7 +1,7 @@ from openpype.hosts.houdini.api import plugin -class CreatePointCache(plugin.Creator): +class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" name = "pointcache" @@ -9,6 +9,9 @@ class CreatePointCache(plugin.Creator): family = "pointcache" icon = "gears" + def create(self, subset_name, instance_data, pre_create_data): + pass + def __init__(self, *args, **kwargs): super(CreatePointCache, self).__init__(*args, **kwargs) From 0ac27ab609a1198255e2fdad846f7be698e0e725 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 12 May 2022 13:19:29 +0200 Subject: [PATCH 0019/2550] start of integration --- openpype/hosts/3dsmax/__init__.py | 0 openpype/hosts/3dsmax/api/__init__.py | 0 openpype/hosts/3dsmax/plugins/__init__.py | 0 openpype/hosts/3dsmax/startup/startup.ms | 8 ++++ openpype/hosts/3dsmax/startup/startup.py | 2 + openpype/resources/app_icons/3dsmax.png | Bin 0 -> 12804 bytes .../system_settings/applications.json | 29 +++++++++++++ openpype/settings/entities/enum_entity.py | 1 + .../host_settings/schema_3dsmax.json | 39 ++++++++++++++++++ .../system_schema/schema_applications.json | 4 ++ 10 files changed, 83 insertions(+) create mode 100644 openpype/hosts/3dsmax/__init__.py create mode 100644 openpype/hosts/3dsmax/api/__init__.py create mode 100644 openpype/hosts/3dsmax/plugins/__init__.py create mode 100644 openpype/hosts/3dsmax/startup/startup.ms create mode 100644 openpype/hosts/3dsmax/startup/startup.py create mode 100644 openpype/resources/app_icons/3dsmax.png create mode 100644 openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json diff --git a/openpype/hosts/3dsmax/__init__.py b/openpype/hosts/3dsmax/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/3dsmax/api/__init__.py b/openpype/hosts/3dsmax/api/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/3dsmax/plugins/__init__.py b/openpype/hosts/3dsmax/plugins/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/3dsmax/startup/startup.ms b/openpype/hosts/3dsmax/startup/startup.ms new file mode 100644 index 0000000000..94318afb01 --- /dev/null +++ b/openpype/hosts/3dsmax/startup/startup.ms @@ -0,0 +1,8 @@ +-- OpenPype Init Script +( + local sysPath = dotNetClass "System.IO.Path" + local sysDir = dotNetClass "System.IO.Directory" + local startup = sysPath.Combine (sysPath.GetDirectoryName getSourceFile) "startup.py" + + python.ExecuteFile startup +) \ No newline at end of file diff --git a/openpype/hosts/3dsmax/startup/startup.py b/openpype/hosts/3dsmax/startup/startup.py new file mode 100644 index 0000000000..dd8c08a6b9 --- /dev/null +++ 
b/openpype/hosts/3dsmax/startup/startup.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +print("inside python startup") \ No newline at end of file diff --git a/openpype/resources/app_icons/3dsmax.png b/openpype/resources/app_icons/3dsmax.png new file mode 100644 index 0000000000000000000000000000000000000000..9ebdf6099f6ac279ebaeccdf21a658a34da5e7b6 GIT binary patch literal 12804 zcmcJ0_gj-q&@M#;LPtR9O&|~uL5fK4O*#ol6fhK}S&-hOhAusTs7NnK2mxu*jUX6Q zdJ(0FA{~_8`5xZy`~&B@&iNt9l_$Hiv$M0a_uR8durxPh0`Y(-C@7eWjr6Q3C@7`R zUvw9NCqZE;e!vH%kF}u=1>q~-67c2118p;H3W}OkhGP^B@SXmN5yFRpg89Swi?Zi& zg$o6R+zVqpZQDSHwb%5ye9(7$Ef0*v-@Vz}dRIh+pr^m8B_ynRm9I+#H*QZY!Qrbc^HJ*$UXz7JZ;saBVHp>%jM>f>E zYc|vx+rm0@lHzj}DfTTI_EBxy68>u{`rUH0_XioCuRe@iGHx?f%Pn94E%_jACeV9Hac~(PZ9tUT7sGm`Z zUN=BK(nXa<{NUif(@?QFpM%>J%c-dSas4Xo^znVD#zrXA;rJpnuS096<$z0ZcADkl z+1GepKrAt!+QYH&YcEKd>61V!;_j13DOnXL1FrQ+x`+Kc6Q+rpXshf#OE`V>t%MQ* z89T?RlWP{WG_|)j)YwMZMRCfrR&b(`A%a@77u<&c<6XEXzEvJpIaZS&hdTfLqBjII zyiI`sqxGt@k#Oa1bXdx)T{KeDzbk6NvmTKA@>J2b6B z!T1Z`TI_aanyfIgZ6mC{HtE0Lt;&B$7Gbmo2xc9WSUerh zZw<-y{q2P!(l3>Tz*6o~LT6o`65D2H; z3X9ENAvi;HC$(Vv+_LIzD1^IZUj{fyG?rN#rAT*&`&_+!cBsZqor}x-d*u0OOd=bf zA9}eO<@Cwg%rnT|z6;~=?fA~7Yk=sEBY9Bh+4(~+8rUsGyji1J;C&afRFg)*yEW5&&P61w&WOS0HB7-6moQkE#Y7=SEoRoTYcN%Lk z!t|_#a}RPyT05`S_%K?X>n;t((+M>o+gqG1=m%X0YYW-JRabzQx0)X9lJOyry^!WvH; zAa^QOJ5eC2$bk;`GA7!kKg+r7g`8T#{j{Vyq1;8$ZXC79cSFyEp)$T%-B4o&N;H%f zPX_GhrUkpFf2kJ)^ckz&j3Z$w?{F76yVi#W`Sg-WVB8f;xZ>YNdE$hFU_4eRGxt&t zA*UM*aRlRedIj(E-r**pT+zm#pZd^FZ>N0V_Xkm+p)~2xo-(s3Wv2l%6GkvW6eKfw z^-oES71zR*KeT8l`m6cqM_BBk7A#&X>1Kio&1x_rx%O!k2}H4x@8;UHEY%=CLLy^( zGjlWV3mY=}ph?SLI@?10Ak@ma6_hT(aAC$UEX@OTP~91N-VdLbp7z4&v)26~Wu!}j3wjZcUN zm77ZrZJl7l zqJmaUZ@;P#G>yJkIrR4@R?Rb8vO#nR)KvMB=W$nBJwCaRh^25xnLR$f-SF7%J*;x5Lgw@eu8mY;75woD?W|!JrWU_YxQNX-yLY|E`VP<~5HIo~ zBEyfAFFkzyL}w6_aMKQ6Fe!r&@Wn@lEJ4Qgj2IbZA@G-F`kPr#TDmfK|(rGL8+-RPeIR__`IBC*p+`N{p_E&=wL> zmYm~&MYd+u{5B!q462pl_irMY?W94kpeT<86E}e+*p%zZb;G^5BQ%kN;pc#v1WFcX z=kU93mnWy$4O$y;jQ_hC?7Pt`l}MDX4y#IOc+$3Mymk8_&bU}+*VXj`8bcX4-rcVR zc8zIDOp_;j4yMNOP1;w3M1T(Q4a_&S#=FR-=AK{OlB{4#mT+z-n~z89HE`d#_)2PBXO60(Jst69MdX_?Y*O56B zh^yWgk5)Z@Q6U_Ud%+t6G|j+U8t>;<3dwI56zo1!j^}iTG1^3tq^`VvT{i7%$UUkMenNv#^}k=y^Orb#O+-{=3XqC8lML2unl+ZqiVlg}WshQGZS)X*Ut#=vKRzH~ zJeF{p(jvL~vt~c+!%?YGd7t~go>zi*i+ey+i6W4+5+Vg**lRfCkF4d|xZ~qz;o2Q{ z*abm-l*f|4{(Ql{`N3Fanv-YM2$mErlA3CJ7Q+U$MMRdMUA61bq~mlErv&R)DS%_X z4$59`I(jr#n-NQT%J@>=DeIs5hSge*eHqeR3Xu(`M$nha#VLwI(#p=)Y&X-*bO`oi-`spd z#Z|5tuTVhDiy&JWlq8qGzHe=ycUOm=sqi@&9yp;h{x2teS8(Cufa_?&f|a*x?%xj| zvphv}YXdsSZE|ioQ93xdpYpyI<)%y^LrlJN{5c?au8UHn2e&c=hlKGWpky*j(X~}M zx}cw)>GA@aHkgNGx*(ba936>DX_8098gu{4HUAHl&ubfq*`wDvV3#NmpPavcQI5-m zik)Wt6EWtV7l#x+fPh`4flZW_t(Kjde^1=zvcja4-)QX>SH%nH`J4H88I*ONf%cpjTY87TCFSD@==I?wFFC)AVXD4rVuoq!K=HRyK5lao%9*y(@Hxhu^cG z9LGk)oBufR?Na|)5C!MHtlBVn3CL_#)&z7aSETi_z_L9zHn>r

{SJ~!Y9>I3PqWedKP*SW{qUGfR^((-q<{G5uF z=Swwx2i~q_{TRd`=lM#obHE}a@k(CTAJXLVUb&ivPk=~k0{+s4xPOB&oSchk#Y@-N z&Aoc}!thRVt&YgjQ9zTa~O5;S(^--a|1z*l*QC;GD78qa`hLkLR7&#Ck<|OaeJea)>07gz`5RO6% zrn5Sas=aE8I`~(fJYoD`Qo&w&JgUaZZfG-#)!W|dO>6@r|HDr!birZIJHZe&4%pWT z^xmW9Pq*tOYw=nRq|Ioz9)Dc_i((Ta?zqw{`AIj1%*qFR4`S$}9PFqUp~f{InC--> zn9vwee;FL_W9eruu98tiX^f<3!#_hib!&s~BI&jx=oNY+(sB4$rYN2k6%}4NN zElD-7M5_PMyQ}U%iia9hI)7`XVK*>7T_$HGHY2+~1;$nb+?d+ozuskbg0Y zgW1QS+^#fQsL~5Ae&hD{WpMY?ITsm7o6hrSy4x0A`S%)>8D7dy_TIKOr7eC+JdUi%a_sOLi&#Eb!%AaT34hak`qKyR$#6Z#5A3X!n zBon$`kA8vC4b9UJy<~Jyq;~-^*vZB~&FO`!I@=vNSwu-Mwm((C8FZw`W4>d;9A7(; zqmNQdZp?-r$uwR;D0;fM77~|#S#V#zW-E5P%K-2JL$BUGgFCs?H~f$Ruko9~O5{0a zRSuZ0P%Zo9^gm|hv04+rXL(H1w;m zUOEW;nK^6Q_yn?qf0`3?&GIG2But1hE6wTT-8@}fUs6{hH@C1DA~;|dgm3^?v*I`Y zu5C_7)Dpflh><6Eh&2yMiYj6Fm2USx>^jTg$_IFcov?teG$8ME0ixAmEuI36(Kv^o zT*l=1`P`c3rNK+vAgoN^*Y%cqK&_$uCSE_TaHG}uX*G(Ep*V*m?Zhk%+bLU)=mUXFP{>P-qG^ zuCY3&7=4uM^XEi!-55nXOzN9T6L$A{#{hm&X%OyW1!~HN#S_i}JKX;+Ffjkf6Mjj2 z+3(j5h#8Nvg8N0*H9VRJ3!^d2xAHRTSkCg*?>tVQ3CRMeBF8UQIv@yuD;mp%j)+o^ zZnE=-la<5UrOb2$$KnZJ_iDpm%<|zG(#W(z~p&T=~(3~trw1XT5hkyU!5gD5_pT)Xhs9e+d>PY1aV zN--Y8HU2?_OGYxCc}PS3-2uGK_x9)xk@==2ri|aYB-Tf&y1W>7gWH}G*bqW82fE8h z@46SI`Y346Sm}0Ru|&n@L`}wK5G2LqLMe@_9pC7iOCR~To_1Rq*ztfM!o*E*$qoP- z#WG$(ksL6KRj{DI;DidQ=-Wm7bly7r*1^P*!BcH347whd4=_qp?x^uss_WNDa|D$y z37YeFh+4}2vD1pQdQ*yqsf_Z?LqMZn`p95MogcG5a)?2H%R?t}{yteXz2XE3;e|5s zd+=y^zqawzgK`18u8?R=mCoh=?4wyqGi?6(hE$jtTxnkSs!PB#0j1A$Qs8;q1;@B* zl}#g2p}Ua9pr`>=O~?M%Z%%+{DKfO&;ax1{=&iqHgp;LX;5D3>WCzXJlwpT@(+#vx zS82R#6+@dOJ9TZ-90#^&gf$%rKLr49mQNcynyE(S=_7%#sb_gUp2Dx2)kkVwux-q& z=f9g@m8--TRXiip-;XL^Z^p)u$P`96M0UBm5~?uU1ugH{kj>~}hP;Ke7A}%x5RvU= z8us{&OfQy7qI)~fxNA?NgL1u4A{HL;)fC4al*Y5pW)Ib$?G;S2LWAH8w+JfbeN?upRS@TYUb*7}Cl{LsJRMqe$LH z{XE=3a(WaNL+-RJJL3{!BT3{dMhvPOc!#yJy1VRLU6F2N&7Xm*=~xAqcs35bONQ25 zufk_3v1+a+jjRwyU0YUrluOzm6ae4MT1Wz+az2W9Tt&C_r*fz(;eNFyFH^Vi!SQV9 z?fP^&v+}Ix22ieD`_VWOnI2y`Kes-p&&d^2V=Ka^r($$45ayur^QMu86;YA!K6?HT z$fJ5ny4rDvmT;B1s>;#eU(eTX=UvMndhx{4^_bt(n4=pG%hw|>7#c#4me&1C9^U$L zTLfEsy%|J{#@tBD9@tF@XsRiLv7K9u_2a8|=Rx+x6EB{j)H+@evc#bLp2-%FUsr(QWiS^<%jvAYSNs7B2BszFMrig6tGPLiuvC zgK!aW_o<-rbiMQvOv(97oFe$Hn<#x)(Xdbjc7r-Ju%`5Im_(eu^`@m^0{Q+524NwnqcWHU)k=V&0w5 zDt~4KNdy#I(6@18&>|}b*Ga9-%mO`$R-C&Ox1S=-^nBM6MWY7`iG1%(R|Cp>Lu3jZB(y-wnRE%4+gg=rSFj<-O+)B>|5}quU zsfC^kt55iVEP-e>IxCD-oBB?DJsO72d*VWr1549J))C9HTTLHalfj#HJjJ{_xSUjX z|Le%nYN`CsCbqxGtF@%{zDjdkLi|!ob7afS9yIAGhY^FPhpU*39<1VxHr(|0M_$eL zg_vJz|7}c?Ktzw=%>=l{O^Vj`@@&H(zhcxcj)H@r8+F|7lBxGAHFY>Z5`0Y4U?19+ITwyWvf-_TUkP>l|Im`lTKUA1R8i*X+T9)aR)Q^DIV zSc@F6Gy2DS-UOPb-h&Uu1w4wUdorr~NYUkFPl!s8PKd5o*JWmjJ{g5A3koq1ximz`Hd%m<1zpKo>l7S=&}=9%qzaknxIMOetkjp>2*-$MCkG=_Yrve70>9QK0!+JiS$w;CQPJ_c)d=(6*J`sA*T4tGeDgl;@* z^ypZXAAP#{=|5^Yd98xgO4}Nvg|sPK9KjYELCjD;^V}!^&(Let&&BgyPG%(7_w5kQ zSu=`oa-+LVe-=vykFq&N!km!{Dwd1%nfNJEm75Mjk~YO5E9w+hN0Na!9W!6w`d5R{ zb9S%0EOc+G{Nm~a%DNqjYqrt&F`+b{1QL;1-MB0$&t%ftgQ?VhNy<36jqEODMVH%3dxl3oDe!gP>hIuFLE z92%U*Xx)D2@!F%+n1Wi9gQ9{`MQzz{)=(tijf#jMUqT4QeQmpQziS3Sl#`9dF(1V6 zfVc*AkaXk#!a*3cl+ysduURJr#1H==Z*Qa-2T=S2PELwVL#&t>8%AKh<@AtXr6E%( zQW9#FEC0>N(D+#y->+F`F#Ng^H{%WHBpI?(!~5t zeQk@pe3$I98vB3Rq~$!AXokb@HOec#KFY#)dsjv@C)FAFO-GH-q=K?dJ9xrqWnj<% z`3yxlf{kTS&cwW|q*P#hbt6}u1ICFh4V_q;C<~1*i+Z$q|3YS2%@FmF?&@appe7y8 z6ZwVvy|Uk>4~tT?U_6BoBxtqcS(``4w)Lod03o5s(x}1R(UDqGR-|gs`Rd6fD}GRgqg(niQp(Y26>33@F!!JCSv@c9ml<-1!Z}bl-`_D3M z+zfU}tH*^F5x$FWnD3|BpzcV;N>`<*St}A|nj4 z*7G)U?vs&Dt&JkUvQcTH&|RM11ASkwlM`8nJ`jwDX;SLEe9@fTSL``yK@ci8@1|b> 
zw`L_IbIh2eb_QT!xk93d&_-P_XB@8jzQ+VO$22(pO<{gG{JMLMKlCmb@5ZEzFOvgW(m1kbFWIj-^TZxO&{24yv2FJD~0+aX+65{IR<0_yc44=6*3`jF1SF zr7tDaPRyl=_@5T0)D7iu*VcmEu_g9x=?Fj{HdO12%!f&o#6unXW{MT*1? zH}B5p-E$$g?h0#u-;%xVWNeI#Cp7gJ02~3+T>Z@_Rd^0nyFt;LaKJWSuO>9R%!>P{Db8gPE2bYD7Qxs;K8*}euhB@D)Y)c)7@T88-> zg}a!7doyk$aS#Js6rmT|`0C}|%L=J75I2+6^Yv5mv?FMv#JZ;2u;;BKb<{j!A9TMA zuQ0qgbLwyK-;WoHq#JI!BxPr-Ex#@kty8-v@d|?*iQregV*lxNPVQJmQU#Tv)nFOY8M8e`E&+M+sXM3* zU|%TzUtAaZQB>!LlbQ+(@cVr>AMkBOE%e&wcbJ9lLf?M%_v6(r-y1r|d19G`N47?S zm`{AttLfFo4TFw$C#o@5>Cp2Ro3!6&^oN<-?{-~+zSyd?s8sSo&8uFX_3@cMi~3q4 zo$a|d^Ug-oV*UWt8MLPAUok8YboK8c=JS4+1eK$w`d=PLphk6Tqo#+ddSziEHT-V< zSRnK9jPF5Sfsc$`*gV9exfK@rSbJQETL<+{Tl1aqd%HK`4x_i)cfZffUB*t*gj_p) z{_=QbGrT^fC;Y^7R9smszvt`+>iw7S&bB@5%-P!GX--c;$Ix|^(Q6BOVH%;bfrK{A z)0%+N<(@`|Ky9Y9YL*P9XV>EM+P98oLLBW+I(fAbA~j^sG@#7=qZ739w&S=ofPqFZ z{P<{Qw2(!uz@)u`>P+LesGwDN+8NF-baO%Tu@T&8Fx$v?r(=3&nP0a3xRyt(nqdhx zu;Vc(vcTAJ_H5^e`u7MG@VqnwmbmkqKPC@ z&2f_Bs#PkfpD^g(f5(nzSY~!P`DQ6Pj%;2%wz0B}@Q%0(to%QArZ$4l?wq*k!PDNK zeQUcDw!sqqm%z2YpH||>N_&CbcM1u_&<=nA+=vbQdPCzZ=){M(phL(M3^qDh6FmFd zF0v0ldj5F(X6kQ7zyYjcb}$QX?9N0M3La6j6%bF*3;A28jfaErM4^aF8VANZEK{AL!n5JyGo*O+BZ2g@(nqZ`RUD z2HFfp`^VMK=FbgHUeG9aEUyYbfep1GdrBQopRW31N`P`Bpf*}}JXVkDULl?EXErtYs=_T{@fzZqmJ z!qYh}Yo-<~TvgEqJW~eM#`mOneg8d+uL9-t!P?`HH&tO>oLrzG!_e*G<vLU}i)34wBovx*{k&O6xztsH-ar z;fI-f=_>6q?130Px_AwN%J@gmHit}?t0}wxG?8?=Hb2USeQGh;_%&srUTUEESE~T- zeQaa>(OrbyNMi92)wvgxYxLG(<)cBupOmxT5^bqbsfNEFBp*6z8rXG518x^gn%rA` zJ8k$Uf-mJqqea_kwBRbVdSSlM39 z?k5Re?B%6fd9NI0^NuTfn_Bs^ZioKW z-VpUU)-?V;KzER1=kQNsIIN3}%R?vn_eL4gO0YOKH_B$Rt}@5=gtooGf?J= zcTCotfMD<+ZYUR`+x@K&_x~E_#J;E!(y@tgaR2#`{h{qxO8)WG#)zWh33kt_Uo?ei zxuEik!SOF);Ap?Hwe0<70Cchyv8Mo4Jr$9ZvTimQ(un}CfDRB{DyLW3-v~uL@P*z3 zODk81N%P@OS2{*MbnK%6%WH@UZd8HwiZ7kW(9@fiqn`RaJ~-WLgKTSPh*sNG#|k+H zY(CKV6{fVW#g8APYl%|q64_lpA1)3iluy`UkUgP$z8%O1CrPkSw%6(Q$W4r$fw zID}r?2nLndCEi`xXDXjy{CTbD~`Y8aO=%z+R=K_)Xt-ths^QUKM3Sz_AZhs&*v_ISA7#~(q__5 zkT2R4glk;Pc;_hcqCm8kxZZweYP0Hb(1SgV0jG#lY2NzT73I+v%v%?Ii~BtBp#<=3 zh}Q4DFP9zu5|Bsxj%@3#7U4(+)Ai_1&y4@vSsgX?oWB31@_4e;n6UaKciTIKLW#sx zGHtzxI6i}depbZ~xgW_gVy{T0^FamEf0^dhSpyHvX#ur-PcvfwG@4)xqWW*bJDqR) zJy2~so{n1@!&_QIv0y6Q-6oaj6fj9=M8SWiz4M}N$ZWlnqJ{J`+LWBEog&#VV?%|} zl=5e1Av??JA-0~ky=VOjVX=9(Bf9Q%ZaFEPy&a+$fdF2^9U=u#5kmb zCoE|aP2O7x9_40p>un`nM57laJ{JXfvlvlt2^AYrSJStgj+or6AqtPBoteF^e=m@= z%~OBfykAo2e`WRS>jZ<6OY}eJn+Rrc{JkV1gDVQryn<-ExhIrEvRYOKHVFCSf$0!^u4m%*wmPO9x*K7xo_jj zEKBtFZ{Za{f928~Yq`cIQlHf=gYB=NP}}Q@uU{*+e9ydTDHu&$9?igO$yVQ>h^X+G zIN4jF^Cj!DROYO3oC$Y?&r?17FTR0Eq;b*LHNfpxz8JK@R}WD?R18hR=V$wGQ016vtDg84%TON=togFmI7ffcv#$L0TR4SJh$|L;y);LMS) z1-z};VZ7xzVLfnJ@_b-OVNvY(sJHC6+TnXzzbgD#Jc4q}ReaqPJrE})hUA~M9) ztpHIyYcmHaTs8xW6c;Gi?um{fXvj(bD15DdBed^?xpIks>VZwrK%%2`3LG#-=mSVTvCk4 zU!WNVmpJ}d5B~i#XJV+&^Nl!{bF7DE&DP@?w|#Nw%u?d${9EPkUF~rSB~noMYx|iQue7^h{!hlc0QNlE&N1rq^271^7KK9+!T!_U-6~33L&Vdnm-(UA;b+?8l!5$< z4|Ib)>?(!M@c_*qs-ipmgIP35)7r2K2AbE-@0k*N_1^qa6`akh&h(^xP5b0p+3D(<0KScwn)oZux2&o&FTa^E6zI%%?qgt?+QByfI zY`~AgK`@_Te5->wgL?C*fXGM1#yZq)Lb}!y#d~crR8OtRe?TSMW8pA?Ju>E7mE6tPv$$dP4xhIEK4l;Z#HF9vXw-|F;9CFRlXk_D@dkuaN~-|FO!yB^&0)-6>{ l$`H2yo-zHX_gp)p4J|sLUs3g50RFi_VXSYiN6@(!^*@6=RyY6v literal 0 HcmV?d00001 diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 0fb99a2608..aaecef3494 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1232,6 +1232,35 @@ } } }, + "3dsmax": { + "enabled": true, + "label": "3ds max", + "icon": 
"{}/app_icons/3dsmax.png", + "host_name": "3dsmax", + "environment": { + + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "djvview": { "enabled": true, "label": "DJV View", diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 92a397afba..b6004a3feb 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -154,6 +154,7 @@ class HostsEnumEntity(BaseEnumEntity): """ schema_types = ["hosts-enum"] all_host_names = [ + "3dsmax", "aftereffects", "blender", "celaction", diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json new file mode 100644 index 0000000000..f7c57298af --- /dev/null +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json @@ -0,0 +1,39 @@ +{ + "type": "dict", + "key": "3dsmax", + "label": "Autodesk 3ds Max", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "schema_template", + "name": "template_host_unchangables" + }, + { + "key": "environment", + "label": "Environment", + "type": "raw-json" + }, + { + "type": "dict-modifiable", + "key": "variants", + "collapsible_key": true, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } + } + ] +} diff --git a/openpype/settings/entities/schemas/system_schema/schema_applications.json b/openpype/settings/entities/schemas/system_schema/schema_applications.json index 20be33320d..36c5811496 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_applications.json +++ b/openpype/settings/entities/schemas/system_schema/schema_applications.json @@ -9,6 +9,10 @@ "type": "schema", "name": "schema_maya" }, + { + "type": "schema", + "name": "schema_3dsmax" + }, { "type": "schema", "name": "schema_flame" From 397ecb529e2a0524435055cfaedb5f465fa1a103 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 25 May 2022 17:46:35 +0200 Subject: [PATCH 0020/2550] nuke validate backdrop with help --- .../publish/help/validate_backdrop.xml | 36 +++++++++++++++++++ .../nuke/plugins/publish/validate_backdrop.py | 19 ++++++++-- 2 files changed, 52 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml new file mode 100644 index 0000000000..ab1b650773 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml @@ -0,0 +1,36 @@ + + + + Found multiple outputs + +## Invalid output amount + +Backdrop is having more than one outgoing connections. + +### How to repair? + +1. Use button `Center node in node graph` and navigate to the backdrop. +2. Reorganize nodes the way only one outgoing connection is present. +3. Hit reload button on the publisher. + + +### How could this happen? 
+ +More than one node, which are found above the backdrop, are linked downstream or more output connections from a node also linked downstream. + + + + Empty backdrop + +## Invalid empty backdrop + +Backdrop is empty and no nodes are found above it. + +### How to repair? + +1. Use button `Center node in node graph` and navigate to the backdrop. +2. Add any node above it or delete it. +3. Hit reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index e2843d146e..2a0d3309a0 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -1,6 +1,7 @@ import nuke import pyblish from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.pipeline import PublishXmlValidationError class SelectCenterInNodeGraph(pyblish.api.Action): @@ -63,8 +64,20 @@ class ValidateBackdrop(pyblish.api.InstancePlugin): msg_multiple_outputs = ( "Only one outcoming connection from " "\"{}\" is allowed").format(instance.data["name"]) - assert len(connections_out.keys()) <= 1, msg_multiple_outputs - msg_no_content = "No content on backdrop node: \"{}\"".format( + if len(connections_out.keys()) > 1: + raise PublishXmlValidationError( + self, + msg_multiple_outputs, + "multiple_outputs" + ) + + msg_no_nodes = "No content on backdrop node: \"{}\"".format( instance.data["name"]) - assert len(instance) > 1, msg_no_content + + if len(instance) == 0: + raise PublishXmlValidationError( + self, + msg_no_nodes, + "no_nodes" + ) From d097503f4de4828d4a30071769a26328a71f90f9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 25 May 2022 17:54:22 +0200 Subject: [PATCH 0021/2550] nuke validate write nodes with help --- .../publish/help/validate_write_nodes.xml | 17 ++++++++++++++ .../plugins/publish/validate_write_nodes.py | 22 +++++++++++-------- 2 files changed, 30 insertions(+), 9 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml new file mode 100644 index 0000000000..d209329434 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml @@ -0,0 +1,17 @@ + + + + Knobs values + +## Invalid node's knobs values + +Following write node knobs needs to be repaired: +{xml_msg} + +### How to repair? + +1. Use Repair button. +3. Hit Reload button on the publisher. 
+ + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index c0d5c8f402..94dabecc97 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,10 +1,10 @@ -import os import pyblish.api -import openpype.utils +from openpype.api import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( get_write_node_template_attr, get_node_path ) +from openpype.pipeline import PublishXmlValidationError @pyblish.api.log @@ -14,7 +14,7 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): icon = "wrench" def process(self, context, plugin): - instances = openpype.utils.filter_instances(context, plugin) + instances = get_errored_instances_from_context(context) for instance in instances: node = instance[1] @@ -60,13 +60,17 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): self.log.info(check) - msg = "Node's attribute `{0}` is not correct!\n" \ - "\nCorrect: `{1}` \n\nWrong: `{2}` \n\n" + msg = "Write node's knobs values are not correct!\n" + msg_add = "Knob `{0}` Correct: `{1}` Wrong: `{2}` \n" + xml_msg = "" if check: - print_msg = "" + dbg_msg = msg for item in check: - print_msg += msg.format(item[0], item[1], item[2]) - print_msg += "`RMB` click to the validator and `A` to fix!" + _msg_add = msg_add.format(item[0], item[1], item[2]) + dbg_msg += _msg_add + xml_msg += _msg_add - assert not check, print_msg + raise PublishXmlValidationError( + self, dbg_msg, {"xml_msg": xml_msg} + ) From bdf8c158e5877b1f2bfa08eb8e766e339b52867a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 21 Dec 2021 14:42:25 +0100 Subject: [PATCH 0022/2550] nuke adding better docstring --- openpype/hosts/nuke/plugins/publish/validate_write_nodes.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 94dabecc97..069c6f4d8c 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -25,7 +25,11 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): class ValidateNukeWriteNode(pyblish.api.InstancePlugin): - """ Validates file output. """ + """ Validate Write node's knobs. + + Compare knobs on write node inside the render group + with settings. At the moment supporting only `file` knob. + """ order = pyblish.api.ValidatorOrder optional = True From 1a86a2e2655e81e2633841bf26935d31f42a888a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 25 May 2022 17:56:07 +0200 Subject: [PATCH 0023/2550] nuke validate gizmo with help --- .../plugins/publish/help/validate_gizmo.xml | 36 +++++++++++++++++++ .../nuke/plugins/publish/validate_gizmo.py | 31 +++++++++++----- 2 files changed, 58 insertions(+), 9 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml new file mode 100644 index 0000000000..f39a41a4f9 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml @@ -0,0 +1,36 @@ + + + + Found multiple outputs + +## Invalid amount of Output nodes + +Group node `{node_name}` is having more than one Output node. + +### How to repair? + +1. Use button `Open Group`. +2. Remove redundant Output node. 
+3. Hit reload button on the publisher. + + +### How could this happen? + +Perhaps you had created exciently more than one Output node. + + + + Missing Input nodes + +## Missing Input nodes + +Make sure there is at least one connected Input node inside the group node with name `{node_name}` + +### How to repair? + +1. Use button `Open Group`. +2. Add at least one Input node and connect to other nodes. +3. Hit reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index d0d930f50c..2321bd1fd4 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -1,6 +1,7 @@ -import nuke import pyblish -from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.pipeline import PublishXmlValidationError +from openpype.hosts.nuke.api import maintained_selection +import nuke class OpenFailedGroupNode(pyblish.api.Action): @@ -8,7 +9,7 @@ class OpenFailedGroupNode(pyblish.api.Action): Centering failed instance node in node grap """ - label = "Open Gizmo in Node Graph" + label = "Open Group" icon = "wrench" on = "failed" @@ -48,11 +49,23 @@ class ValidateGizmo(pyblish.api.InstancePlugin): with grpn: connections_out = nuke.allNodes('Output') - msg_multiple_outputs = "Only one outcoming connection from " - "\"{}\" is allowed".format(instance.data["name"]) - assert len(connections_out) <= 1, msg_multiple_outputs + msg_multiple_outputs = ( + "Only one outcoming connection from " + "\"{}\" is allowed").format(instance.data["name"]) + + if len(connections_out) > 1: + raise PublishXmlValidationError( + self, msg_multiple_outputs, "multiple_outputs", + {"node_name": grpn["name"].value()} + ) connections_in = nuke.allNodes('Input') - msg_missing_inputs = "At least one Input node has to be used in: " - "\"{}\"".format(instance.data["name"]) - assert len(connections_in) >= 1, msg_missing_inputs + msg_missing_inputs = ( + "At least one Input node has to be inside Group: " + "\"{}\"").format(instance.data["name"]) + + if len(connections_in) == 0: + raise PublishXmlValidationError( + self, msg_missing_inputs, "no_inputs", + {"node_name": grpn["name"].value()} + ) From 0666bce2be03be03e9a4ff73ff48ee0d36343735 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 25 May 2022 17:58:33 +0200 Subject: [PATCH 0024/2550] nuke validate asset name with help rename plugin, fix order on how to fix in validate write nodes xml --- .../publish/help/validate_asset_name.xml | 18 ++++++++ .../publish/help/validate_write_nodes.xml | 2 +- ...e_in_context.py => validate_asset_name.py} | 43 ++++++++++++------- 3 files changed, 46 insertions(+), 17 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml rename openpype/hosts/nuke/plugins/publish/{validate_instance_in_context.py => validate_asset_name.py} (75%) diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml new file mode 100644 index 0000000000..7ea552597a --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml @@ -0,0 +1,18 @@ + + + + Shot/Asset mame + +## Invalid Shot/Asset name in subset + +Following Node with name `{node_name}`: +Is in context of `{context_asset}` but Node _asset_ knob is set as `{asset}`. + +### How to repair? + +1. Either use Repair or Select button. +2. 
If you chose Select then rename asset knob to correct name. +3. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml index d209329434..c1f59a94f8 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml @@ -11,7 +11,7 @@ Following write node knobs needs to be repaired: ### How to repair? 1. Use Repair button. -3. Hit Reload button on the publisher. +2. Hit Reload button on the publisher. \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py similarity index 75% rename from openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py rename to openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 842f74b6f6..f9adac81f8 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -3,20 +3,17 @@ from __future__ import absolute_import import nuke - import pyblish.api import openpype.api -from openpype.hosts.nuke.api.lib import ( - recreate_instance, - reset_selection, - select_nodes -) +import openpype.hosts.nuke.api.lib as nlib +import openpype.hosts.nuke.api as nuke_api +from openpype.pipeline import PublishXmlValidationError class SelectInvalidInstances(pyblish.api.Action): """Select invalid instances in Outliner.""" - label = "Select Instances" + label = "Select" icon = "briefcase" on = "failed" @@ -50,12 +47,12 @@ class SelectInvalidInstances(pyblish.api.Action): self.deselect() def select(self, instances): - select_nodes( + nlib.select_nodes( [nuke.toNode(str(x)) for x in instances] ) def deselect(self): - reset_selection() + nlib.reset_selection() class RepairSelectInvalidInstances(pyblish.api.Action): @@ -85,12 +82,12 @@ class RepairSelectInvalidInstances(pyblish.api.Action): context_asset = context.data["assetEntity"]["name"] for instance in instances: origin_node = instance[0] - recreate_instance( + nuke_api.lib.recreate_instance( origin_node, avalon_data={"asset": context_asset} ) -class ValidateInstanceInContext(pyblish.api.InstancePlugin): +class ValidateCorrectAssetName(pyblish.api.InstancePlugin): """Validator to check if instance asset match context asset. When working in per-shot style you always publish data in context of @@ -99,15 +96,29 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin): Action on this validator will select invalid instances in Outliner. 
""" - order = openpype.api.ValidateContentsOrder - label = "Instance in same Context" + label = "Validate correct asset name" hosts = ["nuke"] - actions = [SelectInvalidInstances, RepairSelectInvalidInstances] + actions = [ + SelectInvalidInstances, + RepairSelectInvalidInstances + ] optional = True def process(self, instance): asset = instance.data.get("asset") context_asset = instance.context.data["assetEntity"]["name"] - msg = "{} has asset {}".format(instance.name, asset) - assert asset == context_asset, msg + + msg = ( + "Instance `{}` has wrong shot/asset name:\n" + "Correct: `{}` | Wrong: `{}`").format( + instance.name, asset, context_asset) + + if asset != context_asset: + PublishXmlValidationError( + self, msg, formatting_data={ + "node_name": instance[0]["name"].value(), + "wrong_name": asset, + "correct_name": context_asset + } + ) From af3623c4dc3626ee474dd7606ac01a757323787a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 25 May 2022 18:00:36 +0200 Subject: [PATCH 0025/2550] nuke fixes on validation and precollect --- .../plugins/publish/help/validate_asset_name.xml | 2 +- .../nuke/plugins/publish/validate_asset_name.py | 5 ++++- .../hosts/nuke/plugins/publish/validate_backdrop.py | 12 +++++++++--- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml index 7ea552597a..1097909a5f 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml @@ -6,7 +6,7 @@ ## Invalid Shot/Asset name in subset Following Node with name `{node_name}`: -Is in context of `{context_asset}` but Node _asset_ knob is set as `{asset}`. +Is in context of `{correct_name}` but Node _asset_ knob is set as `{wrong_name}`. ### How to repair? diff --git a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py index f9adac81f8..7647471f8a 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -36,6 +36,7 @@ class SelectInvalidInstances(pyblish.api.Action): instances = pyblish.api.instances_by_plugin(failed, plugin) if instances: + self.deselect() self.log.info( "Selecting invalid nodes: %s" % ", ".join( [str(x) for x in instances] @@ -114,8 +115,10 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin): "Correct: `{}` | Wrong: `{}`").format( instance.name, asset, context_asset) + self.log.debug(msg) + if asset != context_asset: - PublishXmlValidationError( + raise PublishXmlValidationError( self, msg, formatting_data={ "node_name": instance[0]["name"].value(), "wrong_name": asset, diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index 2a0d3309a0..17dc79dc56 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -48,8 +48,9 @@ class SelectCenterInNodeGraph(pyblish.api.Action): @pyblish.api.log class ValidateBackdrop(pyblish.api.InstancePlugin): - """Validate amount of nodes on backdrop node in case user - forgotten to add nodes above the publishing backdrop node""" + """ Validate amount of nodes on backdrop node in case user + forgoten to add nodes above the publishing backdrop node. 
+ """ order = pyblish.api.ValidatorOrder optional = True @@ -75,7 +76,12 @@ class ValidateBackdrop(pyblish.api.InstancePlugin): msg_no_nodes = "No content on backdrop node: \"{}\"".format( instance.data["name"]) - if len(instance) == 0: + self.log.debug( + "Amount of nodes on instance: {}".format( + len(instance)) + ) + + if len(instance) == 1: raise PublishXmlValidationError( self, msg_no_nodes, From 44bd543d131e3b60be44fc79004e8487882ca61c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 21 Dec 2021 18:28:30 +0100 Subject: [PATCH 0026/2550] nuke: fixing name for validate asset name --- openpype/settings/defaults/project_settings/nuke.json | 2 +- .../schemas/projects_schema/schemas/schema_nuke_publish.json | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 33ddc2f251..fb1e475e9f 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -91,7 +91,7 @@ "write" ] }, - "ValidateInstanceInContext": { + "ValidateCorrectAssetName": { "enabled": true, "optional": true, "active": true diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 04df957d67..5635f871d5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -61,8 +61,8 @@ "name": "template_publish_plugin", "template_data": [ { - "key": "ValidateInstanceInContext", - "label": "Validate Instance In Context" + "key": "ValidateCorrectAssetName", + "label": "Validate Correct Asset name" } ] }, From e2b2bdc15d5f919793848e15b3b2fb2f00c2aec5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 22 Dec 2021 15:29:49 +0100 Subject: [PATCH 0027/2550] nuke validate knobs with help --- .../nuke/plugins/publish/help/validate_knobs.xml | 16 ++++++++++++++++ .../hosts/nuke/plugins/publish/validate_knobs.py | 8 +++++--- .../nuke/plugins/publish/validate_write_nodes.py | 2 +- 3 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml new file mode 100644 index 0000000000..cb5494729b --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml @@ -0,0 +1,16 @@ + + + + Knobs value + +## Invalid node's knobs values + +Following node knobs needs to be repaired: + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. 
+ + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index d290ff4541..d20051a9d5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -2,6 +2,7 @@ import nuke import pyblish.api import openpype.api +from openpype.pipeline import PublishXmlValidationError class ValidateKnobs(pyblish.api.ContextPlugin): @@ -27,11 +28,12 @@ class ValidateKnobs(pyblish.api.ContextPlugin): optional = True def process(self, context): - invalid = self.get_invalid(context, compute=True) if invalid: - raise RuntimeError( - "Found knobs with invalid values:\n{}".format(invalid) + raise PublishXmlValidationError( + self, + "Found knobs with invalid values:\n{}".format(invalid), + formatting_data={} ) @classmethod diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 069c6f4d8c..f8d8393730 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -76,5 +76,5 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): xml_msg += _msg_add raise PublishXmlValidationError( - self, dbg_msg, {"xml_msg": xml_msg} + self, dbg_msg, formatting_data={"xml_msg": xml_msg} ) From 447742506dcf05301c3818d3ac154c0dd4e25d8d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 25 May 2022 21:04:45 +0200 Subject: [PATCH 0028/2550] Nuke: improving knob validator with p3 compatibility --- openpype/hosts/nuke/plugins/publish/validate_knobs.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index d20051a9d5..24723edc7a 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -1,5 +1,5 @@ import nuke - +import six import pyblish.api import openpype.api from openpype.pipeline import PublishXmlValidationError @@ -64,7 +64,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): knobs = {} for family in families: for preset in cls.knobs[family]: - knobs.update({preset: cls.knobs[family][preset]}) + knobs[preset] = cls.knobs[family][preset] # Get invalid knobs. 
nodes = [] @@ -73,8 +73,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): nodes.append(node) if node.Class() == "Group": node.begin() - for i in nuke.allNodes(): - nodes.append(i) + nodes.extend(iter(nuke.allNodes())) node.end() for node in nodes: @@ -101,7 +100,9 @@ class ValidateKnobs(pyblish.api.ContextPlugin): def repair(cls, instance): invalid = cls.get_invalid(instance) for data in invalid: - if isinstance(data["expected"], unicode): + # TODO: will need to improve type definitions + # with the new settings for knob types + if isinstance(data["expected"], six.text_type): data["knob"].setValue(str(data["expected"])) continue From 568ce1858163a79ad8b63db78c72a5f28bd77a63 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 22 Dec 2021 20:36:49 +0100 Subject: [PATCH 0029/2550] fixing validate knobs plugin --- .../hosts/nuke/plugins/publish/help/validate_knobs.xml | 2 ++ openpype/hosts/nuke/plugins/publish/validate_knobs.py | 9 ++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml index cb5494729b..76c184f653 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml @@ -7,6 +7,8 @@ Following node knobs needs to be repaired: +{invalid_items} + ### How to repair? 1. Use Repair button. diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index 24723edc7a..6df0afd5ba 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -30,10 +30,16 @@ class ValidateKnobs(pyblish.api.ContextPlugin): def process(self, context): invalid = self.get_invalid(context, compute=True) if invalid: + invalid_items = [ + ("Node __{node_name}__ with knob _{label}_ " + "expecting _{expected}_, " + "but is set to _{current}_").format(**i) + for i in invalid] raise PublishXmlValidationError( self, "Found knobs with invalid values:\n{}".format(invalid), - formatting_data={} + formatting_data={ + "invalid_items": "\n".join(invalid_items)} ) @classmethod @@ -85,6 +91,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): if node[knob].value() != expected: invalid_knobs.append( { + "node_name": node.name(), "knob": node[knob], "name": node[knob].name(), "label": node[knob].label(), From d5f5ba5bbfe361349503b0b24c4b0bc06450f796 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 3 Jan 2022 17:40:46 +0100 Subject: [PATCH 0030/2550] nuke validate output resolution with help --- .../help/validate_output_resolution.xml | 16 +++ .../publish/validate_output_resolution.py | 123 ++++++++++-------- 2 files changed, 85 insertions(+), 54 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml new file mode 100644 index 0000000000..08a88a993e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml @@ -0,0 +1,16 @@ + + + + Output format + +## Invalid format setting + +Either the Reformat node inside of the render group is missing or the Reformat node output format knob is not set to `root.format`. + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. 
+ + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index 27094b8d74..dbd388c2e6 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -1,43 +1,9 @@ -import nuke import pyblish.api - - -class RepairWriteResolutionDifference(pyblish.api.Action): - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - for instance in instances: - reformat = instance[0].dependencies()[0] - if reformat.Class() != "Reformat": - reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)]) - - xpos = instance[0].xpos() - ypos = instance[0].ypos() - 26 - - dependent_ypos = instance[0].dependencies()[0].ypos() - if (instance[0].ypos() - dependent_ypos) <= 51: - xpos += 110 - - reformat.setXYpos(xpos, ypos) - - instance[0].setInput(0, reformat) - - reformat["resize"].setValue("none") +import openpype.api +import avalon.nuke.lib as anlib +from openpype.pipeline import PublishXmlValidationError +import nuke class ValidateOutputResolution(pyblish.api.InstancePlugin): @@ -52,27 +18,76 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): families = ["render", "render.local", "render.farm"] label = "Write Resolution" hosts = ["nuke"] - actions = [RepairWriteResolutionDifference] + actions = [openpype.api.RepairAction] + + missing_msg = "Missing Reformat node in render group node" + resolution_msg = "Reformat is set to wrong format" def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise PublishXmlValidationError(self, invalid) - # Skip bounding box check if a reformat node exists. - if instance[0].dependencies()[0].Class() == "Reformat": - return + @classmethod + def get_reformat(cls, instance): + reformat = None + for inode in instance: + if inode.Class() != "Reformat": + continue + reformat = inode - msg = "Bounding box is outside the format." 
- assert self.check_resolution(instance), msg + return reformat - def check_resolution(self, instance): - node = instance[0] + @classmethod + def get_invalid(cls, instance): + def _check_resolution(instance, reformat): + root_width = instance.data["resolutionWidth"] + root_height = instance.data["resolutionHeight"] - root_width = instance.data["resolutionWidth"] - root_height = instance.data["resolutionHeight"] + write_width = reformat.format().width() + write_height = reformat.format().height() - write_width = node.format().width() - write_height = node.format().height() + if (root_width != write_width) or (root_height != write_height): + return None + else: + return True - if (root_width != write_width) or (root_height != write_height): - return None - else: - return True + # check if reformat is in render node + reformat = cls.get_reformat(instance) + if not reformat: + return cls.missing_msg + + # check if reformat is set to correct root format + correct_format = _check_resolution(instance, reformat) + if not correct_format: + return cls.resolution_msg + + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid(instance) + grp_node = instance[0] + + if cls.missing_msg == invalid: + # make sure we are inside of the group node + with grp_node: + # find input node and select it + input = None + for inode in instance: + if inode.Class() != "Input": + continue + input = inode + + # add reformat node under it + with anlib.maintained_selection(): + input['selected'].setValue(True) + _rfn = nuke.createNode("Reformat", "name Reformat01") + _rfn["resize"].setValue(0) + _rfn["black_outside"].setValue(1) + + cls.log.info("I am adding reformat node") + + if cls.resolution_msg == invalid: + reformat = cls.get_reformat(instance) + reformat["format"].setValue(nuke.root()["format"].value()) + cls.log.info("I am fixing reformat to root.format") \ No newline at end of file From 01ba3d1c8a21b7ae2dd13daff67b30969997b8b5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 25 May 2022 21:10:41 +0200 Subject: [PATCH 0031/2550] Nuke: validator ditch avalon dependency --- .../plugins/publish/validate_output_resolution.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index dbd388c2e6..710adde069 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -1,7 +1,7 @@ import pyblish.api import openpype.api -import avalon.nuke.lib as anlib +from openpype.hosts.nuke.api import maintained_selection from openpype.pipeline import PublishXmlValidationError import nuke @@ -62,7 +62,6 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): if not correct_format: return cls.resolution_msg - @classmethod def repair(cls, instance): invalid = cls.get_invalid(instance) @@ -72,15 +71,15 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): # make sure we are inside of the group node with grp_node: # find input node and select it - input = None + _input = None for inode in instance: if inode.Class() != "Input": continue - input = inode + _input = inode # add reformat node under it - with anlib.maintained_selection(): - input['selected'].setValue(True) + with maintained_selection(): + _input['selected'].setValue(True) _rfn = nuke.createNode("Reformat", "name Reformat01") _rfn["resize"].setValue(0) _rfn["black_outside"].setValue(1) From 
b9e16e87f9d2b26861b5a33e1bba502ffdfafeba Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 27 May 2022 13:07:39 +0200 Subject: [PATCH 0032/2550] Nuke: proxy mode validator refactory to new publisher --- .../plugins/publish/help/validate_proxy_mode.xml | 16 ++++++++++++++++ .../nuke/plugins/publish/validate_proxy_mode.py | 8 ++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml new file mode 100644 index 0000000000..6fe5d5d43e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml @@ -0,0 +1,16 @@ + + + + Proxy mode + +## Invalid proxy mode value + +Nuke is set to use Proxy. This is not supported by publisher. + +### How to repair? + +1. Use Repair button. +2. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py index 9c6ca03ffd..e5f3ae9800 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py +++ b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py @@ -1,5 +1,6 @@ import pyblish import nuke +from openpype.pipeline import PublishXmlValidationError class FixProxyMode(pyblish.api.Action): @@ -7,7 +8,7 @@ class FixProxyMode(pyblish.api.Action): Togger off proxy switch OFF """ - label = "Proxy toggle to OFF" + label = "Repair" icon = "wrench" on = "failed" @@ -30,4 +31,7 @@ class ValidateProxyMode(pyblish.api.ContextPlugin): rootNode = nuke.root() isProxy = rootNode["proxy"].value() - assert not isProxy, "Proxy mode should be toggled OFF" + if not isProxy: + raise PublishXmlValidationError( + self, "Proxy mode should be toggled OFF" + ) From 44655d25156b1d8a1587ca898cc5aaf2a17e898e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 27 May 2022 13:08:18 +0200 Subject: [PATCH 0033/2550] Nuke: improving code readability --- .../nuke/plugins/publish/validate_knobs.py | 11 ++-- .../plugins/publish/validate_write_nodes.py | 62 ++++++++++--------- 2 files changed, 40 insertions(+), 33 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index 6df0afd5ba..573c25f3fe 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -31,10 +31,13 @@ class ValidateKnobs(pyblish.api.ContextPlugin): invalid = self.get_invalid(context, compute=True) if invalid: invalid_items = [ - ("Node __{node_name}__ with knob _{label}_ " - "expecting _{expected}_, " - "but is set to _{current}_").format(**i) - for i in invalid] + ( + "Node __{node_name}__ with knob _{label}_ " + "expecting _{expected}_, " + "but is set to _{current}_" + ).format(**i) + for i in invalid + ] raise PublishXmlValidationError( self, "Found knobs with invalid values:\n{}".format(invalid), diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index f8d8393730..320307d09f 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -19,8 +19,8 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): for instance in instances: node = instance[1] correct_data = get_write_node_template_attr(node) - for k, v 
in correct_data.items(): - node[k].setValue(v) + for key, value in correct_data.items(): + node[key].setValue(value) self.log.info("Node attributes were fixed") @@ -41,40 +41,44 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): def process(self, instance): node = instance[1] - correct_data = get_write_node_template_attr(node) + write_group_node = instance[0] + correct_data = get_write_node_template_attr(write_group_node) check = [] - for k, v in correct_data.items(): - if k is 'file': - padding = len(v.split('#')) - ref_path = get_node_path(v, padding) - n_path = get_node_path(node[k].value(), padding) - isnt = False - for i, p in enumerate(ref_path): - if str(n_path[i]) not in str(p): - if not isnt: - isnt = True - else: - continue - if isnt: - check.append([k, v, node[k].value()]) - else: - if str(node[k].value()) not in str(v): - check.append([k, v, node[k].value()]) + for key, value in correct_data.items(): + if key == 'file': + padding = len(value.split('#')) + ref_path = get_node_path(value, padding) + n_path = get_node_path(node[key].value(), padding) + is_not = False + for i, path in enumerate(ref_path): + if ( + str(n_path[i]) != str(path) + and not is_not + ): + is_not = True + if is_not: + check.append([key, value, node[key].value()]) + + elif str(node[key].value()) != str(value): + check.append([key, value, node[key].value()]) self.log.info(check) + if check: + self._make_error(check) + + def _make_error(self, check): msg = "Write node's knobs values are not correct!\n" + dbg_msg = msg msg_add = "Knob `{0}` Correct: `{1}` Wrong: `{2}` \n" xml_msg = "" - if check: - dbg_msg = msg - for item in check: - _msg_add = msg_add.format(item[0], item[1], item[2]) - dbg_msg += _msg_add - xml_msg += _msg_add + for item in check: + _msg_add = msg_add.format(item[0], item[1], item[2]) + dbg_msg += _msg_add + xml_msg += _msg_add - raise PublishXmlValidationError( - self, dbg_msg, formatting_data={"xml_msg": xml_msg} - ) + raise PublishXmlValidationError( + self, dbg_msg, formatting_data={"xml_msg": xml_msg} + ) From e33c06f95e58db53eb27edbfb85909e6643679dd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 27 May 2022 17:26:30 +0200 Subject: [PATCH 0034/2550] Nuke: abstracting function for correct knob type value --- openpype/hosts/nuke/api/__init__.py | 4 ++- openpype/hosts/nuke/api/lib.py | 43 +++++++++++++++++------------ 2 files changed, 28 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index b571c4098c..f7288bb287 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -26,7 +26,8 @@ from .pipeline import ( update_container, ) from .lib import ( - maintained_selection + maintained_selection, + convert_knob_value_to_correct_type ) from .utils import ( @@ -58,6 +59,7 @@ __all__ = ( "update_container", "maintained_selection", + "convert_knob_value_to_correct_type", "colorspace_exists_on_node", "get_colorspace_list" diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index ba8aa7a8db..457e5d851f 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1528,28 +1528,35 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): if not knob_value: continue - # first convert string types to string - # just to ditch unicode - if isinstance(knob_value, six.text_type): - knob_value = str(knob_value) - - # set correctly knob types - if knob_type == "bool": - knob_value = bool(knob_value) - elif knob_type == 
"decimal_number": - knob_value = float(knob_value) - elif knob_type == "number": - knob_value = int(knob_value) - elif knob_type == "text": - knob_value = knob_value - elif knob_type == "color_gui": - knob_value = color_gui_to_int(knob_value) - elif knob_type in ["2d_vector", "3d_vector", "color"]: - knob_value = [float(v) for v in knob_value] + knob_value = convert_knob_value_to_correct_type( + knob_type, knob_value) node[knob_name].setValue(knob_value) +def convert_knob_value_to_correct_type(knob_type, knob_value): + # first convert string types to string + # just to ditch unicode + if isinstance(knob_value, six.text_type): + knob_value = str(knob_value) + + # set correctly knob types + if knob_type == "bool": + knob_value = bool(knob_value) + elif knob_type == "decimal_number": + knob_value = float(knob_value) + elif knob_type == "number": + knob_value = int(knob_value) + elif knob_type == "text": + knob_value = knob_value + elif knob_type == "color_gui": + knob_value = color_gui_to_int(knob_value) + elif knob_type in ["2d_vector", "3d_vector", "color"]: + knob_value = [float(v) for v in knob_value] + + return knob_value + + def color_gui_to_int(color_gui): hex_value = ( "0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui) From f8b45e078c74c64a9fc549e35429649db42b667a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 27 May 2022 17:28:04 +0200 Subject: [PATCH 0035/2550] Nuke: removing obsolete code fixing family to families as Write is stored there in avalon data on node --- openpype/hosts/nuke/api/lib.py | 30 +++--------------------------- 1 file changed, 3 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 457e5d851f..5d66cb2b89 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -836,29 +836,6 @@ def check_subsetname_exists(nodes, subset_name): if subset_name in read_avalon_data(n).get("subset", "")), False) - -def get_render_path(node): - ''' Generate Render path from presets regarding avalon knob data - ''' - avalon_knob_data = read_avalon_data(node) - data = {'avalon': avalon_knob_data} - - nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], - plugin_name=avalon_knob_data["creator"], - subset=avalon_knob_data["subset"] - ) - host_name = os.environ.get("AVALON_APP") - - data.update({ - "app": host_name, - "nuke_imageio_writes": nuke_imageio_writes - }) - - anatomy_filled = format_anatomy(data) - return anatomy_filled["render"]["path"].replace("\\", "/") - - def format_anatomy(data): ''' Helping function for formatting of anatomy paths @@ -2185,15 +2162,14 @@ def get_write_node_template_attr(node): avalon_knob_data = read_avalon_data(node) # get template data nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) + # collecting correct data - correct_data = OrderedDict({ - "file": get_render_path(node) - }) + correct_data = OrderedDict() # adding imageio knob presets for k, v in nuke_imageio_writes.items(): From 1686eaeadad906ddc8e4f20574f4af6c9e697674 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 27 May 2022 17:29:07 +0200 Subject: [PATCH 0036/2550] Nuke: refactory write node validator --- .../publish/help/validate_write_nodes.xml | 1 + .../plugins/publish/validate_write_nodes.py | 86 ++++++++++++------- 2 files changed, 55 insertions(+), 32 deletions(-) diff --git 
a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml index c1f59a94f8..cdf85102bc 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml @@ -6,6 +6,7 @@ ## Invalid node's knobs values Following write node knobs needs to be repaired: + {xml_msg} ### How to repair? diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 320307d09f..f0a7f01dfb 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,8 +1,10 @@ import pyblish.api from openpype.api import get_errored_instances_from_context +import openpype.hosts.nuke.api.lib as nlib from openpype.hosts.nuke.api.lib import ( get_write_node_template_attr, - get_node_path + set_node_knobs_from_settings + ) from openpype.pipeline import PublishXmlValidationError @@ -17,10 +19,17 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): instances = get_errored_instances_from_context(context) for instance in instances: - node = instance[1] - correct_data = get_write_node_template_attr(node) - for key, value in correct_data.items(): - node[key].setValue(value) + write_group_node = instance[0] + # get write node from inside of group + write_node = None + for x in instance: + if x.Class() == "Write": + write_node = x + + correct_data = get_write_node_template_attr(write_group_node) + + set_node_knobs_from_settings(write_node, correct_data["knobs"]) + self.log.info("Node attributes were fixed") @@ -39,29 +48,41 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): hosts = ["nuke"] def process(self, instance): - - node = instance[1] write_group_node = instance[0] + + # get write node from inside of group + write_node = None + for x in instance: + if x.Class() == "Write": + write_node = x + + if write_node is None: + return + correct_data = get_write_node_template_attr(write_group_node) - check = [] - for key, value in correct_data.items(): - if key == 'file': - padding = len(value.split('#')) - ref_path = get_node_path(value, padding) - n_path = get_node_path(node[key].value(), padding) - is_not = False - for i, path in enumerate(ref_path): - if ( - str(n_path[i]) != str(path) - and not is_not - ): - is_not = True - if is_not: - check.append([key, value, node[key].value()]) + if correct_data: + check_knobs = correct_data["knobs"] + else: + return - elif str(node[key].value()) != str(value): - check.append([key, value, node[key].value()]) + check = [] + self.log.debug("__ write_node: {}".format( + write_node + )) + + for knob_data in check_knobs: + key = knob_data["name"] + value = knob_data["value"] + self.log.debug("__ key: {} | value: {}".format( + key, value + )) + if ( + str(write_node[key].value()) != str(value) + and key != "file" + and key != "tile_color" + ): + check.append([key, value, write_node[key].value()]) self.log.info(check) @@ -69,15 +90,16 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): self._make_error(check) def _make_error(self, check): - msg = "Write node's knobs values are not correct!\n" - dbg_msg = msg - msg_add = "Knob `{0}` Correct: `{1}` Wrong: `{2}` \n" - xml_msg = "" + # sourcery skip: merge-assign-and-aug-assign, move-assign-in-block + dbg_msg = "Write node's knobs values are not correct!\n" + msg_add = "Knob '{0}' > Correct: `{1}` > Wrong: `{2}`" - for item in 
check:
-            _msg_add = msg_add.format(item[0], item[1], item[2])
-            dbg_msg += _msg_add
-            xml_msg += _msg_add
+        details = [
+            msg_add.format(item[0], item[1], item[2])
+            for item in check
+        ]
+        xml_msg = "
".join(details) + dbg_msg += "\n\t".join(details) raise PublishXmlValidationError( self, dbg_msg, formatting_data={"xml_msg": xml_msg} From e71653fb1672a579dfb96b2843ac55eca4de7985 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 27 May 2022 17:29:21 +0200 Subject: [PATCH 0037/2550] Nuke: fixing proxy mode validator --- openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py index e5f3ae9800..dac240ad19 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py +++ b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py @@ -31,7 +31,7 @@ class ValidateProxyMode(pyblish.api.ContextPlugin): rootNode = nuke.root() isProxy = rootNode["proxy"].value() - if not isProxy: + if isProxy: raise PublishXmlValidationError( self, "Proxy mode should be toggled OFF" ) From 9fe4b635174060697a9ea8a9e33f47394a34d9e9 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 24 May 2022 12:56:24 +0200 Subject: [PATCH 0038/2550] refactor avalon imports from lib_template_builder --- .../hosts/maya/api/lib_template_builder.py | 184 ++++++++++++++++++ 1 file changed, 184 insertions(+) create mode 100644 openpype/hosts/maya/api/lib_template_builder.py diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py new file mode 100644 index 0000000000..172a6f9b2b --- /dev/null +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -0,0 +1,184 @@ +from collections import OrderedDict +import maya.cmds as cmds + +import qargparse +from openpype.tools.utils.widgets import OptionDialog +from lib import get_main_window, imprint + +# To change as enum +build_types = ["context_asset", "linked_asset", "all_assets"] + + +def get_placeholder_attributes(node): + return { + attr: cmds.getAttr("{}.{}".format(node, attr)) + for attr in cmds.listAttr(node, userDefined=True)} + + +def delete_placeholder_attributes(node): + ''' + function to delete all extra placeholder attributes + ''' + extra_attributes = get_placeholder_attributes(node) + for attribute in extra_attributes: + cmds.deleteAttr(node + '.' 
+ attribute) + + +def create_placeholder(): + args = placeholder_window() + + if not args: + return # operation canceled, no locator created + + selection = cmds.ls(selection=True) + placeholder = cmds.spaceLocator(name="_TEMPLATE_PLACEHOLDER_")[0] + if selection: + cmds.parent(placeholder, selection[0]) + # custom arg parse to force empty data query + # and still imprint them on placeholder + # and getting items when arg is of type Enumerator + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + imprint(placeholder, options) + # Some tweaks because imprint force enums to to default value so we get + # back arg read and force them to attributes + imprint_enum(placeholder, args) + + # Add helper attributes to keep placeholder info + cmds.addAttr( + placeholder, longName="parent", + hidden=True, dataType="string") + cmds.addAttr( + placeholder, longName="index", + hidden=True, attributeType="short", + defaultValue=-1) + + +def update_placeholder(): + placeholder = cmds.ls(selection=True) + if len(placeholder) == 0: + raise ValueError("No node selected") + if len(placeholder) > 1: + raise ValueError("Too many selected nodes") + placeholder = placeholder[0] + + args = placeholder_window(get_placeholder_attributes(placeholder)) + # delete placeholder attributes + delete_placeholder_attributes(placeholder) + if not args: + return # operation canceled + + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + imprint(placeholder, options) + imprint_enum(placeholder, args) + + +def imprint_enum(placeholder, args): + """ + Imprint method doesn't act properly with enums. + Replacing the functionnality with this for now + """ + enum_values = {str(arg): arg.read() + for arg in args if arg._data.get("items")} + string_to_value_enum_table = { + build: i for i, build + in enumerate(build_types)} + for key, value in enum_values.items(): + cmds.setAttr( + placeholder + "." + key, + string_to_value_enum_table[value]) + + +def placeholder_window(options=None): + options = options or dict() + dialog = OptionDialog(parent=get_main_window()) + dialog.setWindowTitle("Create Placeholder") + + args = [ + qargparse.Separator("Main attributes"), + qargparse.Enum( + "builder_type", + label="Asset Builder Type", + default=options.get("builder_type", 0), + items=build_types, + help="""Asset Builder Type +Builder type describe what template loader will look for. +context_asset : Template loader will look for subsets of +current context asset (Asset bob will find asset) +linked_asset : Template loader will look for assets linked +to current context asset. +Linked asset are looked in avalon database under field "inputLinks" +""" + ), + qargparse.String( + "family", + default=options.get("family", ""), + label="OpenPype Family", + placeholder="ex: model, look ..."), + qargparse.String( + "representation", + default=options.get("representation", ""), + label="OpenPype Representation", + placeholder="ex: ma, abc ..."), + qargparse.String( + "loader", + default=options.get("loader", ""), + label="Loader", + placeholder="ex: ReferenceLoader, LightLoader ...", + help="""Loader +Defines what openpype loader will be used to load assets. +Useable loader depends on current host's loader list. +Field is case sensitive. 
+"""), + qargparse.String( + "loader_args", + default=options.get("loader_args", ""), + label="Loader Arguments", + placeholder='ex: {"camera":"persp", "lights":True}', + help="""Loader +Defines a dictionnary of arguments used to load assets. +Useable arguments depend on current placeholder Loader. +Field should be a valid python dict. Anything else will be ignored. +"""), + qargparse.Integer( + "order", + default=options.get("order", 0), + min=0, + max=999, + label="Order", + placeholder="ex: 0, 100 ... (smallest order loaded first)", + help="""Order +Order defines asset loading priority (0 to 999) +Priority rule is : "lowest is first to load"."""), + qargparse.Separator( + "Optional attributes"), + qargparse.String( + "asset", + default=options.get("asset", ""), + label="Asset filter", + placeholder="regex filtering by asset name", + help="Filtering assets by matching field regex to asset's name"), + qargparse.String( + "subset", + default=options.get("subset", ""), + label="Subset filter", + placeholder="regex filtering by subset name", + help="Filtering assets by matching field regex to subset's name"), + qargparse.String( + "hierarchy", + default=options.get("hierarchy", ""), + label="Hierarchy filter", + placeholder="regex filtering by asset's hierarchy", + help="Filtering assets by matching field asset's hierarchy") + ] + dialog.create(args) + + if not dialog.exec_(): + return None + + return args From 69a388de1319eb49de84a0f6d846631623fc5a7d Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 24 May 2022 14:58:27 +0200 Subject: [PATCH 0039/2550] add the templated wrokfile build schema for maya --- .../defaults/project_settings/maya.json | 8 +++++ .../projects_schema/schema_project_maya.json | 4 +++ .../schema_templated_workfile_build.json | 29 +++++++++++++++++++ 3 files changed, 41 insertions(+) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index a42f889e85..303cd052bb 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -718,6 +718,14 @@ } ] }, + "templated_workfile_build": { + "profiles": [ + { + "task_types": [], + "path": "/path/to/your/template" + } + ] + }, "filters": { "preset 1": { "ValidateNoAnimation": false, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 40e98b0333..d137049e9e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -73,6 +73,10 @@ "type": "schema", "name": "schema_workfile_build" }, + { + "type": "schema", + "name": "schema_templated_workfile_build" + }, { "type": "schema", "name": "schema_publish_gui_filter" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json new file mode 100644 index 0000000000..01e74f64b0 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -0,0 +1,29 @@ +{ + "type": "dict", + "collapsible": true, + "key": "templated_workfile_build", + "label": "Templated Workfile Build Settings", + "children": [ + { + 
"type": "list", + "key": "profiles", + "label": "Profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "path", + "label": "Path to template", + "type": "text", + "object_type": "text" + } + ] + } + } + ] +} From 108597f9b1e139f31e6b0f20568866cb2971020a Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 24 May 2022 17:28:42 +0200 Subject: [PATCH 0040/2550] add placeholder menu to maya --- .../hosts/maya/api/lib_template_builder.py | 2 +- openpype/hosts/maya/api/menu.py | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index 172a6f9b2b..d8772f3f9a 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -3,7 +3,7 @@ import maya.cmds as cmds import qargparse from openpype.tools.utils.widgets import OptionDialog -from lib import get_main_window, imprint +from .lib import get_main_window, imprint # To change as enum build_types = ["context_asset", "linked_asset", "all_assets"] diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 97f06c43af..8beaf491bb 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -11,8 +11,10 @@ from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib + from .lib import get_main_window, IS_HEADLESS from .commands import reset_frame_range +from .lib_template_builder import create_placeholder, update_placeholder log = logging.getLogger(__name__) @@ -139,6 +141,24 @@ def install(): parent_widget ) ) + + builder_menu = cmds.menuItem( + "Template Builder", + subMenu=True, + tearOff=True, + parent=MENU_NAME + ) + cmds.menuItem( + "Create Placeholder", + parent=builder_menu, + command=lambda *args: create_placeholder() + ) + cmds.menuItem( + "Update Placeholder", + parent=builder_menu, + command=lambda *args: update_placeholder() + ) + cmds.setParent(MENU_NAME, menu=True) def add_scripts_menu(): From 199aba87727d7a2417d7f8122dd34f6e4160b467 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 25 May 2022 12:25:39 +0200 Subject: [PATCH 0041/2550] setup build template in openpype lib --- openpype/lib/__init__.py | 2 + openpype/lib/abstract_template_loader.py | 447 ++++++++++++++++++++++ openpype/lib/avalon_context.py | 222 +++++------ openpype/lib/build_template.py | 61 +++ openpype/lib/build_template_exceptions.py | 35 ++ 5 files changed, 660 insertions(+), 107 deletions(-) create mode 100644 openpype/lib/abstract_template_loader.py create mode 100644 openpype/lib/build_template.py create mode 100644 openpype/lib/build_template_exceptions.py diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 8d4e733b7d..8f3919d378 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -136,6 +136,7 @@ from .avalon_context import ( create_workfile_doc, save_workfile_data_to_doc, get_workfile_doc, + get_loaders_by_name, BuildWorkfile, @@ -308,6 +309,7 @@ __all__ = [ "create_workfile_doc", "save_workfile_data_to_doc", "get_workfile_doc", + "get_loaders_by_name", "BuildWorkfile", diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py new file mode 100644 index 0000000000..6888cbf757 --- /dev/null +++ 
b/openpype/lib/abstract_template_loader.py @@ -0,0 +1,447 @@ +import os +from abc import ABCMeta, abstractmethod + +import traceback + +import six + +import openpype +from openpype.settings import get_project_settings +from openpype.lib import Anatomy, get_linked_assets, get_loaders_by_name +from openpype.api import PypeLogger as Logger +from openpype.pipeline import legacy_io + +from functools import reduce + +from openpype.lib.build_template_exceptions import ( + TemplateAlreadyImported, + TemplateLoadingFailed, + TemplateProfileNotFound, + TemplateNotFound +) + + +def update_representations(entities, entity): + if entity['context']['subset'] not in entities: + entities[entity['context']['subset']] = entity + else: + current = entities[entity['context']['subset']] + incomming = entity + entities[entity['context']['subset']] = max( + current, incomming, + key=lambda entity: entity["context"].get("version", -1)) + + return entities + + +def parse_loader_args(loader_args): + if not loader_args: + return dict() + try: + parsed_args = eval(loader_args) + if not isinstance(parsed_args, dict): + return dict() + else: + return parsed_args + except Exception as err: + print( + "Error while parsing loader arguments '{}'.\n{}: {}\n\n" + "Continuing with default arguments. . .".format( + loader_args, + err.__class__.__name__, + err)) + return dict() + + +@six.add_metaclass(ABCMeta) +class AbstractTemplateLoader: + """ + Abstraction of Template Loader. + Properties: + template_path : property to get current template path + Methods: + import_template : Abstract Method. Used to load template, + depending on current host + get_template_nodes : Abstract Method. Used to query nodes acting + as placeholders. Depending on current host + """ + + def __init__(self, placeholder_class): + + self.loaders_by_name = get_loaders_by_name() + self.current_asset = legacy_io.Session["AVALON_ASSET"] + self.project_name = legacy_io.Session["AVALON_PROJECT"] + self.host_name = legacy_io.Session["AVALON_APP"] + self.task_name = legacy_io.Session["AVALON_TASK"] + self.placeholder_class = placeholder_class + self.current_asset_docs = legacy_io.find_one({ + "type": "asset", + "name": self.current_asset + }) + self.task_type = ( + self.current_asset_docs + .get("data", {}) + .get("tasks", {}) + .get(self.task_name, {}) + .get("type") + ) + + self.log = Logger().get_logger("BUILD TEMPLATE") + + self.log.info( + "BUILDING ASSET FROM TEMPLATE :\n" + "Starting templated build for {asset} in {project}\n\n" + "Asset : {asset}\n" + "Task : {task_name} ({task_type})\n" + "Host : {host}\n" + "Project : {project}\n".format( + asset=self.current_asset, + host=self.host_name, + project=self.project_name, + task_name=self.task_name, + task_type=self.task_type + )) + # Skip if there is no loader + if not self.loaders_by_name: + self.log.warning( + "There is no registered loaders. No assets will be loaded") + return + + def template_already_imported(self, err_msg): + """In case template was already loaded. + Raise the error as a default action. + Override this method in your template loader implementation + to manage this case.""" + self.log.error("{}: {}".format( + err_msg.__class__.__name__, + err_msg)) + raise TemplateAlreadyImported(err_msg) + + def template_loading_failed(self, err_msg): + """In case template loading failed + Raise the error as a default action. + Override this method in your template loader implementation + to manage this case. 
+ """ + self.log.error("{}: {}".format( + err_msg.__class__.__name__, + err_msg)) + raise TemplateLoadingFailed(err_msg) + + @property + def template_path(self): + """ + Property returning template path. Avoiding setter. + Getting template path from open pype settings based on current avalon + session and solving the path variables if needed. + Returns: + str: Solved template path + Raises: + TemplateProfileNotFound: No profile found from settings for + current avalon session + KeyError: Could not solve path because a key does not exists + in avalon context + TemplateNotFound: Solved path does not exists on current filesystem + """ + project_name = self.project_name + host_name = self.host_name + task_name = self.task_name + task_type = self.task_type + + anatomy = Anatomy(project_name) + project_settings = get_project_settings(project_name) + + build_info = project_settings[host_name]['templated_workfile_build'] + profiles = build_info['profiles'] + + for prf in profiles: + if prf['task_types'] and task_type not in prf['task_types']: + continue + if prf['task_names'] and task_name not in prf['task_names']: + continue + path = prf['path'] + break + else: # IF no template were found (no break happened) + raise TemplateProfileNotFound( + "No matching profile found for task '{}' of type '{}' " + "with host '{}'".format(task_name, task_type, host_name) + ) + if path is None: + raise TemplateLoadingFailed( + "Template path is not set.\n" + "Path need to be set in {}\\Template Workfile Build " + "Settings\\Profiles".format(host_name.title())) + try: + solved_path = None + while True: + solved_path = anatomy.path_remapper(path) + if solved_path is None: + solved_path = path + if solved_path == path: + break + path = solved_path + except KeyError as missing_key: + raise KeyError( + "Could not solve key '{}' in template path '{}'".format( + missing_key, path)) + finally: + solved_path = os.path.normpath(solved_path) + + if not os.path.exists(solved_path): + raise TemplateNotFound( + "Template found in openPype settings for task '{}' with host " + "'{}' does not exists. 
(Not found : {})".format( + task_name, host_name, solved_path)) + + self.log.info("Found template at : '{}'".format(solved_path)) + + return solved_path + + def populate_template(self, ignored_ids=None): + """ + Use template placeholders to load assets and parent them in hierarchy + Arguments : + ignored_ids : + Returns: + None + """ + loaders_by_name = self.loaders_by_name + current_asset = self.current_asset + linked_assets = [asset['name'] for asset + in get_linked_assets(self.current_asset_docs)] + + ignored_ids = ignored_ids or [] + placeholders = self.get_placeholders() + for placeholder in placeholders: + placeholder_representations = self.get_placeholder_representations( + placeholder, + current_asset, + linked_assets + ) + for representation in placeholder_representations: + + self.preload(placeholder, loaders_by_name, representation) + + if self.load_data_is_incorrect( + placeholder, + representation, + ignored_ids): + continue + + self.log.info( + "Loading {}_{} with loader {}\n" + "Loader arguments used : {}".format( + representation['context']['asset'], + representation['context']['subset'], + placeholder.loader, + placeholder.data['loader_args'])) + + try: + container = self.load( + placeholder, loaders_by_name, representation) + except Exception: + self.load_failed(placeholder, representation) + else: + self.load_succeed(placeholder, container) + finally: + self.postload(placeholder) + + def get_placeholder_representations( + self, placeholder, current_asset, linked_assets): + placeholder_db_filters = placeholder.convert_to_db_filters( + current_asset, + linked_assets) + # get representation by assets + for db_filter in placeholder_db_filters: + placeholder_representations = list(avalon.io.find(db_filter)) + for representation in reduce(update_representations, + placeholder_representations, + dict()).values(): + yield representation + + def load_data_is_incorrect( + self, placeholder, last_representation, ignored_ids): + if not last_representation: + self.log.warning(placeholder.err_message()) + return True + if (str(last_representation['_id']) in ignored_ids): + print("Ignoring : ", last_representation['_id']) + return True + return False + + def preload(self, placeholder, loaders_by_name, last_representation): + pass + + def load(self, placeholder, loaders_by_name, last_representation): + return openpype.pipeline.load( + loaders_by_name[placeholder.loader], + last_representation['_id'], + options=parse_loader_args(placeholder.data['loader_args'])) + + def load_succeed(self, placeholder, container): + placeholder.parent_in_hierarchy(container) + + def load_failed(self, placeholder, last_representation): + self.log.warning("Got error trying to load {}:{} with {}\n\n" + "{}".format(last_representation['context']['asset'], + last_representation['context']['subset'], + placeholder.loader, + traceback.format_exc())) + + def postload(self, placeholder): + placeholder.clean() + + def update_missing_containers(self): + loaded_containers_ids = self.get_loaded_containers_by_id() + self.populate_template(ignored_ids=loaded_containers_ids) + + def get_placeholders(self): + placeholder_class = self.placeholder_class + placeholders = map(placeholder_class, self.get_template_nodes()) + valid_placeholders = filter(placeholder_class.is_valid, placeholders) + sorted_placeholders = sorted(valid_placeholders, + key=placeholder_class.order) + return sorted_placeholders + + @abstractmethod + def get_loaded_containers_by_id(self): + """ + Collect already loaded containers for updating scene + 
Return: + dict (string, node): A dictionnary id as key + and containers as value + """ + pass + + @abstractmethod + def import_template(self, template_path): + """ + Import template in current host + Args: + template_path (str): fullpath to current task and + host's template file + Return: + None + """ + pass + + @abstractmethod + def get_template_nodes(self): + """ + Returning a list of nodes acting as host placeholders for + templating. The data representation is by user. + AbstractLoadTemplate (and LoadTemplate) won't directly manipulate nodes + Args : + None + Returns: + list(AnyNode): Solved template path + """ + pass + + +@six.add_metaclass(ABCMeta) +class AbstractPlaceholder: + """Abstraction of placeholders logic + Properties: + attributes: A list of mandatory attribute to decribe placeholder + and assets to load. + optional_attributes: A list of optional attribute to decribe + placeholder and assets to load + loader: Name of linked loader to use while loading assets + is_context: Is placeholder linked + to context asset (or to linked assets) + Methods: + is_repres_valid: + loader: + order: + is_valid: + get_data: + parent_in_hierachy: + """ + + attributes = {'builder_type', 'op_family', 'op_representation', + 'order', 'loader', 'loader_args'} + optional_attributes = {} + + def __init__(self, node): + self.get_data(node) + + def order(self): + """Get placeholder order. + Order is used to sort them by priority + Priority is lowset first, highest last + (ex: + 1: First to load + 100: Last to load) + Returns: + Int: Order priority + """ + return self.data.get('order') + + @property + def loader(self): + """Return placeholder loader type + Returns: + string: Loader name + """ + return self.data.get('loader') + + @property + def is_context(self): + """Return placeholder type + context_asset: For loading current asset + linked_asset: For loading linked assets + Returns: + bool: true if placeholder is a context placeholder + """ + return self.data.get('builder_type') == 'context_asset' + + def is_valid(self): + """Test validity of placeholder + i.e.: every attributes exists in placeholder data + Returns: + Bool: True if every attributes are a key of data + """ + if set(self.attributes).issubset(self.data.keys()): + print("Valid placeholder : {}".format(self.data["node"])) + return True + print("Placeholder is not valid : {}".format(self.data["node"])) + return False + + @abstractmethod + def parent_in_hierarchy(self, containers): + """Place container in correct hierarchy + given by placeholder + Args: + containers (String): Container name returned back by + placeholder's loader. + """ + pass + + @abstractmethod + def clean(self): + """Clean placeholder from hierarchy after loading assets. + """ + pass + + @abstractmethod + def convert_to_db_filters(self, current_asset, linked_asset): + """map current placeholder data as a db filter + args: + current_asset (String): Name of current asset in context + linked asset (list[String]) : Names of assets linked to + current asset in context + Returns: + dict: a dictionnary describing a filter to look for asset in + a database + """ + pass + + @abstractmethod + def get_data(self, node): + """ + Collect placeholders information. 
+ Args: + node (AnyNode): A unique node decided by Placeholder implementation + """ + pass diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 9d8a92cfe9..8c80b4a4ae 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -15,6 +15,7 @@ from openpype.settings import ( get_project_settings, get_system_settings ) + from .anatomy import Anatomy from .profiles_filtering import filter_profiles from .events import emit_event @@ -922,6 +923,118 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): ) +@with_pipeline_io +def collect_last_version_repres(asset_entities): + """Collect subsets, versions and representations for asset_entities. + + Args: + asset_entities (list): Asset entities for which want to find data + + Returns: + (dict): collected entities + + Example output: + ``` + { + {Asset ID}: { + "asset_entity": , + "subsets": { + {Subset ID}: { + "subset_entity": , + "version": { + "version_entity": , + "repres": [ + , , ... + ] + } + }, + ... + } + }, + ... + } + output[asset_id]["subsets"][subset_id]["version"]["repres"] + ``` + """ + + if not asset_entities: + return {} + + asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} + + subsets = list(legacy_io.find({ + "type": "subset", + "parent": {"$in": list(asset_entity_by_ids.keys())} + })) + subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} + + sorted_versions = list(legacy_io.find({ + "type": "version", + "parent": {"$in": list(subset_entity_by_ids.keys())} + }).sort("name", -1)) + + subset_id_with_latest_version = [] + last_versions_by_id = {} + for version in sorted_versions: + subset_id = version["parent"] + if subset_id in subset_id_with_latest_version: + continue + subset_id_with_latest_version.append(subset_id) + last_versions_by_id[version["_id"]] = version + + repres = legacy_io.find({ + "type": "representation", + "parent": {"$in": list(last_versions_by_id.keys())} + }) + + output = {} + for repre in repres: + version_id = repre["parent"] + version = last_versions_by_id[version_id] + + subset_id = version["parent"] + subset = subset_entity_by_ids[subset_id] + + asset_id = subset["parent"] + asset = asset_entity_by_ids[asset_id] + + if asset_id not in output: + output[asset_id] = { + "asset_entity": asset, + "subsets": {} + } + + if subset_id not in output[asset_id]["subsets"]: + output[asset_id]["subsets"][subset_id] = { + "subset_entity": subset, + "version": { + "version_entity": version, + "repres": [] + } + } + + output[asset_id]["subsets"][subset_id]["version"]["repres"].append( + repre + ) + + return output + + +@with_pipeline_io +def get_loaders_by_name(): + from openpype.pipeline import discover_loader_plugins + + loaders_by_name = {} + for loader in discover_loader_plugins(): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {} !".format(loader_name) + ) + loaders_by_name[loader_name] = loader + return loaders_by_name + + class BuildWorkfile: """Wrapper for build workfile process. @@ -979,8 +1092,6 @@ class BuildWorkfile: ... 
}] """ - from openpype.pipeline import discover_loader_plugins - # Get current asset name and entity current_asset_name = legacy_io.Session["AVALON_ASSET"] current_asset_entity = legacy_io.find_one({ @@ -996,14 +1107,7 @@ class BuildWorkfile: return # Prepare available loaders - loaders_by_name = {} - for loader in discover_loader_plugins(): - loader_name = loader.__name__ - if loader_name in loaders_by_name: - raise KeyError( - "Duplicated loader name {0}!".format(loader_name) - ) - loaders_by_name[loader_name] = loader + loaders_by_name = get_loaders_by_name() # Skip if there are any loaders if not loaders_by_name: @@ -1075,7 +1179,7 @@ class BuildWorkfile: return # Prepare entities from database for assets - prepared_entities = self._collect_last_version_repres(assets) + prepared_entities = collect_last_version_repres(assets) # Load containers by prepared entities and presets loaded_containers = [] @@ -1491,102 +1595,6 @@ class BuildWorkfile: return loaded_containers - @with_pipeline_io - def _collect_last_version_repres(self, asset_entities): - """Collect subsets, versions and representations for asset_entities. - - Args: - asset_entities (list): Asset entities for which want to find data - - Returns: - (dict): collected entities - - Example output: - ``` - { - {Asset ID}: { - "asset_entity": , - "subsets": { - {Subset ID}: { - "subset_entity": , - "version": { - "version_entity": , - "repres": [ - , , ... - ] - } - }, - ... - } - }, - ... - } - output[asset_id]["subsets"][subset_id]["version"]["repres"] - ``` - """ - - if not asset_entities: - return {} - - asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} - - subsets = list(legacy_io.find({ - "type": "subset", - "parent": {"$in": list(asset_entity_by_ids.keys())} - })) - subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - - sorted_versions = list(legacy_io.find({ - "type": "version", - "parent": {"$in": list(subset_entity_by_ids.keys())} - }).sort("name", -1)) - - subset_id_with_latest_version = [] - last_versions_by_id = {} - for version in sorted_versions: - subset_id = version["parent"] - if subset_id in subset_id_with_latest_version: - continue - subset_id_with_latest_version.append(subset_id) - last_versions_by_id[version["_id"]] = version - - repres = legacy_io.find({ - "type": "representation", - "parent": {"$in": list(last_versions_by_id.keys())} - }) - - output = {} - for repre in repres: - version_id = repre["parent"] - version = last_versions_by_id[version_id] - - subset_id = version["parent"] - subset = subset_entity_by_ids[subset_id] - - asset_id = subset["parent"] - asset = asset_entity_by_ids[asset_id] - - if asset_id not in output: - output[asset_id] = { - "asset_entity": asset, - "subsets": {} - } - - if subset_id not in output[asset_id]["subsets"]: - output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset, - "version": { - "version_entity": version, - "repres": [] - } - } - - output[asset_id]["subsets"][subset_id]["version"]["repres"].append( - repre - ) - - return output - @with_pipeline_io def get_creator_by_name(creator_name, case_sensitive=False): diff --git a/openpype/lib/build_template.py b/openpype/lib/build_template.py new file mode 100644 index 0000000000..7f749cbec2 --- /dev/null +++ b/openpype/lib/build_template.py @@ -0,0 +1,61 @@ +from openpype.pipeline import registered_host +from openpype.lib import classes_from_module +from importlib import import_module + +from .abstract_template_loader import ( + AbstractPlaceholder, + AbstractTemplateLoader) + 
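# A minimal, hypothetical usage sketch of get_loaders_by_name(), which the
# avalon_context change above exposes through openpype.lib: the returned dict
# maps loader class names to loader classes, so the loader name imprinted on a
# placeholder can be resolved back to a plugin. "ReferenceLoader" is only an
# example name borrowed from the placeholder help text, not a guaranteed entry.
from openpype.lib import get_loaders_by_name

loaders_by_name = get_loaders_by_name()
loader_cls = loaders_by_name.get("ReferenceLoader")
if loader_cls is None:
    print("ReferenceLoader is not registered for the current host")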
+from .build_template_exceptions import ( + TemplateLoadingFailed, + TemplateAlreadyImported, + MissingHostTemplateModule, + MissingTemplatePlaceholderClass, + MissingTemplateLoaderClass +) + +_module_path_format = 'openpype.{host}.template_loader' + + +def build_workfile_template(*args): + template_loader = build_template_loader() + try: + template_loader.import_template(template_loader.template_path) + except TemplateAlreadyImported as err: + template_loader.template_already_imported(err) + except TemplateLoadingFailed as err: + template_loader.template_loading_failed(err) + else: + template_loader.populate_template() + + +def update_workfile_template(args): + template_loader = build_template_loader() + template_loader.update_missing_containers() + + +def build_template_loader(): + host_name = registered_host().__name__.partition('.')[2] + module_path = _module_path_format.format(host=host_name) + module = import_module(module_path) + if not module: + raise MissingHostTemplateModule( + "No template loader found for host {}".format(host_name)) + + template_loader_class = classes_from_module( + AbstractTemplateLoader, + module + ) + template_placeholder_class = classes_from_module( + AbstractPlaceholder, + module + ) + + if not template_loader_class: + raise MissingTemplateLoaderClass() + template_loader_class = template_loader_class[0] + + if not template_placeholder_class: + raise MissingTemplatePlaceholderClass() + template_placeholder_class = template_placeholder_class[0] + return template_loader_class(template_placeholder_class) diff --git a/openpype/lib/build_template_exceptions.py b/openpype/lib/build_template_exceptions.py new file mode 100644 index 0000000000..d781eff204 --- /dev/null +++ b/openpype/lib/build_template_exceptions.py @@ -0,0 +1,35 @@ +class MissingHostTemplateModule(Exception): + """Error raised when expected module does not exists""" + pass + + +class MissingTemplatePlaceholderClass(Exception): + """Error raised when module doesn't implement a placeholder class""" + pass + + +class MissingTemplateLoaderClass(Exception): + """Error raised when module doesn't implement a template loader class""" + pass + + +class TemplateNotFound(Exception): + """Exception raised when template does not exist.""" + pass + + +class TemplateProfileNotFound(Exception): + """Exception raised when current profile + doesn't match any template profile""" + pass + + +class TemplateAlreadyImported(Exception): + """Error raised when Template was already imported by host for + this session""" + pass + + +class TemplateLoadingFailed(Exception): + """Error raised whend Template loader was unable to load the template""" + pass \ No newline at end of file From bd884262b0c001715432f28ec1cae6feeeabfed1 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 25 May 2022 12:52:44 +0200 Subject: [PATCH 0042/2550] add template loader module --- openpype/hosts/maya/api/template_loader.py | 242 +++++++++++++++++++++ 1 file changed, 242 insertions(+) create mode 100644 openpype/hosts/maya/api/template_loader.py diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py new file mode 100644 index 0000000000..0e346ca411 --- /dev/null +++ b/openpype/hosts/maya/api/template_loader.py @@ -0,0 +1,242 @@ +from maya import cmds + +from openpype.pipeline import legacy_io +from openpype.lib.abstract_template_loader import ( + AbstractPlaceholder, + AbstractTemplateLoader +) +from openpype.lib.build_template_exceptions import TemplateAlreadyImported + +PLACEHOLDER_SET = 
'PLACEHOLDERS_SET' + + +class MayaTemplateLoader(AbstractTemplateLoader): + """Concrete implementation of AbstractTemplateLoader for maya + """ + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded. + Args: + path (str): A path to current template (usually given by + get_template_path implementation) + Returns: + bool: Wether the template was succesfully imported or not + """ + if cmds.objExists(PLACEHOLDER_SET): + raise TemplateAlreadyImported( + "Build template already loaded\n" + "Clean scene if needed (File > New Scene)") + + cmds.sets(name=PLACEHOLDER_SET, empty=True) + self.new_nodes = cmds.file(path, i=True, returnNewNodes=True) + cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True) + + for set in cmds.listSets(allSets=True): + if (cmds.objExists(set) and + cmds.attributeQuery('id', node=set, exists=True) and + cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'): + if cmds.attributeQuery('asset', node=set, exists=True): + cmds.setAttr( + set + '.asset', + legacy_io.Session['AVALON_ASSET'], type='string' + ) + + return True + + def template_already_imported(self, err_msg): + clearButton = "Clear scene and build" + updateButton = "Update template" + abortButton = "Abort" + + title = "Scene already builded" + message = ( + "It's seems a template was already build for this scene.\n" + "Error message reveived :\n\n\"{}\"".format(err_msg)) + buttons = [clearButton, updateButton, abortButton] + defaultButton = clearButton + cancelButton = abortButton + dismissString = abortButton + answer = cmds.confirmDialog( + t=title, + m=message, + b=buttons, + db=defaultButton, + cb=cancelButton, + ds=dismissString) + + if answer == clearButton: + cmds.file(newFile=True, force=True) + self.import_template(self.template_path) + self.populate_template() + elif answer == updateButton: + self.update_missing_containers() + elif answer == abortButton: + return + + @staticmethod + def get_template_nodes(): + attributes = cmds.ls('*.builder_type', long=True) + return [attribute.rpartition('.')[0] for attribute in attributes] + + def get_loaded_containers_by_id(self): + containers = cmds.sets('AVALON_CONTAINERS', q=True) + return [ + cmds.getAttr(container + '.representation') + for container in containers] + + +class MayaPlaceholder(AbstractPlaceholder): + """Concrete implementation of AbstractPlaceholder for maya + """ + + optional_attributes = {'asset', 'subset', 'hierarchy'} + + def get_data(self, node): + user_data = dict() + for attr in self.attributes.union(self.optional_attributes): + attribute_name = '{}.{}'.format(node, attr) + if not cmds.attributeQuery(attr, node=node, exists=True): + print("{} not found".format(attribute_name)) + continue + user_data[attr] = cmds.getAttr( + attribute_name, + asString=True) + user_data['parent'] = ( + cmds.getAttr(node + '.parent', asString=True) + or node.rpartition('|')[0] or "") + user_data['node'] = node + if user_data['parent']: + siblings = cmds.listRelatives(user_data['parent'], children=True) + else: + siblings = cmds.ls(assemblies=True) + node_shortname = user_data['node'].rpartition('|')[2] + current_index = cmds.getAttr(node + '.index', asString=True) + user_data['index'] = ( + current_index if current_index >= 0 + else siblings.index(node_shortname)) + + self.data = user_data + + def parent_in_hierarchy(self, containers): + """Parent loaded container to placeholder's parent + ie : Set loaded content as placeholder's sibling + Args: + containers (String): Placeholder loaded 
containers + """ + if not containers: + return + + roots = cmds.sets(containers, q=True) + nodes_to_parent = [] + for root in roots: + if root.endswith("_RN"): + refRoot = cmds.referenceQuery(root, n=True)[0] + refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot] + nodes_to_parent.extend(refRoot) + elif root in cmds.listSets(allSets=True): + if not cmds.sets(root, q=True): + return + else: + continue + else: + nodes_to_parent.append(root) + + if self.data['parent']: + cmds.parent(nodes_to_parent, self.data['parent']) + # Move loaded nodes to correct index in outliner hierarchy + placeholder_node = self.data['node'] + placeholder_form = cmds.xform( + placeholder_node, + q=True, + matrix=True, + worldSpace=True + ) + for node in set(nodes_to_parent): + cmds.reorder(node, front=True) + cmds.reorder(node, relative=self.data['index']) + cmds.xform(node, matrix=placeholder_form, ws=True) + + holding_sets = cmds.listSets(object=placeholder_node) + if not holding_sets: + return + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) + + def clean(self): + """Hide placeholder, parent them to root + add them to placeholder set and register placeholder's parent + to keep placeholder info available for future use + """ + node = self.data['node'] + if self.data['parent']: + cmds.setAttr(node + '.parent', self.data['parent'], type='string') + if cmds.getAttr(node + '.index') < 0: + cmds.setAttr(node + '.index', self.data['index']) + + holding_sets = cmds.listSets(object=node) + if holding_sets: + for set in holding_sets: + cmds.sets(node, remove=set) + + if cmds.listRelatives(node, p=True): + node = cmds.parent(node, world=True)[0] + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr(node + '.hiddenInOutliner', True) + + def convert_to_db_filters(self, current_asset, linked_asset): + if self.data['builder_type'] == "context_asset": + return [ + { + "type": "representation", + "context.asset": { + "$eq": current_asset, + "$regex": self.data['asset'] + }, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + } + ] + + elif self.data['builder_type'] == "linked_asset": + return [ + { + "type": "representation", + "context.asset": { + "$eq": asset_name, + "$regex": self.data['asset'] + }, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + } for asset_name in linked_asset + ] + + else: + return [ + { + "type": "representation", + "context.asset": {"$regex": self.data['asset']}, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + } + ] + + def err_message(self): + return ( + "Error while trying to load a representation.\n" + "Either the subset wasn't published or the template is malformed." 
+ "\n\n" + "Builder was looking for :\n{attributes}".format( + attributes="\n".join([ + "{}: {}".format(key.title(), value) + for key, value in self.data.items()] + ) + ) + ) From 60cc108251db884a04cef1d2ea29a558a7750b8c Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 25 May 2022 14:28:28 +0200 Subject: [PATCH 0043/2550] add build workfile in menu --- openpype/hosts/maya/api/menu.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 8beaf491bb..c66eeb449f 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,7 +6,13 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import BuildWorkfile +from openpype.api import ( + BuildWorkfile, + # build_workfile_template + # update_workfile_template +) + +from openpype.lib.build_template import build_workfile_template, update_workfile_template from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools @@ -158,6 +164,16 @@ def install(): parent=builder_menu, command=lambda *args: update_placeholder() ) + cmds.menuItem( + "Build Workfile from template", + parent=builder_menu, + command=lambda *args: build_workfile_template() + ) + cmds.menuItem( + "Update Workfile from template", + parent=builder_menu, + command=lambda *args: update_workfile_template() + ) cmds.setParent(MENU_NAME, menu=True) From aaa1f13f9d0ae038f70eb2cdc21cba56f92b97dd Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 25 May 2022 16:35:05 +0200 Subject: [PATCH 0044/2550] delete the task_name verification since it does not exists in the maya menu settings --- openpype/lib/abstract_template_loader.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py index 6888cbf757..2dfec1a006 100644 --- a/openpype/lib/abstract_template_loader.py +++ b/openpype/lib/abstract_template_loader.py @@ -157,8 +157,6 @@ class AbstractTemplateLoader: for prf in profiles: if prf['task_types'] and task_type not in prf['task_types']: continue - if prf['task_names'] and task_name not in prf['task_names']: - continue path = prf['path'] break else: # IF no template were found (no break happened) @@ -253,7 +251,7 @@ class AbstractTemplateLoader: linked_assets) # get representation by assets for db_filter in placeholder_db_filters: - placeholder_representations = list(avalon.io.find(db_filter)) + placeholder_representations = list(legacy_io.find(db_filter)) for representation in reduce(update_representations, placeholder_representations, dict()).values(): From c2aca3422c8c2e29a169f9550e7e1719733f7ec4 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 25 May 2022 16:38:47 +0200 Subject: [PATCH 0045/2550] rename correctly attributes to correpsond the ones in the placeholders --- openpype/lib/abstract_template_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py index 2dfec1a006..628d0bd895 100644 --- a/openpype/lib/abstract_template_loader.py +++ b/openpype/lib/abstract_template_loader.py @@ -357,7 +357,7 @@ class AbstractPlaceholder: parent_in_hierachy: """ - attributes = {'builder_type', 'op_family', 'op_representation', + attributes = {'builder_type', 'family', 'representation', 'order', 'loader', 'loader_args'} optional_attributes = 
{} From 95d3686889470a8ad6d677b949a86cab094e47ea Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Fri, 27 May 2022 12:44:51 +0200 Subject: [PATCH 0046/2550] create placeholder name dynamically from arguments --- .../hosts/maya/api/lib_template_builder.py | 53 +++++++++++++++---- 1 file changed, 43 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index d8772f3f9a..ee78f19a3e 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -1,3 +1,4 @@ +import json from collections import OrderedDict import maya.cmds as cmds @@ -30,17 +31,20 @@ def create_placeholder(): if not args: return # operation canceled, no locator created - selection = cmds.ls(selection=True) - placeholder = cmds.spaceLocator(name="_TEMPLATE_PLACEHOLDER_")[0] - if selection: - cmds.parent(placeholder, selection[0]) # custom arg parse to force empty data query # and still imprint them on placeholder # and getting items when arg is of type Enumerator - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = arg._data.get("items") or arg.read() + options = create_options(args) + + # create placeholder name dynamically from args and options + placeholder_name = create_placeholder_name(args, options) + + selection = cmds.ls(selection=True) + placeholder = cmds.spaceLocator(name=placeholder_name.capitalize())[0] + + if selection: + cmds.parent(placeholder, selection[0]) + imprint(placeholder, options) # Some tweaks because imprint force enums to to default value so we get # back arg read and force them to attributes @@ -49,13 +53,42 @@ def create_placeholder(): # Add helper attributes to keep placeholder info cmds.addAttr( placeholder, longName="parent", - hidden=True, dataType="string") + hidden=False, dataType="string") cmds.addAttr( placeholder, longName="index", - hidden=True, attributeType="short", + hidden=False, attributeType="short", defaultValue=-1) +def create_options(args): + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + return options + + +def create_placeholder_name(args, options): + placeholder_builder_type = [ + arg.read() for arg in args if 'builder_type' in str(arg) + ][0] + placeholder_family = options['family'] + placeholder_name = placeholder_builder_type.split('_') + placeholder_name.insert(1, placeholder_family) + + # add loader arguments if any + if options['loader_args']: + pos = 2 + loader_args = options['loader_args'].replace('\'', '\"') + loader_args = json.loads(loader_args) + values = [v for v in loader_args.values()] + for i in range(len(values)): + placeholder_name.insert(i + pos, values[i]) + placeholder_name = '_'.join(placeholder_name) + + return placeholder_name + + def update_placeholder(): placeholder = cmds.ls(selection=True) if len(placeholder) == 0: From e29d4e5699e6dace616933317c57fcc9bc43c878 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 30 May 2022 14:19:12 +0200 Subject: [PATCH 0047/2550] minor refactoring --- .../hosts/maya/api/lib_template_builder.py | 19 ++++++++++++++----- openpype/hosts/maya/api/menu.py | 11 +++++------ 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index ee78f19a3e..bec0f1fc66 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py 
+++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -52,12 +52,21 @@ def create_placeholder(): # Add helper attributes to keep placeholder info cmds.addAttr( - placeholder, longName="parent", - hidden=False, dataType="string") + placeholder, + longName="parent", + hidden=False, + dataType="string" + ) cmds.addAttr( - placeholder, longName="index", - hidden=False, attributeType="short", - defaultValue=-1) + placeholder, + longName="index", + hidden=False, + attributeType="short", + defaultValue=-1 + ) + + parents = cmds.ls(selection[0], long=True) + cmds.setAttr(placeholder + '.parent', parents[0], type="string") def create_options(args): diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index c66eeb449f..1337713561 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,13 +6,12 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import ( - BuildWorkfile, - # build_workfile_template - # update_workfile_template -) +from openpype.api import BuildWorkfile -from openpype.lib.build_template import build_workfile_template, update_workfile_template +from openpype.lib.build_template import ( + build_workfile_template, + update_workfile_template +) from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools From 28518eeb21f2a9ef56c32c0009ce09aecf871a86 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 30 May 2022 14:20:31 +0200 Subject: [PATCH 0048/2550] change load method since avalon doesn't exsist anymore --- openpype/lib/abstract_template_loader.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py index 628d0bd895..77ba04c4db 100644 --- a/openpype/lib/abstract_template_loader.py +++ b/openpype/lib/abstract_template_loader.py @@ -5,11 +5,10 @@ import traceback import six -import openpype from openpype.settings import get_project_settings from openpype.lib import Anatomy, get_linked_assets, get_loaders_by_name from openpype.api import PypeLogger as Logger -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, load from functools import reduce @@ -271,9 +270,10 @@ class AbstractTemplateLoader: pass def load(self, placeholder, loaders_by_name, last_representation): - return openpype.pipeline.load( + repre = load.get_representation_context(last_representation) + return load.load_with_repre_context( loaders_by_name[placeholder.loader], - last_representation['_id'], + repre, options=parse_loader_args(placeholder.data['loader_args'])) def load_succeed(self, placeholder, container): From b65a1d4e79e3fa2ff4ca11392f9ccbce68a19a78 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 30 May 2022 14:53:49 +0200 Subject: [PATCH 0049/2550] fix update placeholder --- .../hosts/maya/api/lib_template_builder.py | 38 ++++++++++++++----- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index bec0f1fc66..2efc210d10 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -69,14 +69,6 @@ def create_placeholder(): cmds.setAttr(placeholder + '.parent', parents[0], type="string") -def create_options(args): - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = 
arg._data.get("items") or arg.read() - return options - - def create_placeholder_name(args, options): placeholder_builder_type = [ arg.read() for arg in args if 'builder_type' in str(arg) @@ -112,12 +104,38 @@ def update_placeholder(): if not args: return # operation canceled + options = create_options(args) + + imprint(placeholder, options) + imprint_enum(placeholder, args) + + cmds.addAttr( + placeholder, + longName="parent", + hidden=False, + dataType="string" + ) + cmds.addAttr( + placeholder, + longName="index", + hidden=False, + attributeType="short", + defaultValue=-1 + ) + + selected = cmds.ls(selection=True, long=True) + selected = selected[0].split('|')[-2] + selected = cmds.ls(selected) + parents = cmds.ls(selected, long=True) + cmds.setAttr(placeholder + '.parent', parents[0], type="string") + + +def create_options(args): options = OrderedDict() for arg in args: if not type(arg) == qargparse.Separator: options[str(arg)] = arg._data.get("items") or arg.read() - imprint(placeholder, options) - imprint_enum(placeholder, args) + return options def imprint_enum(placeholder, args): From b095249fb859c9845d00efb8d69bd515867c6e94 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 2 Jun 2022 10:40:44 +0200 Subject: [PATCH 0050/2550] change menu command for build and update workfile from template --- openpype/hosts/maya/api/menu.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 1337713561..c0bad7092f 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -166,12 +166,12 @@ def install(): cmds.menuItem( "Build Workfile from template", parent=builder_menu, - command=lambda *args: build_workfile_template() + command=build_workfile_template ) cmds.menuItem( "Update Workfile from template", parent=builder_menu, - command=lambda *args: update_workfile_template() + command=update_workfile_template ) cmds.setParent(MENU_NAME, menu=True) From 79c9dc94528ff8f3ae216f106b2225ae790fb044 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 2 Jun 2022 12:22:06 +0200 Subject: [PATCH 0051/2550] get full name placeholder to avoid any conflict between two placeholders with same short name --- .../hosts/maya/api/lib_template_builder.py | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index 2efc210d10..108988a676 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -40,33 +40,37 @@ def create_placeholder(): placeholder_name = create_placeholder_name(args, options) selection = cmds.ls(selection=True) - placeholder = cmds.spaceLocator(name=placeholder_name.capitalize())[0] + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + + # get the long name of the placeholder (with the groups) + placeholder_full_name = cmds.ls(selection[0], long=True)[0] + '|' + placeholder.replace('|', '') if selection: cmds.parent(placeholder, selection[0]) - imprint(placeholder, options) + imprint(placeholder_full_name, options) + # Some tweaks because imprint force enums to to default value so we get # back arg read and force them to attributes - imprint_enum(placeholder, args) + imprint_enum(placeholder_full_name, args) # Add helper attributes to keep placeholder info cmds.addAttr( - placeholder, + placeholder_full_name, longName="parent", - hidden=False, + hidden=True, dataType="string" ) 
cmds.addAttr( - placeholder, + placeholder_full_name, longName="index", - hidden=False, + hidden=True, attributeType="short", defaultValue=-1 ) parents = cmds.ls(selection[0], long=True) - cmds.setAttr(placeholder + '.parent', parents[0], type="string") + cmds.setAttr(placeholder_full_name + '.parent', parents[0], type="string") def create_placeholder_name(args, options): @@ -75,7 +79,10 @@ def create_placeholder_name(args, options): ][0] placeholder_family = options['family'] placeholder_name = placeholder_builder_type.split('_') - placeholder_name.insert(1, placeholder_family) + + # add famlily in any + if placeholder_family: + placeholder_name.insert(1, placeholder_family) # add loader arguments if any if options['loader_args']: @@ -85,9 +92,10 @@ def create_placeholder_name(args, options): values = [v for v in loader_args.values()] for i in range(len(values)): placeholder_name.insert(i + pos, values[i]) + placeholder_name = '_'.join(placeholder_name) - return placeholder_name + return placeholder_name.capitalize() def update_placeholder(): @@ -112,13 +120,13 @@ def update_placeholder(): cmds.addAttr( placeholder, longName="parent", - hidden=False, + hidden=True, dataType="string" ) cmds.addAttr( placeholder, longName="index", - hidden=False, + hidden=True, attributeType="short", defaultValue=-1 ) From aa7e7093df8d72357118bdb34dbe03e4e73d6801 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 8 Jun 2022 12:46:24 +0200 Subject: [PATCH 0052/2550] add a log if no reprensation found for the current placeholder --- openpype/lib/abstract_template_loader.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py index 77ba04c4db..cd0416426c 100644 --- a/openpype/lib/abstract_template_loader.py +++ b/openpype/lib/abstract_template_loader.py @@ -19,6 +19,10 @@ from openpype.lib.build_template_exceptions import ( TemplateNotFound ) +import logging + +log = logging.getLogger(__name__) + def update_representations(entities, entity): if entity['context']['subset'] not in entities: @@ -215,8 +219,15 @@ class AbstractTemplateLoader: current_asset, linked_assets ) - for representation in placeholder_representations: + if not placeholder_representations: + self.log.info( + "There's no representation for this placeholder: " + "{}".format(placeholder.data['node']) + ) + continue + + for representation in placeholder_representations: self.preload(placeholder, loaders_by_name, representation) if self.load_data_is_incorrect( From f50999d0927bf533a74417479e4cdb4a06b32b3d Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 8 Jun 2022 12:53:59 +0200 Subject: [PATCH 0053/2550] add debug logs for placeholders --- openpype/lib/abstract_template_loader.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py index cd0416426c..159d5c8f6c 100644 --- a/openpype/lib/abstract_template_loader.py +++ b/openpype/lib/abstract_template_loader.py @@ -213,7 +213,13 @@ class AbstractTemplateLoader: ignored_ids = ignored_ids or [] placeholders = self.get_placeholders() + self.log.debug("Placeholders found in template: {}".format( + [placeholder.data['node'] for placeholder in placeholders] + )) for placeholder in placeholders: + self.log.debug("Start to processing placeholder {}".format( + placeholder.data['node'] + )) placeholder_representations = self.get_placeholder_representations( placeholder, current_asset, From 
edb55949df619a81c1828571030634a4b0c49584 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 24 May 2022 12:56:24 +0200 Subject: [PATCH 0054/2550] refactor avalon imports from lib_template_builder --- .../hosts/maya/api/lib_template_builder.py | 184 ++++++++++++++++++ 1 file changed, 184 insertions(+) create mode 100644 openpype/hosts/maya/api/lib_template_builder.py diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py new file mode 100644 index 0000000000..172a6f9b2b --- /dev/null +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -0,0 +1,184 @@ +from collections import OrderedDict +import maya.cmds as cmds + +import qargparse +from openpype.tools.utils.widgets import OptionDialog +from lib import get_main_window, imprint + +# To change as enum +build_types = ["context_asset", "linked_asset", "all_assets"] + + +def get_placeholder_attributes(node): + return { + attr: cmds.getAttr("{}.{}".format(node, attr)) + for attr in cmds.listAttr(node, userDefined=True)} + + +def delete_placeholder_attributes(node): + ''' + function to delete all extra placeholder attributes + ''' + extra_attributes = get_placeholder_attributes(node) + for attribute in extra_attributes: + cmds.deleteAttr(node + '.' + attribute) + + +def create_placeholder(): + args = placeholder_window() + + if not args: + return # operation canceled, no locator created + + selection = cmds.ls(selection=True) + placeholder = cmds.spaceLocator(name="_TEMPLATE_PLACEHOLDER_")[0] + if selection: + cmds.parent(placeholder, selection[0]) + # custom arg parse to force empty data query + # and still imprint them on placeholder + # and getting items when arg is of type Enumerator + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + imprint(placeholder, options) + # Some tweaks because imprint force enums to to default value so we get + # back arg read and force them to attributes + imprint_enum(placeholder, args) + + # Add helper attributes to keep placeholder info + cmds.addAttr( + placeholder, longName="parent", + hidden=True, dataType="string") + cmds.addAttr( + placeholder, longName="index", + hidden=True, attributeType="short", + defaultValue=-1) + + +def update_placeholder(): + placeholder = cmds.ls(selection=True) + if len(placeholder) == 0: + raise ValueError("No node selected") + if len(placeholder) > 1: + raise ValueError("Too many selected nodes") + placeholder = placeholder[0] + + args = placeholder_window(get_placeholder_attributes(placeholder)) + # delete placeholder attributes + delete_placeholder_attributes(placeholder) + if not args: + return # operation canceled + + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + imprint(placeholder, options) + imprint_enum(placeholder, args) + + +def imprint_enum(placeholder, args): + """ + Imprint method doesn't act properly with enums. + Replacing the functionnality with this for now + """ + enum_values = {str(arg): arg.read() + for arg in args if arg._data.get("items")} + string_to_value_enum_table = { + build: i for i, build + in enumerate(build_types)} + for key, value in enum_values.items(): + cmds.setAttr( + placeholder + "." 
+ key, + string_to_value_enum_table[value]) + + +def placeholder_window(options=None): + options = options or dict() + dialog = OptionDialog(parent=get_main_window()) + dialog.setWindowTitle("Create Placeholder") + + args = [ + qargparse.Separator("Main attributes"), + qargparse.Enum( + "builder_type", + label="Asset Builder Type", + default=options.get("builder_type", 0), + items=build_types, + help="""Asset Builder Type +Builder type describe what template loader will look for. +context_asset : Template loader will look for subsets of +current context asset (Asset bob will find asset) +linked_asset : Template loader will look for assets linked +to current context asset. +Linked asset are looked in avalon database under field "inputLinks" +""" + ), + qargparse.String( + "family", + default=options.get("family", ""), + label="OpenPype Family", + placeholder="ex: model, look ..."), + qargparse.String( + "representation", + default=options.get("representation", ""), + label="OpenPype Representation", + placeholder="ex: ma, abc ..."), + qargparse.String( + "loader", + default=options.get("loader", ""), + label="Loader", + placeholder="ex: ReferenceLoader, LightLoader ...", + help="""Loader +Defines what openpype loader will be used to load assets. +Useable loader depends on current host's loader list. +Field is case sensitive. +"""), + qargparse.String( + "loader_args", + default=options.get("loader_args", ""), + label="Loader Arguments", + placeholder='ex: {"camera":"persp", "lights":True}', + help="""Loader +Defines a dictionnary of arguments used to load assets. +Useable arguments depend on current placeholder Loader. +Field should be a valid python dict. Anything else will be ignored. +"""), + qargparse.Integer( + "order", + default=options.get("order", 0), + min=0, + max=999, + label="Order", + placeholder="ex: 0, 100 ... 
(smallest order loaded first)", + help="""Order +Order defines asset loading priority (0 to 999) +Priority rule is : "lowest is first to load"."""), + qargparse.Separator( + "Optional attributes"), + qargparse.String( + "asset", + default=options.get("asset", ""), + label="Asset filter", + placeholder="regex filtering by asset name", + help="Filtering assets by matching field regex to asset's name"), + qargparse.String( + "subset", + default=options.get("subset", ""), + label="Subset filter", + placeholder="regex filtering by subset name", + help="Filtering assets by matching field regex to subset's name"), + qargparse.String( + "hierarchy", + default=options.get("hierarchy", ""), + label="Hierarchy filter", + placeholder="regex filtering by asset's hierarchy", + help="Filtering assets by matching field asset's hierarchy") + ] + dialog.create(args) + + if not dialog.exec_(): + return None + + return args From 15e51cd6a640aea61eb927b84ce6b48990d206f3 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 24 May 2022 14:58:27 +0200 Subject: [PATCH 0055/2550] add the templated wrokfile build schema for maya --- .../defaults/project_settings/maya.json | 8 +++++ .../projects_schema/schema_project_maya.json | 4 +++ .../schema_templated_workfile_build.json | 29 +++++++++++++++++++ 3 files changed, 41 insertions(+) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index efd22e13c8..2e0e30b74b 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -718,6 +718,14 @@ } ] }, + "templated_workfile_build": { + "profiles": [ + { + "task_types": [], + "path": "/path/to/your/template" + } + ] + }, "filters": { "preset 1": { "ValidateNoAnimation": false, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 40e98b0333..d137049e9e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -73,6 +73,10 @@ "type": "schema", "name": "schema_workfile_build" }, + { + "type": "schema", + "name": "schema_templated_workfile_build" + }, { "type": "schema", "name": "schema_publish_gui_filter" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json new file mode 100644 index 0000000000..01e74f64b0 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -0,0 +1,29 @@ +{ + "type": "dict", + "collapsible": true, + "key": "templated_workfile_build", + "label": "Templated Workfile Build Settings", + "children": [ + { + "type": "list", + "key": "profiles", + "label": "Profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "path", + "label": "Path to template", + "type": "text", + "object_type": "text" + } + ] + } + } + ] +} From c8c36144cb26df5d0024fcd02df265736bbd209f Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 24 May 2022 17:28:42 +0200 Subject: [PATCH 0056/2550] add placeholder menu to maya --- 
.../hosts/maya/api/lib_template_builder.py | 2 +- openpype/hosts/maya/api/menu.py | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index 172a6f9b2b..d8772f3f9a 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -3,7 +3,7 @@ import maya.cmds as cmds import qargparse from openpype.tools.utils.widgets import OptionDialog -from lib import get_main_window, imprint +from .lib import get_main_window, imprint # To change as enum build_types = ["context_asset", "linked_asset", "all_assets"] diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 97f06c43af..8beaf491bb 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -11,8 +11,10 @@ from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib + from .lib import get_main_window, IS_HEADLESS from .commands import reset_frame_range +from .lib_template_builder import create_placeholder, update_placeholder log = logging.getLogger(__name__) @@ -139,6 +141,24 @@ def install(): parent_widget ) ) + + builder_menu = cmds.menuItem( + "Template Builder", + subMenu=True, + tearOff=True, + parent=MENU_NAME + ) + cmds.menuItem( + "Create Placeholder", + parent=builder_menu, + command=lambda *args: create_placeholder() + ) + cmds.menuItem( + "Update Placeholder", + parent=builder_menu, + command=lambda *args: update_placeholder() + ) + cmds.setParent(MENU_NAME, menu=True) def add_scripts_menu(): From 770b6d3ab2ee9e3bdf460cee4fdba96d67e44fb2 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 25 May 2022 12:25:39 +0200 Subject: [PATCH 0057/2550] setup build template in openpype lib --- openpype/lib/__init__.py | 2 + openpype/lib/abstract_template_loader.py | 447 ++++++++++++++++++++++ openpype/lib/avalon_context.py | 222 +++++------ openpype/lib/build_template.py | 61 +++ openpype/lib/build_template_exceptions.py | 35 ++ 5 files changed, 660 insertions(+), 107 deletions(-) create mode 100644 openpype/lib/abstract_template_loader.py create mode 100644 openpype/lib/build_template.py create mode 100644 openpype/lib/build_template_exceptions.py diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 8d4e733b7d..8f3919d378 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -136,6 +136,7 @@ from .avalon_context import ( create_workfile_doc, save_workfile_data_to_doc, get_workfile_doc, + get_loaders_by_name, BuildWorkfile, @@ -308,6 +309,7 @@ __all__ = [ "create_workfile_doc", "save_workfile_data_to_doc", "get_workfile_doc", + "get_loaders_by_name", "BuildWorkfile", diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py new file mode 100644 index 0000000000..6888cbf757 --- /dev/null +++ b/openpype/lib/abstract_template_loader.py @@ -0,0 +1,447 @@ +import os +from abc import ABCMeta, abstractmethod + +import traceback + +import six + +import openpype +from openpype.settings import get_project_settings +from openpype.lib import Anatomy, get_linked_assets, get_loaders_by_name +from openpype.api import PypeLogger as Logger +from openpype.pipeline import legacy_io + +from functools import reduce + +from openpype.lib.build_template_exceptions import ( + TemplateAlreadyImported, + TemplateLoadingFailed, + 
TemplateProfileNotFound, + TemplateNotFound +) + + +def update_representations(entities, entity): + if entity['context']['subset'] not in entities: + entities[entity['context']['subset']] = entity + else: + current = entities[entity['context']['subset']] + incomming = entity + entities[entity['context']['subset']] = max( + current, incomming, + key=lambda entity: entity["context"].get("version", -1)) + + return entities + + +def parse_loader_args(loader_args): + if not loader_args: + return dict() + try: + parsed_args = eval(loader_args) + if not isinstance(parsed_args, dict): + return dict() + else: + return parsed_args + except Exception as err: + print( + "Error while parsing loader arguments '{}'.\n{}: {}\n\n" + "Continuing with default arguments. . .".format( + loader_args, + err.__class__.__name__, + err)) + return dict() + + +@six.add_metaclass(ABCMeta) +class AbstractTemplateLoader: + """ + Abstraction of Template Loader. + Properties: + template_path : property to get current template path + Methods: + import_template : Abstract Method. Used to load template, + depending on current host + get_template_nodes : Abstract Method. Used to query nodes acting + as placeholders. Depending on current host + """ + + def __init__(self, placeholder_class): + + self.loaders_by_name = get_loaders_by_name() + self.current_asset = legacy_io.Session["AVALON_ASSET"] + self.project_name = legacy_io.Session["AVALON_PROJECT"] + self.host_name = legacy_io.Session["AVALON_APP"] + self.task_name = legacy_io.Session["AVALON_TASK"] + self.placeholder_class = placeholder_class + self.current_asset_docs = legacy_io.find_one({ + "type": "asset", + "name": self.current_asset + }) + self.task_type = ( + self.current_asset_docs + .get("data", {}) + .get("tasks", {}) + .get(self.task_name, {}) + .get("type") + ) + + self.log = Logger().get_logger("BUILD TEMPLATE") + + self.log.info( + "BUILDING ASSET FROM TEMPLATE :\n" + "Starting templated build for {asset} in {project}\n\n" + "Asset : {asset}\n" + "Task : {task_name} ({task_type})\n" + "Host : {host}\n" + "Project : {project}\n".format( + asset=self.current_asset, + host=self.host_name, + project=self.project_name, + task_name=self.task_name, + task_type=self.task_type + )) + # Skip if there is no loader + if not self.loaders_by_name: + self.log.warning( + "There is no registered loaders. No assets will be loaded") + return + + def template_already_imported(self, err_msg): + """In case template was already loaded. + Raise the error as a default action. + Override this method in your template loader implementation + to manage this case.""" + self.log.error("{}: {}".format( + err_msg.__class__.__name__, + err_msg)) + raise TemplateAlreadyImported(err_msg) + + def template_loading_failed(self, err_msg): + """In case template loading failed + Raise the error as a default action. + Override this method in your template loader implementation + to manage this case. + """ + self.log.error("{}: {}".format( + err_msg.__class__.__name__, + err_msg)) + raise TemplateLoadingFailed(err_msg) + + @property + def template_path(self): + """ + Property returning template path. Avoiding setter. + Getting template path from open pype settings based on current avalon + session and solving the path variables if needed. 
+ Returns: + str: Solved template path + Raises: + TemplateProfileNotFound: No profile found from settings for + current avalon session + KeyError: Could not solve path because a key does not exists + in avalon context + TemplateNotFound: Solved path does not exists on current filesystem + """ + project_name = self.project_name + host_name = self.host_name + task_name = self.task_name + task_type = self.task_type + + anatomy = Anatomy(project_name) + project_settings = get_project_settings(project_name) + + build_info = project_settings[host_name]['templated_workfile_build'] + profiles = build_info['profiles'] + + for prf in profiles: + if prf['task_types'] and task_type not in prf['task_types']: + continue + if prf['task_names'] and task_name not in prf['task_names']: + continue + path = prf['path'] + break + else: # IF no template were found (no break happened) + raise TemplateProfileNotFound( + "No matching profile found for task '{}' of type '{}' " + "with host '{}'".format(task_name, task_type, host_name) + ) + if path is None: + raise TemplateLoadingFailed( + "Template path is not set.\n" + "Path need to be set in {}\\Template Workfile Build " + "Settings\\Profiles".format(host_name.title())) + try: + solved_path = None + while True: + solved_path = anatomy.path_remapper(path) + if solved_path is None: + solved_path = path + if solved_path == path: + break + path = solved_path + except KeyError as missing_key: + raise KeyError( + "Could not solve key '{}' in template path '{}'".format( + missing_key, path)) + finally: + solved_path = os.path.normpath(solved_path) + + if not os.path.exists(solved_path): + raise TemplateNotFound( + "Template found in openPype settings for task '{}' with host " + "'{}' does not exists. (Not found : {})".format( + task_name, host_name, solved_path)) + + self.log.info("Found template at : '{}'".format(solved_path)) + + return solved_path + + def populate_template(self, ignored_ids=None): + """ + Use template placeholders to load assets and parent them in hierarchy + Arguments : + ignored_ids : + Returns: + None + """ + loaders_by_name = self.loaders_by_name + current_asset = self.current_asset + linked_assets = [asset['name'] for asset + in get_linked_assets(self.current_asset_docs)] + + ignored_ids = ignored_ids or [] + placeholders = self.get_placeholders() + for placeholder in placeholders: + placeholder_representations = self.get_placeholder_representations( + placeholder, + current_asset, + linked_assets + ) + for representation in placeholder_representations: + + self.preload(placeholder, loaders_by_name, representation) + + if self.load_data_is_incorrect( + placeholder, + representation, + ignored_ids): + continue + + self.log.info( + "Loading {}_{} with loader {}\n" + "Loader arguments used : {}".format( + representation['context']['asset'], + representation['context']['subset'], + placeholder.loader, + placeholder.data['loader_args'])) + + try: + container = self.load( + placeholder, loaders_by_name, representation) + except Exception: + self.load_failed(placeholder, representation) + else: + self.load_succeed(placeholder, container) + finally: + self.postload(placeholder) + + def get_placeholder_representations( + self, placeholder, current_asset, linked_assets): + placeholder_db_filters = placeholder.convert_to_db_filters( + current_asset, + linked_assets) + # get representation by assets + for db_filter in placeholder_db_filters: + placeholder_representations = list(avalon.io.find(db_filter)) + for representation in 
reduce(update_representations, + placeholder_representations, + dict()).values(): + yield representation + + def load_data_is_incorrect( + self, placeholder, last_representation, ignored_ids): + if not last_representation: + self.log.warning(placeholder.err_message()) + return True + if (str(last_representation['_id']) in ignored_ids): + print("Ignoring : ", last_representation['_id']) + return True + return False + + def preload(self, placeholder, loaders_by_name, last_representation): + pass + + def load(self, placeholder, loaders_by_name, last_representation): + return openpype.pipeline.load( + loaders_by_name[placeholder.loader], + last_representation['_id'], + options=parse_loader_args(placeholder.data['loader_args'])) + + def load_succeed(self, placeholder, container): + placeholder.parent_in_hierarchy(container) + + def load_failed(self, placeholder, last_representation): + self.log.warning("Got error trying to load {}:{} with {}\n\n" + "{}".format(last_representation['context']['asset'], + last_representation['context']['subset'], + placeholder.loader, + traceback.format_exc())) + + def postload(self, placeholder): + placeholder.clean() + + def update_missing_containers(self): + loaded_containers_ids = self.get_loaded_containers_by_id() + self.populate_template(ignored_ids=loaded_containers_ids) + + def get_placeholders(self): + placeholder_class = self.placeholder_class + placeholders = map(placeholder_class, self.get_template_nodes()) + valid_placeholders = filter(placeholder_class.is_valid, placeholders) + sorted_placeholders = sorted(valid_placeholders, + key=placeholder_class.order) + return sorted_placeholders + + @abstractmethod + def get_loaded_containers_by_id(self): + """ + Collect already loaded containers for updating scene + Return: + dict (string, node): A dictionnary id as key + and containers as value + """ + pass + + @abstractmethod + def import_template(self, template_path): + """ + Import template in current host + Args: + template_path (str): fullpath to current task and + host's template file + Return: + None + """ + pass + + @abstractmethod + def get_template_nodes(self): + """ + Returning a list of nodes acting as host placeholders for + templating. The data representation is by user. + AbstractLoadTemplate (and LoadTemplate) won't directly manipulate nodes + Args : + None + Returns: + list(AnyNode): Solved template path + """ + pass + + +@six.add_metaclass(ABCMeta) +class AbstractPlaceholder: + """Abstraction of placeholders logic + Properties: + attributes: A list of mandatory attribute to decribe placeholder + and assets to load. + optional_attributes: A list of optional attribute to decribe + placeholder and assets to load + loader: Name of linked loader to use while loading assets + is_context: Is placeholder linked + to context asset (or to linked assets) + Methods: + is_repres_valid: + loader: + order: + is_valid: + get_data: + parent_in_hierachy: + """ + + attributes = {'builder_type', 'op_family', 'op_representation', + 'order', 'loader', 'loader_args'} + optional_attributes = {} + + def __init__(self, node): + self.get_data(node) + + def order(self): + """Get placeholder order. 
+ Order is used to sort them by priority + Priority is lowset first, highest last + (ex: + 1: First to load + 100: Last to load) + Returns: + Int: Order priority + """ + return self.data.get('order') + + @property + def loader(self): + """Return placeholder loader type + Returns: + string: Loader name + """ + return self.data.get('loader') + + @property + def is_context(self): + """Return placeholder type + context_asset: For loading current asset + linked_asset: For loading linked assets + Returns: + bool: true if placeholder is a context placeholder + """ + return self.data.get('builder_type') == 'context_asset' + + def is_valid(self): + """Test validity of placeholder + i.e.: every attributes exists in placeholder data + Returns: + Bool: True if every attributes are a key of data + """ + if set(self.attributes).issubset(self.data.keys()): + print("Valid placeholder : {}".format(self.data["node"])) + return True + print("Placeholder is not valid : {}".format(self.data["node"])) + return False + + @abstractmethod + def parent_in_hierarchy(self, containers): + """Place container in correct hierarchy + given by placeholder + Args: + containers (String): Container name returned back by + placeholder's loader. + """ + pass + + @abstractmethod + def clean(self): + """Clean placeholder from hierarchy after loading assets. + """ + pass + + @abstractmethod + def convert_to_db_filters(self, current_asset, linked_asset): + """map current placeholder data as a db filter + args: + current_asset (String): Name of current asset in context + linked asset (list[String]) : Names of assets linked to + current asset in context + Returns: + dict: a dictionnary describing a filter to look for asset in + a database + """ + pass + + @abstractmethod + def get_data(self, node): + """ + Collect placeholders information. + Args: + node (AnyNode): A unique node decided by Placeholder implementation + """ + pass diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 9d8a92cfe9..8c80b4a4ae 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -15,6 +15,7 @@ from openpype.settings import ( get_project_settings, get_system_settings ) + from .anatomy import Anatomy from .profiles_filtering import filter_profiles from .events import emit_event @@ -922,6 +923,118 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): ) +@with_pipeline_io +def collect_last_version_repres(asset_entities): + """Collect subsets, versions and representations for asset_entities. + + Args: + asset_entities (list): Asset entities for which want to find data + + Returns: + (dict): collected entities + + Example output: + ``` + { + {Asset ID}: { + "asset_entity": , + "subsets": { + {Subset ID}: { + "subset_entity": , + "version": { + "version_entity": , + "repres": [ + , , ... + ] + } + }, + ... + } + }, + ... 
+ } + output[asset_id]["subsets"][subset_id]["version"]["repres"] + ``` + """ + + if not asset_entities: + return {} + + asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} + + subsets = list(legacy_io.find({ + "type": "subset", + "parent": {"$in": list(asset_entity_by_ids.keys())} + })) + subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} + + sorted_versions = list(legacy_io.find({ + "type": "version", + "parent": {"$in": list(subset_entity_by_ids.keys())} + }).sort("name", -1)) + + subset_id_with_latest_version = [] + last_versions_by_id = {} + for version in sorted_versions: + subset_id = version["parent"] + if subset_id in subset_id_with_latest_version: + continue + subset_id_with_latest_version.append(subset_id) + last_versions_by_id[version["_id"]] = version + + repres = legacy_io.find({ + "type": "representation", + "parent": {"$in": list(last_versions_by_id.keys())} + }) + + output = {} + for repre in repres: + version_id = repre["parent"] + version = last_versions_by_id[version_id] + + subset_id = version["parent"] + subset = subset_entity_by_ids[subset_id] + + asset_id = subset["parent"] + asset = asset_entity_by_ids[asset_id] + + if asset_id not in output: + output[asset_id] = { + "asset_entity": asset, + "subsets": {} + } + + if subset_id not in output[asset_id]["subsets"]: + output[asset_id]["subsets"][subset_id] = { + "subset_entity": subset, + "version": { + "version_entity": version, + "repres": [] + } + } + + output[asset_id]["subsets"][subset_id]["version"]["repres"].append( + repre + ) + + return output + + +@with_pipeline_io +def get_loaders_by_name(): + from openpype.pipeline import discover_loader_plugins + + loaders_by_name = {} + for loader in discover_loader_plugins(): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {} !".format(loader_name) + ) + loaders_by_name[loader_name] = loader + return loaders_by_name + + class BuildWorkfile: """Wrapper for build workfile process. @@ -979,8 +1092,6 @@ class BuildWorkfile: ... }] """ - from openpype.pipeline import discover_loader_plugins - # Get current asset name and entity current_asset_name = legacy_io.Session["AVALON_ASSET"] current_asset_entity = legacy_io.find_one({ @@ -996,14 +1107,7 @@ class BuildWorkfile: return # Prepare available loaders - loaders_by_name = {} - for loader in discover_loader_plugins(): - loader_name = loader.__name__ - if loader_name in loaders_by_name: - raise KeyError( - "Duplicated loader name {0}!".format(loader_name) - ) - loaders_by_name[loader_name] = loader + loaders_by_name = get_loaders_by_name() # Skip if there are any loaders if not loaders_by_name: @@ -1075,7 +1179,7 @@ class BuildWorkfile: return # Prepare entities from database for assets - prepared_entities = self._collect_last_version_repres(assets) + prepared_entities = collect_last_version_repres(assets) # Load containers by prepared entities and presets loaded_containers = [] @@ -1491,102 +1595,6 @@ class BuildWorkfile: return loaded_containers - @with_pipeline_io - def _collect_last_version_repres(self, asset_entities): - """Collect subsets, versions and representations for asset_entities. - - Args: - asset_entities (list): Asset entities for which want to find data - - Returns: - (dict): collected entities - - Example output: - ``` - { - {Asset ID}: { - "asset_entity": , - "subsets": { - {Subset ID}: { - "subset_entity": , - "version": { - "version_entity": , - "repres": [ - , , ... - ] - } - }, - ... - } - }, - ... 
- } - output[asset_id]["subsets"][subset_id]["version"]["repres"] - ``` - """ - - if not asset_entities: - return {} - - asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} - - subsets = list(legacy_io.find({ - "type": "subset", - "parent": {"$in": list(asset_entity_by_ids.keys())} - })) - subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - - sorted_versions = list(legacy_io.find({ - "type": "version", - "parent": {"$in": list(subset_entity_by_ids.keys())} - }).sort("name", -1)) - - subset_id_with_latest_version = [] - last_versions_by_id = {} - for version in sorted_versions: - subset_id = version["parent"] - if subset_id in subset_id_with_latest_version: - continue - subset_id_with_latest_version.append(subset_id) - last_versions_by_id[version["_id"]] = version - - repres = legacy_io.find({ - "type": "representation", - "parent": {"$in": list(last_versions_by_id.keys())} - }) - - output = {} - for repre in repres: - version_id = repre["parent"] - version = last_versions_by_id[version_id] - - subset_id = version["parent"] - subset = subset_entity_by_ids[subset_id] - - asset_id = subset["parent"] - asset = asset_entity_by_ids[asset_id] - - if asset_id not in output: - output[asset_id] = { - "asset_entity": asset, - "subsets": {} - } - - if subset_id not in output[asset_id]["subsets"]: - output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset, - "version": { - "version_entity": version, - "repres": [] - } - } - - output[asset_id]["subsets"][subset_id]["version"]["repres"].append( - repre - ) - - return output - @with_pipeline_io def get_creator_by_name(creator_name, case_sensitive=False): diff --git a/openpype/lib/build_template.py b/openpype/lib/build_template.py new file mode 100644 index 0000000000..7f749cbec2 --- /dev/null +++ b/openpype/lib/build_template.py @@ -0,0 +1,61 @@ +from openpype.pipeline import registered_host +from openpype.lib import classes_from_module +from importlib import import_module + +from .abstract_template_loader import ( + AbstractPlaceholder, + AbstractTemplateLoader) + +from .build_template_exceptions import ( + TemplateLoadingFailed, + TemplateAlreadyImported, + MissingHostTemplateModule, + MissingTemplatePlaceholderClass, + MissingTemplateLoaderClass +) + +_module_path_format = 'openpype.{host}.template_loader' + + +def build_workfile_template(*args): + template_loader = build_template_loader() + try: + template_loader.import_template(template_loader.template_path) + except TemplateAlreadyImported as err: + template_loader.template_already_imported(err) + except TemplateLoadingFailed as err: + template_loader.template_loading_failed(err) + else: + template_loader.populate_template() + + +def update_workfile_template(args): + template_loader = build_template_loader() + template_loader.update_missing_containers() + + +def build_template_loader(): + host_name = registered_host().__name__.partition('.')[2] + module_path = _module_path_format.format(host=host_name) + module = import_module(module_path) + if not module: + raise MissingHostTemplateModule( + "No template loader found for host {}".format(host_name)) + + template_loader_class = classes_from_module( + AbstractTemplateLoader, + module + ) + template_placeholder_class = classes_from_module( + AbstractPlaceholder, + module + ) + + if not template_loader_class: + raise MissingTemplateLoaderClass() + template_loader_class = template_loader_class[0] + + if not template_placeholder_class: + raise MissingTemplatePlaceholderClass() + template_placeholder_class 
= template_placeholder_class[0]
+    return template_loader_class(template_placeholder_class)
diff --git a/openpype/lib/build_template_exceptions.py b/openpype/lib/build_template_exceptions.py
new file mode 100644
index 0000000000..d781eff204
--- /dev/null
+++ b/openpype/lib/build_template_exceptions.py
@@ -0,0 +1,35 @@
+class MissingHostTemplateModule(Exception):
+    """Error raised when expected module does not exist"""
+    pass
+
+
+class MissingTemplatePlaceholderClass(Exception):
+    """Error raised when module doesn't implement a placeholder class"""
+    pass
+
+
+class MissingTemplateLoaderClass(Exception):
+    """Error raised when module doesn't implement a template loader class"""
+    pass
+
+
+class TemplateNotFound(Exception):
+    """Exception raised when template does not exist."""
+    pass
+
+
+class TemplateProfileNotFound(Exception):
+    """Exception raised when current profile
+    doesn't match any template profile"""
+    pass
+
+
+class TemplateAlreadyImported(Exception):
+    """Error raised when Template was already imported by host for
+    this session"""
+    pass
+
+
+class TemplateLoadingFailed(Exception):
+    """Error raised when Template loader was unable to load the template"""
+    pass
\ No newline at end of file

From a5a3685f2b5b99bbd1f8de78581eb17af0175ed3 Mon Sep 17 00:00:00 2001
From: Thomas Fricard
Date: Wed, 25 May 2022 12:52:44 +0200
Subject: [PATCH 0058/2550] add template loader module

---
 openpype/hosts/maya/api/template_loader.py | 242 +++++++++++++++++++++
 1 file changed, 242 insertions(+)
 create mode 100644 openpype/hosts/maya/api/template_loader.py

diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py
new file mode 100644
index 0000000000..0e346ca411
--- /dev/null
+++ b/openpype/hosts/maya/api/template_loader.py
@@ -0,0 +1,242 @@
+from maya import cmds
+
+from openpype.pipeline import legacy_io
+from openpype.lib.abstract_template_loader import (
+    AbstractPlaceholder,
+    AbstractTemplateLoader
+)
+from openpype.lib.build_template_exceptions import TemplateAlreadyImported
+
+PLACEHOLDER_SET = 'PLACEHOLDERS_SET'
+
+
+class MayaTemplateLoader(AbstractTemplateLoader):
+    """Concrete implementation of AbstractTemplateLoader for maya
+    """
+
+    def import_template(self, path):
+        """Import template into current scene.
+        Block if a template is already loaded.
+        Args:
+            path (str): A path to current template (usually given by
+            get_template_path implementation)
+        Returns:
+            bool: Whether the template was successfully imported or not
+        """
+        if cmds.objExists(PLACEHOLDER_SET):
+            raise TemplateAlreadyImported(
+                "Build template already loaded\n"
+                "Clean scene if needed (File > New Scene)")
+
+        cmds.sets(name=PLACEHOLDER_SET, empty=True)
+        self.new_nodes = cmds.file(path, i=True, returnNewNodes=True)
+        cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True)
+
+        for set in cmds.listSets(allSets=True):
+            if (cmds.objExists(set) and
+                    cmds.attributeQuery('id', node=set, exists=True) and
+                    cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'):
+                if cmds.attributeQuery('asset', node=set, exists=True):
+                    cmds.setAttr(
+                        set + '.asset',
+                        legacy_io.Session['AVALON_ASSET'], type='string'
+                    )
+
+        return True
+
+    def template_already_imported(self, err_msg):
+        clearButton = "Clear scene and build"
+        updateButton = "Update template"
+        abortButton = "Abort"
+
+        title = "Scene already built"
+        message = (
+            "It seems a template was already built for this scene.\n"
+            "Error message received :\n\n\"{}\"".format(err_msg))
+        buttons = [clearButton, updateButton, abortButton]
+        defaultButton = clearButton
+        cancelButton = abortButton
+        dismissString = abortButton
+        answer = cmds.confirmDialog(
+            t=title,
+            m=message,
+            b=buttons,
+            db=defaultButton,
+            cb=cancelButton,
+            ds=dismissString)
+
+        if answer == clearButton:
+            cmds.file(newFile=True, force=True)
+            self.import_template(self.template_path)
+            self.populate_template()
+        elif answer == updateButton:
+            self.update_missing_containers()
+        elif answer == abortButton:
+            return
+
+    @staticmethod
+    def get_template_nodes():
+        attributes = cmds.ls('*.builder_type', long=True)
+        return [attribute.rpartition('.')[0] for attribute in attributes]
+
+    def get_loaded_containers_by_id(self):
+        containers = cmds.sets('AVALON_CONTAINERS', q=True)
+        return [
+            cmds.getAttr(container + '.representation')
+            for container in containers]
+
+
+class MayaPlaceholder(AbstractPlaceholder):
+    """Concrete implementation of AbstractPlaceholder for maya
+    """
+
+    optional_attributes = {'asset', 'subset', 'hierarchy'}
+
+    def get_data(self, node):
+        user_data = dict()
+        for attr in self.attributes.union(self.optional_attributes):
+            attribute_name = '{}.{}'.format(node, attr)
+            if not cmds.attributeQuery(attr, node=node, exists=True):
+                print("{} not found".format(attribute_name))
+                continue
+            user_data[attr] = cmds.getAttr(
+                attribute_name,
+                asString=True)
+        user_data['parent'] = (
+            cmds.getAttr(node + '.parent', asString=True)
+            or node.rpartition('|')[0] or "")
+        user_data['node'] = node
+        if user_data['parent']:
+            siblings = cmds.listRelatives(user_data['parent'], children=True)
+        else:
+            siblings = cmds.ls(assemblies=True)
+        node_shortname = user_data['node'].rpartition('|')[2]
+        current_index = cmds.getAttr(node + '.index', asString=True)
+        user_data['index'] = (
+            current_index if current_index >= 0
+            else siblings.index(node_shortname))
+
+        self.data = user_data
+
+    def parent_in_hierarchy(self, containers):
+        """Parent loaded container to placeholder's parent
+        i.e.: Set loaded content as placeholder's sibling
+        Args:
+            containers (String): Placeholder loaded containers
+        """
+        if not containers:
+            return
+
+        roots = cmds.sets(containers, q=True)
+        nodes_to_parent = []
+        for root in roots:
+            if root.endswith("_RN"):
+                refRoot = cmds.referenceQuery(root, n=True)[0]
+                refRoot = cmds.listRelatives(refRoot, parent=True) or
[refRoot] + nodes_to_parent.extend(refRoot) + elif root in cmds.listSets(allSets=True): + if not cmds.sets(root, q=True): + return + else: + continue + else: + nodes_to_parent.append(root) + + if self.data['parent']: + cmds.parent(nodes_to_parent, self.data['parent']) + # Move loaded nodes to correct index in outliner hierarchy + placeholder_node = self.data['node'] + placeholder_form = cmds.xform( + placeholder_node, + q=True, + matrix=True, + worldSpace=True + ) + for node in set(nodes_to_parent): + cmds.reorder(node, front=True) + cmds.reorder(node, relative=self.data['index']) + cmds.xform(node, matrix=placeholder_form, ws=True) + + holding_sets = cmds.listSets(object=placeholder_node) + if not holding_sets: + return + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) + + def clean(self): + """Hide placeholder, parent them to root + add them to placeholder set and register placeholder's parent + to keep placeholder info available for future use + """ + node = self.data['node'] + if self.data['parent']: + cmds.setAttr(node + '.parent', self.data['parent'], type='string') + if cmds.getAttr(node + '.index') < 0: + cmds.setAttr(node + '.index', self.data['index']) + + holding_sets = cmds.listSets(object=node) + if holding_sets: + for set in holding_sets: + cmds.sets(node, remove=set) + + if cmds.listRelatives(node, p=True): + node = cmds.parent(node, world=True)[0] + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr(node + '.hiddenInOutliner', True) + + def convert_to_db_filters(self, current_asset, linked_asset): + if self.data['builder_type'] == "context_asset": + return [ + { + "type": "representation", + "context.asset": { + "$eq": current_asset, + "$regex": self.data['asset'] + }, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + } + ] + + elif self.data['builder_type'] == "linked_asset": + return [ + { + "type": "representation", + "context.asset": { + "$eq": asset_name, + "$regex": self.data['asset'] + }, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + } for asset_name in linked_asset + ] + + else: + return [ + { + "type": "representation", + "context.asset": {"$regex": self.data['asset']}, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + } + ] + + def err_message(self): + return ( + "Error while trying to load a representation.\n" + "Either the subset wasn't published or the template is malformed." 
+            "\n\n"
+            "Builder was looking for :\n{attributes}".format(
+                attributes="\n".join([
+                    "{}: {}".format(key.title(), value)
+                    for key, value in self.data.items()]
+                )
+            )
+        )

From f2ae0ffa5950d922fd3cb90ce8bbf30ec64ca0b7 Mon Sep 17 00:00:00 2001
From: Thomas Fricard
Date: Wed, 25 May 2022 14:28:28 +0200
Subject: [PATCH 0059/2550] add build workfile in menu

---
 openpype/hosts/maya/api/menu.py | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py
index 8beaf491bb..c66eeb449f 100644
--- a/openpype/hosts/maya/api/menu.py
+++ b/openpype/hosts/maya/api/menu.py
@@ -6,7 +6,13 @@ from Qt import QtWidgets, QtGui
 import maya.utils
 import maya.cmds as cmds
 
-from openpype.api import BuildWorkfile
+from openpype.api import (
+    BuildWorkfile,
+    # build_workfile_template
+    # update_workfile_template
+)
+
+from openpype.lib.build_template import build_workfile_template, update_workfile_template
 from openpype.settings import get_project_settings
 from openpype.pipeline import legacy_io
 from openpype.tools.utils import host_tools
@@ -158,6 +164,16 @@ def install():
         parent=builder_menu,
         command=lambda *args: update_placeholder()
     )
+    cmds.menuItem(
+        "Build Workfile from template",
+        parent=builder_menu,
+        command=lambda *args: build_workfile_template()
+    )
+    cmds.menuItem(
+        "Update Workfile from template",
+        parent=builder_menu,
+        command=lambda *args: update_workfile_template()
+    )
     cmds.setParent(MENU_NAME, menu=True)
 
 

From 41a47bb2bfb9b728f0cad37f5614e5d382b2d9d1 Mon Sep 17 00:00:00 2001
From: Thomas Fricard
Date: Wed, 25 May 2022 16:35:05 +0200
Subject: [PATCH 0060/2550] delete the task_name verification since it does
 not exist in the maya menu settings

---
 openpype/lib/abstract_template_loader.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py
index 6888cbf757..2dfec1a006 100644
--- a/openpype/lib/abstract_template_loader.py
+++ b/openpype/lib/abstract_template_loader.py
@@ -157,8 +157,6 @@ class AbstractTemplateLoader:
         for prf in profiles:
             if prf['task_types'] and task_type not in prf['task_types']:
                 continue
-            if prf['task_names'] and task_name not in prf['task_names']:
-                continue
             path = prf['path']
             break
         else:  # IF no template were found (no break happened)

From 58814d21e4688fbb13d183fab7ba9010c68b57f8 Mon Sep 17 00:00:00 2001
From: Thomas Fricard
Date: Wed, 25 May 2022 16:38:47 +0200
Subject: [PATCH 0061/2550] rename attributes correctly to correspond to the
 ones in the placeholders

---
 openpype/lib/abstract_template_loader.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py
index 2dfec1a006..628d0bd895 100644
--- a/openpype/lib/abstract_template_loader.py
+++ b/openpype/lib/abstract_template_loader.py
@@ -357,7 +357,7 @@ class AbstractPlaceholder:
         parent_in_hierachy:
     """
 
-    attributes = {'builder_type', 'op_family', 'op_representation',
+    attributes = {'builder_type', 'family', 'representation',
                   'order', 'loader', 'loader_args'}
     optional_attributes =
{} From 6cb037d3d63290752bacc0aa8c2b81cac8e3b370 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Fri, 27 May 2022 12:44:51 +0200 Subject: [PATCH 0062/2550] create placeholder name dynamically from arguments --- .../hosts/maya/api/lib_template_builder.py | 53 +++++++++++++++---- 1 file changed, 43 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index d8772f3f9a..ee78f19a3e 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -1,3 +1,4 @@ +import json from collections import OrderedDict import maya.cmds as cmds @@ -30,17 +31,20 @@ def create_placeholder(): if not args: return # operation canceled, no locator created - selection = cmds.ls(selection=True) - placeholder = cmds.spaceLocator(name="_TEMPLATE_PLACEHOLDER_")[0] - if selection: - cmds.parent(placeholder, selection[0]) # custom arg parse to force empty data query # and still imprint them on placeholder # and getting items when arg is of type Enumerator - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = arg._data.get("items") or arg.read() + options = create_options(args) + + # create placeholder name dynamically from args and options + placeholder_name = create_placeholder_name(args, options) + + selection = cmds.ls(selection=True) + placeholder = cmds.spaceLocator(name=placeholder_name.capitalize())[0] + + if selection: + cmds.parent(placeholder, selection[0]) + imprint(placeholder, options) # Some tweaks because imprint force enums to to default value so we get # back arg read and force them to attributes @@ -49,13 +53,42 @@ def create_placeholder(): # Add helper attributes to keep placeholder info cmds.addAttr( placeholder, longName="parent", - hidden=True, dataType="string") + hidden=False, dataType="string") cmds.addAttr( placeholder, longName="index", - hidden=True, attributeType="short", + hidden=False, attributeType="short", defaultValue=-1) +def create_options(args): + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + return options + + +def create_placeholder_name(args, options): + placeholder_builder_type = [ + arg.read() for arg in args if 'builder_type' in str(arg) + ][0] + placeholder_family = options['family'] + placeholder_name = placeholder_builder_type.split('_') + placeholder_name.insert(1, placeholder_family) + + # add loader arguments if any + if options['loader_args']: + pos = 2 + loader_args = options['loader_args'].replace('\'', '\"') + loader_args = json.loads(loader_args) + values = [v for v in loader_args.values()] + for i in range(len(values)): + placeholder_name.insert(i + pos, values[i]) + placeholder_name = '_'.join(placeholder_name) + + return placeholder_name + + def update_placeholder(): placeholder = cmds.ls(selection=True) if len(placeholder) == 0: From aa88ee13c0d3b647dc9c11534ed4a742168b0e1d Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 30 May 2022 14:19:12 +0200 Subject: [PATCH 0063/2550] minor refactoring --- .../hosts/maya/api/lib_template_builder.py | 19 ++++++++++++++----- openpype/hosts/maya/api/menu.py | 11 +++++------ 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index ee78f19a3e..bec0f1fc66 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py 
+++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -52,12 +52,21 @@ def create_placeholder(): # Add helper attributes to keep placeholder info cmds.addAttr( - placeholder, longName="parent", - hidden=False, dataType="string") + placeholder, + longName="parent", + hidden=False, + dataType="string" + ) cmds.addAttr( - placeholder, longName="index", - hidden=False, attributeType="short", - defaultValue=-1) + placeholder, + longName="index", + hidden=False, + attributeType="short", + defaultValue=-1 + ) + + parents = cmds.ls(selection[0], long=True) + cmds.setAttr(placeholder + '.parent', parents[0], type="string") def create_options(args): diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index c66eeb449f..1337713561 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,13 +6,12 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import ( - BuildWorkfile, - # build_workfile_template - # update_workfile_template -) +from openpype.api import BuildWorkfile -from openpype.lib.build_template import build_workfile_template, update_workfile_template +from openpype.lib.build_template import ( + build_workfile_template, + update_workfile_template +) from openpype.settings import get_project_settings from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools From d8edf2b1aa9e83861bad1b8aef6da69cc6011de4 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 30 May 2022 14:20:31 +0200 Subject: [PATCH 0064/2550] change load method since avalon doesn't exsist anymore --- openpype/lib/abstract_template_loader.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py index 628d0bd895..77ba04c4db 100644 --- a/openpype/lib/abstract_template_loader.py +++ b/openpype/lib/abstract_template_loader.py @@ -5,11 +5,10 @@ import traceback import six -import openpype from openpype.settings import get_project_settings from openpype.lib import Anatomy, get_linked_assets, get_loaders_by_name from openpype.api import PypeLogger as Logger -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, load from functools import reduce @@ -271,9 +270,10 @@ class AbstractTemplateLoader: pass def load(self, placeholder, loaders_by_name, last_representation): - return openpype.pipeline.load( + repre = load.get_representation_context(last_representation) + return load.load_with_repre_context( loaders_by_name[placeholder.loader], - last_representation['_id'], + repre, options=parse_loader_args(placeholder.data['loader_args'])) def load_succeed(self, placeholder, container): From bae9eef400e2b1195d8ece023543ca8d89c83b1b Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 30 May 2022 14:53:49 +0200 Subject: [PATCH 0065/2550] fix update placeholder --- .../hosts/maya/api/lib_template_builder.py | 38 ++++++++++++++----- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index bec0f1fc66..2efc210d10 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -69,14 +69,6 @@ def create_placeholder(): cmds.setAttr(placeholder + '.parent', parents[0], type="string") -def create_options(args): - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = 
arg._data.get("items") or arg.read() - return options - - def create_placeholder_name(args, options): placeholder_builder_type = [ arg.read() for arg in args if 'builder_type' in str(arg) @@ -112,12 +104,38 @@ def update_placeholder(): if not args: return # operation canceled + options = create_options(args) + + imprint(placeholder, options) + imprint_enum(placeholder, args) + + cmds.addAttr( + placeholder, + longName="parent", + hidden=False, + dataType="string" + ) + cmds.addAttr( + placeholder, + longName="index", + hidden=False, + attributeType="short", + defaultValue=-1 + ) + + selected = cmds.ls(selection=True, long=True) + selected = selected[0].split('|')[-2] + selected = cmds.ls(selected) + parents = cmds.ls(selected, long=True) + cmds.setAttr(placeholder + '.parent', parents[0], type="string") + + +def create_options(args): options = OrderedDict() for arg in args: if not type(arg) == qargparse.Separator: options[str(arg)] = arg._data.get("items") or arg.read() - imprint(placeholder, options) - imprint_enum(placeholder, args) + return options def imprint_enum(placeholder, args): From 349d57a4a8ec86364d64b02798c7579f6c3cb5c2 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 2 Jun 2022 10:40:44 +0200 Subject: [PATCH 0066/2550] change menu command for build and update workfile from template --- openpype/hosts/maya/api/menu.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 1337713561..c0bad7092f 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -166,12 +166,12 @@ def install(): cmds.menuItem( "Build Workfile from template", parent=builder_menu, - command=lambda *args: build_workfile_template() + command=build_workfile_template ) cmds.menuItem( "Update Workfile from template", parent=builder_menu, - command=lambda *args: update_workfile_template() + command=update_workfile_template ) cmds.setParent(MENU_NAME, menu=True) From e2506d569adf78c74ba6452643fec5d1afca0ab2 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 2 Jun 2022 12:22:06 +0200 Subject: [PATCH 0067/2550] get full name placeholder to avoid any conflict between two placeholders with same short name --- .../hosts/maya/api/lib_template_builder.py | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index 2efc210d10..108988a676 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -40,33 +40,37 @@ def create_placeholder(): placeholder_name = create_placeholder_name(args, options) selection = cmds.ls(selection=True) - placeholder = cmds.spaceLocator(name=placeholder_name.capitalize())[0] + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + + # get the long name of the placeholder (with the groups) + placeholder_full_name = cmds.ls(selection[0], long=True)[0] + '|' + placeholder.replace('|', '') if selection: cmds.parent(placeholder, selection[0]) - imprint(placeholder, options) + imprint(placeholder_full_name, options) + # Some tweaks because imprint force enums to to default value so we get # back arg read and force them to attributes - imprint_enum(placeholder, args) + imprint_enum(placeholder_full_name, args) # Add helper attributes to keep placeholder info cmds.addAttr( - placeholder, + placeholder_full_name, longName="parent", - hidden=False, + hidden=True, dataType="string" ) 
     cmds.addAttr(
-        placeholder,
+        placeholder_full_name,
         longName="index",
-        hidden=False,
+        hidden=True,
         attributeType="short",
         defaultValue=-1
     )
 
     parents = cmds.ls(selection[0], long=True)
-    cmds.setAttr(placeholder + '.parent', parents[0], type="string")
+    cmds.setAttr(placeholder_full_name + '.parent', parents[0], type="string")
 
 
 def create_placeholder_name(args, options):
@@ -75,7 +79,10 @@ def create_placeholder_name(args, options):
     ][0]
     placeholder_family = options['family']
     placeholder_name = placeholder_builder_type.split('_')
-    placeholder_name.insert(1, placeholder_family)
+
+    # add family if any
+    if placeholder_family:
+        placeholder_name.insert(1, placeholder_family)
 
     # add loader arguments if any
     if options['loader_args']:
@@ -85,9 +92,10 @@ def create_placeholder_name(args, options):
         values = [v for v in loader_args.values()]
         for i in range(len(values)):
             placeholder_name.insert(i + pos, values[i])
+
     placeholder_name = '_'.join(placeholder_name)
 
-    return placeholder_name
+    return placeholder_name.capitalize()
 
 
 def update_placeholder():
@@ -112,13 +120,13 @@ def update_placeholder():
     cmds.addAttr(
         placeholder,
         longName="parent",
-        hidden=False,
+        hidden=True,
         dataType="string"
     )
     cmds.addAttr(
         placeholder,
         longName="index",
-        hidden=False,
+        hidden=True,
         attributeType="short",
         defaultValue=-1
     )

From a6d948aa93e2c84c001a109c50dede9b9c160321 Mon Sep 17 00:00:00 2001
From: Thomas Fricard
Date: Wed, 8 Jun 2022 12:46:24 +0200
Subject: [PATCH 0068/2550] add a log if no representation is found for the current placeholder

---
 openpype/lib/abstract_template_loader.py | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py
index 77ba04c4db..cd0416426c 100644
--- a/openpype/lib/abstract_template_loader.py
+++ b/openpype/lib/abstract_template_loader.py
@@ -19,6 +19,10 @@ from openpype.lib.build_template_exceptions import (
     TemplateNotFound
 )
 
+import logging
+
+log = logging.getLogger(__name__)
+
 
 def update_representations(entities, entity):
     if entity['context']['subset'] not in entities:
@@ -215,8 +219,15 @@ class AbstractTemplateLoader:
                 current_asset,
                 linked_assets
             )
-            for representation in placeholder_representations:
 
+            if not placeholder_representations:
+                self.log.info(
+                    "There's no representation for this placeholder: "
+                    "{}".format(placeholder.data['node'])
+                )
+                continue
+
+            for representation in placeholder_representations:
                 self.preload(placeholder, loaders_by_name, representation)
 
                 if self.load_data_is_incorrect(

From d6543bf281a418fe768059a58a1a9eb8257ef68f Mon Sep 17 00:00:00 2001
From: Thomas Fricard
Date: Wed, 8 Jun 2022 12:53:59 +0200
Subject: [PATCH 0069/2550] add debug logs for placeholders

---
 openpype/lib/abstract_template_loader.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py
index cd0416426c..159d5c8f6c 100644
--- a/openpype/lib/abstract_template_loader.py
+++ b/openpype/lib/abstract_template_loader.py
@@ -213,7 +213,13 @@ class AbstractTemplateLoader:
         ignored_ids = ignored_ids or []
 
         placeholders = self.get_placeholders()
+        self.log.debug("Placeholders found in template: {}".format(
+            [placeholder.data['node'] for placeholder in placeholders]
+        ))
        for placeholder in placeholders:
+            self.log.debug("Start processing placeholder {}".format(
+                placeholder.data['node']
+            ))
             placeholder_representations = self.get_placeholder_representations(
                 placeholder,
                 current_asset,
From 4a02dd039de637398aa6c2a1d2c26ba772f720da Mon Sep 17 00:00:00 2001
From: Thomas Fricard
Date: Thu, 9 Jun 2022 14:51:48 +0200
Subject: [PATCH 0070/2550] set empty placeholder parent at creation

---
 openpype/hosts/maya/api/lib_template_builder.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py
index 108988a676..20f6f041fb 100644
--- a/openpype/hosts/maya/api/lib_template_builder.py
+++ b/openpype/hosts/maya/api/lib_template_builder.py
@@ -69,8 +69,7 @@ def create_placeholder():
         defaultValue=-1
     )
 
-    parents = cmds.ls(selection[0], long=True)
-    cmds.setAttr(placeholder_full_name + '.parent', parents[0], type="string")
+    cmds.setAttr(placeholder_full_name + '.parent', "", type="string")
 
 
 def create_placeholder_name(args, options):
@@ -131,11 +130,11 @@ def update_placeholder():
         defaultValue=-1
     )
 
-    selected = cmds.ls(selection=True, long=True)
+    """selected = cmds.ls(selection=True, long=True)
     selected = selected[0].split('|')[-2]
     selected = cmds.ls(selected)
-    parents = cmds.ls(selected, long=True)
-    cmds.setAttr(placeholder + '.parent', parents[0], type="string")
+    parents = cmds.ls(selected, long=True)"""
+    cmds.setAttr(placeholder + '.parent', '', type="string")

From a97f5379b16f4141035629aa23d7ca26f16fdced Mon Sep 17 00:00:00 2001
From: "clement.hector"
Date: Fri, 10 Jun 2022 14:17:30 +0200
Subject: [PATCH 0071/2550] Add documentation

---
 website/docs/admin_hosts_maya.md             |  50 ++++++++++++++++++
 .../docs/assets/maya-create_placeholder.png  | Bin 0 -> 31543 bytes
 website/docs/assets/maya-placeholder_new.png | Bin 0 -> 28008 bytes
 3 files changed, 50 insertions(+)
 create mode 100644 website/docs/assets/maya-create_placeholder.png
 create mode 100644 website/docs/assets/maya-placeholder_new.png

diff --git a/website/docs/admin_hosts_maya.md b/website/docs/admin_hosts_maya.md
index 93bf32798f..c55dcc1b36 100644
--- a/website/docs/admin_hosts_maya.md
+++ b/website/docs/admin_hosts_maya.md
@@ -120,3 +120,53 @@ raw json.
 You can configure path mapping using Maya `dirmap` command. This will add bi-directional mapping between
 list of paths specified in **Settings**. You can find it in **Settings -> Project Settings -> Maya -> Maya Directory Mapping**
 ![Dirmap settings](assets/maya-admin_dirmap_settings.png)
+
+## Templated Build Workfile
+
+Build a workfile from a template designed by users. This helps keep the subset hierarchy and imports consistent across workfiles. The template is stored as a file that is easy to define, change and customize for production needs.
+
+ **1. Make a template**
+
+Make your template. Add families and everything needed for your tasks. Here is an example template for the modeling task using a placeholder to import a gauge.
+
+![Workfile outliner](assets/maya-workfile-outliner.png)
+
+If needed, you can add placeholders when the template needs to load some assets. **OpenPype > Template Builder > Create Placeholder**
+
+![create placeholder](assets/maya-create_placeholder.png)
+
+- **Configure placeholders**
+
+Fill in the necessary fields (the optional fields are regex filters)
+
+![new placeholder](assets/maya-placeholder_new.png)
+
+
+ - Builder type: Whether the placeholder should load current asset representations or linked asset representations
+
+ - Representation: Representation that will be loaded (ex: ma, abc, png, etc...)
+
+ - Family: Family of the representation to load (main, look, image, etc...)
+
+ - Loader: Placeholder loader name that will be used to load corresponding representations
+
+ - Order: Priority for the current placeholder loader (priority is lowest first, highest last)
+
+- **Save your template**
+
+
+ **2. Configure Template**
+
+- **Go to Studio settings > Project > Your DCC > Templated Build Settings**
+- Add a profile for your task and enter the path to your template
+![Template build workfile settings](assets/settings/template_build_workfile.png)
+
+**3. Build your workfile**
+
+- Open Maya
+
+- Build your workfile
+
+![Build workfile from template](assets/maya-build_workfile_from_template.png)
+
+
diff --git a/website/docs/assets/maya-create_placeholder.png b/website/docs/assets/maya-create_placeholder.png
new file mode 100644
index 0000000000000000000000000000000000000000..3f49fe2e2b801e89e2027ec521dbf700b97f80d2
GIT binary patch
literal 31543
[base85-encoded binary image data omitted]

diff --git a/website/docs/assets/maya-placeholder_new.png b/website/docs/assets/maya-placeholder_new.png
new file mode 100644
index 0000000000000000000000000000000000000000..106a5275cdb5bc0588c811d9a059ec3b6e072e20
GIT binary patch
literal 28008
[base85-encoded binary image data omitted]
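The documentation added above explains that a placeholder's builder type, family and loader arguments drive both what gets loaded and how the placeholder node ends up named. As a minimal sketch of that naming scheme (mirroring what `create_placeholder_name` in `openpype/hosts/maya/api/lib_template_builder.py` does in the commits above, but using plain example values instead of the qargparse widgets the real code reads from):

```python
import json


def build_placeholder_name(builder_type, family, loader_args):
    """Sketch of how a placeholder node name is composed.

    All three inputs are plain example values here; in OpenPype they come
    from the placeholder configuration dialog shown in the documentation.
    """
    name_parts = builder_type.split('_')

    # the family is inserted right after the first token, if one was given
    if family:
        name_parts.insert(1, family)

    # loader argument values (a JSON-like string) are inserted next
    if loader_args:
        loader_args = json.loads(loader_args.replace("'", '"'))
        for index, value in enumerate(loader_args.values()):
            name_parts.insert(index + 2, value)

    return '_'.join(name_parts).capitalize()


if __name__ == "__main__":
    # illustrative values only; prints "Context_model_gauge_asset"
    print(build_placeholder_name("context_asset", "model", "{'name': 'gauge'}"))
```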
From e4a7dd6548eec1c3d85bdf133c4f273432313c12 Mon Sep 17 00:00:00 2001
From: "clement.hector"
Date: Fri, 10 Jun 2022 14:27:00 +0200
Subject: [PATCH 0072/2550] add screenshot

---
 .../maya-build_workfile_from_template.png       | Bin 0 -> 20676 bytes
 .../assets/settings/template_build_workfile.png | Bin 0 -> 29814 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 website/docs/assets/maya-build_workfile_from_template.png
 create mode 100644 website/docs/assets/settings/template_build_workfile.png

diff --git a/website/docs/assets/maya-build_workfile_from_template.png b/website/docs/assets/maya-build_workfile_from_template.png
new file mode 100644
index 0000000000000000000000000000000000000000..336b76f8aa1ef8d22aa8b912da1cc5f4827fc5e8
GIT binary patch
literal 20676
[base85-encoded binary image data omitted]
zZ4b5`4?>Kc@@bK~vnmd$llz%9gvthGYSC6;opoXb6&uG{|8v;>w`isb9quTJ`QRB| zLfA5a`RQG$3T57)pw2VF&}1MkV6L^~_mx?W=St^0(bA_TBa>2b&`F!33@YT)dP2}h zn`?;9jILyXW_bqx}%49sy_lMwIN5PMUoj26 zjjS(=B;I_fEiY$wCC2w_p3c}_`h~lo^0An7I^=6(*y#`PRs#|MB=)vKk>^hfpvs;f z33`+$lhHk-MVYUJ8{cJGu=mX%Fc`5C_NTw8F^P~H^!$pCzVU10O{$0f`avrG6fiC0 z&6MBxz#|^JsNnQ-H7DYj`OLwRP(U+xxZgNHbykVoJ*^B@8A!z3)qG^zdGdJ zT|bww>9buEdy^q{AY>k2%v&7EK0~Xm@UlVuny+*7z7oXX#VyFfq<*x6THbFmy!PEd zCV1G|9i#V7f=1v24&kdG6(2G_H&kaaz`K5rdF_k?SYp`6{P4X_9TA=k$bJ;;GRe+- z8>ejM_^4TEK>eLqR&~E7LuL626XF0g(vMVS1>Tidc(E(rm_od=mIyR>pNM|gF&yKJ zpjVX){D`;e2c4E3&pOnY0*h^_Q$8x;DPJZ+Ky!5S{7mgV{P))VoTon@h8zy~uIl;g zXxR-IQfV)w$t`#w<+SJ3Y}JjjdC%8BA1-=Z_GCP2&?xoWTQAd+Uucw}0Km@=9mJX- zM3g*^AK&`CZcBtrr|d1fWla-ty<>Euh&ICTz*CWi>rby|HYViz3iG0w500NDGn8+HzBAF^1bowwRNfXgnYD)2@jPD61(#ful< zGn&G-Bld1TIJFT4Wl3f;s|)Y>cs74qdduBbg;PZgIJp`mM$h(2@hLX$u|{Kb8L;>W zNt9d}bdNsiBRN2ObE!jf8lfRdJ~+Gr)6as@g{G%NKm#8EI)s%+@T_Io1NQcI5ThFe zxg)iVw(}2bG?4PGC7-hq0>62yA%h}VJ)qQeeLQuS*iz20m0MfHO6fY;+0b~l7-fhd zkUp+~HxvUu{pp5#`0*O@hLE=E{k+gwv$5XN@YWhYIpFKwcWxj8r84R73RFI}e{;hq zbK?3@00UE*qr;Jz6%$y%P_K2);{C6wdJk_t+Jk#_jC0IS(>qR4N<4Vq^va$D&uyS@ zC49ds)1QnEp3=QNzH7Q61ZO65;>R6Uc61Mti@Ya5R? z*j5!*tSx}MGUXhkyXES5VUr;Na%{D$9=W4wmE_CqBWpw&muu%ejflPECJ8bX5+hD# zP_gLmjHjXu;P`6L8#hxHT?pmM_BIby=D?90)e4_B5M`@NBe4rD{bF+2bhK)rH5nN&M!bpIs0XvC*V%X>K%JO!rGpvSOA~rEx&YO@NMOI z%|n8gYAm4ey^{Z+l;Eb=2T~F==}-H|TLHM5hOP%dw=ggc(C%j?7nJ z>^%6dhpdwLYnbps3-muda5Tu*PsC%sIOB5%q(7tC4Gt75?+xRVIl!7kXWS&Y!DZ0B zsi=H9|3shj@-41G@{g;jOLhRYv1<%REc>xvs$2)RqhG-Xw1Dk;M^l#df?7rUUdq1^ z#;KHjpcN8(64WGUu@GPO4pN}QBk)r7{f%N3HxP#*B^k-y+Eg=!ZzZQ6ZyN9Q>YvuL zt>$B>DF)rau3lKrY~r}HbF^2=VQ$9R1dRgbV?;T|Awl^w=sDdbtmk zeEbmbByD$s1W;G!2S33+GC*3D{FU(7SU*~!xx94|ESeCjpi9TN>XaqZs7FU_c$1eM zkqpB-^Zz+|>rjCBaJdjO;ti*#j+&vf4GJ61eHkLQSvAnVl2sXJ_MDL3Rs`-ct{WYM zKN^zH=A2KL6x1Yp}m$JPlicUwDZ=m5!RfY@YMf3N%Wy- zs*Ua^l=Mb}hGiw_*$xr=V7LHQOcPue_aGzhvJvI=RBeE;gwg*Hwq}wIZ5U|*z)^u$iKAU&w|sRx9PKL5*P5b%YgGRl8h?uh2}$xJ-XWO&Y&S1DgH9u*#FTFcR&7Lj?853X^?igV3>@r^Z&rPZ!Qa(|EQz$R6 zeb(kw<7jEgdso{2rMfw5sV0!pg9cDv!AtTT1GInVVv^%2LU zEOicRJZ;sZaS}9Z@fF+q7da`x?@mw8m+QMME=BJjNrfW6xkHKhMYO$d%`QYBo|oG& z3v(w$1X;u_eKrAcpV1{PJLx-qW(c0c9O&r9-|JGr>^Y6QwPv5pcIettED)N&RHR?* z?j7P4N{LqUgV8QD-a61_-Wm7hR;~E)qqwam@nnq&B`z!6veW1+w=32R88-oMQ z*6*wql%(G&^k0NTE#aFLM1_^{c>N?Jn?TCK9f${9DQepD)cT@|q zE65lRvmqAR9{j!a$Twy>>v7h}w{7d&khje}UZ1~?QFQfZq8{n}FncKnTN-XNN`q9(y7#Xvk)UM>5RX(bPUk7$e>r}6m(Cg8{#X&^+pk{4yb#k) z_v>Yy78mk-DXXcSE|sU9Atk~-ZBpX*XNIY`ZR=`Rf9+)}J&2L;VO5nCDROS(^51`M z*FwZ;IpUVLQ@R41rx`a!5Ih*GU2mz?;4!hTKVcHpu$<7S@IzVt{3C8v9hW0J=6u>B zb9S%Hv?Iz?p^7e^3-2#Ccw${-o;6hb>B*QWN3469%W&B0<}NXL0v~|wa@n)__mkZQ z24G^>^dFxx?BplFpAMQ?koW%#uUa(=swccW@^oyRPZ#+>Y&YBZYxj0VDlEEAJ-`I~ z;4=#-j-3;vLjvq>GF_THPgQN1O z$}uv#f$yq&av}CxAnzrw>#W6VY+uB&JDC>}3fO~h{+SpHiIDBDfgDo;I%gx!t2yQ8 z6}#=;yxq}v@*-YN+M1B-Z>-&_UT_drIxpZ2>_%;G zw4z1)Q&%!yN|J@Lc{TJeTkm{iIyjR|RrUaM`)bipT3h3f%V}aUdtSKW8>I8R6HZiVeYw~N5y3 z2S)jvIU17XiP>7WtxBol=Y6Ut%CURTnM5G&|APq>38wP!1j{mj=Li+X!szT<3Esp} zeX7CcjiT(t>DZ*x_rYKRX}s#yfABg&2jKNcox z`XqUZP1FrQa_6W*I;$+h|AdDs!)!glLty~xxYTviF>tcf!KLy4o1NVF_4`#Enloo9&s6pHlRGY*R^-YhyL%v%c;XlYxxhvORsS5 z)zsXp@zhK4|9kr{u7HBea?I~WS<9OL0c*nsjJxGw&U?mpSAc(Il33X;0L%0r2Q}uA znU^~n@5YMy!J?5?y9@l6F5o{@@c=7-kM{uNOiy#&i6Z_Um$a|1Rh;(vcmFUvf-9he z0Z;%(^TCa2wRr;v;p%*RCGp;=K5Tw?@9uuU7&mW`(z)8vHRm|V z`lC+n|I}WULxFq%RsI(t$wV0Aj3~8NosJP--CghM2O+HJ!UlygKZDB<9dWHkT`b{Q z-x60ePa69x@wg~dDR$3;vDJVUIVtT2WgIT40m^3_B2h4^mX;BX^!XBe-Zm@Hde&;? 
zcx-uXk8f&=ibjM2%}mL~l71CC=N8gTHzHU9I4w*z%C8kKmtQXv>+$W!PV5CR;hHz* zdiD}s4O2T5-mn^KU`?awKE@vPI*a)`f@6Nh0vq*|F2qRgm^%(vxP=HO6o&~7TXPCq ze{=*g&_grr#Gw=vQsvz6FmSNTxRv%p2&Lmd{GX6L_c#qrYg>7SuC+YcJgr<>@q57t zzgsP4YOu>T+vK_3NE9m#FhxgjEd}iMTxCO3Jis51a-vN@Zls$Ri}q6RtT43v(Xi|7 z6+0&MUrAF#oue-vpk!|4ddN!zP@(SVCCmyvt`PDqn#;LGK?t^8X=s>e@5}Gz+YD-G zdx>`hPuwr|W-3D_p_#b%rf}_mTLdh&>#B~!+t$rRYp?sx%>HQfGcy`((S2-!9^nqk z6tg#L5kx|~fyxok$XKh4u#(wVxaAwfxnO!;PR=%LVqt(E^88Q1eG-b81*x;^VQ-kd zlGkN%WQ$(qNq_n!N@}}Wi%;>1x;;xs`Mb#$6%&urTrPz2@}(+Zpf0#nsO^j8JCbMwoi|6 zG+GT#h;fY0DE!sI|5*;4DRF1MNQs5{vBt5Omm`$ADf|{!=JV)V0r~?(aE%784;}$x zRIez-8tFkwUgJ+NZ};@(Wjyz9sE*OtORd7aV2k3B?1+P{^OZ(N3i(-3ciy|}mSZ)W z_&Po>=uh84Rh=}dPGfs9C=pn%-{D4AhFmI&KWzs8kq!I|Abw=-qYstU(9)a#U3BS) z&om{wwmJLM_NKV9S#kgzUX6SBS?U)>5LXZ6fVa`fK!7=ZT-By3Yk{D5o#2s6Jqg;a zNV6AbQnhz%crugmVSLN4X<|2vsMEyzqDP_fBFEX0_A=Y&oVDC90ev z&T`~Aq(tmjk~vcS-yMlG;NXi5XN&f^QA#xqK_Es8Pa(WEb;!M1TNIuM6ih1*e|Osz zKA?*B84JV^q;H)DKn^;~eJ_LOE3Z zo=ula6Zg)HZ5p^3{FM9-lqAn2!xw{XYr354s6L$7HB8r2?~W?^BODaIolx!$6j3E( z4Bb;amlJ9<4Ml9z1Pofq^kX{Ot$z~x@Zz50maW&6#04^n*T8qYJ+a|&j`$ko{|l%K zSM-klOKutW9CdzTmhCoeR-0LQu1uc$E=B+lf4)3lS~I$jwQ62(mVY=jY2Ebb*GoS! z!=L_zSt|7i!6PTGk2dWO#=mtk9&-`B_v><6mcNW|P0Oc+$5UQbRzHih6E5?C`%VPm z_gBC39`oEa`^}>_9X5Zm<3{tn_iYH4)mx*8fA_*m3STeNosX=!Dwo#*0Lp;E3q0M) zEz(YrGSY7eojRZY3o}UICS|fhr+&)tWaVYaWS;0w%)i1AY!-m(hO7y_0o;E0osn^S z=_h9J)1R4vg;S(6OK+vmWgfG`ke-Ip(NK4 zdAIrJSL`+a>i6D1mgj;l>-_%P?i=Yz$|e<^!__Um{U81;xucXj z!qG(?E6ZCyv1M@QIlAN3igePf{8T;(G>i>^Y7?J0N{oU^Px_)4SId3D^kB$rIVU}J z!RtI7hg_V?*ni@9tvXK(SybQ7OX)jzJ&xIoYXBDU1XmS+wdqZ~w$M$bBi18)OQ+$w zP*B#onA$yYbuK!Oc5fmb?Y|U-(aw%shiLk3&OZffmTqKZa)-%!JxeazPA6P3)$b7sQsi) z2k+Lxx}oQ}T3iFLNIT5}aC8$E*3*!#==5y88I4qx&C;zE$W5d^a5rC9)q-o@~-H&5<*>hmITj8B(Kk2l%kgr3HpRstVv ziKccwRR2;kUHQ@Z;HPq}N}K+2BTrG}qnuB-MUHPwS+d>ox@;((*Osq;Az7jHtbV;< zdKGNa|9JW3QULt7zw>+MmBp*g7p}g=oVfZH^V~JJnrE&(U_N*4@7tGPX62V+Iu1a& zFhWWhQZSvKp=@J!v(jExeccxwckAE|5r)7`*XKpz8U0Urd0Lk47vloJqVPkxRRf^) z#Olx+M-KqLpZ$vFYaO`>FIEZzP7ako2qqs>t^dCp+(y5vGqW$c9)>WlQrRcL)DFCdJ|2HVkXI_;(EIC(+Cp!>#n zk&8u_Sv3GAI|tL&8&?M)|7T`kw?8HCJEo7cWcxl!AKQ*kZ7Iq%gR&zXPSsKA(HxJ; z={mg_{^QrY^e^4UxU4e$4fP?bts>W6NG{Urw@Lf%ea^goN&a>y{%=t0`>e)03_yX1 zY&rOz&t)mw=wbIT*&1~(phzk01NtAPUo^o5JbGc7)o zs*aPF7%MGH_lxl=7k9G&Y)EgCo25u6(l>@tn$>%`DNM?%>qP5yCfFiaM9;ij-PGqg z9@1^C!%q3JKA6jPE#&rjplf^=b||P&h|Xc zi@xNKb9HUfoku=xKC<+XJ@WfHJ32`KCU>#3v#9f=z^gS@0+83sbaiia0XW^UbVoPl z6oB1Xpm|lywvU?t`7U#PT9UuS+KmxFW z4iJDe$aNTG%4}Az+6Ta1&-8e34qC1QwE(c?W9D^9+caxBKmgL{5VjR`fB+;6W(@}O zI35D9g7KS2Md6s!2EQ%<9y)Z$@ZcO!2b$3V0+0Zt2UtM|>H;8dG}1R3oDa@NojO1O zRxo~bE(m3J=l}so!5W;x^CqT}Hrf*VFUbZuJT*`*hMzVEZB3Mcu>JCi; zH;f@^t7lLzg0t>xR7XRZ7lW)tb0GoP++0(?IoEaRK!*Vc5C}-1Ao^U#{dt3dz$Ktu z7DnO)N;|Pr2z^>$tx!qi37CEaK9B6h<)AWdePtS6I7ONB~mz>N3id+njFA zDFBCNFc_q^Hd9KM@+QZD6M1ps6-JAdPSbUC9A)*nj_2ixQn+;_ondMXYrY9+9Du1C zGPUzD(KxH<66)w1Gr2jeBCDv*`eggkeDbo&W##!~F|EmKtckWd48UlMFxd)BK`Se7 zr1W}gGBw$dC0?F2JaUoC1)#SieQBdM#6wK=Gx1z?ImprHk!P9IOAMVZBW*KGtzR-% z_03D80JJ)ozTukAc&q4=Qzz-0lRnio@X7Y2`PMtDTvnb>7SpDqckP+S4gpZzbqbu+ z%O%4EDl)lHZmFXIrj_2eeTn+2ZojX#T6Ma9DJ5Fax7(sPWmJZ51IIs(AiQ8v3(%C|Q= zZ$KIYAZ~(!`Vtv^y-VPKaY|o5s3YE_3y08)S8n~$P1{JH$7MwZQnwe$Pj**gh!pFy z+|255)Q{9|4!qK2&eHuFE2m)T?+5DZ8w5QUjCUP^1YiSqa?$faxo}4pZP%4&p8MY( z0BU`TjP+1<8;0sdFS}Lg>R=iUshy8l#`wPoxoresTBtn$#FeM|oLrl3Kc$|G-ObnM zp}0?m%uf{ZrL)-wJOP+oWV656^9-zMkG(MfrnVMc9o5F&HR!5r&?72XKXr|`b)+4Y zUbwxCKIgWn+8djrP@YbA+jjFj<4W~O@)=J1+Ga&w6Gd&nZwpb&a@fj{21RTT-w+pL}1M$du66W;$(b zYty?0>a>-b3nwobbGjW_e}=L)-3|%BMA5swkh~z9UB4y)h_y{`qX)Jfq07M+r{e9s 
z)Js_LGdcZ?%UfU1Z3qZD9dUnqsVz@|a-}wPd+48PQ$5#<(gpa={5W$Fo;y{a_F|@P zm?s7x1n}S-%%cu$+_=$9^WaU41})>%hx&D3A^_fW(@n-bcJJP8cJAD1Hf`Eu-v9ph zn+q(;Fnf*Uq$Fg!R1v!(;M za0oyeQ*BHc@18zP9e{iH?ltfNp$zZ@ue|a~vt!2&9-M=j(g6and&Z7{;+nZSFm(Xl zc;k%*5Ztq8j|CxM$b)k*YdSyx(wMr&l;_>kiKzono(TekJU9olssp%icyJD=1Krnw Z{|`+)aTxx~sMG)e002ovPDHLkV1iGtk!t_| literal 0 HcmV?d00001 diff --git a/website/docs/assets/settings/template_build_workfile.png b/website/docs/assets/settings/template_build_workfile.png new file mode 100644 index 0000000000000000000000000000000000000000..7ef87861fe97322a3b23c28ee3644b76ce995e9d GIT binary patch literal 29814 zcmbTecRbbq-v@j|R>~nO*<^<#WIG{bmc94N$P5|DCWH_|2ub$dNwQb=-XVK$?w9ZH zx~}{BUHA3Rb)Uz#I^Uem`Hc7b^?I(?306{gd>xk@7lA-rmywouia?-c!>@PPSKx0F z1+gvR51bd$nvMv>wcyKNXnd{hxd;ReLPkPd#r5Or#7hq%=X24`?vAclYu;+h#N>A< z>2xd%LOjV3oZ2AS@`=f)vasK-e34Aq z#``I1YTcm;C*CkmQR;(D#^U=a-PV3_DMuaVBGg7L?`W|Rh*i9iK2kU!VMGx=!XSd? z1cBJc{(qkaMYS&odtOH%dM23t-Z}l)5%#==pRskSF6@becrAjt?uPd7ueVx8M~is7 zvO&&D>PW%evl>QdDwXlIZ@|xu|JUO-Rd>vb58AnPDoR7kzeg3>N%A#|&Dvw{W(s?s zj7^>I<$EtX353%7zIih{JL|GBY126BkD8fpji#rk*Nh!l*^A@USCE%4H9W#!4&$4o zk$K8Yw0-0eBkPY8aNRU;b8}lUm;cBgnYsKVj>EbA!Id6Wt&Y0NM?!3gcq#fUrkaDD zk)p2(;wD?>*?5w^1hfTC>C%sFv;)S7` z+fg93R}0r$9S;497pV$a?Ok1EHg}WCJWpKwdV&b(ub`vjQ;UrE#i5Ipry)@-s~bX- z+mXQxSGG~fSG?1dZhLp-hFy2)%25`*z4mP4GpW+&4j=FQZ}-xlF06j{rt1h3!;mB# z3ijMcozHY3bNs>Yo-QxnAHiMc!=^>=j2V2c`?J5Vu{NK0Ag!JRZ{0WHz9kbi zGBTo`uSY5BRU^kZ$FIN|!^_J{CFsJ=&+iKVptQ8qLqwmM$m?XSJe1^a)r%RFxHuZh zVr#ak+;$=)EX<~OUd>B#eSQ7DdDlFDc-of~ujA}5U!-Y}9m&Glbdj5`HE-X(eLj>o z+!^77k>964k zj5T#)f)p|NuXQgB@>+#O>V-FzlG9371!rIJXB<_uQa*g)o#kG7ZX)$S{L>wJj}WT| zrB;tl>ZzLTzm1fOVb4&Xot-^Gq3VwRviT{YP(KO`8?CIYHWv!n$NKx7mVe(V)HwZ{ zlRsJgGQ#kK!l{|CkkI;6yu__1&u)@}+eBJi+(()LHfCjIrSY$~ zmc{c43FM*rk;`;hiYZ=ue+O1qtzW)8Fl~?Z-0gjQ@7_Jun7=)#kBB5|_eY<@7CDqA zC5`hsJ6;gnbs&v6K}LChrkjMg1Jp%$rOx?ni#zJM zTBgU53^L(!$9pS_v$I9kBiw%Pl$iUModyO5*xP5nv9qwu@GIo1tvCq;OQfl=3T`Df z1zXzNV2IQBeal%I%1=p8H&#+YigYwJ`Oj5g;Zsfbsj|jMGtBbm)zs+Z57GOzwY9x@ z^Tu|(;-29T3?DHh2}x{h>@P04SF*4#$`04!XSO9s#M?U)ep7%(8$_olq zOS_~_?VTVSEZMM=MG!`<61up*_#O|0y}WCZtcq&KJrN`O>P+6a4XeXE3+Asdv(4YSO0?I zai5c#+FC&#F;P&aJFytEmOtY)ivU$m~uL zwVNbfT3_$tdYjD>@@;)eGxh;_*sWW)W@ct0BO|jElC1V;bDxxbFJiyp>5=_s!O^Va zfX0t3;Ap#@)6>HP6OU{Yo_QZF#5p@V^N*k{-IS)V3o?!zq23L_)J^)uCGs;yf}+`; z<3|55&7wZ8)x+>PjjqRx(;5n_^8`W9mfzz4eL!n_gQ7k7XnszdgQC5V9sX-}fG2q8 zqHxJ-noqqgP1}4X!!eo%pW_`eQ!i8Fa^?>CRRr_LKBC~`_!*-}w}R3-a_^knaiCCt z`jn7@LQa_oWwE=|Tk+60!bFpoh6X`9KRb(xGI{jqzVO>MWi_=-CFV@I_#bmyNR+Lu zZ8W32z{_==VpSqi(m;QIT0iVJK@kz8xc>Z69r^Kd{P_6zk)zdiByRF%BPjn!S$2eWpk^06xHhYc!&2Yn?3k%SX%xus!OX-^F+MIVq=}T%VJ% zv9W_gQA$b*i;*8{=Ho+09bH|;WFbp}BuWQ(9aFOFY>c{IgRzc6l7Om)Mo@lRTF7la zTLuOOcFiI`hSu?Mog{@JYu}u-G^6eLR@f9g@>6x5iF5q^DDgfgE32<8^`q>SG5WpG zU}op$($mvfLIfPk zjT^K)FJGAB+8y8NWhC{zt5xLe2I%cPROIOKB4J}9Za;#Tr+!F<|crs zy>5GOb}TkXx6Qx9BXJlo%!>8OXVOJD0yC5(B|F!Aa;bPYpa9*oC4cT@vrq4|$M<<9 z+F`SDPwgFhniK0ELN%j!B0IkFV@U5$_^JHiY@$b+iu#yKPoULIS_q%h;ELPrujpXH zy%T;#`6=JR%Ie2$5=9}zlD2e(AqJ(X2*v0(`kxNyuP5>Ta9mC5mp#>_Ko{f0SQF_n zv@xSNO}$b}Z9=`owew1ExB0WLsKTfG*RS+?Zjo#S((H-p?>w)z`~6ww(0urH7%$)G zjt6=ga@RpZPVTK_(_t(S`yDUFH+%v|*L?7RI;{xrM5E~;iQ74L_*Y2REbMk_&V(n@z;aht& zN;9wWu~h`=N(5rbMtVi218oUITA-)=ookX5Vzj%)6H8vG=kb=4KvIK2fnMFU*Jiiy zFvpt%&xcFRh5ue{;3Dk_RhG?&N@G@6A4;wx?BV3kw>V z-&MZ&XN5!f5BPfUUr+j;_NAQn_YEPA$t;$99wXk`U?!oJSAEL&t;7RV9`slBV$TK2 z%gdL|6OY2e!eU}#pq+XYQc+Q{x3dz31ajhj@WrbBw2|h1e8p>VW`;IW8W$JWB7srn z_t4PL*jV+qZ>uv;pvQR~q7aCPD2`T(larG%eSK)KsUlup zoO=csCJ3|r15Eso?M3)hLf=nt-fZsd%7~}s>Z}(;e9ABItX8Cp#PoW5)@zWcoPX`u#Qc?sfm?C^QAdajz_!-q53 
zBJ&Op>q_DMg{bgp86u@$M9VV}YGN3vsuHgcHHx;Ga6j{LO{Brr*Vorkc*R=!sc#wM z3L3}M&=6_nSIqCp%7QP~f4+4>ylIVomL_ zbSx|^%*-;6gtJkp24sx<{AA{7Qt1tpj)3Ru>gs4|Y2g+F&_DV3v+C&z-`eI*^!M*# zr{5lBJRRB8Mn@cz7|pjt!UB|||I+EE0~kq%gOin&dun-Z?wPVOT6u=2f{aY?(MiqV z#{Q3!noo*Lg$DxtRu3?!5udW0nupC|WKBJNM|ygCuB)OW#EPdq^$@Rx->jk|hUsN< z)f(s1KRxQVDm|jVf$^N!M+AG+6M=Xo@lZfu)FheNK$L=#((vuIr4{R)k3&;aQ;Z9* zC-Xml{+z{$hPYiIzaJADJNlUdfmov2{D$sCCbWWxK)4Q~AqpOUwxkn6APT4%#y77Z zR*~a7@H>GCHR3y!_&yq9RXMK1@D35~Vpo!&$Ki|g-Y)}I5!9kyC;MyRBy=R90Q=V` zs)vZxu@ME1DoRR9rluJ-*E|6N<$V1*>ArF|A@l6?R4rdGSB3T3wQGVZ(gp_8(+$3~ zK&*9rSXo(%*3$L7y}j-1HX=mN$?6$T;fDv4uks5E?T>etfhBo)dA&MVAF$xLM#{k; z6P5M_&T4ga6|U3K&f@JbDIc){?~C)0kP!1^Hz@|ffx*FK!I!qcwm3LAgs39Mz6T$qwmt{7Kw1`vHjKpPNbUmV@)$^ah2vs)Cnqt& zpuA;s^Tk-~AzbXVw6u&TaUl|PeoA6u-`VnOW`Qqx$-!lJa^gX?G&Pwvyt#@^8!xA( zt1FNE`9(RMIRu4mPZ7wsy}iBJz#zk+RnpSXAkIwW?&da=c6xSZf4{E;4S}$kCBQHo zI~VcmNfLZ{e6Ue((i#Qb-Ot~jQplA{R8*9Q=Y8|s!u&jqACzUlK!CX(M^Je#w8i}W zYvtE7Ug;Qk{q}x4XZ>5Udv7iw)%ZxUxuBq6ijbQ_&nNL`nR3CQp=_+Ifah^<-0)hj zT;$NJ2|^wp)L)$SDT>|-lj{5R=#*D%>{C6y5IOd#McR`gMy!aas3^i*wQG(VK6N*2 z3I9v4G03IsF)UCcaK#X-{@gvMLpi*DV{P2fcO+pPz8c^<^$=X*QBK5+hx5Ivk`(kz`3!nVy zOcXsoG)~+Kj0R-nyk0Lu(F{zJRL@nMF*<%@s=_{9gT2g9vXeW7^sBJ-1t)Zt)18zH zkG0Yv0bP`n(=NyXE^DKvjX75)?4U+I3cMlg{Ff-A`O)Pn(w4)f4S{x9Wk2_;#IjFF zNQgtf?v#soMriwEIXQvEz)XZv+=I5NRJ!mvvH(loLnMkjel6sSxs{a` zK6k8XZB30?cQPzc;|SpZBzJ^p_#C@Kvjq=sqtruL9dg^#;Yt-cY*fb6%i1)!Wj+?7 zMOd(HypnVv>RCH-_M-b*m0zz#4QN*{|9Zn)0*P@$=l0*^5VC+<_lzWp8v>DlxdgmU4}thN3s5$4VbfAY4E?^#B@+ce zL0Un9>VXvzszvCWEWrN7iyo`eOh}T-JOfO6yu^uy%Ac?fO<_LN}_Sx>%*5%5@xlIOj|0jtC2Lc5?dm<%{dZ*}=tDgg3_3s{kZInr1=U z^xSN~x;UA-a5pSxwh*O?C^u;(%vKrg679qg4{Dl)1?a>h?$w!o^jyx(4@c<4ty4d= znn+N^AVS>_xZhx3U!VI0C=ZGs`Nfsr14O~b@DQ2m?;Dl1#1Nl8thS%SoZJ~Uj<{U7 zaO{`ZP?3Rs8#`J`F*YnAuWy+>S*yfrf&cK=+grW|P+=Ztt0degP?aqEO*sPf{bO{r zZz2MOBp)9i)|j@22E)#T2l;i!UvqMDB}R<7YnRq6e){_T0}JyG_T;B=c^Px-7Mq<&E(7S@1MRP}?o+ zm+f7-J8K6@7W6sS>JgzI0`Fx98{$5G^Z>dJA1j-!LWp6Q6Jq_vTc~M^Fg}<BPKIl?k0U%*^s8kcNYi-&sBRD0d76DG z=|wTvs779Qe(bF;F5YmzTj)^~)E|&UG&ME5y1D=!vA2T)gn@yfM5L;#yEZf9bD|GN zvL)}Q!=aw7qLn*XLK`FNCZy-->B*w@lI?Lgv>uP2uV_?ud-t3mLDuxpYy588k+M6yiGOYl2o;`a8RJ@x@PE~aPX!ysZ zB=tOc)$6C+E20;&D%Xfb7DywZ3S_GYv+ z1Bh;`A?=uy3umj^3=wqFR1I_SykA}H#tRJ5Q@WdLw+APlf_PBE5n?*Bf672dBN!JrxImTL_h%10qLZV{15BO1U8ME()B_YX%k z2?l%ZJ?_#5;N3zg69Lg$Ga(=QHnieNQfHIo*)?<^pCU5R(2yFlT5Nj|$*>FxsalS9 zc2#=A!9tvVlrN)fpaPFdjMI{jb>JKPm(Hz5X3lA8u~sjgoOFszaJmHjdZy0z%ZKz{ zbqZBF2~2?Klc!(bHMcc-{}-~J*~q?LPRS!8p3Q%9?XQ)E+`}<ZS&oOm&3>a@ z@%faN_M@Qz4J$=2pK0MtfZSs(yG2bJ&ZxTThJpHDa`ME41OfNhhzLahhEGylI1WDs zC~#5IY_2Xo^bUh|BOD_@>XDQqo!*CodR)=1vs~;E+P!^u{l0n#opC8_83 zOp0FHtxWOp@vs3cEiDKAE2^qQPF9Oy!@P8Mwox3_;`u3YK4TjlTXO$gC4T7rP_!1c zx7pt}Jmg`$y}ifVr^oG6rJ4>*tVK9awVm>FRuC>3(oJA-IPbFOs*SDc+nIz;H~J;} zz`1|+o{gfX?)&SI%m6eNAX4g?_W2a*xYj}0~ zY`?R$Z{d%dNv`44@v_bGceQYB`^8}-$13U)px#G^p1e0r-ci5FA>v+}Jjd@NrlhGU z)3*!gW@F-cW`+acGWy5iC6kTAvP0U5TSJx8jt&m6um38YI-76OeyAoIrg*%4YLt~E z>aCa6SGwq$s?<_s&6e)sCX_5jqx5k&`)(Np?{MmLU#tmpxOZEAP!xJ%UEA zsFqk}B%1R>@~Mc)rXl~i z$l30*rtJLj6py+Q$7<0-B&b_-;z(-*WNL~mnGohjC@HS3yn<`hJG#(sl1>q=m*NerG(Pr=#1H*Ll3i z)|jnwu;WuvQNe*LvJ>*x`{cQ0&h#T}IJ9p$1;g2GHE#sbh|SMa+%{?boDPbKvg#e) zy1w1`J5c#D+WvGvUtoMc6+8H}u_=`vic)6C)2C0pPPdxPL`&CCo|)?9)W0Z5v~!jp ztynaD>zx9W%$jVo|!_IRtAI*B@Ek%u{N#F&cvP>w&m*buJA zp@&yrZF{(3#MXp|oM2cfu?OEqX8+FJq>Oy#7jMbJIGi^Bc9t-mzj(AtyYnS!D~m;X zEt`Z|>fv>&tnNCq!t$l@^{=lM136Bzo-biX_EQpI823DQZ#FS;=F-}Po@<^WK{rxn z18tue6+{yR)vET@0q;pv-{l4{YQo(SUKz^2fR*;+#}A7HkyrcQnzrEKA#yQXXq$_UXq4q~j9V9K`Qr)+ZBfh! 
z#U_2|4gE9fQYvp=9JutkKlL>=Z=L7vuUb35XQmk!I6-CBeem2(zm`adL@`7tDtSod zi(@~mIQn%yb|%NGAW6ur2z?4M%=VA^aQ9a_O(Lc~ zsj7rBQSS4v0!0&UDsLFLNg^4h2F5hh4exQ5JWMtJBq?gJSVH8@iw6hN`3vA<8@?#0 z?B!LACY=!}+8e~J^XWsYQbA(ag>MP4sKwviTiM59;8^2KnyYyG7LS^mI*LIQN=nU6 zVKddM^P|PBtYY(SJ#fi0uA`0&1nNHR@OQz&Wu(Vs-S+P%T;h4!tvyL^1* zsyS-4u#rPaIlY4Hu2T^($jqv6he?&WZ8|GU5=r79`&<_+j(of=aQl9y((|~LOL(Pr zc73JW#U(ttZXsK35Y1_Nw$%8i!Tf8>Z13X@9Ju;-2!j)na0XwRpbkGi7 zy~f#RKmwm;tL18eFzB)}cvpywj)@6NaVTAdm6C!2VlUiJ?)WK@UezB`^}G2_?Y#ru zv!e-Kw;xZEcP4u#x(C&XX#M;q#nWEDuu%?}?HDRM)5|=3mZ87?JKzJ$c~o??&C|%- zbMzjq{mAzQyMOZ|>M8t@76@1kq{@LD^(V5jr>CbLo}P(! zN2aFgzkPdas<{#tSZUD1C{&zhsr85VWOHX}I>K;N}Zu-x5b^yvnR0n*0T1-w}X-s#L~O6Mf><8Q#6^qs(n@jLg;PT zh8yi6*dNKFCe%5qWfyX4eLlWuh)?QAjl!HA%~*(no3$1d5P5RC-6HZ|iu8Yb3gUEm z-5XW}$)!5ILa|h?&dp&mWr zvmP$+JlT)^LyzzbO;4w<>ArSc#bJ2>BJQuBry>Up;Aj!yN~)`1AP`A?`#P9OAn&WN zsu~y=*xLgv2PYadb`*9bL#6AMk)53#RRmbxQ6D}8AxG;xPvY;J3keI?*4EyRrxy%lhDy3;*iwFg!t5b>$uVGSjv>6w{A$3=xoojOSDL@}_0fW*?)WoxGT z41ut~dEmM^-QTY=Mk0&+2@K>{IHk6Zju7BDC}gMnO})K^VDv&Z1v~ZSq#IUXx->&u zSC3hqN8gK~En6N2QzN`8HKTs4rTR(SpP z&WwiiK$=>etnc|p^_fHR25iM%#Ixd06{M-L{9 zI%U#_NzH{*3cjGZf)IP`JVO=-ukVCv3dk{2_ zlHUvL8@GA!sAE%7s3|GKkPi0tngs@EC~4N1s)`C4KR;AzTwL4{CIX?c^kemjqM|w3 zAK?F)!hpBM#l_+CDEVy1VG%5LCI&9H@xkH&6~5N6F<>$I71(E%AXkcrh?sXLd#>b` z>%CelZEKSq-+dw{NAb{MzRGz;&+BO6+c$PUB^{md{_JN2C^R%Q@AI9MPCf+U`w!~D zm8Fli8u|3Ry!-oWV_LZ``>P|EzB3@V>FUy3;CvfGqq?p;FlDZ7ROs3ma2&hxJ^*Xj3ov2z%MF*t7<2@c>V$B_wPyQ^ zqi%0_8-2*-%+H$y81A~C!5t<*k+eHOW748Q!V+V;f8VkHi*jLMA*f2=nGOvd09Tvv z82K*pP^x=cW`R<`Uro(o{o~Iux=3lp=tzd$uLroSF^~-GtGPRo!dGRoRBSel;AE8WrSle&&Q-H~Fo)xSL2jQu$xzEQ~fmt`zOm0wpk z>7G7Thh2a(K4EA(_bW`;;f~F3brRmsDOq%wxQ{ztyojUs(~u0i$CUoZD9vfwHBwz# zQJ-dstPNyvj4NX#tc1a3)+2Aqp%!xJR{537M9uFUv8jFA+}<9o_pU$6&9RIR&&T%G}#o~<_Mc?P8! zY%2)nfWS5G?oBO@Jp9u;`|FLVW_oW+5qq;eDwT_Y!KCdy3uvQDQd3*258D}>#%=?yZ<@t_I^yqxh`LdKaSqGBt-am6GvN*ryAGT*-um?AWOacV zH$S9#|A3C1)1mQZys_2SlE9l2b0kq$K)#$i%4csYBq1dos`vJ;)r;_E=j5z+S|$V+ zAQfzs>r_Hy7r)`4^K`4F`kbVtTW%@}rK;uFD=LG02ij+%KE{V)*Fo`ZA?rcf31{%a z^u5pafKH$`_b}lq!Uj$fjJuRR`Apm1*PkwMV$;^@rNqY{0o!IF3fU(u0>t^|&6Sli zP)f41vjvjEO-JVE=i$S_;s)P3HV)PXXl^3V#}ssmtCl#3?-gVKmE4+7R{1IBY-sfa zi-+ybojZ_^ymxQwyAL{Dq{Z}iAM8|E<$ivCQ1472zXb9j#F5|?hllW#fsqk%oT3n& z<=_Z!o^zh9Mnu;Ys`Y{`16iq&K6Ny&jHlTGp6Lc$WKdT?b2qMgmerR8HYpspjXAAA zQr^fJcYH_x;#f})mqicFj<9S7A^zPZW?j`tE-!(&lun)MAmnV=t;f6gS@pwG;Yrd` zH&WrrndJjfc3E$_Ru&Jr;yYkkusqA07D)l?GE`g(TzJ<_F^;%7;xE7&ZhIIj^zFh* zd3)QPBqMvkYFwJ<7XJ%4IrInQ^59p?F$zNGg#T^)@q-M)OZ%`E1d?nAziC20ym8|O zG{%?f;~@S!xVV6+41sH#1^Pk_AfAG4yYA{lj z_^tZC0Ft_e7hu_!IiqcvsuWahYHVagM@I(+b7RprwVYbWxB{fayg}i5xM_%m6$l>k z=giEq?a1ipXYTIq5GN6%(Wr7_F`+KdhxYr&Tl7`L@v(d5=@5Y%7!;6cglJel_1gOS zc>z6Ihad+|6a`;=trN+`&q6#pEsEZ!YHGYtVlTsS=VvFbL2M|C;Q~WYDj8&C8KU05 ze-EuPSi*>$WbV%&UYk)KzjtCZ!==wjIWY3&?vjMUmdjAff!x4m1AIIzgvWYtO!@ho zs4tKP_}ib&-VO%^PbXFhWL^?l+tXK+ZKDl%-B_uB&_weYE*sem!lF`Kx zZ?+zPYQ)5+dfWW*WHy0hJ1SO``Fmo*C%_j z9SJ2B+02pBM#jc~Azz&y+V2n;XEuII{C?My2a?eN0Rd2xVOP0S)lOX#uf})!5FLHf z_O;U$un!g!rjEdRl8I(4Eyg(g3HEQhNfAipOz^mK=M3@;x}{dC&dz(weOUmwUn8p= z7jM)Fj+H(i0tgDe4E#u4-*X`NIf)zQEeCFixh$o#MtQ;Pg?t*nmLPGHpEQ2&?lC>0 z*%*3?1Mq>rfJ(@l9N+Rb#eOp z3Fk~09Fk7Oi-yy5;B9>teO&|~RO^g(Lwm7*8=98TCHpd5d-y31+$``leL7NsU4{DUOzUG&XGf5#w zALrA14^$J313zNDeGq12+kGgK2vCBPogMB1fbcX+fjf6H#=fqzu`hCS*?GMrw-@_Sn8TFTbNbM{xv#h@O0~7S-gTp735D5G0)ERa*KmK znrGGc_d89$e_0`*bG_SP3Q2m;W}~&jh3=3pYxCaS>jq_mJw1p% zjXkY*o`{klHMI}j%|ldj#$lG)puO+n;x4hU#^BnC?W6V3_2G3)_dcqus&ZL+;DCTs zu1wTVVb4tOvi_~1m?yXK;uS z|JpU}T6gDS>bqJcm!9gyX=hsu8v%;ibC&>R0dN4MI^Gi)Q4%ybF{cD*010r{rz1f@ 
z*bu~Hihtqa0ztkcuR>r;r-xfWA`-ea}ilLc+|9vDyAn0G^DrbkE8jUgPRSb$MQ1R|%$@g9A4Q0~gneks?z#4rOp4 z*_=J0R{}=kIwK<{j(PV^tdFO?2#yyFY3&k=NFYAqNMd4QSifHH%~FBT~w6oZKaelJ#2xz;_s2`GC(B)ftdb*_rKH_^9oUlf0XKO1C zKK{YxW=mTeIMo_u*0)g1bab;Vk@S<3lTgp|72?5-eE05k#~{sPZ0KHjdbRF=DhBE8 zF5yAg?Z9$~s<4y%-P_rTmdyYIoD~(+JlD&UtHi9rI`CCB2N(#TxT_6V2(IO*!cMp? z{3lRkZ>RTaXlo0?UMkhHsWo`B%-COo9jU`n4{q&ymS-U9+_hDP%z$C{RBozjC-}i@akyx=g&VUCMYK@ z(LE_^hAv}&u+tE)Fh)S$`9D1^OKFq^8E334_v=?&S_!0|UjD`Ym|byEk$`$aeZ6QS zm#YXFyMVwk9IBnQHP6~eiDh(yfon^9S(!y|8h}X%5MFKy$XEmi2WM86ArPKgaUD}x zean+>i`_uhg0%a^pt%t!()(fXbK^Q z@@-*yMI0eU{rk>{c^=#p+WFPh@&5i2(1S=xNvR^@5)$0^S0P@v4V^)m2%?PnI+YIR zr*8iy^l+1h0f4r#wuZqPSKM#0AJk5z<>Yi~Tno#~`68OxSXgA_QY2-@ICS{yhabux2;Ou zC#sx}j*gyY_cu2$LidG40S*q1CTwJ>^h#(WaL0ynwVM}r?$&vco))1Y)&>?v_(1T0 z(1Z%>+}s>&`}*_a=TQt(Zi`7UCI&c5T~&2$sy-#{3xw-v{9@TP9ae`6q4CAb?Zch9 zl+A%t1Aw&@hU`=(vFOK-A0c-M7{nLpzL@k9ruA+GK(QME$)=I(lB~ld&}H^gGL+={ zEpCQ~4@ZoGBQBE!Bim3uU^BxS4M7ryHjS0p^jVbx5$p$%OEE>HlM637PKhI~e(Tq5 zII+v**;bO5hlfzIr#nzF68)*Pd)E+43c69A5I)XFGQ`M2dP@YtWwEAEem*~C(W{C} zW5&mifOHKgq#_rmI|u6%g5KvsDCU19CHdJkVqtcvGnSfwpwP6Pq$ao5;v6zYaKDtY zRRF6$AN+QY3=cBhguxQ&y|5#vy#z|N&ZZmC{gj?Pi}v;P-6NzXA(l@Pc)-XQ&^%XJ zQSlKX98mhPY5h@!VA~{QE;t-bRKMJctZ*9}9tNHtgxucS1F7T@Hr&1Euq6+3G(9|_ z!MH1_?;Tuv5ZI~kGGIOGDk?O7776Dwfz;48jlJgeFOG)_3JPG^!(_;%aALu;K3w=H zy|+ZqtN6^5zfvlUoS2BH1Y&2grZ904D$2cK#ba|5qj!~%`VU|nPguL-PcQ35vpt;E zH4^q=*ic%z$sp+~F_+|BAq~Gd?jcUNl8=vAV$l`+1r$y);tw!qM(D?DLR90#|BHQ+QkM&hyJD89dzwuD;V zXZs$M2r*KhJ%K6-6B84NDglv@koXy`C{k%NR+d~xmk2-mo}Su1=D8niIzp=NLBPiZ zy7H@I2LvLP=A=pjjudpQSI3~L-+l9c*6Y6H=M5?A>qoTb=jKinn@6BhmvoL&p!tUo zvwb@VZJK@e?i~~e|9}961fFH@ixd6DHY~)hf~Qw)IB;K~osRpw`gQaG9w1W+mjQrY zWAVEy2&y}muh`vn5}K-QsstziNp@I8pDaOmqY!Xn=H#@4$!91+5R@?d08n;tU|9csfth}NkCYYo(RD0Yd$e0T1c!as-803jNr2T1D4g36#m=jY() zc$1KD^dbzzokZawqxc&LG5ubqp_P`_)&@!sAO-y^d#*Q3Rvhi7>XMR@D2a(5*o>Ae zEm?%R#9Q9E9%=)L%|E0(lv9-oatN`Am9Z7)OWgxRVE7Ph+7(AAz%o&=9RK*BV?pu{ z%BaaDCU}JH9~ekVLXyzd3+>~N)g>Ib{-D|Z&vW|{2M)w1&cE*+1_IwvJfycX!I^Uc`nofQ$c@+5f*hg?zs$C@-IY9NxS%mD=0d_SYvuv*L@m zfXair;oD$PcOTp-h}=d=Gf*^}nw!Ju-j@-U+`)YPi;0(04IZqB*Ik94`^rTAOlsNr z`ApbGkSYd|2qG|?&Ksy7%W<+ zcwX@lqd`t*3-X79#{TmpJuMA@p+$n3rVGGC^efV$-~KZTfOP{84^IaQHYD|Klap`W zgCz{;06Mieb}!Evgpp>C-f3%@qto)hP%w zoBW~oc?}HHBqol*B75@W36Qpb_lip6;)Y-+NJqgM()T(t1%?O65(0A|%m5ODe+A-w zr?(~Iwe7E!srq`UbOK!eBo0W0HJm<9@0IS`O*!Aqf|(eIyABTrwR`Uks9&6IcN78e)8s3pjoaBE)8 zw-5-aPXpwz8?PX^b;}JDF)-7QkB)$;0ZFadZexS_2M8qvDKYQv?9j__b8xIT;)~X} ztZAvJyble%mYg+d3$hC&chxd;-aPy!meO!3?$XlR(2%nNSRI0B&k`js_X z-j1Lmh`(3a_Z#IKFjd}GdSkh`LDqmDs{oXaXQ&xD? 
z6evR?6B7MlNcesO;6hNmK-{yGg>DWfM1TUeFNQ6j$O}FCIanP3F=%9c+I|JGi!o*n z3<;Vac^D(i(8J^IT^o@0fI>ZECW6GI&Dl82Q$Ul3!2L62*rGgkWND&#xxMUt!rAco zhwCt11Or*3&@4#VH3}^I7)!N;J&&En4`GU#L1yQ-Ev!u_x;M_r5s2D^wdM*5DXB~4 zC-crAka}2kP$wWg3U(C|1@bK1v@eyV{BULOLG^(-3_ql#GVE-~ias~ZeG3y{V6seq zDbkZ_G^)LEiIrVDTjBUXU}-VohWMgd4(J~NHz-3$mjD)l1wn(d==g9C*k@aJH#Lt1 zsA_OI;<@iIGq?PyzlRVzDE6%8q^F;Qr321IL z|OAK^1gioO=Ysc@DXs?r%!)J-Ffo#*|P)ij-VYt+k!)Y zFI%Xpt2+lU3~?DgGQ3Nkn1O>>T3LNw8{RE)Vhu1~*pt}v_CIdfrXCg;$|LIg;_3K^ z*YGC{nqdB~EXE|UuoIX!FALVLmuoB~;#gqJBxU)!4$MH^gh9&x<;y+(KSr8$y+Wb{ z-4W2?$6@3Tprye!b#rw!F*D<>Zu#|?+S3VOGMLPmSXgs+{0aaAg7`X7=Q&og2vS36 zL`3sdXx2FSK9_GXRpp!$bM0nm6X>BJCS_7zz>vf#`~a_!#!u~=1_22Pe8Y#Z-@EAO zJ{Z+;g%+$vf{p_+MU~SsWB}wZ4fBhQi}MJER`?ibAFxfeC^n_vIFAA@c`|4TvvvOp z_jE5_fa(XDBrx(q5o?IBuW{}`kt-wkvs zFzJ@LFiS^I4{^vBjAi<;fnhiZI*%iyk^PW*I2ec?CMTKwcx;R-;I-@PD@M&TrFaxM zEXexx7)J>6f~*d-z0C;o2-wKarY2Q&^*LfjZD@|M@7@&vl5$=doRJuVN(%GM5Uap` z^X1DIelR5M=x2&RKL{JbMQ*wR1x2kH42D^*1b8b#fXi$)}qT=Ju zyKlhW-v>o8iXr~v$G6vTpcjC(by@Eq+x?HnxH;2Uf3hyvSE~8#hn}|f^G1CZ#COJj zrMw}dgZ<`R`vO!!u-AC?_X@#ubcLz_0i#&89M2hl(oubgM*(1Y_+|p&xFds#|307qT2&~zTAq$&mE;*@ z2Dqe6~8awjA<(a9mtmJ?;cfQ4BC@ zyR{4_nWLWn>-qKaT}iloqs6!|PipSGCY%q$GtklfaBg-K=V$PKT7LMb7|R3B?9Z9! zpQPvf`eXqkz@C5>gBo80@pZEy@kM?PXu@=KKib=KAXd{ncUQZNT}X&xYO~BqIP(;~ z(gP?_BDblmi;K$>MadU$%PRllf0|T+VOek*M z8VRlfmBh%%2wXQ9_=5332=F`%Z)^`sY;S4#z-5RsG^A~|=aC;891JIafNQSL7W&B; zF2uiNZ0M&}%iIaN{*;KGF(*~;saG4dfk32+K$*^G^a4l&=`+}ZkSAZEeE?r50+v3g z{bn{cHn3V%Sl28vKnsA1OwMOZA>SvP!7VI2(bwnrFAN6>hIh*aySuv!?Qt6u)ub06 zVqzY^=-(-sR{l_V33gQ_@K0#Ug(FNu*r{gfh5+<=_e*&ny1Ev?Z#>^$QU3oDZ~oDI zOior|Bq7CPTZT#DbhWtK+W9)LfAAK{Q~|s^RvhY@8hQr${YYONl*)lBmJ=`_2#AR# z`<#G8fukhfH~4BZ686j5XzAtEx+}>L-Kyt0bM+rx-V1aNXb1p4pjW=mgfC!v0qq~; zh@BXL%lc;g3opIb6}}STpXiNf9dyG_pT0LX8pB?M=G5NZ9VeRsnTtze=&%s-&vy*b z%0v-K0uyn-cknxp{b7o&!KslYwkCZ>c0&OyUWR0 zKJ2(PD|y*r^8wEQeln=TOw7qyf%GG^ZTPB}O^`J8K?nv~52+;Jh>`6JAi2R1`5!iC zegT2mn@vVrKzpaWPHZ5%bG(ud;{&^6$5i0rz)Jzb|AH9%|E9X&HNrf~DIAZTX{#(k zEM@RNCyW}4*ip=Ue0}X)12w z|MF+Dzt+HvNk8krPsM5M5yk9q>n7(8Rh%*593i#hxYAAHxnTG0N&m$Ok z+}Yj+7NrMPH-J{i9VEQ%_c0{P)qmer6oDp|)dveKG)mgch-rfHiq8^`QxiF9umm@J zu@&nT0%9!Rq$>yr)4;c3fkO`8v}8o`^2LiUd3l(xA&&HvmLH0Iz}z7OFTupu2D8uQ z^Np_t|MZfwqM{#M)=*~uB&cg>0Kk?0f7<%)a4g&S?~6wWaaZUrM3Rck2H6!tnj}d^ zWXs+oBavC6jHGOtnc1Y0P)KG**@UtR;r)1?_mAIuyg$eB9QAaR`*Ppsb)MhPya;Q1 z`{1A;107ojhvbt1%{16?u+?!a*}DqxP`0oMcX&(k>(qgwP9MZ}f{b5(g_{_tF+E`b zT+B>xWJjEQ-1PoE?dHwVM@(@nJWh)M;)IlJj9DFn6`(f=PjK#aG@S0uot^Iof5D1< z8dn7)EO8;@3&?D+S1z+d$_fg8C1fth`1s&)3=YgKVQK2?XyxkFfItD)I_8VJ2R3=| zi%#$CChLV)NMDi<{gAhMUb}}BcOt`eZ)8bW-DE&;Fvy-ZlOx!gQZzC)Qb}cH3Ka~F zRU&~MC-jf&oGvb4Z|N8A_}rNM``K@gIPZ{6dH z<_PtU2)BSCrk3>PxaOgKMiTk_i#AgmwyL`drwgbYIrF&u3Mh+`M)#07Xax*MmY=rl zGqQRkoEc`$nNUb}*t>Xed+Fhpob~&ydrCPTzNk}uzAVrx^F~*bBxCpSnV*QzuAkfz zEJjR6Xs#`csU>*?7)NcFc}P?6xvo+rp8LxwdkV!;BgT}8i*-sXBqEDh#`)sfn0=B{ zz*C}^X?(3->e*qgsZ-?IN5>r{+_u%4cs2eoR3dGSqsljbxjw%unXJ4AFGpV>|EIB{ z*C6%TW|I7V{aFPadV20Ere6&7|6EH{i&lL%Y zmNWY2K05^VcXW1cpuf;UWad6HzB~-AA7JHSw=q>%U95zISwmsYr|L(ta@m{-=bN`o zuIRXqC%<{~1|b`w8YVTy-BRKfox%^4428*X#Ayr4dj1s;t2?8lgjGLeE$%eaclP{w zCR*Bn(9o2k3CvjHF0(xTwb3F5iY_iCv9Z^#UyoDgTbQ3`4tk5k6v!5^FYxhoOfFEJ zV3&4Pe|`a|egj@WmH-;X%7y%dWzQG+;^j-cTbP-k|FP10^u@-yyvIVCrAvao&pnEWFuZva z^sPg8fhp!FI8S(KZ&Z6xKz{Emu`|}wdxmZ*>xxh_m#_oo0YF*FPpn*A8& zEvOFnm3Yu`C*`(iaqe6wNYIwV(^Y@I2jNqE_vTG*542-+DnS8CbKV<&du<7HVa?z# z%+*<$nXEV%!Q|oMZWhf2-D4sua7ZC~L5*h%g|h_6d46l$6jx%&e4NSfu6U{L zTHSel;1O*U-SP=ed&v=*J~^&s9@VNG{&(dW8d-FkH*dWs535{->%xnmpfk?S;>3Z- 
z4{-Bk>QqSZ^Pl01G%_?KQN6^n-XN2pS?mf)xr`o{ZrF41abTj1L!pVN2AGmNVE)Fq-uWk0{I=HikHcJdvSFxKjJ$9dDnV z!xpTN>FgI5!yv0D-qKvblfHuiBppxd^cL%OuVX^l& z8NJ|9w}}=!to`1Eza7bJ21sb8W_dwBf?{seT=bli+306DCen zhj5MwJ?>&<+_Oc zF~7L$aryNh4Acn;MWG(|jFPIvZCabVmlX*vWzkig`eE(#`X@cC3R!=bZ_EaZ>|b|R z=eg24KBu$ul2&5rY;I;#adYi+o~f}HU2{e5#`bpOiXzXNfi=LB6Y;+=_i1X3ygNA3i|8*uVrp0Q4@MC-fkZ zT+z~U1yNaaIdJTNgr`S;P0()&IYX<5QU;ANZ4Df^;_DB{7GLt~&Re9Yt`&Wott(1V zebZ~zC;pD@yeWsSczV24)9P{)FBf%O+p)hi?;TwCrv)ElF)HSCxVgAwX}&y} zt}YsxuB7FaHzgE zqvuEwPmcCh>DK-)9~F&-|CDz3hpS6;)3N(ea@F^a&Qu=fl`}Lp;8K3>wg1X7_NvAE zMcW6a#aD*q;%cf#Z9}hnysyc%snL0HH$^e4a>CU2A{dYx`~wFk*|t!LylGhaBHSM* z+fU)OwEbk@&rKziWm?$^Z!V?T zv0-gLt^C26b-RoUcLzHk^xH|Sh&XQ1&50_sHs4Vn4;yn?*$P;?bm^C%@j=h$TuM(! zOuVkGja@dB{djA0Gl`QkY*74hgEZhKe2sE)#Np;ZRbDrw5H?YIpL~uWxiH<*jXWLT zeOp`JvoZ3i*pk)lPI&$GYe8TDk=(*{gl+yl|O&5(F%bLbw%F*K$11P zE@Vzj7xiB5;-O(=kpyhB=WJ|km!WL{JY{8N$>Kl-X$r=y@Wl%=h<>2&Krmw}0HQ}) z2$MhV!zVBt!b(Cx)aC*VVH7%qf)%77AeaC+{p+`P#_b4NC*_CQ$`uH)sqBo#;L(9t z)SY+k^>vrQ-OrE2on5>nB~kirhJM~UhGnbVLB#xlP+KQ8H#GUeD7dHtILg!6Hx-29hXGL}??wY91h7X?w5dpfV%Q6~QB zJK4&=FU|Q-3DLbJHU#}-Yp$pn`51FFNP?I4xKK3 z@SpGcBdJSHm!55Ewak6M;cN=x-UDnK`fE zOF=ODCw5Gx`$U{O$V|t^#zv4qzDHiVdiCDmD_`G#BAa@8VoUc{=$tndglX56gcf$h_=?gDn2_?ce#&&bz!Ur2Qq8(YZhlIAz> z-XXh#QXYMB5mHKex|=mf1RTUkGXs7v_wI~Z-^(P0`YQ>-g zDxn3H>A#!zY^B)kE{G!`sq62D<&=F7{g2acx~gDn8p78+*hTZxG+Kc_;dJ`0g3)|V zf7dcq#@|i+SKfqje6?UXp>u9>H25F9H@$}{B8bfRVydI>r=;L-qNGQQm1349C zY)#*+p?PWTo*f3qyg}`Q?nMj%q{qLE=?9#QzmJ-?>ya0casAw)hujt6KG!71JB1hH z<>66{If9x&PH}6m#f6Oo02o3L-mi*^iqg_(Yjat5$=mPlg#4p}3Z-mkjvRsUYU;Pa zyOTa|AI;M${y@>fN`5Zx#iQiIqwM2e8AgACtn?ZOg=zCcsuSdSD*d!FkCe$%Wn3hV zljzKY&=@+1nrd=8i-8Umm>^U8w^CLa;vN~fOR5keT4e6``(q5=Ya;Xy{*Uv7Cnx?= zn3P+8szyd^lNsXz(>2L#=>S2$lE}TQLUYx$DAuajqR1sdCwN!Cudh$>I=d#ti|BPx zCQ`*NK9it}R62?MG0T0AK75{Kv7?n|@;~F?VYgRzf=_A&oBOVVzqgop8?5CuHf=gI zzhz?p-9)h0Mc-Z=U#D(VOQEHlU0Ejm(0$a)N$Vp=7cgcJwXo88YLC&mBoB{&Mxr`~ z`lXC^^V|-ols{}9ixKw7$do}8`&^zw^MS*ZEi?nIp6m|Q)ANecyYY>7t5zJAcy^eS z;<#kBpZmc9C=JJtN1sFTyazQJX66Ex_e}Qmy@lL?13RskqVc8Ys;f+VPRFDO1Clnt z;%JfMI2Q|H^{1(>j(mp7)Tjj5&hn@=>wp`KoY z&@iIJ(}QyFj<6-Mu$cZa2B(~>eeQE-|6lq+vTsx9AWk5#1?-GKYCK1eI-uef2N>#e zdJ#r~cS*8{Iu-;oQYNjb7O}*JA0aA=T%n_@qvJW$RUCMWAegB(o)PeVE2Doc<7O1s z{RNwg+DKTcKua(aea?d+sJO&GL3dr(bMg_BW{4C>l)AVakNl37Ip+I!c4ot+k~H_! zPBxXrZD;fG7XB(3-v^Rbx6J5E7teE?AVD1KEOp?&s}Fj54y8nNeu}>YEi5e&(S)K- zp$C={mtD2K=7z2_xcAYVTHm?LLNRQ&z=OYqJ$rjnRC%v~x&!KjuN%l`U}gqwoqCD) zxrl+0ks{dG!g(O{gg<&T^8LFgeo7#)tyC|;k6;=@;P6EtEb}mpOylK17A0O9`4n&U z-<7My+sW3wk;*B#;@S8)+ng)mu)d&f>N5}NMBS30+w(x3FI@g332<-YvT*^KSy`A! z5QB+(@+6A$Nfd)$jp2A}vQ`#rZ7#%I=z^`|7O|ieAeF<(PRN;lXR|8TXWm9dAeWln zGzmfg+D3dlT*fjBWKL<VxF zX3#JGuKmV$lw%bWKH*bFL3eO>%a7Na6J-T{c=1%zUhx@|TI+5g@@d!D8@fmp%!@M( zKeo-=)H%w8D5~LTfOydr78?4osVVtu0o+IowU`X5s;boaZq}7qTU%c^z)W=lWIc+_ zn!P1acp7_3Ki3lGG?Wdh?U4s$fd#Yq(FSY9gZC%ph{S;d0XxkqQ2Nu~&q<`|aq01K zZB8(E*3VAdCp0U3q@|^}TD9W4aP;%qLM8&yPccv@5RV`5Tvb=cx7x#ZJzZV!YTlc! 
zuH*lLa=aGX+p<>>(2$FdjfhZ(bNJCCRyH=|ZeeSB7#@x$u8bnT*LppF5nTrLRE_|K z4;Sb}TNKyK&Bbon48w&pWp?@R0_BE$V;!VK- z8LyJ7hf;h$P4m#oTh%#x#Fq$$winL31bg22@pegYvH8_p?%xpXz`QouRG zp&!$?C=xi;W=|UKVNTrUEjNGq{^A}|z#46<%)i@v_rIGx7+_lY+f=5U2|XmbKP!*V zb&3Cu46)N)a{BwHWYEXv@R`gafj~;Hs21OpEx!#?!sbyFaWq&aWaV0OV?a&YOaDac zbxAcE&AIf?cYd7;{*b5qROH>KChzDxi;15OR;vPSbqjF^YtI_#onm?U^83|~?c2C1 zzxG&B9XeK)_+l&sAl-g{h(v9-RZiyW;)ng~6%ub6j#u}#7#2n9>RQ|@q)GmAs{Bd% zpq|j^ezPruZNuILDVHs4g%7p$+^oGf$)u9uv>x-tokH=!-IT>!)pdgdYO&iVpXw@{ zr_P!=HBb9Tn~|o@e3H$EeWmKnmHxBJG+rXUUr%{xGwPIA8P^XbuYCL*z29#+nDhJf zC|WF3w3)Pef`P zv5ZWoG5KOiIdKoz$lMG1d)wwg{HJ*5YnQGr6g=+|rOcsD)gw4}{A7v3kw z{NBu0D$a&s&UUx;@hcMVd$$rk6;JGvj`CES>sP*@p4yRWbxYcA@=NaHH;hHpKFnQR z7Rzg4;bts14HrA*{Iu+3%KnT+YR5h8^jvgIT=cs>v-+0txlHyo&(e~qMu9l*0EXEF zp%#USSbNV~jBcN2bUsGLWVY3sR^OXMINE*xk9{41`yFV6`h?Oo{8j=@)J&td`Dr~2 zxA_!UcaL*N`K0y;*qt1iLqMS%?E3#Q~LRfwB>*3UCU}UDR3p;^IxHo<@|DKNXZ;;O8l`Sks28>_S82$5@UTW zb05!DgqsmR?d<*tq%spgC`xN>R}7hmZ6)DrIzoJHbuL~D#;Lb7P2}$TKkTeI=JJkA zkbb^W{GhZqBP+waEjqIRBniO5c@lAwoUGGXm_9fAf{;UK)wHYVRKB!voD1c zq03}!6w;J&-mmsOI7|0S`IBJEcAKyBZFRaYB{l8#GO3t9I{QnbT}7g7` zYR&Yyx}1?{(fQ1Ys!PJTEI-@nHT`5ONklyI_pWe<|{I-oP zU{|40!+9~?+!K`7pXzy-o$aX+39e+_Iv{hwuxjv!r4P@;K639}$q9pJB0>qLy(sRu zmT5?(?^Oz?Y7MNTAa1MWKfHW?Qep8}M9`5pJ#WX%lJlN^8opC8w458_{S1MM3ZD^c|$aXv?#i(%P|FEt`+R@q61;=&`#htbv%wFqmyUaI9@k}Xu-k~tHu4TtGi_jBN>1;@g zyWrYnZ=LN;=`!VgUtTnnv~Jpda`F26V_hTMC9@xD^M-aB6uA24RgSm%UTXOLKD(gH zPn5WDts_m0WNuVZzT_{fCOW-Oam$y9EkvhSm9$Aw=F#{v`&r=!6-_VsOA35l^5qt% z+qNoO1!t#aohjRU&%@mxxJm{`_B*o){-y645bQ_--xKGQiGw|_uZ!aa)_nlL%GAb=v+{TLB z0n8+CIeQ_?mdZtO8}+S2)ZC13AfI_5(YuV9|r^8IbmKT1?vY2`aw%NL^~Qje9t zpFAMBfqFa}&2;JCrd|CtaX>O4n+COWg{NsLTf~HQdbAhWb4ClANpV#_6uPZaS)6=L zXvlY?zn;3Ma>S`n8gXejZG_bTAx6ebTzgz&FnCnJ?#U~JQsO5lkUzk-V}}?h!U?q3 z8?3Lb&X3k(SO@mZ*LK)=E=E6nY`GAUVF*M3WWse8TF+pS#*KdTNBMHxci*RVq_^KOK7F^q2j)M`_DYQR3_C^dNC2^W&JJy!o3HU5 zDOS?=(bq(i65!27(iB>m1`9H@p8jSHcs3G1Y|&?>;wvp7)=dHurc<2{ToE<#zTmjJ5pk{rT#jjs`dqKM)HEwPF6P^aRe{BcfZ9&c! 
z1%nzIC{cvKaS}r#qrV;erBH(4Er(MAQBm-4&F2SU_%pV#d8v75#I}Uf9bGIzJ794x zvg$wA{~DTQZEY>Gu&Cj4{1J8Tw}t-@;(2Nch_e2KarV2oCjbaoJ@VyUB$6;p%?PA} zlq)TjNLQa2A3w+%nnhO<_BnQL@Q zd|)iNth!a8=5>a&)}PWJy|C8)|L}3+Kv(V@g z1v#W$Blw^>;;R?}J04tt56m$c3KJs0!ej;*y@ydA;R&tO^wXtu zdpkRNM#f5bJqb`=7uPfJ{r(Lp3K17BtG&fygi#r^@?rWACOSG~yr2>jTqIg>g=dW( z)*Av4^71447hZ3u-T*@({5={OxFxzARr`cakn9gGdwJF?N$V79^|rOiSLxl9z2vIb z_gZpdNqEJYzx}sEM2|v`=_tAV{W&+)1v^zU_Q=-sbMabPSR~0Ib|u>0a%h$X%WIQZ zw91nBPaOppH}?$~)A4z5(Q+R=c+V7?V@F#XxhbJp3LqLuS(W5j0Au1l7y~kS!%7Mk zH`YA0<_V_V7;Lz4Gj$*b3wpC0k>$Q3YO54 zyQiimIm-j+7mYTyHUF2O{OK$O#{z-~Y=ujZU0`rPB(XsF;x$;7kO(wuzX=x|;BvuV z_L!_T9v<8Tq=gLmr4WM`!0t0b!EiJeJ!@6IL%XOw zRR3CE(xU>pmTeL>&Ngty{Td1c@M!QFhf(p)lY;McFIrfc5T0X(oLHHl1W*R(3W-74b+fZA$ z#s83^Y4yEPrvh+{^pk_rsnQs9@4wVk-82b}zhN=*8AmKLxoccb&GNI*k)vOajeQfa zGd(k-HR&&xTSK-+uaeg{&WjoT>U@!Lao5>Y4_{6(I9u4o_=VcJ(&41!*#-SZWj4D4 zSo^awgAGm=u|NDAVX;BC)N9KdTSH4*=FqMB4wnpQoPx!jIhMEP_zy3RTYcL{@T=9)uT8je>H+mHnK{Vd;I8cCL>-^zOjb zIx@mdnyT*US;-NxqHoAO{lHR3rgik}f@9|`uBqRs;$}sNkP7dADXk#17YxjX(W`k`=x&TbEeL;E%*c?F!SRr)cYaK8D;-Fh3Xgp literal 0 HcmV?d00001 From 4441a7cc1c07dc62fc04bd7428bd2f7efcdb0fd9 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Fri, 10 Jun 2022 14:50:42 +0200 Subject: [PATCH 0073/2550] add screenshot --- website/docs/admin_hosts_maya.md | 5 +++-- .../maya-build_workfile_from_template.png | Bin 20676 -> 29814 bytes .../docs/assets/maya-workfile-outliner.png | Bin 0 -> 4835 bytes .../settings/template_build_workfile.png | Bin 29814 -> 12596 bytes 4 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 website/docs/assets/maya-workfile-outliner.png diff --git a/website/docs/admin_hosts_maya.md b/website/docs/admin_hosts_maya.md index c55dcc1b36..0ba030c26f 100644 --- a/website/docs/admin_hosts_maya.md +++ b/website/docs/admin_hosts_maya.md @@ -129,7 +129,7 @@ Building a workfile using a template designed by users. Helping to assert homoge Make your template. Add families and everything needed for your tasks. Here is an example template for the modeling task using a placeholder to import a gauge. -![Dirmap settings](assets/maya-workfile-outliner.png) +![maya outliner](assets/maya-workfile-outliner.png) If needed, you can add placeholders when the template needs to load some assets. **OpenPype > Template Builder > Create Placeholder** @@ -159,7 +159,8 @@ Fill in the necessary fields (the optional fields are regex filters) - **Go to Studio settings > Project > Your DCC > Templated Build Settings** - Add a profile for your task and enter path to your template -![Dirmap settings](assets/settings/template_build_workfile.png) + +![build template](assets/settings/template_build_workfile.png) **3. Build your workfile** diff --git a/website/docs/assets/maya-build_workfile_from_template.png b/website/docs/assets/maya-build_workfile_from_template.png index 336b76f8aa1ef8d22aa8b912da1cc5f4827fc5e8..7ef87861fe97322a3b23c28ee3644b76ce995e9d 100644 GIT binary patch literal 29814 zcmbTecRbbq-v@j|R>~nO*<^<#WIG{bmc94N$P5|DCWH_|2ub$dNwQb=-XVK$?w9ZH zx~}{BUHA3Rb)Uz#I^Uem`Hc7b^?I(?306{gd>xk@7lA-rmywouia?-c!>@PPSKx0F z1+gvR51bd$nvMv>wcyKNXnd{hxd;ReLPkPd#r5Or#7hq%=X24`?vAclYu;+h#N>A< z>2xd%LOjV3oZ2AS@`=f)vasK-e34Aq z#``I1YTcm;C*CkmQR;(D#^U=a-PV3_DMuaVBGg7L?`W|Rh*i9iK2kU!VMGx=!XSd? 
zf2jjayF9v$|9wnOYQ2 zL;knd1_#tkYPSP27~M&BI*Un9du|*&XId0!DIVQv;(sWNj$x35U$i7qU#PCUad9==I+la#^ z`}8GEh4VVKIMeEJpX_N(vm#$)92d0nrw%W0fd&k&6|LzI&(kN73{ znCYnQSGcY{+il~HPU_Fo?uQg?Q&j2SI%n0utTvqZBP+Gaxl|2U?*G75!JoB_@PcS}E?`zn4;F}pe+`Dh@(X8KuSF~|F7Kc6plg|^YLyuN@p zbJ>T31N#Fx#@<)m;j=wy%03gmsa?V4`h$LMLXk&%s9NhV%ie5=ZlQNNW?0fn&UEdl zJ=_9k)(QFXdNt#A*~MYK@UcEm&GqV+L8Dd9y!aTW8yD&lj&}x#gkc_}n*lBy&trtu zg|?a!d_K=HtI0N;Vmk!RxE0Pl*-JmAWxr%HAvy9)j|<6&xTm^}4uF`5&|cm63fEc` zv!75qrWjRQSnK#Xr*p|VPGXs?sFi6CHT?wS`;$_PvyYG- zZ3Yb|Dfw4gz-rxl>(5A{sX6EA`6`r`Mrk`CoHK4~xbdIl$x=2a#nUiCm3ci-X2M$d zoUC7b#_e4dIm}Glt%PN3dcb$PpE_=%rxtd#OGNI%^DJRMtUVTMrh3x!SJ$>`MtcE& zyU7IC^8BY9yWX5OA2VDFUOXxczW=J-K@Xa^7h1p^BhPq^Yhjv~We>!0UXLjmUC&6v z8tLvCEypd6i82#%r#1~a>?18zLI&%r04W_0ovf4J-twapF?cdZbBNxfALvT6c1F!T zaVT2w@KsM<$;O=!S4Ey^hi^&Wr02DWJ}jW0%)D}YH`7Fi|N(=T+ufzO70 z*OYE^sO?hvnSdR&i2kx2>jgzrL@2%7@FyW0cdXb9Q>dZZ)be(#CmX!&;6Bfv&ECB* zE8^wYK~O{y#7Jb!Jca!kZX@ruem5eFG4Wm_uZf_EU@wXceQi4yX42iVo3PJI!lkz; zq8U{@U5`~;I9gr&^m6@>UwlvRqN$_&cc@>SzHVG=<*qVAd!j7uSxXh``)8(+rsp~b z5D~@Y4eiyu9?Ctb_Qq>QCAEj?<(0dnfgxPhgrx`B4JKiLnYJ4{HlW)(jRHjLIhpJd!1Y zS@&*1D{EbtGG2?pk~QvWBn?{pYTh*8qSc%_=K=1#e8A^Ny3swE$n3*4MljvrH~-=! zXLM6L>n(5S_NvlR@uHy~n^eP2y+CJiopBpE7s`h|Dl5f1;s-{JhzB8QIQa9de6C%Fd)pw^a6VPIIAPdItbER* zmUj^eX6FPvP7fVx&R%2M0pw;2e1D>DWZ^?6X>UjouY0>%f2pb7F9EzLx#R5sbo+7m z-buWk(YIaM!otzoZ=WCYUZ^kAL6V2xsu>(78q&>~{>9hWTqlswYl5agM z0dI6vi5)ywyv}``^LKrHai0RL^4&L@LmyM`fpwtDTsy~m=BJd))vk?~MEpot5;;3F z7AGSk+dr(zxKP{OZP@Ghbu@i8I4v>kYxW$BTx~7fpweICsHgPQ%BQ`Vw~O^#f00P> zy=AY+)4qSXn2=LJ8I;Mmbu-IgT`tc0mOva9uUf72OTW%vl}uR9fdLH_Q(hhC1`K^E z;Eaezi=7mco~{ABOVzBiMXafsaJ+3?08!h|Ljo~u^-ZYQ>-Nl^YQs;)n*MMk(KGhw z2mXfK*cI5gO5&IUq>Y`2a)^3sPgtBJk|h=?Ami=?Q^NclojIlPLM}7FZmEg=(+h9|YQO$$ zM{@ij7jRp~V7*5>s|xsWq*S%oYyv2_54ZlD(PKvLJ8J-$WW2uYt$N4ijQ;2O4T8$p zICvN~;TfT$CuqGgb>2IWKR?VS#FhY`Te=ST4A)GV?TMqs7_#BdjF)NLI}W=IgL)-_ z>^*MfNc&6GJ%Ri7?>WMi}NI6pCcIID9B_8s>?_ z{)WE>^HYd%00cJro-fq*S`O!}u|v)MM@6iG#blXZzDC=EGYt}LN$^p{aMJHuArpiW zy{`2mmxnb{_VK_Q6?zco%swjPf`F`XBjaQT3Ji$SZkslKwoHc?S!CPxT#JeK6D=SU zvrW(haw$Ga`INm9#4#=j!M<9*kO%xX8P21QS{e=n7RM6L&D((p1=Rd#`s5;=4%kMO z8?&Z9o{NkRNsl6HF#SSRAa^4m zq_F^>3;5|7j#>RMvW8)nMnwbbsNdX`m3}<|p8*|(%6rZF5^=2k zHeMC@iXk7@m@mGBWHFyir!p>RNrK*FQ36L;Un=ah^PA1D0`S$S9UkSHVtgSDZ=No* z;w2|Pqk%bQ8syc8=~td?HWj0`-33QC04oKOo=tpV>Ys8X;vkX(^i$HHaydKv0lp@O z=&OKD9_zbLbXrFrG=E5-qMawFWeIVuV615THv z1_!co?{Nz-pPjFp{l@BxGD?72>24v+?rX%;y_IWcs>6E^meZti@Rgi{2tLqAo#a`?9NkW~Y%^cbOGehFVN(z_4JzN^vNaP7oh{V2vt>>hcoX8o$ z(IR{GOX!jbDww_#U^jQ*>G`wfB13yTC{7}sF1A;K255#2AO5s z0e&wCrJUnK*%o+a$C*sm8VGE34Eg?wY|u|t2P+}!l9sN&DbF$Rbc0jmn7a7fqbQ-p zLznYKq1!SDHJvnjLxn{FTykceJN)&nM<~Fk@+;87s)hXc(9(8&Qm6&p%hC1_xe%9J zNjcxb(H+@5|8;NmX#N$GIcmJKiwHF0XL27v`A2?08a!i310zHTIgI??X6DQg9$sh9 zzV9B8hxt0tizBJ6@(qduY#JP|1LFw?zpGEHy=K<(^>DV$Oz+HJA zao^JfsZ}dEy7e{Dgl^ja7(1?)Q#7f2SggP+!r;Xmf@3jnDo&QY0gT%l-~kjrqEG$M zwgvzxw=DH%N-pe^Y>JEg{7aA9Yt4s3|BL8DW#O+BVA0X02Q1yo8S4vX45L~P6S-^_ z-C@N4U=c@Z|kJMZNdc@T8-;*Zf-bI)RSkv^@Y=5V=(t-h`z+0dZMmlah?&WxIn|GXIr!4waNGewbjn=uJtE8xUATAFny5QoZ2-VB0`sco2u~< z+wY#add+_R*Ms$z=Rip9!@;4q&EG2GHpjC!wI35&=dHn*m&=5?v6I!j3hYi-Cml`- z&GP*sCdu;^yVf^r%71Ws-Jw+cT3iQbI&W7pWL$qCmML!Pva}J}?pdn2;f{owF z=J>p?C7e?Ctu;l7Qjz0L-f$Q@BDlss&;p05uX$(u(G)IqF@cyA%_T?W(-0=+CusSmx|JrF2U&_J372dIZn$GOEnY`B7C(XO|14!=89^RiOZ>{fPU%;IN9hy}2R-ArJJ<>>* zBOY#~0BGb~a+MAcsoQ(({OfvR(s%1&W=3FVBKPX?R-5Gd59?m5H&kiSzgMaHDZ7YW z9&>Q59(9b0sQlm!L3YQgc-TziSh1KV=y!ou0Q;%%)INvzz%DD3`Rupo*cjpw{I9cZC2?@l#fNs4E3}7o*f527p9--;s8H z332_^=kyiq&>TXc4qWP^aU-I0@;_ z_dyE$Te0za!(9jX4LiWpZqOR_@d|tCjH6DAPU(iDhu)m^*2BW%?s2`g%auT(&7~23 
z5$&_X2{>AyEHLbUYHP(l3j&KhS*rVXw$pj+(i#m1K?xv-@~-4_uNsIHAUZX+&IO~} z>gFF_V`^JDIC*tKYqpp0Sp-%C_uD(|v#wzV=tQetUID6IZ!dA_k9ov_IN75!3+Gy+ zStjoK@8xhvPM_~r#IgiQPkA)Mo~a~LLpuVs+i8)|%pth30#jR`4A4UqIx#0=e0&R#u*f_&;8`*#lYYV?T1;0*9pOFK( zAbwZho_@pwmU|Z-&&}xloez z-%=h9dFD5ez{gV*p9K593%(^g4f6=_c)5AJR;>L~z;@0s5Rd8TzR7gG@8V7?M}=7l z+iIDH$xNBZk9BeFG~X81@xVkARFX+?s-*UYW&@up9;xh#EQdy4=Le*-;Fxd_<}`EL z_-s|LD*!^&IXhTKSfQK}5`Fsh^*&F^CS=@NUd9(t@I7|?{;GZ=dG!zqorYcSyb0?AN}qovlA23eXPZI1_5++6v1RVcZ_rEOHc~?5mlxaTPc$@ z?kV_kE?LteN|_`G)r??28CC;G=K-bKUy)1p9=Fu`aFWrL7-78lm}XJ{+w;F-_k$Tm zSgfcVZ9+*o9}QVV`v9zvbjY0nF1(`5pj;~tj>+aW+$5~t<6&skqN-aKDX>GsrqFO%@d@CVhqz%Bb{~Mz!$#g+i`I}} z8PMIBD!E&QySL$)VCC0f^-EG1J55Fifv_dIK>DJmRYuo^e_z!zcnK&C)Ztq=Xhefe z-!}3?lx-|-OBrRK4CKMg#+&BoR)RzXS3&7(9zG`QT#CxYL?UZ#qSc?Dqk~>en<~pL z{55hMKU_$J7cH+{V)^N$aG=Lx#@FijIM&C&TR$|;xj9C#g0&lz@4ZzwT#Flm@9z-y z&417^=y>nJ1T>*$Jn)6M_umxo&2;8gR{XNVCW1Jc&IFUi_%iKe-~C|5S}3>3kC}xe zzBG8|9;=$Y>hHj(m zr@j|)y`A+p5~;9O@kUx+EuLZ2yp=wU$fZDdxM1z689M}uYyUF#>lGpAI(w(V{>{vH zzpaAcRsTSPN`w)T3ot8WT$~1=7gRlG8i(>*wf*(k5#bP0I0LXlb?~RDCO{vY31}DD z%2F}uD2v7|<<4b{b7`wmUoh{S?qYb2IA=n|PBjjFf;PHEFuW;coWQMbI(JVAmk0HX zZ4b5`4?>Kc@@bK~vnmd$llz%9gvthGYSC6;opoXb6&uG{|8v;>w`isb9quTJ`QRB| zLfA5a`RQG$3T57)pw2VF&}1MkV6L^~_mx?W=St^0(bA_TBa>2b&`F!33@YT)dP2}h zn`?;9jILyXW_bqx}%49sy_lMwIN5PMUoj26 zjjS(=B;I_fEiY$wCC2w_p3c}_`h~lo^0An7I^=6(*y#`PRs#|MB=)vKk>^hfpvs;f z33`+$lhHk-MVYUJ8{cJGu=mX%Fc`5C_NTw8F^P~H^!$pCzVU10O{$0f`avrG6fiC0 z&6MBxz#|^JsNnQ-H7DYj`OLwRP(U+xxZgNHbykVoJ*^B@8A!z3)qG^zdGdJ zT|bww>9buEdy^q{AY>k2%v&7EK0~Xm@UlVuny+*7z7oXX#VyFfq<*x6THbFmy!PEd zCV1G|9i#V7f=1v24&kdG6(2G_H&kaaz`K5rdF_k?SYp`6{P4X_9TA=k$bJ;;GRe+- z8>ejM_^4TEK>eLqR&~E7LuL626XF0g(vMVS1>Tidc(E(rm_od=mIyR>pNM|gF&yKJ zpjVX){D`;e2c4E3&pOnY0*h^_Q$8x;DPJZ+Ky!5S{7mgV{P))VoTon@h8zy~uIl;g zXxR-IQfV)w$t`#w<+SJ3Y}JjjdC%8BA1-=Z_GCP2&?xoWTQAd+Uucw}0Km@=9mJX- zM3g*^AK&`CZcBtrr|d1fWla-ty<>Euh&ICTz*CWi>rby|HYViz3iG0w500NDGn8+HzBAF^1bowwRNfXgnYD)2@jPD61(#ful< zGn&G-Bld1TIJFT4Wl3f;s|)Y>cs74qdduBbg;PZgIJp`mM$h(2@hLX$u|{Kb8L;>W zNt9d}bdNsiBRN2ObE!jf8lfRdJ~+Gr)6as@g{G%NKm#8EI)s%+@T_Io1NQcI5ThFe zxg)iVw(}2bG?4PGC7-hq0>62yA%h}VJ)qQeeLQuS*iz20m0MfHO6fY;+0b~l7-fhd zkUp+~HxvUu{pp5#`0*O@hLE=E{k+gwv$5XN@YWhYIpFKwcWxj8r84R73RFI}e{;hq zbK?3@00UE*qr;Jz6%$y%P_K2);{C6wdJk_t+Jk#_jC0IS(>qR4N<4Vq^va$D&uyS@ zC49ds)1QnEp3=QNzH7Q61ZO65;>R6Uc61Mti@Ya5R? 
z*j5!*tSx}MGUXhkyXES5VUr;Na%{D$9=W4wmE_CqBWpw&muu%ejflPECJ8bX5+hD# zP_gLmjHjXu;P`6L8#hxHT?pmM_BIby=D?90)e4_B5M`@NBe4rD{bF+2bhK)rH5nN&M!bpIs0XvC*V%X>K%JO!rGpvSOA~rEx&YO@NMOI z%|n8gYAm4ey^{Z+l;Eb=2T~F==}-H|TLHM5hOP%dw=ggc(C%j?7nJ z>^%6dhpdwLYnbps3-muda5Tu*PsC%sIOB5%q(7tC4Gt75?+xRVIl!7kXWS&Y!DZ0B zsi=H9|3shj@-41G@{g;jOLhRYv1<%REc>xvs$2)RqhG-Xw1Dk;M^l#df?7rUUdq1^ z#;KHjpcN8(64WGUu@GPO4pN}QBk)r7{f%N3HxP#*B^k-y+Eg=!ZzZQ6ZyN9Q>YvuL zt>$B>DF)rau3lKrY~r}HbF^2=VQ$9R1dRgbV?;T|Awl^w=sDdbtmk zeEbmbByD$s1W;G!2S33+GC*3D{FU(7SU*~!xx94|ESeCjpi9TN>XaqZs7FU_c$1eM zkqpB-^Zz+|>rjCBaJdjO;ti*#j+&vf4GJ61eHkLQSvAnVl2sXJ_MDL3Rs`-ct{WYM zKN^zH=A2KL6x1Yp}m$JPlicUwDZ=m5!RfY@YMf3N%Wy- zs*Ua^l=Mb}hGiw_*$xr=V7LHQOcPue_aGzhvJvI=RBeE;gwg*Hwq}wIZ5U|*z)^u$iKAU&w|sRx9PKL5*P5b%YgGRl8h?uh2}$xJ-XWO&Y&S1DgH9u*#FTFcR&7Lj?853X^?igV3>@r^Z&rPZ!Qa(|EQz$R6 zeb(kw<7jEgdso{2rMfw5sV0!pg9cDv!AtTT1GInVVv^%2LU zEOicRJZ;sZaS}9Z@fF+q7da`x?@mw8m+QMME=BJjNrfW6xkHKhMYO$d%`QYBo|oG& z3v(w$1X;u_eKrAcpV1{PJLx-qW(c0c9O&r9-|JGr>^Y6QwPv5pcIettED)N&RHR?* z?j7P4N{LqUgV8QD-a61_-Wm7hR;~E)qqwam@nnq&B`z!6veW1+w=32R88-oMQ z*6*wql%(G&^k0NTE#aFLM1_^{c>N?Jn?TCK9f${9DQepD)cT@|q zE65lRvmqAR9{j!a$Twy>>v7h}w{7d&khje}UZ1~?QFQfZq8{n}FncKnTN-XNN`q9(y7#Xvk)UM>5RX(bPUk7$e>r}6m(Cg8{#X&^+pk{4yb#k) z_v>Yy78mk-DXXcSE|sU9Atk~-ZBpX*XNIY`ZR=`Rf9+)}J&2L;VO5nCDROS(^51`M z*FwZ;IpUVLQ@R41rx`a!5Ih*GU2mz?;4!hTKVcHpu$<7S@IzVt{3C8v9hW0J=6u>B zb9S%Hv?Iz?p^7e^3-2#Ccw${-o;6hb>B*QWN3469%W&B0<}NXL0v~|wa@n)__mkZQ z24G^>^dFxx?BplFpAMQ?koW%#uUa(=swccW@^oyRPZ#+>Y&YBZYxj0VDlEEAJ-`I~ z;4=#-j-3;vLjvq>GF_THPgQN1O z$}uv#f$yq&av}CxAnzrw>#W6VY+uB&JDC>}3fO~h{+SpHiIDBDfgDo;I%gx!t2yQ8 z6}#=;yxq}v@*-YN+M1B-Z>-&_UT_drIxpZ2>_%;G zw4z1)Q&%!yN|J@Lc{TJeTkm{iIyjR|RrUaM`)bipT3h3f%V}aUdtSKW8>I8R6HZiVeYw~N5y3 z2S)jvIU17XiP>7WtxBol=Y6Ut%CURTnM5G&|APq>38wP!1j{mj=Li+X!szT<3Esp} zeX7CcjiT(t>DZ*x_rYKRX}s#yfABg&2jKNcox z`XqUZP1FrQa_6W*I;$+h|AdDs!)!glLty~xxYTviF>tcf!KLy4o1NVF_4`#Enloo9&s6pHlRGY*R^-YhyL%v%c;XlYxxhvORsS5 z)zsXp@zhK4|9kr{u7HBea?I~WS<9OL0c*nsjJxGw&U?mpSAc(Il33X;0L%0r2Q}uA znU^~n@5YMy!J?5?y9@l6F5o{@@c=7-kM{uNOiy#&i6Z_Um$a|1Rh;(vcmFUvf-9he z0Z;%(^TCa2wRr;v;p%*RCGp;=K5Tw?@9uuU7&mW`(z)8vHRm|V z`lC+n|I}WULxFq%RsI(t$wV0Aj3~8NosJP--CghM2O+HJ!UlygKZDB<9dWHkT`b{Q z-x60ePa69x@wg~dDR$3;vDJVUIVtT2WgIT40m^3_B2h4^mX;BX^!XBe-Zm@Hde&;? zcx-uXk8f&=ibjM2%}mL~l71CC=N8gTHzHU9I4w*z%C8kKmtQXv>+$W!PV5CR;hHz* zdiD}s4O2T5-mn^KU`?awKE@vPI*a)`f@6Nh0vq*|F2qRgm^%(vxP=HO6o&~7TXPCq ze{=*g&_grr#Gw=vQsvz6FmSNTxRv%p2&Lmd{GX6L_c#qrYg>7SuC+YcJgr<>@q57t zzgsP4YOu>T+vK_3NE9m#FhxgjEd}iMTxCO3Jis51a-vN@Zls$Ri}q6RtT43v(Xi|7 z6+0&MUrAF#oue-vpk!|4ddN!zP@(SVCCmyvt`PDqn#;LGK?t^8X=s>e@5}Gz+YD-G zdx>`hPuwr|W-3D_p_#b%rf}_mTLdh&>#B~!+t$rRYp?sx%>HQfGcy`((S2-!9^nqk z6tg#L5kx|~fyxok$XKh4u#(wVxaAwfxnO!;PR=%LVqt(E^88Q1eG-b81*x;^VQ-kd zlGkN%WQ$(qNq_n!N@}}Wi%;>1x;;xs`Mb#$6%&urTrPz2@}(+Zpf0#nsO^j8JCbMwoi|6 zG+GT#h;fY0DE!sI|5*;4DRF1MNQs5{vBt5Omm`$ADf|{!=JV)V0r~?(aE%784;}$x zRIez-8tFkwUgJ+NZ};@(Wjyz9sE*OtORd7aV2k3B?1+P{^OZ(N3i(-3ciy|}mSZ)W z_&Po>=uh84Rh=}dPGfs9C=pn%-{D4AhFmI&KWzs8kq!I|Abw=-qYstU(9)a#U3BS) z&om{wwmJLM_NKV9S#kgzUX6SBS?U)>5LXZ6fVa`fK!7=ZT-By3Yk{D5o#2s6Jqg;a zNV6AbQnhz%crugmVSLN4X<|2vsMEyzqDP_fBFEX0_A=Y&oVDC90ev z&T`~Aq(tmjk~vcS-yMlG;NXi5XN&f^QA#xqK_Es8Pa(WEb;!M1TNIuM6ih1*e|Osz zKA?*B84JV^q;H)DKn^;~eJ_LOE3Z zo=ula6Zg)HZ5p^3{FM9-lqAn2!xw{XYr354s6L$7HB8r2?~W?^BODaIolx!$6j3E( z4Bb;amlJ9<4Ml9z1Pofq^kX{Ot$z~x@Zz50maW&6#04^n*T8qYJ+a|&j`$ko{|l%K zSM-klOKutW9CdzTmhCoeR-0LQu1uc$E=B+lf4)3lS~I$jwQ62(mVY=jY2Ebb*GoS! 
z!=L_zSt|7i!6PTGk2dWO#=mtk9&-`B_v><6mcNW|P0Oc+$5UQbRzHih6E5?C`%VPm z_gBC39`oEa`^}>_9X5Zm<3{tn_iYH4)mx*8fA_*m3STeNosX=!Dwo#*0Lp;E3q0M) zEz(YrGSY7eojRZY3o}UICS|fhr+&)tWaVYaWS;0w%)i1AY!-m(hO7y_0o;E0osn^S z=_h9J)1R4vg;S(6OK+vmWgfG`ke-Ip(NK4 zdAIrJSL`+a>i6D1mgj;l>-_%P?i=Yz$|e<^!__Um{U81;xucXj z!qG(?E6ZCyv1M@QIlAN3igePf{8T;(G>i>^Y7?J0N{oU^Px_)4SId3D^kB$rIVU}J z!RtI7hg_V?*ni@9tvXK(SybQ7OX)jzJ&xIoYXBDU1XmS+wdqZ~w$M$bBi18)OQ+$w zP*B#onA$yYbuK!Oc5fmb?Y|U-(aw%shiLk3&OZffmTqKZa)-%!JxeazPA6P3)$b7sQsi) z2k+Lxx}oQ}T3iFLNIT5}aC8$E*3*!#==5y88I4qx&C;zE$W5d^a5rC9)q-o@~-H&5<*>hmITj8B(Kk2l%kgr3HpRstVv ziKccwRR2;kUHQ@Z;HPq}N}K+2BTrG}qnuB-MUHPwS+d>ox@;((*Osq;Az7jHtbV;< zdKGNa|9JW3QULt7zw>+MmBp*g7p}g=oVfZH^V~JJnrE&(U_N*4@7tGPX62V+Iu1a& zFhWWhQZSvKp=@J!v(jExeccxwckAE|5r)7`*XKpz8U0Urd0Lk47vloJqVPkxRRf^) z#Olx+M-KqLpZ$vFYaO`>FIEZzP7ako2qqs>t^dCp+(y5vGqW$c9)>WlQrRcL)DFCdJ|2HVkXI_;(EIC(+Cp!>#n zk&8u_Sv3GAI|tL&8&?M)|7T`kw?8HCJEo7cWcxl!AKQ*kZ7Iq%gR&zXPSsKA(HxJ; z={mg_{^QrY^e^4UxU4e$4fP?bts>W6NG{Urw@Lf%ea^goN&a>y{%=t0`>e)03_yX1 zY&rOz&t)mw=wbIT*&1~(phzk01NtAPUo^o5JbGc7)o zs*aPF7%MGH_lxl=7k9G&Y)EgCo25u6(l>@tn$>%`DNM?%>qP5yCfFiaM9;ij-PGqg z9@1^C!%q3JKA6jPE#&rjplf^=b||P&h|Xc zi@xNKb9HUfoku=xKC<+XJ@WfHJ32`KCU>#3v#9f=z^gS@0+83sbaiia0XW^UbVoPl z6oB1Xpm|lywvU?t`7U#PT9UuS+KmxFW z4iJDe$aNTG%4}Az+6Ta1&-8e34qC1QwE(c?W9D^9+caxBKmgL{5VjR`fB+;6W(@}O zI35D9g7KS2Md6s!2EQ%<9y)Z$@ZcO!2b$3V0+0Zt2UtM|>H;8dG}1R3oDa@NojO1O zRxo~bE(m3J=l}so!5W;x^CqT}Hrf*VFUbZuJT*`*hMzVEZB3Mcu>JCi; zH;f@^t7lLzg0t>xR7XRZ7lW)tb0GoP++0(?IoEaRK!*Vc5C}-1Ao^U#{dt3dz$Ktu z7DnO)N;|Pr2z^>$tx!qi37CEaK9B6h<)AWdePtS6I7ONB~mz>N3id+njFA zDFBCNFc_q^Hd9KM@+QZD6M1ps6-JAdPSbUC9A)*nj_2ixQn+;_ondMXYrY9+9Du1C zGPUzD(KxH<66)w1Gr2jeBCDv*`eggkeDbo&W##!~F|EmKtckWd48UlMFxd)BK`Se7 zr1W}gGBw$dC0?F2JaUoC1)#SieQBdM#6wK=Gx1z?ImprHk!P9IOAMVZBW*KGtzR-% z_03D80JJ)ozTukAc&q4=Qzz-0lRnio@X7Y2`PMtDTvnb>7SpDqckP+S4gpZzbqbu+ z%O%4EDl)lHZmFXIrj_2eeTn+2ZojX#T6Ma9DJ5Fax7(sPWmJZ51IIs(AiQ8v3(%C|Q= zZ$KIYAZ~(!`Vtv^y-VPKaY|o5s3YE_3y08)S8n~$P1{JH$7MwZQnwe$Pj**gh!pFy z+|255)Q{9|4!qK2&eHuFE2m)T?+5DZ8w5QUjCUP^1YiSqa?$faxo}4pZP%4&p8MY( z0BU`TjP+1<8;0sdFS}Lg>R=iUshy8l#`wPoxoresTBtn$#FeM|oLrl3Kc$|G-ObnM zp}0?m%uf{ZrL)-wJOP+oWV656^9-zMkG(MfrnVMc9o5F&HR!5r&?72XKXr|`b)+4Y zUbwxCKIgWn+8djrP@YbA+jjFj<4W~O@)=J1+Ga&w6Gd&nZwpb&a@fj{21RTT-w+pL}1M$du66W;$(b zYty?0>a>-b3nwobbGjW_e}=L)-3|%BMA5swkh~z9UB4y)h_y{`qX)Jfq07M+r{e9s z)Js_LGdcZ?%UfU1Z3qZD9dUnqsVz@|a-}wPd+48PQ$5#<(gpa={5W$Fo;y{a_F|@P zm?s7x1n}S-%%cu$+_=$9^WaU41})>%hx&D3A^_fW(@n-bcJJP8cJAD1Hf`Eu-v9ph zn+q(;Fnf*Uq$Fg!R1v!(;M za0oyeQ*BHc@18zP9e{iH?ltfNp$zZ@ue|a~vt!2&9-M=j(g6and&Z7{;+nZSFm(Xl zc;k%*5Ztq8j|CxM$b)k*YdSyx(wMr&l;_>kiKzono(TekJU9olssp%icyJD=1Krnw Z{|`+)aTxx~sMG)e002ovPDHLkV1iGtk!t_| diff --git a/website/docs/assets/maya-workfile-outliner.png b/website/docs/assets/maya-workfile-outliner.png new file mode 100644 index 0000000000000000000000000000000000000000..fbd1bbd03bce52041ad511dac138479494641361 GIT binary patch literal 4835 zcmbVQXH-+$wgy3pLI_2S)R2TwMTqnwMIea;M5@wD0z?I*gMuJL=@1f81A>)Nt6Sm4;iAC0e)7RM2n2qg4=FwlChgoahYp6>h zY;1tmgTdYzSc+w16N*He8rwy>G4tFrkC}-jZaEhO5H4wgk~$<-;K2COxd++Gup4w2 z6s$Dss5b2R0Dm$FG!=RFv-#a^*ukdfRXQQptbsy8otg_^kby zUDL*W+Y&2#0&%JULokg;2nz8sOi{3uyNf+qGj>C~XtWIld^mEsqOK9VpRP4s+BN&u zxA)=Gm6^Ba3sB*(`-|J#uXlfM%`|~$P!VgN+n3r?kB8}+4voWL$I!TVV5Pp8{MKU0 zBZn%trk8`&n!IQ;H3S|Xeq*7mb$;aX3+?iIUAUf4QCe@&?o#=6OA=CU755yEkG#`m zdc0_^?2>d?K-O5GTD!4Ag%6PnS|$&l^-7EJ0-m*x)JSl<=9Q%;P_1HSMotW=$lTdU z$JD@9Vw4Dv1-mLZ1_;^-Orb%wqCl#3$rkV95-nQ;-<*$m??Tue(=L83b46&S+(joMvZ}jLaFan33Qsa8 z4gwRpn@mJm!%ZN@e>aFIkefN;i7W@aHf;We`RqGs?8fK7^_Vp;qiYiaXbTn!1f@(a 
zhNWSjDeaNPK?+@+fpm$gYncVPp$bk)QnR|mq;6AqzcdB7`&gPcat@QpGu1M7{iSI$r}|a5OGOoTD(ymvM-b<>$dc9O_xr zcX-L<^H~Tc00&vqDPVC!`hXkq!ZKNp|4x>_42394_zb%-Dw@RLREVpU?u(1=hy%+K zU8s?2;Og{BwU47vL`E^fn8#6y|E|Sxj(9{?kO-7~Z?xt%ATYz}eJr}^jL#=2$EvLO z>XxJjz@9&R>ck`3pdsFH#4QIN3!rlx)63*FQ>gHC2&H5CJRNH4+cAT&T`8FpEF{~( z%lnz9xJ=#Szo};;vZhdqwv!-WD?$VBV=QAg+C0-o(xZr@apU)zQH0_W6orQ__iNF_;(z3ua|8zo zQZR>~?D4PKo2cGgJ2I0MGFgXI1qg6NG1tOO;HppxI6?2pE8LPKAap&)NSS-&vm#O% zk#~a5$vb($jDbxZ>AQk_a?f1Zq~ml$Al2ttiRH!}EGdeBk^2+8;~XvE61)w`XKt`0 z%1TPcnP^AdRs)aY$9$IU@&zXK{Vh230Y0j!u@V33U=D z6hrhVE0guMuFcW)*SEhFCy%V&+jUt3-ZP=e zV0<4Nxva31C2XvTFwPLy3K)j`4qb8Ce789C%x>o9&x(C#e7%sQi*Hb zAd)6>)w%4vl^7ix4-8f_UhlkpRKxt4b6tS_Dfi}b+?$)f-}u@&*&#d6N{ZpYZErTD zrR`HKcZry@{y@IeIJKO@GmNV z_~kYdi8!AkMme)HNB&kv`s3S!UJc0xa+?s(P_0AxQ^e-P=n|Q1z8(mtt*v!R2R0|G zxW1{*;u#Z=(k~r)WN^bFm0B~P3O0>7i4*`I2i{q->XDO6pF^AFJTMA8=&U!-4rVuvpYW zRT&YwS-mRdK9P=jS=kMhyIqX<)UdcV+xg5$=oe(H;hPZ(&gvx2@#;~q_fG2Jb`Y2> z9ebqWOFdtVk|2qLejLn0Ir<+P5mrw4-n*LlApSkb{XfwcIw)ZwGb%Duiob9qt^R`U+1_;>ayo|Sy#NAUdH(7~ z8Aw2e+7|nqac7-2iGthu1?I07pTw?r#<5LwMDnO<>~X9ZATC8=15C4d_rtPwgjhfo zc^R=`+G~^7o}+EAWt4Bdn0O%|c;G<(GWJME+{IU6X^ECOEv4H6pBrepJ ztu!U$c`Bsll8$jeBxc$3oYF2%tCuuC&YjkHzb1XvQ+G#x2n|WWsGNy%`8L%C0%BEQ z*$P!4U@r=neultD#!&k&=UaSpP9|bdx(Gpp++S_+zc#wyD-v0>Ld7IumkF5%8b!H$#YFf)YTJYPXmbThf0NS8kC zb@?iHLP>sD20Gyd>`U)7cATr?WWrsaqzh`$hqHgg^n1<^KVvq!;U{ft0Y&*^yx!6% zzWFSd_o*1~2Z}?QP+L_W!ccufYbO_8yMHT+4V=kNxharEv0RFj^9McjG;L9-J7 zKrmq8K3<9vz34{gv&~<~2yTv$(aEx-`p1DUc$t?kp1&WRH$gpNU#xApAni1e`(59{ zieY!ijLxa!)t;srI#j`>SDWzA>BGzI<=ec5eWFxyCL-JD0*m6wYEV?Ah^(!f8Z?@K zR7bd{WGPhVEFivDn9|P32e8KzUH(nQyd?1+>p zRN(&Za+StwbFp>nJ2qJ^XcghP$e+x*vKw0~_AIILta*2-Y^hbiuzWgp|M$}8_S6xi zbc|hz%xc%Ex8=PLbHL1H38`)i^uYV5xR5X?P# z&1`cjV#c1En_Jv?Zi~c;vlcA^Exln1l1NyDu|zcY@Aa-zo25{sSW?K|+x_KRp?A$B zhxs$DW;GK(XBIPS=aFYRW2ip>DDe;}h3|Xk@{f^Q9Q%NmbFW$;D5@_#JU{LI{@HW^ z4ijb+0+#L|OVlLc<|QQm*G%T%&C*J;qx9H=^@pz0soN;HaN9r5Ns5BHc{p^40Ki_* ztLqCheb*5M$^h;qai;rHxJ__Q+BFw$qZ?~-t(xM}0j zaRZ7qcbF?h6*m!@aeaI!YYz#i4y8nr$hQ8BeOG^eNKn}>BakK2rq zcN(Au7;?3Lf5eJZ6ya$aufGzM(s$-(`R;__{u2ga748hLOQ8L%^*)jVpoS`LPI#9x z)#j_Kt#=fnUUu&`X_Z7x+_<&10M{e8s|$>Q(p@OTA7ig#SLp=&^xCfbrJuQUCU|{% zEb3us5;n}#J;vRw^x-{GMMK&1kXht59)s{8d4#|OtXXc3Y-A!KX&P1;FI%P@X6w6gc%1)C=kAAotDge# z6U`TkpU=EHX%P;gVA!Ckzn(CI>&uZ)4^c3f`0m%BXxP^ac&Si!yYJB?EtXiNLF$B? 
zwx(~02Z4{SvsY#%B0?stut+5mR8S$k7{Lw|spav!zdhz2>v~GnLyY@cy82vDkrJIU zhhRUw>yZkiMi}mGdpBLp2!xi8%kXDU$;Vjl$bS-MEaH(Vw)+nb(N zjY3(p%!%RS36u*c_Mhes!yw&e##So~9rsU+KfQcjWlib|2Qv-sh!iMczBHKeKp7r6VV}k7jN}ooXPkvFQf7SY7}`w({Hi)k*j@PucAQ_->hs@!IRg2k!;|9ySIgIsx2sj0kDPkAam07u_Cw$o zxlT%d{;I{*R;itfvBy%vj}pUBRRtgZ(50SSX;q?N5uVr&fqY4>A8e@kz#g?kPYP=B z0H(Hd_JPo=A$Qr4+}&#Nds!=*O=IuOrakz0$#C4|{6IYdw+z{k1tYU`f=4K>A0iXNaeynTeHPDv zZ`}6BF2AxA_74M3HRev-Bi=)*rGOV{)o(#S@5+)dVMV-tZG+2!Z3(m*pw&c;1hwMk zZ?~Ok1A3M+HR^ST-yDNb&Ga%#tDlw)e2h^N6qUL@c&Vsh^~AbUgPabj{ZN|-#rk|E zyIQ69E=zFhb6&~Of;+@F22kTWmCVGduUO7h@BYx+T196Z2#Gp?Iq2&(R$d!A4dx>J+B-KDaQU?Wh8&bwX_g+riOtRY5n717}tTm zcK+<_BeCF^z6BWbw43*R5<)|5V{S`e zTyVxL*H>&L7a3ofpAP z;aX-af6Fq z{@eWW2cA1tC(V@ub8OxFEmy_9M*<74o|MKZQVOvx5ks?`nfA9}+hzgRKHVZ3Sl`9G z7b>Ph7ZWX~kN>GQ=U_Hzn1T`g`ycCsWNs9`KFnhheEYAfo4*s{&%2!epW%YG{cq_9 X`o37wi;`y6#S|Ob%*wRV#3TNnn_AI~ literal 0 HcmV?d00001 diff --git a/website/docs/assets/settings/template_build_workfile.png b/website/docs/assets/settings/template_build_workfile.png index 7ef87861fe97322a3b23c28ee3644b76ce995e9d..1bea5b01f5f3145e2d1aa5d46545c3739aaf2584 100644 GIT binary patch literal 12596 zcmdUWXH=8h*6vF$DvE+ZI-(R&0Vzt4uu)K&B1i{~Vl)C$LJtrDQK@b$5CwuDARUn= z5)?N?dZ?iz1_=;IfB=C2$@j8Px%Z5F&$-|I&bW7s??*<)OV+#ATx-txtmm0)@x;dJ z;yyufK>z^unVbD~6#zIj0f3{3pBMZK(RLp{_{HIW^`bFQ+9f#$K5*YTZ+RX7Dw6kX z-{1kC1s<3=`U8N_x7`m1-lyOe0I0Q^|91Xbu-oD|E&2AvjLGF4J;J4v!NOM_eDT<$ z=BW4GcQiOuU)nm^xxH5|?dx4=qG)pT=xZO*DfEYxgCY-q`)vrS ze^l=2!9@FK+AGoe{M{cSFZ}A)Av=?ukvKAbXMoYa$cTQG=QE&#EbPjto3ti2jMG&j zDXch20DyM!gY%IMi$=F(;8cQ_rtqN63UN>_0Qhhb%K-qx@&o{gS`dbVZP)+OmRlSE zQVclv0l;@@G`J48lK)9_g@bVKx^MKe<5IL)`~I4+Yc*Li^{xX#;DC7$cvtrrD{vf# z8qJ6a-0iH~@+3jB{#(x3@z$?a!#L!F8LOluij6HFMrrz5At`8#b$4y!L78+AtyXtt zwR&`3-+Bf#)=w)4TNg&luwvHwVxW~2lgImIZ;z&rPNvGf<+KcTzSLC|hzti#e7iKR zn%w@Q*WXQJKWKkWY#{vQAy0i#_BZ02TBv9F9zOUARNr%1ADJt za<+<(J7UfE%=6B>y%HC&cMX-sQalbmY&(z2WTyB|hJdqizr4A(^X83~crMHSSKQ(P z$5%FA4O+#Ad*{96&FG?oOINMH$o^4*&J#S^!iTndT~*YFk_Ok$E)bm;vXD`CNIuOh z_(?>5=i}YRnc%{`G;b0%`pOM_xm^*0zQCp)DUtzz`)~G*WrI$6weMf9`#*7$j~eJo zz(VSuyj!A4aH;pzC^vkz7j(wn|FoC?$!LEGikfPuKVw3#q`L0@WS}D1=ab%()M%-# z;M-Sr3ERQH!9(UteyF4lCgn#cU&K2UFkJ5FZMVKqe~*KbrIxB={>6IbU|V@rO` zO2}fopk~oK&dyEHyQQN!*cv7ncwcl&3z2aq7n{G4 z-w~hTEoFxupn=gQyTbj(GCR-w0Azt--@% zGanwoeBw+_I5JyqgF*#g<9luY;Bhm;()glWCI!mg%Xp9454t3(+2v_BAqFcPsQ5Dw z>|!>GAgbNuB7V1`hO_hJ4cRw`{y1;aqDs6qCV@b?q7^Vi8}?oVd@=}3MGtkvKFzzM zE(vd?o*6q=HnrP0tC1-ad4C;M*Zrzr>!HHo1Yl z-`9)HjzKz5?nO8$3u$6-Gf?(PbRK+9x2b`Z)~ix2PJp1whDINs6S$DWgb)Ow0MPLW z8a-mlD-NrPQ{w_0fPaz>{|Usa>D>js2g3h41A3v~lD)WqsCejq9Zzo`ftN1mQ!+O@ zEQ9EM-F4(l2{O}PkJ`3&XDlZf)h2le0H#%np!6r!hK`R?Ra8aYCXm+KU8SSUacR}@ z2gx+UU($^$F{7dL0*pT@t?P;!k>dtC3zbw@vc9()IE(2!Ug#NWLy(h~NqCxibo{S9 z#4%E`(M0uH?N1&|WnN)wo=++22QO%Q&L(Id{9uLfEAPN)km`M^KM2reEZD zBJ2X9&iQYRBDdDPf)0)lh2faGm?qrMoAv8ZjVD|CfwT`*e3**tnYoNR?KSBgK&sND z=ZA+0D}N-acyBS0bSF{e0z9=`Y4*dRpN%jh9o^FSd#*M)N1fj%P5N%-50DPxfZ=C@;JvxKSlbycNot)QVG)muVe z&rlns^UsyxSQa~>QhphIH?3wI<~JTkbOAt-+7qaTLjPDeb!O>~=RKB&hW+Cw`c-yG zcU_BmMfzRcbKBlhMxXxtsK|3owEw_f4Ch?D1M_99{^@v+?vPIXm~olK-r1iLO~zLW z+UZa0E}gHQIhge7!4;P>*2q}4K(j0OVAtJU%c7$g#x479HN=mz*GZbUgjOkGClKC= zB{SJF`HbBATtKn0ww~r$Z)&Y|!#%#V{voJ8>^}B=qoID`9z@ZD=AQr-Kd1U&$7eA& zw&oy@f@`=lN3)o_)MSuXQMaJ{4o9m*q^i9?yNDG`<=U~w3Lu^m2{=V?3#e5Z4u3TB zKEJGP5rRiO^6YT7vNjBBM$M<+=|I^1G*)abJo)>D^DmATngjei>sZ5X2ws@ne9O?+ z_c`6U`FdoAtcJB4T5Z+SS+IalX2Wq5V`5*ogvqbm(+?aLLQ3CMA z)DeL-A7yS#%J{FH^-1H%yZuMp!!p)jCG{CTQ?txYT`Yke9|T{-zA0hR2OeeN-Jh=K^xuhQ0(2wLm;%dJg?%xgt#;W zjvH)lR|Y;d-;^1xHn!{uyv>-qX?3;(q3NkwV8>x;d?CcDFApzaXpP_#*D;EDgogH= 
z{*HWY=mw$W%p+7j$7}|O1kss2zL~y0(YMFh0dH0fg;u){z(*Kr4Tk{_yAOhztXET; zlQMLMkN#rjcGEt_#&w^O8y7wYeDhj#^G$XiK4GLmn)Lj{oTJD$CA>PUdtAAolT;Yo zYTZ8T(JLq;-;MYle)GzALUWCZ&2PlwR&~E5xTyED!;#s7LBzE^GYxRZ!JQiFYaubN zKZCOrYWvZABh|OC?f@L{7yvD3Vzbgzu4nn44Ge6H(s;x!+rNFsQ`)gT5+!Sz+7!6V zc&jLuIPVTs0XP8rQ zgdhIB(e@Cy>DBW8`;;SEwDy-!_hdlOr90k{qq@96)GtTpKk1?U=yu>3>%EAL??PZF ztlmrNjU8Ef{LGeZ{l$SQA;tnLqHtD`nCeS*{Pr_I?Cu-M1u^_c;pW-%o-emB(ui@A zW0oTDWyTqSb+jsY##tQ5S?yDwn#*6>)XSNu&L~7>p!3pg`5kgNV`AEWb)YK3_5!sh z2c_YMg|x!(b`=H@wcfr$*6eY%wD-ecpPfpH$}V+jJ%b9bm8D{g-4}1 zqtv3K5+WCb;z?_@iWl+7f-Bjwg1YxC?i!tHQJ)!JZQptJrDx4PMfkvgkOTi=DY`nb zltv+h4^(6@&V^=fhXM8J9;G5w+pzIQckJq}hhw&e_poG*)$=%HYtA)1fY@ z(v3{!Qb8@wNL6HNex!ubVN0lIwrg9?h%(~FfB%VTpdXQdf0{XzIIz6^9_GaH!Gjmq zL*WCa;ar9$IWe*}l~2pe1n<;$ZdgTFR9KR22+h^P$*QKck}3(pe3-*h5%!YsTS5`1 z8tCazZ2DZNpAaC6_>;uwR|_RM$0{!8~Pu;Jn;<9wo@4Q(Q(P7q~2Ku^)3C%sE7GwZ~&Mbgdw5W=`CF2$^~=WtK5y$4p#PM7imc z>PKZ5r*V%$Huk7d)qowL1gVH_qF|Cm#a2;L7#~W=-AU4BMB5Y4)Ytd9I6Dhhxj}`c zrp%48+D$0G^Z#|)0ZIZee_go$rPc!*Ex|?s;K$K_11bKR`j)5Hlg2YZ#h8)f9B2dz zogr}T$HQnJM-EpYAJ~g-{}@^n$Dg{#C=h4ws)zLByqE`N{1<_!eHDYy%3>RNxg2GJ zo;N!{izjRs?}68zi)ha&8~IpVmArqu6!T|UG;|Y*8Uu;O)y(Uu*pzCBl~7R1L9=ZWqTt~ba|vvHp!1b zh+i21q&+t0#+Vv+{uouf`q?@jYjb$|sD+tuS@$#bwxiUoCI`#!DOe9GVK2{Kgpi+i zireLuf-=9=9&a zxj{kAlDApaNBrUO(J?+6crju42>pmKT(?D#Q4>T3fa2@xS=rBi8i>KKMJn~a{{(m5 zspQ4Xerk~%8A0dyCp?qOO($)m6;@SxbUA>us48B}N3YXY-#Gh_L^!hLEgZ6sEvCNB zkk->C1W-%+cf9(>vU~iWS8c(NdH_JFu7*}xWD_2Jv4PxX4+$PBIqFs;ivI&``ZVm2 zN5{CBfV^wAM=4Z3xkUYfw8TuG#vl|sY*5&}jPkor(*@BzSV9=CFCd=yX(%XGK)DNZ zdg`jOIkK zXM-Ua?u5UE`^;{vKFm6&Go+rR)o}n`E!5RSEFt|7!_`@^$pxfjgYNqkBQBu1yU!U- zfE<^#{N3i85E${p&S6+b{r+6E!tfI~&t3ujwdHK~kksILB|#Hin`2Wq83j6qANMB` zdpp>-W|=049{k1m2>)Vg2!>L$1xEAlU^F)ycmB?4dCq8##;@s@cmUWw#|*uppFUYJ zy3c>`Q&5@^<0-z98}q=c&c54;cV@T_=jQ?ddAGTO;M2ZR{4I~72w5WZB_XSrJz*g3Hxe-v1YAn>9dDcCsZ1@x_l;QiS=emo{O2~!Ao!}}8Z!V4Q`-tK zO!c|Yd7@&!lbmA>ba3*71?l&cRxu6U*empS9*z-)MpXOWAP#l!Ypae!M!s=nojt&czgrm$b6PEVWaHD1F&A!SIL1^2%0NxKUihF z2Sk@A(8|E}y_zlF`)}|94+Q=V1pNQbl{7}cdI_p;T*nAhUiR$Y7o{M?6Qw}`?NqSt z_4OYDv8izd`^*PBtb0x#^o8~aSd#HG-mO7JwR?l`x@@B%0?&0XAnj730-9xzUwPjf z#400Gb;=~}*$M?#qXOgY{A}HZy(T~;!oDMSb=ZoFyi4oyvrm(flp!f6;;98`t;MO& zpY{V$${4WDajW6Pw%szu<2Y13oh(i7_|)mBIL9)rF)eB-#d$z}2>QsdlT zBQd+q2^LFf(=YVTL2guRFM}J(mp{B#g)EEC1$`74PG3U5MY%{GT&=j>D6~(GUhvT(#ix;rl`s`SgSbxC(Q_-naqABE4!mBws$e~qe;w}JJ=_sIwt&J-xlb+ z%y@{gvv09CAK-Cs@G$&kz8)b4cSTR4_qvg{C~I1`g!QR2?ol15<(UZ0mhZ6o8C{cq z08fWTupsip-WPJ=Yw75({KQ*C(?G+YtWr8cgHPNtF(W};^VI|^oimZ`)Pvp!L_Haa zf~KknXwGYBw)YC20t<)QoSh>lH3(w$1~Er~FUO7l8*ze8u`oQf>+i5>6b>RIs~0Q* z+N1oaJ*SY{Jd})YU?^+N>;s?|z~!Y*gyj!tCy(cxFav4)H7^i#%@3Xg5%lmLQ4kt1 zCiu0PeVR9mL5|IgF@gNxGXN)b2_%AKnH-8Wf z^&F6Z=MDvlY{YU|8hASpYh{HUl?dOdsq8Cqy4CYY57SHU-qo*gHU_O+%dx54BBYGjq0ZXt`6^hyw&W|RrF58)bW&Ob79Hh_A^4u5)AR*6|!Wi30qU4I6W~$z_ zbH1{`E9hNxYM+oBJawGYbsNuzu@@%dTdRIk8mcIOZdD)QvJBe+msyscWQX5h@_ryI zK}QU(J9zeElG>deeNI5z9waKuxwTT#*GvhJ*1{4cZ$7(c&zp#+V`7tjPR{SYb^Xh0 z1HwlK)3e-|0KT`GTW`>LQpXZDgUn3^A8Vz5eOKh#9pZtCrkIhP9%uT40_4xK!Hhkm zDYZ}ckAjD8Ra&v#f|^%epDt|Oz|~SObL`)_AWMK1l+{12e=iEhgd{0%xcbA(8jT3| zMbThnW`V1E!-Uh)_SPtmO^twZgt4fR>$P7SxxtU5o}RHd7OO2KFJ=9-$Wzpb zQu|N{=+ytL5p+{D946F2Ze8O-$_(iAR5Tzib^GvrB$SeMMBq=WamX0gBRx!T2OK&k zaS;Asu8Q*j@Z*deLDqKev;?>{o!0o59R6M4%IdSQR%m3;FzZ2z}h=ml>r(ebK^Ot~y>^v$ z$F42kD9(GFQ9$c3ktOi-4w|D&Q-l| zhtOYCGsUka?2yR65W-y=xIy{={+0iw7*g++UALBF5F(?pBfH->GPNph{bmA6Kz=TG z4f!dPO|ECT&4xT$6OD8)X^hU4l({=7oT=D@EaQ+hK?rCD(Hi>9WzO!=Aeaf830`vK zZiQ5${Pk?r`< ztMH!>Wt*Xb7J7sL(eI^{OW7X;uTSd#I`w8|W9p#_LPx*gDb}dwo=TTa!OV%+GFSdR 
z28ehj7t+0W6PZBV+KySiCr8NDJFqrJ-mAGthHbET?|v7rP@rrwIsy&I=(T;N@)-!Z z&@K*He*}AP=b5GiA_U(1^Ga{?+hBbJ_DRxN!CaB}0HMJ*%$Z>!NqIqUgRE5BNN*QH zQT2?IJ$BB?Sr?~Ty0td;Isz6Ex_1ppqo>~E!FXo=s`cl^IMQhL!u);fYRNxG76EaJ zQ{`|eS-%|TI4DP4fz1U1--kUTm1^y{=c{uI=IBL#nXNcT+v^=#VwI)vC-h@+A=S*^ z5$(&uIxUyU#iQ54E@oTMY|Tg7{ChXWVy`7AZ*)A6%6v%6^fH8Cd+ z*iXGrewOw0gP_N|(_dWa*ZhmVrv-j3DL{M_#Cji^W9kkn;1!!I)3e+!UxoLn_b!JH zN>wDO%mRe4tk4;?(H+YyBht`>7E+&WU!R$uFf95Hh!`@O2Ca0uy7X=XI79>wfXavY zHFFNY)sQWxkKW+@f6s;f24{cA)`rUm;nX0XEL^(|t$C}c`r;%lVsm6`+=}7=9!$TU z;}w@VcP!>P)xXRMpSUz1PxEWsUf>#l37p}^ybE2eP#j;cx-(uSah3z9eQ;>Mrsd<| z&4v1m*D6u^Kf2t*YV;yC?|^}E%OGWff52wc!7_o9Hd-OnCGHs= zGSFo?tXi*Q8MR!m(RtdsC!$K|%oXf%c0S5GG-Bx2`Eu0Gqp;bjiyfdcJ$(s>&f^#J zaWC4MhaQx6ueV;DIi{z1@YXf#lZoGZ$rngtv@QWnVYYVaWO@Y?)ca<`+!(+7Tnbmi z9BIYm%fXwEI1`QRc@$CJBJh-ztn?`_1>lRwG(GzV4(Jn<4#j)Q5^{;kb87t`&*~X3 zZO4@t2U_6eWl&#sR(}xtC%u3ni_Oo!p0B^-$Va@oJtHOlI$UXZzP^}WvyahN_8Gsk z6i@S|g-n7ZJICt+_DSF^!T9h;dEqXuLF7t?7I{)5zLL5qeljnL~a1(ck?9RY9X&q5FF%mxlrhtZ^^FhFRP@1 zR}yGm|JUK*Ad6QusI`egnypDcFq>6*?Aem2l%fDH;It~|zL}Q;LY7Co4+oE?KsC(g zMgSK#Zt;#&45%$@8Z%P$#7Hdj<3)4}`<6=&k(m;_;&_r)#i)zSH!7#L#tl-`izb5? z-<>y0hA1zIFbo@H|(d+?uVM>qG(F7^XwBmHQ zm@=1DNT$T7R-dW{@-_`TS1ou{* zA&ikHsQWZBh2d`}k-0b_Idd6Ba$cj1`I;l0LXd!mm)A0q>bEk8IqMLd?Z@#gT-f=l z#4})F{Azg*h3i3xFkGz$Jg;9KwK}M<>TIy=eQ9`bYwK|_v~QvMIWd*K#hHk6BWN_L zUFcv?BA2d+DRFgn$J=CoYo|~thSM&;VJnz3SR&t#m7@d??_oai#y;VZ1=)iWyn<8T z;*xL&ZY0~rLK?s2FhS_e zHVV2Jh72nzJHYb-6|@Q?=?iGKW?lT%W#f0>-1y~q+26*JSkCrnaK1{5N^6vJic3Dj zeP(3E@iz_-pDv0b@c7{LF>h~TvUnd+ewA1KBBF@b6JDuN+^fmz!1OYp|z=G!+AsY zyHRRU8LD$Qh(7rZhf~*R#ZC*NWxfA!QW z_uPTE;ZGdfB=s6mNIwsd%ASQ=f%libfad@yjXPhHKlwDSnK0t+u-g`_$G2C>yio(m};#RPqHT50vqzWmNoCcjG_(4sPH znt#`sUE`?z46e;jE`0k%ca1G(IAwDC%lIa>QETgu6DlN`p!BEsL1a$tk zQS-2cn(4sWkcgaNO|gu$x9qjF(Vk%Fl1w?|xCVSqA71DJ%7x(yUn0$jk)_S^ubZtS zmakJyK+V!$-Bux>*;gZg&a?ic4ZMP&Jvq09p0ZO)2hPS%vUY44b*c>_RV9AA({Myz zaNv-G42A+O-rN|0B_qSd$e-WmU&m^7oxtx}=8olvvNXn~1MGT_?PP0b)Yo+hsw(T7 zgm?RV(IP7vGM4p*9n!CTZ$Y?tcUZ>Y9!6cxorv*m{C?nd3{-=I1sUC(RcFsaK>5R;x3^nL3PU_G)~A zSO1W_1MCnyk0vh@LD-pAp`Drx<&3htuxpyT(8gr%=saCcVY~L;TmG|i%j{W|vRaZ6 z3v$JoJXBReVoN3ti>B>b2t>V?C0InU*{R!+Iv$`^fpFGR*3MPRibzpH{w1h zdpu5n9$Q-Nx--^l#C|ypN#NIvTwU|2SLu$>y(giy&ObP`TP?Z<7M7dXcT!kv>krJ9 z@(4lm;(LF8eTsbt!OlAGgo14^L~d7YeTg&lTrc?_zyJK(@2H5SSL^}Q+`Gto>rdQ_ov|Qrq%%KG}hA-}TZ^;Z992 z?lz(n$(ZOKQ?YiypZt2r-QQVj!Qj)5QTb>ei z15aIcr{4}0qJx9oN<+6m=9yehFSf>t=G0H^ucZ?w7Q1BZyfj3^N!#fsLs^o%#F{rc zj_ACTE0f!PMJQi;;W}*7nQPDlR)Y_YG!eRUFZ>Nes{T>aIfuQk&gI)dCMrnnzU>%m z&8|mAaSgbIu49DBtRbE`qJhK9uT}jm(q{)joAovk1+GJb26OX8O%Xu^M`_M|3NQ<7 zPjdT%oIsz5&0)T$u<)rtJzXC6S<1sm*4vLkqKqUu%00yEZWd{~z&gXdY|f>GyolIe zcfGG}y(F6&7V$a3#KHD5V{DV7^4SwYQ(Kcbpv`&&sBLl23nmxW*H z^=gg36^xebdxhhd@LG#+FeI1AwC>3_#F3rm-h4|N(D?+fDRa~SMf6ecj>aFf8?}$r z%L=T!ZIdi#+;KlB>b-R@@szDI-`=beo0)C`jFuDkIPNB5dY$x_zw^1Xj zELfuu%1C=jXMvPliIY<*du7s`hhxix%V)M1?y{eU&MTPScHqd#}{`2$dN6{K#DGRQ}lPgl-9)TJd6pwtW8xCi2APtuiSB8eG}HxyPW75WAYr>l|+n?*X0@F@Bj=^F5~ z?1FhWtVIVY+7eLGj;YVQ7;wK^vag~KF^gwWN`1FUmf=DsuT8UFb!7V_MiYBUfo4RV z)!P2z3}|$#*jo0FTsYoGZ%u?&nj)Jl-?CXUz9%wa#Pmski+h<#chIiAT6gpa9Rb#F zH)ly+EU`^Iwb5T=k2CXa--##|PXVj6sH{8uS@lR^-G;w$%Cn=>c zV&F9FQkJUKly0cBMwhvA{G||ASVC!CUhI`ys50C3^CUMYB46v|3za}^A5Y{ndLh{>JQrg* z**8U)Tg?8Tr)#jS*m@hczQ{lYLt>*>zvQTY2@cY`(yUSEiAdy$d^6Emk)-^Q*pG_~ zn=E!O-kNzQqqumRkW$x=`o(t%`P!-Pq(5_-GN0Lo=?e+D>r5u$8o6-%+VuOJ*VclU zz$p&moBfxGHbe$^3$*a%mVZHVg&m_|^CaeuzV(?`6NWeV#EtM**g^xO(k&*i{N$_!<; zRT#UAe|V~$?n7+2Yfw$w_2>;=am`+AC9f;m^ChIMc>@AhA-I&U##X2B>amS8BJiKj zw1l7TjV(nE^f!FA=$=xL1@VJllS|cCr6^ z&?Drsz~nfEc4^1JyfNA}yXx?_J#G}iKlJ!NyxfNp+Cers*&f>PyuktPzPune_}>vL 
z@s32O21;ggDVnP29^yk!mjH%rKxMhux`+UIV?@&e7g(;{m0=g9!tTO|5<_HTHP;AB+&crvLx| literal 29814 zcmbTecRbbq-v@j|R>~nO*<^<#WIG{bmc94N$P5|DCWH_|2ub$dNwQb=-XVK$?w9ZH zx~}{BUHA3Rb)Uz#I^Uem`Hc7b^?I(?306{gd>xk@7lA-rmywouia?-c!>@PPSKx0F z1+gvR51bd$nvMv>wcyKNXnd{hxd;ReLPkPd#r5Or#7hq%=X24`?vAclYu;+h#N>A< z>2xd%LOjV3oZ2AS@`=f)vasK-e34Aq z#``I1YTcm;C*CkmQR;(D#^U=a-PV3_DMuaVBGg7L?`W|Rh*i9iK2kU!VMGx=!XSd? z1cBJc{(qkaMYS&odtOH%dM23t-Z}l)5%#==pRskSF6@becrAjt?uPd7ueVx8M~is7 zvO&&D>PW%evl>QdDwXlIZ@|xu|JUO-Rd>vb58AnPDoR7kzeg3>N%A#|&Dvw{W(s?s zj7^>I<$EtX353%7zIih{JL|GBY126BkD8fpji#rk*Nh!l*^A@USCE%4H9W#!4&$4o zk$K8Yw0-0eBkPY8aNRU;b8}lUm;cBgnYsKVj>EbA!Id6Wt&Y0NM?!3gcq#fUrkaDD zk)p2(;wD?>*?5w^1hfTC>C%sFv;)S7` z+fg93R}0r$9S;497pV$a?Ok1EHg}WCJWpKwdV&b(ub`vjQ;UrE#i5Ipry)@-s~bX- z+mXQxSGG~fSG?1dZhLp-hFy2)%25`*z4mP4GpW+&4j=FQZ}-xlF06j{rt1h3!;mB# z3ijMcozHY3bNs>Yo-QxnAHiMc!=^>=j2V2c`?J5Vu{NK0Ag!JRZ{0WHz9kbi zGBTo`uSY5BRU^kZ$FIN|!^_J{CFsJ=&+iKVptQ8qLqwmM$m?XSJe1^a)r%RFxHuZh zVr#ak+;$=)EX<~OUd>B#eSQ7DdDlFDc-of~ujA}5U!-Y}9m&Glbdj5`HE-X(eLj>o z+!^77k>964k zj5T#)f)p|NuXQgB@>+#O>V-FzlG9371!rIJXB<_uQa*g)o#kG7ZX)$S{L>wJj}WT| zrB;tl>ZzLTzm1fOVb4&Xot-^Gq3VwRviT{YP(KO`8?CIYHWv!n$NKx7mVe(V)HwZ{ zlRsJgGQ#kK!l{|CkkI;6yu__1&u)@}+eBJi+(()LHfCjIrSY$~ zmc{c43FM*rk;`;hiYZ=ue+O1qtzW)8Fl~?Z-0gjQ@7_Jun7=)#kBB5|_eY<@7CDqA zC5`hsJ6;gnbs&v6K}LChrkjMg1Jp%$rOx?ni#zJM zTBgU53^L(!$9pS_v$I9kBiw%Pl$iUModyO5*xP5nv9qwu@GIo1tvCq;OQfl=3T`Df z1zXzNV2IQBeal%I%1=p8H&#+YigYwJ`Oj5g;Zsfbsj|jMGtBbm)zs+Z57GOzwY9x@ z^Tu|(;-29T3?DHh2}x{h>@P04SF*4#$`04!XSO9s#M?U)ep7%(8$_olq zOS_~_?VTVSEZMM=MG!`<61up*_#O|0y}WCZtcq&KJrN`O>P+6a4XeXE3+Asdv(4YSO0?I zai5c#+FC&#F;P&aJFytEmOtY)ivU$m~uL zwVNbfT3_$tdYjD>@@;)eGxh;_*sWW)W@ct0BO|jElC1V;bDxxbFJiyp>5=_s!O^Va zfX0t3;Ap#@)6>HP6OU{Yo_QZF#5p@V^N*k{-IS)V3o?!zq23L_)J^)uCGs;yf}+`; z<3|55&7wZ8)x+>PjjqRx(;5n_^8`W9mfzz4eL!n_gQ7k7XnszdgQC5V9sX-}fG2q8 zqHxJ-noqqgP1}4X!!eo%pW_`eQ!i8Fa^?>CRRr_LKBC~`_!*-}w}R3-a_^knaiCCt z`jn7@LQa_oWwE=|Tk+60!bFpoh6X`9KRb(xGI{jqzVO>MWi_=-CFV@I_#bmyNR+Lu zZ8W32z{_==VpSqi(m;QIT0iVJK@kz8xc>Z69r^Kd{P_6zk)zdiByRF%BPjn!S$2eWpk^06xHhYc!&2Yn?3k%SX%xus!OX-^F+MIVq=}T%VJ% zv9W_gQA$b*i;*8{=Ho+09bH|;WFbp}BuWQ(9aFOFY>c{IgRzc6l7Om)Mo@lRTF7la zTLuOOcFiI`hSu?Mog{@JYu}u-G^6eLR@f9g@>6x5iF5q^DDgfgE32<8^`q>SG5WpG zU}op$($mvfLIfPk zjT^K)FJGAB+8y8NWhC{zt5xLe2I%cPROIOKB4J}9Za;#Tr+!F<|crs zy>5GOb}TkXx6Qx9BXJlo%!>8OXVOJD0yC5(B|F!Aa;bPYpa9*oC4cT@vrq4|$M<<9 z+F`SDPwgFhniK0ELN%j!B0IkFV@U5$_^JHiY@$b+iu#yKPoULIS_q%h;ELPrujpXH zy%T;#`6=JR%Ie2$5=9}zlD2e(AqJ(X2*v0(`kxNyuP5>Ta9mC5mp#>_Ko{f0SQF_n zv@xSNO}$b}Z9=`owew1ExB0WLsKTfG*RS+?Zjo#S((H-p?>w)z`~6ww(0urH7%$)G zjt6=ga@RpZPVTK_(_t(S`yDUFH+%v|*L?7RI;{xrM5E~;iQ74L_*Y2REbMk_&V(n@z;aht& zN;9wWu~h`=N(5rbMtVi218oUITA-)=ookX5Vzj%)6H8vG=kb=4KvIK2fnMFU*Jiiy zFvpt%&xcFRh5ue{;3Dk_RhG?&N@G@6A4;wx?BV3kw>V z-&MZ&XN5!f5BPfUUr+j;_NAQn_YEPA$t;$99wXk`U?!oJSAEL&t;7RV9`slBV$TK2 z%gdL|6OY2e!eU}#pq+XYQc+Q{x3dz31ajhj@WrbBw2|h1e8p>VW`;IW8W$JWB7srn z_t4PL*jV+qZ>uv;pvQR~q7aCPD2`T(larG%eSK)KsUlup zoO=csCJ3|r15Eso?M3)hLf=nt-fZsd%7~}s>Z}(;e9ABItX8Cp#PoW5)@zWcoPX`u#Qc?sfm?C^QAdajz_!-q53 zBJ&Op>q_DMg{bgp86u@$M9VV}YGN3vsuHgcHHx;Ga6j{LO{Brr*Vorkc*R=!sc#wM z3L3}M&=6_nSIqCp%7QP~f4+4>ylIVomL_ zbSx|^%*-;6gtJkp24sx<{AA{7Qt1tpj)3Ru>gs4|Y2g+F&_DV3v+C&z-`eI*^!M*# zr{5lBJRRB8Mn@cz7|pjt!UB|||I+EE0~kq%gOin&dun-Z?wPVOT6u=2f{aY?(MiqV z#{Q3!noo*Lg$DxtRu3?!5udW0nupC|WKBJNM|ygCuB)OW#EPdq^$@Rx->jk|hUsN< z)f(s1KRxQVDm|jVf$^N!M+AG+6M=Xo@lZfu)FheNK$L=#((vuIr4{R)k3&;aQ;Z9* zC-Xml{+z{$hPYiIzaJADJNlUdfmov2{D$sCCbWWxK)4Q~AqpOUwxkn6APT4%#y77Z zR*~a7@H>GCHR3y!_&yq9RXMK1@D35~Vpo!&$Ki|g-Y)}I5!9kyC;MyRBy=R90Q=V` 
zs)vZxu@ME1DoRR9rluJ-*E|6N<$V1*>ArF|A@l6?R4rdGSB3T3wQGVZ(gp_8(+$3~ zK&*9rSXo(%*3$L7y}j-1HX=mN$?6$T;fDv4uks5E?T>etfhBo)dA&MVAF$xLM#{k; z6P5M_&T4ga6|U3K&f@JbDIc){?~C)0kP!1^Hz@|ffx*FK!I!qcwm3LAgs39Mz6T$qwmt{7Kw1`vHjKpPNbUmV@)$^ah2vs)Cnqt& zpuA;s^Tk-~AzbXVw6u&TaUl|PeoA6u-`VnOW`Qqx$-!lJa^gX?G&Pwvyt#@^8!xA( zt1FNE`9(RMIRu4mPZ7wsy}iBJz#zk+RnpSXAkIwW?&da=c6xSZf4{E;4S}$kCBQHo zI~VcmNfLZ{e6Ue((i#Qb-Ot~jQplA{R8*9Q=Y8|s!u&jqACzUlK!CX(M^Je#w8i}W zYvtE7Ug;Qk{q}x4XZ>5Udv7iw)%ZxUxuBq6ijbQ_&nNL`nR3CQp=_+Ifah^<-0)hj zT;$NJ2|^wp)L)$SDT>|-lj{5R=#*D%>{C6y5IOd#McR`gMy!aas3^i*wQG(VK6N*2 z3I9v4G03IsF)UCcaK#X-{@gvMLpi*DV{P2fcO+pPz8c^<^$=X*QBK5+hx5Ivk`(kz`3!nVy zOcXsoG)~+Kj0R-nyk0Lu(F{zJRL@nMF*<%@s=_{9gT2g9vXeW7^sBJ-1t)Zt)18zH zkG0Yv0bP`n(=NyXE^DKvjX75)?4U+I3cMlg{Ff-A`O)Pn(w4)f4S{x9Wk2_;#IjFF zNQgtf?v#soMriwEIXQvEz)XZv+=I5NRJ!mvvH(loLnMkjel6sSxs{a` zK6k8XZB30?cQPzc;|SpZBzJ^p_#C@Kvjq=sqtruL9dg^#;Yt-cY*fb6%i1)!Wj+?7 zMOd(HypnVv>RCH-_M-b*m0zz#4QN*{|9Zn)0*P@$=l0*^5VC+<_lzWp8v>DlxdgmU4}thN3s5$4VbfAY4E?^#B@+ce zL0Un9>VXvzszvCWEWrN7iyo`eOh}T-JOfO6yu^uy%Ac?fO<_LN}_Sx>%*5%5@xlIOj|0jtC2Lc5?dm<%{dZ*}=tDgg3_3s{kZInr1=U z^xSN~x;UA-a5pSxwh*O?C^u;(%vKrg679qg4{Dl)1?a>h?$w!o^jyx(4@c<4ty4d= znn+N^AVS>_xZhx3U!VI0C=ZGs`Nfsr14O~b@DQ2m?;Dl1#1Nl8thS%SoZJ~Uj<{U7 zaO{`ZP?3Rs8#`J`F*YnAuWy+>S*yfrf&cK=+grW|P+=Ztt0degP?aqEO*sPf{bO{r zZz2MOBp)9i)|j@22E)#T2l;i!UvqMDB}R<7YnRq6e){_T0}JyG_T;B=c^Px-7Mq<&E(7S@1MRP}?o+ zm+f7-J8K6@7W6sS>JgzI0`Fx98{$5G^Z>dJA1j-!LWp6Q6Jq_vTc~M^Fg}<BPKIl?k0U%*^s8kcNYi-&sBRD0d76DG z=|wTvs779Qe(bF;F5YmzTj)^~)E|&UG&ME5y1D=!vA2T)gn@yfM5L;#yEZf9bD|GN zvL)}Q!=aw7qLn*XLK`FNCZy-->B*w@lI?Lgv>uP2uV_?ud-t3mLDuxpYy588k+M6yiGOYl2o;`a8RJ@x@PE~aPX!ysZ zB=tOc)$6C+E20;&D%Xfb7DywZ3S_GYv+ z1Bh;`A?=uy3umj^3=wqFR1I_SykA}H#tRJ5Q@WdLw+APlf_PBE5n?*Bf672dBN!JrxImTL_h%10qLZV{15BO1U8ME()B_YX%k z2?l%ZJ?_#5;N3zg69Lg$Ga(=QHnieNQfHIo*)?<^pCU5R(2yFlT5Nj|$*>FxsalS9 zc2#=A!9tvVlrN)fpaPFdjMI{jb>JKPm(Hz5X3lA8u~sjgoOFszaJmHjdZy0z%ZKz{ zbqZBF2~2?Klc!(bHMcc-{}-~J*~q?LPRS!8p3Q%9?XQ)E+`}<ZS&oOm&3>a@ z@%faN_M@Qz4J$=2pK0MtfZSs(yG2bJ&ZxTThJpHDa`ME41OfNhhzLahhEGylI1WDs zC~#5IY_2Xo^bUh|BOD_@>XDQqo!*CodR)=1vs~;E+P!^u{l0n#opC8_83 zOp0FHtxWOp@vs3cEiDKAE2^qQPF9Oy!@P8Mwox3_;`u3YK4TjlTXO$gC4T7rP_!1c zx7pt}Jmg`$y}ifVr^oG6rJ4>*tVK9awVm>FRuC>3(oJA-IPbFOs*SDc+nIz;H~J;} zz`1|+o{gfX?)&SI%m6eNAX4g?_W2a*xYj}0~ zY`?R$Z{d%dNv`44@v_bGceQYB`^8}-$13U)px#G^p1e0r-ci5FA>v+}Jjd@NrlhGU z)3*!gW@F-cW`+acGWy5iC6kTAvP0U5TSJx8jt&m6um38YI-76OeyAoIrg*%4YLt~E z>aCa6SGwq$s?<_s&6e)sCX_5jqx5k&`)(Np?{MmLU#tmpxOZEAP!xJ%UEA zsFqk}B%1R>@~Mc)rXl~i z$l30*rtJLj6py+Q$7<0-B&b_-;z(-*WNL~mnGohjC@HS3yn<`hJG#(sl1>q=m*NerG(Pr=#1H*Ll3i z)|jnwu;WuvQNe*LvJ>*x`{cQ0&h#T}IJ9p$1;g2GHE#sbh|SMa+%{?boDPbKvg#e) zy1w1`J5c#D+WvGvUtoMc6+8H}u_=`vic)6C)2C0pPPdxPL`&CCo|)?9)W0Z5v~!jp ztynaD>zx9W%$jVo|!_IRtAI*B@Ek%u{N#F&cvP>w&m*buJA zp@&yrZF{(3#MXp|oM2cfu?OEqX8+FJq>Oy#7jMbJIGi^Bc9t-mzj(AtyYnS!D~m;X zEt`Z|>fv>&tnNCq!t$l@^{=lM136Bzo-biX_EQpI823DQZ#FS;=F-}Po@<^WK{rxn z18tue6+{yR)vET@0q;pv-{l4{YQo(SUKz^2fR*;+#}A7HkyrcQnzrEKA#yQXXq$_UXq4q~j9V9K`Qr)+ZBfh! 
z#U_2|4gE9fQYvp=9JutkKlL>=Z=L7vuUb35XQmk!I6-CBeem2(zm`adL@`7tDtSod zi(@~mIQn%yb|%NGAW6ur2z?4M%=VA^aQ9a_O(Lc~ zsj7rBQSS4v0!0&UDsLFLNg^4h2F5hh4exQ5JWMtJBq?gJSVH8@iw6hN`3vA<8@?#0 z?B!LACY=!}+8e~J^XWsYQbA(ag>MP4sKwviTiM59;8^2KnyYyG7LS^mI*LIQN=nU6 zVKddM^P|PBtYY(SJ#fi0uA`0&1nNHR@OQz&Wu(Vs-S+P%T;h4!tvyL^1* zsyS-4u#rPaIlY4Hu2T^($jqv6he?&WZ8|GU5=r79`&<_+j(of=aQl9y((|~LOL(Pr zc73JW#U(ttZXsK35Y1_Nw$%8i!Tf8>Z13X@9Ju;-2!j)na0XwRpbkGi7 zy~f#RKmwm;tL18eFzB)}cvpywj)@6NaVTAdm6C!2VlUiJ?)WK@UezB`^}G2_?Y#ru zv!e-Kw;xZEcP4u#x(C&XX#M;q#nWEDuu%?}?HDRM)5|=3mZ87?JKzJ$c~o??&C|%- zbMzjq{mAzQyMOZ|>M8t@76@1kq{@LD^(V5jr>CbLo}P(! zN2aFgzkPdas<{#tSZUD1C{&zhsr85VWOHX}I>K;N}Zu-x5b^yvnR0n*0T1-w}X-s#L~O6Mf><8Q#6^qs(n@jLg;PT zh8yi6*dNKFCe%5qWfyX4eLlWuh)?QAjl!HA%~*(no3$1d5P5RC-6HZ|iu8Yb3gUEm z-5XW}$)!5ILa|h?&dp&mWr zvmP$+JlT)^LyzzbO;4w<>ArSc#bJ2>BJQuBry>Up;Aj!yN~)`1AP`A?`#P9OAn&WN zsu~y=*xLgv2PYadb`*9bL#6AMk)53#RRmbxQ6D}8AxG;xPvY;J3keI?*4EyRrxy%lhDy3;*iwFg!t5b>$uVGSjv>6w{A$3=xoojOSDL@}_0fW*?)WoxGT z41ut~dEmM^-QTY=Mk0&+2@K>{IHk6Zju7BDC}gMnO})K^VDv&Z1v~ZSq#IUXx->&u zSC3hqN8gK~En6N2QzN`8HKTs4rTR(SpP z&WwiiK$=>etnc|p^_fHR25iM%#Ixd06{M-L{9 zI%U#_NzH{*3cjGZf)IP`JVO=-ukVCv3dk{2_ zlHUvL8@GA!sAE%7s3|GKkPi0tngs@EC~4N1s)`C4KR;AzTwL4{CIX?c^kemjqM|w3 zAK?F)!hpBM#l_+CDEVy1VG%5LCI&9H@xkH&6~5N6F<>$I71(E%AXkcrh?sXLd#>b` z>%CelZEKSq-+dw{NAb{MzRGz;&+BO6+c$PUB^{md{_JN2C^R%Q@AI9MPCf+U`w!~D zm8Fli8u|3Ry!-oWV_LZ``>P|EzB3@V>FUy3;CvfGqq?p;FlDZ7ROs3ma2&hxJ^*Xj3ov2z%MF*t7<2@c>V$B_wPyQ^ zqi%0_8-2*-%+H$y81A~C!5t<*k+eHOW748Q!V+V;f8VkHi*jLMA*f2=nGOvd09Tvv z82K*pP^x=cW`R<`Uro(o{o~Iux=3lp=tzd$uLroSF^~-GtGPRo!dGRoRBSel;AE8WrSle&&Q-H~Fo)xSL2jQu$xzEQ~fmt`zOm0wpk z>7G7Thh2a(K4EA(_bW`;;f~F3brRmsDOq%wxQ{ztyojUs(~u0i$CUoZD9vfwHBwz# zQJ-dstPNyvj4NX#tc1a3)+2Aqp%!xJR{537M9uFUv8jFA+}<9o_pU$6&9RIR&&T%G}#o~<_Mc?P8! zY%2)nfWS5G?oBO@Jp9u;`|FLVW_oW+5qq;eDwT_Y!KCdy3uvQDQd3*258D}>#%=?yZ<@t_I^yqxh`LdKaSqGBt-am6GvN*ryAGT*-um?AWOacV zH$S9#|A3C1)1mQZys_2SlE9l2b0kq$K)#$i%4csYBq1dos`vJ;)r;_E=j5z+S|$V+ zAQfzs>r_Hy7r)`4^K`4F`kbVtTW%@}rK;uFD=LG02ij+%KE{V)*Fo`ZA?rcf31{%a z^u5pafKH$`_b}lq!Uj$fjJuRR`Apm1*PkwMV$;^@rNqY{0o!IF3fU(u0>t^|&6Sli zP)f41vjvjEO-JVE=i$S_;s)P3HV)PXXl^3V#}ssmtCl#3?-gVKmE4+7R{1IBY-sfa zi-+ybojZ_^ymxQwyAL{Dq{Z}iAM8|E<$ivCQ1472zXb9j#F5|?hllW#fsqk%oT3n& z<=_Z!o^zh9Mnu;Ys`Y{`16iq&K6Ny&jHlTGp6Lc$WKdT?b2qMgmerR8HYpspjXAAA zQr^fJcYH_x;#f})mqicFj<9S7A^zPZW?j`tE-!(&lun)MAmnV=t;f6gS@pwG;Yrd` zH&WrrndJjfc3E$_Ru&Jr;yYkkusqA07D)l?GE`g(TzJ<_F^;%7;xE7&ZhIIj^zFh* zd3)QPBqMvkYFwJ<7XJ%4IrInQ^59p?F$zNGg#T^)@q-M)OZ%`E1d?nAziC20ym8|O zG{%?f;~@S!xVV6+41sH#1^Pk_AfAG4yYA{lj z_^tZC0Ft_e7hu_!IiqcvsuWahYHVagM@I(+b7RprwVYbWxB{fayg}i5xM_%m6$l>k z=giEq?a1ipXYTIq5GN6%(Wr7_F`+KdhxYr&Tl7`L@v(d5=@5Y%7!;6cglJel_1gOS zc>z6Ihad+|6a`;=trN+`&q6#pEsEZ!YHGYtVlTsS=VvFbL2M|C;Q~WYDj8&C8KU05 ze-EuPSi*>$WbV%&UYk)KzjtCZ!==wjIWY3&?vjMUmdjAff!x4m1AIIzgvWYtO!@ho zs4tKP_}ib&-VO%^PbXFhWL^?l+tXK+ZKDl%-B_uB&_weYE*sem!lF`Kx zZ?+zPYQ)5+dfWW*WHy0hJ1SO``Fmo*C%_j z9SJ2B+02pBM#jc~Azz&y+V2n;XEuII{C?My2a?eN0Rd2xVOP0S)lOX#uf})!5FLHf z_O;U$un!g!rjEdRl8I(4Eyg(g3HEQhNfAipOz^mK=M3@;x}{dC&dz(weOUmwUn8p= z7jM)Fj+H(i0tgDe4E#u4-*X`NIf)zQEeCFixh$o#MtQ;Pg?t*nmLPGHpEQ2&?lC>0 z*%*3?1Mq>rfJ(@l9N+Rb#eOp z3Fk~09Fk7Oi-yy5;B9>teO&|~RO^g(Lwm7*8=98TCHpd5d-y31+$``leL7NsU4{DUOzUG&XGf5#w zALrA14^$J313zNDeGq12+kGgK2vCBPogMB1fbcX+fjf6H#=fqzu`hCS*?GMrw-@_Sn8TFTbNbM{xv#h@O0~7S-gTp735D5G0)ERa*KmK znrGGc_d89$e_0`*bG_SP3Q2m;W}~&jh3=3pYxCaS>jq_mJw1p% zjXkY*o`{klHMI}j%|ldj#$lG)puO+n;x4hU#^BnC?W6V3_2G3)_dcqus&ZL+;DCTs zu1wTVVb4tOvi_~1m?yXK;uS z|JpU}T6gDS>bqJcm!9gyX=hsu8v%;ibC&>R0dN4MI^Gi)Q4%ybF{cD*010r{rz1f@ 
z*bu~Hihtqa0ztkcuR>r;r-xfWA`-ea}ilLc+|9vDyAn0G^DrbkE8jUgPRSb$MQ1R|%$@g9A4Q0~gneks?z#4rOp4 z*_=J0R{}=kIwK<{j(PV^tdFO?2#yyFY3&k=NFYAqNMd4QSifHH%~FBT~w6oZKaelJ#2xz;_s2`GC(B)ftdb*_rKH_^9oUlf0XKO1C zKK{YxW=mTeIMo_u*0)g1bab;Vk@S<3lTgp|72?5-eE05k#~{sPZ0KHjdbRF=DhBE8 zF5yAg?Z9$~s<4y%-P_rTmdyYIoD~(+JlD&UtHi9rI`CCB2N(#TxT_6V2(IO*!cMp? z{3lRkZ>RTaXlo0?UMkhHsWo`B%-COo9jU`n4{q&ymS-U9+_hDP%z$C{RBozjC-}i@akyx=g&VUCMYK@ z(LE_^hAv}&u+tE)Fh)S$`9D1^OKFq^8E334_v=?&S_!0|UjD`Ym|byEk$`$aeZ6QS zm#YXFyMVwk9IBnQHP6~eiDh(yfon^9S(!y|8h}X%5MFKy$XEmi2WM86ArPKgaUD}x zean+>i`_uhg0%a^pt%t!()(fXbK^Q z@@-*yMI0eU{rk>{c^=#p+WFPh@&5i2(1S=xNvR^@5)$0^S0P@v4V^)m2%?PnI+YIR zr*8iy^l+1h0f4r#wuZqPSKM#0AJk5z<>Yi~Tno#~`68OxSXgA_QY2-@ICS{yhabux2;Ou zC#sx}j*gyY_cu2$LidG40S*q1CTwJ>^h#(WaL0ynwVM}r?$&vco))1Y)&>?v_(1T0 z(1Z%>+}s>&`}*_a=TQt(Zi`7UCI&c5T~&2$sy-#{3xw-v{9@TP9ae`6q4CAb?Zch9 zl+A%t1Aw&@hU`=(vFOK-A0c-M7{nLpzL@k9ruA+GK(QME$)=I(lB~ld&}H^gGL+={ zEpCQ~4@ZoGBQBE!Bim3uU^BxS4M7ryHjS0p^jVbx5$p$%OEE>HlM637PKhI~e(Tq5 zII+v**;bO5hlfzIr#nzF68)*Pd)E+43c69A5I)XFGQ`M2dP@YtWwEAEem*~C(W{C} zW5&mifOHKgq#_rmI|u6%g5KvsDCU19CHdJkVqtcvGnSfwpwP6Pq$ao5;v6zYaKDtY zRRF6$AN+QY3=cBhguxQ&y|5#vy#z|N&ZZmC{gj?Pi}v;P-6NzXA(l@Pc)-XQ&^%XJ zQSlKX98mhPY5h@!VA~{QE;t-bRKMJctZ*9}9tNHtgxucS1F7T@Hr&1Euq6+3G(9|_ z!MH1_?;Tuv5ZI~kGGIOGDk?O7776Dwfz;48jlJgeFOG)_3JPG^!(_;%aALu;K3w=H zy|+ZqtN6^5zfvlUoS2BH1Y&2grZ904D$2cK#ba|5qj!~%`VU|nPguL-PcQ35vpt;E zH4^q=*ic%z$sp+~F_+|BAq~Gd?jcUNl8=vAV$l`+1r$y);tw!qM(D?DLR90#|BHQ+QkM&hyJD89dzwuD;V zXZs$M2r*KhJ%K6-6B84NDglv@koXy`C{k%NR+d~xmk2-mo}Su1=D8niIzp=NLBPiZ zy7H@I2LvLP=A=pjjudpQSI3~L-+l9c*6Y6H=M5?A>qoTb=jKinn@6BhmvoL&p!tUo zvwb@VZJK@e?i~~e|9}961fFH@ixd6DHY~)hf~Qw)IB;K~osRpw`gQaG9w1W+mjQrY zWAVEy2&y}muh`vn5}K-QsstziNp@I8pDaOmqY!Xn=H#@4$!91+5R@?d08n;tU|9csfth}NkCYYo(RD0Yd$e0T1c!as-803jNr2T1D4g36#m=jY() zc$1KD^dbzzokZawqxc&LG5ubqp_P`_)&@!sAO-y^d#*Q3Rvhi7>XMR@D2a(5*o>Ae zEm?%R#9Q9E9%=)L%|E0(lv9-oatN`Am9Z7)OWgxRVE7Ph+7(AAz%o&=9RK*BV?pu{ z%BaaDCU}JH9~ekVLXyzd3+>~N)g>Ib{-D|Z&vW|{2M)w1&cE*+1_IwvJfycX!I^Uc`nofQ$c@+5f*hg?zs$C@-IY9NxS%mD=0d_SYvuv*L@m zfXair;oD$PcOTp-h}=d=Gf*^}nw!Ju-j@-U+`)YPi;0(04IZqB*Ik94`^rTAOlsNr z`ApbGkSYd|2qG|?&Ksy7%W<+ zcwX@lqd`t*3-X79#{TmpJuMA@p+$n3rVGGC^efV$-~KZTfOP{84^IaQHYD|Klap`W zgCz{;06Mieb}!Evgpp>C-f3%@qto)hP%w zoBW~oc?}HHBqol*B75@W36Qpb_lip6;)Y-+NJqgM()T(t1%?O65(0A|%m5ODe+A-w zr?(~Iwe7E!srq`UbOK!eBo0W0HJm<9@0IS`O*!Aqf|(eIyABTrwR`Uks9&6IcN78e)8s3pjoaBE)8 zw-5-aPXpwz8?PX^b;}JDF)-7QkB)$;0ZFadZexS_2M8qvDKYQv?9j__b8xIT;)~X} ztZAvJyble%mYg+d3$hC&chxd;-aPy!meO!3?$XlR(2%nNSRI0B&k`js_X z-j1Lmh`(3a_Z#IKFjd}GdSkh`LDqmDs{oXaXQ&xD? 
z6evR?6B7MlNcesO;6hNmK-{yGg>DWfM1TUeFNQ6j$O}FCIanP3F=%9c+I|JGi!o*n z3<;Vac^D(i(8J^IT^o@0fI>ZECW6GI&Dl82Q$Ul3!2L62*rGgkWND&#xxMUt!rAco zhwCt11Or*3&@4#VH3}^I7)!N;J&&En4`GU#L1yQ-Ev!u_x;M_r5s2D^wdM*5DXB~4 zC-crAka}2kP$wWg3U(C|1@bK1v@eyV{BULOLG^(-3_ql#GVE-~ias~ZeG3y{V6seq zDbkZ_G^)LEiIrVDTjBUXU}-VohWMgd4(J~NHz-3$mjD)l1wn(d==g9C*k@aJH#Lt1 zsA_OI;<@iIGq?PyzlRVzDE6%8q^F;Qr321IL z|OAK^1gioO=Ysc@DXs?r%!)J-Ffo#*|P)ij-VYt+k!)Y zFI%Xpt2+lU3~?DgGQ3Nkn1O>>T3LNw8{RE)Vhu1~*pt}v_CIdfrXCg;$|LIg;_3K^ z*YGC{nqdB~EXE|UuoIX!FALVLmuoB~;#gqJBxU)!4$MH^gh9&x<;y+(KSr8$y+Wb{ z-4W2?$6@3Tprye!b#rw!F*D<>Zu#|?+S3VOGMLPmSXgs+{0aaAg7`X7=Q&og2vS36 zL`3sdXx2FSK9_GXRpp!$bM0nm6X>BJCS_7zz>vf#`~a_!#!u~=1_22Pe8Y#Z-@EAO zJ{Z+;g%+$vf{p_+MU~SsWB}wZ4fBhQi}MJER`?ibAFxfeC^n_vIFAA@c`|4TvvvOp z_jE5_fa(XDBrx(q5o?IBuW{}`kt-wkvs zFzJ@LFiS^I4{^vBjAi<;fnhiZI*%iyk^PW*I2ec?CMTKwcx;R-;I-@PD@M&TrFaxM zEXexx7)J>6f~*d-z0C;o2-wKarY2Q&^*LfjZD@|M@7@&vl5$=doRJuVN(%GM5Uap` z^X1DIelR5M=x2&RKL{JbMQ*wR1x2kH42D^*1b8b#fXi$)}qT=Ju zyKlhW-v>o8iXr~v$G6vTpcjC(by@Eq+x?HnxH;2Uf3hyvSE~8#hn}|f^G1CZ#COJj zrMw}dgZ<`R`vO!!u-AC?_X@#ubcLz_0i#&89M2hl(oubgM*(1Y_+|p&xFds#|307qT2&~zTAq$&mE;*@ z2Dqe6~8awjA<(a9mtmJ?;cfQ4BC@ zyR{4_nWLWn>-qKaT}iloqs6!|PipSGCY%q$GtklfaBg-K=V$PKT7LMb7|R3B?9Z9! zpQPvf`eXqkz@C5>gBo80@pZEy@kM?PXu@=KKib=KAXd{ncUQZNT}X&xYO~BqIP(;~ z(gP?_BDblmi;K$>MadU$%PRllf0|T+VOek*M z8VRlfmBh%%2wXQ9_=5332=F`%Z)^`sY;S4#z-5RsG^A~|=aC;891JIafNQSL7W&B; zF2uiNZ0M&}%iIaN{*;KGF(*~;saG4dfk32+K$*^G^a4l&=`+}ZkSAZEeE?r50+v3g z{bn{cHn3V%Sl28vKnsA1OwMOZA>SvP!7VI2(bwnrFAN6>hIh*aySuv!?Qt6u)ub06 zVqzY^=-(-sR{l_V33gQ_@K0#Ug(FNu*r{gfh5+<=_e*&ny1Ev?Z#>^$QU3oDZ~oDI zOior|Bq7CPTZT#DbhWtK+W9)LfAAK{Q~|s^RvhY@8hQr${YYONl*)lBmJ=`_2#AR# z`<#G8fukhfH~4BZ686j5XzAtEx+}>L-Kyt0bM+rx-V1aNXb1p4pjW=mgfC!v0qq~; zh@BXL%lc;g3opIb6}}STpXiNf9dyG_pT0LX8pB?M=G5NZ9VeRsnTtze=&%s-&vy*b z%0v-K0uyn-cknxp{b7o&!KslYwkCZ>c0&OyUWR0 zKJ2(PD|y*r^8wEQeln=TOw7qyf%GG^ZTPB}O^`J8K?nv~52+;Jh>`6JAi2R1`5!iC zegT2mn@vVrKzpaWPHZ5%bG(ud;{&^6$5i0rz)Jzb|AH9%|E9X&HNrf~DIAZTX{#(k zEM@RNCyW}4*ip=Ue0}X)12w z|MF+Dzt+HvNk8krPsM5M5yk9q>n7(8Rh%*593i#hxYAAHxnTG0N&m$Ok z+}Yj+7NrMPH-J{i9VEQ%_c0{P)qmer6oDp|)dveKG)mgch-rfHiq8^`QxiF9umm@J zu@&nT0%9!Rq$>yr)4;c3fkO`8v}8o`^2LiUd3l(xA&&HvmLH0Iz}z7OFTupu2D8uQ z^Np_t|MZfwqM{#M)=*~uB&cg>0Kk?0f7<%)a4g&S?~6wWaaZUrM3Rck2H6!tnj}d^ zWXs+oBavC6jHGOtnc1Y0P)KG**@UtR;r)1?_mAIuyg$eB9QAaR`*Ppsb)MhPya;Q1 z`{1A;107ojhvbt1%{16?u+?!a*}DqxP`0oMcX&(k>(qgwP9MZ}f{b5(g_{_tF+E`b zT+B>xWJjEQ-1PoE?dHwVM@(@nJWh)M;)IlJj9DFn6`(f=PjK#aG@S0uot^Iof5D1< z8dn7)EO8;@3&?D+S1z+d$_fg8C1fth`1s&)3=YgKVQK2?XyxkFfItD)I_8VJ2R3=| zi%#$CChLV)NMDi<{gAhMUb}}BcOt`eZ)8bW-DE&;Fvy-ZlOx!gQZzC)Qb}cH3Ka~F zRU&~MC-jf&oGvb4Z|N8A_}rNM``K@gIPZ{6dH z<_PtU2)BSCrk3>PxaOgKMiTk_i#AgmwyL`drwgbYIrF&u3Mh+`M)#07Xax*MmY=rl zGqQRkoEc`$nNUb}*t>Xed+Fhpob~&ydrCPTzNk}uzAVrx^F~*bBxCpSnV*QzuAkfz zEJjR6Xs#`csU>*?7)NcFc}P?6xvo+rp8LxwdkV!;BgT}8i*-sXBqEDh#`)sfn0=B{ zz*C}^X?(3->e*qgsZ-?IN5>r{+_u%4cs2eoR3dGSqsljbxjw%unXJ4AFGpV>|EIB{ z*C6%TW|I7V{aFPadV20Ere6&7|6EH{i&lL%Y zmNWY2K05^VcXW1cpuf;UWad6HzB~-AA7JHSw=q>%U95zISwmsYr|L(ta@m{-=bN`o zuIRXqC%<{~1|b`w8YVTy-BRKfox%^4428*X#Ayr4dj1s;t2?8lgjGLeE$%eaclP{w zCR*Bn(9o2k3CvjHF0(xTwb3F5iY_iCv9Z^#UyoDgTbQ3`4tk5k6v!5^FYxhoOfFEJ zV3&4Pe|`a|egj@WmH-;X%7y%dWzQG+;^j-cTbP-k|FP10^u@-yyvIVCrAvao&pnEWFuZva z^sPg8fhp!FI8S(KZ&Z6xKz{Emu`|}wdxmZ*>xxh_m#_oo0YF*FPpn*A8& zEvOFnm3Yu`C*`(iaqe6wNYIwV(^Y@I2jNqE_vTG*542-+DnS8CbKV<&du<7HVa?z# z%+*<$nXEV%!Q|oMZWhf2-D4sua7ZC~L5*h%g|h_6d46l$6jx%&e4NSfu6U{L zTHSel;1O*U-SP=ed&v=*J~^&s9@VNG{&(dW8d-FkH*dWs535{->%xnmpfk?S;>3Z- 
z4{-Bk>QqSZ^Pl01G%_?KQN6^n-XN2pS?mf)xr`o{ZrF41abTj1L!pVN2AGmNVE)Fq-uWk0{I=HikHcJdvSFxKjJ$9dDnV z!xpTN>FgI5!yv0D-qKvblfHuiBppxd^cL%OuVX^l& z8NJ|9w}}=!to`1Eza7bJ21sb8W_dwBf?{seT=bli+306DCen zhj5MwJ?>&<+_Oc zF~7L$aryNh4Acn;MWG(|jFPIvZCabVmlX*vWzkig`eE(#`X@cC3R!=bZ_EaZ>|b|R z=eg24KBu$ul2&5rY;I;#adYi+o~f}HU2{e5#`bpOiXzXNfi=LB6Y;+=_i1X3ygNA3i|8*uVrp0Q4@MC-fkZ zT+z~U1yNaaIdJTNgr`S;P0()&IYX<5QU;ANZ4Df^;_DB{7GLt~&Re9Yt`&Wott(1V zebZ~zC;pD@yeWsSczV24)9P{)FBf%O+p)hi?;TwCrv)ElF)HSCxVgAwX}&y} zt}YsxuB7FaHzgE zqvuEwPmcCh>DK-)9~F&-|CDz3hpS6;)3N(ea@F^a&Qu=fl`}Lp;8K3>wg1X7_NvAE zMcW6a#aD*q;%cf#Z9}hnysyc%snL0HH$^e4a>CU2A{dYx`~wFk*|t!LylGhaBHSM* z+fU)OwEbk@&rKziWm?$^Z!V?T zv0-gLt^C26b-RoUcLzHk^xH|Sh&XQ1&50_sHs4Vn4;yn?*$P;?bm^C%@j=h$TuM(! zOuVkGja@dB{djA0Gl`QkY*74hgEZhKe2sE)#Np;ZRbDrw5H?YIpL~uWxiH<*jXWLT zeOp`JvoZ3i*pk)lPI&$GYe8TDk=(*{gl+yl|O&5(F%bLbw%F*K$11P zE@Vzj7xiB5;-O(=kpyhB=WJ|km!WL{JY{8N$>Kl-X$r=y@Wl%=h<>2&Krmw}0HQ}) z2$MhV!zVBt!b(Cx)aC*VVH7%qf)%77AeaC+{p+`P#_b4NC*_CQ$`uH)sqBo#;L(9t z)SY+k^>vrQ-OrE2on5>nB~kirhJM~UhGnbVLB#xlP+KQ8H#GUeD7dHtILg!6Hx-29hXGL}??wY91h7X?w5dpfV%Q6~QB zJK4&=FU|Q-3DLbJHU#}-Yp$pn`51FFNP?I4xKK3 z@SpGcBdJSHm!55Ewak6M;cN=x-UDnK`fE zOF=ODCw5Gx`$U{O$V|t^#zv4qzDHiVdiCDmD_`G#BAa@8VoUc{=$tndglX56gcf$h_=?gDn2_?ce#&&bz!Ur2Qq8(YZhlIAz> z-XXh#QXYMB5mHKex|=mf1RTUkGXs7v_wI~Z-^(P0`YQ>-g zDxn3H>A#!zY^B)kE{G!`sq62D<&=F7{g2acx~gDn8p78+*hTZxG+Kc_;dJ`0g3)|V zf7dcq#@|i+SKfqje6?UXp>u9>H25F9H@$}{B8bfRVydI>r=;L-qNGQQm1349C zY)#*+p?PWTo*f3qyg}`Q?nMj%q{qLE=?9#QzmJ-?>ya0casAw)hujt6KG!71JB1hH z<>66{If9x&PH}6m#f6Oo02o3L-mi*^iqg_(Yjat5$=mPlg#4p}3Z-mkjvRsUYU;Pa zyOTa|AI;M${y@>fN`5Zx#iQiIqwM2e8AgACtn?ZOg=zCcsuSdSD*d!FkCe$%Wn3hV zljzKY&=@+1nrd=8i-8Umm>^U8w^CLa;vN~fOR5keT4e6``(q5=Ya;Xy{*Uv7Cnx?= zn3P+8szyd^lNsXz(>2L#=>S2$lE}TQLUYx$DAuajqR1sdCwN!Cudh$>I=d#ti|BPx zCQ`*NK9it}R62?MG0T0AK75{Kv7?n|@;~F?VYgRzf=_A&oBOVVzqgop8?5CuHf=gI zzhz?p-9)h0Mc-Z=U#D(VOQEHlU0Ejm(0$a)N$Vp=7cgcJwXo88YLC&mBoB{&Mxr`~ z`lXC^^V|-ols{}9ixKw7$do}8`&^zw^MS*ZEi?nIp6m|Q)ANecyYY>7t5zJAcy^eS z;<#kBpZmc9C=JJtN1sFTyazQJX66Ex_e}Qmy@lL?13RskqVc8Ys;f+VPRFDO1Clnt z;%JfMI2Q|H^{1(>j(mp7)Tjj5&hn@=>wp`KoY z&@iIJ(}QyFj<6-Mu$cZa2B(~>eeQE-|6lq+vTsx9AWk5#1?-GKYCK1eI-uef2N>#e zdJ#r~cS*8{Iu-;oQYNjb7O}*JA0aA=T%n_@qvJW$RUCMWAegB(o)PeVE2Doc<7O1s z{RNwg+DKTcKua(aea?d+sJO&GL3dr(bMg_BW{4C>l)AVakNl37Ip+I!c4ot+k~H_! zPBxXrZD;fG7XB(3-v^Rbx6J5E7teE?AVD1KEOp?&s}Fj54y8nNeu}>YEi5e&(S)K- zp$C={mtD2K=7z2_xcAYVTHm?LLNRQ&z=OYqJ$rjnRC%v~x&!KjuN%l`U}gqwoqCD) zxrl+0ks{dG!g(O{gg<&T^8LFgeo7#)tyC|;k6;=@;P6EtEb}mpOylK17A0O9`4n&U z-<7My+sW3wk;*B#;@S8)+ng)mu)d&f>N5}NMBS30+w(x3FI@g332<-YvT*^KSy`A! z5QB+(@+6A$Nfd)$jp2A}vQ`#rZ7#%I=z^`|7O|ieAeF<(PRN;lXR|8TXWm9dAeWln zGzmfg+D3dlT*fjBWKL<VxF zX3#JGuKmV$lw%bWKH*bFL3eO>%a7Na6J-T{c=1%zUhx@|TI+5g@@d!D8@fmp%!@M( zKeo-=)H%w8D5~LTfOydr78?4osVVtu0o+IowU`X5s;boaZq}7qTU%c^z)W=lWIc+_ zn!P1acp7_3Ki3lGG?Wdh?U4s$fd#Yq(FSY9gZC%ph{S;d0XxkqQ2Nu~&q<`|aq01K zZB8(E*3VAdCp0U3q@|^}TD9W4aP;%qLM8&yPccv@5RV`5Tvb=cx7x#ZJzZV!YTlc! 
zuH*lLa=aGX+p<>>(2$FdjfhZ(bNJCCRyH=|ZeeSB7#@x$u8bnT*LppF5nTrLRE_|K z4;Sb}TNKyK&Bbon48w&pWp?@R0_BE$V;!VK- z8LyJ7hf;h$P4m#oTh%#x#Fq$$winL31bg22@pegYvH8_p?%xpXz`QouRG zp&!$?C=xi;W=|UKVNTrUEjNGq{^A}|z#46<%)i@v_rIGx7+_lY+f=5U2|XmbKP!*V zb&3Cu46)N)a{BwHWYEXv@R`gafj~;Hs21OpEx!#?!sbyFaWq&aWaV0OV?a&YOaDac zbxAcE&AIf?cYd7;{*b5qROH>KChzDxi;15OR;vPSbqjF^YtI_#onm?U^83|~?c2C1 zzxG&B9XeK)_+l&sAl-g{h(v9-RZiyW;)ng~6%ub6j#u}#7#2n9>RQ|@q)GmAs{Bd% zpq|j^ezPruZNuILDVHs4g%7p$+^oGf$)u9uv>x-tokH=!-IT>!)pdgdYO&iVpXw@{ zr_P!=HBb9Tn~|o@e3H$EeWmKnmHxBJG+rXUUr%{xGwPIA8P^XbuYCL*z29#+nDhJf zC|WF3w3)Pef`P zv5ZWoG5KOiIdKoz$lMG1d)wwg{HJ*5YnQGr6g=+|rOcsD)gw4}{A7v3kw z{NBu0D$a&s&UUx;@hcMVd$$rk6;JGvj`CES>sP*@p4yRWbxYcA@=NaHH;hHpKFnQR z7Rzg4;bts14HrA*{Iu+3%KnT+YR5h8^jvgIT=cs>v-+0txlHyo&(e~qMu9l*0EXEF zp%#USSbNV~jBcN2bUsGLWVY3sR^OXMINE*xk9{41`yFV6`h?Oo{8j=@)J&td`Dr~2 zxA_!UcaL*N`K0y;*qt1iLqMS%?E3#Q~LRfwB>*3UCU}UDR3p;^IxHo<@|DKNXZ;;O8l`Sks28>_S82$5@UTW zb05!DgqsmR?d<*tq%spgC`xN>R}7hmZ6)DrIzoJHbuL~D#;Lb7P2}$TKkTeI=JJkA zkbb^W{GhZqBP+waEjqIRBniO5c@lAwoUGGXm_9fAf{;UK)wHYVRKB!voD1c zq03}!6w;J&-mmsOI7|0S`IBJEcAKyBZFRaYB{l8#GO3t9I{QnbT}7g7` zYR&Yyx}1?{(fQ1Ys!PJTEI-@nHT`5ONklyI_pWe<|{I-oP zU{|40!+9~?+!K`7pXzy-o$aX+39e+_Iv{hwuxjv!r4P@;K639}$q9pJB0>qLy(sRu zmT5?(?^Oz?Y7MNTAa1MWKfHW?Qep8}M9`5pJ#WX%lJlN^8opC8w458_{S1MM3ZD^c|$aXv?#i(%P|FEt`+R@q61;=&`#htbv%wFqmyUaI9@k}Xu-k~tHu4TtGi_jBN>1;@g zyWrYnZ=LN;=`!VgUtTnnv~Jpda`F26V_hTMC9@xD^M-aB6uA24RgSm%UTXOLKD(gH zPn5WDts_m0WNuVZzT_{fCOW-Oam$y9EkvhSm9$Aw=F#{v`&r=!6-_VsOA35l^5qt% z+qNoO1!t#aohjRU&%@mxxJm{`_B*o){-y645bQ_--xKGQiGw|_uZ!aa)_nlL%GAb=v+{TLB z0n8+CIeQ_?mdZtO8}+S2)ZC13AfI_5(YuV9|r^8IbmKT1?vY2`aw%NL^~Qje9t zpFAMBfqFa}&2;JCrd|CtaX>O4n+COWg{NsLTf~HQdbAhWb4ClANpV#_6uPZaS)6=L zXvlY?zn;3Ma>S`n8gXejZG_bTAx6ebTzgz&FnCnJ?#U~JQsO5lkUzk-V}}?h!U?q3 z8?3Lb&X3k(SO@mZ*LK)=E=E6nY`GAUVF*M3WWse8TF+pS#*KdTNBMHxci*RVq_^KOK7F^q2j)M`_DYQR3_C^dNC2^W&JJy!o3HU5 zDOS?=(bq(i65!27(iB>m1`9H@p8jSHcs3G1Y|&?>;wvp7)=dHurc<2{ToE<#zTmjJ5pk{rT#jjs`dqKM)HEwPF6P^aRe{BcfZ9&c! 
z1%nzIC{cvKaS}r#qrV;erBH(4Er(MAQBm-4&F2SU_%pV#d8v75#I}Uf9bGIzJ794x zvg$wA{~DTQZEY>Gu&Cj4{1J8Tw}t-@;(2Nch_e2KarV2oCjbaoJ@VyUB$6;p%?PA} zlq)TjNLQa2A3w+%nnhO<_BnQL@Q zd|)iNth!a8=5>a&)}PWJy|C8)|L}3+Kv(V@g z1v#W$Blw^>;;R?}J04tt56m$c3KJs0!ej;*y@ydA;R&tO^wXtu zdpkRNM#f5bJqb`=7uPfJ{r(Lp3K17BtG&fygi#r^@?rWACOSG~yr2>jTqIg>g=dW( z)*Av4^71447hZ3u-T*@({5={OxFxzARr`cakn9gGdwJF?N$V79^|rOiSLxl9z2vIb z_gZpdNqEJYzx}sEM2|v`=_tAV{W&+)1v^zU_Q=-sbMabPSR~0Ib|u>0a%h$X%WIQZ zw91nBPaOppH}?$~)A4z5(Q+R=c+V7?V@F#XxhbJp3LqLuS(W5j0Au1l7y~kS!%7Mk zH`YA0<_V_V7;Lz4Gj$*b3wpC0k>$Q3YO54 zyQiimIm-j+7mYTyHUF2O{OK$O#{z-~Y=ujZU0`rPB(XsF;x$;7kO(wuzX=x|;BvuV z_L!_T9v<8Tq=gLmr4WM`!0t0b!EiJeJ!@6IL%XOw zRR3CE(xU>pmTeL>&Ngty{Td1c@M!QFhf(p)lY;McFIrfc5T0X(oLHHl1W*R(3W-74b+fZA$ z#s83^Y4yEPrvh+{^pk_rsnQs9@4wVk-82b}zhN=*8AmKLxoccb&GNI*k)vOajeQfa zGd(k-HR&&xTSK-+uaeg{&WjoT>U@!Lao5>Y4_{6(I9u4o_=VcJ(&41!*#-SZWj4D4 zSo^awgAGm=u|NDAVX;BC)N9KdTSH4*=FqMB4wnpQoPx!jIhMEP_zy3RTYcL{@T=9)uT8je>H+mHnK{Vd;I8cCL>-^zOjb zIx@mdnyT*US;-NxqHoAO{lHR3rgik}f@9|`uBqRs;$}sNkP7dADXk#17YxjX(W`k`=x&TbEeL;E%*c?F!SRr)cYaK8D;-Fh3Xgp From 90754fb0b8d36eb0b34cba6da48f672505470f0e Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Fri, 10 Jun 2022 14:51:02 +0200 Subject: [PATCH 0074/2550] modify readme doc --- website/docs/admin_hosts_maya.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/docs/admin_hosts_maya.md b/website/docs/admin_hosts_maya.md index 0ba030c26f..0e77f29fc2 100644 --- a/website/docs/admin_hosts_maya.md +++ b/website/docs/admin_hosts_maya.md @@ -160,7 +160,7 @@ Fill in the necessary fields (the optional fields are regex filters) - **Go to Studio settings > Project > Your DCC > Templated Build Settings** - Add a profile for your task and enter path to your template -![build template](assets/settings/template_build_workfile.png) +![setting build template](assets/settings/template_build_workfile.png) **3. 
Build your workfile** @@ -168,6 +168,6 @@ Fill in the necessary fields (the optional fields are regex filters) - Build your workfile -![Dirmap settings](assets/maya-build_workfile_from_template.png) +![maya build template](assets/maya-build_workfile_from_template.png) From d3c2dc57d8f39865c3f3900db1be1f911b6436c9 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Fri, 10 Jun 2022 15:02:43 +0200 Subject: [PATCH 0075/2550] fix linter --- openpype/hosts/maya/api/lib_template_builder.py | 6 +++++- openpype/lib/build_template_exceptions.py | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index bed4291e3d..e5254b7f87 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -43,7 +43,11 @@ def create_placeholder(): placeholder = cmds.spaceLocator(name=placeholder_name)[0] # get the long name of the placeholder (with the groups) - placeholder_full_name = cmds.ls(selection[0], long=True)[0] + '|' + placeholder.replace('|', '') + placeholder_full_name = cmds.ls( + selection[0], + long=True)[0] + '|' + placeholder.replace('|', + '' + ) if selection: cmds.parent(placeholder, selection[0]) diff --git a/openpype/lib/build_template_exceptions.py b/openpype/lib/build_template_exceptions.py index d781eff204..7a5075e3dc 100644 --- a/openpype/lib/build_template_exceptions.py +++ b/openpype/lib/build_template_exceptions.py @@ -32,4 +32,4 @@ class TemplateAlreadyImported(Exception): class TemplateLoadingFailed(Exception): """Error raised whend Template loader was unable to load the template""" - pass \ No newline at end of file + pass From bc9c5b183171b9ef03b88062c7536a5effed3ae1 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Fri, 10 Jun 2022 15:22:33 +0200 Subject: [PATCH 0076/2550] fix linter lengh line --- openpype/hosts/maya/api/lib_template_builder.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index e5254b7f87..a30b3868b0 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -43,11 +43,8 @@ def create_placeholder(): placeholder = cmds.spaceLocator(name=placeholder_name)[0] # get the long name of the placeholder (with the groups) - placeholder_full_name = cmds.ls( - selection[0], - long=True)[0] + '|' + placeholder.replace('|', - '' - ) + placeholder_full_name = cmds.ls(selection[0], long=True)[ + 0] + '|' + placeholder.replace('|', '') if selection: cmds.parent(placeholder, selection[0]) From ba1abf8b15e1476b430180bc63b1a3801ff118ef Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 14 Jun 2022 10:35:41 +0200 Subject: [PATCH 0077/2550] add task name field in build templated workfile settings --- openpype/settings/defaults/project_settings/maya.json | 1 + .../schemas/schema_templated_workfile_build.json | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 2e0e30b74b..453706ff88 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -722,6 +722,7 @@ "profiles": [ { "task_types": [], + "tasks": [], "path": "/path/to/your/template" } ] diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json index 01e74f64b0..a591facf98 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -16,6 +16,12 @@ "label": "Task types", "type": "task-types-enum" }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, { "key": "path", "label": "Path to template", From ef7627199eb8297196c9d0a778e222132d742be2 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 14 Jun 2022 11:37:08 +0200 Subject: [PATCH 0078/2550] add a task name verification for template loader --- openpype/lib/abstract_template_loader.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/lib/abstract_template_loader.py index 159d5c8f6c..e296e3207f 100644 --- a/openpype/lib/abstract_template_loader.py +++ b/openpype/lib/abstract_template_loader.py @@ -160,6 +160,8 @@ class AbstractTemplateLoader: for prf in profiles: if prf['task_types'] and task_type not in prf['task_types']: continue + if prf['tasks'] and task_name not in prf['tasks']: + continue path = prf['path'] break else: # IF no template were found (no break happened) From 712e1c6707a9b0b57bbf1a50f108d692c6f9030b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 14 Jun 2022 16:00:51 +0100 Subject: [PATCH 0079/2550] Implemented extraction of JSON layout from Maya --- .../maya/plugins/publish/extract_layout.py | 101 ++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/extract_layout.py diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py new file mode 100644 index 0000000000..4ae99f1052 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -0,0 +1,101 @@ +import os +import json + +from maya import cmds + +from bson.objectid import ObjectId + +from openpype.pipeline import legacy_io +import openpype.api + + +class ExtractLayout(openpype.api.Extractor): + """Extract a layout.""" + + label = "Extract Layout" + hosts = ["maya"] + families = ["layout"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + + # Perform extraction + self.log.info("Performing extraction..") + + if "representations" not in instance.data: + instance.data["representations"] = [] + + json_data = [] + + for asset in cmds.sets(str(instance), query=True): + # Find the container + grp_name = asset.split(':')[0] + containers = cmds.ls(f"{grp_name}*_CON") + + assert len(containers) == 1, \ + f"More than one container found for {asset}" + + container = containers[0] + + representation_id = cmds.getAttr(f"{container}.representation") + + representation = legacy_io.find_one( + { + "type": "representation", + "_id": ObjectId(representation_id) + }, projection={"parent": True, "context.family": True}) + + self.log.info(representation) + + version_id = representation.get("parent") + family = representation.get("context").get("family") + + json_element = { + "family": family, + "instance_name": cmds.getAttr(f"{container}.name"), + "representation": str(representation_id), + "version": str(version_id) + } + + loc = cmds.xform(asset, query=True, 
translation=True) + rot = cmds.xform(asset, query=True, rotation=True) + scl = cmds.xform(asset, query=True, relative=True, scale=True) + + json_element["transform"] = { + "translation": { + "x": loc[0], + "y": loc[1], + "z": loc[2] + }, + "rotation": { + "x": rot[0], + "y": rot[1], + "z": rot[2] + }, + "scale": { + "x": scl[0], + "y": scl[1], + "z": scl[2] + } + } + + json_data.append(json_element) + + json_filename = "{}.json".format(instance.name) + json_path = os.path.join(stagingdir, json_filename) + + with open(json_path, "w+") as file: + json.dump(json_data, fp=file, indent=2) + + json_representation = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(json_representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, json_representation) From 8e8aa452402c965cf70dbb59dfaa87a419c405a4 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 21 Jun 2022 17:09:19 +0100 Subject: [PATCH 0080/2550] Implemented loading of JSON layout from Maya The JSON file is an updated version of the one currently in use for Blender. Compatibility is kept for existing JSON layouts. However, the transform data is different in Blender, Maya and Unreal. This commit works for Maya -> Unreal, but breaks Blender -> Unreal. Will fix in future commits. --- .../plugins/load/load_alembic_staticmesh.py | 13 +- .../hosts/unreal/plugins/load/load_layout.py | 121 ++++++++++++------ 2 files changed, 84 insertions(+), 50 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 5a73c72c64..691971e02f 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -24,7 +24,11 @@ class StaticMeshAlembicLoader(plugin.Loader): task = unreal.AssetImportTask() options = unreal.AbcImportSettings() sm_settings = unreal.AbcStaticMeshSettings() - conversion_settings = unreal.AbcConversionSettings() + conversion_settings = unreal.AbcConversionSettings( + preset=unreal.AbcConversionPreset.CUSTOM, + flip_u=False, flip_v=True, + rotation=[90.0, 0.0, 0.0], + scale=[1.0, -1.0, 1.0]) task.set_editor_property('filename', filename) task.set_editor_property('destination_path', asset_dir) @@ -40,13 +44,6 @@ class StaticMeshAlembicLoader(plugin.Loader): sm_settings.set_editor_property('merge_meshes', True) - conversion_settings.set_editor_property('flip_u', False) - conversion_settings.set_editor_property('flip_v', True) - conversion_settings.set_editor_property( - 'scale', unreal.Vector(x=100.0, y=100.0, z=100.0)) - conversion_settings.set_editor_property( - 'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0)) - options.static_mesh_settings = sm_settings options.conversion_settings = conversion_settings task.options = options diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index c65cd25ac8..ee31d32811 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -12,6 +12,8 @@ from unreal import AssetToolsHelpers from unreal import FBXImportType from unreal import MathLibrary as umath +from bson.objectid import ObjectId + from openpype.pipeline import ( discover_loader_plugins, loaders_from_representation, @@ -196,12 +198,12 @@ class LayoutLoader(plugin.Loader): except Exception as e: print(e) 
actor.set_actor_rotation(unreal.Rotator( - umath.radians_to_degrees( + ( transform.get('rotation').get('x')), - -umath.radians_to_degrees( - transform.get('rotation').get('y')), - umath.radians_to_degrees( + ( transform.get('rotation').get('z')), + -( + transform.get('rotation').get('y')), ), False) actor.set_actor_scale3d(transform.get('scale')) @@ -354,7 +356,7 @@ class LayoutLoader(plugin.Loader): sec_params.set_editor_property('animation', animation) @staticmethod - def _generate_sequence(self, h, h_dir): + def _generate_sequence(h, h_dir): tools = unreal.AssetToolsHelpers().get_asset_tools() sequence = tools.create_asset( @@ -406,7 +408,7 @@ class LayoutLoader(plugin.Loader): return sequence, (min_frame, max_frame) - def _process(self, lib_path, asset_dir, sequence, loaded=None): + def _process(self, lib_path, asset_dir, sequence, repr_loaded=None): ar = unreal.AssetRegistryHelpers.get_asset_registry() with open(lib_path, "r") as fp: @@ -414,8 +416,8 @@ class LayoutLoader(plugin.Loader): all_loaders = discover_loader_plugins() - if not loaded: - loaded = [] + if not repr_loaded: + repr_loaded = [] path = Path(lib_path) @@ -426,36 +428,64 @@ class LayoutLoader(plugin.Loader): loaded_assets = [] for element in data: - reference = None - if element.get('reference_fbx'): - reference = element.get('reference_fbx') + representation = None + repr_format = None + if element.get('representation'): + # representation = element.get('representation') + + self.log.info(element.get("version")) + + valid_formats = ['fbx', 'abc'] + + repr_data = legacy_io.find_one({ + "type": "representation", + "parent": ObjectId(element.get("version")), + "name": {"$in": valid_formats} + }) + repr_format = repr_data.get('name') + + if not repr_data: + self.log.error( + f"No valid representation found for version " + f"{element.get('version')}") + continue + + representation = str(repr_data.get('_id')) + print(representation) + # This is to keep compatibility with old versions of the + # json format. 
+ elif element.get('reference_fbx'): + representation = element.get('reference_fbx') + repr_format = 'fbx' elif element.get('reference_abc'): - reference = element.get('reference_abc') + representation = element.get('reference_abc') + repr_format = 'abc' # If reference is None, this element is skipped, as it cannot be # imported in Unreal - if not reference: + if not representation: continue instance_name = element.get('instance_name') skeleton = None - if reference not in loaded: - loaded.append(reference) + if representation not in repr_loaded: + repr_loaded.append(representation) family = element.get('family') loaders = loaders_from_representation( - all_loaders, reference) + all_loaders, representation) loader = None - if reference == element.get('reference_fbx'): + if repr_format == 'fbx': loader = self._get_fbx_loader(loaders, family) - elif reference == element.get('reference_abc'): + elif repr_format == 'abc': loader = self._get_abc_loader(loaders, family) if not loader: + self.log.error(f"No valid loader found for {representation}") continue options = { @@ -464,7 +494,7 @@ class LayoutLoader(plugin.Loader): assets = load_container( loader, - reference, + representation, namespace=instance_name, options=options ) @@ -482,8 +512,10 @@ class LayoutLoader(plugin.Loader): instances = [ item for item in data - if (item.get('reference_fbx') == reference or - item.get('reference_abc') == reference)] + if ((item.get('version') and + item.get('version') == element.get('version')) or + item.get('reference_fbx') == representation or + item.get('reference_abc') == representation)] for instance in instances: transform = instance.get('transform') @@ -501,9 +533,9 @@ class LayoutLoader(plugin.Loader): bindings_dict[inst] = bindings if skeleton: - skeleton_dict[reference] = skeleton + skeleton_dict[representation] = skeleton else: - skeleton = skeleton_dict.get(reference) + skeleton = skeleton_dict.get(representation) animation_file = element.get('animation') @@ -599,23 +631,26 @@ class LayoutLoader(plugin.Loader): # Create map for the shot, and create hierarchy of map. If the maps # already exist, we will use them. - h_dir = hierarchy_dir_list[0] - h_asset = hierarchy[0] - master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" - if not EditorAssetLibrary.does_asset_exist(master_level): - EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") + master_level = None + if hierarchy: + h_dir = hierarchy_dir_list[0] + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + if not EditorAssetLibrary.does_asset_exist(master_level): + EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") level = f"{asset_dir}/{asset}_map.{asset}_map" EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") - EditorLevelLibrary.load_level(master_level) - EditorLevelUtils.add_level_to_world( - EditorLevelLibrary.get_editor_world(), - level, - unreal.LevelStreamingDynamic - ) - EditorLevelLibrary.save_all_dirty_levels() - EditorLevelLibrary.load_level(level) + if master_level: + EditorLevelLibrary.load_level(master_level) + EditorLevelUtils.add_level_to_world( + EditorLevelLibrary.get_editor_world(), + level, + unreal.LevelStreamingDynamic + ) + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(level) # Get all the sequences in the hierarchy. It will create them, if # they don't exist. 
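The central change in this loader, shown above, is how each layout element is resolved to a representation: the new JSON stores a version id and the loader picks a valid representation (fbx or abc) for it, while the old format stored the representation id directly under reference_fbx/reference_abc. A standalone sketch of that resolution step, assuming a find_representation callable that stands in for the legacy_io.find_one query (both the function name and the callable are illustrative, not OpenPype API):

def resolve_element_representation(element, find_representation,
                                   valid_formats=("fbx", "abc")):
    """Return (representation_id, format) for one layout JSON element."""
    version_id = element.get("version")
    if version_id:
        # New JSON format: look the representation up through its version.
        repr_doc = find_representation(version_id, list(valid_formats))
        if not repr_doc:
            return None, None
        return str(repr_doc["_id"]), repr_doc["name"]

    # Old JSON format: the representation id is stored directly.
    for key, repr_format in (("reference_fbx", "fbx"), ("reference_abc", "abc")):
        if element.get(key):
            return element[key], repr_format
    return None, None


if __name__ == "__main__":
    legacy_element = {"reference_fbx": "some-representation-id"}
    print(resolve_element_representation(legacy_element, lambda *_: None))
    # -> ('some-representation-id', 'fbx')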
@@ -664,11 +699,12 @@ class LayoutLoader(plugin.Loader): unreal.FrameRate(data.get("fps"), 1.0)) shot.set_playback_start(0) shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) - self._set_sequence_hierarchy( - sequences[-1], shot, - frame_ranges[-1][1], - data.get('clipIn'), data.get('clipOut'), - [level]) + if sequences: + self._set_sequence_hierarchy( + sequences[-1], shot, + frame_ranges[-1][1], + data.get('clipIn'), data.get('clipOut'), + [level]) EditorLevelLibrary.load_level(level) @@ -705,7 +741,8 @@ class LayoutLoader(plugin.Loader): for a in asset_content: EditorAssetLibrary.save_asset(a) - EditorLevelLibrary.load_level(master_level) + if master_level: + EditorLevelLibrary.load_level(master_level) return asset_content From b9f81b64ff81c74ef86698ce7e3ce69ec21485e3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 21 Jun 2022 17:11:08 +0100 Subject: [PATCH 0081/2550] Hound fixes --- openpype/hosts/unreal/plugins/load/load_layout.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index ee31d32811..fb8f46dad1 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -485,7 +485,8 @@ class LayoutLoader(plugin.Loader): loader = self._get_abc_loader(loaders, family) if not loader: - self.log.error(f"No valid loader found for {representation}") + self.log.error( + f"No valid loader found for {representation}") continue options = { @@ -512,7 +513,7 @@ class LayoutLoader(plugin.Loader): instances = [ item for item in data - if ((item.get('version') and + if ((item.get('version') and item.get('version') == element.get('version')) or item.get('reference_fbx') == representation or item.get('reference_abc') == representation)] From 9500e08a7d66646b07047b4bad6cf8e80bb99631 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 28 Jun 2022 16:49:50 +0200 Subject: [PATCH 0082/2550] update gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 7eaef69873..ea5b20eb69 100644 --- a/.gitignore +++ b/.gitignore @@ -102,5 +102,8 @@ website/.docusaurus .poetry/ .python-version +.editorconfig +.pre-commit-config.yaml +mypy.ini tools/run_eventserver.* From de5c4bffc46e5e2e93c6ea7b993e48d4b79da0a8 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 28 Jun 2022 16:50:22 +0200 Subject: [PATCH 0083/2550] adding shotgrid back to a realease --- .../plugins/publish/submit_maya_deadline.py | 1 + openpype/modules/shotgrid/README.md | 19 ++ openpype/modules/shotgrid/__init__.py | 5 + openpype/modules/shotgrid/lib/__init__.py | 0 openpype/modules/shotgrid/lib/const.py | 1 + openpype/modules/shotgrid/lib/credentials.py | 125 +++++++++++ openpype/modules/shotgrid/lib/record.py | 20 ++ openpype/modules/shotgrid/lib/settings.py | 18 ++ .../publish/collect_shotgrid_entities.py | 100 +++++++++ .../publish/collect_shotgrid_session.py | 123 +++++++++++ .../publish/integrate_shotgrid_publish.py | 77 +++++++ .../publish/integrate_shotgrid_version.py | 92 ++++++++ .../plugins/publish/validate_shotgrid_user.py | 38 ++++ openpype/modules/shotgrid/server/README.md | 5 + openpype/modules/shotgrid/shotgrid_module.py | 58 +++++ .../tests/shotgrid/lib/test_credentials.py | 34 +++ .../shotgrid/tray/credential_dialog.py | 201 ++++++++++++++++++ .../modules/shotgrid/tray/shotgrid_tray.py | 75 +++++++ openpype/resources/app_icons/shotgrid.png | Bin 0 -> 45744 bytes 
.../defaults/project_settings/shotgrid.json | 22 ++ .../defaults/system_settings/modules.json | 8 +- openpype/settings/entities/__init__.py | 2 + openpype/settings/entities/enum_entity.py | 114 ++++++---- .../schemas/projects_schema/schema_main.json | 4 + .../schema_project_shotgrid.json | 98 +++++++++ .../schemas/schema_representation_tags.json | 3 + .../schemas/system_schema/schema_modules.json | 54 +++++ poetry.lock | 16 ++ pyproject.toml | 1 + 29 files changed, 1276 insertions(+), 38 deletions(-) create mode 100644 openpype/modules/shotgrid/README.md create mode 100644 openpype/modules/shotgrid/__init__.py create mode 100644 openpype/modules/shotgrid/lib/__init__.py create mode 100644 openpype/modules/shotgrid/lib/const.py create mode 100644 openpype/modules/shotgrid/lib/credentials.py create mode 100644 openpype/modules/shotgrid/lib/record.py create mode 100644 openpype/modules/shotgrid/lib/settings.py create mode 100644 openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py create mode 100644 openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py create mode 100644 openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py create mode 100644 openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py create mode 100644 openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py create mode 100644 openpype/modules/shotgrid/server/README.md create mode 100644 openpype/modules/shotgrid/shotgrid_module.py create mode 100644 openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py create mode 100644 openpype/modules/shotgrid/tray/credential_dialog.py create mode 100644 openpype/modules/shotgrid/tray/shotgrid_tray.py create mode 100644 openpype/resources/app_icons/shotgrid.png create mode 100644 openpype/settings/defaults/project_settings/shotgrid.json create mode 100644 openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 9964e3c646..dff80e62b9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -519,6 +519,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "OPENPYPE_SG_USER", "AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", diff --git a/openpype/modules/shotgrid/README.md b/openpype/modules/shotgrid/README.md new file mode 100644 index 0000000000..cbee0e9bf4 --- /dev/null +++ b/openpype/modules/shotgrid/README.md @@ -0,0 +1,19 @@ +## Shotgrid Module + +### Pre-requisites + +Install and launch a [shotgrid leecher](https://github.com/Ellipsanime/shotgrid-leecher) server + +### Quickstart + +The goal of this tutorial is to synchronize an already existing shotgrid project with OpenPype. 
+ +- Activate the shotgrid module in the **system settings** and inform the shotgrid leecher server API url + +- Create a new OpenPype project with the **project manager** + +- Inform the shotgrid authentication infos (url, script name, api key) and the shotgrid project ID related to this OpenPype project in the **project settings** + +- Use the batch interface (Tray > shotgrid > Launch batch), select your project and click "batch" + +- You can now access your shotgrid entities within the **avalon launcher** and publish informations to shotgrid with **pyblish** diff --git a/openpype/modules/shotgrid/__init__.py b/openpype/modules/shotgrid/__init__.py new file mode 100644 index 0000000000..f1337a9492 --- /dev/null +++ b/openpype/modules/shotgrid/__init__.py @@ -0,0 +1,5 @@ +from .shotgrid_module import ( + ShotgridModule, +) + +__all__ = ("ShotgridModule",) diff --git a/openpype/modules/shotgrid/lib/__init__.py b/openpype/modules/shotgrid/lib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/modules/shotgrid/lib/const.py b/openpype/modules/shotgrid/lib/const.py new file mode 100644 index 0000000000..2a34800fac --- /dev/null +++ b/openpype/modules/shotgrid/lib/const.py @@ -0,0 +1 @@ +MODULE_NAME = "shotgrid" diff --git a/openpype/modules/shotgrid/lib/credentials.py b/openpype/modules/shotgrid/lib/credentials.py new file mode 100644 index 0000000000..337c4f6ecb --- /dev/null +++ b/openpype/modules/shotgrid/lib/credentials.py @@ -0,0 +1,125 @@ + +from urllib.parse import urlparse + +import shotgun_api3 +from shotgun_api3.shotgun import AuthenticationFault + +from openpype.lib import OpenPypeSecureRegistry, OpenPypeSettingsRegistry +from openpype.modules.shotgrid.lib.record import Credentials + + +def _get_shotgrid_secure_key(hostname, key): + """Secure item key for entered hostname.""" + return f"shotgrid/{hostname}/{key}" + + +def _get_secure_value_and_registry( + hostname, + name, +): + key = _get_shotgrid_secure_key(hostname, name) + registry = OpenPypeSecureRegistry(key) + return registry.get_item(name, None), registry + + +def get_shotgrid_hostname(shotgrid_url): + + if not shotgrid_url: + raise Exception("Shotgrid url cannot be a null") + valid_shotgrid_url = ( + f"//{shotgrid_url}" if "//" not in shotgrid_url else shotgrid_url + ) + return urlparse(valid_shotgrid_url).hostname + + +# Credentials storing function (using keyring) + + +def get_credentials(shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + if not hostname: + return None + login_value, _ = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + password_value, _ = _get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + return Credentials(login_value, password_value) + + +def save_credentials(login, password, shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + _, login_registry = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + _, password_registry = _get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + clear_credentials(shotgrid_url) + login_registry.set_item(Credentials.login_key_prefix(), login) + password_registry.set_item(Credentials.password_key_prefix(), password) + + +def clear_credentials(shotgrid_url): + hostname = get_shotgrid_hostname(shotgrid_url) + login_value, login_registry = _get_secure_value_and_registry( + hostname, + Credentials.login_key_prefix(), + ) + password_value, password_registry = 
_get_secure_value_and_registry( + hostname, + Credentials.password_key_prefix(), + ) + + if login_value is not None: + login_registry.delete_item(Credentials.login_key_prefix()) + + if password_value is not None: + password_registry.delete_item(Credentials.password_key_prefix()) + + +# Login storing function (using json) + + +def get_local_login(): + reg = OpenPypeSettingsRegistry() + try: + return str(reg.get_item("shotgrid_login")) + except Exception: + return None + + +def save_local_login(login): + reg = OpenPypeSettingsRegistry() + reg.set_item("shotgrid_login", login) + + +def clear_local_login(): + reg = OpenPypeSettingsRegistry() + reg.delete_item("shotgrid_login") + + +def check_credentials( + login, + password, + shotgrid_url, +): + + if not shotgrid_url or not login or not password: + return False + try: + session = shotgun_api3.Shotgun( + shotgrid_url, + login=login, + password=password, + ) + session.preferences_read() + session.close() + except AuthenticationFault: + return False + return True diff --git a/openpype/modules/shotgrid/lib/record.py b/openpype/modules/shotgrid/lib/record.py new file mode 100644 index 0000000000..f62f4855d5 --- /dev/null +++ b/openpype/modules/shotgrid/lib/record.py @@ -0,0 +1,20 @@ + +class Credentials: + login = None + password = None + + def __init__(self, login, password) -> None: + super().__init__() + self.login = login + self.password = password + + def is_empty(self): + return not (self.login and self.password) + + @staticmethod + def login_key_prefix(): + return "login" + + @staticmethod + def password_key_prefix(): + return "password" diff --git a/openpype/modules/shotgrid/lib/settings.py b/openpype/modules/shotgrid/lib/settings.py new file mode 100644 index 0000000000..924099f04b --- /dev/null +++ b/openpype/modules/shotgrid/lib/settings.py @@ -0,0 +1,18 @@ +from openpype.api import get_system_settings, get_project_settings +from openpype.modules.shotgrid.lib.const import MODULE_NAME + + +def get_shotgrid_project_settings(project): + return get_project_settings(project).get(MODULE_NAME, {}) + + +def get_shotgrid_settings(): + return get_system_settings().get("modules", {}).get(MODULE_NAME, {}) + + +def get_shotgrid_servers(): + return get_shotgrid_settings().get("shotgrid_settings", {}) + + +def get_leecher_backend_url(): + return get_shotgrid_settings().get("leecher_backend_url") diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py new file mode 100644 index 0000000000..0b03ac2e5d --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_entities.py @@ -0,0 +1,100 @@ +import os + +import pyblish.api +from openpype.lib.mongo import OpenPypeMongoConnection + + +class CollectShotgridEntities(pyblish.api.ContextPlugin): + """Collect shotgrid entities according to the current context""" + + order = pyblish.api.CollectorOrder + 0.499 + label = "Shotgrid entities" + + def process(self, context): + + avalon_project = context.data.get("projectEntity") + avalon_asset = context.data.get("assetEntity") + avalon_task_name = os.getenv("AVALON_TASK") + + self.log.info(avalon_project) + self.log.info(avalon_asset) + + sg_project = _get_shotgrid_project(context) + sg_task = _get_shotgrid_task( + avalon_project, + avalon_asset, + avalon_task_name + ) + sg_entity = _get_shotgrid_entity(avalon_project, avalon_asset) + + if sg_project: + context.data["shotgridProject"] = sg_project + self.log.info( + "Collected correspondig shotgrid 
project : {}".format( + sg_project + ) + ) + + if sg_task: + context.data["shotgridTask"] = sg_task + self.log.info( + "Collected correspondig shotgrid task : {}".format(sg_task) + ) + + if sg_entity: + context.data["shotgridEntity"] = sg_entity + self.log.info( + "Collected correspondig shotgrid entity : {}".format(sg_entity) + ) + + def _find_existing_version(self, code, context): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["sg_task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["code", "is", code], + ] + + sg = context.data.get("shotgridSession") + return sg.find_one("Version", filters, []) + + +def _get_shotgrid_collection(project): + client = OpenPypeMongoConnection.get_mongo_client() + return client.get_database("shotgrid_openpype").get_collection(project) + + +def _get_shotgrid_project(context): + shotgrid_project_id = context.data["project_settings"].get( + "shotgrid_project_id") + if shotgrid_project_id: + return {"type": "Project", "id": shotgrid_project_id} + return {} + + +def _get_shotgrid_task(avalon_project, avalon_asset, avalon_task): + sg_col = _get_shotgrid_collection(avalon_project["name"]) + shotgrid_task_hierarchy_row = sg_col.find_one( + { + "type": "Task", + "_id": {"$regex": "^" + avalon_task + "_[0-9]*"}, + "parent": {"$regex": ".*," + avalon_asset["name"] + ","}, + } + ) + if shotgrid_task_hierarchy_row: + return {"type": "Task", "id": shotgrid_task_hierarchy_row["src_id"]} + return {} + + +def _get_shotgrid_entity(avalon_project, avalon_asset): + sg_col = _get_shotgrid_collection(avalon_project["name"]) + shotgrid_entity_hierarchy_row = sg_col.find_one( + {"_id": avalon_asset["name"]} + ) + if shotgrid_entity_hierarchy_row: + return { + "type": shotgrid_entity_hierarchy_row["type"], + "id": shotgrid_entity_hierarchy_row["src_id"], + } + return {} diff --git a/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py new file mode 100644 index 0000000000..9d5d2271bf --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/collect_shotgrid_session.py @@ -0,0 +1,123 @@ +import os + +import pyblish.api +import shotgun_api3 +from shotgun_api3.shotgun import AuthenticationFault + +from openpype.lib import OpenPypeSettingsRegistry +from openpype.modules.shotgrid.lib.settings import ( + get_shotgrid_servers, + get_shotgrid_project_settings, +) + + +class CollectShotgridSession(pyblish.api.ContextPlugin): + """Collect shotgrid session using user credentials""" + + order = pyblish.api.CollectorOrder + label = "Shotgrid user session" + + def process(self, context): + + certificate_path = os.getenv("SHOTGUN_API_CACERTS") + if certificate_path is None or not os.path.exists(certificate_path): + self.log.info( + "SHOTGUN_API_CACERTS does not contains a valid \ + path: {}".format( + certificate_path + ) + ) + certificate_path = get_shotgrid_certificate() + self.log.info("Get Certificate from shotgrid_api") + + if not os.path.exists(certificate_path): + self.log.error( + "Could not find certificate in shotgun_api3: \ + {}".format( + certificate_path + ) + ) + return + + set_shotgrid_certificate(certificate_path) + self.log.info("Set Certificate: {}".format(certificate_path)) + + avalon_project = os.getenv("AVALON_PROJECT") + + shotgrid_settings = get_shotgrid_project_settings(avalon_project) + self.log.info("shotgrid settings: {}".format(shotgrid_settings)) + shotgrid_servers_settings = 
get_shotgrid_servers() + self.log.info( + "shotgrid_servers_settings: {}".format(shotgrid_servers_settings) + ) + + shotgrid_server = shotgrid_settings.get("shotgrid_server", "") + if not shotgrid_server: + self.log.error( + "No Shotgrid server found, please choose a credential" + "in script name and script key in OpenPype settings" + ) + + shotgrid_server_setting = shotgrid_servers_settings.get( + shotgrid_server, {} + ) + shotgrid_url = shotgrid_server_setting.get("shotgrid_url", "") + + shotgrid_script_name = shotgrid_server_setting.get( + "shotgrid_script_name", "" + ) + shotgrid_script_key = shotgrid_server_setting.get( + "shotgrid_script_key", "" + ) + if not shotgrid_script_name and not shotgrid_script_key: + self.log.error( + "No Shotgrid api credential found, please enter " + "script name and script key in OpenPype settings" + ) + + login = get_login() or os.getenv("OPENPYPE_SG_USER") + + if not login: + self.log.error( + "No Shotgrid login found, please " + "login to shotgrid withing openpype Tray" + ) + + session = shotgun_api3.Shotgun( + base_url=shotgrid_url, + script_name=shotgrid_script_name, + api_key=shotgrid_script_key, + sudo_as_login=login, + ) + + try: + session.preferences_read() + except AuthenticationFault: + raise ValueError( + "Could not connect to shotgrid {} with user {}".format( + shotgrid_url, login + ) + ) + + self.log.info( + "Logged to shotgrid {} with user {}".format(shotgrid_url, login) + ) + context.data["shotgridSession"] = session + context.data["shotgridUser"] = login + + +def get_shotgrid_certificate(): + shotgun_api_path = os.path.dirname(shotgun_api3.__file__) + return os.path.join(shotgun_api_path, "lib", "certifi", "cacert.pem") + + +def set_shotgrid_certificate(certificate): + os.environ["SHOTGUN_API_CACERTS"] = certificate + + +def get_login(): + reg = OpenPypeSettingsRegistry() + try: + return str(reg.get_item("shotgrid_login")) + except Exception: + return None diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py new file mode 100644 index 0000000000..cfd2d10fd9 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_publish.py @@ -0,0 +1,77 @@ +import os +import pyblish.api + + +class IntegrateShotgridPublish(pyblish.api.InstancePlugin): + """ + Create published Files from representations and add it to version. If + representation is tagged add shotgrid review, it will add it in + path to movie for a movie file or path to frame for an image sequence. 
+ """ + + order = pyblish.api.IntegratorOrder + 0.499 + label = "Shotgrid Published Files" + + def process(self, instance): + + context = instance.context + + self.sg = context.data.get("shotgridSession") + + shotgrid_version = instance.data.get("shotgridVersion") + + for representation in instance.data.get("representations", []): + + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if representation.get("tags", []): + continue + + published_file = self._find_existing_publish( + code, context, shotgrid_version + ) + + published_file_data = { + "project": context.data.get("shotgridProject"), + "code": code, + "entity": context.data.get("shotgridEntity"), + "task": context.data.get("shotgridTask"), + "version": shotgrid_version, + "path": {"local_path": local_path}, + } + if not published_file: + published_file = self._create_published(published_file_data) + self.log.info( + "Create Shotgrid PublishedFile: {}".format(published_file) + ) + else: + self.sg.update( + published_file["type"], + published_file["id"], + published_file_data, + ) + self.log.info( + "Update Shotgrid PublishedFile: {}".format(published_file) + ) + + if instance.data["family"] == "image": + self.sg.upload_thumbnail( + published_file["type"], published_file["id"], local_path + ) + instance.data["shotgridPublishedFile"] = published_file + + def _find_existing_publish(self, code, context, shotgrid_version): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["version", "is", shotgrid_version], + ["code", "is", code], + ] + return self.sg.find_one("PublishedFile", filters, []) + + def _create_published(self, published_file_data): + + return self.sg.create("PublishedFile", published_file_data) diff --git a/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py new file mode 100644 index 0000000000..a1b7140e22 --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/integrate_shotgrid_version.py @@ -0,0 +1,92 @@ +import os +import pyblish.api + + +class IntegrateShotgridVersion(pyblish.api.InstancePlugin): + """Integrate Shotgrid Version""" + + order = pyblish.api.IntegratorOrder + 0.497 + label = "Shotgrid Version" + + sg = None + + def process(self, instance): + + context = instance.context + self.sg = context.data.get("shotgridSession") + + # TODO: Use path template solver to build version code from settings + anatomy = instance.data.get("anatomyData", {}) + code = "_".join( + [ + anatomy["project"]["code"], + anatomy["parent"], + anatomy["asset"], + anatomy["task"]["name"], + "v{:03}".format(int(anatomy["version"])), + ] + ) + + version = self._find_existing_version(code, context) + + if not version: + version = self._create_version(code, context) + self.log.info("Create Shotgrid version: {}".format(version)) + else: + self.log.info("Use existing Shotgrid version: {}".format(version)) + + data_to_update = {} + status = context.data.get("intent", {}).get("value") + if status: + data_to_update["sg_status_list"] = status + + for representation in instance.data.get("representations", []): + local_path = representation.get("published_path") + code = os.path.basename(local_path) + + if "shotgridreview" in representation.get("tags", []): + + if representation["ext"] in ["mov", "avi"]: + self.log.info( + "Upload review: {} for version shotgrid {}".format( + 
local_path, version.get("id") + ) + ) + self.sg.upload( + "Version", + version.get("id"), + local_path, + field_name="sg_uploaded_movie", + ) + + data_to_update["sg_path_to_movie"] = local_path + + elif representation["ext"] in ["jpg", "png", "exr", "tga"]: + path_to_frame = local_path.replace("0000", "#") + data_to_update["sg_path_to_frames"] = path_to_frame + + self.log.info("Update Shotgrid version with {}".format(data_to_update)) + self.sg.update("Version", version["id"], data_to_update) + + instance.data["shotgridVersion"] = version + + def _find_existing_version(self, code, context): + + filters = [ + ["project", "is", context.data.get("shotgridProject")], + ["sg_task", "is", context.data.get("shotgridTask")], + ["entity", "is", context.data.get("shotgridEntity")], + ["code", "is", code], + ] + return self.sg.find_one("Version", filters, []) + + def _create_version(self, code, context): + + version_data = { + "project": context.data.get("shotgridProject"), + "sg_task": context.data.get("shotgridTask"), + "entity": context.data.get("shotgridEntity"), + "code": code, + } + + return self.sg.create("Version", version_data) diff --git a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py new file mode 100644 index 0000000000..c14c980e2a --- /dev/null +++ b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py @@ -0,0 +1,38 @@ +import pyblish.api +import openpype.api + + +class ValidateShotgridUser(pyblish.api.ContextPlugin): + """ + Check if user is valid and have access to the project. + """ + + label = "Validate Shotgrid User" + order = openpype.api.ValidateContentsOrder + + def process(self, context): + sg = context.data.get("shotgridSession") + + login = context.data.get("shotgridUser") + self.log.info("Login shotgrid set in OpenPype is {}".format(login)) + project = context.data.get("shotgridProject") + self.log.info("Current shotgun project is {}".format(project)) + + if not (login and sg and project): + raise KeyError() + + user = sg.find_one("HumanUser", [["login", "is", login]], ["projects"]) + + self.log.info(user) + self.log.info(login) + user_projects_id = [p["id"] for p in user.get("projects", [])] + if not project.get("id") in user_projects_id: + raise PermissionError( + "Login {} don't have access to the project {}".format( + login, project + ) + ) + + self.log.info( + "Login {} have access to the project {}".format(login, project) + ) diff --git a/openpype/modules/shotgrid/server/README.md b/openpype/modules/shotgrid/server/README.md new file mode 100644 index 0000000000..15e056ff3e --- /dev/null +++ b/openpype/modules/shotgrid/server/README.md @@ -0,0 +1,5 @@ + +### Shotgrid server + +Please refer to the external project that covers Openpype/Shotgrid communication: + - https://github.com/Ellipsanime/shotgrid-leecher diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py new file mode 100644 index 0000000000..5644f0c35f --- /dev/null +++ b/openpype/modules/shotgrid/shotgrid_module.py @@ -0,0 +1,58 @@ +import os + +from openpype_interfaces import ( + ITrayModule, + IPluginPaths, + ILaunchHookPaths, +) + +from openpype.modules import OpenPypeModule + +SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class ShotgridModule( + OpenPypeModule, ITrayModule, IPluginPaths, ILaunchHookPaths +): + leecher_manager_url = None + name = "shotgrid" + enabled = False + project_id = None + tray_wrapper = None + + def 
initialize(self, modules_settings): + shotgrid_settings = modules_settings.get(self.name, dict()) + self.enabled = shotgrid_settings.get("enabled", False) + self.leecher_manager_url = shotgrid_settings.get( + "leecher_manager_url", "" + ) + + def connect_with_modules(self, enabled_modules): + pass + + def get_global_environments(self): + return {"PROJECT_ID": self.project_id} + + def get_plugin_paths(self): + return { + "publish": [ + os.path.join(SHOTGRID_MODULE_DIR, "plugins", "publish") + ] + } + + def get_launch_hook_paths(self): + return os.path.join(SHOTGRID_MODULE_DIR, "hooks") + + def tray_init(self): + from .tray.shotgrid_tray import ShotgridTrayWrapper + + self.tray_wrapper = ShotgridTrayWrapper(self) + + def tray_start(self): + return self.tray_wrapper.validate() + + def tray_exit(self, *args, **kwargs): + return self.tray_wrapper + + def tray_menu(self, tray_menu): + return self.tray_wrapper.tray_menu(tray_menu) diff --git a/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py new file mode 100644 index 0000000000..1f78cf77c9 --- /dev/null +++ b/openpype/modules/shotgrid/tests/shotgrid/lib/test_credentials.py @@ -0,0 +1,34 @@ +import pytest +from assertpy import assert_that + +import openpype.modules.shotgrid.lib.credentials as sut + + +def test_missing_shotgrid_url(): + with pytest.raises(Exception) as ex: + # arrange + url = "" + # act + sut.get_shotgrid_hostname(url) + # assert + assert_that(ex).is_equal_to("Shotgrid url cannot be a null") + + +def test_full_shotgrid_url(): + # arrange + url = "https://shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") + + +def test_incomplete_shotgrid_url(): + # arrange + url = "shotgrid.com/myinstance" + # act + actual = sut.get_shotgrid_hostname(url) + # assert + assert_that(actual).is_not_empty() + assert_that(actual).is_equal_to("shotgrid.com") diff --git a/openpype/modules/shotgrid/tray/credential_dialog.py b/openpype/modules/shotgrid/tray/credential_dialog.py new file mode 100644 index 0000000000..9d841d98be --- /dev/null +++ b/openpype/modules/shotgrid/tray/credential_dialog.py @@ -0,0 +1,201 @@ +import os +from Qt import QtCore, QtWidgets, QtGui + +from openpype import style +from openpype import resources +from openpype.modules.shotgrid.lib import settings, credentials + + +class CredentialsDialog(QtWidgets.QDialog): + SIZE_W = 450 + SIZE_H = 200 + + _module = None + _is_logged = False + url_label = None + login_label = None + password_label = None + url_input = None + login_input = None + password_input = None + input_layout = None + login_button = None + buttons_layout = None + main_widget = None + + login_changed = QtCore.Signal() + + def __init__(self, module, parent=None): + super(CredentialsDialog, self).__init__(parent) + + self._module = module + self._is_logged = False + + self.setWindowTitle("OpenPype - Shotgrid Login") + + icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) + self.setWindowIcon(icon) + + self.setWindowFlags( + QtCore.Qt.WindowCloseButtonHint + | QtCore.Qt.WindowMinimizeButtonHint + ) + self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) + self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100)) + self.setStyleSheet(style.load_stylesheet()) + + self.ui_init() + + def ui_init(self): + self.url_label = QtWidgets.QLabel("Shotgrid server:") + self.login_label = 
QtWidgets.QLabel("Login:") + self.password_label = QtWidgets.QLabel("Password:") + + self.url_input = QtWidgets.QComboBox() + # self.url_input.setReadOnly(True) + + self.login_input = QtWidgets.QLineEdit() + self.login_input.setPlaceholderText("login") + + self.password_input = QtWidgets.QLineEdit() + self.password_input.setPlaceholderText("password") + self.password_input.setEchoMode(QtWidgets.QLineEdit.Password) + + self.error_label = QtWidgets.QLabel("") + self.error_label.setStyleSheet("color: red;") + self.error_label.setWordWrap(True) + self.error_label.hide() + + self.input_layout = QtWidgets.QFormLayout() + self.input_layout.setContentsMargins(10, 15, 10, 5) + + self.input_layout.addRow(self.url_label, self.url_input) + self.input_layout.addRow(self.login_label, self.login_input) + self.input_layout.addRow(self.password_label, self.password_input) + self.input_layout.addRow(self.error_label) + + self.login_button = QtWidgets.QPushButton("Login") + self.login_button.setToolTip("Log in shotgrid instance") + self.login_button.clicked.connect(self._on_shotgrid_login_clicked) + + self.logout_button = QtWidgets.QPushButton("Logout") + self.logout_button.setToolTip("Log out shotgrid instance") + self.logout_button.clicked.connect(self._on_shotgrid_logout_clicked) + + self.buttons_layout = QtWidgets.QHBoxLayout() + self.buttons_layout.addWidget(self.logout_button) + self.buttons_layout.addWidget(self.login_button) + + self.main_widget = QtWidgets.QVBoxLayout(self) + self.main_widget.addLayout(self.input_layout) + self.main_widget.addLayout(self.buttons_layout) + self.setLayout(self.main_widget) + + def show(self, *args, **kwargs): + super(CredentialsDialog, self).show(*args, **kwargs) + self._fill_shotgrid_url() + self._fill_shotgrid_login() + + def _fill_shotgrid_url(self): + servers = settings.get_shotgrid_servers() + + if servers: + for _, v in servers.items(): + self.url_input.addItem("{}".format(v.get('shotgrid_url'))) + self._valid_input(self.url_input) + self.login_button.show() + self.logout_button.show() + enabled = True + else: + self.set_error("Ask your admin to add shotgrid server in settings") + self._invalid_input(self.url_input) + self.login_button.hide() + self.logout_button.hide() + enabled = False + + self.login_input.setEnabled(enabled) + self.password_input.setEnabled(enabled) + + def _fill_shotgrid_login(self): + login = credentials.get_local_login() + + if login: + self.login_input.setText(login) + + def _clear_shotgrid_login(self): + self.login_input.setText("") + self.password_input.setText("") + + def _on_shotgrid_login_clicked(self): + login = self.login_input.text().strip() + password = self.password_input.text().strip() + missing = [] + + if login == "": + missing.append("login") + self._invalid_input(self.login_input) + + if password == "": + missing.append("password") + self._invalid_input(self.password_input) + + url = self.url_input.currentText() + if url == "": + missing.append("url") + self._invalid_input(self.url_input) + + if len(missing) > 0: + self.set_error("You didn't enter {}".format(" and ".join(missing))) + return + + # if credentials.check_credentials( + # login=login, + # password=password, + # shotgrid_url=url, + # ): + credentials.save_local_login( + login=login + ) + os.environ['OPENPYPE_SG_USER'] = login + self._on_login() + + self.set_error("CANT LOGIN") + + def _on_shotgrid_logout_clicked(self): + credentials.clear_local_login() + del os.environ['OPENPYPE_SG_USER'] + self._clear_shotgrid_login() + self._on_logout() + + def set_error(self, 
msg): + self.error_label.setText(msg) + self.error_label.show() + + def _on_login(self): + self._is_logged = True + self.login_changed.emit() + self._close_widget() + + def _on_logout(self): + self._is_logged = False + self.login_changed.emit() + + def _close_widget(self): + self.hide() + + def _valid_input(self, input_widget): + input_widget.setStyleSheet("") + + def _invalid_input(self, input_widget): + input_widget.setStyleSheet("border: 1px solid red;") + + def login_with_credentials( + self, url, login, password + ): + verification = credentials.check_credentials(url, login, password) + if verification: + credentials.save_credentials(login, password, False) + self._module.set_credentials_to_env(login, password) + self.set_credentials(login, password) + self.login_changed.emit() + return verification diff --git a/openpype/modules/shotgrid/tray/shotgrid_tray.py b/openpype/modules/shotgrid/tray/shotgrid_tray.py new file mode 100644 index 0000000000..4038d77b03 --- /dev/null +++ b/openpype/modules/shotgrid/tray/shotgrid_tray.py @@ -0,0 +1,75 @@ +import os +import webbrowser + +from Qt import QtWidgets + +from openpype.modules.shotgrid.lib import credentials +from openpype.modules.shotgrid.tray.credential_dialog import ( + CredentialsDialog, +) + + +class ShotgridTrayWrapper: + module = None + credentials_dialog = None + logged_user_label = None + + def __init__(self, module): + self.module = module + self.credentials_dialog = CredentialsDialog(module) + self.credentials_dialog.login_changed.connect(self.set_login_label) + self.logged_user_label = QtWidgets.QAction("") + self.logged_user_label.setDisabled(True) + self.set_login_label() + + def show_batch_dialog(self): + if self.module.leecher_manager_url: + webbrowser.open(self.module.leecher_manager_url) + + def show_connect_dialog(self): + self.show_credential_dialog() + + def show_credential_dialog(self): + self.credentials_dialog.show() + self.credentials_dialog.activateWindow() + self.credentials_dialog.raise_() + + def set_login_label(self): + login = credentials.get_local_login() + if login: + self.logged_user_label.setText("{}".format(login)) + else: + self.logged_user_label.setText( + "No User logged in {0}".format(login) + ) + + def tray_menu(self, tray_menu): + # Add login to user menu + menu = QtWidgets.QMenu("Shotgrid", tray_menu) + show_connect_action = QtWidgets.QAction("Connect to Shotgrid", menu) + show_connect_action.triggered.connect(self.show_connect_dialog) + menu.addAction(self.logged_user_label) + menu.addSeparator() + menu.addAction(show_connect_action) + tray_menu.addMenu(menu) + + # Add manager to Admin menu + for m in tray_menu.findChildren(QtWidgets.QMenu): + if m.title() == "Admin": + shotgrid_manager_action = QtWidgets.QAction( + "Shotgrid manager", menu + ) + shotgrid_manager_action.triggered.connect( + self.show_batch_dialog + ) + m.addAction(shotgrid_manager_action) + + def validate(self): + login = credentials.get_local_login() + + if not login: + self.show_credential_dialog() + else: + os.environ["OPENPYPE_SG_USER"] = login + + return True diff --git a/openpype/resources/app_icons/shotgrid.png b/openpype/resources/app_icons/shotgrid.png new file mode 100644 index 0000000000000000000000000000000000000000..6d0cc047f9ed86e0db45ea557404ab7edb2f5bf2 GIT binary patch literal 45744 zcmeFZby$?$_BTFshcwb4-Q6W2;m}e_=g={Pbcd86B8`BQfRspxfP{o1((Mq^HT2N$ z?em=Toaf6q$Lo7t@B6!c|2UUAv-jF-?Y%#Ht+m%)`@W6U(zu6*eH$AD0^zBuDC&Sf zNWf1d5GFeC^~j^t7Wl$)Q!(-cfpCa_{zU?1W>bJb;)f7DL#QG6zJ!&lGmnL}tECN( 
zy9s{!`}t%w&hp$}>1nGlZ-M20{QJk0Vm&zS>};wz&KY_kZ!d3kn1{BJ7vC;crShKA zG3Bcd4d11zXbdtX2V7}GqnP7EowPjs-{YAYM0}N=k|C{03-&3#wKKb-@gG@u>QuQ) z0p~P)Bg*?RNWJ!5tix1KTg~I|uiUx7GSbVX$BXImYwdspQ6+bDCJAcc=LKBRQH&-j zK^u)}X;(_U`Hy@Vefkfq5>eCDgu7lviK>Nc60u?}cs>(EfCEz?WhN~MQbPy!mcX~dqc5kTQnt8g;_8B*8DH=H<8HIUN_2SaWR(aURiBBg{Rc1t_~)`%Zmh2? z+mY4>?Tp@@Y|`Pk?tDn%X(n)S^mrB{DO~hr4wpgCteDl&Gg07s!Yz9?s)B-p#j1>r ze6^V(epJYiya;{`lhKJT(l-p2r#Ta;1D5o*YsCx;j2dgCaXZcOi2!Bw7lj42r$j(8 zmCFL<9t&1skW*4!!i5wNZUH-ApYgDpPk8A9m=qYT)xq%4Zcw~?m{EJwUbpA)4)BytYW>7Z^Ij;l;`;!HlljBIkXjygHl)1+Oz zRcQJIYVm&uDAz*Ei1*MCN4!?XlkdljhqSmi&_yqop&8;;jo^Uxb2b}4D!v+dtT~Ia zKapZJRF(B{)p&m2`&La~zYO73p}Jc?R;y5d8WcoX66|{D$htA&I38m~wjacd;_;#i zJ@!Z9nWbd)r%4{xyN?u|1W045H{0%&($b~<&AvIjh>aSJ5uPs+xop@rQ@*Ar3kwi9Z--WDG=gWyzNU#!bIC_To@eQ33iR z#dj(_7Ep0^BN{r~IL0&XG ze7_?Woq4;k6U0XHcSM#jdiE4X_z|8~2P`#0(gQr#QOK+O9}^ybwr1})S`tZbT;Iiy za!)%y(RU(R#vVdy=j=N(f4J+4k5Ol5{q_#%t63n_RfyU8Ctbj-yo%+~GWHYO9WjYq z?0mXmPree$p)EJ~`Y4meoNpXM5#WsL(cj;4Uz(I{`7#&};ED-S*J&AQnz+eZ=>oU` zEPCwBlNisPpa@HGI@vR zY~yMksq`lhz~0fYimaoyW*x%F+6kDzft+ge?EPjgm`OCVt0s}3VLGMb{bbC%Xi(b6 zvX-dx?Z&$Jwp&wPsJ+2JZ(x;$px?H-llS~~J?u7f&!Wz4rew9DP*HW-r5=Hot4rSZR+^bTlnb#nr(Tfg3(3rm z>r19LtKZ#!sKt;u^pG#UgRMDiu&B@184x7Zk@6`+&0}dm6W!_!TkaKo2JQ&sHSd>B z-_!@5o2%Yf#-2@ouG0@7HOs%4woB`Mes%U7M+646L5TJRRF*a&dpr>IO`VI3*0>(z z)p*OO{Y_;;QNMkry~DuqIenyMg8t&FFKp6~S0GQ(nQ#HsvfyR>+2OVX)ta<^;~LsZ zzeBcsq|i!WLPo-2R0<7JG4mbYG-u+l8VeTvqYR9r^R6Pp^pOi&p&nS~ zMpB$*;c>{IUgY9H4bE{-qR(*mm};bJ&vAwNh~)<2FMq?jPQ(_8G`_n3G7O#Zf2Q%o zWls>>7Y9UMNls1@sRTY(CWIdTLiB5*A3$17tH6%aW2aTeO)EeqWH>Hh7<3RDmeWn_ zspH8Q%rDNv08XCvyzFMF<5}Y;4k8D2l^x|g$(N$X4DcxneMs?-;M%VOmvCk*MMZ1g zQy1up|HKq0t89-OYVc8co?rohx1dM&*#*n2==la@CW*JxA}>0X`hpOfV;UWFMr8YW z_ARL2uRWgUSk%A*wJ|iW@gcT2#Daz`#-vYU6YxO+{pV#~Whm-7XB}pjfw;8D{*BoJ z0>97sG^?eWT`T^O7VVe*{(CN|Tn#lZJHJ2nPm+;O9dXm`uVyk2DcCxqvkwu?|H1n4 z%yCcq9FB3$lXNk$ks57N5GwEpUBhE@(74T?;cD!wi@oX%9_v(MkgAAlI17} zfvYU>ql?H$z8l;}>{`u5&9Y!CYgeaX-$=Xd&u3f7st49G!}WF{DY`bh^|M{x;W|A2 zU>ZDlYCs#XB8HBb6}`R^9Mxz2lDf`#bz9BXFboy3z#WbD#laB1Sx(-}x03ZC z9PT3@OP-kUfU(l?zDep>aB^_;Q#~Va5V#zmPcg6DUadtiYY43(>d&ew!y%b4qge@a zGvk2Ne0pL2-d|kNEGKf5c?9*Mg|yJ*=Imxe5{GI#y9{b1iwGt4ge`bmJ6up$vr{J8 zqZfMN!OP7@yyRi?y@dMJm?3?qChx2Ud90t8=Lr^_>Fm$>X`aNWwh3a1*4RzbkM~|FRM1==0>NJ+(-ibLGd&x$>HWFU)=VF%rllwg4DYh?U`% zTB2xzWOT@-WsbhKx6XHu=1&LwE0Bel3|`g#nAz5ui+)Q`SRZ$^#8LmivzG>+M4zh| z#Bb*FoyNU39-BU8n*H@6IQl^gD77-K9RKj(`?)5*-@uu6yku;Xt0r=c%jnYmunj;`WTg?B$~?KT@bzh z)A6KN=lgi74+v0t+=f0`P&nJx&6GdcI~R(ugYg<1;O;$XIcuv zgwBpl+liI_d1Km0w|TC1b#?ip95_MW5>(Kn#qBQdfi{hF7KaBz6&0LxPErvJV0YQy z7;aDFCa)GfEMv|FPmLm@3MYPO>UjG->tLPzAW38Lv3}Xd`|&W3_jN0IoTO!l{g05; zSv~Zl&a24fT2h~5zdGNm@;~tFZGc$f@psrv`{@hwzr2g-Q%^40`794YTdI@xqp;K* zRhMxS9nVAEGCy_8Q!hfE#ga~zHmHDlJhb@Z9vcDo6h zFu%*_Z+jU=o9?ka>{o9zy929Vg`3h}jT)ONsyC@9lQIMBajmuodGx`b?-2^RCh7<} zp&1H|D}n1Xw0nswc(85BTS-I^s1Ai5Ki!|b^mR#wJ!m>pinp(L%bly2dQExfXRF%> z18aBn9L_^wt9+T8YB~b1GX$&R0AL*E>oJo@>DvONzfF#F&Oq}SF<>dQFe_9SN0vy5 zPNm0If5npo_$0i%-oq&$oIFLpG84-uBnOZp*#$UwKP?RDqKbrwqOz z-oxL|Z4GK&^fk}2gy09nV)zU4WFaIfiWxDghN$XuNFO;^<>5#~yJJg;d@;dbqKuKx zs#)P`Zi0cm-z!ACkTn}m#_W#E*md!P^5gDo*R?r!5ucdKomBSknupoiX2c}mT@9?= zl8amI+u=FDw0F8ugK!Z_8o+{+8x*P@Fndx%@+sYI7B0JneX6eARIP;1pw}b;u(q<@^hnXSHnsWS zKT{EJh0N}Yvq*V$b&*K`YT7{jEaqMp?g>q27v#?5WS>qcrQjmH^@evkQQu)xH&@sE z3JD%oyJZn`va$I^2eZ;rY*HDXI@$^0HJiY_eV7MH?7!?zo}~Wj65eRj=rDZiSfbFd zqWI~$^b0r*pL~(Prj+M*%Zp83+4?X1JP#ES)m6yo^fp!wL!j}QJ=9FUJ zcM1ScUvTV~wP3ETN}g<8Zf-c8+OsJ^=Qe8{hwmNpWQ7^hvXF}YAb+*#QRXOl>c~ef zENDf+v+t!|Ah-`zH@^iJ77huz9nhQN+lmSJTDyKLC`Muet8F#-Gu~Z00KOt^X^Eq# zd?{wRR+o$jBd{n57pfkDF(7BX8 
zIZe#&^TvwIB>z+MAS4RV4^(l-QJZ8~7>&+?4lf-yQ5+g+-#j3JM2}K^@$PG@Oggd; z>1$4DN0BCum~&u#We3&#TW+hOETIw1jrJ`tIxZwOR#cMq4Ez9YD*ja z#Pry+`8Gt$r-VglqDMh|WC2QGbAIg-i*aHlGSwb%chRj@NM}E8 zn|_r7LC|F)*Ag|U7LqLQ$?k#<^W?*{X*iNLc_%k}&k3ECge1D;Eq4@<>2jon`1xh_1h}7fidCpd-fz|4@;B<-oD5m8CIQO~YXMT`XB6?xAM{CI zHm4S?vRMSQr3Q1AfG`F}<=-6IEwN6Em%)&~$yEGq<+pvUkY7~Xb45h{z-Q%74k%YL zsR4LYiMTesl0RLce+mHPdyQ=wc$v|h=02*qO^*aUU^?sOOsW^Ctazaf3`wtK37L0p z!$sLaZC%iBXjv%ZcwD694n2@97IGelEj~9jou;{Nz8epf zQ_8WUA{+~!7k2c!IW$pX1*iSa`D}pjmeSkdO{9ZTw^|$T z!`t?TpZG&3y!FYc8qGJ%F_DvAg`M+LvOrd0e(DW#+_mz&nkB=YpxLJL=ekj;5{!_( zlDEp|=UL(z_G+7RXV>efcR>0lyCTFP@*WsXs3FF~&iAc0&oFG+3J-7egxmOf0zn;I}~A$Tgz zX2ja=jtNxK&0&ZZyhx?$wmp0ex6KBrD%A5bC71uTB?y}Fhp`RpfY@iL-H#k3F2V=D z>#^K#(v>T70Vj6)QRhI`lVtybZ|_0p zY~$&UIs(01l8AVL8Hpq5(yhZoTyGQ*HkPS>S8Z5nOsfkK!4JuJ!yknhhROnR`p9Yi zz@|*|y${F8dwsgBk&&rt7F7L_EUiS6$0`eeL;d6wnR++dh0A>5kCE^jtaK(m*v*|| z&hJ5o5D)53EM6`z;!0c|(j0(&yccLkk5i)8MrbfJhFj7kw3=UUC6%BIi6~0ebZ_gH zSdZ_weB5WzaLK=mJQy~rX;sSbJTVC<2iE9z#L9h0Ss00Ax}*&vHu~HD2Iqy3|$)C zy{&0j`z|EU!yl*7@2iw5oHL3ivq*X*AY)IV5)A(9?~(M5A@5CyHq*npKV9a1v6l;S zOoI7v<^6k`)OJ)CYqB0*45&1v-nMx62rpl_yJjn_KFRcjHfEAx@B2ekKE8cbI`Nmv@Mxc70Qtf>X&xqC85Q0kE4^nr>uPz=F zy+gF`r{Y4+-vt6R|C8|So~4Ixyw=VQ9Q1>DTU-XRFLwUJ z*Tid&dYUbba zZ*!{PpnpBEfxylKlx-LT_-Jd>3o$riTo$15Pm^7zdckZGhb1c2ug#wsOE2BF!%ntSXBxCCZS{Eq30&5ch(fK%?|JgNpuN zd<#&;Xfzfaq+6 z{Kl_baEjJ_B(^7RWaTqoO`80tJsk{6W@n(V1(g5l{JBi6e`kQoI2flmTLmeB3Mqf6 z6YAn=>^C&|Tss*2Ak97<$hoKDdm7j6a${04aR?vRo;CfwJ;Mq5H>LuRndx{Y|V7F;lYp-Wq*Su`08V+4x`QMojy>%hyRbI z*~)J61&P8jz1$O4$UBZm9yr6DC3xW6fT&6~oO@j^?nzuIm>C(cb8K&)`EmzEN!Zsy z+IQ_?1>Bj@m9l%x`=i=u2Hcw;J#q33dm%giZo#4P-`{qJh3x& znPcM|V9%CeC-t14GI=lSMfjgP07>4Qtz@fQL}}nrF;dsxS%lGKc3yJ%OiKnPVgRyk}&A**lqz%J)7F`tEUx?D<&ol zKG<*el;Wsvkd`~jUN0Q{r<`~z9s;3i;vueN?#p=OtYU(B22>_5x~RO2pd}D7-P`$j z^A6?~#z#yM*CZ8QtW9XP?j(~xO9Y7zh6(QI2r8J}I25}7EoE(k)&h`}W_o%d3}Qs* zY!R~4>5DI^e7^XI{Ko^iWxxW6=>OXM0Wiib56)il1zF$*Di?L8?XIIj6x@-0vwOlj zLTb;VK**G`-C>z!?&QE#9OsN>H|0$c*XMn{sa40lZ44AtdA@$)Lv`Qq;LPxo;j*B? z8Ufq_fyP)@@}?0YX0lE9$|aVSIWW?diZeFe&L=TBIjo;G-=EHKw`16j{7OS>%uQb+ zgWsQoadOw1Qj$L}JEhvaf`_lm_G=AQBh@m!3@a-V-E)<{>byZyfV^nM|%88Qd(X^713PAA8`6=o__s-ko^uft=<*($D1lB{?M552N~igFt4?16Sgj4;>~-iD zR`@YN=o{GhuA43dGQ{To$VCbQtu|dq(`$n9t$!gP&Tny<`tNj^<%2rZPfId1-b*dS zJ)>)mix@^u@a?Alej>0>o~rbZx;pkZN`;a#?{Rc$dA`%NI9tn8f9ZJz*^lz(fTKH7 zVBwdTu!1zY;*z(XZ=*4+@$nOT7dr2GR%Z1!$`?h!imb2lbY*piMafKqTaPfy3eOa9 zPjUBZqm{xo%5I_ciq?QHayZMoST@afALAkqMrvvBSNlRIw{gog=)1@8!~cm{fh}%b z6PifJ(8LG|e6%$8`2v;Wg0oiPy{Fe=NyAtPTp_;;Uhg%n!SLvPJtU`zX7d?8X3J~H zm(<}lR5W!hrrMulFE(6b@|Au9a9Pe9XmN^Q;snaPHtgA9qr63?@#fJ5v1b^qcfe>W z{P|)+1=hPvQ2)05op0f;an~uk>1o_8Ugfs2a{<`cZAmsHX$kUP+ zo$cij7sctD5rz2B)suB%Y_=?@!&Rq$u%uIRcQ0C9spB$UTk4oRwI)H1uUqOoJS*4m z1dP1>)Kj7E9z@O08Lytpt^pN1|!a72l&r4nGaC|Fw&b zy%$|gyu2FjSlo>dH~VUsjos_JU+M%;!^*A+>Mrwgtnl`(3{q+;btDkTSBlpy{m2GG zA<-;NAMY8IK-0Sjvx&%e6*l8W2MWDgAV6UwPTUIufw72Yonw70|9N^ z9Dz?S&4{25IX|*{kl73oa#dcDC*|?qY>N{J;(i{L28~|?YzJG9{$BfGs2BqVXmoiX ziD%L)?qKJWfN?^V#VXYCcx_W$X*~CsVI%mDD+%e?s6M{1JDQhY@;(ZwcaKsW1P@Mx z+1@O7e(OMNgOVUx)WYG9)1*RQQ`j$Ib9^FhT*;2Mf&lYgoj~eU5URI4@kKtqT)s## zjcIge%G1GJiYycfsX!ykXA9BmZND*G`7T1wj8@$ME)sdiIjd2lSbX0Ss;BxL2!;Ea zPlS@zq3WVMEf`|LF+=J^1LK#_Q`VOs(xc^giT6*Y-3t%sFF$S{J;<2xeQ_>MzGB5? 
zGGmz;;-cbp(s#t!94W+}4Yt6S@ol?X4=C|dvUmljU*i*gMB@{RTyLk`KN8t$yfZt2 z5dQX;U{6OBn5sf0{eGpce4jn&aF~15(D?`WOw}_4Ke?hFpi42bPe%eH?tGjY>HpF` zBuComGbV@Xzm5*OapnFsxi;z7V!E#DI)1(P$P`fNC*Bz52KNAxLMNUk+S#R0R;&G1 zWI6h%L_qK3vMJO=wvOBp-u%pc{M0umOb$7v?!9U?m1`l zGJ>&Dg?!m9NG>C*_kL84^Y#ilG$GnQ1?G)|&Vj&Q9#Ogjktcg02XgmOfZht%m;D9;8;9l4)g>lpC> zkXVeL5q?&5)a&5q`6~1tZQuAr`Jv0;d%t~6zh^p4ujRpu@#UCIawXO8w1^Q*zaC2( z$t=%n`{0Lu3ieN5ew{lU8V+gG31*xtlg>_MH6G^m-w%Vs)d;vV-xT}q{NvUsNj~E0 zRK88_L@5ZRUMySId0zH?@>L8&`YVE z9r`@OKI3`{VCEj=eDh(k@Ws{~ruq3G=J^FO$nQwQ*!V1v0Dt*Y5Or+mcJPZQ$Cvhg zBhP3{5fcQ;MwM=>x{C*51g6?xQL;6eTQ~m@m&xb^ZIr$l75q1(%BVpKsIyZ+lx$FuYn9~9+o&d^8AlSgsMVoEs!X$~L zyZOaf$Mjhic4pEyG=xoejlT6%u6*jcXo83lpyb162-J{)^VTH83PbI^ebi~`^ zX{%`9RHb*Zynlb%L{-_}3Qi6YZW$XO}Nbc(m}mZ=sV!LmZ9f4YhRr_=kKnV846_7YWhf5OoF1B+Tcq z)N$Szp&*c&F39E~bbh(!Qo@^Hm;Y`R9_V)rwxC`JTd!74b>=Cn1atYvZd8)qQi&pg z0_bTknnU3>xomkgy%8|vvjW@V_M=8eZXY4^Tr@?`v&m)@0Fvo*I14HIPAwVmaMDhR z-T)2((nEeOsiwZz-yo-y;6OR+`pRT{?ceD0-43Qvjc1tU_xqj5ovnE6LOVN-{|25B zk8RVubutl;6X-19_t|`nE8pMNZ+sEI70jNlEuce|?p&Z!87BtG9NKg{{L1=(IXYea z40L8_KlyAk--!k7Ij*mERHgrimutbMQ$RwEXSq%qvrpT#;0_)*V@cRXtA8v^*93H1 zl9JK5-hc`T3wW5E3!~JH@;3DIw$jVFPIQMw<|=Fj_m6s?)heH=iRpL&BlQ%^V<~;b zl$2D|kY6~mlAR)gb$^G=REPj#0D*FVPA23L*$o>>$Mnk3K#K0>t$I-&KMCL^$zty4 zgGZ0JL@~)#BHuRT{CK0#93)vqV!8jDT!K#VY&D2_?YAY4J9e`56cWq0AgbjqQ^{)u zCrq19I)ahw&4~UF%x$x&o8=wEoeoIsq>99t>9MwZA%(x$I=*f<-UcCFA4*i8y@=2C zf{`1e);m=3V#WNr=Lj#@@u5+RCTd6^IYiJu>cd8aQuXzcH;yGh8jT;h{qd1~g{+eT z+LM}VLC|+!q`P6sR*p%TOh}=_h!Go3y%=15PO-DbMUK|Mh~`c|fOfM_QE>O=w^I^@ zm=1x*(t6w#fjtTPYrpKQ(qN}@se38g%SU1Be$xic>%*U@dHVknd?%#bFLnMcl^ZS- z?6U7^dsqOdnSR_V<}5$tX)2)qus=%2$95QzjwKP* zNM?$TBF51^v&W7+5V4YaoCKaD@0*{QA%iv8d2d#|zo9E=qA~1t;qG)@4+Z{;NN8_C zZs5&DvSj78OCp(lYi5{!AcxHMj$j~|z5HVqRt1vU2eLQKLeDe_r2r``myV^ zl$~F2AQOf(A@5#wYe)3#Flfzf8{#pzcB`=mlGFA2Zn=-5b;dRC+B=rW!Xd~`kH?($ zX!f!iS#ys-8yhlEf&U}+MecNG6r!Z^ez_l+1F$_lVm|=n8g08&^-x|h#M!BDDO;9O zP3B)^A_}{JG6*DmLSbnK%CWREF?0$sGzzM?jD<>WBer7UMa1>aE#sSv1z(R!zn0t> zs46o?{3wl-gV9tpbEiEC1)$6BMNn`nX-r3rLx)2cp5XV?9 z`unTX({&WAl=1u7VB;j7WFNzQCOIzVO+(k`0*gc=R*KY^vaHtw;>{E_CXt6@1KH~H zK14-qB}toO5ox5&fKv-xWy(J)=tOH^-Q)SvWe%TAG~_KI^4Zb&M7*L;`4>3inb`35 z$C&I%p1~k<9tQ8LCh0ugxvp&;^%Y`0Oirp~du2tgodxu(W;DKR0I;3*U}DY*wxvS<)ksN(?y#4FJNhTZ3%!pajfaM9I6U1(Zq?l1 zibWr1Dn6)n@rxoJsK^^(>F&f+<%G<<=MK1#l)@Z?);T{y%sM9Ur{`qV7=kARf;8s? zD(imw4Z$jpv{#{OS`@)#*2^-SzQa>JOe4M2|if>HD9OT-4{i~y(~pW zpMM6$%3s)gkw9uxAdo^}r88v&NnCMGM>yce08x>v#9GO_i+RARobGy#2B%b2iC2jptYAFiK-7*Ge}QNms~E?*{RSRZQnf&?-qoK?4^6SMd}7`J8ok^gb=CO-@O~A#o>73d{0>0sW-KYD@jrYJf8B6J*!GD^ z^K?W*lS%1Js>vl) zq)$y8@s@&J@t3pFeOL+^Y41$&Uu#5??}y5qsrW=lL*2iIo{j3a=sZdJ+LV#ZI78po zTSMi$87q9fG4F+LK4Jl#Any^(3}3QP$g9Ys?oDJwf6RK_01h9pd_Ku)Kq-YO(Bx9^ z+j?2f$RG5Qrf6$6A80nNGjs5n6^LFm_GFIqgTDrsZ{rNdqtmgH;Xm;0+=DDU@tmzw z)r@h^M+`Zgc@!1-(n_Sj-VF=^y-sy9pnxby?Kj4Y^bX4Az16q8V*_c=if)L-yTr`&I3 zmRn>#<+edaP=3v+~n5Be7rc7Xhn? 
zm=S}~~`DMeo>uRI zW%#tOverM48E!s?AD$%HW~=O5%^*P`&oqLhPqw@U*tSiY)u2>J~#E_)L$T%j)V51o`Jp`BndlFg4Opa5A{n|+`SCZ%)5-_`C` zc)c0VKcG*k_L=M#%(b8_t(o{*qXyLT$Ci!N?CF|?J;Y2kFVYz>;_m~*JXgy%?;| zL=x3kfcMseHqJx{^xttDkiae?f&+S4P&DPEkJi*}@^7bH3&Fj(Dy1F{Bn<)H=q%&tZD#AqSD?eBVY`FNO(1i@WF zS;3;tEN%L~pDZGSTy}jo-U{k?qYxlx64N*BlLlFFm^Cj3b~887y;k?*pLwuC`Bkf|ahecu{z)618E_1m0d+;Mq4 z@FG5J6*K2haQ$2-=K$$ItyQW^*wqA*b16;*`WLUA8o1G7JHE~msOHL`#Zg6Vjk*+E zilKrKxe?|3jr0$p_kkkNX*cjk9P0e5Dl&3qvMr#~?f{3lz%HB)gdLS^!;^py1Z^PD N8$~sR3OTc&{{b(fTU!7C literal 0 HcmV?d00001 diff --git a/openpype/settings/defaults/project_settings/shotgrid.json b/openpype/settings/defaults/project_settings/shotgrid.json new file mode 100644 index 0000000000..83b6f69074 --- /dev/null +++ b/openpype/settings/defaults/project_settings/shotgrid.json @@ -0,0 +1,22 @@ +{ + "shotgrid_project_id": 0, + "shotgrid_server": "", + "event": { + "enabled": false + }, + "fields": { + "asset": { + "type": "sg_asset_type" + }, + "sequence": { + "episode_link": "episode" + }, + "shot": { + "episode_link": "sg_episode", + "sequence_link": "sg_sequence" + }, + "task": { + "step": "step" + } + } +} diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 8cd4114cb0..9d8910689a 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -131,6 +131,12 @@ } } }, + "shotgrid": { + "enabled": false, + "leecher_manager_url": "http://127.0.0.1:3000", + "leecher_backend_url": "http://127.0.0.1:8090", + "shotgrid_settings": {} + }, "kitsu": { "enabled": false, "server": "" @@ -203,4 +209,4 @@ "linux": "" } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index a173e2454f..b2cb2204f4 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -107,6 +107,7 @@ from .enum_entity import ( TaskTypeEnumEntity, DeadlineUrlEnumEntity, AnatomyTemplatesEnumEntity, + ShotgridUrlEnumEntity ) from .list_entity import ListEntity @@ -171,6 +172,7 @@ __all__ = ( "ToolsEnumEntity", "TaskTypeEnumEntity", "DeadlineUrlEnumEntity", + "ShotgridUrlEnumEntity", "AnatomyTemplatesEnumEntity", "ListEntity", diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 92a397afba..3b3dd47e61 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -1,10 +1,7 @@ import copy from .input_entities import InputEntity from .exceptions import EntitySchemaError -from .lib import ( - NOT_SET, - STRING_TYPE -) +from .lib import NOT_SET, STRING_TYPE class BaseEnumEntity(InputEntity): @@ -26,7 +23,7 @@ class BaseEnumEntity(InputEntity): for item in self.enum_items: key = tuple(item.keys())[0] if key in enum_keys: - reason = "Key \"{}\" is more than once in enum items.".format( + reason = 'Key "{}" is more than once in enum items.'.format( key ) raise EntitySchemaError(self, reason) @@ -34,7 +31,7 @@ class BaseEnumEntity(InputEntity): enum_keys.add(key) if not isinstance(key, STRING_TYPE): - reason = "Key \"{}\" has invalid type {}, expected {}.".format( + reason = 'Key "{}" has invalid type {}, expected {}.'.format( key, type(key), STRING_TYPE ) raise EntitySchemaError(self, reason) @@ -59,7 +56,7 @@ class BaseEnumEntity(InputEntity): for item in check_values: if item not in self.valid_keys: raise ValueError( - "{} Invalid value \"{}\". 
Expected one of: {}".format( + '{} Invalid value "{}". Expected one of: {}'.format( self.path, item, self.valid_keys ) ) @@ -84,7 +81,7 @@ class EnumEntity(BaseEnumEntity): self.valid_keys = set(all_keys) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) value_on_not_set = [] if enum_default: if not isinstance(enum_default, list): @@ -109,7 +106,7 @@ class EnumEntity(BaseEnumEntity): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -152,6 +149,7 @@ class HostsEnumEntity(BaseEnumEntity): Host name is not the same as application name. Host name defines implementation instead of application name. """ + schema_types = ["hosts-enum"] all_host_names = [ "aftereffects", @@ -169,7 +167,7 @@ class HostsEnumEntity(BaseEnumEntity): "tvpaint", "unreal", "standalonepublisher", - "webpublisher" + "webpublisher", ] def _item_initialization(self): @@ -210,7 +208,7 @@ class HostsEnumEntity(BaseEnumEntity): self.valid_keys = valid_keys if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: for key in valid_keys: @@ -218,7 +216,7 @@ class HostsEnumEntity(BaseEnumEntity): self.value_on_not_set = key break - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) # GUI attribute self.placeholder = self.schema_data.get("placeholder") @@ -226,14 +224,10 @@ class HostsEnumEntity(BaseEnumEntity): def schema_validations(self): if self.hosts_filter: enum_len = len(self.enum_items) - if ( - enum_len == 0 - or (enum_len == 1 and self.use_empty_value) - ): - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) + if enum_len == 0 or (enum_len == 1 and self.use_empty_value): + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) reason = ( "All host names were removed after applying" " host filters. 
{}" @@ -246,24 +240,25 @@ class HostsEnumEntity(BaseEnumEntity): invalid_filters.add(item) if invalid_filters: - joined_filters = ", ".join([ - '"{}"'.format(item) - for item in self.hosts_filter - ]) - expected_hosts = ", ".join([ - '"{}"'.format(item) - for item in self.all_host_names - ]) - self.log.warning(( - "Host filters containt invalid host names:" - " \"{}\" Expected values are {}" - ).format(joined_filters, expected_hosts)) + joined_filters = ", ".join( + ['"{}"'.format(item) for item in self.hosts_filter] + ) + expected_hosts = ", ".join( + ['"{}"'.format(item) for item in self.all_host_names] + ) + self.log.warning( + ( + "Host filters containt invalid host names:" + ' "{}" Expected values are {}' + ).format(joined_filters, expected_hosts) + ) super(HostsEnumEntity, self).schema_validations() class AppsEnumEntity(BaseEnumEntity): """Enum of applications for project anatomy attributes.""" + schema_types = ["apps-enum"] def _item_initialization(self): @@ -271,7 +266,7 @@ class AppsEnumEntity(BaseEnumEntity): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -352,7 +347,7 @@ class ToolsEnumEntity(BaseEnumEntity): self.value_on_not_set = [] self.enum_items = [] self.valid_keys = set() - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.placeholder = None def _get_enum_values(self): @@ -409,10 +404,10 @@ class TaskTypeEnumEntity(BaseEnumEntity): def _item_initialization(self): self.multiselection = self.schema_data.get("multiselection", True) if self.multiselection: - self.valid_value_types = (list, ) + self.valid_value_types = (list,) self.value_on_not_set = [] else: - self.valid_value_types = (STRING_TYPE, ) + self.valid_value_types = (STRING_TYPE,) self.value_on_not_set = "" self.enum_items = [] @@ -507,7 +502,8 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): enum_items_list = [] for server_name, url_entity in deadline_urls_entity.items(): enum_items_list.append( - {server_name: "{}: {}".format(server_name, url_entity.value)}) + {server_name: "{}: {}".format(server_name, url_entity.value)} + ) valid_keys.add(server_name) return enum_items_list, valid_keys @@ -530,6 +526,50 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): self._current_value = tuple(self.valid_keys)[0] +class ShotgridUrlEnumEntity(BaseEnumEntity): + schema_types = ["shotgrid_url-enum"] + + def _item_initialization(self): + self.multiselection = False + + self.enum_items = [] + self.valid_keys = set() + + self.valid_value_types = (STRING_TYPE,) + self.value_on_not_set = "" + + # GUI attribute + self.placeholder = self.schema_data.get("placeholder") + + def _get_enum_values(self): + shotgrid_settings = self.get_entity_from_path( + "system_settings/modules/shotgrid/shotgrid_settings" + ) + + valid_keys = set() + enum_items_list = [] + for server_name, settings in shotgrid_settings.items(): + enum_items_list.append( + { + server_name: "{}: {}".format( + server_name, settings["shotgrid_url"].value + ) + } + ) + valid_keys.add(server_name) + return enum_items_list, valid_keys + + def set_override_state(self, *args, **kwargs): + super(ShotgridUrlEnumEntity, self).set_override_state(*args, **kwargs) + + self.enum_items, self.valid_keys = self._get_enum_values() + if not self.valid_keys: + self._current_value = "" + + elif self._current_value not in self.valid_keys: + self._current_value = tuple(self.valid_keys)[0] + + class 
AnatomyTemplatesEnumEntity(BaseEnumEntity): schema_types = ["anatomy-templates-enum"] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 6c07209de3..80b1baad1b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -62,6 +62,10 @@ "type": "schema", "name": "schema_project_ftrack" }, + { + "type": "schema", + "name": "schema_project_shotgrid" + }, { "type": "schema", "name": "schema_project_kitsu" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json new file mode 100644 index 0000000000..4faeca89f3 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_shotgrid.json @@ -0,0 +1,98 @@ +{ + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "is_file": true, + "children": [ + { + "type": "number", + "key": "shotgrid_project_id", + "label": "Shotgrid project id" + }, + { + "type": "shotgrid_url-enum", + "key": "shotgrid_server", + "label": "Shotgrid Server" + }, + { + "type": "dict", + "key": "event", + "label": "Event Handler", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, + { + "type": "dict", + "key": "fields", + "label": "Fields Template", + "collapsible": true, + "children": [ + { + "type": "dict", + "key": "asset", + "label": "Asset", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "type", + "label": "Asset Type" + } + ] + }, + { + "type": "dict", + "key": "sequence", + "label": "Sequence", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + } + ] + }, + { + "type": "dict", + "key": "shot", + "label": "Shot", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "episode_link", + "label": "Episode link" + }, + { + "type": "text", + "key": "sequence_link", + "label": "Sequence link" + } + ] + }, + { + "type": "dict", + "key": "task", + "label": "Task", + "collapsible": true, + "children": [ + { + "type": "text", + "key": "step", + "label": "Step link" + } + ] + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index 484fbf9d07..a4b28f47bc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -13,6 +13,9 @@ { "ftrackreview": "Add review to Ftrack" }, + { + "shotgridreview": "Add review to Shotgrid" + }, { "delete": "Delete output" }, diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index d22b9016a7..952b38040c 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -48,6 +48,60 @@ "type": "schema", "name": "schema_kitsu" }, + { + "type": "dict", + "key": "shotgrid", + "label": "Shotgrid", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": 
"enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "leecher_manager_url", + "label": "Shotgrid Leecher Manager URL" + }, + { + "type": "text", + "key": "leecher_backend_url", + "label": "Shotgrid Leecher Backend URL" + }, + { + "type": "boolean", + "key": "filter_projects_by_login", + "label": "Filter projects by SG login" + }, + { + "type": "dict-modifiable", + "key": "shotgrid_settings", + "label": "Shotgrid Servers", + "object_type": { + "type": "dict", + "children": [ + { + "key": "shotgrid_url", + "label": "Server URL", + "type": "text" + }, + { + "key": "shotgrid_script_name", + "label": "Script Name", + "type": "text" + }, + { + "key": "shotgrid_script_key", + "label": "Script api key", + "type": "text" + } + ] + } + } + ] + }, { "type": "dict", "key": "timers_manager", diff --git a/poetry.lock b/poetry.lock index 7221e191ff..0033bc0d73 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1375,6 +1375,21 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "shotgun-api3" +version = "3.3.3" +description = "Shotgun Python API" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/shotgunsoftware/python-api.git" +reference = "v3.3.3" +resolved_reference = "b9f066c0edbea6e0733242e18f32f75489064840" + [[package]] name = "six" version = "1.16.0" @@ -2820,6 +2835,7 @@ semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, ] +shotgun-api3 = [] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, diff --git a/pyproject.toml b/pyproject.toml index bd5d3ad89d..306c7206fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ clique = "1.6.*" Click = "^7" dnspython = "^2.1.0" ftrack-python-api = "2.0.*" +shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} gazu = "^0.8.28" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) 
jsonschema = "^2.6.0" From 0353ec38f3593c9e81a9b82a735ab7868e406d9f Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 1 Jul 2022 11:06:44 +0100 Subject: [PATCH 0084/2550] Generalization of the basis of the origin platform in the JSON layout --- .../blender/plugins/publish/extract_layout.py | 14 ++++- .../maya/plugins/publish/extract_layout.py | 50 +++++++++++++-- .../plugins/load/load_alembic_staticmesh.py | 6 +- .../hosts/unreal/plugins/load/load_layout.py | 63 ++++++++++--------- 4 files changed, 97 insertions(+), 36 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index 8ecc78a2c6..f987dffe05 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -193,7 +193,7 @@ class ExtractLayout(openpype.api.Extractor): "rotation": { "x": asset.rotation_euler.x, "y": asset.rotation_euler.y, - "z": asset.rotation_euler.z, + "z": asset.rotation_euler.z }, "scale": { "x": asset.scale.x, @@ -202,6 +202,18 @@ class ExtractLayout(openpype.api.Extractor): } } + json_element["transform_matrix"] = [] + + for row in list(asset.matrix_world.transposed()): + json_element["transform_matrix"].append(list(row)) + + json_element["basis"] = [ + [1, 0, 0, 0], + [0, -1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1] + ] + # Extract the animation as well if family == "rig": f, n = self._export_animation( diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 4ae99f1052..7eb6a64e6d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -1,7 +1,9 @@ +import math import os import json from maya import cmds +from maya.api import OpenMaya as om from bson.objectid import ObjectId @@ -60,7 +62,7 @@ class ExtractLayout(openpype.api.Extractor): } loc = cmds.xform(asset, query=True, translation=True) - rot = cmds.xform(asset, query=True, rotation=True) + rot = cmds.xform(asset, query=True, rotation=True, euler=True) scl = cmds.xform(asset, query=True, relative=True, scale=True) json_element["transform"] = { @@ -70,9 +72,9 @@ class ExtractLayout(openpype.api.Extractor): "z": loc[2] }, "rotation": { - "x": rot[0], - "y": rot[1], - "z": rot[2] + "x": math.radians(rot[0]), + "y": math.radians(rot[1]), + "z": math.radians(rot[2]) }, "scale": { "x": scl[0], @@ -81,6 +83,46 @@ class ExtractLayout(openpype.api.Extractor): } } + row_length = 4 + t_matrix_list = cmds.xform(asset, query=True, matrix=True) + + transform_mm = om.MMatrix(t_matrix_list) + transform = om.MTransformationMatrix(transform_mm) + + transform.scaleBy([1.0, 1.0, -1.0], om.MSpace.kWorld) + transform.rotateBy( + om.MEulerRotation(math.radians(-90), 0, 0), om.MSpace.kWorld) + + t_matrix_list = list(transform.asMatrix()) + + t_matrix = [] + for i in range(0, len(t_matrix_list), row_length): + t_matrix.append(t_matrix_list[i:i + row_length]) + + json_element["transform_matrix"] = [] + for row in t_matrix: + json_element["transform_matrix"].append(list(row)) + + basis_list = [ + 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, -1, 0, + 0, 0, 0, 1 + ] + + basis_mm = om.MMatrix(basis_list) + basis = om.MTransformationMatrix(basis_mm) + + b_matrix_list = list(basis.asMatrix()) + b_matrix = [] + + for i in range(0, len(b_matrix_list), row_length): + b_matrix.append(b_matrix_list[i:i + row_length]) + + json_element["basis"] = [] + for row in b_matrix: + 
json_element["basis"].append(list(row)) + json_data.append(json_element) json_filename = "{}.json".format(instance.name) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 691971e02f..42abbda80f 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -26,9 +26,9 @@ class StaticMeshAlembicLoader(plugin.Loader): sm_settings = unreal.AbcStaticMeshSettings() conversion_settings = unreal.AbcConversionSettings( preset=unreal.AbcConversionPreset.CUSTOM, - flip_u=False, flip_v=True, - rotation=[90.0, 0.0, 0.0], - scale=[1.0, -1.0, 1.0]) + flip_u=False, flip_v=False, + rotation=[0.0, 0.0, 0.0], + scale=[1.0, 1.0, 1.0]) task.set_editor_property('filename', filename) task.set_editor_property('destination_path', asset_dir) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index fb8f46dad1..361c3684fa 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- """Loader for layouts.""" -import os import json from pathlib import Path @@ -170,9 +169,29 @@ class LayoutLoader(plugin.Loader): hid_section.set_row_index(index) hid_section.set_level_names(maps) - @staticmethod + def _transform_from_basis(self, transform, basis): + """Transform a transform from a basis to a new basis.""" + # Get the basis matrix + basis_matrix = unreal.Matrix( + basis[0], + basis[1], + basis[2], + basis[3] + ) + transform_matrix = unreal.Matrix( + transform[0], + transform[1], + transform[2], + transform[3] + ) + + new_transform = ( + basis_matrix.get_inverse() * transform_matrix * basis_matrix) + + return new_transform.transform() + def _process_family( - assets, class_name, transform, sequence, inst_name=None + self, assets, class_name, transform, basis, sequence, inst_name=None ): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -182,30 +201,12 @@ class LayoutLoader(plugin.Loader): for asset in assets: obj = ar.get_asset_by_object_path(asset).get_asset() if obj.get_class().get_name() == class_name: + t = self._transform_from_basis(transform, basis) actor = EditorLevelLibrary.spawn_actor_from_object( - obj, - transform.get('translation') + obj, t.translation ) - if inst_name: - try: - # Rename method leads to crash - # actor.rename(name=inst_name) - - # The label works, although it make it slightly more - # complicated to check for the names, as we need to - # loop through all the actors in the level - actor.set_actor_label(inst_name) - except Exception as e: - print(e) - actor.set_actor_rotation(unreal.Rotator( - ( - transform.get('rotation').get('x')), - ( - transform.get('rotation').get('z')), - -( - transform.get('rotation').get('y')), - ), False) - actor.set_actor_scale3d(transform.get('scale')) + actor.set_actor_rotation(t.rotation.rotator(), False) + actor.set_actor_scale3d(t.scale3d) if class_name == 'SkeletalMesh': skm_comp = actor.get_editor_property( @@ -519,17 +520,23 @@ class LayoutLoader(plugin.Loader): item.get('reference_abc') == representation)] for instance in instances: - transform = instance.get('transform') + # transform = instance.get('transform') + transform = instance.get('transform_matrix') + basis = instance.get('basis') inst = instance.get('instance_name') actors = [] if family == 'model': actors, _ = self._process_family( - assets, 'StaticMesh', 
transform, sequence, inst) + assets, 'StaticMesh', transform, basis, + sequence, inst + ) elif family == 'rig': actors, bindings = self._process_family( - assets, 'SkeletalMesh', transform, sequence, inst) + assets, 'SkeletalMesh', transform, basis, + sequence, inst + ) actors_dict[inst] = actors bindings_dict[inst] = bindings From 1bf95ddce5587c25258c9a7a7b07cf3a555b745b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 6 Jul 2022 17:00:35 +0100 Subject: [PATCH 0085/2550] Fix Maya transform --- openpype/hosts/maya/plugins/publish/extract_layout.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 7eb6a64e6d..991217684a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -89,9 +89,12 @@ class ExtractLayout(openpype.api.Extractor): transform_mm = om.MMatrix(t_matrix_list) transform = om.MTransformationMatrix(transform_mm) - transform.scaleBy([1.0, 1.0, -1.0], om.MSpace.kWorld) + t = transform.translation(om.MSpace.kWorld) + t = om.MVector(t.x, t.z, -t.y) + transform.setTranslation(t, om.MSpace.kWorld) transform.rotateBy( om.MEulerRotation(math.radians(-90), 0, 0), om.MSpace.kWorld) + transform.scaleBy([1.0, 1.0, -1.0], om.MSpace.kObject) t_matrix_list = list(transform.asMatrix()) From 3555143f06e61dd68e072f192f76f23bff40e5cc Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 11 Jul 2022 17:15:42 +0100 Subject: [PATCH 0086/2550] Load layouts without sequences. --- .../hosts/unreal/plugins/load/load_layout.py | 431 +++++++++--------- 1 file changed, 227 insertions(+), 204 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 361c3684fa..0dbaf0880a 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -215,16 +215,17 @@ class LayoutLoader(plugin.Loader): actors.append(actor) - binding = None - for p in sequence.get_possessables(): - if p.get_name() == actor.get_name(): - binding = p - break + if sequence: + binding = None + for p in sequence.get_possessables(): + if p.get_name() == actor.get_name(): + binding = p + break - if not binding: - binding = sequence.add_possessable(actor) + if not binding: + binding = sequence.add_possessable(actor) - bindings.append(binding) + bindings.append(binding) return actors, bindings @@ -312,49 +313,50 @@ class LayoutLoader(plugin.Loader): actor.skeletal_mesh_component.animation_data.set_editor_property( 'anim_to_play', animation) - # Add animation to the sequencer - bindings = bindings_dict.get(instance_name) + if sequence: + # Add animation to the sequencer + bindings = bindings_dict.get(instance_name) - ar = unreal.AssetRegistryHelpers.get_asset_registry() + ar = unreal.AssetRegistryHelpers.get_asset_registry() - for binding in bindings: - tracks = binding.get_tracks() - track = None - track = tracks[0] if tracks else binding.add_track( - unreal.MovieSceneSkeletalAnimationTrack) + for binding in bindings: + tracks = binding.get_tracks() + track = None + track = tracks[0] if tracks else binding.add_track( + unreal.MovieSceneSkeletalAnimationTrack) - sections = track.get_sections() - section = None - if not sections: - section = track.add_section() - else: - section = sections[0] + sections = track.get_sections() + section = None + if not sections: + section = track.add_section() + else: 
+ section = sections[0] + sec_params = section.get_editor_property('params') + curr_anim = sec_params.get_editor_property('animation') + + if curr_anim: + # Checks if the animation path has a container. + # If it does, it means that the animation is + # already in the sequencer. + anim_path = str(Path( + curr_anim.get_path_name()).parent + ).replace('\\', '/') + + _filter = unreal.ARFilter( + class_names=["AssetContainer"], + package_paths=[anim_path], + recursive_paths=False) + containers = ar.get_assets(_filter) + + if len(containers) > 0: + return + + section.set_range( + sequence.get_playback_start(), + sequence.get_playback_end()) sec_params = section.get_editor_property('params') - curr_anim = sec_params.get_editor_property('animation') - - if curr_anim: - # Checks if the animation path has a container. - # If it does, it means that the animation is already - # in the sequencer. - anim_path = str(Path( - curr_anim.get_path_name()).parent - ).replace('\\', '/') - - _filter = unreal.ARFilter( - class_names=["AssetContainer"], - package_paths=[anim_path], - recursive_paths=False) - containers = ar.get_assets(_filter) - - if len(containers) > 0: - return - - section.set_range( - sequence.get_playback_start(), - sequence.get_playback_end()) - sec_params = section.get_editor_property('params') - sec_params.set_editor_property('animation', animation) + sec_params.set_editor_property('animation', animation) @staticmethod def _generate_sequence(h, h_dir): @@ -617,6 +619,9 @@ class LayoutLoader(plugin.Loader): Returns: list(str): list of container content """ + # TODO: get option from OpenPype settings + create_sequences = False + # Create directory for asset and avalon container hierarchy = context.get('asset').get('data').get('parents') root = self.ASSET_ROOT @@ -637,85 +642,88 @@ class LayoutLoader(plugin.Loader): EditorAssetLibrary.make_directory(asset_dir) - # Create map for the shot, and create hierarchy of map. If the maps - # already exist, we will use them. master_level = None - if hierarchy: - h_dir = hierarchy_dir_list[0] - h_asset = hierarchy[0] - master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" - if not EditorAssetLibrary.does_asset_exist(master_level): - EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") + shot = None + sequences = [] level = f"{asset_dir}/{asset}_map.{asset}_map" EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") - if master_level: - EditorLevelLibrary.load_level(master_level) - EditorLevelUtils.add_level_to_world( - EditorLevelLibrary.get_editor_world(), - level, - unreal.LevelStreamingDynamic + if create_sequences: + # Create map for the shot, and create hierarchy of map. If the maps + # already exist, we will use them. + if hierarchy: + h_dir = hierarchy_dir_list[0] + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + if not EditorAssetLibrary.does_asset_exist(master_level): + EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") + + if master_level: + EditorLevelLibrary.load_level(master_level) + EditorLevelUtils.add_level_to_world( + EditorLevelLibrary.get_editor_world(), + level, + unreal.LevelStreamingDynamic + ) + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(level) + + # Get all the sequences in the hierarchy. It will create them, if + # they don't exist. 
+ frame_ranges = [] + for (h_dir, h) in zip(hierarchy_dir_list, hierarchy): + root_content = EditorAssetLibrary.list_assets( + h_dir, recursive=False, include_folder=False) + + existing_sequences = [ + EditorAssetLibrary.find_asset_data(asset) + for asset in root_content + if EditorAssetLibrary.find_asset_data( + asset).get_class().get_name() == 'LevelSequence' + ] + + if not existing_sequences: + sequence, frame_range = self._generate_sequence(h, h_dir) + + sequences.append(sequence) + frame_ranges.append(frame_range) + else: + for e in existing_sequences: + sequences.append(e.get_asset()) + frame_ranges.append(( + e.get_asset().get_playback_start(), + e.get_asset().get_playback_end())) + + shot = tools.create_asset( + asset_name=asset, + package_path=asset_dir, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew() ) - EditorLevelLibrary.save_all_dirty_levels() + + # sequences and frame_ranges have the same length + for i in range(0, len(sequences) - 1): + self._set_sequence_hierarchy( + sequences[i], sequences[i + 1], + frame_ranges[i][1], + frame_ranges[i + 1][0], frame_ranges[i + 1][1], + [level]) + + data = self._get_data(asset) + shot.set_display_rate( + unreal.FrameRate(data.get("fps"), 1.0)) + shot.set_playback_start(0) + shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) + if sequences: + self._set_sequence_hierarchy( + sequences[-1], shot, + frame_ranges[-1][1], + data.get('clipIn'), data.get('clipOut'), + [level]) + EditorLevelLibrary.load_level(level) - # Get all the sequences in the hierarchy. It will create them, if - # they don't exist. - sequences = [] - frame_ranges = [] - for (h_dir, h) in zip(hierarchy_dir_list, hierarchy): - root_content = EditorAssetLibrary.list_assets( - h_dir, recursive=False, include_folder=False) - - existing_sequences = [ - EditorAssetLibrary.find_asset_data(asset) - for asset in root_content - if EditorAssetLibrary.find_asset_data( - asset).get_class().get_name() == 'LevelSequence' - ] - - if not existing_sequences: - sequence, frame_range = self._generate_sequence(h, h_dir) - - sequences.append(sequence) - frame_ranges.append(frame_range) - else: - for e in existing_sequences: - sequences.append(e.get_asset()) - frame_ranges.append(( - e.get_asset().get_playback_start(), - e.get_asset().get_playback_end())) - - shot = tools.create_asset( - asset_name=asset, - package_path=asset_dir, - asset_class=unreal.LevelSequence, - factory=unreal.LevelSequenceFactoryNew() - ) - - # sequences and frame_ranges have the same length - for i in range(0, len(sequences) - 1): - self._set_sequence_hierarchy( - sequences[i], sequences[i + 1], - frame_ranges[i][1], - frame_ranges[i + 1][0], frame_ranges[i + 1][1], - [level]) - - data = self._get_data(asset) - shot.set_display_rate( - unreal.FrameRate(data.get("fps"), 1.0)) - shot.set_playback_start(0) - shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) - if sequences: - self._set_sequence_hierarchy( - sequences[-1], shot, - frame_ranges[-1][1], - data.get('clipIn'), data.get('clipOut'), - [level]) - - EditorLevelLibrary.load_level(level) - loaded_assets = self._process(self.fname, asset_dir, shot) for s in sequences: @@ -755,27 +763,31 @@ class LayoutLoader(plugin.Loader): return asset_content def update(self, container, representation): + # TODO: get option from OpenPype settings + create_sequences = False + ar = unreal.AssetRegistryHelpers.get_asset_registry() root = "/Game/OpenPype" asset_dir = container.get('namespace') - context = 
representation.get("context") - hierarchy = context.get('hierarchy').split("/") - h_dir = f"{root}/{hierarchy[0]}" - h_asset = hierarchy[0] - master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + sequence = None + master_level = None - # # Create a temporary level to delete the layout level. - # EditorLevelLibrary.save_all_dirty_levels() - # EditorAssetLibrary.make_directory(f"{root}/tmp") - # tmp_level = f"{root}/tmp/temp_map" - # if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"): - # EditorLevelLibrary.new_level(tmp_level) - # else: - # EditorLevelLibrary.load_level(tmp_level) + if create_sequences: + hierarchy = context.get('hierarchy').split("/") + h_dir = f"{root}/{hierarchy[0]}" + h_asset = hierarchy[0] + master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" + + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[asset_dir], + recursive_paths=False) + sequences = ar.get_assets(filter) + sequence = sequences[0].get_asset() # Get layout level filter = unreal.ARFilter( @@ -783,11 +795,6 @@ class LayoutLoader(plugin.Loader): package_paths=[asset_dir], recursive_paths=False) levels = ar.get_assets(filter) - filter = unreal.ARFilter( - class_names=["LevelSequence"], - package_paths=[asset_dir], - recursive_paths=False) - sequences = ar.get_assets(filter) layout_level = levels[0].get_editor_property('object_path') @@ -799,14 +806,14 @@ class LayoutLoader(plugin.Loader): for actor in actors: unreal.EditorLevelLibrary.destroy_actor(actor) - EditorLevelLibrary.save_current_level() + if create_sequences: + EditorLevelLibrary.save_current_level() EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/") source_path = get_representation_path(representation) - loaded_assets = self._process( - source_path, asset_dir, sequences[0].get_asset()) + loaded_assets = self._process(source_path, asset_dir, sequence) data = { "representation": str(representation["_id"]), @@ -824,13 +831,18 @@ class LayoutLoader(plugin.Loader): for a in asset_content: EditorAssetLibrary.save_asset(a) - EditorLevelLibrary.load_level(master_level) + if master_level: + EditorLevelLibrary.load_level(master_level) def remove(self, container): """ Delete the layout. First, check if the assets loaded with the layout are used by other layouts. If not, delete the assets. """ + # TODO: get option from OpenPype settings + create_sequences = False + + root = "/Game/OpenPype" path = Path(container.get("namespace")) containers = unreal_pipeline.ls() @@ -841,7 +853,7 @@ class LayoutLoader(plugin.Loader): # Check if the assets have been loaded by other layouts, and deletes # them if they haven't. - for asset in container.get('loaded_assets'): + for asset in eval(container.get('loaded_assets')): layouts = [ lc for lc in layout_containers if asset in lc.get('loaded_assets')] @@ -849,71 +861,83 @@ class LayoutLoader(plugin.Loader): if not layouts: EditorAssetLibrary.delete_directory(str(Path(asset).parent)) - # Remove the Level Sequence from the parent. - # We need to traverse the hierarchy from the master sequence to find - # the level sequence. 
- root = "/Game/OpenPype" - namespace = container.get('namespace').replace(f"{root}/", "") - ms_asset = namespace.split('/')[0] - ar = unreal.AssetRegistryHelpers.get_asset_registry() - _filter = unreal.ARFilter( - class_names=["LevelSequence"], - package_paths=[f"{root}/{ms_asset}"], - recursive_paths=False) - sequences = ar.get_assets(_filter) - master_sequence = sequences[0].get_asset() - _filter = unreal.ARFilter( - class_names=["World"], - package_paths=[f"{root}/{ms_asset}"], - recursive_paths=False) - levels = ar.get_assets(_filter) - master_level = levels[0].get_editor_property('object_path') + # Delete the parent folder if there aren't any more layouts in it. + asset_content = EditorAssetLibrary.list_assets( + str(Path(asset).parent.parent), recursive=False, include_folder=True + ) - sequences = [master_sequence] + if len(asset_content) == 0: + EditorAssetLibrary.delete_directory(str(Path(asset).parent.parent)) - parent = None - for s in sequences: - tracks = s.get_master_tracks() - subscene_track = None - visibility_track = None - for t in tracks: - if t.get_class() == unreal.MovieSceneSubTrack.static_class(): - subscene_track = t - if (t.get_class() == - unreal.MovieSceneLevelVisibilityTrack.static_class()): - visibility_track = t - if subscene_track: - sections = subscene_track.get_sections() - for ss in sections: - if ss.get_sequence().get_name() == container.get('asset'): - parent = s - subscene_track.remove_section(ss) - break - sequences.append(ss.get_sequence()) - # Update subscenes indexes. - i = 0 - for ss in sections: - ss.set_row_index(i) - i += 1 + master_sequence = None + master_level = None + sequences = [] - if visibility_track: - sections = visibility_track.get_sections() - for ss in sections: - if (unreal.Name(f"{container.get('asset')}_map") - in ss.get_level_names()): - visibility_track.remove_section(ss) - # Update visibility sections indexes. - i = -1 - prev_name = [] - for ss in sections: - if prev_name != ss.get_level_names(): + if create_sequences: + # Remove the Level Sequence from the parent. + # We need to traverse the hierarchy from the master sequence to find + # the level sequence. + namespace = container.get('namespace').replace(f"{root}/", "") + ms_asset = namespace.split('/')[0] + ar = unreal.AssetRegistryHelpers.get_asset_registry() + _filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + sequences = ar.get_assets(_filter) + master_sequence = sequences[0].get_asset() + _filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"{root}/{ms_asset}"], + recursive_paths=False) + levels = ar.get_assets(_filter) + master_level = levels[0].get_editor_property('object_path') + + sequences = [master_sequence] + + parent = None + for s in sequences: + tracks = s.get_master_tracks() + subscene_track = None + visibility_track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + subscene_track = t + if (t.get_class() == + unreal.MovieSceneLevelVisibilityTrack.static_class()): + visibility_track = t + if subscene_track: + sections = subscene_track.get_sections() + for ss in sections: + if ss.get_sequence().get_name() == container.get('asset'): + parent = s + subscene_track.remove_section(ss) + break + sequences.append(ss.get_sequence()) + # Update subscenes indexes. 
+ i = 0 + for ss in sections: + ss.set_row_index(i) i += 1 - ss.set_row_index(i) - prev_name = ss.get_level_names() - if parent: - break - assert parent, "Could not find the parent sequence" + if visibility_track: + sections = visibility_track.get_sections() + for ss in sections: + if (unreal.Name(f"{container.get('asset')}_map") + in ss.get_level_names()): + visibility_track.remove_section(ss) + # Update visibility sections indexes. + i = -1 + prev_name = [] + for ss in sections: + if prev_name != ss.get_level_names(): + i += 1 + ss.set_row_index(i) + prev_name = ss.get_level_names() + if parent: + break + + assert parent, "Could not find the parent sequence" # Create a temporary level to delete the layout level. EditorLevelLibrary.save_all_dirty_levels() @@ -927,10 +951,9 @@ class LayoutLoader(plugin.Loader): # Delete the layout directory. EditorAssetLibrary.delete_directory(str(path)) - EditorLevelLibrary.load_level(master_level) - EditorAssetLibrary.delete_directory(f"{root}/tmp") - - EditorLevelLibrary.save_current_level() + if create_sequences: + EditorLevelLibrary.load_level(master_level) + EditorAssetLibrary.delete_directory(f"{root}/tmp") # Delete the parent folder if there aren't any more layouts in it. asset_content = EditorAssetLibrary.list_assets( From ef35c17ba484de54ce43998a437e7775e1d5b557 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 11 Jul 2022 17:19:54 +0100 Subject: [PATCH 0087/2550] Hound fixes --- .../hosts/unreal/plugins/load/load_layout.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 0dbaf0880a..7c8f78bd9a 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -9,7 +9,8 @@ from unreal import EditorLevelLibrary from unreal import EditorLevelUtils from unreal import AssetToolsHelpers from unreal import FBXImportType -from unreal import MathLibrary as umath +from unreal import MovieSceneLevelVisibilityTrack +from unreal import MovieSceneSubTrack from bson.objectid import ObjectId @@ -650,8 +651,8 @@ class LayoutLoader(plugin.Loader): EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") if create_sequences: - # Create map for the shot, and create hierarchy of map. If the maps - # already exist, we will use them. + # Create map for the shot, and create hierarchy of map. If the + # maps already exist, we will use them. if hierarchy: h_dir = hierarchy_dir_list[0] h_asset = hierarchy[0] @@ -861,13 +862,16 @@ class LayoutLoader(plugin.Loader): if not layouts: EditorAssetLibrary.delete_directory(str(Path(asset).parent)) - # Delete the parent folder if there aren't any more layouts in it. + # Delete the parent folder if there aren't any more + # layouts in it. asset_content = EditorAssetLibrary.list_assets( - str(Path(asset).parent.parent), recursive=False, include_folder=True + str(Path(asset).parent.parent), recursive=False, + include_folder=True ) if len(asset_content) == 0: - EditorAssetLibrary.delete_directory(str(Path(asset).parent.parent)) + EditorAssetLibrary.delete_directory( + str(Path(asset).parent.parent)) master_sequence = None master_level = None @@ -875,8 +879,8 @@ class LayoutLoader(plugin.Loader): if create_sequences: # Remove the Level Sequence from the parent. - # We need to traverse the hierarchy from the master sequence to find - # the level sequence. 
+ # We need to traverse the hierarchy from the master sequence to + # find the level sequence. namespace = container.get('namespace').replace(f"{root}/", "") ms_asset = namespace.split('/')[0] ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -901,15 +905,16 @@ class LayoutLoader(plugin.Loader): subscene_track = None visibility_track = None for t in tracks: - if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + if t.get_class() == MovieSceneSubTrack.static_class(): subscene_track = t if (t.get_class() == - unreal.MovieSceneLevelVisibilityTrack.static_class()): + MovieSceneLevelVisibilityTrack.static_class()): visibility_track = t if subscene_track: sections = subscene_track.get_sections() for ss in sections: - if ss.get_sequence().get_name() == container.get('asset'): + if (ss.get_sequence().get_name() == + container.get('asset')): parent = s subscene_track.remove_section(ss) break From 5de5a37475f4fbab6d02d7a83776adb8646dbfda Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 11 Jul 2022 17:21:10 +0100 Subject: [PATCH 0088/2550] More hound fixes --- openpype/hosts/unreal/plugins/load/load_layout.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 7c8f78bd9a..f600a131c5 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -879,7 +879,7 @@ class LayoutLoader(plugin.Loader): if create_sequences: # Remove the Level Sequence from the parent. - # We need to traverse the hierarchy from the master sequence to + # We need to traverse the hierarchy from the master sequence to # find the level sequence. namespace = container.get('namespace').replace(f"{root}/", "") ms_asset = namespace.split('/')[0] From a2361d8283f55e91e7e5c2a9c30a4d8cea4cd7fc Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 11 Jul 2022 17:27:37 +0100 Subject: [PATCH 0089/2550] Set conversion settings for abc Skeletal Meshes --- .../plugins/load/load_alembic_skeletalmesh.py | 61 +++++++++---------- 1 file changed, 30 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py index b2c3889f68..d51a3ae0af 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py @@ -20,6 +20,34 @@ class SkeletalMeshAlembicLoader(plugin.Loader): icon = "cube" color = "orange" + def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + sm_settings = unreal.AbcStaticMeshSettings() + conversion_settings = unreal.AbcConversionSettings( + preset=unreal.AbcConversionPreset.CUSTOM, + flip_u=False, flip_v=False, + rotation=[0.0, 0.0, 0.0], + scale=[1.0, 1.0, 1.0]) + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options.set_editor_property( + 'import_type', unreal.AlembicImportType.SKELETAL) + + options.static_mesh_settings = sm_settings + options.conversion_settings = conversion_settings + task.options = options + + return task + def load(self, context, name, namespace, data): """Load and containerise representation into Content Browser. @@ -59,22 +87,8 @@ class SkeletalMeshAlembicLoader(plugin.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + task = self.get_task(self.fname, asset_dir, asset_name, False) - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.SKELETAL) - - task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 # Create Asset Container @@ -110,23 +124,8 @@ class SkeletalMeshAlembicLoader(plugin.Loader): source_path = get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) - task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.SKELETAL) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) container_path = "{}/{}".format(container["namespace"], From 46e2f629299a66b388d449d3ec29b2e296307348 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 11 Jul 2022 17:34:56 +0100 Subject: [PATCH 0090/2550] Avoid overwriting and reloading of loaded assets --- .../plugins/load/load_alembic_skeletalmesh.py | 16 +++++++++------- .../plugins/load/load_alembic_staticmesh.py | 16 +++++++++------- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py index d51a3ae0af..9fe5f3ab4b 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py @@ -78,22 +78,24 @@ class SkeletalMeshAlembicLoader(plugin.Loader): asset_name = "{}_{}".format(asset, name) else: asset_name = "{}".format(name) + version = context.get('version').get('name') tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset, name), suffix="") + f"{root}/{asset}/{name}_v{version:03d}", suffix="") container_name += suffix - unreal.EditorAssetLibrary.make_directory(asset_dir) + if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + unreal.EditorAssetLibrary.make_directory(asset_dir) - task = self.get_task(self.fname, asset_dir, asset_name, False) + task = self.get_task(self.fname, asset_dir, asset_name, False) - unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - # Create Asset Container - unreal_pipeline.create_container( - container=container_name, path=asset_dir) + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) data = { "schema": "openpype:container-2.0", diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 42abbda80f..50e498dbb0 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -80,22 +80,24 @@ class StaticMeshAlembicLoader(plugin.Loader): asset_name = "{}_{}".format(asset, name) else: asset_name = "{}".format(name) + version = context.get('version').get('name') tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset, name), suffix="") + f"{root}/{asset}/{name}_v{version:03d}", suffix="") container_name += suffix - unreal.EditorAssetLibrary.make_directory(asset_dir) + if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + unreal.EditorAssetLibrary.make_directory(asset_dir) - task = self.get_task(self.fname, asset_dir, asset_name, False) + task = self.get_task(self.fname, asset_dir, asset_name, False) - unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - # Create Asset Container - unreal_pipeline.create_container( - container=container_name, 
path=asset_dir) + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) data = { "schema": "openpype:container-2.0", From 2e8e4a0ee9724f0345038f1b9a2d78954ecf476d Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 12 Jul 2022 16:31:03 +0100 Subject: [PATCH 0091/2550] Added setting to generate sequences for layouts --- openpype/hosts/unreal/plugins/load/load_layout.py | 13 +++++++------ .../settings/defaults/project_settings/unreal.json | 1 + .../projects_schema/schema_project_unreal.json | 5 +++++ 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index f600a131c5..727488ee66 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -23,6 +23,7 @@ from openpype.pipeline import ( legacy_io, ) from openpype.api import get_asset +from openpype.api import get_current_project_settings from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline @@ -620,8 +621,8 @@ class LayoutLoader(plugin.Loader): Returns: list(str): list of container content """ - # TODO: get option from OpenPype settings - create_sequences = False + data = get_current_project_settings() + create_sequences = data["unreal"]["level_sequences_for_layouts"] # Create directory for asset and avalon container hierarchy = context.get('asset').get('data').get('parents') @@ -764,8 +765,8 @@ class LayoutLoader(plugin.Loader): return asset_content def update(self, container, representation): - # TODO: get option from OpenPype settings - create_sequences = False + data = get_current_project_settings() + create_sequences = data["unreal"]["level_sequences_for_layouts"] ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -840,8 +841,8 @@ class LayoutLoader(plugin.Loader): Delete the layout. First, check if the assets loaded with the layout are used by other layouts. If not, delete the assets. 
""" - # TODO: get option from OpenPype settings - create_sequences = False + data = get_current_project_settings() + create_sequences = data["unreal"]["level_sequences_for_layouts"] root = "/Game/OpenPype" path = Path(container.get("namespace")) diff --git a/openpype/settings/defaults/project_settings/unreal.json b/openpype/settings/defaults/project_settings/unreal.json index dad61cd1f0..c5f5cdf719 100644 --- a/openpype/settings/defaults/project_settings/unreal.json +++ b/openpype/settings/defaults/project_settings/unreal.json @@ -1,4 +1,5 @@ { + "level_sequences_for_layouts": false, "project_setup": { "dev_mode": true } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json index 4e197e9fc8..d26b5c1ccf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json @@ -5,6 +5,11 @@ "label": "Unreal Engine", "is_file": true, "children": [ + { + "type": "boolean", + "key": "level_sequences_for_layouts", + "label": "Generate level sequences when loading layouts" + }, { "type": "dict", "collapsible": true, From 284c152ff0d892d273763f0428c4fce645162886 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 18 Jul 2022 12:47:23 +0100 Subject: [PATCH 0092/2550] Reopen previous level after the update --- openpype/hosts/unreal/plugins/load/load_layout.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index b2d5b43e1e..4fdfac51c8 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -788,6 +788,16 @@ class LayoutLoader(plugin.Loader): sequences = ar.get_assets(filter) sequence = sequences[0].get_asset() + prev_level = None + + if not master_level: + curr_level = unreal.LevelEditorSubsystem().get_current_level() + curr_level_path = curr_level.get_outer().get_path_name() + # If the level path does not start with "/Game/", the current + # level is a temporary, unsaved level. 
+ if curr_level_path.startswith("/Game/"): + prev_level = curr_level_path + # Get layout level filter = unreal.ARFilter( class_names=["World"], @@ -832,6 +842,8 @@ class LayoutLoader(plugin.Loader): if master_level: EditorLevelLibrary.load_level(master_level) + elif prev_level: + EditorLevelLibrary.load_level(prev_level) def remove(self, container): """ From 01f2c59049be47fce42a5bfdcf67ef1227be1d11 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 18 Jul 2022 15:15:40 +0200 Subject: [PATCH 0093/2550] the update placeholder keep placeholder info when canceled or closed --- openpype/hosts/maya/api/lib_template_builder.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index a30b3868b0..855c72e361 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -107,11 +107,13 @@ def update_placeholder(): placeholder = placeholder[0] args = placeholder_window(get_placeholder_attributes(placeholder)) - # delete placeholder attributes - delete_placeholder_attributes(placeholder) + if not args: return # operation canceled + # delete placeholder attributes + delete_placeholder_attributes(placeholder) + options = create_options(args) imprint(placeholder, options) From e6cad709cd5904087f687a4e2ec5a15641ab48e0 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 18 Jul 2022 18:43:34 +0200 Subject: [PATCH 0094/2550] fix error when updating workfile from template with empty scene --- openpype/hosts/maya/api/template_loader.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py index 0e346ca411..c7946b6ad3 100644 --- a/openpype/hosts/maya/api/template_loader.py +++ b/openpype/hosts/maya/api/template_loader.py @@ -80,7 +80,11 @@ class MayaTemplateLoader(AbstractTemplateLoader): return [attribute.rpartition('.')[0] for attribute in attributes] def get_loaded_containers_by_id(self): - containers = cmds.sets('AVALON_CONTAINERS', q=True) + try: + containers = cmds.sets("AVALON_CONTAINERS", q=True) + except ValueError: + return None + return [ cmds.getAttr(container + '.representation') for container in containers] From e64d00e39c5c1fe64445e87b374d856d230fba91 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Jul 2022 03:59:40 +0000 Subject: [PATCH 0095/2550] Bump terser from 5.10.0 to 5.14.2 in /website Bumps [terser](https://github.com/terser/terser) from 5.10.0 to 5.14.2. - [Release notes](https://github.com/terser/terser/releases) - [Changelog](https://github.com/terser/terser/blob/master/CHANGELOG.md) - [Commits](https://github.com/terser/terser/commits) --- updated-dependencies: - dependency-name: terser dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- website/yarn.lock | 64 +++++++++++++++++++++++++++++++++-------------- 1 file changed, 45 insertions(+), 19 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 04b9dd658b..38812dc6cd 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -1543,15 +1543,37 @@ dependencies: "@hapi/hoek" "^9.0.0" +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/resolve-uri@^3.0.3": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.5.tgz#68eb521368db76d040a6315cdb24bf2483037b9c" - integrity sha512-VPeQ7+wH0itvQxnG+lIzWgkysKIr3L9sslimFW55rHMdGu/qCQ5z5h9zq4gI8uBtqkpHhsF4Z/OwExufUCThew== + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.11" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz#771a1d8d744eeb71b6adb35808e1a6c7b9b8c8ec" - integrity sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg== + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== "@jridgewell/trace-mapping@^0.3.0": version "0.3.4" @@ -1561,6 +1583,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.14" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" + integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@mdx-js/mdx@1.6.22", "@mdx-js/mdx@^1.6.21": version "1.6.22" resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-1.6.22.tgz#8a723157bf90e78f17dc0f27995398e6c731f1ba" @@ -2140,10 +2170,10 @@ acorn@^6.1.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== -acorn@^8.0.4, 
acorn@^8.4.1: - version "8.7.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" - integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== +acorn@^8.0.4, acorn@^8.4.1, acorn@^8.5.0: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== address@^1.0.1, address@^1.1.2: version "1.1.2" @@ -6838,11 +6868,6 @@ source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== -source-map@~0.7.2: - version "0.7.3" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" - integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== - sourcemap-codec@^1.4.4: version "1.4.8" resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" @@ -7048,12 +7073,13 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.4: terser "^5.7.2" terser@^5.10.0, terser@^5.7.2: - version "5.10.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.10.0.tgz#b86390809c0389105eb0a0b62397563096ddafcc" - integrity sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA== + version "5.14.2" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" + integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA== dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" commander "^2.20.0" - source-map "~0.7.2" source-map-support "~0.5.20" text-table@^0.2.0: From 7793ea5580595c95d875b056bdf1a61685f63a1d Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 20 Jul 2022 19:26:10 +0100 Subject: [PATCH 0096/2550] Added documentation for UE5, layout and rendering --- website/docs/artist_hosts_unreal.md | 132 +++++++++++++++++- website/docs/assets/unreal-avalon_tools.jpg | Bin 25212 -> 0 bytes website/docs/assets/unreal-container.jpg | Bin 10414 -> 0 bytes website/docs/assets/unreal_add_level.png | Bin 0 -> 8393 bytes website/docs/assets/unreal_container.jpg | Bin 0 -> 10414 bytes website/docs/assets/unreal_create_render.png | Bin 0 -> 124745 bytes .../unreal_layout_loading_no_sequence.png | Bin 0 -> 12529 bytes .../assets/unreal_layout_loading_result.png | Bin 0 -> 23833 bytes website/docs/assets/unreal_level_list.png | Bin 0 -> 21009 bytes .../assets/unreal_level_list_no_sequences.png | Bin 0 -> 10784 bytes .../assets/unreal_level_streaming_method.png | Bin 0 -> 153336 bytes ...al_level_streaming_method_no_sequences.png | Bin 0 -> 82416 bytes website/docs/assets/unreal_load_layout.png | Bin 0 -> 138927 bytes .../docs/assets/unreal_load_layout_batch.png | Bin 0 -> 121461 bytes website/docs/assets/unreal_openpype_tools.png | Bin 0 -> 27332 bytes .../assets/unreal_openpype_tools_create.png | Bin 0 -> 27449 bytes .../assets/unreal_openpype_tools_load.png | Bin 0 -> 27465 bytes .../assets/unreal_openpype_tools_manage.png | Bin 0 -> 27475 bytes .../assets/unreal_openpype_tools_publish.png | Bin 0 -> 27453 
bytes .../assets/unreal_openpype_tools_render.png | Bin 0 -> 27453 bytes website/docs/assets/unreal_publish_render.png | Bin 0 -> 247922 bytes .../assets/unreal_setting_level_sequence.png | Bin 0 -> 3881 bytes 22 files changed, 127 insertions(+), 5 deletions(-) delete mode 100644 website/docs/assets/unreal-avalon_tools.jpg delete mode 100644 website/docs/assets/unreal-container.jpg create mode 100644 website/docs/assets/unreal_add_level.png create mode 100644 website/docs/assets/unreal_container.jpg create mode 100644 website/docs/assets/unreal_create_render.png create mode 100644 website/docs/assets/unreal_layout_loading_no_sequence.png create mode 100644 website/docs/assets/unreal_layout_loading_result.png create mode 100644 website/docs/assets/unreal_level_list.png create mode 100644 website/docs/assets/unreal_level_list_no_sequences.png create mode 100644 website/docs/assets/unreal_level_streaming_method.png create mode 100644 website/docs/assets/unreal_level_streaming_method_no_sequences.png create mode 100644 website/docs/assets/unreal_load_layout.png create mode 100644 website/docs/assets/unreal_load_layout_batch.png create mode 100644 website/docs/assets/unreal_openpype_tools.png create mode 100644 website/docs/assets/unreal_openpype_tools_create.png create mode 100644 website/docs/assets/unreal_openpype_tools_load.png create mode 100644 website/docs/assets/unreal_openpype_tools_manage.png create mode 100644 website/docs/assets/unreal_openpype_tools_publish.png create mode 100644 website/docs/assets/unreal_openpype_tools_render.png create mode 100644 website/docs/assets/unreal_publish_render.png create mode 100644 website/docs/assets/unreal_setting_level_sequence.png diff --git a/website/docs/artist_hosts_unreal.md b/website/docs/artist_hosts_unreal.md index 1ff09893e3..45a0c8bb6f 100644 --- a/website/docs/artist_hosts_unreal.md +++ b/website/docs/artist_hosts_unreal.md @@ -8,6 +8,20 @@ sidebar_label: Unreal OpenPype supports Unreal in similar ways as in other DCCs Yet there are few specific you need to be aware of. +### Creating the Unreal project + +Selecting a task and opening it with Unreal will generate the Unreal project, if it hasn't been created before. +By default, OpenPype includes the plugin that will be built together with the project. + +Alternatively, the Environment variable `"OPENPYPE_UNREAL_PLUGIN"` can be set to the path of a compiled version of the plugin. +The version of the compiled plugin must match the version of Unreal with which the project is being created. + +:::note +Unreal version 5.0 onwards requires the following Environment variable: + +`"UE_PYTHONPATH": "{PYTHONPATH}"` +::: + ### Project naming Unreal doesn't support project names starting with non-alphabetic character. So names like `123_myProject` are @@ -15,9 +29,9 @@ invalid. If OpenPype detects such name it automatically prepends letter **P** to ## OpenPype global tools -OpenPype global tools can be found in *Window* main menu: +OpenPype global tools can be found in Unreal's toolbar and in the *Tools* main menu: -![Unreal OpenPype Menu](assets/unreal-avalon_tools.jpg) +![Unreal OpenPype Menu](assets/unreal_openpype_tools.png) - [Create](artist_tools.md#creator) - [Load](artist_tools.md#loader) @@ -31,10 +45,118 @@ OpenPype global tools can be found in *Window* main menu: To import Static Mesh model, just choose **OpenPype → Load ...** and select your mesh. 
Static meshes are transferred as FBX files as specified in [Unreal Engine 4 Static Mesh Pipeline](https://docs.unrealengine.com/en-US/Engine/Content/Importing/FBX/StaticMeshes/index.html). This action will create new folder with subset name (`unrealStaticMeshMain_CON` for example) and put all data into it. Inside, you can find: -![Unreal Container Content](assets/unreal-container.jpg) +![Unreal Container Content](assets/unreal_container.jpg) -In this case there is **lambert1**, material pulled from Maya when this static mesh was published, **unrealStaticMeshCube** is the geometry itself, **unrealStaticMeshCube_CON** is a *AssetContainer* type and is there to mark this directory as Avalon Container (to track changes) and to hold OpenPype metadata. +In this case there is **lambert1**, material pulled from Maya when this static mesh was published, **antennaA_modelMain** is the geometry itself, **modelMain_v002_CON** is a *AssetContainer* type and is there to mark this directory as Avalon Container (to track changes) and to hold OpenPype metadata. ### Publishing -Publishing of Static Mesh works in similar ways. Select your mesh in *Content Browser* and **OpenPype → Create ...**. This will create folder named by subset you've chosen - for example **unrealStaticMeshDefault_INS**. It this folder is that mesh and *Avalon Publish Instance* asset marking this folder as publishable instance and holding important metadata on it. If you want to publish this instance, go **OpenPype → Publish ...** \ No newline at end of file +Publishing of Static Mesh works in similar ways. Select your mesh in *Content Browser* and **OpenPype → Create ...**. This will create folder named by subset you've chosen - for example **unrealStaticMeshDefault_INS**. It this folder is that mesh and *Avalon Publish Instance* asset marking this folder as publishable instance and holding important metadata on it. If you want to publish this instance, go **OpenPype → Publish ...** + +## Layout + +There are two different layout options in Unreal, depending on the type of project you are working on. +One only imports the layout, and saves it in a level. +The other uses [Master Sequences](https://docs.unrealengine.com/4.27/en-US/AnimatingObjects/Sequencer/Overview/TracksShot/) to track the whole level sequence hierarchy. +You can choose in the Project Settings if you want to generate the level sequences. + +![Unreal OP Settings Level Sequence](assets/unreal_setting_level_sequence.png) + +### Loading + +To load a layout, click on the OpenPype icon in Unreal’s main taskbar, and select **Load**. + +![Unreal OP Tools Load](assets/unreal_openpype_tools_load.png) + +Select the task on the left, then right click on the layout asset and select **Load Layout**. + +![Unreal Layout Load](assets/unreal_load_layout.png) + +If you need to load multiple layouts, you can select more than one task on the left, and you can load them together. + +![Unreal Layout Load Batch](assets/unreal_load_layout_batch.png) + +### Navigating the project + +The layout will be imported in the directory `/Content/OpenPype`. The layout will be split into two subfolders: +- *Assets*, which will contain all the rigs and models contained in the layout; +- *Asset name* (in the following example, *episode 2*), a folder named as the **asset** of the current **task**. 
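For a quick sanity check of what a layout load produced, the same folders can be listed from Unreal's Python console. A minimal sketch, assuming a layout loaded for a hypothetical asset whose folder ended up at `/Game/OpenPype/episode2` (substitute your own asset's path; `/Content/...` in the Content Browser corresponds to `/Game/...` here):

```python
import unreal

# Hypothetical layout folder created by the loader -- adjust to your asset.
layout_root = "/Game/OpenPype/episode2"

# Walk the folder and log every asset that was created inside it.
for asset_path in unreal.EditorAssetLibrary.list_assets(
        layout_root, recursive=True, include_folder=False):
    unreal.log(asset_path)
```

Pointing the same call at the *Assets* folder (which should resolve to `/Game/OpenPype/Assets`) lists the rigs and models the layout pulled in.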
+ +![Unreal Layout Loading Result](assets/unreal_layout_loading_result.png) + +If you chose to generate the level sequences, in the second folder you will find the master level for the task (usually an episode), the level sequence and the folders for all the scenes in the episodes. +Otherwise you will find the level generated for the loaded layout. + +#### Layout without level sequences + +In the layout folder, you will find the level with the imported layout and an object of *AssetContainer* type. The latter is there to mark this directory as Avalon Container (to track changes) and to hold OpenPype metadata. + +![Unreal Layout Loading No Sequence](assets/unreal_layout_loading_no_sequence.png) + +The layout level will and should contain only the data included in the layout. To add lighting, or other elements, like an environment, you have to create a master level, and add the layout level as a [streaming level](https://docs.unrealengine.com/5.0/en-US/level-streaming-in-unreal-engine/). + +Create the master level and open it. Then, open the *Levels* window (from the menu **Windows → Levels**). Click on **Levels → Add Existing** and select the layout level and the other levels you with to include in the scene. The following example shows a master level in which have been added a light level and the layout level. + +![Unreal Add Level](assets/unreal_add_level.png) +![Unreal Level List](assets/unreal_level_list_no_sequences.png) + +#### Layout with level sequences + +In the episode folder, you will find the master level for the episode, the master level sequence and the folders for all the scenes in the episodes. + +After opening the master level, open the *Levels* window (from the menu **Windows → Levels**), and you will see the list of the levels of each shot of the episode for which a layout has been loaded. + +![Unreal Level List](assets/unreal_level_list.png) + +If it has not been added already, you will need to add the environment to the level. Click on **Levels → Add Existing** and select the level with the environment (check with the studio where it is located). + +![Unreal Add Level](assets/unreal_add_level.png) + +After adding the environment level to the master level, you will need to set it as always loaded by right clicking it, and selecting **Change Streaming Method** and selecting **Always Loaded**. + +![Unreal Level Streaming Method](assets/unreal_level_streaming_method.png) + +### Update layouts + +To manage loaded layouts, click on the OpenPype icon in Unreal’s main taskbar, and select **Manage**. + +![Unreal OP Tools Manage](assets/unreal_openpype_tools_manage.png) + +You will get a list of all the assets that have been loaded in the project. +The version number will be in red if it isn’t the latest version. Right click on the element, and select Update if you need to update the layout. + +:::note +**DO NOT** update rigs or models imported with a layout. Update only the layout. +::: + +## Rendering + +:::note +The rendering requires a layout loaded with the option to create the level sequences **on**. +::: + +To render and publish an episode, a scene or a shot, you will need to create a publish instance. The publish instance for the rendering is based on one level sequence. That means that if you want to render the whole episode, you will need to create it for the level sequence of the episode, but if you want to render just one shot, you will need to create it for that shot. + +Navigate to the folder that contains the level sequence that you need to render. 
Select the level sequence, and then click on the OpenPype icon in Unreal’s main taskbar, and select **Create**. + +![Unreal OP Tools Create](assets/unreal_openpype_tools_create.png) + +In the Instance Creator, select **Unreal - Render**, give it a name, and click **Create**. + +![Unreal OP Instance Creator](assets/unreal_create_render.png) + +The render instance will be created in `/Content/OpenPype/PublishInstances`. + +Select the instance you need to render, and then click on the OpenPype icon in Unreal’s main taskbar, and select **Render**. You can render more than one instance at a time, if needed. Just select all the instances that you need to render before selecting the **Render** button from the OpenPype menu. + +![Unreal OP Tools Render](assets/unreal_openpype_tools_render.png) + +Once the render is finished, click on the OpenPype icon in Unreal’s main taskbar, and select **Publish**. + +![Unreal OP Tools Publish](assets/unreal_openpype_tools_publish.png) + +On the left, you will see the render instances. They will be automatically reorganised to have an instance for each shot. So, for example, if you have created the render instance for the whole episode, here you will have an instance for each shot in the episode. + +![Unreal Publish Render](assets/unreal_publish_render.png) + +Click on the play button in the bottom right, and it will start the publishing process. diff --git a/website/docs/assets/unreal-avalon_tools.jpg b/website/docs/assets/unreal-avalon_tools.jpg deleted file mode 100644 index 531fbe516a2d0e645140c4a102afbbfa4cbdbbd6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 25212 zcmdSAcQ{<{+CMrv5k&7KA&6ch7%ho11VQu`M3l*BQAQmhdY2$NQKLq0gXlyMy|>Z( zj5^x*&G+5=+VB4E^E>Cf=bXRJGWX1NnP=8|?)5x(``n*(JAJzhxTmHJQU>7Q-~irW z{{Xjhfad@_T-?82*aIK?A|NIpz{e-JLqtSKeD}`XyQFtWNy*5m?vasGl9Q6&qq|2* zO+!mddzXTqfsTfOiiVcvuan^5VSj^9Kte!3LPJJMM)Uvpxcv#BB*tk3An)%F0dR5f@Nn_*{+cy*v>*0+06rxF z)dLYlLTYVGqK7Utq95aPh&i5Bw$tj2AUVaXTmwn&(9tt6GCktr=6TF3F7ZV2sg(5d z7fQ-1svtGp*LwN}hDOHLZ*AV$+SxmNaC7(YgnD@geF_c<4GWJ*`1~a?Dfw$k>bKmy z{DQ)w@5NQsHMMp14UJ75on75My?y<^M#sh{Ca0!nW>*laYwH`ETiZLRqvMm)vvc&t zZ6Ir@YKNS5)O!F*0 zr?Q=dLre!rYvnp}hmKQxN7S^=GoB6vo|HtbO!7Y*Jod&ZO!KZv?Mm4tp(rpYytZv(c)#w+qd3b96Z^|IY zM>qwX-!ev*`E57v-yI!;*|zNqn3@QMBTQ&pD#a}P;`Sn6HdQ8^3WW($4BQoUj9Mjd zX-);43ZYFhnQj4DTepD2J&S8I#qYjv|J2-Y?O-??#&wn)(R;r&ZvkQitZ(t!T&0;V zju8d{n`o;hjn;h64CCYdcVKotr5b|W=5;3mbpCP2P4a2n zAEG)~r>337iO}vX;Jmxy7Qmj1Vfygw7LWt8xCOKdz|c3ob2kO81+D3$w*cEnpF=vR zZT4CM8D(@4I?1Q>`{lUA1VI3Lw}6oJKW$qUQ`p%@T43Z= zuEjTkZvoFRZMOhl<68jgvITi{25$J{tGCRmYXqz7x@--*3`z4Tte9R8Jg}=|QZ>E! 
z{q2R(y`F5xr~Eh-z}OY?EWcd)DkX%z1*O9*?Pl2fnykHBz`xn3rzN_!Yya5)u#IDrT^Z;J!h(p_{1pWkU9)SDk!;*il;lJxX-(cQIg0Fo1nu39IX4|0Vew zJjb@fqBiup;(w2lpWD0zWJ9KIPU&v}EZ?#*V-~tf00@{(w0X*xBNy$z)APGKZxEPN zA5qRt!si5y9sE&nWa!)ck$a<;HfzPu1D&Pv^cIlZ2RjilgIzyN<-Wl=xdmJkXd#QG z&w>iKBdGSr(d4K(Udr#qh%-8I@w*V@riCzPWu7uxPZ!Nyx zlaH^gIqqlU{h$~nH+?xl)MVN+t7He=toyjMc~;piphMAd*Pc7I{+Wc*X2Ap?PGsWi zW@>Lk^@M=0tYv9y?RRFAfs}n~y$M0jkZ`MaoQ^%jDK5j-V^NK6FlvdG{GV)(gw1XD zMkCW&s^3s~uui`RXt904Sx`cyA@2H%j{Cm!mBL$Ef=2XqRWKWBtv~9{{`#B>a6t*l z_^6|9X>eEgL(mG05jxP(WFA>C2X%Mkck6ejKN2i`XVVmO#*otx@}Ba`<2xVg!MLD> zbZgvR4HVmVi5h(fLZ;C%*Zz&F;K4$^l2z$1Jl~(9KRDg)$BbX?%NUH+sLN{lS-e&) zFLxB6)$^$F7)~{0b=)anj7d z-P||@*-Yi&ospe5&y@H55ioGgSzRuwAO&EFBPNg!Yif&<;{lI=nHE1gJFZp-{`R7C ze|?T2xHcOM<3MU;4p z&#RFPAV~HmRFZOTVWGbg*4F7aHq)d}UXi=w$;9uv6J-2Z6^@XEH#Q1u1!a;xnQXWC z?emIA<_w1QH=&5SN{VRlHtMpn@V2p<2Jx_5k+v_E5q(A_!b=&8hW+jh!>Awj7*eEvR*s?lhuwQnt4&q-M1FnRz6Y1jz?irQ zRfi}O$<)Sl7l|PsUK8B>TR^oYruPRnCi20qPTE#f&SXQcFHEowKK~YQ2jDbw3z+$qaT9BK z3y{;t#kb{dl$nPBii>>JCl&i z3%kyL-M&fr-65wNz{IW$GenMqwG6I5vMKXGAxN%)R;3$u#J9_Sx+#eXu)}SstIT#_ zBN%gos2%H6!7E#voAhfdV-QkwcKhSF#&3BQ1J2~321@0=R#z{)A&W_eV6VHA)C2Ek zefb!C4C@{ocRIOD-Fagt z2s(bTM_b?E!QtHg#;b=yW);Swf*2HBAH- z>iT=PxfZq8F{88K6xKlAj^J?XKQ4>Sw*V5%&=RGu-M~%C3V4;&NaMRecgKZrnq4@R z#)cfd)el}xZ`4!-U_1>`mvU3NvY_G4YGrrQcXBGNKGPM?r|85YxGnrOecfpM%Z{>( zQB!EF+f!d^mz68}@q^*B4@SKoJaLthz+2_6uT`&<$1U4|t5dO3sT@vXUDNYXuU89c z+3fHp4q6}CF~u|%oA%At=GP8YI+ZJ*{ZZMt2rFx%V|&9uFf=2oy0*88_XQoi$^*1< z$s5L}cn>LKW3;-DZvakN?|5_b=aKf1G#g*~xcFQP@7s{GNw8EV&!*u42TzaGJ)k2F zyL)`%$F1kMPbEym0gbuh*t?AVkg(Pob3C=5C1p0M;4)lG$*qrSCLST}Tf;fmqy3s0 z@R<9fUbO;gsu(C$?EmJYgyK559Jc$(;*uuOR)RILbV|B*=`&5jejxvzj>|D@#45c`S0X z)8KfQe(Dk^kBr|wCoQrU;V=aVR9MEl_AS%KtgJ^@cKs-D$r#whP{WbE(fjXD?ELwl zbuuY%QSsQv61YD|_KjKx89%+qFMAu6G)=f=;bHvBlOm!4Oo2z#%_ZyiL;L zhVXJ#AP)Y`C=Le)(dS`_IkW+yCvqi2!66Rnk04Wix}j=a*Wy$ih^oTvP}p;61`1a^ z3(CIG(~vpzyL7BOlG=H^vwF$qQc`55v;VphS0B%%f7kqU72?!A>;G+$7P($O2dBIR z5WBa`ZhxRS!Narf=Ra@YFWD^ZQK7Q+{F)qNT&Tk9=IQs*ZViYmZLXU$hF` zaWpui0kX*|BW@J*Q2Leb=*590xj`>vH_~{u+yJ9;z_x-ROJqMu`QDlR-Hx z>7o7#PT@IF`eC#rM0xE9AdWNRYloan%wxsV84+KQ8&)6B{@Ux3H@^UaIFJ15Q;pg3 z8j(hpGE^mq7P1(m>KBH=^~%s47f$Z`*mBdPAKyvR2SH;fmIX=Q0>@M+{UO#PjAwmEFX{E0`@lTD;Sm{(O!k#$Xu z9UTm_&bJ^lQ1{YVr>q7_pmi5Y+|jS2&;12HWgiS~K)NG5Y48siN($il@5`xRmPgFGcfyL$E8OX7`q+P+gs42_e-a;Mn!Osy- zayXN*h2M;^M02U@{QiJgovP9`4(IX5=>!6AiddMfz@1(Y=vt_}W&haZQD1G6dzQeqsb|ay+3~5Xt<>nMQC8f;&ZC8PTkmtLgRy-o;!#)Y>5&5<;ZGe=WiM72 z^Ql!Wb`=DfdWYzMduge>l>~fb{BV=qn|HVM4k-dI~;AFRhjq&vTCo?j_=zxMNGan;z(78 zuG?Ebi-}HB09d9Q(EERW?&q9LDNawyLGsXMR=c{7qmImv9eC*xiP-1vHITdiXYNHE zQX)TermPn1btovP8Ilw-J*T|SOUXxT{jlg&DQJr#>BK`iHUqm2H}%H_-oIY~M>c=g zC$`jrA9V2+opXMC*;Jdg!h@7`yBR*iIPVwp_JWUWtr zVJB#f;`H}OM}-B?s1s=OYc6e*a%OobtR1}nXduV3J8M=EcIpLJT21p)mR0XE=r8VC zl{=Ds=*DXQkC7f!G zt96veY?FRt%C~^We3h+d%dbDUpO96A*I1xmYa7W`7UoY~^Buo^1O6n44QHRyHP&f} zWUea~-}iX{10+z<0Kf+0NF_0tr|9d+hMWw!TFGc(5{j^X0TzfM|8|(}B>8 z&ceZ6=Co{Eosx?LqE!oHSi4TEq#4@(1giE+VZV4rtX1epiaVHgNQ+U+!*dx&T{=^$Oapq_=NQJYKvFZt8Zo zG%sebxsJ^1RJb_bUCN>6YGVHaL*x#8UI^-umW44}99?VbMDSE7tUPR}jo5zn>*LSu z)bb|Ty!VV&o#f96rPwvW({dEo1mmsT7Etq@Ba;lij?~RCq7eDIG6AJk6APdNDm3)> z^B*bH0`bA*%4++vpR(?*qG3q+o=Q`+cdr2|2UT(XpwdC4uO7nO6`U8dNY&P(h--Q5 z_b2;pYD3TY!WD7l=$)@{+nG_e^bc#AC6p)Xz3bm=*dNrfYgf6rE`6 zWqu}&a|5SB=Z10 zjqV@NpB6#>0V#LRC-01#UhMkpAz ziY%Q)U%kSjub~jO9T`$tkps?gA;=1;i@Pn&qH6pMWlEkG?!)!3Ld-ujT&sLJQE%lK z)674EMC_5dzjhyWsk=S|y%Z9f8SpBQh@%n3pJlkGshCavEP^K+_ur*tyX{EQ7}nx+ z3&>?^x&`1|OHTPeGIcmxWUIOIJ4h>R%{vRzN)xx|v>TvW-Ei5!tN(sSVZc)BK>;=k zUa-fUwqc?i0H?D&-vaJnPTicytbHhINn?&s?fA*W$=i`EMOKrdE6Es>i#e%sR 
z7c73%-2(3VGcD9~P>`T?tZ}-&$v&BZx1Ilh`%7%z0@&a<$b+Rb7&^9SdyyI0<*Kx@ zpx)P9TUkfIMOQ^Ty90mvw1@md1mL>pul)Lt9J&7=b15_qg2!EpXeare!g062S=t6x zCG%pIPl*(FiTN8D-Y_f&S-7j`p-J0YXbQ+gIC5mqocM31qCp zU1@Ej5$<;lWI*_?${W1}T4gU+_a8SwYW=&PnGZSSS2(WU0$6>OCNR9v;#)vh>eScz zWY=o^_;VT^P>+A!=N*vj?%1^p3Z{nQ3d-^O+}$b&5^v!H)mo6wPv?xLB1RV(GU_9T zn+B<$%4vG7k=G(#=7aFxTr%-Qz1E5HP<$*U`PgwdSHK?#We1^N7*Bj*dyrCo?Nf;U zb;+CtC5mTJUe6DFLX{=*F&*MD&EQTFoAwpC)O`z}u*a|?_oGi+3(O~n4Mq-=7J2%f z)PH-%6+RHVmY}d{T!{Brytf#*-bz-Gek_d?AZ7xD&TVG|@zuPcX7uz;XdL@eIqw&>RN9wp5Tym^+YL z02*sOHUHaz-vW5HTiGYxirv%=E^v-6PUOU|M98ki#shN_U)tiY#h?$nFn^eou6zOL zM2YS%c`7+ORCx(NKG$M?u4lv5YtEtuS972P0;v48q;2diV2fg9_?n>Mua?(u-&Z%) z*G`D65PyLY$CENb88a2t=IWG{`gG~MxF!*uclTMSqk^zX#JTeH6^tmbn_)WI=ufhO zJz5_@^bbr757we}2kMd_UF5DD`n{_bme)Kt-{lyemqCYMR1Rtahk^ncUmHfsZRSge zDSm!Ryl8Pryyj=gto>tdHsD;sCCOWz1XzssPaym%l(ez4PwN#JN(q4Cw$+maPn(=Q*`8T8Hr}|Gt z1qHt~G)915JZLI5GW*5MF4`rh-OQ2V6l0u$m^ay(Ws5Ag|7>Fa=Si9$3xd{%PaA^& z%Q}$-v>wz;2BZa6+4PBk7L!emUw{4Se36~3yP7+RP{{u0BzF6nGP$EZ? zxxwvQ0Df-M$oo-0w>fAPwh7ZT#eZT^nH0Vcxkl_9A6`6qTc47MD|}zjNZqQ|tu3Zx zr7aZ0m^3@3F>!5Uk03KJkYq}P4|8Sl_uH(^cRlA7OODVDS;UD3X&+%BIe+opCvwb@ zFX^P;PfecDGwIbe^6N0~`bZ6=yis2I5prz9oI?7MgW%26Z1Pntw9s;yAne}st~uvK z`6)9yNoU1=_2?@lyg{w^}vo-S1hhhC9&;OY>@FUt)PSTm6YBBroJp) z3Y6?Q#WaNmU2`K2!;J+?=s!L%v05dbQcj9iC;27PYuhXwfsT-YqPmpoFn_viee8)FC!36kZjiK`gP7VqkVp~G*mDg^XgJePZo0x1zCBs4|2?0Ln_(jXuqra z{wefT%02R{FIF|McNS5ZoM_S6%-ofwFlscD(Rz&Jm4BeqiBCDnjLSo!B(9Sq1ZPx6 zC++292Q7@CyP%@S%BJA%0*D@*IjG~@JwYC z*ns9M$Go;Gk0G|<2qfOKPuz#q5$1p2$zqW(I)$irHR^c1Y!{dNs4hOn;Q zTA9Mj1T{{IqP+?Py1P2}HlgurWYsQ5<4Md~zjE1jSK2-eH81;dDMASXxaX^TchciLnSy!1vOr+4X017iqE&AmhuF9BC_weQU@P6$c_!zm zFLNIB;igU_-itI06|vL$Cv(5@r{xEz0dmuF91PIwjGq3g1b{ zntk$}(vZ^^d5C8WH=_o8F$$SJvFh-MN>lQOTX9D_|^o&i1L!K|mpZw{GQ+VdXDGb}TK*2Sz1wM)y zLlGGU*Oal&^<^r83pNOYK}M8s?P(t1zlo0kT*yUSlJ~wCtvQ^H?0wQin^97io;p4& zkj_)pRbOfH5}Bvo2T2gr_Uc!{lgMA$ee5sbD^)oT^g7GV7=K z3`=oGr3QEI1gZ{5ABooB9P8PeLv0Pm%HB3Mk5-gHa*s#{C91y(Wick)O%Mk5jW@x( z6glp{YYjkVg}P}GqfK98XsX5qPqjj-caCoXcD4Onmfwf(#XtMitQh`qdj^MLey+(* zjv4LQn<y+p1TloyHn`-u8Z3`Wce(Jsps zVRVaRlN+NC{|LO(@N(R{-_VpQQ(ZXQ4t#MvENC(;_a>5&GUFo&B00ua26?D~c0#aI zqCG+N6Sh7rf*p@ZEuK5s@SmOUizy^Dc$&_=*xjISfBGokegKqKM*S8Lm$7u?5{adO zB&2C#`6O%Hp0@x~EVlY5FPqHap9P+*g(*s~0s*6S@aLSAAdz5o(r&sk>mH9+VcWb@ zEL>Ea4TzBT*p$dwJ&duSbdM#3cFZ34NPafi8Cf)4RE9uQzS;y0#M6X*U=k(Ex8{q} zbkzh0VG*gSF^tzBmVFY7DqW(6rcSPQdY7uRv}ca$F>1%WiZuqTVv9Y%7LXf-*R_bB zY7dmHu-L|Rse?ia9x?}G_9|KON-zT@6b`5RLT4N3>XkO)`p|0$zijj~M%XIiywjhp z`bT5K?m&%8Fi{pfqHq!J*?}P&zqWLJGe*T|L&VJ45cA}kHjEod*sOjy)7CTj#A1xY`D>ml;i6OBSKc4;JTi>#xMm>5|Sz%1{0N3{wqKuY-EF7EFlZ)9;Zhy*2WD zi4I0&&}&1UYoY`zE&V6A-%aV)AN4-|NU8mhzm&XNPmlBu0pRYPz^NYp`&HvLH_A(9 zRDdLvFCjMn5CI*qp~QQtVO+-jbk!(ZHEc--QO{r4vPu( za~@k#U35H|?{y7VNvT0KZ|n|&oKSe(PwKqQ(&acOR%%8I=%xMOo=2q~u0ipfYRr^B zxDN%%-e@Z@dH8cpieK45_T9m|V5rj12p&f>N+7!H!#-fyD=ded-AjS!zFsU^69~FV zE3{ui#yMooT)yu>{2JS@^3t!ui$ihkaoDC85mxeKlSk^H;}HuE`;N5~G+={swd#k1 zABr?Mf>Y%M-!Ohnmj*=1hbxmSiFLHygX=D9)xtU?5N6h$OAa$k4gZyBz9_`MZSY{QjQIx#c}8spv$>rX0jv3S?@LEJ4u$jqW(@XQmy*6z`V*qD_f6B;@1`R9yqe7)UAsH0-@(gU2QH$NHT< zkcM-l((`WpToe7VcF!kbW?b$8v``;KP}l5-Fta*0shwMMFrYu`|3D`ViK|=h(G&*b zVmG-BIvmkh8+K|B)F=fN_tL~YI=1*_Yq*e+fp;)Zn+hUfY!~c0m7rmhMm;JW(Vz%( zH_VSvx3^A^nbC{i|I_>aXUKBbr(*VH{NSIZ(mZ%-!(FMZnRTS#;>9_^ESdS4Qc3c_ zS=(7QRiwYNoUu&^$#iRADcD~EMG@XQc2im5;9W7BALE5t@Zwy2A78@nc=h$b#=x0m zjp&VZYBv_@@kqV}JdkO-T&2eZKYEgZ?eO<$9%34>ctQ?Hf$6WLhXyOO>c&{FB7x;? 
z`SfGiN~z7SYb!4BuuYFCc-AFJ_8oET7;p9tfe)QY22)J1uzWgu4}-n)>WWJswg;=h zN|Z(i+x(V5C}+bFf8rS_L=<=}2H|S)DmK|B4(JuE_uHK7BmNb}CaSTq@@tQw*Ypm$ zDT{M-kQMH#&KYk~!b#bjWca%B9WhBqAc*ts+w`fN&|bYxr5-|Bo{n5i<-&;CNQh4t zeR7?1KS5sXRbCPg!ml--1IJ){xRE>clp=DGR_lOb|&k~RFt+US))v~O+o0i1to^l4ntP;r`E*GTZr0xOHMFC-AU4{ zy3HqAxbtEETDudIKt4R@S(!}&Srrvl{zf0dC_a5U-b+HnDVu8mnmewbZ-4D_5lEjQkX!AJOE zw1qx=>J&VvrQMitggLTpXlVGdz7{`SEh3kfV1M`0w%da4VX78ke#Ti)o{D6&kdQls zb18CAHjpdH+9#LzW=)9Swef*R61jnlEAaR%W`f#q94O<$1$XyTomCTrtW7{py&y#C z-84%IfD~2E;yPoIVD5bA{@=Q7|K526yiV1+{!}5q8}(N!o)wgb-7sp$|JpDo|0f&f z086965;rqH!EFv;$IL(Mq*t9siR zPEp%|nhU7q&;>d40pQS-eJ^KqkX;e|(LNiR{K|~^+2o^pw*asg&n>_zfoOjUvAhw9#t^BsTt-C2R9kki5Epw_5PA8VIKHK?o= zH>*;45o3%T(h@0GQ8*phR?pnRtu-NeVQ#)uYpmq5zRE4*yU=)8|CF&?24N7(iB4J7 zQ0e+?|H|&zLheFfIc0sonmHw{By){`af`r%?FlM02&lvriXauxG7IhUeixrNLngYa z6fgsyO!}eFHBZ_y$IxZS&Ib%o@xEw^;DsnIxXBtWFzhD0sB3y{Jy=)YtkTd@|M8Bg?cQz^ zh{h{{z$$*z2PZDipa@!hmQ*_lgcR&Lo9|2+zi#T>F^&mx$)A~1VbLk+6@LuWXkGqV zkSctHztV_*tZjD-sBw?-%oMD%uVp3DXd5qcJSi#B5FC4zIZJA2zDVjB;qm$wpkvKE zQB@!o&>pR*LjT>Ddfavl&T$j%#9^_t_1U4R;w*9e$qyrjQ|2_SRFE3K`c+Z#=KEw{ zygt0)(|w{>$ktk~yq&r#BZnuhHDYu9=kXucT-q;Vh}R5i+EH_=tu3RE+FGUr3L{@M zja4}5YSq9&8_9YNEnFrbLnRk#&VfX$w71Lw50rg#5roxw=+K_j*4!NXUeKr@w-Dsr zg0;B}?z6SEef;1dBHRF}oquLme?jPflg}Wm15MogXIhr~T|Dc8#2hxo(01~e2haSU zNIr$qf7G=4Rd6FL^Q;`(bT-$8ZPjH{h<}SP257?F%Wf=Ih9i81@_S*$Pz!@mx2ajG zonx&e1{3FBD=7+6tg>LDLWxTV$_m1bsQ-D0yJejF`)rE+jhGh|f&T+patiP59Hpsb zUhlM?RJTp-n?%^}>%MO-c&o`lZWTD_OFwQEsz}a2NnCr$I)v0F?aTu7d3(J)1p+?i z_G0{GtER;M5U!YSAwFA|<^9rXb)Qa4v|^?+S&27-;$S!}LCID>y=ZVOY4>>&m2O1B z4%CMBge&(-#?JV>M~E6~-D$AMnb4_`v`DM2Mc#6~HM+@v^=S4-0gk9xh|G`5I&#Jp z_CRUL&nt6}CG7X%5bju&5Z1L(?B)NI^yGa>FjdR-xE8gndKDmN9dA24Zcw6vH1C#U zo+BtXcQ_d&qKJ3~l{$+3;=X!#Srx*0}}rdcjPBl%q1hS*KJo?F{2p4sSW;RIoFo~aqE zL$g&NXeb6!cOIy)_q|U0>)f4u6PJ#tdtMd7CC2_KUL~gDbWc7n4poB0RGgSqJD&c5 z$0)G7Ya4}c;U++7Q&enhQ2w_7O=SLJG$hyVkLiLDs9>N}1wYbbvO?(V$P-RVd7zWs zz{db)fVKYaSs02x_0wf^S$SH)jwDl3M#-Q%m?k){sXE4PHLj#@;5dBAd)5BuPwhD0 zRqZh$ai4f6n7wRbdyLd41Iikwsuu>)Ka`#Oe*HXo@R5b7M1k#-zp&8UQ>0d~f!dRO zJbOO-r@@h`RgAzTnf*IgiO%}3aHF#cIN7XwG6maEn-LPMu-77KyW76~_WaTKp~gqHY#(wdINpW>bdvwc4M2=iVvIkayeqMOCE0$ zWhzMYp{p@UX0{kOwlO&ZG2;%CfWj3Ktj~4o{$Mi*GGkNe3nuf_K~Cp~--;GE0Hac~ zMqfWBXaZOaB>!d*07Yc%vPuL-(plk!&%6BRBOnV|kCh!kG}IiZ|o z#_%k?HgYk@uMk^ngO$8;pr(Dv#y=G}CnShw1-bd<1Kt~sxGIpuX?`>w!McV{qFV zT95~2?zs!|CLmk>$pjkP5XI1EeiTIG_lVm8ldJ&GCERO`RiV4rul}qdStKPhIUCYV z=BDRVHkixi8D^wK#uVPZowsz+tKbwSy6Dh*HVlp-vQeRv<;W z>a4u3b4c^4=egwPA91A`Uag-|7Dk^D=DDlvbTZIIh|N6MLpm?&;kcFWJF)r$NXOS1 z*hJL>?W2!oY_poFx!ebfn5;5*R65n?RO8gF8j5a+{3=D4k$V|^XB~#-k3iX*F}J~o z^Fe1mcDfFPZwNPl34W*{F|*q9ZXsrHR`p?UnG@W-BFtTgdMDLpZHg)HpaOiR@zhJl z24j~z7`F%X-S6dUg&BSebqTUYdiipA`sJhBG?LX~?AM}4msC8AriR`*e~{^!-(O(I z@xuG*?!*WUvZx>>w{0#1$fV2eHDLC zlJs~ZgF`<>?VkCZgYP^}yS(U*oHQ0L$nDOvK!@tJjDEAR+jA7MNxd#klCsDsaJ*%IiJ_is2a{kW3`&V7?KQw;<8|c5Aw();NFo0LS z!#vbNuNt|Y)FvDCW9z)S9|G}MCP-E7NWndq@hsPRneJYpc9Y>x4OK^+HF_4JKX>=9 zaQz^K@h3CC@O+|5((u|kx~k|-rxZTj(+jWP=nItcFv1|pI##h7^9yMnPCVvDCgO_! 
zq*!aH6XUHq5U44paL*Tr)CvKDR%AWS4aaLq1=RkC)W%Fwb2Gnv% z0MJBiaw902uGrmmv|Hh^H8wr9o$H_KY<*H6fw-i-qVzCP9XaFp7cl`oI zm|WQY=oZkEMTho&5^V&lZ)Kr-MvU*f%o6K;%F1)a^r7zjn!Q+xCVyB0wzC-8a|@t6 z!Lsb_{dqH&+P(A7s$;rE5#R65z5i8qom70GE5b^riT z?u>^P&$rsB;oK!q+m(%AuQFq7o!G-BKnPKn*V-V+_+I?6=$@L& z#{q{-o$&ZJnKV;~ zy0kv|Gyf@dZ(#IyvY!kA}GbguNHUJ9Nl>rSC7)$3&}t2;l0VX~qKu7uL_m0U52 z%p!w+TO8K6hcgALGj|=s8{DL<8}W!_bTlgaw={cpgGPE+mq^g=zV=QlMQ-3-ca(X< z`D8TWsrVdtL3Uc8qVJc|nhfw~IO8L&-}IYg3hb=f=2_Stb*mN?xm)#QD zwoDP^%e<1NAqr!dokg#dx($Raw@vPOEP~v&%I@knW5$m7LXH_M&(d*BxqF9+edUpw zk*l)YnsNfo<0p>~#lijM55i1L)dU{RNlSk}iqkB<>!>dMHiRfXy2Mu>RZzcD1eA40 zfD1<~BY5cM`s#(&l~+dPfj#f0??ee+-*qc>iejg}wn0&}*H5-`pk&X!TY~YYC}*X+ znZ!E?qtqY2&`;?|z*GILz)gd*a-wZk7}ZL@1f}SZW0*LN4BM}zxTo@Hp*7~0FM6*B z-;)aFraE^PlgH)rhe)IlY#b6(&u8u9@=nnDsNkMj|HgxzvY@{5tIoEX`fiImFXHVp z<)$8l14vn>2}fsy;NbfjTvj*|RgIGk+GS06dzl$AVs5;au^Qm5uF zw>}KzJzN+;FCericjSaogt_Go=UnG{yqoQ9wKuXcciPe-AUX$o&IF_`T2$%in`26^fel zivjhA1*T?3?e~|%*A`^DOurkRKLI>aUgnj@^`&W@^zVh0_q?w$Q&F0bkRiF>Qh&Vp zWHQ+`m$+(p{h}`4@C%DeoDnEK(^APhCJp&qJ7}dM0kit%#)ETo!)5g~=T( zKC;ij?+Y7II|vSR{CPiqvM@-FZrgh*$P&fXf{=1!mD|;Wuq_(8t$MQP^1C$pBpp-t zM^cqD5LwqE3+=Nhq`ozqz=B#I!$&Rj>Z)tgH11vrYTUrBR3|MSms6T`e#Jw+n_Dj@ zH+-{NjI!99D2Hk;_2>?lYn>{M=JSi!0Sr+kA?WMo>5N7ZBHroSgG7>fFR(=)I*wLd=6b0!xNcM4h*KoN!9 z0_xwp)qK{tl9<@3UQ}eMaB_?p`&}s(&LmFRAV?G4X(tm#gVUpJW?qe~@51ufeoNn+ zrT*AXC>1nvwDhtvih~RA^KPgnDKW=26UuA&_cZp<@- ztkWK$n>%WNDJ;1V@E5^w1kBf+b;P?Xf4VM-iFBCwJK9o&r+em(3hlJGyPIr_a0<0B z8OtFpG6Xq_-ERz|`th`nf~FA=(nnyBEnsTftrc!u-#yH>gW;V_XCAolBz|=rr%cEZ zE=&8Gfc<4As9ZAwaE49N3(o>Af>NU=LcU#zN1l_vb)=FKra3sXjL~8jYVi{QUl}Xq zbc)w!a(p~lmIO(3UBy*c#MwS-yei4`10U~J5Pma}afb}xJ1pi9f+spQs5{y_6G2|- zwLeg$W40eqkqE78f#I;!ib#C_mj=H}?!LG;K%@3B>IdeS-=UY0(czSp=qz!x3|lY+ zGkN6XFKAh2_j=kRl%ii{C@;Q!@~eo{w_8ASq}l1WmLqtxnD6(O^t`pp49bWIZ8Hk* zQGYo*@0gZ+bZl3g!_u5FMff35%aS%wdBnUQSgEo0dvC`ISJhzi)mjN<%^RkkF^s7F zJqL4|rqsz}%p-sD3Anr~N4Oj~tzutDAP&n;)t8&21GO_-+;J9L9e@4B$2MF}5UOj8 z9F8oA5U>HNBQz*pj;_~3A6~J-lM-f9{I%<#%1qet?E2?{X$7{&NC}i(Hq6dE zxa#tj%OP7zaZ8o%b%B%`U*{RmD{&ST<4_z^-hZg6yUuuoUG+tYrCF*mW3+Fo-j}IG zrp6$x#?jAem81LIwW*7sb!W1%cyR^JxjV%_WJK>INvL-((F27LM*jC}>`Q(EU)IrA zI@X3=&^z1u3S2E($)r(HqrjU$s`&C7xW_>nSHbnP|%Oiogo#Il_ zSzq5LhdqHN>FN8A@fIQ@3T*YO^5L0`DEdwlzsHv2wXE(Bt3pU>`DEPvWG!Rght#3CglE35g+T;HQ-haWV_$Ts2)Z}Y5RZ~IfYm9PR zWUHzkgG+$@XB|_VtZ|hXL*|KJw(&4W#l&*0_RsSTUH`9kuKXR!x9yK46QMo+}Cz)*Li+E=Whwg!d}(8bDMX_dRRrty=wQaa$Z}p2>k7hmpqzR z`=G{~8jr)X-Y`?!&qxwnGwh!jRGo6E|5P$<*)k!?v)(~7f$Hu+bcR5+h+PseYYRzQ z0t;dyl$FPAsbz&1it40l0*rIshidf7nH{>6chmVA-`Yv#btn2`^||LUd6f0wT}Cz) zjeXDaBF^1?qt$2>Tl}Ebz-RTlKoOh=~OMwQsgzf&I2mwH&VpUfyN zqqoBqH*YKVOFp+W;bQUgE}6(J5s48xbp7;A?k%xkKioiF4412VylW&lx=ABmaOEC8_1TUdsrJHYq0T2X$4D><2HDVU9AYBrRHp9)^g zZ&fp}%x3}3{GVx7GmvIA-P8Cll@W`vagYam zbZa2RI2ppaomKJg*Vc<-jjnW#4yhs&-wF6=l`MB+W1g^}T@|;>^@PCv&sc}0S@}3d z4rX(EyEmF;9(cFmz{rC5mZbL9p$*vL6;1}7f!M-IVTikTAl(}~kf#Z(5d+3)7!!ur zGTebIZ7iQ}oQZ^5QrY3(d|m`2c>Dn9e7t3;Of_}e7QV9cKBq1Lb}3dj&!u=bN;Zo<|tZ*p9NLs zOA9$w$Y_9EFC9=QN$w>-$H})k6&2X{<7-fTEz9V>U#7GE+e965^I6|IN8D`VM*7#? 
z6y-}^h8mTqOh#O_Oc73LYi-CsmVI0=E`HHT)|eB{aOq~~6rg!P1-QI4hM3WqyV1C2 zg0?kmtKL7YT*fzFC3RIFkZ^f4zRxi1MAv#Naiq+7MJuvj>Z2fCnE(Jf((r#DJd?5m z`Cm6{ab$n6hHapEk7RlH7dgjGmy-z&Z1M?sOfY52JCLm(v{pt( z5@9S9Gyh-*^2J~Wg3A8=^tJoqj{#A{GLpm@k(is#JCL;%>VFzc0E0WC18kx8Pm$qc zHMH(PW=1BI*0e5zZYleAAO#D6879uGOQ1Pow=)T=i8~PT!h-E7P$E(fV})W?4Kb_7 znNgsw-$7_XOb3G9<30q?5Pd9jZF-P6rCxjE0%qz22+8_-E6jddm@fU05}^HKVhLZj zPXm`oBd z_KCcUR|u>i7TC(Olek|OLkyM)l){dVG>FFWAoVYpxs<-IjUJOYlN2Y}8d|Ncp}!np zJ$E5ckM4qLajyjMvHP5bda8GA1H}&^k@M7y%22szQ|8Yf8usfSe zvjn%{8qujINUx^U1J}p->bH_fPvIYmZ^}8lB)HY(LBAGg_O>#5SX-bvUTSZhb4vL; z$j8=mx008yx+7>FywTY8bC@Zuz8y%M^;!UhtXh|Ho4E?)gtwE>3zc4NT}Dg?wT>eq zOwnOBNGmeD=bY#kQZMD}fJI_Kb;w!e?5)H))wH0ZS~$v6O=UM`FGKU_yJELvX>?8V z&LtlWPkhHvZe`U!^!X<4cwQY*o%msW_*gQPm!T5T2-_CXUd4KV6w_-U9;GzkPjn4f z*OoA}9zR9)jtN1I$h@TY18Mph^rP~{a)N&;5{PnZv zI*qIMI!o1rh)#*C-h(xYQ+mY(wP)RcIt1Z%AR>iw^s9l6$tG!L_I`d{7D|(&XZB8VPd;|)@2TS$=W#%wt`6hM5iQJ_yn(vO9f+5sQm|357-D!8arcK% zD1$q5-+Mb_q^m_;SJq?Xk~}IWD!U>oc>XxNr=RU%x^O#Wxl&SL8e2 zjC`tm`qqtq7=G2Ax)QoqTm8uzVi$%84+M$3tZbk~n*@e?!XGCHWoWg(G3kC`YeGP< zbXnp-Z9;sGuc|GaQ9z?4gClVe?wP?(#EX$XmI@j=5y$hH2gJ8vKqMzDJ3uXrtOm5H zg0+QN-Gf~aJGABeLupO>k{hml<@Z5BG;rVs^%^TYfssD|I-k7J9v5{1#$ppYqQttW z{Xaq!9QmKewZJr}0`%R?l6D<}5rQc~!=};(316A~uLzoiu_7&5%(qV9bLa?`iow6h zPb3nQ5<3PkfrKr{4g^lYwklO)(zj3O1^fSpZ38R#WwmUqvN7#c#66(d>-y1*Xro?yV2wQ zFHapw{y1;6g0TNID4Ni6Rxq}qokvDxxmpcZWHvdS-pK7G-rp5-bs+%rOyX6(hlaKA zz*!bJzBsi-9IKhhKu~f09SIVza_MrwMuBE(LJYT;ycd7JbQbpH(G&{$Y5REvMiw6I zNjuv1bUW@{dJtyH;kw-?x@4k22{8Bj_%G}#ESxT9S*;v}uWOQZ4BX{1Le2K&-P;KGJ#rNiH-~J1-p|q|BxvU|#LY%ut zw!QJp;O$SVu=2K(I>XfEfwm_aCjd;oW}d)Nv6m$<(&CNGofDHF6;}kL=T_E!sH%az zJLmW;&&STRC@D(txaJe?v#N(l3mQwS3^CC^UgLnNY2BmFmPUFg>tfcTao*PEgr`1 zdbydm-~gp6MXu94RZx8M1G$g15Gfu30rH*FslrYJMGXhC2DyEB`sdx)_eXaj4lQuq zly52yly7|B|G26eqqNBK$4(*JRqqq<Mv}=bZ4j$}Wf^{E8vgAmSVwH_9me#XB0NvwfV~2us=vmn)?-Y*udd!*g3DT@jYL@Hq6AK7cOuYthBW=OrLB8Tl^&x9-x?Z4TPcT2t)ufuj zO5v$48nQnZ=cG8n3PT+l9R8eTsM)}OUK*tef5@qROsukMyVG(J^|HIDctye4F2*jh z7oHb8Wq;TiF8=r4bEcdVP03W!nhp(ie~b%ufT+^5nXdIhrKh*jTj9xVc(RXYjN;$*qK>-6Nsx#5S7pP8EtR7c5bcH~|V}eRN;tCK%*Gk2V#$I@5 zzmPfQOTMG#X)YupBSg!GD*Z?as+3Q=w>l4(jIr;E&-Xm;BA*!a$|F$gA`$i8`(a`z ze&(1B74^`Say|5bu34E!b7kp=_+D|9Ohd0Ft10-Q!)9gng7uF6>SX*bgSy#arN_a- zG^^o-#s}BXT7C5*FH(zNs%8mcrRW{6RbJCo{qdnj3NNm+PlXuC^0uk;#ra4@4~_Q* zYM)D-7h_L%>drezR{LHNm4s0_B<~decfm=v0Xt4kfRgXfp*)_6tbiSyPbGa!-R5}i z-)Ag$+TWl${AWSKE^zk#F-J9xkLtb~X)#(7M|m$%UGh;A2-=%GO2eIxaV56%{(htb zE}fqYFc7i3s|`uAt5n%cePyWAgvB>>^EjzB&!Kkqr`J{jG-G_@Xo;rE_hTu*&}v^eE|eEb4~Lf zoSLv4(Bsq-jaDKfG3PvBhnS&X?Bq<;;FaO??cyq(lPOM#JW?oGm7z?*JCK%+R!nKc zk`krx<3C%4_@MX#btuDeg=8ka?MBd>dTy#0q1t&kcC^GYCEVcn!xe`u;2xqV2CV4( zM;MAfpjI}YVn+}GO$~4`|N7=LBy^eD5xuzHc78xp)7Uq&=aFgCLw+{Z$>*oolVAc? 
[binary patch data omitted]

diff --git a/website/docs/assets/unreal-container.jpg b/website/docs/assets/unreal-container.jpg
deleted file mode 100644
index f0c0a61e9519359a5dd980295e0d1d2b3d573e2e..0000000000000000000000000000000000000000
GIT binary patch
[binary image data omitted: literal 0 / literal 10414]

diff --git a/website/docs/assets/unreal_add_level.png b/website/docs/assets/unreal_add_level.png
new file mode 100644
index 0000000000000000000000000000000000000000..caeef03d10148da9cccfdbc8b0816b130709710e
GIT binary patch
[binary image data omitted: literal 8393 / literal 0]

diff --git a/website/docs/assets/unreal_container.jpg b/website/docs/assets/unreal_container.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0fda640b00a4327255a316d96683dc9ae2331903
GIT binary patch
[binary image data omitted: literal 10414 / literal 0]

diff --git a/website/docs/assets/unreal_create_render.png b/website/docs/assets/unreal_create_render.png
new file mode 100644
index 0000000000000000000000000000000000000000..2e3ef20b35bf1d89bc77946472886d4719bf7b66
GIT binary patch
literal 124745
z0W~R~uMLaX^-b#fX?scel5oCd(pnunrtOVr(>jygdFzlsx5a~pw9%B;(SZ-7I}(&gp_F!q7_@%WU9m_6%Nf|+#P&cF_X`{R^T{)Df;o`)x< zJk1KA^1f6mjv>g_i&5s@%4!bBvdAd>+FaYueWZ?jfNtYhL>3#F5*d+H`i-(thL=oD zic&Vkq~Bu`8l?>*=aZeu`c?HT!WRx;(!@vc-jieT{>E12dV*+CIw%;UdmS*^H`wUIuK+Fxr%gwP8eNnRT&m3 zqvMyd0G+6f;fMg8vC&m5Tiym|glSKfp(WU9*CEhlWd(5SzpldM$0p;04?gDW9D-d0 zox60xX{Vixxu1WB7oMLP7>bMU~!lkxu358=I8(@`lc(|jSL znyS5xeIiu_^dLfj^@KX&tdoz!ZbQ1@!iz7&FAIxMz%JoFf@2x?IqBTfF>L#Gc;xO| zQL~}~!Q3t=YsyD80kKOiSzn|E(b_5m+jm55c2`_?#pU?!&6$`yWfHO}<8Ziy-#8@* zc{^~le1wHwXEwp#P>Ob0nW*;!QA4jq`O9r48D6Y#=)H(>TF&%@iU1L`9U=#(8Iqm2=SGpT_K`xWh33bgVfDYez?J;fI(`x@)@XI{hbmO)7`t#2) zV3%ES?msTWZoBP;7}c{6XHCTe_uYw)7gpop3D4mDrwDYjlR&3-LbYKvYI{Wi*v426 z`CT@SI(&cZvGX>#^R7F%@Ck`jksArLzdi0`oO0Hg=uuFOtY{_53Ttq}iRWX)VaMV8 zi!LXy5*TXB@yO__G4;inxN_{%*mIX1kyX2ly>J^b;n^2y6I+S{_TLS+-!g_Zv%`2{Wi%z+eA|DqaLHnvapHbB?zmAXs|EqxkAXvm z;MOZH!FTV>L1}pz?s{MX{A7D)U+`D_<%~0_Zb$LK%hU0|{bTU{_bYJWmDl0o(@%o8 zp#q)S<>2~T?!ZlxUcuDI9>q5^9>y7`pN2TWbLrBBxasPDVE&IkBI3>Fy(BXxlg<#w zkT`>q4&pQyp9Q)!1!i|wJF{`>wXX0cjb7CJ>Z(e}=oZsNMiMdjH~WhcqKuBp*9HN) zI6+QS*Q}5aqwl^RQ=XWN*I#>^izS38b--S|dgG)∨e?`UX!=eGccIe-aKm{I|H{ zw%hR5JMUq)1NX-*w~WROf4K}lefJHXdh0QUyTx$uK8K<{nu+cMcf{CxZpO56cj2+e zrsCMsF2*4T?Sp%6z7)%8R9{v@dsJ-@J+>Q!2kyBBFHC<7PrUR2#*cd(?-A%em@^%v zR58PuA+*Q}X1R`j9J(=)DV{{yNF=UsaIQhIY@x>RPYuU44%wJ6=2jFfZ$hWe{dwLMC@-&}!7~>J@4FA4ec%%8JmR-l8SIa12^znA{46F; zdJY-ceKG3z-(rtFdf?vsAH%`FKNY_jJOKCJb{)R?=|}kUa^a_*><@%!w~L~-vJUtD z>o$D!-lrHad@p*F>T%c2w=i9=9nL)e4BF}%P+Jip5c%<+36pWwUq)f)A=~2KF^}Mj zuYSP9|NILdyz?5qSz3*Yue=5~U4Aw`eg8eQFUUq_P8ZZ>cE@v*$04WuD;#;uiD>9J z1Xo@D5A^Tm#U+ebVv$t)p zK4fKoA<%t?DO0B6@0b1=xj8XBHRUN(auL^OXJgW1PvXB9-G=Wz`4G>2G6ny-?{>_3 z>P-Sb7YyD1cewiEb8y=QC(%(6NAI2Y!ygVDiN_xqjW6bXjbNv3=;YX*_A4JQIQuBP z_wgJ&^UB9~Wb9*jf9gYcpFmel?_x5t-2% zJo2w|@TW7*#fsn{Tz&apvG}uTc;d;I(U7qnjyPgCcHOlzCO^y{d9gYAY*n|Lr&9v(LZAm#qGL`IXn=jAM^s58h5V@BEVqo=;%pzDJ;> zDgzUrnubfxI|+H-8cdopowl`1oPGK+^zv3?^0RYr)?Y7T_|Ug_mFjyri#aSTs)D!u zju><2P59u+e__V#w@}Ra?YrM^@W(@T#$}gZj;OzYcGDm=PBK;P`X=sJ2?iIyOPAVN zpi6(5+auZzbdHT~pa2~gRXtgOgh=%0(jDc}IVbJR2nmc}r~_dW{Ar?^7s~eH*1N94 zv}d2j>=|>AO?y)~+JK%tsJfnXCcgP*5hgu485dl961w-y#+1oZF#o5e2zBj_@#80B z%q6#B-WMO^`41=HoD0vu{I7~=Oz(gZe>eu0QI%~f`dLFGDw}*LEi1)jvJy}QJ>}HX z(78i@uEQX@cJ<)qe_eyAGe5(GaZgZn9jB^m1)T-Us%mi9p-1EL%dbQQk6*TU39h;B zYHgn=>U>k9Gz*fkYtXSvN8EhfUwMtyxc06IIN;Dx*lR$0JTc+F`0D3+1jvjoKI2&I zJA4lw-;Qc-7~MmaIPj=Hp{9Tg>Cy|Z=(Fi~a_UQH@b#pcx;J*&wJV-|@l70Z_?ejh z{zrKA=_laNr!$65k4D-dqXKkfbn?w~&mI57mtTI5C!hZicG!S$D%tL@C9 zbSebsCG6F+2mW!<1*EG#e;y1fv`N|3m9oQe!A0LN<4j+Z4+yS`q$_p`n?(-N& zhln?8TO4)d2<*0d4?Oh9(>V07GvK3lb_&6(o*^jGeK($2K=Wac&59xnsVT<2cixE4 zzxWno$4| zaI$sr0(b4yn>I$Ew4zA!0H;2BFYi$vj=yLOo_qRny!z0Uw5Ps+jCMothr@n{14i`6 z6_;OzC~b66(7C~%3`z0RJIt>|`9iQjm;S1?N2D!*bb&6&SlRlTGAvoL)OcO{4BIpA zSN0O-`pO{Cbwyzzo8zS21L zrIpJW*Y+|hN;7DqI}z;}5ixo4Lo~QAL(Vok;=X$)Vf|6lO{Zd4_=sx7oT~O>LG)6X{U>mshB`!hNx&Zg4}~Ra%d+TG;km*E=wFAy_YOX zy5DYn$%5wMffwGT?d=f!^`t{E=b0&3u(XKD!)3VnpEuy?XP!n`Svh{Q=bkwD;Dhk{ zgOBI@`r^C`PeqrmVN9Gj1;r&%45gD}?-6})_vnYwxA&en>aasGZ~j;8TC^C&rRDf` z-Y>Ls`e{f7%FBxJ?|W~#~+)4etr96@)HkX{`~K8(n+VFXU|?ZosNg{iW=N` z=bhMZa0M=km*mHdk75#Uq^|@o$<7CcoT99!@JtHbmw8E)g(s$S%s=78YAU8-=se89BSjupT^ngyLV!CW+Fs?+S`!m9{MG;Ig za2GvUHRX%=uz;MQd#qq&MZ;}l$IJclDofYTe z-KS?@#lrcx=*F`#Y3iex_rqf3W-%Ot_M+VUApU;Id5(j_j|D$1po;t~+P80yBab|i zDr;B#@Wb~6oeF&V`CQCfxDaQac_x1T^do%r@kfZLdKlomsgBX{5Myw&EM>QOwKNLa zd!M~Iw*%GQ==b|*FMJzwUV0vjYw1Qs+RZMhog=<3VZLpx3*$? 
z_0?CA$uVAhVGhj#KJ@F~6DOZ>IGq_`y!qOPC@Wuxop#wCvtE9S%&eMoJ{!KD^z#cc(g-E~Ar)8ED|YzEv00@)m^jP^u&wi(A3kV{+cNqck9@?v`8 zWMQmkTVt6f6O5)6Lfm9`QFkOr%_q5~1b{{^Vu@)H0FwN(7}=kMn@DR#jdhW{$|%7@ zb3MiWL#q(gttbI9$~guIGO|S({*jf%P?aX;j!NvCrf^bm=0c2V0>6wWjDyUJ03~^z z5iW2)L00zHB!8y~x6DrfPX*~@#mnC^2LT6}n*g+ci7+*EXkt8LZZ@+q2(I#_ zBw!aMD2m)9hgQI_iD`P08tCCOVV(uaku&$#0G(T1MqbF-Eznsw+ISLZ3((awmUU%O zu_?T{^YH(RNBGSiWgH#^UxD)__voy;7v4IH&SI4 zrALzBRn3)(9kM{gbEj+2B*IIIx!B4igh8XV1hkxjZ^pkz01%I#!l^Jh0TBZRrrw;1 zx)=4?ylIqbVO?^f2pN(j){`J|F0vPk4(NHz?ov-r_EQEVe`)amBMU78AZM0q6tL4h z$=8%uJm+d>m244HJVC)<@`^3c zHM__5#io%!xQpd1&w5o5b#<7)v*VI-!`xuaGI&Pc@%VL1Ge)7Ehwjm(pktg(PZsl_F)mY?y0 zB~-<*Ng*swWfbQ=x}YR*Bsk|XC}BhbJSLDfMV*t2aKKU=9CDRf&_w`Uu32(>9Tur% zF3uDyUK3%RAjj|x00k< zH8gN}MX_k%0u1ao2=$Ch6Tn$@lhiJNg}lV3-GbX6lmC8+jD!naVxXcjUFbXl<) zvZy$!Xr~g!;^*fXOt{yTCiXE2SJv8aQWnWLG9<7=7>{27KtYHLUf_s}UCJkUyex4f zK-ZMx03Cta$+Ok6OfAT$lvm1R!2)4&RsIm2B((oUJbX`VPc<4Q5tAYTQk%|UMgj(A zGmOd9$`iiBB2p43C?%5Q2r@2ZppGhye0)Y0Y;9~B=Y0&e&&A3Us3TE3(%RMT{G|1v+(BkQGm<=I)O8pxU_$x zi~_1I8*meum>PkC*F2vCbgq1?jn0D6rjUU9M7uzjpU=E-n!&_0)7%+mW~0lH|7A+f zJHtzg>D*LJXz*@(#(e~z%JLG_R90g9?fRf0N&pfRY_@P@2#0O)F9Exn8YZqgHYxX_ zCH_p*M=u$QBY_~cav=POK$uT^qRNq8lPt$6Yr9-Y`%UN@1=ib+9dDr}X4H^kUH0m@=! zntFSjFThovveum4JzfS^aof#GV2u)RSDaD}HJ!xKt{sEd6?2@ml5(`ewe%hO>4<47 zQB+u{<;pxmcG)u?ZK^{xW54Qasx@}3N(hoH0HZR>3K^GLs8I)= z!MpAmuVcK)vIPsVP4})0!tFptX9`lb7U)u&&bVNpY(r^=nPvhbT-#iWo2Mo%wZ%Dz z`2v)-*~A z(6xo!@bGnk(i-Qlr*>>mMMv{@I_guV+F%Y zoH)9^1Bb*53zxB;S-Aq;fc`tMpraVRQ`Ih4zy?Ju(4{%Hm|eaL4Z3#3f^(kkxDRbFwgS@F14YmPRNoZFI?LTA*7!(4}`H4L?pb z_NRNi-QP-J>y^Oz0bPJ?3F<1#SPjVu7@qsx5Ah4mIS22){iasA*nRh1S^Uzp`dgjy zY8hRNM=OQVrTfrXpi5U`cK=_G1X>StuEnUOGop@tT_rO@+a13D(P#08#*V?Gk4|9D zqaVBOybD-hqgg10<2QBqhgDshJ!}@}em!f?Ug;*3Kb^0=y+Z0>P?v@ z1-ch!&5nQj<>$EKvMbn3*^eCs=$K_>!bDulzRAUOUt87Hs;-;xiMOZvRZ1XDpc5yA zOT`#xo0-WnO)NfJRJ6hX-G`rj7GJ_zZ6}^^g8Dpn-gzf&C1@bdc*k3Y=AefQmmUs!H?w>|b`(yXarrU)m;rIsGEjIQ~!W`XY4 zv-a$jZbAvP2I$D>B!9xqF>n~309|g56bz}u3b&N{xflK|{=>K5vPAc*to$3qPCM;F zpp&FoQ`gM`-8$~A1-ec6@Y_@UdL+*Htb_7+njXlef8FejYp4 zu>#NvIwKV59E@R|vrHXik3IIllEsCluWL%RjntUBB+yl@Ql=?U*CV%zANNFDG z>@G_BT5?*~HmH0Vv=N|n*hI(r^k=S?&lag;>5pQMuoBq%Bp_+1sT@F>Gp!Ej%=+Pf zPK6&)T?_KrY@H?83SCC$4}{|R?K^16rEcB2p+}GIsAQuQk1#rishJ8KZg`0lieyDM z7PhOd5ukI?)e*Fm4{8HwxX9dwLz8!SeGax@v-PbMdqrEc1f(uvOzTvdEylWMErHH~ zIA;$jjw?IH$>`X`inSXHS5S2ouWLc4?(vQt3fRlF2)VgA7{sLhl|>~^DRH;n)0%+J z)T2u#^&35Ty?}0gz-o)Wt#QDIfzCdfRsvQ68$kkT09~pwmH@f} z0-dO?E~E2>a^jh6cPBU&3WgNu%1bL9pleC59Ar(Xt`_Jv!Y#H#S_xPQvAklepy-&>6v&3oA5^j;gCaJC{Jm2G49bQNU)!{rdK2GviwA6t#LC)G|8z6j}*b z32bButP|)A=}LHA^%aAM#V091moLq8B3M!A09{6bM>{P503ZNKL_t(2E1oHx?Ah0~ zfE{-F_aA`T%6bcQ8~Hxk;jIL$1lA&fRso$8mgck~P@r=eT}z+?y{=-Li;&Uf7ZhN? 
z4g*kM9X0{OtMw{aMrR*TD*-EkjVyun1vKQk#BoPc50qjP~S5{)1)Cr{fQHq?pN)y;%xDRJv{{k1=X_WG>^Ho64b z0Ca9O6RBR3lkGz^Qg6JjQg}0vldl;RYX-X11PhxPvC(g^J%N>gm4H)>WNo>wCTlBE zcvD}QvQ1L+B$Szfa6>iAG&$`KGnwluovoV#UBn45X;IQc%OF`sXP-kW0V{!xEP+-r zy5^Y?NuY}(7>J{`wvq)(Qh-kC%QZ$K$jzhbI)DMgbqqCD)wM;gD*;ieM8;!LmTB^# zilv>JSiQulf~WZEwbe^D@)KZ(w-WdjNMIG9GflbLwP)eYMkx&^W#qJ8iTPEz>0UYJu)o@DSQdv=Z3Z5@;3B zwXCq@ri3*II&F6de`eet$h1JWv2UCm-%7wrp!E`H1JJq5P8nT*jIPQsIs*L+8HC8Lv%Okwuq=9CJsjIQh>V$BSjwZH1W(@U}DM07( z`q&zgd0|aaq^S6#}!uEnOp@W5fa#LLUeQCVKeGzky-_U(sQ)UnS=4?qiaZM#wSC@TRgfwoJa zl|Ywjn^PN|K$bWoGULqf5=O^3I$?A}ciSUgTU(39#weC8U4or<8iq)OHag4b+J2Ml zkyZj$0_!J%G=MH)n-kZnoRy{2L)jTL!BvvcIY76=PQ&62R9ow+DgmDtJ$v?|GeTMz z@(|1D*6(K7qpbw21llHnbb-$Cow`7mo#SPUZ%PVSzo9$EJxuLdwq!AK7-q8LF1ups z(iIwe@;?E(wu!|aWhG!Gu$4(5PO288KALD?O_j8HW`uMB(_}4GSshP;GP;~RW=zED zw8F63==u&G8jo{;2;=AqmoCNLBSxaQxJ)ztH#N{D+m>x*cg$Xnm4KB%`Vt@@q(-=z z6tMP;gpPyI8%-KEw$a$m#7^S|jcwbuolI<}vF)6Be|O!p&YJ)C%%1)1FFzQ-mAhco z$=^Tc)cP-+!o&S!$S4g@UEl8nxYEhuAF;9TtoVlhkUg0eZYV{ar6_hzt}3CaHYC@i z7UprRT7X347e18#X)P))_h{4p-!(MhDqh0o3C5y2>*Rgt(yfndsXC^Y zRsn#WFo=;}T5D2)!J@e&F5A>3Wp%{H2c#FP^~gf{&y!dN=GG>+-0nO2NmWH71f@;z|Ig^|98foWqRf& zBqV>Qd?Dn=tcsd}0}VnQ;Yb$mP*N7A^v3bz4AFUqNhz>rLbP^Xb^mRC!CS;4)&wj_-MlKcxJt zYHMp#i7Lhaq}!G+y8N2cQwSPun?!;E700 zTjlQlBQ`czplXOQfns?E4}(1BLXvRu(0kmdwnTDB&sh~1N?r@8jH`D7rbQHb8t<{J zXrXE6J^e7Z^ws~Fi8`LdSnLtSvrzlWb!`ARx&f6Bw}CA>Cv!^k$|yeDg(V>$8pF)& z?tM_Mk}B#4EGA>-E4Y(~q#ZlezuCC`?^(`LALZvt$ z1^qLbp;elemL0dS+bf1XVnZ#$7R^8v96eerYJ<;=G)LJEE1UF}*D3;shU9<^taH#M zmTc2ovV{-xsr!eULVaHDa`mm#x~jBeqFs4c55-`xV8jyIs(FYp3`TBt=LPRwWaqWa zWfr)XNafV@pnNpyv;A{e_^{ValBojALY-%LX4XO z*$^jFro{*t_p;2Mr)mpCLp!uwYSmdE{vrUu6;5)oR-8>cCx*XXW}2oP5Y4n%#1jzp z8+MKyG7_ft2Zysd~V)l=!tASyzAVSzN#sKiE@ zORDT7gP&YU-73?mmjAy9X)}bz(zD>2g8&CwNJc#(z}K^Tm4^qSsU8}4NOk+ZD=I6u zTBTCmZ?PF@>fuwl$M8s!QJ<6>jiX}RL) zpgEQk9#4i6v(IPAME?Y0I4j4j2O9%YuYpyN(a5lw)H`+qO5tW)qwFiG}kR3_>Zkn%sDg zO&oy>QdFTnG~Z`O2p(GaE@|-ZO()4k>XN8|iJH5U^?dWckYpR@Z-m3y??PB{z#9H& zKoN$XNLq~uQsWVpl!ZTN{$mc!mYDd@#5~{a@?1$vsT&^D>QQ3l>Fh6mW*1dU^4h~? 
zv_G-|sW}c>jc_a zMqWx}YrbI4kg#*L89n-Hxf?Ia0<@WKH|kp17tDu|OSJ1qF8BmF%HuQWhECB+-`Byl z9+)lNqb9!8nvqF}1S%z!X`E6lBHno!D~Mv!1TkF{N6^gl^R@zjK8iY+SZV*hDBX-D zkhD!iQ(GAIPO(5R&?o#ZXXYnfdpwK8ukMBy4<0NbIUQpj5f}f*7LqH*14j&ogbtgy zLbknlj#m6tQB=E^gp0<#LT~AFi3+Px^uG}?wCNS7h&0)m_WO=1STF7HinLVg`NJ#8 z`!bxS5Cz>FhIs<8LFEZJw**V11VDoEL3!eJ0i5Jgal(>oz9iH_Ohg(=YG2=KeszD8 zl!x;OCp(EV6|z&vrlQZiD$U>wgN<;4*`tL3-QJJ5oy|(?F4H(qw{568ojfloUs;?k zJGa9*YRMI7iPCQ<=nLH)-W;iOtq<+c7=9D`yWlSz=9kEH;BZRAGF}P%b@l#joW%|-Zn$S=OScLw&WW|);-WNP*-o7_(tJKZS(D)mHvRFKJ+ALBqJzQx6Whv-`pl5INH+PZa#Cp3yKz`aFt{K1R?k%_P3O+OV;>Tz5OV{}s zGYsQyz*Iv&d^NAt-Ex&DWLr29} ziuvUQ&SQ@F4e;Nr@6I*rOmGDop&Gt5_1`b)MQc@0_B3c!=U0^>s}Ogg6Z<@o4#|6c zOV)Lv6rkKPdRu-S-EU(LQ8*Aa52mtdVZ=7|ZD=Z8&73j|U?Q|!`HyPX1i6TCR3dZm zZ?<>U!bxwCcjp?ieplJbQY}ZO@jrQkQM74>QEkgkk`5ejYbT)nGDv7X3;8ckV1*oI!2%0k zI0aX*NK`s+rjFtN`@^x1Zk)9`DHi{l0C6GP{DpF?%9E{3;$~9U7a{YVm-Q-W^LQ(x za7mG1aD7hiQy%8`?(7nBnn(j{no+@+m#{goChhglpobP3c)rIUOXscwxqi#U#9i+I zv(6j3Rkt&_LrJg5x9-^GDh=!j?`vEFr)}7d&#Ck$s}JT8*aI~0cbZDup1e95iI^@- zOoXlh^!Op?Du`7KOPhjbjoEJ5s`8Xq3S0Pa!nBA zYyOtfuJm;7m2y2FPot|9g1I0Z>g%bdTq5duzw-^)mP{ zJ2fc(8$Tpet4GVufljUPjqPvVk<yIk=vzztLyW2yyYi6iyQj| zamI-~VNU$S8W;ai)KU+Fd($1=N`q+t9ep@I8ltb!kU7vJvv!!N-(hgje76_oVxar7 z3&FHeZ6af2;C9a3=&aM&ajMmPXMQeP0<*K-r9#>J)8})j0=J&0S|YM*4;gXI*3HB$ z7*y30itypex#@A~13X(j=M(5NU0{e?UpOb4L}UzdVN+G5aZg z>h8jkA%kuUYHJr2R_nN!iy$E&fT1iN4R)df$`8Ga?vK_-X;SoWz9uD`&>{&w^0!Pl z^#vNHU&1WtAtMCv_Dt5jht9g+`XJj;1gXH85B8ZIr9gV4P=~sLK|6dm!U=e?9ILPP zgF7dHl)eZ=y`WY$GkdBuZw1Bo+oN)w0o16h=*kTao@4>{&}|(gbBg_%WM$<%1&!ifrNt7c z!d92lvY%kTD^F7Vh8V(iEE{>zO1Nf3^&LCl^CnC*=FE)b+$k(1dmVmx0&7POL4FN; z4z(&!`k+2jK69HKr*-Bo_D;A)`;%GKxT%2vOy56lq^?D?D1=-^1jdO8dyh@#YSEvl z<51e1jU`|Gi^ zdFEq;q4$A<;T10ng%wBZxm-IO9D zI7!8C)>fWj?t$@YY3taPU=^Yx?dP`@uwn#P_KBhY(t^F`)}3kgn=+cKK+1T{WOZEXykJF!eMBtpyU2J*Bz~|JLyML^u*rwk3sy z(_SFZ{|jMkc{n2TgnJ8(nqm0uJ_ zM6(ppPcJlZ{YD8PHOqZZlb+q{lbXY-e5Y}guvpC=& z;vdnV;vIT9xrus8&}fV*Q!FvA@c0_s)v@2`d^Ck*{J5M4CD<2g`>q9uqi4Oa9_6BQ z-tu3$Hc3GuOkFNuid$UuGO(pQTZs#*?W<8`E;URo`iGoas8~*ucff3H*Kp5tTK##lXfenzdm=fnC3n@g4K{-6$ z3iMt=P7#Pdl}dD*xK9Hz*Eg2%%sNF_3UV`can15AUfBHC@8?pYsCXr$e59k}HODj@ z)SO6CadL_xm`FDel>dSjV31c10n2X6gD^=n$;UXJ_v3g!H8;0&)YaP^oPxDx~Ww z0PeHI0chmb{!FE|w9!0_YDgele<=rgs5UArno(grQ(AQz5UTwP;%2z8~AT^3t!P?ktU+frmuEB_j`#A3IUU*bcvYwzy0qU047$@ zw|_TI1230c1o~pLlYi{bIC%0~w`#_Y+d};) z9glb2cjN9Vta{~?zFR<)^U@T@wV42`J?qodwH`o5 zCcle>mQrb6PVG$PN)aK!rO*gbr$$8&gxe4$xe zG6x-WGM|ecfe?HlRgnh4i(RGt!lIY+2URM4`7oDOI);v&K^C3ad3Ju1g-5kv;2)a4Z{{9VrFT*L?CDU!(o#~tbT+kFi22Dz}=#Zd* z?&c-1U{I-lQyo?Jfwdl-&))F#`^fqLz;M@4ueS%cYnpTCIKo`vVT|&Bf0C4#&9Hu> z@9r>3s?lVs2NkjDMaIL@)Npdl=O=lTOA(n{n4E~nmsOP}Dq*|>8Q>Vz`Llswag)Tw z&?36jN8(xo0Ab1CoK_>!qPBw8P0f21pRBJYMDR1t#q{eEsc~ia4IqmPZ*3m!sm07pjb@ zp1$_;44-`OC;3+sdo(ZX@Sw)LaQ__eW?^zCoit1aK z(SFW+G1UrtmKByJ5vQa7cPR6{qaYgoBPDAZEO&XmXogNejNV}^VBWhmt}v!dIY~xOMKY)sr^2)7-S; zv=L2y6&rVJKpR1mLzILS0X|{CMcABt4EOv(`F93KU@Akoev1Wdd0@dn$*2^R33#7E z^*1>=CXrqPEP52C6nTFV=O1t4BBf6AbihXhxev)VkWZGSLkrbI6+Tw54?p=+|DDlx zV&66%rif)FP`~tJlP?wzf=`jboytZoFsYCvAWQxizIkI%f6K}3D*j24qm!G)Z!qv1 zL}c{8aXO_fJEy*Um9lW9PV)uyk*44%wkA4A>YL?3ThW(Hw(h;D@67rtN>cC1w$N}u zqc@#CJKgV|3XO2lHN28KHd9B!63u)$=!4T=R89c$UowBN96CerRhk-+NdAzBM(3MK=FS_daDbgFVg%X)mrgoDQlRkSM z1b?Z2xk@w!-1eaNYB~R>30DFcL>X>7X6iWFvrPeV$!8`Bl)!qthe?@il#sxddXLQ^eN4nPVO(r%5a_ zqFPBo6*WYeH7(j$F!zd#SVgouEXp-12=+xbrLnW+UY`3pc|}Dbjh7-+i~~^8Wra#7 z_xf4s1X_Yqa>@}AelT!v09%QYi}fXBSx11LnhRz(tP|$l+@-Wxwhw*4uXIT5uRuXX zJw4G#meqr@9m+uZvfq~N{T9n5Rc%|ipDk?qDl$3cl}gp6$u0+2lb=esdVQ!fR1!-Q zpJ^=3_+dZQ^t3{!XtT6*bSe#INAdt@JsI(PlEd=nN<+~GYHFNiHxcDpJwK1Yy>lk# 
z(2%Jko)jaZEoX!La+$a9IAinnj3n_O*uxT$KH-FJ7CUK^5HqY~hLukP&kDBW`xvP~ z#-g_Cn40U-8wz|%2!5YCa91qD?}PURDsJ?I1*{}dgQf|-3m71ndOpuLsVx_h*0Laj zh|21t_q8HKKe1uv{XJxD_g|rDWPZ6?4mub6Wx0JrMceqVSq;5^-lHfeYJMtbiqLL^ zzsQfv{N+-Rqe#KTmO*U2$S%Q95($#@$Gz8`6y)wIuVn$aBonjw{y`=^bru7(5+f61)4R>R5M(^Snd{$&Bh$5oT$ zfIi)rfB6TyHX*zu#KooWi0x{j*jWw6725>us6|)DT@QTv39t`=aqx(1U@oA{v$19Gp*+F-iVwV$^5BlR3D#xVQ=usV}5Mr)FPkL1$P1FdiIbA_0fgpjdf zLWE>`x^SGAs3>i$?DE3~PyzN?8l*^3=8HJ7@~7NrNA6Mc


8;lMR(C3OiYA(rG%8Odl)3#q zV`UznPEQ}By9Pt(plLf3o_&<^pDx}{kD|h2k)zOhw$OF zvcU74p`tiuXCdVK!Zftu3D@=f0Yg&!^#qr1ZxZFfrgcNTBl5hkQ3CK1Y@ZQPMq9{3 zr40p?|CtVrSbt^47#=zrNn*F*{`mF!B#o|%7CBryAz5SlwYtj=EJ{}s{q=`x{{lp2 z9?Q5T6Jtd$!nDHh(RsP?Is;Nk*&?O z-L~@T-u7(osOC*i{G}{sh9bF_Z=26%q+{uCEnx<6i5mXapB^wy%eyt+&&(tC$$yqM zy^+v|Vupi4slbbId{VA*r21yDguyqbrYu79 zzX@T565qf?dhL@=WZknxfAs%&T5I(i6nu;vhH>uh>mJ?;_z?BlX%YP7;Z%%ldrW`{JH5`=IzJhBl zLo^3}CeA^bDq&gKT}0jDle9PhmWi1dQv6G@ijJD(uB|aq^pZLVH8*QXJK=AxkDwdV zo~q_*hgOl=tcsOCvI%enPJ&uf76~=;1##dBW_UNOGbAs%YJ2myTmOQFZUpWeRa_$D zaJD=@)VC-vNU&!qahIZGhRf6z3Wc!m)r+jI`)aQHO*qW;)RIsh-R`gu6L{CuZu7ev z%&bY*3%we5yDeq%yjpnNLZkT%j@f+wtFP%wh@kk(yto$8x2ZrNdJZhnOa9{4x4c%> za})B#&hN@mmhh41ySFp)kBjfJ-ezoOejMMNi>CW~~{3Wx!A&^>q93N#x$Ksgu?fFgAhgeT~Hds%#^&HlY_?2`IMaeS>SxYAj za5u1;=+1%MEs)AMdS#+BjWhR0GtTE+Z5TPXuWS}cbTWVqOzti8Z?ilNy3kVNu#(8ApER^g|73ZDm>XKq|B9Rvf3`_HE_<8RSJZ^YewmgX5DqhQAJoY*A3jwoSEAcaUQ2cZmJ&@8qWO%HA zaJ>a%*VlkMceOAp{q|V(x!Xe8sfaQrLVj*F4l6*|v;wE`ejlUxGFtC$aN~T$rMfFn zW?GN{A!)kbLqj)MMS<^YDyBEb(T1<%s!z-72Wv@8@G=ICzeFy0p279hR{Pb?5{;$I zdR$IeK~oEi$2{|Dm|%x2?5NNTkXKvc|Gc%(7t!O?A#FM>Y0Y2Z;2;8mU?cgSX(I8tiL(8kzhZk5eYnoJ&}tVZIm1)`D z;Vtg%a4ChI%i)XP!MEL_4JyP?qg89jy`0z|xY-j#G1m%RK@~*(4pYQ2!qk4e7jM&F zHE}1!7@A@1-VQA=rrsgzi@t}w!UE@hd8TD;Gna_CiA8l2>U;m=ecM|%uD$Ka;yIj{ zI|>kt+md+Zd3X}qP7y*JkaJrfDBAkLCKzawIe-+gv~2d%#!_Z(3aq@`!$Yo~OiN*q zNCbMW$#V!_JsO}rZhfwT0lL5~FDSb(GST%G6Z@3lqD?ltd?7rE&+*L(Jf5yo1VuBxGH)}RXGaiq zw(YeZn$4-;^`^`$v0$CsylKw+BWJde7nh14e7jq7W_}a$T7tk)OLb9TJI)k7>ePUO z{4rQHqOGo1Oa5@P=5pS7R@Pn~21SpBVUsf}7E4A(Hv0oLc2frLnBDR=+pGJLD&7#S z=%{X_W5a;$b_{Gx(sgSm==KD)W@!DO{SVt}rOOR#>q($|rbFQ)YK_hH_ zqIT?VEKH71?0d&0C4iq32oWfl-ef%QOpZFt5%0|0p-H>;`tV&AUroVSe=Rgyt~aYw z7EWq%pc9M)_LT&hTS?$<`rJc#^X)iics{5-b*EnnH_*o@O-k>xLH$eA*c`Z_?Cu$L z`b(pJAVZg#?!i+VZlX%ct*@<)W!G_xg>ktVQMxDe13ABm)e%|X`ie1S%$i9@-G8GM zQPG)MC+2H%Tch2$iy7S}SAIogRDP4e`G9K6A#Xyh(_4e`Wh4Sq0&|?LG-5R3hMoYpZ zrWaHw+1gYVQ@n^2SG0bvzm6i9gfj=o|1gPC+i@zrS2XDPi$-JPC*|RjT7(_&GQVuN z7Fwdcw3d8!yhX3k@&YTOQRX|kos#F8XK~Y zN{g|!JcnSA<1&v<28CGkmA@_Hn29xVpsB~OvyB~*@r=H&LZ7#%Y+`C2%-B?Df#`G<}RkcuvaT2o)EC&-Ap{kM5u7(|NGLuy5d16ihr z4>D1FWU4(0M10Y zQv0~9$1wD9=*qgPFFjw9eY+O&*pgOvzo;x%+v4v$?a%X&9sN;5=PQ10AFo6X@kSn* zlV57Ltm27NYQUgEboNWnu1Z2zvWPZu)aj%y)54r-ld~r9TEFJFJ^;CH zMnI~I!wRPwsTso7-b5D~2rp=gH{sP!Q5`jCMpN#s98;9D%E>Y&C)x>d^cPzEg7+(= z*&0HJkcHrHhyAfSVI>#{iiiEHwb3QlNiU=l)G*#?6(@A3@~=o`exBAknXHw4(VY;> zFKfentv~zlG$h{InP35M7F%L%(nvguR-$)`#|OftMyZ#n@j7(n8YXQnvH)^#-T&pM z;bC%)$;8Z+0(@m{^vu)??kO{&8nKg7_k*!0ft z?TsU7R4P+K)D?T+OWUmU1R2%Ed)b7kzdJ#-)idE-TrAW#*CEK@YTCJSB{AzAU>`N` zs7K5;Ce!09{;krqPzf5>JNmP6Hyxg|r`Kk9Y5%D~8hVHVllvu3M-z5m5`$uX6e zYa1R3XJalr>MCv6`Q-dOmtLFPW>l`1#`aC^`((b-*D*j^knAZt|6u-wYK#wAff-NM z93KUMz`)^0!;jqT2pd#5f>pG?F53N))Qd_SOSM-fRvOIEY%0pLmc6`XkuebtbJcxc)}&uViBjOa^41@m;9v z)frUP1;s}sN01GZEcMwnbfdvOn*26B7$8+oms%_&h~n@D9GC(dtB;68^VD8s2sTUi zAMpyp%DpoHMXw17R+Z3HR#sal1$u>IEDI}VqS7TKhL}mW=2gOeUXAeYFg2w^<-*j? 
zO+9RJrfB~AWbJRy6x*E=lV=!6EeGP^Qn~3Bkz&M60MgSUv)f!tvXQ|nY9V`USC$iP zDFlwP&+Fn=1)d!WJ&Q!h_6;vrc0+f=cf8DhcGxvxrgEuY5=)Zmwg=%f`_ZiYrJc4Q zGpxAgOe8o(3su=j|5+fA3{*78{ru`l6%OL!{---zq|U+j8zsW2@uG8PzawDWh;^~k zYU&?^gaUcSyO2niFg5d+%gHaU2I;B6!A=1?HU`wuXPB`)Kd2#?ALIUpqQp?Z$R{ve z|I=WjC&vN99VtO8iKvoUSJ;oN!vxl;>2@adFav>?eg>DY%u({JV(PhnH?-uvKQJpN zdN`3ip82&!@{_vd8yIKityKPumqv1x_~5Cfvo~db39|YP4^$_Lva*97uwj&~A~!~j zs;#PQ^0VJ+4z3nbD#?FyVY+qm-D;4?EFQ+dBGxy+%S=_Boa8^s8Xxf|M|;wv-2Muj z4I1a`UR*kDV_WU{;NXKQ}HCL}-B?G<%0|5hb zXk!2pKw9L`OjfKh*jDidQOdWrw?cxBs5vQl@gKeihqe|{hbU9^wz}Y%T3Z<)UO)I1`fBv2>i-TFo4sZuf-I3!zk%NMoh$$C zYap>XY-uPOgQPsUZ?8`*Yp27l(tB@`nL$u4=Oa_7aAFkOl7QE?^b@RjZV+y_AHldF z3%)4>*4;sVUIt=Niw~`uMpy{*CzR#@`wE}RselcIUw_CoMG+R4Kjb* zp9x1$`o15p)x&>EC*eM|+Q~*vs2&z1u5Kj;=x&A-f#PF*Dbuo&%E>;mX zIl*;&nnB(gp>2+(2n%5E#@xszDjXzL0N4f{?-|X-)H8gDJCwzcN2ogtktp?Ork>gr zcAg1Xx3oD9dBlvD2!0hp!#TVf8)A+G1DD~lP9V_sJ8hlHmoS28w~&$Ak-EsNR9@p3orx_=2v;$x~7l8N-^_3 zMx3rAJLR2|)jL4bCMvHC*^hp^6--2Jl+%dcw^o)aQ0~ob1Hs&iEDC!@S? zL4Zp>G*Ye49#xVPT;t#582Jy978+{&uPC*(;S8^2%mfhR0h8H(;4>`#X2F}AqK^7* zFsxVv-Ndn-|HhL`%cJ8?@A>;{YSK=x7EX#{0%*CV!lgvA7K>t5RAb4>$!A1t7%6PV z!LcHy6FSro-w-a?)RGS z9QhJnJ)7rWueD8svE>H?!jFjpHMlU~qT*&AV1w*v`O$OGS8z7DXP(OF zGy{j?IA%5qv|KsDnXR8U?@S=v5y*wHwOql;)c`IC9C|(B9LHde>?)9eNN_-F?s&`Q zqR7#f@<=}hb(#J=txz+bRqbZ$K=bT(?Af_Odr?XRhfFX$D81I`o{R0GC^s%PhKB8> zZMnZ8Pdg=mzI3C|ut({);|VJv>vMmsW_OlN*C?an3Dcolfd*QhG(>wD+lk{mE+uVh z@MSGU+q^Y*}#o1cnnU0alwC4PtI~pOvpsEc zZ&@FhvHzM0jdf&SZsgUa$>kbu4nKC(EV;d~Bw3d`$zcj*=4VM!cMQ}QXG`O<3M$)3 zhZRG{kSva;j}CG_gNmIFVgD`9gFk$47hvD*P2BF3LE|u}B12Kx`WrH50gmFItcX?M z?^hKSau8+i-b-CGD;W%mL)hE(mxq`=UASvwu2VwZj;t!RO{2^8hQ}X9$YrMRG#Vbr|?{Zb%f=6s?^fC=KJcd_A;J* z)An!@S(2iRgWy{F2F=*PkishP10tBizU*KpO0+p^^Ld9qGc~VDDeAmB2}8xXex$GU z7-zy5<{rc?CJfC-whw>(WCciOc@kB1Zga>I+@cN1XZ^17f!WzvV=`}kb$sG&hvXKz zg3a>T*RRlj5j2wG`Ww3f?-=E^+nD^Thre1VIy~DqB0q*7safBRkf{BXt3VlD(0&=y zhCmSg_Fz&}LP7$mzb3@x+zR^g=h@}$`n&_(+xNpi8I)uJLZOf*lW?Hmrc~8h;dMDq zZ(v&e4{HVgs*=BWgy7bADam0@Ba2^*?HVx`vBftZO~{ z)d^hRTHSAUhOh4go0b1W{P2ulK z?(UVS5O3qsrEbK74DD%7ibOR^9;4i6jr|Eb=1XSFa)VMHKu>_N7tIO?56qj9n&-?$ zvU9q?3#<_d83uB5>I{B6FgsFtb2W|ZX|kF5f^IqV{dpZ$ zR<8lGT1W_xf7wl><8~&Y^t8PxovF}r zAoXSz%U7M!1wr>ixuexC+*EOA){`jizl$vh5(2Mez8}6!w{IzVA`M&V?MPd4pQ_|N z=E$9p&Ic3y-|ptZN3B#OQawi2a*6QgG-eP$1e{9SLK}0qsfysbc4ZUyjLa@#HTZp~cdMP6Af@Sl%ODKe6UV@^%C@q!~cR5>G9^u@Z^ZAyha>wTGft$D^=*e zoz1)(wl_8SO#Lv}P@8{s-|_43Yy|?5&l5Dm%MQKlhZSkWxRC`Bzmk4Zs1mb~lw36| za~B76b=MmfQOl!U#nZDnkNf+40_1~NrSRqV&g*99&k(J8bHfWa=hWe3b)lMP_^m(l z3We=k+DmEZDM=BrjAQ}eoJW~Ml#jv2w#`dXyi~>{Os02Bxhd@SPWU(@nH;L? zdTTXX0Efph(o=!U{+?FFre8c)xT=r&ZUzQ1^r61LlqONrq-CXr1xViYkbgc=!_z4{ z{;QDzHOq;53)!>gv8C+=?ZpgL*$VA-4QKbUQfEcyg5{wryWfH;+Yx5!RJaQY)r{VT zFIn5t{4YA8$tZcFmT3EWe9y1<0zFdrj&%@;eUV=KANrQ+&Dba@weZ!a-_4ll`Ouy> zObOf=k{I*?)yjQ)lO{v*C~GQ<<|~Rc8GFbpu@wxwBp)JvZiD{%dmi;E0vdi;`jePb zSc&Vw{e^>UV}tl_v7A+-#0B~{159+Pn#$xiJXm5sf#kE7Ky`e|_B<(tq$!FA~bF)v5EIqMhF ziIGmv`<@)*n3<-iEg$Z0r;AlfcE$m=oAL_0UfXUXRuK5hret2?6b>anh2sbV0cER! 
zO0EStCZ+)5-E{1^#tIfmAMpFQwC)+BGoer#je>HzT8LmK`?>1JBBCnQ+M*^)=dY|k zr5-9Ef3r|9mwcxiX=oL3r-u1L#rntiIaPI2=2MLoE?yI$J{ZSHyAZd(1#e{vV2@6* zQWmGQ@RSb!gDlZX6#qBg_?b6dPeVfrWdI4A^XD_Ftz1>6?4*2?)kXQ?Ki)#;zbSJ8 z_Fcl*1F-?dM%&a;XSlIF*}{N@A1(gg#rOiC)LAfuX92QJ*OWPy@#IA3LOq9`kyh=t zUhZ`9B%j?!+1lP!)GGa89a`Wxmu^@>CLu80f=6lf_m4ZW#flrZBHVFqF!*rBC?etu zx+d+8frF@(77|o=R&ijVPK6IBzp#T|{?98E`8mK3kfAWY0z_b6z|YG2c==MYSWb4g zTuOwCGc%gKT^l94^EW{lhxvvs$qu?a8F#(JQW39W0tuux1DvGP8ntUY$qV`1BE0D9 z6t}vN%JpG?ImUWFs@}*|f=l$%TSc-GQX_|e;ES?^2hrlBT0wHl)Dr;AGAe44#A>q^ z^cdkfjO5|@6yyI;f#7(w>e1Dj6p@0N}GtYp4ax?&+a;vbHV%f>F2zgC+@$h0vWOJnFOm8_>i8749kC|A8ObThKkb?@A#A;JjlE zCM!QXT`@Uyt1BPbZ6R_fggCK&DPTzR1k?8$lju`aYEFf6d8WOsy76YrXUMO~o zjbppW8Uk^8{fo*09T-!8q^!GHX=I>;Yik*k(0Jv`SngDGtY_aRVjhe7Yk5w@F7Mwr z&FwqoH`22a=4+k9riD$Ez;DdopFL~X`{pC}203Lvdd1NNuwj>g8!$&zE!RwpC&|EA zWp!+xk)0;TPN5uhItu)w0cVMvyi8oVnwu|~kQ52ZomA5?FIh0Evh}BULEJAnwp@#{ zawB}U&nagXqigZ;o|bZD&kG$k`__XZGFO}3Fn3}LA`B4wJ(no=YyM2AE3{+*zZC%Y z-FK%NG;cJ&A(+ayppGI+XJ#aei^{)b)+dCwntY)}%`vuIc-dNfk{U;eitgfpU}x0} z{wnWdv73)>P?x5mvoh)=n$Ov@tq0is_96J!3?jA6>-+TY_mwW?n?Z8>-nH-r2tnP( zRUT+}ZU$Ix1Hu(>0%w;t__Y4vf?|ErL`PPxKg#5t7n0O$-5m&zeH4e|;*xm4e$uu! zfXPG+QezP>2*mtWSRsa@asw^}rAqft^U*cQK}sGoR11(Sgou(%!o>xLuScb9a|RKf zQO9^AMdlByYQg8XZX}1LE7T3MZ(+xH842WGpRE+z?{DLHkxE+v;U40&WFmhpDi?#G zggWqb7ZyM#ZeV`wkzL0gsoM`nRC=NeiXn;^UX|3Pnz9@wun~Pg6A#8#M$HR692J5E zJ&r2MKK-G&h_(J#u_Ri2Sl?hJ_e%s|uaJB$3_2O5L9YzI(?BmX1O#vCt?SRNlrs29 zb4JrsRm%Vn4ZX}fiNyCz&NhS}ig3>1#Ji{1dkWIhr>g{glu`m{YcmZ}unf%V+SNq^ zwdbP8`q1RAxKZ-{RhpVVm*Hex|B7w>f&)6*q^Rh$vf*U{^LJPZ%YUa;UrRYwxG#sy zDSRTl5X{8f{X7m?)D4)l0CufvY&2`m{*=8k{B^rPpq72gAn=uqBT9Y!bL4z#TG`H2Gb-B-%eQP4KUnCHo>O+PQ9baDyLB-ibOT?-yaiDv; zQ6DBX-yl{qS)^8#(m;}uft7V;PQUTHoFWQGqLvNj6yWUd9ojcgtVUHqgXZM46JW#M z1&|4eULg7V_>B@Tha`FYw5p)Woi*zKg z233eCW+vn}z&#B}eTWW$qtVqt3R%!Fo-9%otR8nc6l8o^$rM8GcX0ZVC&GD%v;kCO z4K9`^LOUgA8wHp8XPlHV9TnD=>~vq$)zvGyJ__bB1g@6J-QQ}dVNj?Ltmf~F z_=$nZHGecnU&X_yRCme-s)auDEKWE2SD^X0Z5^+N)ndJ!w{rqU)|$}E;Ubm9G-zka zk|buu>a9ZK!@HChQ1X4+GR{6IS*X&oN)sF`$sA-Up`m(Z1iXxr)Xv+)b6;r;1yvlnYIgO6XXX zYX|Os$@^O=cNV|rK_RAd*h+)K)of+gbsqQ@T`oHHIkl}LVE6x601_=s zcVE|f$$Yf7M8H;+z0*V2W)wHe5ol-YjdM>&cCCj>ZmhpigJl_G1-K#F7EFl4$w8&K zmH|b|7=QfaA4+K?alPu0@xK#n#r+=u;y@k0(rN34P3#&LgCw>&EG-~lvaMldG|Rs@ z>4Zwjxr>I2pNU#2pH(~9gO%rz(ymy3uRte?n~(z#=%kp}&e=2bH@SDJx>}3|5(v>O zJSj!90a5IJ0lvBfQ0HA2UEI{s2EFl6;&sTIT+;i664Z&pymrVnx|Zn@?7PIC+d(;J zdH#IdB0=3N?i`N1!+ORfQzDdS+hCYg?&*`#ppG}`z$co8@l)&G^t-u<1!dJ0c;w+n z@YIt}p<#mt)aV$XXxy0oIYaR#O|AqO?Xpr|Sqi8b#$cl@3Rp?t!VWHIHI|=suEUTf zUDW6tRi*3#w##UAx8L`;yMT>miU@Q9J}%nCYgbyPYpuB%pmT6WGSEWOEnR!ljrjb- zkCC69OC+yE3Ry{9CB>>4mPf~HiCB!bCL~LVk%tPzu4PryC4R516g_VSf?738$z)#w zqHb0(f3=-YVigK&(+e#vlk+FnGMub zZA$-Llp`)=pc6%B2fKO+a47+BJwTV45<`>Osm*Z|=>Gn|lkVKT`2;#@V&Z!vTkz@? 
zKQrn|ThdE99v>am(ioblIgDrwyZmzYh?oo~Yh_F6;}ct2Dj}qjq0)|?3NqU$W+Dwd zFV#>%enoAo8jfJ+unT~-@SkkPC{oIHAp;P?0JpICDFcA`|IFJ!)oLj>ivV3v3Wfkq z1n7biSp?|3^a7*8N#2D)Pz5^v>XIM&R875Ck74>D4P?Hg#;teMus+oo%< z8l4SvHmOK`K-W-#uAG$-R(i=@4?XGLv}G%D3-U>L*--i9mVCbe@r?E4hD@*OxGBa;W}yEj0H{8HGyt1w~MV$*(+N9cmJc$xR;ZyM1bzFzI_-G zapC)!->?BJdoft{w;pTx=p59hL}SZ!72%p2uEpmck4Mh-U9`O!3Z|c{gzO~UJG4UU zlv0udtVM2d9Ll5kBMeIf&&FBJ(K;g*g*#RuJGY3Kn~n-`P%;V;M*@HtYJ$<#$;jVZ zhD;@IoRiGCFy$XiaQLTLkEW5?ci5f)bZmKjCT6 z2My5Cx9_C?EteMJ$QNJ46Zb!a zUPOTIYHy$uyE!@*IqVhj(!U?aknT}@Yd+m#H8s;pu3W$t4E_F=u}X1 z+g+Z9k^g!Wt*U-SyBr{&6W{6zD`# zGAHqvu9ilZ=PbfizCafphn=oOJp1&MC|Efie?4&kM!qx(3x8e@S4wl7`q!a2;e_Lm zyKy=8{I&$|e>fRU+x5nYC!K^A%^M*-(}C2~biDNRC-`OMZ}?)?%XsPSkr?~QGEAE~ z9h+Bvp+3HHG)C7pZ7_TCyO=m-B1#>JaL8Ljk`O}*FGYYZsFfQ5x}d}r0lIyGPHR{R zNEIwlIQ;P5xbn)Y=+^HgD3cSV6NmAz{u z>dG!)8g+f*=@;DDIoVkB{dZ`b7SE1d=~%R8GooXe9-=0p573E^ju*@qpz|?ZD`>iA zmnY)6fBzHfzMqB`DN$(KvN7&?>}h0mJsOt`zZRP}tV6ff8Q8gQIYy72fD%WVB6%gx zmy{8Y_um_b$?wd@?5SVllc~?(m9cN&{fTQZe*7d9ZJLR@Z~r@O;4ZlCii>gPvF&lw zZMUE-HiN+n(R+>EAN&>px}anh0lJ{XbtnOygTYMo(J>hD0)2FKnXWd_ne`fl4jnpR z=+N`fwrx8aXVF-)WGO!W=pz*FEucG%rmN+T+q7ki=Goe&t0*?!*#in$RCv-;32tOJ z-?_ueqkH(DBitoS7+$$}A*x-4Y+aa&HJfu0#fYglWOy_>Rr8{;*Y?p#)RiX_wX%|- zAF zguz^1^XMeACzylOGB}9m%&`Ye=xS>jVahw4`>c6%LEa~Ocg>d26fmF7UwVlI1-*do z#d?6w_R$Goi9fEC_4cjXw8m9eUdce@UVKQLD58HYkwpkXBPZPgW4W)dkFGADOHJhi zTFDYHRz|2sclTq@XqMZ?wX0dpCmzIavp>cU-_F7VPyQRfuH8s}DwcuKhd1e~>9}*)Q0!T?1Ru?ujWe&g5lzxE zv31R_DCHeF_uTXG)<0jt+T}EDKYkYfd1DmD&D=^8bsD}JeGg_%p9Lp@?zH3k5itAW z+W)&A`ITviOGs6=g5n?-M=y=R z&kGjdoexIf)v@p5ttmfY{HGH!XWSE*{Mi&ZW4mI=@kiq3VS{k(jW?h)rXe+9p@Rw4 zM`vzYNE;6eEA%g55ei~hLk{QKk%Zyk?)afu8DV$)^CzpCCIWQOZp7fHsOjkLd0CO5ISzTVp5-^KO{q$j1 zxx04ijE(D7W66R!Xvw6$y?eQO0|2^fu4B~olkwPNYjo5CcN76*-h2&fW`BSWKllI* zJM_a#FTaE>zplXbx88vceFotAOU}jCmGklbgvq$@np<(qz@zy_AZyB&_2|&4BgVco z4nKbP1IAB$3$KlP7h}IzgwG~?idkcy!MJxnKt)nV3_W8oZom9A48QtHmM!>3lllrvByUAwqei{dgkec75eNnfq%z|AYcTPX3D{*P471-v z8MEO!wrh>(yiJUdY(iqg*8ICY+|EM$vSvLT$(d-?B$WlXh1j+u2MwCELlYvs6iDRf zu&Z{Z8wI;1|D_0>y0*ZMygkS%ja46>WA8fbU=>D8MmwY>CZKDRM6707ObMgCWh5hr zXI8e<*yxOzp2`-F8}Q_B*H$$r4* zY|Z;X$ou!0vsO+9bCa!ojgQVxkk1NK>eI)6#4OimB2GFvS98sl;w9$MJ$~sZz3DyY%zur#_e zh756c?>7*;c5KIr?`JD<-u40+xj)mjS{;J4u#r#hsT=<928&>25L@Q;Gt+)$|@Y@AXb zNbuK|t{oDJ4|Nk?N$uBJU}w@fOVB6OON*!v)SP);?ZX@7roJlSPz)*9WiA$~yu-fF z0vcZ3_kx_dp-1zcx2olWY?LKYSbjvJmD+@vC151oD5`K_#E6l2^sal+ASG2*f#%Hx z=%n|vsZrOeCVgpIIseuQRQe|kpjcX!d@lW@xufvQdDpt?sWEJU*2`SH<9} z@zL=qzwEE4yBl{s3_G@NN8YBjNKQ(^>g-ZP#q1l5kgH1_s|>5CvxKYyI&Njz3FBUe zD(|qLu`uQ=c;*-Agsg)K3~7m^m$PV{6;!Qa6O)-g@;+xpUI+rdCZ$N zBOe_VAzjrpZ>wZp78RYn%B7Kln5|0CtKe$nZG)Iir0Gr59pXg}w1v*tCWdKC&I0hZ^IFYl?J`1Z>{>pb{no(^*W4H*AQrC+Yq7zK~)}N2!dkCmjsz$gv{a zkXEjMZ5TjjUS|jB+MzBy)#$q*uS2Q@W7GuOmWvyc{4E+ZXoOX} zidj zhxDf>X)vKl6Vd3TMS~Hbn65(hD?nznHv?8mu&cY6=JmFZP)bh)=#+3tV1JWG-K@D*m3N|&PB_4) zS$3-Uo8d zer}RSd=!O~K|nEElSswnVuvp_cE#>Jd+_|okr+LCG@M0cB;TNip12W9+-Sk94ov_S zAFWh1h;kwkRexd?+_1vBf;nQgDi}(096t=ho^kQqsd3ckO7l!}9P!beck1!(fv28> zP3t#MqnnEivVs1(yUZ`piGiqyE6)niO9tPFz7hvYBSy;xhjWPxeLUF9iJgd~F zr@vX^4IbzMeujO@5!|)^sl}`Vzkh`NdQPJu4ptMzDP+|Y({fSv7XXoJ*`JtsApL)`QM4C5&b>)IhksO^U>WNk1EaMx>xCGYSw zy3k4t&BGyt6rpYhY|eEI*aka^qE@l=GM3TSJ$rUz4cUi&{9!(_X);P#Xi7{H+a*{b zN#aGEL}e?T^q*1t+~r zx!=N>w+)TRGQ4zqT|j4LNec)OqUB;v1|gE!D?+vswihd=K9x}MUEb~oYR+MruC^~}|4-z; zdxV<$6#g?72DsNZQJ&Vp0Hmb3Ig87fSrV&hW@4pCBb-9ks>ZSTm3o0h1C($hsn$ia zQ}FUyH~v;YEF0;FtIX+iYKz1qW`ykTMWdJUr_l}R-P3*g#n)oq+;0$FybCFen#|i$ z$dosq^b*fVj2{P&o%nmi%QIUF~EJxy)3 z7v!8}_uoU_2WrmRm9O488%xaI$DXSu_R2GE9V-oTOw(zz$zyLDt^RJO#T&j~aPuCT 
z4RndIRd5yOG8kd`=q?!4$KB<~6S3;2<%nl(m^c-`-|8gL)sDK_C#z=uix&86xYwso zgW8Y;Hj_?mP*DAC{-R0Jq1pi^nq%#5R%$*Au8HetGeILDAKvzPbV;-FbBQ80ir#q=hYwTgz zHc|REB&1>SHdY)+a~z*dpV2yiPSISS1FEJ3x|*^PnTju9*?-F!%|g>@*$<)k%T!|w zkd4F~tewR|S+;V0i6TpT#-JB|*f~xr)X<`N;a)qOye_dlP!Ueg>qgoEV-K0dmj)(|flYSdK+ABKNW!{?!HVfyI8@k@p^=P-SAVWpRZBkyqT zb2xL>_2F%pdOa`-MD@Fv>Pz9T`{^^+i(+Y~-SEmAW9rlF>IS-_+q+9xfLlT*q060s zj3zCyIak^Wis|YDPzne&9k&F^#O4- znscpZEXcWQ1we>%mZvLJd53$S!4-+mYa5Z-Jh7PRVTn|RNT6@TJI!#H*2jabqp#(1UjC?2k7c9ee*xM z`YR*60KpE_*bk4|1|p*nK_gVt)%MYay5?oLp$31#HP^$@Zsvy&TL1tc07*naRP?ZEyWsPaCLbFOy<%6b*cs}7my(g^O^v|swhA~cD?w$&(PAvXT_u66IL2#%OwgjZ$6 z5e?(ru~jb8le48@N>gOD>VVx$&{LWwufDF{^VQlj(d4{>5#|ML0H| zs;?@Xtc{g2FteXIGPp%-dOh0LsV!i0D?|$sU!HXhU2vwdwo-!;HJfkJbG@ma3idVE z)97A+>p}gQbuM~2r|qYd#<+IiM&1UNTm|TYdzP$8s(N1UV_ff-TI(9pg^h5$h!=w% zgpa%fKi;}-h{HDSs8Av>*0UFmybsiz_b1%_n6EK>>j$H}ds8q81}R;WKI04*ibiK? zcLIL3)@!YMDnQ2)up$$6)jHY!jZ)pw)acv}s)?)csW1;bZ#>wbqSHs1BgBos;|C19PofXfK+c;IFP}77#PwVvk4_ zA~xL?3L7O9u;yyMH{feF^SkH8WrMNxf$t21dg%)&)){-UfKC=ywbTgO8ZJROiW;a+ zjOKkPQAC+qI6foWM;9z;*zdt$o_^mO9ilJR3{Unv~?+2q&G*TwtHk)*|hw zuE~@Z5Hh7CvMCL-k^_jGBw>-?^JPF{7;@Lik2#wN&%r;0<4$bmD)qq_CI#O@X63`O zqPjLApqlNX7|B?@rX2V>;88r8|12e485YP&N&|te+VWYlk+%ov1OOGG1HY>=^DS6J7s{qhcK5Gvvj z`2B@Ku8hgUvd*=sjwgk!ws9frDIu~xl?R%dHbQqLFILu_ntS?1dvA~)H_33TG4v-9W#=%PrTCu<@xAps4WHOKl2G6733Mc>>To++9cfV!}$ z;UrO`D|3oQCsA56BVik@FOp~Dzo4#Ya~=|M56~G<@&|N+>S{50)K+ozSkVimd+`A} zJ(EJ5!l|6IQbX1jxgS4^A<(#K`zucHb0wItK+*_&2sd%a%ETh=GLWo zsECZlDfxdUx^0n^)*S2=))$cGi^m$5@r9Z~K&8p?1v-x_K@ip44{x0>2uKe3F^wD; zk-+Z(bmsXs?uI1N2D?gKa0$D(bO);k=;CO)mK0}Ww@uSDkfy7fkrOG*71gj=>$do% zw2HN?Tv$E}*aX`QbO@*zjgJ02FK2pvIk$Q`Yaxq?XlFhf9I9=DNG z%tU@J1;O#ceqaSeEZFckxV^P2Ih_E30Ag$`Q(puu2yDWjP)UJBlORy|@09k16f*|I zP4Tb_SbThCr_6OpZo9m-=^@GR%`Xp=H? z;p3tX8cTqduI4g+HO*CMd*58JF~FlAY*HCqEO+m^$;8S zYKfe*cVr+%eihTas=5+w+O$y?*X1i$kZgfW&%B;PWNJOpc7IQ$*fAI$*&-77!zA!~ z0G<7i8lX!?AqjqCEwfcE9K{sD0b@zorYlKN6zH~-Ib9XM0gW@cO4wZ5t%lNU61WR0HxXBx!_URZ<8bNt9Fl_+|%- z)rkw7z{b@X&2p^TK&KP{Ja;Uz8a2Y{rw_sS@t;5voi*rSF(0eOsdKh2tlErWEUsV^ zo@0+c7S4ig`1Pj+s)a_yq{4*;IIK@!WF$K<_q%WO4H82wucAWyU zzalAWnT}Z+2H7JuFASN81YSh$VU%(GVap?O;y;T7eovqiB_%a26$J%_0Rdf7LN#1v z`PAsPDd$~O|Ar0SqLm3yN%M-fox0%r&1CYV8Rh%JuOH}CeNyI7IbS^K*5$v_Feg4f z2^E~Ri-8NSNOk~=R+BA?w8|Bg90Y$fB5DCs99yPI6P{{TX9%r})5#td@+JA^g^qdX z(6jK$t8dV!m`1>*&0b!jKq%Tvd~5k5^`e*SP5}4G!gLrC?l}!C8=k+Vt5lL=(EKPASnO)9u^f{t0yOMXr7bGWa z)v7H{IsSNpRwcfg_9eFO-ic~zm#%U*1|4+*`W@DrzR;Z*{njgpO-iGWg1eT9Rw+@f zDw^axGfi9vO>Y6$a=wH_t|DvN$SQmB)g@4_a$f|H`6?k$nkGb%Eh2$GLIVF)p!1r5 zU_lyzPBgl$TSTLyB6eb%w(iaZw>bK|iinn_^ilmvZYj49P1F80I_cNQg%%%=^M{^` z8B@Q&<-;$-PWE9OGxh_5N*1oTd^lQWHNh{xEW?a%rXhQ8DZ2L@Ky4}>?OQh1h;IeY z@xwRMvFewfknf5`Y;r1YyY_OLoNSHgj>nf@PsYH3UGTw{xWBQo?bL8upmm$Jc;|z!aLiH1p{!sXRgEKb|M-MuIjXo-|j?KO2%=!v@3QBR@iGxvDScq=zTcc%% zw)l9`MEtyJErwk-9NoKgA_6%v_2c*P+poV8tkQ7IpkvXmZ*NqVSD?706f?h`Lv?yL z4(r_m&09CZ*E8p%L&w9=rfE7lwaLPFb7qm2ehbRT#kG5H3GTb|L9AQz6S{P4h?ib{ ziT9~7jv90#`jO-`g&Os4CPaQSYYsMU*o5xA+M{*5#+Wf@0or!xiLCTEbZC`@W-YpC zTgkbzr(o&Q`IJ=(nV{*XloFfB7LmXoB!T}H(0Qlo8DDfYG6;02DnV{uo{8OEd-6%{ zyqp5EgYKbrRDs5=T4NKNNNX^{M*#I}bmC8oVVkoOJp8}|+Sm0ZcItK{BvN}z!Wn0u zgRNYQE0-+6;k`Q{H9Z02r_Ms(BTvC8#~guq-+Y0kD}TiiM;wVGdv?TU6DDAXD-q{h zbSYxX^Dt+|bmSGfaMjhrNlTxNx8MF4nGFtCX5%$$=VQgPg*fT>)2J;uFz1`^G3dA> z(7SIJOrJ3s8#nF1-jYQ8^_=rilDitKmdzudJbhfT>Bx3QVesG+(Y|p!UVY~+WUvJ8 ziVH46$>xptdiFddG-{6i{d;1|<~8_s;Ytkt%Q-l@TNljzY7$m$+JGWzO{ej*)buob zF?kY_7!AGShGBT@AOApHMsx8jqS{qVEiVyA^y`oGlq^h~Fb#eB^+1!>4KaP@0(9(f zI9g;SB0a7g2{BbzymT2h=9Hpy&;IDrzAIZ%u1BB4nqkzamsx_$-k7n;h~srcL+jVC 
zAKJI-gb&7kgbrPsp=aOrm@suV+IQ}Y6OQVI6+eE1pMKhcPF;>b_s)$dn=g?^0F7a9 zTvtU+TF@W#tVd3J2ut9<0d&TBRC^O%%k?b-wrCQV0H>w)Ndcu&0l#=n>*5|6|C9D|;{dScGp>FCv`4H{;q;M+Ni zQBJ_zQ{*7n{uQp=<@j~!H`M6puZqpUE@v`MIqd{=YU0LgZ;e4li=Mdf%pv&ohZzK% z)rd)LffG+2NPlECKKy(ZPC0cbx-?0_^oj3bXHhX?$o3*VGrs+1CYCQ>Nwc>Y5B&W$ zELye_3x3_s3~ka~no@KH4E2WeJL}qvQ{os?9QD5Qk!{->{Jt66{kwuz3@=lTLjNE+i)tco=mpvC>O| z(&(6oOQ0)oC*ko&?!na2|DmR}9~FB00Cn9hx8b?R?q+%gP#%@Q1{|5Vc<|A9;q~{> zdEgK_>MuZ?GoL;@MoTN>u`_1}KL30S+I4Eo`Ice9{8b!R1`4ZE=-WFNrF#}*&9ZO7 z24>WbGO@#%#I}tmp<|P3y#D4GG-%NW=L|UkE9QKPwd;4HJhm}`t{+m9qcHia#W?=t zzago72bTRn&B#dyY2%g{cELGl!|q0vG`qz&*f=d7?|u9^rv2~>PCs=pE;#uZf~p(2 ztSSC&@fyq+Hx<1G^hA2QOicUwJG5`p6YZKbz@qQI#_4Ab#&28JWA$&FasCCDVa(f~ zpjY?KICfw=jC^?{5>nbTsB#L1oH&T4Ycl8I#F8JEV#=gx1b3RQof;A7zD37w{pco$ z$I|(;nKbEQv^opJE*gTxY=E?xrn8HVyGj!giU8dox7H8oGapExYm``yCTSEu{q{vg z1iHQr)2OJK60igY2(p^D!lv@L;59m)lWuY3IHU3S!}nm!h=u?zWeWAKpU7+EZ4qmG3r zD5^kQT3ecV0|-KenEAygNQ%t_U4$r!X@Dd8_C%lV>3HXzkLaW8jq^`G89&XVX}V?$ zN;$@XNA+M6UI%8)`xVEXa5k#)*J0I?nJ6Yu#HTmGnSVJ2Urd~cJvn*EFUUjF4EE4q zprq@7!8mKknHcfF9Vn|RMcY2TaYCO#nDyb8=-Rs-8g*(ypqr0&t-7IE0|$Ph?=Zb- zJM`>x6w=b7v@!Ucxyz{yABKK?n&Pd|uM;58Vo>E=JaEsgNM(9Y!-kD;+R0~P%H%KT z?`(y3-J0T)$urTl$5CkCJQd3q%%(=?;)^T+H{Wm(W_7V_14=oGdp7F-_F67)jwhR(xplQ zP*EAHNrk7LHi+g~ITrr7fI!DSl1L-qw!n4QpNom(KfqdoSb23k(i=3z_19mHQ6nBg zIem<=8SSWjo`@|=zr&IhYv4$3hU1Puf&t8YOq%{9PXFtrhQ}N*|BN3kxhhwSTHA!!Z&&EtZ?<0F6qg4u~ zeepfIQ~PSyI1$Sh&Bm7eY78BAHM)0bgtteJ!jA2wIIJgqk*6GnC!cx(y$7C!UcHXM z_z&M=u`G8{$NwtXl4(aiAZ-#FP_ zX%*rpF|yf6G&*OZrP0-P^Jz}6Rorgp(YdT5TS@MfG=^dZG9I}19(?}(+teg>F~OC& zY7Bq~0l?K)UBgIVI(0e@F zfkT1h;oGib6I?fnoK;xxz3G)y$9=*~#Q=bJ~Tt~kk$^La}vV$ny}BDD$)V~Vh2=N^ugRrj(6t znaRSWSW>qqG+-`R5*bfeMkcv%G(X+Z%vW<3XgOL@w4^~W0!$WyDpwg>t0iiZa%pK1 zQzWF#S{#3;{VAlGytX9bCMBjLEj5EVXoYa{CzaXMOaZ13k2!lp;yf2WrxU+Xqn5~F zYDPMpe@RIWq^BpNh+`|EuP=t@%3xlav%C^jj80aRGmnqgs3etDIavXcsXHc91}q6i zg;Yy06{NJ3Q6AR)rcm1~-@6knnl?j0g(MbBr3JMo<^hTp8|z476o_LY%~E+4gBfut z92XObIlq|b3b^8uc}~jBT?|SLB`*HXh;2+YTgN3Opn%chvJw|EQWF_uVSb+E$1!&> zma?dd>}}MO6}EIx`)wUj){KIC|*Of&WJYmM=$k5_xyi| zMkkdK${OU|m!?L>3UtgZZ4&Q9syiQh88DQDQvzKE@kc5ipgkFzF?n@eDHDGQQhqn9 zIzoU>oZ&8(OWDP>RwbyZQ`H((Q%xaFYur*yFNB?1dthqQjwjdm&pQgVGM zqmpU_i}y?05K3W#u$f31n#@B>yQF^9I=9rUdJ0d>9F%y9iOO1*B|_CfnQ}2RR#Us7 z6iiauprnFwPD9sZ7+qu|LxYS#WWo=4M@-d;@Xtw&D{rR5ny{93X|lD{x_FIK-*~Eq zTXL1}rINx>V~G|X*&-77-;{tp{2srGmT#t|P??~=nYpbFRxfFf*g7=2X2}&utjxo% zJvm4vPI-V%eRS05nzd<%H6^lGYWH=ug<+?dRPrJzDa^&Pflk(-Rd*^trvPIOLV!Vg zxXLp2Dt0%d1g5;oFr{!&3sNgHhk*qFfoT2PFb?l)WtkK~Ew$1hqHS*1-qY-5&R2tc z3W0irVuXy);;NHxWPDna%VQk;zSjAzGB!QdLQ~VI*WU;P4)yG2tezh!pEX7N28iuB z*Nn?;F$xN0mcpa`H;Y_lOlIl^Bx_3Rbo7R1%R0tL#Ucanus%nC&O8i}?SD@K{(;U} z#tI~2kAoFHReth7jper}&yHbnt4NX&%maVbgMNMTtKvzX` zH#wQ+lQ3!skXeh}^It#gwKXODMyxU@U8{m|x{zhz$+d+t*%rNgS_5?ZrU#n+3Dx&&3)-xwL z!n%fzLFUWyEos7Ij;qlFJ!ABlldajBieq~`!BN!*E7r*V5ebAW0mTHP(b@PSwXS8X z;TE8aVIZ!OuBEzy5iK(uNR2MRhE2@Bm7L%{4Kv(QlrLK%*+H7NZi5Z<3supF|3A8>EO7C-e>t3Rb*roGof_ZIrH!@52t5bHivKESPYv`kDj@I#s zz{NBaJo3`^6khn9jCB89zr5#TYjhT1sg~&^5Se z(KRE`t#?s}ssreBSy|?%9G6VmBw43YJ|yg3Nld^N1c z78qL=vbQ;28%u3L1%BNC#TeNwB7xsU0-`@vF<9oHe_23HsF_MjNeaTvyRU@r#f<4% zg;vz)8YH-|dGl7)0ts~KG+l{!{GZY2+F=8eeggqI@hu5+hLUkwi%ZnRV7Hp0TwAO2 zS%Ci3Ek73tN2BvMW^awo@AuZUgY4*5>r@sQQ?MfQ>~C|-)lgm4oXXbtlz=$oD?|>A zNZ5dtn*s3)~|JY!9@GAf{6#w&_O)vKbfJzRa$H0L|AP{cObFpp6t@_Ca@y*Un+E>s!516oBp8tvRf3(n zvo)h6s#|)JJCTXP)jU}&Rm8>wx~(h$+n+{fH_5R(^2p!9J?A2qTj*O|Ko{5aI8KQ4oHi$FvaH~c=h=UI7 zu4cwEaLH{6-oYrx2k$62uyc-FJtBd?Bw)IL+2OW8twi;p(Nt}o0w(pBD*c?0;YKx! 
zbGS2DJs{dpBDv$5GZ?X@M3Q$+!f;)YR~y{4sf-0~o|c7y&BSLf%gJZ5>bQw98U$!Y zfG#AdM-GZe;D1vB_JR&h0Z@&OK-V%Y8tF0R*u7`BvT#RbGIhR;s$n8AB_llrZAjCU zTWLzMG>Yd1b#h5N;;M>NA)-`n|RHptof%D!e@T~`P>kM4lo zX-S5*JD!C$h)5<}XmX1j^*=5F&Bl;_f>3r8uLS6-(2{&~4dN?EQMQvMV??q0Z@t+) zYvwF6Tx@4~g%jO+^&OgUvF;SO;oOV#=d@x4P=kL-%wYsGDw;poI|=jRYeO)V+&b z_m7r!`>mT_lSN!;+TkP1y3m0K4S$ub}v-P@dd$fs* zBAIlKl9ElybJH$XQX^~9Q6xXbZg(hvC*5{pS>P^FSbv*lIJ2%+8sns;rkSKaiKb{$ znitTqxZSkT;(2^>=j{BoFt8zTsqJUaTG?o-?wSLvLN2ePYYM>ZJ*=!ZE9KA&qW72L z=KVc?mpr6f(Wi#ONY<{_XW7|a(s)@)zG!aovo`E6aqV^2qFuXos_7LM zlU0OlAEr6A&&H2LixX3K!GZ;tGG&UYmD-9-t1G-rSMt?qCwi-YsXvYGzk7der>HUJ z?M;U)cr3z|;Hd>d(%w+RD%P%ApvCWM@|+Gc`v3qS07*naRPE#-sc*IYk~Oa^JNL~_ zT=t!(Z(?&eIehW_S)MRqJVuWm&4SwmOEu;2mnEj*vHw5Uof|7RKwO>}LHO&Te~Zz4=7W)ucAy-UZ8mRyrNZH-Sg zViSUoY!M0k86=>+nuW|m7?0$C`&+;Eq;(++6A=Te(Jqv*hjVsLHndW8?do6MQf85t zpRc_^znS|TMv>GqC6Pd% zOhQ|fl$wH)k|KpL?bS%O6I;!-=W3H7Ng=&}CcS}HCb3b!44lco29OoB$mnbWx&0S64WN!%F6^&Q0PR1w&Xv<18jRh1bhSzRaxb2Y>40-nok;zr;eUaTqB z7m@!&BydnmK0}~RAur}FG zh%rT5EH9U4)-h;JpnLl1r?F_!B8(g{f&`lJ7B2ux&=OPG$fAghC?%|&EkhFf8aUjN zylf_1>J<9#Xo4__C89?N$oN6)SeRUniXfL*>O zGvqO8#zAx3$$if|?+mo>#1@7xvoo5Gj)2GJLdL!|vNm96ZQ`rc zTVZQ%0)D)vMS~w)Z0ewX{v(4wBqYG6+l!B`t|{9%y$*vt9(c?3FYbWAk9k9}MP zHht%e<8DBLu>Jp(U)egXoZhG zaUV8qT8HtUOk(>$b|hn}i_7K2O}E^P#!WJD&)xSj6(-$iU7}&CRwibn{41R+sF8^u zkw7J5a0^g!0tT`TVA#|zZUIWQ(4`1Y%u*gpZBaB)0XjLhvWh(~uDk*5vJ&v_E6=gt zLMbZPt5%qeC59YJs$r2*1=$i@4ql&)Xhh44sftE9<>6poisSnCK?|DX?@(rF&=405 zJrCVFw#A6&UnT{;M7img{1AaMm6Y zYa}wsKY;{71UiKt0lI3q3JG*1-jp>T)v8%jx3C3^=GD4&Yjo<=1#35KQSD2C&eE2Y zfYJgwL5jV5y@1Zr=xm_#C?N#CQqp}7=-(etJ@u3l;7J;bsG>jq_#;M)9Lb;uX}uL| zB$OP-J_t@c@Zhajzi|zwOr4JEa;egAXmt0IVHcrO=MH%E(MQbt)PZkFR3i3kQ#n;;}LO zyPS=6&N}Z3G9s|w!>41goy~TNsOdSP$`D5rJBF0$(KO9VNuyrLh;%F|+vUx|%aYnW z5su7Oc;ud2uzB%Jyf=0n8}4LrKBZuvNyMgS(FCT+N;8)H495KBPYlN%Lk+Nk69-6LsD#NHY zEt=B`qd8UPVz;h1H7)ZCoT^RK0G(7ySeBDzR^>BFJ9gZA^qH05@yDN}DO;f6AxXrk z>8a|+lU*BSHNzXPzotE7=gysrNs}hwrkieJv*+a)Nubj%+7{47(@#~*2G@7pdlUAu z>GMY)eol=}OiBSdC(Xx;*~YI6f$#w|Y1{-4KKLMxI_4-%Sy{LCH~f>qk;OkR!*#db zjw>#`8VS7qf-gVBYcKp0y9@b8QX^`YRn#2IaQp3d;NpufqDjrtx7Zjw^4O#Jbjs)G zbM!!rdgX1z6TEhpeLvsV1^DZPuKA zxRGewxG|Oy*q(j%Sq2*7@z^7eVA--|_!mvrjEoFDv1n#C(A`a-%gfu1k3SOA)dsp! 
zTsG_y^yu9Scl`bD7;@SWoWX8c*I$3Vn#{3GYpJA(dp3K*9zEztj2`t4Hmv*=FaPWB zSg~Xdrp#hDu=p(G?JdxN$nzZYd4C&<{QMlmO5lYiVe8Iq@bHuO;WMV!O#EOHuDS3G zoN!bpJo3zIIOW`H(YA2|e*QX!4!<1k#5A7cKd52@o)1PphgV*H6O-od#Jl4@$BJ3+ z z^f;6HU>xt!61$}eX8Y)J*&wztwX1I3yW!PWUd4Owy@vz>W`pz$1|67w(&uoRsf+RA zi!W*bqFeXwc=Aau=p{?=%rnoRL4yXKRF3E}t%MF$2qg{t26@6%`Y#t{#2{vXWhxJ!KZEimNd6f-~51 z5qN{msdEb}Xd0I?y`>o*f8q)Dd|g6c;XG{Km4|HwRT#jwhPPfd1f%}_JUfxaqDz;y zIC=2Vc>AL-(6U`WcJ^w3uP1$ktFO5V%UAu31)@hBCz@T;W-YNZo2e$^ zw9@5MH_*NJ-WUR81$OV+Ly$NUEn2i7i2Vc8r%%UoBc4UCp1o*NZonvNBocf`Bmj2n z)(!XEa}WEiF2;YDUXse_uAQ?M9UZSJD0kd{3kr)1@Y#FgsnIcF$#f3!Xrzt+KZZrKqJJ@P11HL5Z7oA0n_=?Yvr^fVmbzZ0B$o%CZRproP@D^@MXxKAb$ zJPV2L@r>A3QvgMH<<(cwl-lNkB|qWa2~%)n{~oyert^`_fJw3=6XlhqtP#m==b7JKc)0Am%`Ty@v_i@oNuDz>Nyh^~MEB48k* z(n!O=Fx@lvf6nuM-@SJVW(LIA`TXI`-227*y>Z?r&x0Pl_d|!)Eii53J2>NvlhC7A zcjVJkkdTy&{B_GQXXb2t`qMhxc+)Mo;)Fw3?lA_XammOormH)#4Q?EM36_5K5#F8f zDT-q)-x{S+i!DDl6Navlg<6yjKZ%@s1jH zJo@$TkLMqF3>#LhMhR2O;u2#pg6dxk?TEMj;{n7m93ygbDZ^39(6e8EoOjW=_-xvT zXpx%0y0MWM`_e0v-e};DV!m!g%z`P&l;^p@zp@0QC(kS!B zGrOckx4wAlUr*qH8!p6>1@ni#}C22dt~C# zXWzxpKVN}_@(pL4>nh^*mdu& zc=*n9asFSyJ~ zM<1i};XPAN6#`L2sj*kDUIda`&FdeR1f~M`H5K8CXRcm^SRlALspqWot@s!FlIn zpIuvG!UWnEBU|9-RhzkquR*xpZ9)_UY3DgZlTv!;id#p1TdkaYr41 z`QOgK@|7EK)G@~)IVv9y-v3W5S{1-kPyQQA=1!o;_5~Ctb;HixJL29^=i@H~y2?rc z9aCJIS4MLW{GocF;XoJu+AfT)iutX?ENXMv*&8juKVrmn*9OWNrXeN~*~iwbUCSUl zN;3jFFMZ0vT-`Q0QKyf*?m7!kkrP5OZoO%*n=o-Ao_qFrQ%#l6D$lNB({}FO9e2?_ zHk%TifKD-W=5;muALG&J0nIAOr&l!-7oB=K(h^zXj5Qn;{`TwxqmiALg9-x6fIfY3 z#<9l}JQRqi8kqU;iAQO=Y#(;xOaE7Ope93Ge5W<;aXqAk(s6>1``EyK~{3%uT9vF7!iIjj# z@x_dp^lYy|mu{K(Wa=CaMmhQp+z$uSi`ynC4#gat=U#dis~FW?N~!Sdv(Cq=`LnU~ z=Y_~(h)mmd85s7bgYo>cFEF^8yA{Jwnvdol_+#}z(|}Gu!r(DRJuv)>ajRMD7|tdy z!-$dBFnWVAU<9fk=Ko+q+{WwzXQK<|iH3kqQO6<`$<58df5wclT-7I@c*4A`vcW|Y zAb0J(8-Z*#og0Z<*cEgnl-PHw0jpTO3(mg)AAkHYMvoqiB%UK2bvc}r9%(1cQ>qp3 zQ%*Zt45h0If=(GJpi*L!k-+l{D;eL$;Mz*2p^-2OG09Y8d98U@*f>2?&K1ki7H4WwTQCAYw z6Zs6q;gzsGm2vb|moXHFv2$r8K}dwcY68frTCZB%h%Jp}{97E^S`wKJQBF0t5Q(g- zk<1|FQf7XXGDa^kHkD#_DsoE;`5EnsY+DqwK4MatDG?pZ_9Rg9EJYGc3Q7E|IEI^2 zpahYk#9~@lfR2bp5tN3lhGr=+lxOnC0*ODu+c=-8#(qY7RDMlMv6#YS8ygyz*37pGd#~og5>=p-6&&emc54 zrb}5uyOR4I+lK`P3bh1Q!47h5ab`+o9q16ixK9O=7Q|Kq5%@*(T)lTh6{R~KRZ&IOQ<3_c{oxQgN1Ue_mcz~h8(vAR6 zQkeijDyr1oQYQ5ayu^SaU=Y2EKXWvJe4rEV7|jyTOQh}0UPRd!USSxW>jRxwRBTb3 zL?}5-=V|#97G&=;FjD@l<8+JkNZH&uew+P4k+>wVa&1$QYy&za(EGMW13E_yJSdIJPHF^yRMTm;qQ2(h*}c`=Ij*E1@mx7yzhNR2?i+ROV*)4grzai6x=g0`-8- z*Temq^+uoUfs7Hbm>Q{{yiUO8+w(kj4@q6N_U#;9=`VecT*_-psE{*3%KmUNjXPVS zsI}5S%Rfw7cEmEOE7QV zJYxXv#42DLvk7#;fMNeqNo*p+6QXFvI=ZGRQzt!O5a9UV2Uk?}@-04S`K_&Mq52A> zZl$I>0#N<#Xetd)^tngHWQZN)m)F%kP&t6`rA!kxJt@`T#{*OcEe;e-Ei}c`dvDnd z^{u2?{WKe;`%d?={l1^=a%!w1J+Su)Ij!i2R9*u?w<%bPox0V2k>u(9 z?XAQrukn%Ao=_(^&p?`V|HJ zjmQ<@a_mX|VU-B0b&QkGrR6D>wy0<`*00}SYdhkii!KTXZdhi7 zx8Oa%Pzsn8t+1csLz9%{8eu4Ggl}O-kVmcIL*-;A2WZC7>jA4&LKTCl4s=>@CM%gp zBnd1ODedfXrm{*Ps?`M7O?G>8yy*83&^gL@4|b)N3gis%^gPF8>PwLBEXN;bM4k4% z&Swj(?u7$0$4IP`Mat8v}HqM)~GB2XrCn*r3S7GpIOa7*N9d38#`5IsIYp`zK!M{Df zu?(*6Xu1NqEroSL>iv#iAdRVyu{9fAL+0m>f5YY{x|uD(g+2gcju1sLuyq2Rz)jd7 z`@Pl_RlqcsIt-NM;%8=h$(InZ=We?Pa+wb%eOEM1?K*VYBGAdhY75hfS1SY}(sD_Y zncOZ~bhC$r9W@l$I!Dk!QSHMA5(7IA41Bwo&emdgF>yWw)_E7Zdv(aF{j9p-q6W~J z?aVjy)d6${Y=S#?HwSzg0Qa}wH9pXVS4*=6X?8{jAq@t)FlSF=mq*hf(oMo&OC3}= zPeYFy)3MfN4Vj+}eOB{%+ph;S!m$K6)uPE~#+LHZg+1Y$oUgiutYywtuDa7LQzH-` zS=&&!UZgPrFI=UVof#n5?i9SYz;6ZQtD=kcHrOXsiYxdw7*ik z81MXoA&CS{`wN^}Y@*&5KHw_>Y)P{?aCg5j%J3%Jt==Cs?{Wa`PFV~4T+Acq*9?Z)l^rvlmvTG7Go%jfoq{}JMdL`&t>9izKWZ3K<6?e z6!0u(gp-vNQ)l<^@GSVZ7x`7`d3X&_8oKJkUaZT^18^ngrh 
zvVkk>%GMWw&e)YKZq}uS`JULC^G_jIsnNv<6s@xwIB{Yx>9tGGKm~1WIue-h-i}_^ z)g;~w@zJ?u>_Lj7NY~V5Lt3)1O}weA&H(84?&{$rP3jg&ZdyogNw^rZVEBcT>hx_T zgK8zwMcQK1+1?`ZA8L|?3p$invtEtIdtUVpvx_}R!*7cKNVZBf&N&-YQpc`S1u^Rz z`CbJ+^0xXBd9HOXU|9EZ_f|l#nyk+(48qdf6hfotxYkq$IuA^}cefMFquUfQ+TD9g zSE@H9!3x}?xWPs9`sK7I#1V9^|2G=!R~@Ha6MnY3r?P>#aTE+3rBObu;M z)itVo6MkH{lyi$|lma^O#OVUDb!5`(x|#(goGs43No}B$eW`V;)+|!PBGdK+_n7Sg zbiRraT;#t8pc5#0i!>bQ+fNLPqP){)t&KMBB#U((y#e-qH6+OaHVmcwxjBr8e5F4>|7LP2nXWb_KVzoiyTu zy(BvOJ4}c$1>00r+2|xEE2MjKuDk#MAOJ~3K~%*1+f-LqoxEPvRqAn2npHElL3OR` zoxS(2uuvM8K@y|1yFfA9TFN}$WU{U#vYl)@)89f_BsKMCZ+{J@)p^q}7|`fR;PyAq zHT6|s=Vz#B?3`q5hkkgwx35(MV|zSx=GR@C%}?K6Jz#iZCY|cxTf(HDJ&8`g`>JdB za~(idATy;LEg}m^>sDZR8PT(Q56v>0XFkxaA|Kt>fer+^R!p81m5}!+Go&?b;dwLJ ze$)?iE}zr~KnHYMANASjeodegc-rdpa#$Va`T@_s2cWAW(1}zaT7|+xb3w# zoA0xIdLTGQcspn2@r4*!9U@6gyV8Z6Y!T=xk(peF3Rzr8Q&maQQH3DM6K0lEGmntEioOlLV+a*W-EXK9{o&e_b=o;cPBDKTP*O7aehr!bb9hT)SbR~5S{y093}j%*x- zoZQVox9zL&*S^D+fKHN}fX=tk)eCggK$1!O6IGGNeAfabkU}ov+_TRN$Zea?dJ08k z2~Ka3!Ccn@DD#&F+4KTc zO-pw1R4$?4fp4wD+N<=f+9q@V+@OuF_Qk=VJ)PrnW*s$>^@ydMIRl(;qYH@Sz$6Rp zNMakQuDUvLT<43+&WpwKOjJgi5DYJIFB8fFag7V!(|h-(nhJvNTt}bTck*~?N9F64 zOKPi%N2|77aMp>3p=9Gyy!qBUSj!?x38bZu)*PUUo7pZC*&8>ZTh~sAU{Rt)i((x(B&Lls7A-SUS(igh%4DqO{M)oXkIt=RoN(N+RgExt5%|vG-2v9F-unq#;)d{s4+J^Tkkf`)aJ{7p&r;0 z(3y?Sb4F;BEAyPFp%J!K%6ZFTRmMk`Lqg#!q$<4RZ?}vJsDQ+}wd*V|EQ$K;Y8G#C z>?Pj8$q~#T^`rdxi6lCg4Y8eo&YP{l_|cG->}3j2>Qu0@|GB_IkRtD;9}!&3ChkC| zzOQ|d+FHP{4|G!1oHN2bGej{ZI90m8^HLhP>Mwz#q#_T@T!RfYt=-jo&exTc&Yy-H10{eHAU!THv&kPbKv|Ra{c=ug%(kC!hW|x^?b`5tp8a zoUHYTrWE-t3!;yE`1PDeX%Ko){g`^amc*fa?qSN7%w_l?KS4t18vLsmh zf|6Z9MLO=j^Y8fLy(jVY?9Y%B(H5OLb;kXpuD~;_2B*T@tus=v|6aS`y~!VA=7OKG zQ|Fz~nFYiHbVcvkXE!XE`wgajI0=UwaunKi>V$VEet;F+fHQma#*NoshZzL9io#r! za-JT~vYMZ~I|(2C_ajs$Qgt>7*b4#DZ?;iX2RdI;Z3epSH-~qxw5Qs3ag0|P2^s9@nZ+FDYmIxJtFWyW4xat#2w za0P6;NOX+?Ixl-c*VIiq_e+h%cE9g$3FzeUGOAHFN|w-aR(m`0_><73MH;5O^STil zR?&G?MSv-zSG6dRg1hhfJKq1#|6%otMOc|1jh%aR$EYhW!1U?US(UpLZ$!rf?Iv@+ zUx|1S!rTHZ`RY6Tv~U5+S&dD0QD;e?uvlPcorgr-vO=;7FY&$GAC>+t7RwD9fn(E21gC)Jtj$>kXH zw?c|_J$mhd6OTR=%jSQ>a+Oo8e5GlEw9)0#+k6)#y8piUB9<>%h;>DkbY8T^jaOWX zCmw$k^A;>KGQ=o?T1sYD9CFGzD%ObS9{C6B>_+4A%Pz-;rK>S*;uIWq!k^HwdsjUF z{PQR%Vrj~lWZZJw-|+S`53@e+G^EnJQdU4SOH2}Nd9keX8|(F#*Xp1hx3=mk0B@G) zw#)SS}Ye3sD+0xWAP>~( z2XrpsTsAsAxDBg+#ZeO6Ue1Un0iCaF>F4hl-0qr``KwV84B_VcC!LF!~-3voc|1(yvT0&a? 
zJiIx6I(FS{51c>jFwCDf6M@(yl;#!T>#3g`g%4xjovLb}?4kqQRPc?WVcTz=xw#GQ zvgQ+aU=M7IMCWQo&_+jwVX3ka`|m%%)>eh$oCt2J>$+s=I;XnYH@?Tx)kX~h9lvZN ziH<;5f3|$%dZo!g*SNS(BlS~`Wu~)LR(!4ewz7} z1(U8XX02O~+1kv}7T%+V>!<(Ey|nY{)f-{nJU4zLJy5TWj^QQY0mJp#=;VxWRlqj5 zF+YH+YY^zDuC_3sTWxN8cX;u0_R~!o2X;#8cLBrIa$P-Fa8vrL8R(kQYfZnVE*^e= z%$;$>>Cwt?h+y)4BuNO%1B_i`qP`|g3Be|T3H~B%t)yC-m(So{v5`o;igPt>HCLkO zVTod(Z!sl6*@6SSrl=wUv5|2W7g=0Zf~a`@9nCBW)w5+>TqVD+EUF;HG9W!Z5i#+s z8(g}Xp4#<{-Al6|&;)V-#FKF+pPRI7g&w?F+g!MbbRK&FZzd~Gycy`4K4W&=l|g%T zn3kqM;T1{&tJfLf=B)52DKVOvHY{MhR;nwfKsBJ_?${zjK)1$oMmV7Jt6Di_nVfU< z(pE5Nvt!3jj#@peGEXqkx(R*a=+XtJe*hh{3Gct7FKLj;!8d!FbzgQ{#V(0qgkLa- zmr-m^W#m^PNR$n?lI*JiIx74eSJe@TVu5gTkaLm>Y|8jAhJeOo9;(I4GZPKAD@d+l zNCHsB9RhPj_l33PNLg?|A8jptLB5$)eHkV zs;*vsnHuNpfli{MW0xsC%t;g#O1pNoBs#^tG-v_Ucx&5@EqkGP3N~g!?2vPnA>lcu z922mkQWEXEBQEf@#1MG6T58VvVc8tQBCk}Vjj(sBd38M#n%Popz6t3BY$9`YmJvC` zOu)45Sw>@5J=cGB=*gamrJAwR|3ef{HDVi^@otAqp~gPMS6wy6{^&`2Tu#;157G}0 z*7s(D{gAn&Lkbfb;!BZh>?NFk2M&>qPR@u58k-4pEm~vIVgemwqx?tsQ5&`+Yp3O8 zQ&e(tN(wz@WCjQWH#sz#bl7CBKTWGi_iVmsJNAGl9f?OzTb|7tC(YFc8=nZ(edoeH zyhwLOjbPxo_h_e4@LJUl;CcQc2w{~ij~eWF)xD6rYm;e0Tw+3(>a;sIFj6mj|0On+tRAz6~9&k#8%wY!_P-;%Joh) zYRZ+{e$D@4$vkJsXj+HcSZ%}ZP|YUHdj&jVg@%lzBdZ8+cf)|rRBG?aZD@K!n%I1f z`k6x%O+iZj0D+ofef7^z`j^BX;&lbhF<=E6f2no5`BC+jzLq<<}-+tzkB^?iTM>l}?! z(}7MDP21&Aidr}UMIARakn-ve_<5Ahj;wzT&*jnC&4DS~Wz@EM6OB(i>Ek4Q3tRNh z#~S^#n$limNcEW{z#<=A0;!s+@)?J@*%3b+bZCFGw&+IU1ZyFQ?xzh@K^bi>8X*tp zG%swBRnm~$U;;A%oosZvR%k1f_3OTF^-bzHVOs%Xd##Z8~#wJW) zlji>BS30h@pyYcKZ0?ElKCI*(4 z6q897cxUzkGQIkl5sm9S0bNsUbp9l4Y80K*<>jH#i@GAW)h3x=kJbx?@xS-0eLJA5 zzI6Qi)LCuFdxkA;sX-00uG`tc=hSW|zc&|mLnTak7!{-Dov+J1$vSxNuv@82pj_-W zMXdc{W9&b=5yM@x{Wbe5yx;s#!k#mMvQ_ zOr$k494e4oQi*rJT1=*i-y+b3cHWwAT{qAv9mIt}_^Pyv%y))O9gP9HYI56l2RiTW z)qu|Svn>p8b)Zw(%J5)qd&lhrbiUwj`)6>WJLi;tYz^rA^Gv4w1%b}*13$jg|J%i8 zRX1)piLN@(1^)_4bncU8pldMJ{HiDVK*z*s0^MrX8KLlXeE&WHIU#oHwi9{QnRv}@ zXZ*KI>GAlrfKGi>*WwOe^tA!NfKIA~m{n{cxqG->-2EObljna&mD5yL#p!HCqO(+! zFr0OZ#sNvhHcNC8AHv=_n8M_X=kY&|(+YUZre_qPuDY z&;<+ISO$k`%sN%smkK>-t)tug)l{GA>gkO_O+7@X8Q8gUfb~mt)qzgNTD`YQ04f1X zj5M_Rd)o#9s*$Q|aI`cwl7#p#js%?f`GtxRi`MO4P+iy==S{nPL+=Ao%`-Qx(ZrG|xcp8>4o17UP>QP&_CVSi5+rmgT zWvw+|Q+E$&-lY-f;!6vW#S)n%8tz5*ex3`tx zqz*@oqhGnWs!2XvXwKwOqZWiWHN0~z+MbKl$zzsz9kP=v@6m)FRk61vTGeY{uk)KkBIiKP! 
zaBt=LT@a4vwm3q-9b$lS(q_;Z;Z-Y(Fp^VBOjZb7Pa~U$&Z|et4Jg6h`rQGU23aXD zx-wctV@%a_r_=^If8%fi6ioH=dOke8ON}Ta$D9oN3c$4TgYvp_(}-gcbHz1zQ^B4m z=R+`ag4Y_*)q@6M)5X1tY@D-{UF1b+yLux{-0GZ_&`u40PVI{e^!x)inl*BzYCkl~yo|YuJ!| zC6|yXn66t@WKT@P^yLMl%&AABv)Sc#DhVN%DGEf82C&$WA5@Vo#YSt{dB%IKo1Coo%F8)3tL)bYvZf z9Fk#5dR)blgPX`7Cje1@t7x@#lE&ZjfV*wbRaw`(P@!oZ!I~r{sk;#{pz!xU5A2+z z@5V`oYoEV)xKFA{a{5|euPx4-UdFA+vD$@n?W@`5!a`S|-M$X<2D^>Nwb*QrYOluG z%~)%tzSbo{aOU3%Ycd9x>L$_P`1`*!16{2y`9(g^Jj)>KbV^kTvNjXwB+*@R^uRzi z^T(ovfFv}GPHurI3+qu`HGw>U(x8{|2Ay~5jZPgp;k(a1L7<#L!vxG?7A($Wk)4iQ zaNmDB$0onAPnbkTJ~G>I9VHJV@Vx1(ny%hX8&!@9N`{fq@$>{&Ada&uhS#bvwBq*4 z%amuUi5y|yMGA}di&G>j%^T68(j(KA%|nD;3v$-_N&-^PW%0&l!I0ojbLi6=3i z+3KreO)?8a@Sda!5@;xrx{^1DV0Ei1eor{(&r(xWRkyW_3pz0^8SPspAZOibGMScA z%3?YPQKsbQdLG|9+J)j&(N-8it|5U%1cQgl_^h}n za=a;Qg`i`qA^T9^TFLgsGGe?;Rk+C278#R>HY_@mz*049*0Ka)R2VfH>&&0q%`(VNc?_ko0EKXAzKw@07)ugS6P|&z5Qf=I=bnQ!uE>+i%WV3CHbiPjv zR&O2(H!MP&jYeq{Jd4x}2F2*-oOn;ncx_4VyNju(SfH zsp*JHXo0Cqm@vzV>a`959RbCLnS-8JREl$lUxUA#eKubB$6c88)*C2IOSc-HhYvXn zCmnMPuD|LE#3V$bB)~j10tW&tCTnC*LLvfMr4}rZkVqDk@<IFlSed(&i#xF@4hiM)C?M#RaiIxH%PR75 z+wCK;iZL}WjCq?B;Iv655G2Ki7GMEpPWZC297M$t1OjCQ2^L@>z$V1EX3=F47~~Uh zv&oX1NDxfGX3psS4jzmH_UefDUVQ=U)^KBpZ{Z@^2sG5`xUK|d1P^Wo0ls5dQ85xD z;t(B?Ku}CVWi)L^%*B%=Sxf@UND^Ddu*h>IL9VDIABpiP2A1)vNKc^9hESwjKZ3bQ zIF%&?k0hjL?t-C59*nZ}KjF={Cm=F46Mg!2V;p1wzWQznN-7eK9x0N5A7fx40O4aP zkyWVJw#1l<66Pp#qu{R+xy5)x5ZA+Ey=3>$i!mDYUy^*1;# z5)hw6K=fXtKG0oZ}nCf5#qi1ePwIi#6Qvipx|)Cl>i71W7)RTN~18!;9kh z<>lttW>ZBDy|@GtIBKErW(lzgCcUW;PZd9qRHwhCZ4h$}0JkL7H3Qv_1G=_pv1pS} zjvS7q^pl7?|8huRH7hL@lCoM!`4L>O)0T69YXF_E%4m1By_XXxpObFbx#y$z?z>~R zE^Tn@P1j>3YYF6&8Fv2z55}L4I2yNKcN4h6keHc*BUz#@mURbm)~&_oQ$FHEO+*)Z zdlxSK8QFQo=&^GTbnV!VQs8t>wvC8RYl(sT4Z-xOpQ4Dxj003RRU&%8Uj5O7G~Aqr zw7nH###G)bJr*6hb-=vY--D_-DhV2W2Mxf+Rcr9$58vR*>o3B_yp1T%tw35*D!Cl1 zFn;_8RJ|$*W~CT1s4wZecOhXU;|$4^JM;4y$XZv3%=W#|um8UELID2>$`}HHG zZWNh)H{#1F?_zCs84f++GMsqufhb(@HRjG=g0B{=LQ(N*7A}g!yoGDgb?1E$QBs6% zoidTxsSD5u9qRx9AOJ~3K~xuQ0H1yI0jndeML}c|a=Ecwe$Hv8j^&nA;ivECqI266 zeErSWGV-wqa4Fhkw!;Z09EaUljC%2c^%(cYcvg!OD{v8hT)Ghl3_cP+&6&e`tcM`4 zq#Uom^EUR}qbK(5+lTUG1xl#e&YAlI$E*iVIO+(jBhY;RE!+Fk4_LV3XKr>e=+Uhw zI%lR~!Mv|%SD~~Pmw=46E!g)1G4;b4+}t){*cpE^5G~5Dz<*hsXWiSP5epd@n1g_mLVhV|&wy#u;+?SN5NUuzOuze5kl(T5+0QNu@Z!nVT6 z7o3XrEL6N`!9wiPu`Ry;a)#N6CO^Vl5X{$M&)K3%2 zNCl%bS4HWyoVG$qCd2-GJPzu&55Ar81trA@%qAGO?bZfIpL!&||85@9I)WtuXHyuA z!_Yq+g}j_ilx*Tz2el2Jcz85Qib~L?M^EfEpg%oGrRck77nHA^kI_$!!Qi7W$MJ_8 zgq+1+V)4)G@cG^F>u$Nv2pb>tY#^%-S+N>iuFtJ#2);0JuOU=d1?g8c>@gwqb&7azVghJ|*PqL6C&xfh*|U3%<_ zJ8pg)gZl4}lTSYYGiOgj`wrc?fyCgRJMKfzo;`5)y`%8yXOju=g;=t36Vh9E!@j%k zf|Wmh%{?}N<+N4)clHms^X`9O@$?Vz;X7|)w*dze=+43q3zt$IF2kNZI%4W4pWwZT zQ;|uN%Z+~>fwgOvQ~F!N`m}A)iuEhrd;5KSL1{LMn^}3Nc9cnTmVUSekke{vmgsg2 z25Qqzi>Ha9cs)%d>yRLzd+1^U-HJ7os0z4H=}?Hzz!zEZTL3y4{AH_y8FjC@lx6D3 z6ZQJXlkkuG?#8^ylkoC?Ucz2S9FHT9Jqcz5$C_u=b7DCz@j-iVLp~ z7u(2@BXRYG=VAS#AIwID#5M%T!8myEUU=f!XR%^q0ix2=G5qSQvF59JnEcwSxZwJ8 zS!ORCPdxN2)~v`z>z%t}>}#*!G%n!Iojc+F|9cqcopm;T`gRu5+GHXoy&Voe_#hn6 ze{bCPkH@fV*%}NVK8z*&V)5vhi8$!!Q?O6>47~Ex1LUP=xCKk{#UxPGOiAE^qe_lA z0%k0_ckP5bZ@B{Z|MOnVneziBz6IT zn*`&KH51DVBC(0mRS~6(1N!ZT$L<=5)6W}@__llFqBBm#;+Yfi z{s$kUBsvvmoOvv|c58vrPrre~kNz_T^=yyVpMM(Deq4**eYqL@?Q-12O=UrD3|<@e z9v;8-O3a!)2l=rrG4kr+*eNxDJMX#^#hedP!AmLe_U*ejjyd*NjDKeuzMSzjZolI~ z+CP@z->=WaEq{9e8CZ{p?)e**Q9|z0Yfp@Q=>?p5+6kCDa~h?v_AG|e3TdgSxa#6_ zc>QOXIPra?r>9X%rERk^mA1fSTzBQ4k(ad?W5-QFPC*r(di*xL`T8sPWY!uy@x*^{ z-#u63vndlY|v5|2Ok3|eQl#W{cZ3u0-jeDTR=s98{|rj1#}yA^F9X^y2X{Q5lguI6T-+i^gb z92Y=o{yIwctbapcBVzPLhX%58DB|T8*rcn7PQ!=GNs&?`UO1?_nn%h7a}<*&>augr 
z6|FPhESD{lIll;BRiw{OXC9DCiBIzhu@kB=c(t9?yWg$gs8(d&6})(F9$SN;e^E4J z#;EWV90F`={gISd8^*(3#csbyJ`Ob{{`lFZ!MK9tJ4_>}#M^N)eAqZ@Y;WU<eJq1_y}Pj1b+Gs0lqycHC46+Ot}D6-@p+U2eFoipc5u7 z5sTxBZ^!k0MN*^vudPfT>xVS3pJ#}`?y381&Bl~-Z~cFxwU^O1Ze?~COe7W}{Ahd{M8)74sFBIfy>}U0}9vQxim&N2iKdV2Fy=0q$pB;@7UpyA>_S&SLvVgav)IJ4V zGPq4?PY-x8T>D1+*U-#khxQ^vmpy?5s1)qZ@qzA({=5*WN`-ljktZ|!KTPMn%^3d} zHSRh@@`^Sq@4>-NL*r*hwZFc6D7#xWHCW&9`l~{ohs(3q0wPsuYH;dR)XGx-F{WYcQ(u1-I(z~g zMXM-_;Se%lGMm>N$GlPohe7oGG8yFC8bN`daIoWPW@m)uYSc&@NlU^Qv5{-P)g5n& zR)e~6a(WC-q`Ay8fn!eY^yRBKk}^!HGAjfRWEu%UiDki@#^`UP023{z+z1@#n+(3w zhpyTW+Yp!K4~rrb z*@2N&&HAdnIQvI5++r6q)>38SH}mJvh)?`XlKdaAd)3tLUA>QaCU0gY_7>OF;V)*B z%Y|f{>Jg1HHOYl!$bQ%Lx`e{d>7ySNS`O`djkRa`Kh~KX%}`OcC&W@HP-@`vJ5T+9 zyS-5;d2Rc^8^)~}Sw!$O7$}T~){JCIHdS+7`DlTnn259Gb+p{nJFY6R|5XO^y6^q) z@-#OMNK=D^)K)8Y)ELZmg>Vbmx&iw2KDBLKi!_;R+AaY}nDLs!0QuXt^8A6M&G}WaQh# zeZl@Eg_pSpu*a&@IrGck9ePauDHO?@ahE^dDT%I}&Gqf)b({P#|NdCUXM$|91avs4zpVbi zthib8t%^<)C`t4^YiX+JxG$&5Q!AXE?5wV6UaReNkJGW^edxx%7Wtdkwx9195I$Il zjg6hOdfvK96XVJqcfIMl&ue}U_P)+TNrcZ14Tc8Rfxqx4fc#1SjPCCBHq5?gxBOsa z)X-pAL$ziYs1>*kFeu)XXa<2<%ufK-sV>=j^(mZ3LxLozOYf;Cg0>V}0b<3CAQ?I@ zV|lQ#JGt8=`3sKPU$Hb|T$FDKL@|ry|T{iPf8ShJFyqY(iN!wl0)Y%rvbX zT`lML+t>`bMzKT^IYd@|KxbqhO6H^?l3tHZ;_J^5o)nr3#NpYlJ(hbP9G`T(2fp~% zKR&HRjy?2?#o*nJ*Pq18E4N)Pt>933YTfnxeX&=6S-JVL*Pioq{gO0z)}bVF)Dk8t zk$v_G9(R8lOzFc}Zt}e8d(T%q8HbbS_#$$UL4AL$x1Z8A`u1T(VbgHq?+WpVSzB{O z_2Ws~@iIMheX@PM4)^@=R_C$r7!D0hHbl_H%0(_aK?5ZMEl{y}NVir5bOm8v3Pj=l2*zZ%& zgAJIK(p#|V6htvi;OXX1!%CRs;^^$Jgt-Z; zFr;SZA@R7png_9FI;H@!Ed=fkDW$*SpLD9@Ymv>9iBgeDEh#{S$ODr@QkvTcWCI<} z@A2Mu*OpuqvE4v1AipXPWrrY^zyHe6O)954k)S?(xMGM#7Q?wH$8^>!!ZlzIMgKA| z9lmQJa!X_5UR+B|3!PhD*%FsD#tAQ=K?8SMJ|#4FuIHTImrj1-^{#hyZ% z6j1#9W4s}{1}d5u2GW`95}{6e4x`3OwDiu)t}`Zj`YszNo7DK;A=4v*`%+da0kt8j z(hbv+=&p@qseQ>~&vkV3cC}E{=c4Ris{0^MK^S%P=j7`*fCA$k!rLq~B;_Ffq zwUhM|1p0-W3{YU*7FL8RX*%l?BEw}jRd<8cJE7muvNgC`;1@F=5})rq2%>i^WSYK+ zx`otU7pER%JRNJXTV0s!=lcaA_k>qGE|y|%I609Klca#j%5uGHKCEEMptYMStP}vN ztIB8ko-N8|?mJd;rmxy+i@z`+LYLj7f1OG05~m^R%3#x-Eoz?6wP^blMW^74-5>EK zXW3tY_1lRD;;rws3dAfkf3^BV=fQj_QrGm$0;u+Y%8{u~r~wcAe12~KJwS8QAldJ? 
zd|8vsIc1L7CrnFt@|akg9B9Qk36uI_G_!)#U)h{#nxh;}1m(GyxEbw(k=?fPfYHG)fp{E%=6Cu@EZkC61N}XE^Q@upp50uQb79TP5jrmSSLKpO3wnEsnq1HT z#l&qDWifGQOdI^I)Y_5wVdeRP**elx8q%smO(sM&g_X7Y@BGm~?`8ym8wJI&JW+|X z|4f}m-f``Fi&yVG+vaia*g=I@ddT2p<$Ddg;}K)jRlAG*M>@V>nP4p&MPBw@^)6VA zQ*N{UAg7#L?4JpYqal}pT;BT++SsY@++keG;>|}rnG*jX!9;kZr~s+_Zn`aM6v#NF zBG8&Fr{t(+n{&%iuLvkPLI&F=jXlz85#Er^zLIYnt1+!Xr%gOH%IET~EF&VUPnjwNM?VpTADC}OBU}^oR zLN1>+dZgD)lunY%j=?FSu=9WVm?(s7-?^+*dA1`;-o=f>bn>Wp0vCA6=)xH<4j>mcK zQKR+$;-RR%63`%7AO$1XJ9^D3^7?g$#xVkVV9j7TUSw(98QEh%X3ZcCGZ>`%3m6YH z*}k|~#~vPq0z{c9$PwO`&Du3Wbv}N}?CneddUqnK0ITN$J0vYw-7P{PF~6pslr*A7 zbtjp9P@TP;B*|>_(`Eo>nOPUkg3RfDP3c{q*ybFNEG8tInB^4`B2{-_jQVlb*N_gN zTuqcyMSvL3nFjN9k?KH3rgV0rXd4vu^44zElw>kHXs)R1s6Wk3((l-YcaABDRo>{Z z>p(ghUh+T@g+t+b*FE{(z%5M}YO8j2Doy{+I>oU|LEPr2S?+)LTX8-C?3S%fX?Sc9 zepT#VDw;%+3@Qt(mqr!G@g;Lxnq&djLIHXUKjTK9U?Z*~fyV2PDFP0xd1H{C1$rGk znoQ9ll#hX2AHq43R?#q;55U86moj zT*Z!hxM$nD5lH@o_=ht`vixYisjtYyKi;3x+v~m7C2Z6t6tf>@P-n|q`z(r~z6w|V z?sOJ!GOJMe8(D|-7a&T4ucdRt`A_@ta0jn*aV(kPO#dR{|CJ7W!trkAKoj4$R$Cht z9uY9A!j4hH5$&1>xSJT!=NLdSg|aci2I(55fbk~8coZ<97&VX-8b}8aqW@&fqU=A1 zb&J?v9W^cHkt=11YnqUS1QbZ=qX{9H*8$*7S-Kh_+5RmJsNayZX}6mIGLY5{RcEPV zRc7y@z1qF3R(azy5x4{AIYC?z=>_iu#=Ow-m5=L0c+UfA9Mn-_*Bt=qdc@bHxYo9B z1cuMD^zfqi3=XGC$X zU$MO;wK}_Ud|DYp|3e>!F#rYol!rl_V2FBnvosZ$fQwhJEe4;)=Guqf1>zuatG<9O zpUl>L6)n!+5{>Q<7u9E~&Y$y+5H*nkRh)bkD3fI7ZAFdxAw?E{s`%a)6Jw}YZ)Div z1;Y5i`myOPI%x*|E?r?%@iZ#e)+j0^)tWG~@;Uh}OMzrfyL7`*!gufT7#Ua>EZkGO>R zx%IzIeLY3Z4Eki=HFh>Atxmoz63w>-dO8R<|9`z1Y4j-@0xs@JB`*Lb1yVHzO)tw( zDIw~}7~tY(y+XqnfEz!M(~6eYy!$$ENHND9=*N043Os>lKMOt{RkrPP1mZF=wEBeg zv*h3jz4;+|q>d>3ndTER9lAJuzIwwuDlg=fPg%c@Bh>`CzoBd?O;ojOcPjiPz~_FC z{B_d`u~wT4H`%yph&OOt76X&K!-@#@k*-b!i>+tpZ{$9^1*W*w>Szl5q_!b4I(k6Vs`c|D$(GSTGm(oV`N5 zT?UqB5DG|YNdnkK#BlYD0TrQP2+%&+AY`a8{tgw#O2RSJ|l8cG2uT!EgrL zF;3-4h>5-VSr5Ry7fYy$Ru2FJ7Tb<2Jy!?APt85`hCDWnQ#-J8Nly{HoQYzCR30ea z6Z@3+Z7<_coXOl_*==#Q%!-GttfAKUkfnG@sQN zbDvUWS*k{FDqb}{ST~<8$`(_+&be|#6y@SwCNA$#u1=Wr%-F?{rVKTVJN?5Ntq=R_ zv?Y9}DtFyzabWvZ9163JFWduDyHIFkGGB%R_+*>pNKSwi+DLO%glIXqVv@EX&6ia% z5mo{{1o03i6thm?#%8~Tqsq5zHKD;IgN+Zgf+ zxYG@<#9xv(XvRX~*i2Go=ZA!IoHci$zeOwiwPx})@wJ#Daa1j4HEUQmlHC0E*J=(` zj9^2_$Ch`laV619hdzbr@6gKHu2ihZ6h}FW{D%r_mQm2lDOrBfi}D%Brfw?0G>9ii zTe%s$|LNx78t-#V1Zaj`?c0{^P;yo##4kT88(HD1fW{chWWC}wxHZrc&VRTLKY~L- zswsGtI2saXU0L}`L`hWa>Zj!;wGHeAjM_s-;4BeMSFt0Tzcc~oL44Xem|%n@f;kNb z`QkwO=U1NEokwQX01mR!w?Bn;3YmG6XK5kBiW!e~ulTt|V$O#a&SOz;M~>mk1j(UXroACIZ;U%G-V1LMeA92bm1`eG z4V#XmI#2vf2{3U{9)~YWNlNmL-1Jf=c^C#WUBqies-&s3#ZR}lIKFe7E~8+NE=u5) zXG&nFNtkMXZQc5yw_4hFx()CHHK(2JH zjQ~QE_S?9a!K9;DWy&DmqT`^#m4mLskqsORsN9&KW`b<3sEv^SAJ@0uM+IGk)TC*8 zve?%=bIrYHBMZ~A-$(|yF@9}l_0wer!i|Iz6ToPM7iQ0O_ms=r;gPY3X22HimIe2$ z)Ah3TLrw$yCn1Gkcx%u5jtZn7#$g{z&q)eIZ+iV2YfG^inE-~`RU#X<2dG2lguL5z zK2Z8y_W9C4?6EA~$i6*WtcAvjG#Xp$m4;gvy|tP64%#Q_86mTRG1LWs2$iQUhs!@TN@g2L`O5!(j{wgDj;_QJRVfPMW>OzS+set=3xq3| zYn%GHyIOTu-{6eC`d#O^T~Rjyt&oE=E;)mPoCaqy$O#a9AhzI126d7k>Y)pDS$`p( z5M+xW+ffD0B>n?t4tcg&3o?Nzq;9Xbv%0F|_|%_$Y=d->*i-#97jhZjBiDJgi+x2D zCw497U#2aEESJ7xTO~bmlqgy`8$NI+WI|C<~tg`sxyrV0_!Nx&5*Q-u|Fx26<{}dB7YjIc= zCu-{3En$(l3oVD&#r1h3sJ+B~&04^(2>(873aJB}(m6s|FgyarzdMmOy`=M5g5z;l zgQli;6zvm7LN@4i0s~8W?jTGElrbdRd?6f80Ie*?jqs9!jzIO=j>9hm??PEzM#VVT zwU%Jch=tP$L5G(c(cWQc`P3#qRU!+7H2)lwAlNDDMVl?m{%E(?P}Gn5DZ0frsQGlD zN@)wav%fdF>#M0+-rfgS6Xg0);U;xpqK7HgQQ%2%7KoO9W1_>p`-yqmz*X8Os`oWK zV5E5*Jyx=aLlAg^6=zwgXQ8ET-~)q20kJFsHlB+J^_+f`j2gKdm4=xzR5-(kQwwV=#Q3_ zsynxJDAQcu*BRdl2?W@ zGOFm51XV6o+wu2|@C?!W@*^P@PN7v^2}(%%Zy-3bacZLZl@+ch>t4Cvw#5J8=`F*e 
zYQOhyx?^Zix?^ZXI*0D=?(XiA0fz3bp%E!55ov~yR#Itcr9lvVw)gk{{Pr7O496U^ z_gwp$z0P&6&xwbkP)xthogRz0;^+B66zN*^tE~hCauZiXNf$!-kd$W3l}RARP+lLW zLmkSDZnWX4FMIB4ki*0qQW&f+NUW&BEbQ$P0bsUE#2Tk(s_xbxB-U;OW;w}g zmr19EAO>GEE&iwAXW#a#X2Y=gV#zK1hCSQewa5hHJ*ze3kn}A)n7N~9BQdy;`|r;T zhWLvr5T=&;@}Qfx#ngmV3yZlR(c9#*fVdWJ|M$0~^#qu`i@Z7T+IOEP<4eC$Xb*n4 zv2MrSs^M98#~d}u)_j`xqzW&t?&uO>k4{?BKZ(}9>6YT2d6k-UP4wm=^yw4wV4(PA z#~aH4{fnh)2+8x^@fi;c;4DhM2LgY9CWRP(te58B$=)$kh|Ln06q2^Bs5GX{hiNzo zv>32zvQ}*?1u-92`0QZQEP37b!RAo?zU;>mh7>jV#F^#3tU78C&;V)nK9D^SpYxlS z#_i=-I_4tNYf^DB8PFi#hapsZ_DCu}I>ug79j#kyuOR4o&4YhLlhxr7BwYuy%6K<; z6WrFoJ`oYE#6Yf>C-o)WR_9dN7?Y~x1Ol|XCU;&KQ5BqVg%iygHaH~AS{`F39`tKI z*RA}2&vK;i{I8LE%jE{FrT6Q$&_PJj7C)Qq5m%xw7iV)Rpy}W1_DNOyFu9S<2%?@@ z`BTqT_Q_t2@V8ufV^kEdBagdCFyA-Qxw)UsWlUpLbK|)4Hu3+O#)3;ADk*{=Myt*r z`cU#R!cY0>zaD#jHp*jvQpR}1A?1Dg(O!euYGKM<)2|gLfqZ6w0AMv-UfrU#gy}FrG@&B}rp*`G> zgVGe?)nsH)JrF9N(%pazgFPMynFKX+Ul@%x00XJvP&P7Dta8dLt^b6E%M-~(+^^0H zRT$McC6uIk5k*llDsS?TS4g}557It@Z8qTgnna+I0Ko4QC@1pJYEOTcFGd!+?CCqx zA|g726a}Bz7HxfwRY|vDqCd_Fuw09vNv)l^_Vv$B_YLQKouY5puKpR`bcbQ(_^B4i zDQNyE{QS7ct#G7%D~#CYBib5Reepz;te4xs-uR#(FT|}e*c&4H|6HmJN@q57TBt!t zygM87paBIvF1p{7-fMc znTKPHF>=e8=OO4^Z)dxU3W+$FJ!c23xD-}KC@W^I-*_GReTY!HQu4l!tMZ+>_$=iJ z`EoQWbB{2=LhUV_PT;*&(FB{{2l)8GkygQs%s;J7Gz2TN`SAu<97O>oSIZZ#FM!+N zz5XjJEG1=fjD1r6pv(^OUp(LaqB^R3isG*Qy%Xk3F3Xk;%5#GH)2DM)+pNKw8 zGs7h~f94q{{w#R?Mauix9xfc#-~CR|rCK+*G(~IW|7TvvHXz-3A8_-@@J=}?w#}LkUi*OZ{A>Yy1i-k%>KZ~NSky)p!8`Yh05S8j(6QrKo zf5mc4vPH-@ly>Gr4yUC-l4uw!Yz$c8mkCDy0)42H+0kc99P!sub3VE0jPzUT1TYSw zr(sJSSESc&dYe?!pUcuq&=A3inEARhqGoLSB1rcg>xzqtGDlX|KUt5RW+l=g50mYr zhnk`R2IELF`;LhyC3`)C|5;#xSQ#y5^V?YRk$Y4#B-?aZ{sa_pHm=NPU>B$00Y)Qh zNlS}BHV^FuWg|jqNJBW0fi&xn_n1mzMFJelm*(MwBcB!qyB5Cjg1p>CY>DFrjQwtZ z2uVvt*FLz)f!_T=tP%0oI;1>~VT=>Eg60o3Dx*W0x<=P3i7=B)ploT9QAqpeYX~X% zbOpx;q-{U`@N%s^Z7HAZH-e~#ENW)f{%9Mu2(gkd<{f4&nw#_Y0~!Q|&R_rUvscqv zU8$p?F@B;LKvmK!KgVQ~r5Y3>0?khjAgQ6KeeXglwMTK|$nfO9f>YA`^et$wJ+NHVy}5|cCyDUUUW06lDP5FZ zwM2s?vsvJ}1VxPTi+oK`vq|Ur@9)C!r$fK2(UX4$TEOU;z(~c8LYab;2(p@Zg=FyS z;VO$L5eE^CDd&O)$MnL~)>(LelZg9*!_)Wx9V)O!hx$Si>qd6+j$X-nN)TPVw#V$v z;*16ouVRt9QjdXvzl=9KUZsQH7XG*6x?Z?oDls=A8pG-vL9DPdYTF-y@*VIC5JAQi z=0J*MEyqdNL=VSjXXzZm(dxJ1W9#l#;~EriyL5cgb7@O21{@Uci1vJjB`DZKId^`q z@O$-}Iy`*{$L9PgzRae*O8D{rG-fnuJB+9xp5J`ioU1&@y0t3Ih1d1B0-#*dkWxW zA3}Fu++r(__&`jk=)yw63&SXvrrZnL&!$>xuH)su+P5i3+KTRCL4@0mVznA;ODq2S z4pM1-_FN;R8OqU-t$I(d_?Y;S7QPKff}rUO(sJSL0DOu;V^FO+sF4wg=duVrI_uDL zYsjgDp8lCe>TQO2)R$uxnit7^X+U=xNknU=X~hF;SoU%UmabmHy5Qhb9?M515$dgW zpPyE@(8d*_tXfIiIau9d9=`n|q)>8_;W@vdtK^a$37vKQkZ8KCuT_rzQ>rQG!$crW zPMJyh;qZmmT6h$J-2{nYxC5p_s_M+@N^PJ=!>(O|T!w*^;=l0@z}hc?mY6I_zhm@a z=Q)zAQ$JE1;l(XB2VD=2WCTkK(kCXDv_6iv>{Zr{x!{<0!hwo#Gt|!@0i?{chM#EX z6)+(a&BA;;>YY0F9jW7Af6q!Vqb%faZXI2Pu(27KjG;-uF+I>Qg{iVGgUnFqWoN=HM?T>bbU=nG&1 zFIj*3{+QkfWyz*tv)>WsjZ>*OPIx#%J9NJmcMZzUD5vH9*?TCHZIPNa_NglA(J!)T zT(*p?+>(Bco@V1(oft^ zY4MEsiYeSC{J{e3?p=(YP7?=%6^x`a$PEn>N>S&UxYV;xAnsobMg9pdiZVa?UdwOvn!r3k`4CDY>`ohU2UIacH@h z59tbQ@{Y&X{25R!!=Fx!UERIby{%!tv+x*tpW&R@S_QDKHgbDy-!RJFt<|^`{ zV)LIBx?XvjoXI}$tr^D`M$JeBEXd1Fd+~L-U6Jte9E_q-L#r2uGMKIOzTDUS1I){ z0WUDalivmaui$5kd(*kbx%6d>W0%;Y??U0|8}K&Uz>F`~WdHSo>g3NE2jkSMRzOU8 z{pabC>+LHqA1L1?rxr9ka>YhOiYoyujSAKq^aiQ_tM}Db|;&CSZ2ju#K zQp+Q@N4Tw;kdCcdGu}eSxV^@-0yJZ3cfV(xOuA5Q)>OJve&J(eAaV9C6?^`$)~~-Z zhkDJ%s1v&t4~$K2E@^5P+7*5`go9)J;7RL*&RSu6lI`bqSrPoXt@r{BD6cQkVH5H{ z;=DC_-FN75&hvN{?GVE@0W&UE_=2ZTMQ(mINem zj5`#$lh>#hk?W=c@+#|Z0DpiFyr|jzz0h@t+|-T8S9E7ij*tu)k49AQNV`fQ-ty07 z-b03?g4Mgv23dVDWK@i}uL0}azkwA^$lHHkX%YA|fJ*4(U;|)@37HgAmjuQR=c{ZJ 
zK|H{B)&i{MsX>Ocg4BO3ChO`PgJT_xURTwH+RcCZRQJ>E`f%XKNAvxE{7`E^)+AZ| z>OD@AX+KSw{;S)2*I{k&gqu~p5WajWC^pAM84OLLN0HN)X(FgYV)WXP*Q!AMSZQ}v zYz%Uyj5gY;X_?H7Uv)ird8V{W#?629z9#6koSmYGb}qrYly=P9g!Y|>#B=)zv9tCRkkGnyq<awn*NE-DU2~Q1E_wSf2BWIt2rJGJhyWr$ z>ccz-n#O0ueo~i)->$MBu~%_BmIgbdxesOx9Nh!_!1`(e#2J7Ri!R11#&{^5C+h1$ za^$Cp#lTMzkx5-?Xl#DSh-4F#4HWv7F;;IHe)D-4h@9O&2j0@p$mfvAjN{UlE>UJi z`ZpG!XecML-w1AypzJAbOsx4M*!B&-7apeuEI7R4eSVT9Lb6hJLZq&{cBu97fTBaA zLN>4Nb98%^z>hkt$!j=*7*Q(5^-XeV!A%h?4L-_(88bqdp5VLfwAwPrY`%piJ?}t| z^96H~D2>Ym#TY3mNA<6b)TzF4PHsKB51Jsic4n34w@V6+q%Ljze*RSN-ps@|T<$~U zE^(r$H=lAD6!XL$?k92|Q4D6YnMz`&+l%?(Yq3Z9B%TF5i_Fh9F2e zcg<%r{Ep0h`?!6B+(D9(u%_2Cct59QFH?1_81`d;@MS34g7zPR71b+%8Q?PEoIto7sLzmOL0|8aL-}hMdpx7AvfG4AQ&>bW+`?Snkr{d0L`UoZwLhyTrht!U3lxF@9FtZpLrP_+EvF2OI)OAYH@&t%Z z;Ws29uf&d{*P=TQDt6Z}>=!K!z4Vu731k(@M*i)Xd=bBc)DPN3TCXlqq<&!1g?Zh7 z>FOsV0D?`wZqHFQtx5qvn(Yx1O)xcPP1N9B|J?_#y$nTi#e?d&Ws6Yty^>oTW~(c) z&0iYZO<~kSHQc|oXhKh0B|2GbZ)5l!0}`l;SFx#uf6%%*IYBe22C?WbV-PA35XZ<6 zb6_TYR(j+(x#NM9577srY%F}r3}!JM=}osIoNslA%a!ZH_xtv9k{-%3Fwx2sNV-c5 zY`yHWE+aSZfqTspqr~TEGg-MM?#c2493_=6Xeo)8t>jYIWX5h;%NT0QN+zjUJ5ZOD zTI1J)87{9DGztaIRpL z5k{w=&8iwVqQK`oxBU*73{|r+#&G~g$z<{5=t~Eh1Nk+jp;&&dBa@Ax@P4*#$7#!~ z z*k!4AbhH8?R$+DpHVR7aBIg6Bp>C>%12Wbam0evDcPuS|c7LDlcvG8yL2{Mln**st>EF!k>6`6{r$NR$$B4x^$OL zmgQzJoZL+Y^ZBJQso2A5oNDS17RD)pJ!_}_NNaEke&vKQd5L%o&Mr9}G5@-?>zjNV z5tKFTt?&$i!g&RvXNT|Yl`DnDZbDg{Rn_2yHV&AssP>G+!MC#g4!_@z9&n)4k{|F9ztpqWBgDPR?d*2{XN^!!%HEogFk>ZOMRhI8gKX9Z!=N_>D~C7 zWYA|UzdniLVz2t-C#3$!ohq?M^;tj2Kr7*NF0lIQ2+~BGJc;Y@A&XGf^h9o4b+)tQ zxv}9D53=&I>QCvJinYx6$eo`f)>s+*gAv*={TD=vBLv5RrV~B%nrp=CP^a(1kH-eW zrVawuqD%5wSFxD_lHSvwFj?&FuF?5UUUvG!o)F`7f0ds7SuC@oW&J@6rC?{7Ca||1 zO2u(UFlZ6$c+-gB9j?*^H_Zn&cM;pj$q;%ywaLjc80YBP#3d6cZGhf9Tb5}NMlQXT z)Z*R$nphPi;QvgdRUhP<3pqV8ybvb(P5l0uRf>VOUUz{_*O~?ODLLGuqs>$te4G|BcHo> zA-$}e))-_cJE{zleHr7+6wqO;tDYw?tuf*PpJmf${K|Xx8P7Sici9lLRQY;Y8(4SguNYb zOv^>#7JJ=)2(*c^^3zv0=^L6@*uhKYLTl!g@o0}^RNy2uci%99sYnp zlB7BNEoI$bMjvxB9Y4}y%8J>%FR@~I*F9kuE1pe%l~a%=W28_z@6HZa+L^UxyqxNs+jt9AZUR(f{lsAv)a9Qg@IhQIWsq#i)8 zwO?l>X2n4wu=de#BDy^|QCo$7F)9U^FIR}++vtq&4qeycBtgjP4;woib&xW&v>9yt z3+Y8TnjLEE_Vy@WUDu~NBwzi&T2mJ@j@c@~c5)?!vX|8CbJ-CbcCDu_f@uwvo(vT1 ze%dBQEL;d7vH5ZiHj%u2r&yWqsI`duy!l}FyE6x6*r|Bw7KfOk_c4Y2LO02Q23X=O z1H8%rFI_V5ltE@O%X}g5@A%Ge&Q~T-%}$%B*k`N#(MEn~;>t{rMp=RC5M_tI=PAcW zC3=03hnegbcj6+)(2Y)ypF9`-sp-f&^YPuKpJ!eR@-B)dLalY0^Px|q*QWft`T@x8 z+A5rBL(+x|9)vVxT>c|xJAUj;r8KQCb5r-7H9L~Ak5+|qr$Cbi4HmoKpM|(Ewi4WA z1svacYlz12TkdMR&j9(%Gsx~j5J5MErJzvl*qkf+p_6})48UR+hsMxzeSA0y+*F2h z5^FP7WtzluAXL3B8OGgXGfU;ak_usye30&DCW^_V-r?Cv)ML6aVW_oCL}ODai@D?Q zbf~^>U52oH+0v&Ap>J;bv-C-3DV{83yri{6t?Ab*14_HMiYPMlpn_~st;uk zTDycc#;F)f2E4JzbYK$0q5_U9=HCgMkq;!+!8qqvglWtIK9_Iyk^!3JlA^%(f(Y;U zuQGI$)rdNIvY`GfqmHUiegs51q=tnGaJ%KoV|jroL6R0~WnMmA9sTj9kIRb=ab{%8 z7xtzY?A8p~dC+CxAJBC)a^@7`>hEBhqXidKXj``9MhgmoVtvaNnPJyXOdN0JiC@)R z>M(A~(%S58${U_PlU^u?!A1opB-u;jrD-@P?&O}&wWU8~Q!qTAmuZ!B#AIM%VzQ-$ z#7hfjwb6ywG%>}}69KIgP`?a1LkA>)nDTAVAVO}QhfNHpfP`pEr8NHqTy2s-a0{-` zrY4h~lcP+=thJ<>N?^|y`{w(Kq-w?+63oTJ{y#qk7?j_%38Fn~ta^toqMJ~U@E)oe zz2S5kgPa*4r#X$G$fgLf+-j>mpt>D`DABA4^AssixiKCkR-zLhu)6Rq~k;Acc5ixHX1bq zwW4s}>H8LEW`KnL zBHYuivC@7)-QURRMr5#3RYI7EHILKQHrOC9t3ixE=as3?jU^Q>;f94vevxSJgH?i~PL*k0=1xZ7 zk~7&_$TAXr6pd@))SdW!FZZk&d4fpRktHTohLLFBmgJIMT{X)xT~}bznhhOWJHACr z@KOPfX+l|scGrSlHZHfRX9E~D)mSMxn!X8Vk;okkKAa19Crc+W3X&w>Ry4IU)z+?` z?(wi68^tm?87J(kB0!qioV3v6@P^sYVbDxNX+L|9IClz z`#S6EDEQ|=OS5l6CplSHamyQ4-Q$W6+*8860A zT?lN3LBR`vvxrLfE9!@LUrf0E33A)p33LRuW3WUKnvCm#SVBq4FBYNhqf?hKSaS=Y 
z?3}Pzp<)uiG7DmhQLqLh4xdzBJUuAx%wLBtcpodJ_tE1D!)0mrf<^4KpzB-g$51hLSe&dY8XV2|SEAB>f^w`IqPPw&ZaKkkr!PG~% zH>CE}lc&+MK6=H-QhI90`x_JWSlaY(wSToNNJHXg$!pEIb~Q5dzKq@`P2v{o)rME> zH{tPp-1Wd>)!&Uq|_-vS14h&Ycf_d;|>%dRS&ejhzTx}Qt-wbBw!?8KYm2Wy567O_@K+HsvzHU~zXBJnLW zZB~@ja14tKJxE<{Ky4-7nik0`@fZ~by_`0+_<*6dG~4RbRG})=iguD79RtijwfnP# zR_;1eo_$=BdGPI5>~bLzVnbTCy}cKvFGg*jPjt1_!?ha|5rg%U0?UnbLA$0=s zrxzf0PR)D^{Yg_z_gPiR$KeG~IKFTBY+xEM_M@BWAIum3px9Sq31=I~a54)ztVmdz zf%V68=+?yH@^njU3?VX@hW|YNl=9Vx;m4M`GE;t7L`y*f<`GH`#J1BZIAT^&M@f`p zaV{P@-svWh+F^x)isegU;Th1)tCEc{b7YnwKTwb=O9q4V;jMW}09?|T9dj@-h0)@R zhBWih?H+S}_^fJb99K;^v1nt;hD|hFg>sth z#1@E=!-ptsSzF5Sn%;&PAl8T{!0ziuVa=$3w@f0kZSDc$ERD^RnbAk(wWt}N&lm)G z*^xaz+UWlh0agvxGzdrz;x6P@y5FJrNLd7V;_>Mm=>!nbL zG4DY|A#q6pii76Ox)Hqd{woc?rIO#V;ar3H84@k|-@g-%LeU{FI#&!vTd~+wO=E5! z&x5{NXpU-t_Ze#T#;3zQ7@Zrz1}+{>(K znXNpd=06_hb0@DVBu$2lwAda>DVSd0OPEGvt8i8CrH>FsP7Dqj$ScaAi5Q)Glic48 zEBbfSXgGOy58Cm1>P#Qn2JNQ28uFc$s5OoQ)CPUs&VP-t={HsYV0;_svMpW?0}j3C zKyVmX1oy+>zaAK$%?LEVP=s&F`=BJyMRSNT8C+Q%4}{HfeRsHidHyr&2~)TO{F1L| zU_s7v%dWJp4n>`NoXgE=0Q;45&bt(L_?9=fe7-cff3+!REP^a&cD#h(5n`k2gFNhQ zT%m*AyOJY2eaD{A?5Ll^c@@MgC^>MFvj1XuYwxffpTN>UiGXN7(P|odS#@1Rgd|Ll zP;@VNZGXP`m|5^rqvetU0l)TL?U1YPQWo~{@sq>tuJE_FE zz2ET|c6Ms_;?}}(R8K15L|c@pN;4WiQ5b8QR=OYP81mPm)ZDwex(;Kr6_-n(Y~2+#k{{R_3^0J)A$z?+=JKyV=*#@tFTgw^{WG;N} z%sbab>1`@mPcS7uOEL;iCl+!sE0YV@=*M(2hMZjNDN4|X=aNHBWYLRhzv3O$HM>2> z&HA;Fa^MUz8_rM?nY5GJMU+L7>WZKSNPL&y>sYZoEt3?o7@bDGEx=B+b$hKgjiK9i z<~H^_lrOk}=}UTl9-3yQ?h)#lN;M!v;d$Y7GunRo_&9fvhbHVU@8{R;-N{Oul={tk1Ba2miqRiN~t;OS6Y@ z85`}(`&Vv}D)nF@6gEEI6MqifUkGerWW4)7Nbh2A{7x4lxKuZ=1*ihWgz@{~?Z}7YB`NS*RO8@C?)hAdo0PWb>s?vxMgn( zUw?&myt8Wq+i?)q{n#OH5mQyFjBO3pF9Bk8h+|btK^~>R1Mo0cRO1{L5o8Ipec8{T zKo`wCkd!)jFQYd(T8tzDNaucCuUo5-f|K4le*xrEEYe`T`WCdScJ|y#0Tpw5Cz&@j z6XfyX#z2I6sbv0DwC=-#DsTMNtW+dnMEEFp`tInu~%K{7(@_eNF_5{0p6+Qt z*Zp))e|m*0CYe%BF*~W*r|IE|sF9=2>XfFH6gr*%{7QpI7yY6WKtbE%HI)EOlr(vo zf1e(918a_87Zh+nFZ=uZ!0`gp>l=EYY*Wh+&UU_S>IL0DR|1l%wXNy+il;bagi!J) zRh@sEjU0mxQ`(NT^j8@-+kM;oOh_f!BPb2+SQeGo;?JUu0Q9-KqCC27!1+1jBk3=w zt!%GAgNg*x7w*K25|2eD_a|>f_~in_&#D_S^Iz}fdEL$%y#X+aX=wX(_=e7V8+Qbx z8_um)NcyDVKXw7(JyI;_!gM7aYP0cp^JerUWmJNx^W7XH+PA++#ZkTBHwBmu25O4? 
zy4+4z0$|=IId|FL&ws=0}iYn(mn?k^_&fQ#Hv6|k31uPjDXt1;WUa}%J^0o|A?*R2NT zBl$}6&m}YWhj((bH9?O*eY(C2~-EYAG-Z}b<8ublH2>~*g8n+ zPL4%GOui4IHE{;D9p3^-&;)7va1jIv28AS0@93xq8coEzUCCN#VbDGINwvK7-=G*_Ua z640fF*rMX&&`UUQR4yVFb22fQ_FHVa{kl#U5Ea(e+wT)1NGqzz$p~*R*|TRu3tH}j zH-9(hR}mK8E!Ucyfh=KLS;GvOIUFu#Jm0$AeW3V3itO3NwAP85c<>x7Q|zMm@mO{D z$5G!%`b8oWFFzpESvVzD*u%^VM5BLs_Rpj$VMMS@V~BXj!$1WrCeo8+f$^~7x?(k4 zQrPhXfxmuSytA{Mw6dm5kJ{3F0o#*)>EKpFKrTb`nE8zH84Ug-oQZkVHkLt)8tXxa zV+up{tU^hP>a)&3nb4u@9nDq!)agJ2YmLCW1h)l#yXg&O`m~3vn5t`TW6+IG9~@Ip z?o<(MeI9`Hu^pA9WA!Cgvcpus{(GcvuiS#d3+?M?(oAOlDRXQL3qBEEN&G^f356K1 zO2*?6%*0^k*BF~4U%b~W?D^%MRjhLCwP-L`=X1ed;gUbUfMY1LxTx7vbL{tcNM+Cz zz(0JT9a~eUU00s4aV%Z;*2yu*Zf}Z);=`)n(~0#nSC+-5-gl$ zc+Aoyb)M4yr-~+nwg+g)9aYQ>;~5iD(K6^66TSW5Z-tdVa{6=IQ&{z`Pl)9P$m3?= zyt)p?itppy6tjXJ-X-h>53^-~k6<91JR-8I9(LFoNM+J>z zspO>6xP9>@{sGlV{%WvoCl8mDkA78jTa!sHfHj#Wz>QulWLiRge@C9;SNC$zt@q+y zu}pl1mZa;zCml>QAH;cFLDx*EG*0yI5+m~S=)duu$8Pd0M6vJANLYAMPz~$x zvYzs6$9n7znf22p4>Y@=NX|3g9!rrb1_*#0cAfqwLaRoB@9TL}A>hfa+D&CzMkP#0Lg5S55$0@?$t%rCFPlU=mPdGjA7%}c ztiEgomSoK;JE9$@FK#ijNhggc0KDZx#E4b`t2kkiFi)~&$yF7|oCts&q869iu951J zLHgcy<4gK$ZZBa5!flG|bmi4-;F^{6-l(16l44?p2s+NV@SB2%`O(y2^>!KlXi-~Z z-RrdBVc`h*20zAYg%IyYcGX}>j6drCmWSQ6E@&A7rL`(VYt>j2+oERh(qE@tUY~l9 zjek9nk;u;Rf`~JNnPov6Bo~`imesVw%5{q5_YP0`@H%JhjXo`@W7S&wW|Z(Fr&PN-uzQ2{2J`>i#D10nSDsA z%rirKy|tPQ0QD82sGpvIaP1E+)`=F>+ZnMPW>p~)slgD-VEign_GqR)! zE}U>PV`A8CZG3uM3{hOOWztA{N1H>@9&{so)PAGOfAg5YVdRm~XA^A~no5o19JWAW zQ?#GzKT#Uo(a7zc8bWNGB&%S5V;AF)i)?!%mz`{Yb_Ut$?ZaMFE~Lh>SzAjM!Txa8 zU||nd3pSWpm#q-6uU*AMW&JF))OzAg8+vBH!D@fK1mWGP%;qJUL3b7dtAqa&4rycO zb7kvf%&fpn#6;^sN_gzq#3N22_2AwMB{WR4MeXby_bO79nn8|T;419gIFnb{scK3y zTi!Hw4_?}q#HZ*rvzxY6bc+!rC41TSqw~nc@|O!OT(_gVIyrf)50^K!8R$O02wQW^ z1@q`AkU!G1r%0@ol$tpIVkw!trx$w9Pgt}cQrUtC+{#X`DdqGXI`mZ=UqyOLk;nYm zCfxBmJxjf=JwUcVkggg{1KSAjOfSGJ{`-8`dV(bnmI;Hc#-4R;N}hIYMuePQH6iJ+ zFD4?S?eM&_Ifm=TpkQe`$mwoh_Z|(iNP4S*JkP=_l)V1vzC@w3c_fCI zd?Vb-R-=ZZOwfwi?8j?@N4S8=7o{@|A-%5AmmjQBqjE9W56)>zjyfu5X%kF45y}QMoSmfCstY&W)7BYD95m3%ny|$t;XEL+O%YF%< zM&+}55&Q6s{=h`T@D+9@OO?i{+IZESXe+A6@RB6RI8Rx*ya{Fh1gC%K{Z~0O3rc=a zppKzLrbu@Z&)t;&yv+B)_^Yb4_*SB0u?)S9Q&Pf!q<~C(5o*TBV$LLHiCj%Hlsyo8 zZu4}Oclr)%$|DBcJ^b7|O(?kld3pJfO%&;%e6KwFmKgrPR~xY#27i|vu(tQCwFbrK zP%ZzT%M}LGWbo+Te$D5x(cN-Bgqwg$AZ%4r0w{l@f;e1D*`G#lo%*Shm4*JN2v^xgTZU4IotO!WA3PZA*$0rhqG47A_fw9L@3`^;_NV3g7Qst zwML9leDkI+YT9_|H0uXad-H9E{2?aYv2V}K7y)hR9xIjNTGECi*o@@-X|D6Ka|Upx zrT(+}`Ob(a@~ZS6H#81?G4-VJjrrCR1jJRp{#Q`{-X@I1a((m8VEPCO;O$>*0M)wn z;ar7wW(L1&zJR?7APD2`F~g`u1WrHx{Uf-~48^h{&rsZNSnPomG}}4L?>0&A?*^=*!_KuO zH55D9t$>p^;h!&(I`a30^-vtPc3}qp$rg))F!t_vS{`b07N6BXRK6I~xvi(v#lw6W z8j*dXn%|C#&O8KDfr2q}0k@dg4p&$?>5zG-6pwrgV?{2;qL4%Sr7^wWS)x~Zti-i# z;e!n_o9w8~;oRpIyTh7*Iu=oqdkjgCy(pNa$bt@YN00V!40*z)5IPC!1;0~uT#NCw zvOTp`>OWYZjg)xdEYPG*-o2ad272iJrGj)Tu8FSyC7L@B^84|cUkJBh`J<&0trPZe z<%09USCY~+9=hv|*W>zQsh3?@8S{>Z(Sd zqGl+j1rGz6a&Qt`h3Xq~?J|0@ciS^1>~cD3zrr&NdefOYYKM z&D&5>9Ffpx@cOg1&J!jX5=OyLR-Jl1J#@|x(cc*fcXikJTo90VtW0BTP!=(KfwD3PtSuCWgW zOH1CPKL5!lf(aJ6roH_Mq6KLS+dKWOKp<60BQmhY_xPo_vGuTCg9VV+SQpFIRkTta zMIf@R@VMe8Qv6n@9hQ9xX8CpTH>^o3U|F-I=200B*>_l(y?vggmNE<1UlfPjR714I zgk*lXpq*#BBv<~R40?}fAyM?h>7ddMiHLvO7z!re$2L8+B8*wq7FM5RBewtInN#t% zlnEk;;hOBG6&t;Zrds=+7xFuxJ4@%6d8LK@JZ95G9H&pX;Uv&i#|Q!>wHv0eIT=i6 zRwPWT_D@2%ufF=}1_67+Q0UJO6u`@6b8js_wp@%ocTJOZcMrKw+|Ab2z^+nE%e7D# z8y^KkCoSQ6!JpLG$W$YA;Th4Rj5743#w+8>B+{%GNd_pq#?e_!lN^@JN+|2Hc=Cny zl9SaT#cJls?Qb(|Njms%JPgMZ4-w_X(~2}7i360TH0<=1^0F#%vRG~e$erF+5=^o; zVsY)FFBnPREA=E{*65OM?RnerbjhV40j+MyK$0LTX*6?`KuKy=!J*nbkFnLBOT;wfbU%kU_-Qutw%tF2l{j}-Yh0h6ONIrxr0@aG`f;P#d{KWF81Q2) 
z9`@&|>@THMgdk>VET7X{dA?5{Akz&=~Ly^}^G92zu=54i|y&`&I-b-@>vAw17-s9m>7lJVVAeA`=rj&a$?E7Ru;E^PXj zUQT_E6Uyb(ox9y*qBJc7GEXgw$j%}4Qlm=S? zw#YO-Xrv*QbK7RYgs4gn{sH_IEcQPO) zcBU#IZ6euiRYFHFRPvO6y%HvIn8Fd^4wi`jif~Kp=%YzpdKPc-ZkX}&=X6`+;fzxQ zX#0YPEHBBfyz#QKJxDNl$Q)(Tpitaz$CqA>C-Y^2=9r)ZHpkhBrg}UBuK(TZP`=v) z{ONnjkVi8^+wI6TBNaT%csBWkew8NzPboIfG7Vvxd$QyFSjyEl+Zp5{hObJKK;7}5 zk>Oh$k(>q+y=*?C~R81+E9z@f%wQ?QJC8sH#>-UMX5C(<`L7wM1Hpzb;T*Vd=B(vuV{H zvt;aL{8lrn+o6_!f1D=!mpwW=YFMM5`WNk;cP2KL=$cp1-{KMcTrZoSPGv*=*jg&N z5Q`ZmOcDR!(u1{?4yr-Zp>dtOzqxX>bbo27g@bbsa*Vdzv!<_sI?_QUu{!f!f_yV& zVz*o;N$!c;`G0k*YRhKO--!bT<=ZSvnpdA0iH)UM0e`%m{$^9%U$twyeWsWe0@Y`ZYSrN9RqZtSHs*`HL-x_>Yj#6cH)=~ z-cE8GX`v*)er{^g9D%Qeo#>>0XD6r1^Qm519iTuyvR5qR<|A*0X<>6FmF|0NA+5lJ zzYS>Zh&nT{w1Il5yYVj$uk2C%_aGlkdcn4-!MIDQiI#z7vcy_>L44NaSp`N`MNUs7 z0gc4Ho@5^>rTde~pqmV@uEowAR}MLplP=}XWTL$yZaJkQUu=uxV(D3Pdf+5UVb~|k zNeXAYW?4<$b53tqQ%0sPUU7KTorPN^XteUEn;QL{K%K0O>QeTmAV(%5+h_W0mqwjj z%LuWjJD85|P|}ETZ1_+YB2ETlKvo1-Um8%bB(b2^E7lJBCGWG!Ex4uedR?-R2g?1k zsR?}N7smcY_BN|pI|`jTzGyEZB`t~qmqrok=F%k{Dk31=E!{0G-3=1b z-61XA-Erv#>E`Y0_xIj-=Rd}PI}ZEYbIx9K&G}g$!X#_)Jn16*q|#B6MSny7?0kCE z6KDp?%!~;-p;v)dF?B3Vdsc(trKnPpfyP9CK64>Z^aQl|q+A89t1Y$vD0}sln?9;m z)}monV(ux?do(Bq*4NeaAMaXes=_GSMk8~Zv4q3Mkc09V%ZlueT?c#Qey}opqh0^? z`ccP#q=M6o2reH_R&OA0GP13k=-F?b;+iZ1X_`Vyl-hvA0Crk1QcYm zl&NdfkWzgZ@#EpoSAz;?RX!u*j+&GFqvG1l94Dz~6d-Rit}Kdofa>V(TUv&fDLF!0 z+H5o<>y+F?15<(tt_KC|N+fwQR}4$mV^H7e?0rUDlbg3K`*l2$yqADsJ@xg;zW7VU zI?RtFU9V%1M2&e^(e~b3HnIi5{W}rN@e=1%T;{`afuXnDZr{jAa~@p)28bGuSrUESE( z-d8&W$i^}&U2_7B( za18vPh8i_2&+qdjLGa**3o`j1kx&JstNC9o6NS=boYcw^NND0&7#Ya72(|CI>&w~6 z#>A^?pREs*8>8rdX4nxllT*k2L>BDRA84kR^-1Dqe5)DPj3kvCwak=4&~Q3}pj~an z&rKpxsFy>>7#6i#pcwvM z_{)3!AiRF*&7;b5RQ^`q##cn=H6N8zM#lBtNo+_-`G1JAk& zaU-UTi^Evv%%V>LGt38_F^4QX87Y^HWR#SdCCjZSq!@s6W-l|~rsll;OM6P)0i+b6 z$hznOpNOVb#s!{GutY)dJz2;;zxd$(L&@_e>ozxy{diYoTB9vhnWJ2}-prf<2c4?k zmavk84bp9I;_qI?Qt$Hy3Ulo@EpzK`)S3zhBW6iT8@;_Dg5#6V&W72H@LT#ay5Go9 zoc4?%^Jfk9vF<+9!zMeoa~l09u7bZQk3wG&z*74(RH>~DBRcj& zV9abft;e_4tl+Ay$JAP*bj(^p(fLdR`Ok@lp7c}uYCwtFP?Zt z>MNnQ>q`+a(m$XXkHayoY1=ls+T)T&4l(d`f*HF zE253sk;bcaUYMNoOgyM*&X-~QM`>FsMbu9D|fFt?EPjb`5t(pmx?T_E?%{?f4i60eP!2`K^d3b&8?`yL@Q0 zUyfqLZZ82$7wt|nZ$(?E7~Kfbg2OiOMT-$esR)#qdH+3MA(t%9?;2v!n71M$>%(Q6 zI-J|mb1)O+-Vcik<0T2-9bfv@bj|r9`XpY9vf#fHesRPsAs6&?`7|7bu4P6`O#9$A z5tOx0ncc2Wk3gy!%ErOr2ePDM<^MM=3Ioxar*I>jvU$}6m8NAYQaea87b+crK6FU$ zlEac)yA*8^74<3zL1TjuZ-6a={UCHR*%FvI{0fTer3O*CPKQO>AMTbOXt2bBU*VEC zZ>5(4;|tA#`dvyHa2gq(YmH_}a=$&9H7)T)!dDi&YOETIA~EL%S=N{fe}P-_Lf}%2 zOtYUgCiI7AeSx`%_&R;B2dkvpARBoBq%E1**x3Fg2SWozpUORAt_&EH{4G_bOz~m; z>%8>UjAVNtE7>@(8JX+qhGyCxbU#{JBn0?O?Fi$NC;4+ABpFGy>OV zw7YWqgl_39eQa7`@#j_7r)sD*5kQIL4|yG{OaZrA#c4Rxj89Jb{r^$XkbsAA1$j(F zWQ>4WxkNBA-i2$ou%b-}h4?wVbUg~OaHWKRk%jOAZoHtt)WjtEp_jHKKw^mZq~8)O zoUh^+mDlqiA+3Ejp~M)FTyO*ejO4#Tx!ag8fKc8sqii$4ewOAjTWJh)S-R11JZ!vh zHqW7Mtgq)Dkl+Q?BRttV)`W13j0MSUx?N}Vh)}2m;p+V6g&Fcgko`f0Yf01E{={oc z^q}l3Ln5$w7HCU>!Zm{P1AR}Id~NOX#*>I{Tq7yfAx^U5ij{Au+$*b28gZIJJXd5v zp`M>KWK5Vd>~SIq0FnRc+pfUD)oJN{5kLD3rV@oxXY}vEp$JNokXy~aEQ>5P<|u!p zUiEoptaD-id2QcjAH(+eU3R>|W4bmvAF_r;#+pL}33EmR5~>Spa;R6gEd~^+a z<>eJ{MH%FQ<^Nzp;gz&Cer@l=Me_qO$BZ7{OuLZQ8_$Q9JYXSgZq6kYW|&HQw5l2|>1;oB&bR1zp)|hN z2Li6SRxUt$XOknaK}dX8L+WA`fQR;W@|IqYmC*JP&84DkhL6NooD9^_sz}V3oZwL! 
zY{AcRk^Jc4Qnj&B8ZuUP48nGpZzXRamw3z>Z}olUob%MN(g>E_9_J0ltDZ*Sce*D8 z$Kh2aLF19v1I3?Z1pG-%Hi!2h^^sBpGEwFr(^#pPlq^ zmct)haUXcFRAEc)4x4~QbnGsiAM1$|M4taDYx0^0kwE%6ojesAJA2@;71lJKAU6Wa zlbhS{WgN5W8H2?4TAf}6CgD4V+Y}S z2)2XhCIfJrv_SI3)5-syx}#gY4DbtaQ4!2>ps)8qny2~n{1B_FC9ef1%{4x|ttwqM zD`liooZ4v+DskE`rNEq zG24d#ZEBtctDluB&lAL+(V6M?twzrHFDfC~yphQgmqCnpXKu~6>A@)f^+W6sL?TDD zl6ywezloN1os|J!l$hKv58uD0xW0ZcBf6-{J;4u5YjB;vL|vQdij!G!kVq*X(02av z1q(edW8fD~-9f(uD^)c40ETE)VLg0VMS(pLP6-grr!6cMb_CCADtP$R?tNooYVaZSE)UU%em{Ek1L<5rk~xcMFs1 zNUkLYJXyhUs%224_!Lbq{kDrmM*~Srm&P0`^IH#XOe0URF@k_mSVtyqRQ3!4F5_LWQ_~c=$H4BgMXZ_UR4@)0>D@^shKj-Q$cfNDe;QiZpr59rS(=wq*&To{ef7$J3K9NxS zqFj+r&S$NhNLE<<*H!M1D;fgJFS87~U(x>iQZC^gDwwv+ddJ6~xsMVs#&y+wF{5L= z?1Gdt5RD@h{{#RQ zh}-GNY?vo;@vsK~Ao%8_xpSLbp1isYd#Gu?qnElZ{;@bN;B>rZdDMzxY7T$1MX#0t z{c$@#1xoymh-V3#vm5!lYZ1kJ9;#}ZfINfhA86SWkcrwwh&WtN$rVr z$Dep&aPiR3q;I>&UCTEQk$iR(t9*eMNJj3qI4S`t_mT*3o<+~0)n*A7?hnVLiN&HL z3jg=}@hz%*8zHl&+kP^og+@%fm8FCagG7~EAd!?#o&RI6QPe>Nh=;86B z!{%c`@Abtp%b`oYzgd*3s_r-Pi$lbQGECgwoZBN;p+rZ$_ zPf}XQT5OJnE1V#eY?d7G+&3$}&Ht^G&!>+8&ln9namag6Q9osE=js@I4++Z-LzCHR{bXrrLi+P#jATR15B3j4Xr$FEYPzYtK)hGSS;tM^39 ztwV|vKY7j@iMYMrutK8RPlj-O2>j2J!a6+ODACeG7NJS;SX0{%vf}Sc!fUN7jX!4{w~^Yk#hIBwQssDx>^wg) zOtT1H$R@gv$e6udd&+%2*1bqN?2|ed7fOZuSz9cbi9Q{Jb7$5*c6Bw06TvP&rtJ7Q z>UG4G!?LlCRL5pBz)19Wn|9LNepj!l2zsmzX=t|iME3vm_t3*g?%!+aNo+8a2U z2Sz<0yc6bK)b~tXYTDG>XsNCdqUIoc1N9-EcxrB=lWh6%0!JY)xc_TgT5dcW?r8CG ze7G)+zEhGbM7j?p*eMwIjj2X#{qF~NuBVu1cvR^9W?D&2iN83~-iJ5MN~x~M1c??) z`#)9fh$qka7Pj))Np3tO+QkUVBd$DsS0M4R1#5{dFrc~{IrQ_M&GmI3 zB|N$u!5l_5>gC`0q(cmfs^pN_@|d5GA){0e_KM5qxizk9e`Ez2i`N&mu0igs0$NSR zE*dCV@xXH@U(t_NP%H@wL7vmDDD0w?h~QtubKyS+!hV#>z{r=c9`%mwJuY)xFS<6y zcCtV2R3&*KTqyO`GZc^E=-$Rr^I5m7BozD4FZ3&&34Wg)6uY0RNW5eNbb|VU?^nH3 zTU`$JLnz)GrDg7oztUmjW0RB?{1TeclkE!KbL!<^R7z%13N){Fgtyy(z$e6|babNm zJ-$=8+q3c;c_Gf%$Ca(5JMPj{K3=bFe`4ytUH3_l2(lSnEw!nnZDU`d2$8LE+= zl=l6`AH8Y?gzG%{X-WlDE-Klg%iHH9@;*1`+!b008~9hEZzAtMQ&cEPHTG=c5zV7F zN$faflfG&CUHrNF=$~>fUi;on(FPU=c~SM zFr*$+nWhtRO93L3t9at@f7L+166bioouCn+rsE6biF;6jp;v$$)TKALYF9Q@>cB{vNLF6_~sOYX;z&ME#rgX?k_uJ)o>u8ZCv)%3n6FUfdoy5K?P6>izui+#p<7U0(v9}95h$Gb9 zS@pg@R-fn)Ka927h9z=K9$13}09cBI=> zUpg*);;WBOk_-yLCSQeW;%80uy`H7}R~G(A9+zU5BlLLf?;Ge}HJ#Soe_S04Ea-mMY(lp*3GPe^+W1W`7Rz>x zUHOZ+HWSieG1$B9k={A}6&&4i!kx(-gGufTdzIVA+M5GPsH!D3w$s-8ui>hY15>W1 z9hb?N5G-j*237vS!r92o7~8|i{cG)MV2}3PkP@m<-#THNhrBYC46mK-n-C&#c&JaHO)Bz9g zt$P?!d!Xj&W(XZ~CC9vbw=k@IOx)ivlz@OkC4YDOYU1VoO9l^Xsd%O$dYQ`AT%slH z2j>PT2B|V-(oG4g=Ctmam&r-@bSkMidvkHqlx9Yn+3Dw(Za1pfZgWF>&G)PCKc8RH zZ^jKgKn~qHwxx{tZDgi(W{_h{c}!(4d+CBD4S8=(6u6A;u5@}^l|81XVYm!COXk1s z%?WR&oLnQ@#}_VsuYIqAeW^HUHO_1o2qSuNu9mqHP{>@*&ue!o5mV<{qOGm9kZjZZ zMy>=+oR5p*wkz4^*G3wNyS7|g_#TyBKTTzgU-jf<3lGoNeUq%bJQ{dp52cTYrtHtr zk*Vy*6|Pfxyf)aBTtFTUzUohoLi>&B(iw|Rf48YrMl-w=e*t%0<=UHH#@gN3i>TD| zEX->4o8Kw6e9rI>Uw*l{nl9b0`!a|csDie&8n*O`B-N<`N_U(2;HxRXM6>t2L~qw5 zzfBf}{&}if0W_G)>vs<<(apanVmp{NE%BKgD}|qH0(<=YIKF-TQ3}(yPo|f>9fv;X zq$!YMCgRyxql&bMsYvQZ0+L&csGV!mmv#d$ck5@b?UnK@;#8LWVy>&_h3&L`$GZG1 z)GwN1jY4nQZnXzAgE2L^L7u*U!@GSps19$GV0UOO-qts$$byJ&h75%are*W$e#txP zM^pAapVH0}sqRBPQCe}6;l10unSCnh90VWwSN5*qa-kUA4>N8rhd0*#z`cw)_`W9z zwv&_N^WN|CEE_dbS>7AhsbrrAcjcF{9Mmuwn`g*|)a1HYT07Ck>3L-|Y_lV5=3k9c z`T72>JL2x0iN#l&F502N+M|e`dMU$LSVB#<{|oVJ!V4+GU`&C!Gww!8>2;Y?gv8 z``o;dw^yS?q*YyNd%I(s_;m&+gDh%%Ch+&2Nn5U1p>Nc^jynWO8-dns2P}rmiH6xG$T|xhyH)dlqvrUB zm6>i%S9k_SW|O@&o;}~>oBjbS1IEOlYj=ADMI~Kr^IVHubi_>}xLK~rn`SIUmaKI# zqX%{Jz64d=oLhNpuZHCJhgy?E?Rw0&`v_aX+ePGT<6nG7?0umVJH!@B*D0+p%qk-G z(NqUW<(9q$7@(5%O)}iRJSV+9PnXojY%tNh*|Va^zri9Ur#Qr{F9@nx`abA}sD6fd 
zmyO>r`xdIgrRIG-P)poM5OvT(hu8->y;wd&Nyub*Efcy#Epf-wG~VnYb(e%YdhvVu zF*|I(r-(SYTZy_dcK~7`ZxXf2RMXOg7>hsc-{Srx7Q!E&>QT*VAV5fX?TY|PIzM+r zVF4Z!Q`3LXyQ7l71NGPH7BXGimXnz3re<<;mX4p~;*&`*8N68Q0gVV?cUeLh zYulXXq)Be*`yl(V0x#8W(`}OQ)%~sO5F=?p$@;q{b)^d#bsOJ&>d3)I9v(K%j=SDp z)ayh7R2ilO3Zb(=RYblWLfy{MB+TIbroWbYJ!mijn|W-wU+Z~}gu!IZr{^uCJ$lLk zMPC-#B~oe@L9xBltk0?EzS#e~JC`gfuRDt8ezL4*{>JsZXd8xPbKWa)cO%KE3(fp1 z*)q7hfpKd~R8z4%iV35o#YT9ku#G22K0+EM&9u#Cfjl(69D^`)RlSW9+-<&u*97xHI=k>oa3bT z^3QGV?8IBicZXhT{MembGrwu5X(tSt^e+=oXXAC*d%OR$%Zg^zyR*6!R?@jU=;=Yi z$r8+CJaY7Fh~0S{TT%?_kI_qgh+RJJ}}ZR|`u`ZISYfHx>qmD-}j-Y3@vn4*Lvu17+|KulEP z0{*TxP%=Yo1PIK^d)~l+vnwF)pbT(IePuZp*s}Q>KwK3Y_}$R&P_QsC5wHCb?ANCF z3|&{YOiNtccN!?$_2p&I43{pnqxII96YkcBV-tyTm~md=>|ekk|7$O$5n5aQ3O!W&d%V}v*5@f632xT z1Q$}RyKf*}jqFXwOu%6#uGcbrP^2O2v z5KHilXOPUIxy3>u1phh=TNy!OlBk<$9`4h?*!6;x-Q@En5UcZ40>DYp0m6n!BqOl7 zTJzfjRU94x_lGmu-V+>?xL?9bL2^#AX;g|c}y5a36EVtspg+lPSei!D!7 zG>3=%+xa`k!kPXW^VO-YSRYcw98_yVYxt)S+qvW`0;1*r&1-mIl5-of!Amw2JI+|Iu?UPVM zxem_es{uH^!5{RsIvHz9vj-<@OSw<|mIpzJ*|6uk>#Bctqbch(M6YsQ@P z8m*znSdWs>(Lk6HX+W5+qMzj>r4P}!TA@CMF#cK* zOERiWnz8Y@2GG_2BP}{1PE1s#Hx)B{o*MUc$HYu8UR+^jgj@Y7Mc$UQI@mwk@UeMR z{QFo+Jz|v#7!*mg%R2DUQHX6n^rtYx8%$qSRjWQ*f)2#CBk-g(EXW^S@0Jww7yhiP zQ(?tAdA74-D)Qi7r7ie?jDF+mckK>%2=L?bZSPu`!myK=wmmmWikErzb)2UA)mtl> z1eW*ON^7?Oo7qk9&kz?OPND*Vdtk@60<_!xe6jg2$+B6#s(uxA$t^3hz@+E6=Vg7f z)B!SOtH54&t2Sev5;3CGGR_@O{7}L@MCLiO4~ah1d1NL`HL%eFV$wD3>KJ?UwSYa* z_A5?>NK4L(cg5)fnQ z#W!x29_|e%VarQ>N)ER_pk5Q!<(GqV147-b{{#bqg^UVTrW^yTX)aWnRSdxv5ZzyD zM|!D-V?+B7cMAOFMGeT)1>vcRE%yiq44#GLw}7y~OIO}a0gTqiOtj~0%M!^}odniw z(`uJiUKQn_AJRty29xG^h6Mgz%Bt$P(1E*mi0kU*0GC z0W)?{JW_YArw3!h{_VsEeK}!u^OUxocRV`oaO#biSj}Z2Hwr-`;*el&ULIOi6&G4n zdi9(CS2U7TRoOYCh!k+Vt=_^c|KA0V&-n|e58JI`PEP4A{hXHk927pr3faDSQ8~Un zQT*L-3{xABWO>?aYJPQw>f62sEUHZ zi`kV=pWKsl#nKRK6wCqkfO!J~U-2-{Ih(r*y9;coUmB_p8 z9MC&@au5BlkGyklbD%l2MQn*|`P5Mxw?g!{+S{Rj1yogy?|J_D@cbxAHYX=d3OvXm z=ed{Ax%rC@AUujla@#7M2zc95942j$!W3E%Rh>@^YQWH}GDk|t`ZU)>()BY-UM#-T z4@G}jUvQbvRvtATy2&S%I+9_?`@7sxe?5iOcj1naO5Xcl?x5tudM@m3HHPU zyL0$wb8|(b#7PKY4hNuJQf9K&CT?CX9J=QeaCQF$_o73;-O-j}@v^4t-}ksTpQPzu z*f=8})*ar8)h>?5wH%|ezVs(`uT5R<%+zvv&*14eli7l|U-*ON7; zI}4PdTofDquVjKMo`3v?ZMxlkV$exI2}KE3vBo;4>|xz_o|s4|GIqMbRAlwls4NJB zfgA2=8$pvPv&m^`x;da7HHgyQoEo%8DL$wbKG|4?SKXZNNg*qdNOM>!_*gu~H=u9zPs zCjmsEv8yoJGIdvB3e4 zHdp+`jc%Sba&-yoZ(84`;+QS8zwx0UY%UMB<3ELqNkbtiBpmk>mrP|=lNu=MnhGx# zd96l1=!%|ooGfC6HEPT>v|+TxxpVT*U+L&ZyB@a)0Tt!YSAdUovo<`z6YOZ74b~mD zzuhJ__%)Bzw`h(fTrl)*R#Vj!E+>w>o?d8%Bc;3em9~M2?q+-Q`G&n40RidDloW5q z+l_q{d#o0F+EeV3#-(QruC~^f*NKR*+_2^O5)4dE(#j7qzLrBU%C;GvFW>4qTBi-^ zJto-qujg>l{b>y8@Zl;L&=YS7o_Snems;+&z92FUEymtA3`3*+GZR)6cSxGbXCK|m zcb(!JBT%7i-8ySIM-KLmazw=UPV>2?n}ji&O14uBEi40L;$awg^2EdpOQy>HuajQ; z)em6HlM<&gqiBu&dMC}m7qd&hkyN7jis`3N%?K8RiEI3#;2&C0X_< zQ;?SW%wE+^Ft_09ZO_8(kg@jN>ucj!?Jdup(w*+>(p4$`0cPu)91s0w=ljjn1cj>xyXV9tgLN0} zrJgYj`--v*d!fNRj&H)`NA-Wm=#TT1bR&5V$C8Y6?``W`&Ce3D(n1skKwUa*Zv-AE1MZLDJ%jL>3=|Z&QkaS4;A}Vf| zUc(FIS0i6%yQ8WFTZVo^`GS+y%vG*}%V5+s?l{*;6*h-`Arv$Eep7xt8p-&JNu&7H znj^&M5FgsqoI}x$jJfGM8EXqVWP8aGr6W{n(Q0aJVLc; z9`n(TFE)u_xVpOf%RaG*sri_091ug`otF`W2dA3%eaN&kFRs=7=|6L!sZ}jGZ3UDj z;vS=KK+NuU|7zcM*>&VIxfOOs%9ye4Q*ci5hJY2j(+wuo(7Wigey_c)T?ISYz)(M{ zJfwt6&PmBU{DtKb>X$=*Gb@vs+md|~T6NIJyR|cl&`RWt&@xvQ-5mBQ9`+^8t>1C$ zGTU$^hLnaancQ1m+l#ar&tXMW=w*=f4Pfvmv(d;T@T%OPQ_H73BP7cC5B^A*Ny%?y zHdfwRu|ZX6rnI}quKdk01&U5-TNP?kngB&1b=V;DT1U#hoKh>)=pw>XZ9AtuJ+uy7 zo%G=Fla1YZ*;Fbca#^5GuX(Wt=A;g>8*Rfl`VdVSCmEB?sOr}rTzCo}`mD6k(fexm z0%^2&*RTB-2AG|en?qn{7phd7_5<7}DE7^#3rss*7Jz|gZxc|jYO?**LdFaeeLyL5 z{fAG7|75ruk0UeE-iX-x=>}uvr#& 
zDVQ-C`rmF|{n7E#X!4WE(S%VCSnWq&nDsQ+#`yUsUJ*2jp~Ipv+H&R5yWfm`4QFx@W#O9 zkPAJm!JgftBPyl=C!XH%v^zpOY3OX~>+Q|h?Bm~kG0ql$mpzi&cEz@+%~`213QL|Q zF)ZLTl^;J^P#msP?|(wYtG6EC%4L7jI*WL`{sQ2M5VTG_HXLu)q`%pWFB9@@K8h1u zPi$b|WHr(y@A&O~p8syw0{vd-zZM%yJ+j=R!ACBbWW646f zRDpUIV)k$FY0bf2@)& z5xqz(;+wlWFo!DWdVnC+p{YnaP<OlXDx=NFUdZwL_*+nM{w5NndHq&Wck==CUy2?%-bk)B%0ecMIvH_tCyrR-Wm99%&!gRD*&m}A&0(z&L>-xz zRh#1UhX49<+Am7~ z8ic{iMso*}<6AMH>|cP=rh- z<8K=o63;bV_Nzz}gvi1!-f4wR5sRFv&qy!7rUC2FFna_XTH?mTv`MO}s=wq@vXs=U zI*tH?-4D>Sl&c+)Y#r>5G*C!j7}+uj&#LFGrDtGgebY{W#nlEhhhI}nx;YI^X<&eh zuTA(~^oWz0)BYJ)SyC`e*s$>%VZ%!`ORv$&s081G>vq!&Ct~){XIgK*E0Lywyt&la z)W1@6Bgp~_xKcxBf6#Yc13|*+FG*19bs#3B)Y@g^1(Wxq;{_qm7JkrZsvcu-{!>m8 z{#liDRyFAa1C$yL$^fL-2B$GLT@6^6^rjU!W={=qtp*gT9NEZM# z*~LT}sJWEQbBl|yyiu?x=NJHqOLxZewCrIkGpu)UJ130-GAn4vfF4du*JXecEIXkSs~3zpMqW z?iE}>hMMIP^A%G|=)Je_gP(hP>;2Nd;SFB2gNzT#w9#oQ!8v1z> z;3p$_5--Xq9Z-j&B$WW~1^0Jx>53Qr3taaK;TAUiTi%`!@>pA2@nCfr+@`4bc5JnR z2riP%c8)s}6n0fG{sU8Xhm#X`JFj&|L=Itq1t&gBsPZ_M$jUq!?d(Dlz3pcmG8z(7 z`ndp~^W%TN%$9Xuss_hh?tb}rZezLmM4C7l+sShsBlg)eqz*_~#?{M8&DSx-ep}S> zV|SqHe)-|WrEI~ZpL#NqnjbvX)Bl9*W%3XBtTAA0H&PN%>Zw4aQ-KSC)~K}L0N4>XN+F;9BVH} zy=#G9ae>p^GBq-_m_U*$xDzljE(?0g122)8@jU+|@EeNE1DVqhm+%J1;HCS`+|tgw z_yL!_4gY!f_bo%ACV6cDv{*;8}BTCdmj zWWOaq;0ZtdH%fxo3S=*~P12aPOP-kPHiHEgsLY%vJrI<*)3!i)GUO2x<8;Z3OO>EAqs>i=Ai^> zyg*b3dIRGz_<7aR0QF?tocV&DkM{dI7 z8_q=fg~{Hcnmb`Pw`Ndy=vU7It98Ms7>PMA{%?*W9nx42M-8O7l&rezHPOcWx#N8w zQTg41^6NiBdC9)HIGZ%?d5u5w%9v#mS=h(F-m$Y|#x4<>EJ}5jcFQvGse2x z*=_qNPgdEbYk710!=L0UVxD(5*i3Gy!vT1sY#U#orcvGfE8(120do}6n19*6nq=0!;&=m8r(cEm z@BC-!3}h7{Ht%v-a9Eq=2L|ZH=FgOm7V|XKj2p}$FB>Ulu9fvygB28!wyQZ+HBx5E z?o}Ik!%-_|UyaXRs^BtqefT;aNJ4xQYAIZ5NU$Z(j^Y(ZGchqUt)k@gwtTJ~*J|?z zB@~qlC74#1UYW?Kq;u%{Ly=cT+D4W#-*j$K)#BfU%4**S?4BN~mUP5i!$;tdd@mKOYv%1$g z-96R2yLRoW`Kc%`fdr2a4*&p=q$EYZ0RZ66005W^Ecj>7Mdwz|=NFjMHwj@tQ2s>yPkNh& zhZikF{fsznWYVyZC>NTV1%e1b#c-TJZ2H#-Od7?tKyldM=C;um0l(9a%k*`(mI$;K zLE(Ehrx#PxLAU#taZVl{859DJ$UK@L00Wy`y8K^rZanvrqAU=jyG?jQAV>ONYfs@7lHr+oJDlaU^w$8w1!lcSp4RHl< z0rPX#fHN%M2q9Cgt2d9zCi|%#Vo|e|3*6BK8gy9}HAv{&r)5#NFfgfNp9E0X_?wbx z_EXu|;&Oin>rLt2ruGG*_F+%1-comWg$przl%3P6#etg8Plph}1jNzJ zB&)2P;86(BckuEq$w30b+{2VT&?AySlfkNc2Hc<@01Z}1qA zy<;1^Ur5oMySmw~0ar%Yh(40s2Q&!lf*tzIP?j>dkNx5ni7E(8&2VKS0CX4x)A<0% zFJRMucm_5|ISDP?>A_f;R;DZ{^{GO_RDWbgLtxaF!_wzsGghbEF9fXQsTkVmQ$hLv zO(N0^3#Jz9^{X~=cC0h7o7$BNFTP4kAn!u_X(q=R%=9s?+$a7nM?lO(@t!wpowUx$ ze%%*jA(68hX(KLbMo$YU>=Bg|jK{A*^P7coxc*^M&v2BV4o+t)$rg!<0?pBWeT7Lp z4Pnh~L4(dIUkRK^h5TYMcs>X|c0-6$ExlG*9lwFI)swns846kH(%NouG38$wZRx`+ zLr(##tuug07fi1didv$t^Qg)Js#j-FFcd=dW3vZ1n1nLI*jT333xY5b3o~{ zJ=M(uc9TNM!rI`H_=mawp)jIXJ`2#VV+r%yk?R@QjlJ2m0L6zw5&R4$)wNH8WFqgH zrx=ctvCaa+=xmv?cu%pYzZuLB#NG0^?aIf(jHbvi;RM5;-K^J=VCCZ#`UXZlS4zo( z#s|5iCpgOHH=ag1wxSR3&h4uf$GSy9#I9?E=-&zZt7+?tEj{R|8=HAuBn+9-^+#mZ z!amudvkoWu63D%tZHp12`93i*@t~f`7%8c5?{;S{)M=2yn#tC%gl4U9Y1Fm1KD`N{ zC8RsBbK&PY*pwmBuv9bLmHZEgwsb7)ctJl_d)_P)i$tatFgb-tBPv1p0ibA+AS`R7 z2AgB4N?AOtYCyTs+9ITAwc{6M%c2ZQ2@u=|&;gdoVb;_Xv#B27WT65lC}XAJYZ_Pq zUhnTdsIbeZDJjT61ZP;(*|y>i9|Kt+c>T>_FNul*tVTx?)McwTyN+EkV~qvgF@}3h zE(uSAz}=0y(Z>W?5#B^#vyR)^0X#wN1l2A;%x=JIMu-9pt{Sd`m zRWibw{k`Vb?LUaDw-6j8)71GL@sUj(WMic$%I<)UAq8MXP>{~9uC6h}d|N3=NmQ_b z9wP%aa8mI(oa)HoJ08=+qZ%dJLo~)Tzzvf7z+&f0^{Dn$^II zSbZI{L0iKe)A2$wGY)1E4L(ziYrqyFLP^0g!Rwn;IQ1;ZigRqq6wnF?8Rt4u%Y?wK zUaC_?{yNq)h8*3}&Dwt+L(2fh*;nFZv~TUT1a1P3x-KFQs@KQ5GSGa+JgT zyzSvQDJ3j4X%A4rYTmuUFM-H9oD_`mC5$gH-TeHu1rj;f=U_8F0uB-If$b4jcYwmR zlUvabxh6m%d3pi1biPJZFUJU;GLdxUB~Bp^`562Cs!0X6K0QFIFz73(nsB*0dNdpy z+}raV0iUPy&e_t_m%&3av+o3ZgmF*XoK;2-Fx3oO9=TjOw{-9q!r1z2x%~5W=`W} 
z8RBNLKxWa&yG9Pl{$Px$bs+7d-a^4^ODg}_E$?Gx>tUDIfjm4q66TGFb-)iccwo1bqjUOZ2=SY$>0r`h4qHhqoa|HlNe; z*8npWe_;)aB9FBFXB=tOgf>=|77T(jlKD5-93B>*Q;M@8Ib#h*s_1#Oa9tG9G0!vS%2-W}B1%8JX6P zgjeG<@-BxF`}8jol*fj?0qNzy5D)g?Dp?R5%f%SlCF_0v15r?896(f?+(=w+!-krHB$>oi`aHbt;&? zK_(=y7uU55qGXB*kLy49wk-+N$Nd2!OkDn zCWxA*IW0A0kl40v0f~U&O_J|j2Ik?I@L|H;`l5}sW4vZ7N7}WqE6Pj^lw3b;ROXRm zpb&REscn!i&8+`}Fo!%7Az{l*sQhWDpwgFK?v@+-$5J<(%;)DGSg|@@iJHKPYQZ)l zk1DKOq%^Z}csF@z68G!=V}vA|avM_ThPNz(VOrYZk`fEIuerakns9Im3yTUcZ}Hw{ zsDi#Nbhr&3_m`BE(E2d?y|z%l-!4-BdEaa4%wB$}_rke|jAeU(%J&_vJ3J^5WT6dt zlYc+-zAekTTM`TUHeK~FZ^u|JQ6;C49gmRPZZg|>txOn(lr!(`t;{(Dq^z3sf5A~& zY5jQfDQ#+YK02Pgc=fKb*i98+IN1PBwuB83{9PTh{D)u;2EsE|wWxX801nYK$0>Lt zw&48sE~j?=>UQzdv|OC)EHLj!I zc9hb#^CQyD^J?7%a5|>vtEml1|1OBD7cn6U(>-s_Sf56Ftdv zF1_k2R>On828Ufx%p~nX|>+vhg z@ax#@b*6S!8?{!0rD73kk{SVrS+DL+U+^&@8j*&~`>FN>HnxwBk1+afH*O2|-=Vcw8jRO4TQOV41Tm8%F31SPh&gfJv* z%PMn!hg9kLoooeuH4~_{H=^i2H{WPC>GDy_>?3E7_PW9!iS25%S;a0rDH+aeMa*(s zwu*ZCQ(az&b$EZQ5iHdpSgCinxvJm`JPqNjBHHIkv4dJ(`)_Zl>5v^Y)fc(O?WfWr zrZek%>FQ*fvzgusaH_m9Ib7iNhU(mNiDBuzy)Q)hnGt8PxZD_xLN>*2_}y-H>UG+fiHYqO zqhx5t;)kCwTrSC2m_QbruscAO5|rZK+$=SGr0_3t?VlHA?q9z7ekUeU z=7amw+D||~w5w**{p|-db~be!{>LS|mg}!%HHOUV*AksKG^trkERu&t{aJxa_YuF1 zL6Q%PdX!zjYnec5n@g2RZ<~Q#xBh>3GM#HeSNF&%7GspcS$%K9=nOj{8>#R*iH!b=LnnL z_Yp2~ao7VvJF4Tp`ACezwEtfNkqkJmYz9}Nh4YMdzn82b zVG15ay)hb*PUtvFrMP;Zk%z*|8t{%UNPxqi}&twfNZee2JPrkNGi&m6|Q(Qb%((uTq7%4KilF%IQ3nep2D-$ouIO3AH0yQ<^2}0Lp_-Z0Gq*PU98gEQ4mZhut z?O^4h=!2Tn*NZW9W$(Xp2@@0Kd9c#Ge|q-U?J9JM|Y|qwmJ7W&hCzbYy8KDC?h3R*1_v=K8z3hI#4Cho&Vz@+vHTm_9XY<7gCH z75xO(!VPmb9$+1+_)X?xh`pNpr3|CR*MhR2!nM?m~Y3+1Oq z>=X{sND7NaQfc&R_*`Q&VVHO&sVi3!%(4tv!;0Zi6BLD7a~|6}Gh#js3yo97)BN1| zidb*2DA{qx5CMiAHP(!*_a(!9sI56KwPo4>py``yRpUQHZlMlgvEl1(;ie%#r+-&r zcK&W_{n{v4M>trMv6>FREFSuXz?J%IPy}V`DagasULTMkRg+?HR56TFn`0sF3=#~g zZ%4+-@hyW#L@_3$C5+X}Rg3syhbv1u1$d@MCj5_^{cxz>gAa@`5Im~cRuoh+6s3Ih z$m|-p4r!i&&8G<)J%_3d5NMt{vQv~#p54E1AZQWK?-ooTR*S9)nyz0=rr^#z7psg@ zB*l*+_SciD3jI0>`5J}VdolmHOXiG(fxj>znXwV*!;MFCHtDWfcM=O##Go4zcOg$9 zwSAPFz0h+|1OZzG>UL&Et1hytA`+-A5Z41p{l#Aw{+iO#PJWHZM{74AH;Za3G2)RS4Py zoYwlfhT`$Kv@y5-gKP@u@j)9&$iC0e05#`@%=SV zBP%PVpx|_}T^)(QBt%Li=uYI5e(AbgO!H=So{x*q%sRsI>tM;i^^e0Fa;dK0T_KfJOxmx>57YBl zhHBwjzVvxqp6~2@>lw9s{JW0A#>Npv$p1K}JOGg?5-Q*nihZvPq59(+S=0cK7rmi4(!$cXFtVOr#;A#0ugetT;?#^ER%z)`!Mr; z4uSD~li+mGZsSj#_rjS+L=r<7U@9PYvwl$p(j;vcRO0)$^{zn8Zdru{o+J5kO=E+G zIao$2+36*o5Io;7CJ-4aHApXA{zqLaoq1P*T9YByQMxfD$FbK^K#iK_;WQMQB=}Kq z5xW=9+$8)AM=O^qg&Xp)<~%b}+Dr!=Edy3en1!a&3TFRj+#-RoP|qYuvq&b7&?|Q; zOM4~NHj++(NokH-%*j_wGi?r!FJC32=*&S&PE9?0C^bCBC~E-GtxrR7aDOpfC?e$R z`-io*j-#hq)-jAfRlKgaz%Q9CZ00-3LO_~JrbF?b%bDu8jO2vmdKRcA@l(G88u zUU@3l-XJ94k%E7%GmDA;!iwhRETPkvCAf+J!wE#(A$jgi&Te7st9raS)i4rAdg(kY=E>Np#VT_^ZBiruIIFKdCgibP+DjPxs?&IK{vdD<6UaMR zQtEk;s{FNckCzp9u-5L+8`!qLJ>+7r9^ z&YTG~NZ00fZHnqz3xe~<+S+RmWNBmfCEeNLGRE(_QAI%b-CmNjOt=bLNe(@=I}~$8 zL4lF2Zc&U{en3t^N%UM#G^I|&_>r@SlX`sKMIcy5+g`6t{OD5?M@jV9*Ee`{S~BKh zQu=-q$TVj?A*su+M5NL};UYgn$cWlHSF0W7gi!F=U_}w_EiEmd!8qfl)-Qq&itsg( z5Fp(Aa$ z$Xn@Xm8iGik0J#p9l8&V11y_^VhG43Xs4$vCRMJ|=$vOX-7|TdE7Dm#Z8_T^OxD)Y z!HA@@6!y*5_FN4eiGBJJJ#_g7w~2wB!@m6$eWOp30x=4LA}#jUnXA_WX{SAf<6B~~ zwsW_J{$``mB9Ytr$MeUV^462a=EviFlM`|C%Ot~~|IEW9>xX8mcLV8J3vP;BP0pW{5cxSB|AEt(;!e6`tpRCANMAD8 zAf&#haw62^)bcy>Gj7q5^!TLoUa!h3#KsP zb%tt?Fwl|IRaO$FgFO{WdYVGTyc?ZXe;c{iljE{8%Q??h&|I+&TYDSW8+?BbyjNPm zCvQ|JRMy)rHv;y7XX{bxH@yedgPC!g{p;+RuB&{cme^*rB4QP0ww9+=Ya!BOj*&q}dZjfR*F*7solm5uduErl!Z=?1v zZaimOFwzZqz1-mhXms;?_DzL-_);^`FC~|&H#pvLN58bF8Qw!RvrRb&Z`-@b>B!mW z*y!+?SC-;u^DrYy#dp`0ETixj9mav9@AuVz4U&k$=kneg%6)4+q-4f-Ryt(^JD!Wh 
zd&Qq}%RD~LT9G?V&uJ4+J4yTXtH2nkS@);drxx)|rV1M27DPq~Og%(A8d>^FhQvAy z+R2BWYS~wJ(utB~)$F=Nk&0a0pD}{Z&WJzT%Ed5p7I7;mc*1Y8wG7-~h9NF*Kb*he%dTYdzL3;HI#gimFF;*zNMhVxx-`i+&kJX zT)&ysA)GwV8Y{pHRkKumuck{|^jkWrfk4nXU}XhYRs}Eb;_$_07#*o)alS1{@?va% z*f#rcf8{cteG{++p^|>luGRXmA5WvKTVP1-@VqW>()Hk==7D&`ni!z4ILdi1!U_I+ z@t`NiGvQ<50`66n>2~TboLwGzfAl_VPU^K3el_MZ{4xU>-W@)1bq^&l@#HyC8C)A6 zCa>%hg}_n2v~#cpwwSrSlpxP9kz_JTAC1DF#r#%+Bhdq6BYW6E3F;r()%93utQQTz6fa@DV?T&FKw zppjRH?sy~@rjcOBhV%KQ3U%FIrHjC~>r<}u-+`4-!{A{)Gu#Mi<%9c7YP4ib9Hs_> zT(~;Pn2A_~-^K^_|Ncp>lNtrmO1jX*GpJSgYm=RFZBC{)>AMsL)^$SSCgy4AqI|d0 zemyUrZ?HDC)eVoZO&zjOLAkN|)C}%>SIjj>$QNMjZ8+KDKXNtEU=f7GvezfRsli zV-5qe)#kv9IG$)n_JBi%J#ggmzY#K7a1nBbMFu;h)%hwZ+E@1rUzs9<(}<-tspoh0 zPDUzGn=!1{1o#Sw2{J1zRKRBit6OpDChvEQz{klnb(R|_`iF*Gy?IB=@^ZpJ%<$+i zHdO8>Nq5_D&$UXHC*ZJoa(LhAAUPuDufEh=0BQ1;>**|tB@@8n!e;%PQ>FLdlZh8Y zYs(lVyHVVDjGDrScIN5c4$M#Iu*YI@uCu41DJXi`I@o#)enreSMJJM%kN7i(NG*c! z6YeLm5JD(9qO&jhx)?z?T?sUi#qUJcHWz6yKSvQRckJg%FmC$2ZU@BPzZnqlblg`R z#47RjQAvzVM!spuv-5q?>+l#*GtI7HO$Pay4ise}N*&I_4KBHlbaGq58(;-kHt+#w z4)&UCsL1_n1BtPz3F+xwkmTK=H&h&LIZV_%gTZmcSUzbjSR!8S462j}OR z*B_0KOiqr=v-JpZfy0T>+6g%*$;+F1n#!}NH25k9?M$4{chCTVeB^u~D3@!s^aU1< z4C2&SIETf>#b!mG>g?FaVR_uAazhgnqjyJlwV$&IQ1h_z(NhmEwRjp~j2D^ic|4gr z)n&xS?dRly`F$6dUQ{L?p+ZLmw92vJzZ|r287bg{X}|QVZSvj5nE57Y__;Pz|Gc2g zT(f-$MbhFWHv8}`G|TiI5j)Ig-`{i4jqw^?IOkk@Rm*Ah;hzl&Bzm?q7}%CFHbQ#G z^(_nwHV|!!DBhP8-0`th{VAH~NK?-JZ6;Uwuz4TfFUP{HuC2gIN!(LY+|#mF*}^hb zBZr%Ng)mmL75#USx1p_&udc4Er>QIaVzrA`P#*o%c|3gh4GZh+Y(h^W4?8tZC6eWr zLD+iCJ4`oPNd?!Eadvk0C}w6Z>Z+Ws@Qr302_IRtAeC`%rQ*EuvD?VVTj!XK9|^qS z($t3shlTDQ;)`n`CQEA>R>4K)MOvcKLOSmcCocc64hqnaGVXHZ`HlmGrg>K_#O_5xH!m2SU1tlx# z0gNz8G&IFLI+inZy!sl%rXt(?oV(~3db}bVyeszX#wQ~%w(45apMnGc;iW~{EjTFi zz90?Y8os@>O2gc*z0XiKa0Q;|xN8jlGbro~B{V`jNcSah@|+Y^!? zhHUtJB-oivQ0fT>Pt?T7Kxc*3YvOw6U{AqGnk}uDNx22a0i21BY?cb`OInxz<}4aq zgi5O}p-nN|ikyWOd^CNG~xB#lU}9&`6=pg`dy-4ism!i6o~#uuD%r}q4JHr5?}?5(Ji)3*)J z57=RmE@y^GRsuj}7HN5o zu&A+k_p@Z(A6r8%r(o!xG56;lbWO^1VaJ+r(fA`X7}yYA`kz|G%bg-1(aqY1_zC#a z5Dsz?MF)L~kJ9ROP;|c|U%;mWlw@{2HV*G7sHux6s3|F_K$$6Mk6PRJ;F!4KHeB}* zzjuX_7XGEDkcnX!Rn6t*aicfAQbVIQiS7C0Oyv^eKo%NFR}ni!ZF~TWfu7v;^`Av~ z26OSiLo^8VKyZ;7lvt_^FuoKV$Sw8J(SHN2K!h=wF0zTCW->%l zqe529V<>WAM+Ae1=WQjz{NV-{be^I$nwA9N=3E(2U9-`A3Q{M7MT^_uc_L)FzZLaq z8dU;(f>g1Vw{PNUU`JQ~jUSB&^D&0CAKkkTu0x6rXW6_8lH72eGq!$x_8m8mY`nMNZUoo&3%#%21v9|9JP43ZHzs1qbd%>EF5zgqfdb{G>W`GH?9m$ zxQ4@b7c^}?@IS>*f)M%t1Xm7Ze-R2bubHud8i3Yl{iixp6D2~bCKdjAM*qZN{mNqB z%9}L)pXK-n8Cqu8&h09TkKfvHWR%+n?C~q?!zX@6=CVSQGb3jXym>wpp5wFm!p&A= zo9=|Q{SYf2PH%!QGPe)yuBn%u!@OcgObd}j6)nD2){7LXfrctc2xbC<7C!nlp?F|P z7{B8y$VMUyiR@K{5@MX%@j6D6G+|7P0twqK3?1g5txyLQ1R4 z3A^MoFPDym9I%?Mj938SaXHW9-?JJ|AIhE@`oq;YGdryNDC`E%_O^T!YtRTuyz2)8 zeGBghtWnGHL5V#zUyi|4#I=^CAwbHf3%_Yb|M7taX3NnuXvX!a0-F0%SpTlN_r&lJ zT9Wb3K=sEqFRgO$zz8<_10~4TpNFOq{fz$Y9{XMbD(~Wmpyr00oj1 zgz*KE-=ssY5$LL5Uij2tNu?Itm~&~b#Uzn_^*%Np(}WAz^l>cE@)Pcc^CZjje5`$j zn@A+Q?ngP@d|tQZC6y&KRr=-y-!Kz@<(9ID8GPAu(g`g+-1v4K%jMMW`;47J(ccsrv=x<1!Z8uOP_^q~s;4-8K0L8AMikcjoL(RRoa>e=7f4b&6hijuo{t`3I)Du8MI|IpCIhmH(G zh)j*R46MRHVk1I=3E9ns+A^dKFSjV2J*F}|!s#JPpdTU!nHn1x6c#Ql%%{JG_h6CK z@8kf6%DBch?02)n5Q%86eyw&>Q&UqG7f=*aOuqXzuAYO#Q>-2a?Df5c$Jx?mm_oJg zgD6B)@_TccOlS~)`x4T$Wt7tAIEx!H6XB{W1<(pj1^Uv{{-Tpm;Sy9v#}v(fOp%o^ zLKBP6VaL)4btoiE5ZTrT5&jf63I~rPYaBqxLk!qUfwqOZHQ00a*U-RFryUIk@RZQh zB#48UNcm&SK5J<7cl3Svm-CT+XPWT9>gU$5GnmnzwQ+BSID5oGe|zV06bqz=r$LSz z${A^wr&zl9{Sx$<@IpqJYJ+WQ9qU+75EMk{8@^Mw1EV>q60Qv61tJ=bk%b>-_#EsF zW00i3&&B-p-GK@y41jilKz__nL8KXu;eRp=)6V|FZlAV?=`lssyL}ZHf-sh9py-v` 
z$ZIt>f2P^!-aip;nNcd{*k2_p1Hagy)wGirS=E`IYZWMj5FlCI{JVGW=dL@fnPSx4 zxRdF4Nx2Q(xq$vrP8K1?9-h&Bnp~aVisFoqV9bTsR~UZQeRbdAnZi^Jjbpr2!Gjw0 zC0gzPCpXpaV_7domsxz!C}%K`h$^e?CZmB+0$^Z&c@_E^N}BQUt8GRbejr#qcB{+^ zFt#BjjTsYTlH%L|mK@(b{ZA5v2D+0Vdn~+oa~i8JPY8QxaLfV?=ETs!OexhM!(n`o zczG*D6EuVz6Z-)#FwUl4no9!)fD1GhU#*m?CAOFUXi+BNo)4hi zcBh1Oep<`~;~W?yP@oasul3)ILd;wEIVVu<2KYN+&=kD&E0bwK#`Ei4(8j(#5YHUH z{kT*21z<&vhxD0tmPe;@nlBBG_-?qX15|?tI9PjBVIVa6|5D~4mF6gO>It)m>|#JB z?IwIn2k`fbf@gJ6mfom^}b{d&zQmF1foyBH*-9WltP?3NStZ zXLda+7(SLlCgoH_@y`>SvuL%I8=*^!=Hrj~^#~M# z+S$@HE;BlG4DPIU%X39?@#KrebWey3#GRa#YUP{5`-uvDl59HnS!yxyoB4p7R$1vw z6u$0E&1|pR2D{yb!e1QVN{dyxEltkxr{0(`25BnBFQ!4vnM1ANO1jlpd~X&@c6CS! z=-wIrXoVK%0(F7RjpVm3MpKIK*}OuzbA)yp)1W*RPtg#7;a`q2OYl;W!$!Xu$i-UV zKK>NzEqeC^2#Wp>i;(_&Kx+jA5?ldtpL@6@6QRoS%$MEl%XY8vv_(=7>>0XEQB3i_=+y0pRo>-!xLGVz7<(@Y(6Ge5$5PkL>oK zJ$QgG)OLgO@lA`sn2em<@n$EZIXca_Jx7(;?*N!^p$yQ<+R!pu4OJ=Ob zR9*cbiqoIxlhx(zY7WmKf62GX(2?_p_w&l`{K1a*4eAH}+iJ#c9Apkgo%+2x#zSq# z{!2Zdza6ZTpx%B<%YqF>;d}{1zvpAWKd*it4;vpFSUTV%nu;ZO;$bX*8G2ceLpT3e z_QKe8I$coWbi599GIpGODW*{rEN!lbr!I1t+Q>P(+JqO?XmF72Ce1(KlfwLpIePQy z<3E|XFB0KI83-Vz)mP73?aR;v!sc5B=>O!Ep-$zzxnI1*~8Ha`MQkH_3%sN zR%Kb4R-5D9LO4D%x!+lE0w<#uu0`eMAEU$8ihY-;fa%ZdC-uvM_40JvhakJ=Q~#jd z2gvXEIclG$@w;PWjuUb(Y_`jURGq1>mJZ&Yc^I8ldZYDPyU*YImC%hKac4T3@{MZM zIEo;!FRHlG{f;{f54I^-0&Ik@_0>^6V8x^i^j#;Z2Zg9m8QAi&QmZdB{xPLA-Ga04 z+)fDkW5x;ys~|){lDguMbBk?=U3)e)6%``nLv}U2>!aR>D)pALh{j(1viA9hdSSP(Gx!z1H}1H$tSDiO&H+AW6r<#vZO z@$lYw@K0i4{wC8%Q_U9M77!n5mAZG`k&I?Q&4mt$Nn#>+F1aWLD9(oc&%dX%5XajCG#$AK zlXV=oiTs{-BN;n8u9LJl?Ka@)z?IN*&`3)_Zh27N_ZG3oH2BT(y|jjVYT`-;{_3ag%Zh-Xa+b3 zE(yWXy)MOoLNLhctisZU4Bk44W)FS^KOWAN`Fu|g9JU`Ycat_ao|^f}CACvCTD6pJ zRD-Q4Z28M*s`J4Qg$T9)D)=ng=jHA=?cCPRcV5f(??SAJiu~9)pZm~OwA6WbRakzD zWowPkOEh)1?|n?uM@eI27P*#ypj8f-V?U=#JtXxg%i|e(55^`3wD4>*P2#=>&LqO# zn;&ilZO}{7LXsUys4yn|6>nn3mhauT+a&1n#+FY82dGi&yBj)wy2vN+7O<%Iv^b+u zR;=TG-Qytu)Z+E)&kNyebNjgDxlC45y&`nF4pAF>a~LjFIc>CEcfV6frkBP{rY>{z zG_9OXV6e|yJ?HjX(LlfkrtnzZk3jvUj>dhR4kMXkgTdVp!X629l;L1*bPxu!HPe~Z zCX$AU`OFFR{n!rja+#3dz8|czWXSNlA+hbajEi_a3iZTYhZ1E@*Whpzq$c6tYI?c@ z!D!3p+D{EllZQnSvK!tW#gT{JU&h)$zLuUw(zjgJcW$_z*ZaZ=@i!gPa`HRwKIKct zM+eu&eDRm;?*Stpj59uIHsLMpkUQ};X z5umZ2n)!@O#AkYY^*KirF2D8O5L*5~Wa<<(IsOQ~-^=>L``unfwe#Lyc)BE@riQ^Q zHeE8s5RjoC@M8HfuaiA><@??%e35MLXq*z zyYthkx8l53{I4OURtMa)NG4e0*Im;rat@KCJ%G*a9gRdo3!TC_n(u2lP+3?S(yZ#4 z+vb#|v^BWrIUZpNDp2sPqzLe(U+K}2?;sNww2@Wfj8%8PhfW8~aOwN4>mx$NLw4Xc zb?LJ~mx8sN4pn-MJCb~{$ADxxx7n?pI#?C8=c_goe&{m56Z7EXS5XP)c4@lLw}tY? 
z$Nc#{$?j>nSZFGsLXLt|Cl&@yXCT94JC(HO7xeHUEW^i5g_&;VY}ag>{9nLpUqU%e z+Qd~J+TVxfix1n3HH$-m!HvIMVt=PrA=l)gF5uVhB8D*1Lbk~#h5W*5cf_;QLD6AZ zqet<27v6WHF)`}TgY?;&`8Rp9a6}se4&bp~Hk@&ay>caLfByQ-e#?_{iK-jY27!Npgq6wj(GyrEou zYlJhn<~zn%6yIOs_V9|@uv9-L%qhwhTSvDMIoLhcHc~Jlu0?kT6o1Rd|H?^ADQj6~ zltPIx1Aj0P-+>W$9dzm0>I?e~UEPicJ!x=oB>_IL3f{y=8ixmu!ORx}_$*As^VM*K zVm>7F9QCuiRAIh6RqNEgq#colRRN|n2v3}=EHJis^q({k(7jEZ&G@cIV7$usssC|M zyLPEs=cP61jhMx0fhUXWnjrDjR4;BNl7fO5@hoi7SI4uW(d#r^4+SJ@uGP35zJ6CK z$d5WzWms37AES05a+prUL6Tm%A3q!o%&UJNRZt5d;^-u%Uczai(t91LGb{Y=_cn%r zfEklB5=D??W0Zg(Bi&X+6)qnhAH>5^soPdT$58A{ti6)RsD~wg_ABxDU|DiJa5;jA zPvZXTRjIVmQ|VJgBaLkpb*Hm2nR1KAQ%qFLZt>g2WQXgY*G)VG2nz^!Lo4j6jc@aP z2S|hq`9tfV%hkjNmRM_^q4FRJEk`Iu`9`JRb8#EgNwXF z)0}aVVGsM}2UC@hL&Jd@Ds08Dikb>@Q?rq6n#{zJ)51}AX78x(v!{dL{$9i8*e-O& z0<}(==JV5yxZYtd2DkmHnw`r7gA9&QZrb>16UT2TCPaA57% zjyhJNsrT*^4}n+3uUte7y71aSL~w-Xk?tz&P$Pt~I+`c*95o0OHgfV70yHUUX?p@D z;oc69*dAGkC&Rbz8e33nR$=Yz2)r;N*j&(_RET=UxQVb#Y^d%rtH4nfHdqJKrE_m5YZ>t$%TM3S8~1O;3fOu^|8Naf3+;g(y}S{vz*7` z`%d(@RgzuB!yRZh9)XcCvw1sgKfSc$Z4y^elm5l@j0D+;!rImD z8L+f#d42y8a9h(@7UDvAaK`82$f;&N`x?@QiJQ&2$j%|5z1$)H=je621#Vfx#6;wI zua3Q~(QY#p^m>CZNwC@RyV|JAcKtbuP;-TBIGx#i%)fG7A`)*N$fLBu?H96{&U5fA z@%4;+4)|h-Fx-{g;c6@pYY9<<8D7PBRa__yvQ5Y$EcD)}k)ezwe#N1B? zs~0?-?_ugg`Yx48jeMCvl=d_MKfii6R!03qcldsJj^!!;F}#@JH{fpzB_-ui2r<&? zCA{i(wv+_ANsE{uSf_26?HX1VkCa}1@++&Y{OF}LyW7U!>)B2i*-O-`ibC$Z{uwcv zfixEc)oNE< zFwnnQZpSJglGmsUMC=7`Xvwu4JU}`vMD88+#2j_{c*3^6`2&-s~Xd=`=cg4+=xZo$6GmVzXFW$ZRXk&?%PXdg-a7 zGP&|Jon$r|ahy%(^ljp!3HVSbh-lvT#35 zXJgp-Z2^aUw7l_~mp(6Ndz^LK&DUIY*|Ahi^RdUDe0|m?q06MPee)AVSqcY3DgASTU%vn1Kd1kDZUvtvJ8IBV~ z08b`%X-?ophbRg_6`B+Q$mFrH5>qJEnOyhj3s1iC!Mt*JkIuPb<9E+L@#^Qx>Rbc+ zb;(GiQ@c$XJ7So;?qAP6|LBAFKlwv!;N+{ucLW`gn}b3uh$ngcbjn8 zCDW#i?Uk~7`6GhNf>?F{&GE4<3%*^pbm@0rtZwEgS4O%JTK?(m2C=C7$kWF6>*3Tn zT6FlFyw2zJAA8<~=S(WiPZG0o^4yAA8w#p$m&fYrYHDh#!d%Ov8XLzgSegbk>$QKs z_V(K!{IJO{x?O1{eR>TWIraQArVJ=b0;PjvIUZ#h!?QIj=dD;cfAOOAO&Y~CuKeno zCCit7yR?pH4AJ3f)6Xo>46oL&UAumBV`53C^!~#-W~BI6e!0Fu*2yH?uMA+<*wUHH zf^<%|(u9t~`nv zeEZdcYGLh1A8if#vWA{@&iKJ;l*6x)S4znAQlc-jLmpSPW%aV}=Pj8(YuUPO)qDXA zFcJjafF=#BoB6}yrOOt5K5r*YX-;<{YIyAcnpVU94RdEKS-f)7jyg?k+OcZ!obTr@ z+q|#$b6TR0rc^ee*Oc?GzVOr`c|L|NDC(pc?MSW&qWS@+;&WxvFIs8E;BI4Z zf)*PM0lzpYOngAgh3*NQhjGX=T?~Mg#Ab$n+F)s&%x5E^qsJZ~p)ilO84?r!v0-B` zU_YakWM9qFS)af6;=lg&+7D8X?qxmly`VTSG`?m4%0`G6@r#MjQ_V;)`b$m80OeLu zMW+)doG3l8=DR2FfB1=i|0s7FapAeA4C|Gt>NF=e?^wTL{-WjcKL79Z`~LFGSKIxH zz(Pl+aZw83#Cn7Aw+d7nqMPP^IpeGEX3klyT7H?8Ss2$W&3)OC2HE+SvpT7C%g@4>L{rx37Lp%$Do^5Vw zjPjh|a5){dD_(u_;XmK;>V~AUbEaKz-q@n3tePpSx-Kgk=$Qae#>gxuugH?FX>beC zOf(kbQ1^_*!gx!WE!-Z!&wR%e1$12eV4I?9nr488%i*ny*%&k_O1OO4;>8P>uA2Mq z(|^AG_BYqMMe?kmeaICB6HuWJ*Vr0u;6JG%1F{K}Ok=mP_Gd6pgR*P2gh`4>vTz2~ zN9SH*W}3Hd33WZ=)-#KenkqK0YgD}HIoW)8*XAv|oP|Z% zsdS{aIUtmDD@s$Hv|?}qODUeg_-&|ZW4fob)0^uyepS#6hck>0hk&m+xU=)}oK&E( zDWoXgy|#5OElx~|+=(lH~mY1vOzasSEjPIgX?S8u9o)C!6U^PD~=VlHh#Ne>oXiX>0^<2C)e zQdR30R`_X8dWVkb2D#CIf{n)N9V?b>O(-2T;ry$nU3$^DK|S&sHm|P_H0eMByIT_H zA=|krFzI-bVA<^ea2&s7`Kp?5G__mLEUvb4S4>GCJhWIWN6rJZ54i%6T#;BIcihES zV>nG`G?fOjSK;5B^TqRcK)S^N27VBZqBShEnTT0@Jk`Qx<{6xArf@M6i;HDV~BF#E&N z0$zCCUEJ%WGta#6;%TS$cWs~f?guk>Ms{vkK66&RqwDBXFFj{iLiL8)CduJ(;OLHX zBqb$pTD~N6)YVs>)g!xk_3QtBXA_m3(Ruvuulds-uRNuIoj-f-H?vl4n)UI2KU|(t zcFJvc+vFq230{xe>Cl5)m(F}+&T78Tkc+N3e^P1EhR@&naM9)_%Hi^Ou<>}a z5jbc{pU1ia2LVoWL39r;KZ^T6`zQF|VgQeco&=v43WyhzbGW=dk4wZQhpgaqc@sQN zQFN+1KYi}SkC)3M&%E-Ud+xgZwyQ>t$c!(@IILH3_6E=pgA=y2!RYu^8POGyjjV*U z6pvHWn>db=Wx2Al0&;;1fSkp!I>o|ph1R3c)hOBlBM=z311%GN1(g9u@eICLi0>BB 
[base85-encoded binary patch data omitted; not human-readable]
zA?v{ukq-1S3>>Ho>9_30w8t%&ONaV%<3j&em%Wh$$CV20(`B||eg=cV;}Ft>@b5lO zYYHBD8%0o5z-!`8V}s5x2@X&v3l{pn{F}WKoGxH{g^Rp9xUptKKR1rS%i~(j z1|(kBX212Vx$!W61+y;RPNh$KRA(1dNLYiHt>;A`ywh`r(Nt=PzbooPet^F_!v$5+G?__;Rc z?LEo0!|v&?S5131DjEnAYVL40bywj_lsNMy3!R(rNkF}QJuJMCBApw1!xVJ{a&}0% z8fr@-2CV)FChtN7+DH|ii~hg*zZg%bM&^2I${}@Zim3_{xBfje7oldrk!^S@hVH*G zce{O#{E)r-5(a|XwNS_Kq7+LrGQ}%cLd9gn5YO&=j;qEfa7wa{zcu%s=F=?I99M_1 zx~y9!HZ9f+mE_B#Yu8TAm@Z6ky!a%nXxcQb72@-;eQ z4%XReC(D@c#}EGny5Y1=!onjTBpx5%FvZ=rOeqa7-;ycXNdabxo8~-dG+^i%zT3%<4%u`gV4BVdLaWJ{fq&JS0!aMgvu%fK`Qfe7kH2Mr%XI7 zvjoL}VG0$kZimwic^#oN5Gn|&o_dDPK8S3a#6xmK2pbp80@kvG=hb==YV^j+gS(Ao zF_0@h=d&xOFw;J~e1m?xg&T|EFHH8SRzbV6j#QS4ST1#v#y?`%7J(d$=_OS6d*m88 zii@a{8$XN9sfYtV;apTquP5_}_V3k1%ASoTyJ!$`&v=O(wN1zMk(GwN|dwNX9hG|7YlayQ?GdQqfph9IcBsIzueN!9xvq^ zx}emu2(c?Lc&<%H2%7S9Vd2TkMAL+8+Q^rsjf{!!ZF$0tR}zmflVT{Z@Xi0RuhFgkhEzVo92@MH@o zjn&m{vS|XfqAjga--C{435bQBo**tUTc2J_H#LN$HgI$i7rls?zBY3WIPu2aN_z!; zs-Qa+hePj(zzv~hBa+q)fh^$UjzrCrXx=JCQF4P8ev0;Liuzmzkv4RfQLvF|_%E8B zxGRQFYERG~D)jZ9=}k*?UrgVGur`FmAuYa03%f82{KWZGi(+)iv$+B6hIFyGICb*T z@m7>H^Y3u>bk{8FtxQ-NgXSCZYpOY9i=V~?`)P1t*$g5OoSp86XPI#Drv%-kEBV3U zx}rFuaatg?o@77pz-}yLdV(Iul_EQbibCt^&dQ}HIZ!LugfSe`t_T~KIdpZ;<6{W< zJ~P08(5dF?!MeNGjihe0>~9DP%g5}#M^!Guo6UdUJRD%TX7SOn;>PXtkMCvVrZ2HA zd1$;+y*25Le~Z+3Dx16Iv}tu7(ye%^Dhq@cXJKv%yric{!yBHrf&Y1l6 zKTX3QR_REIfq_zle*Wy<5(>bq`>Bb(CE3o}`E}azA5V(y8s0=!_1x5lxVqSU|9AQ-@fm@8Gk zta2V|7$K{&YXIbo-S+AlGd^<~unD!*=4GfPlodLr-)3?1x0*k_zRS$);fs_jy$+ zo7W4cq(j6J6{+w(cF!6`5;kKLMpdh}k)4|>JXr%3*|sc9du6i_=>pVuNYQTL(c*gp zY$Ij@1!5&Ki7%m%R8JAtn!iv5f@UUUV``J~IC3etO|}-5_?Y?lbaj76N)?%LE4yN~ z$!mK%DevQA*dMfzW$hu+*QdKtXI8;bjT9`kTQ zpkK^k;k^W2ps9ZS`IQ=RG|WonGG6>q?XHXP>$SPitO|{93=~Ou9Z*pALC*nNya`Qq zj%`~T)t@fh0_c3<@#&(f4xH9?4O9j{6NCC?cx7Gh1I3DA( z?6hvIb@e+{gklSPNyY{*(=1_p)Oj5acIbZ^EhnepB3^X-r`mwZDFr{;`6bo{)jnWn z{b?;ap?g+K8!5Ns_GIG1G=IfXalH6$tM@C$%MIcMd!>g*8MHZ8t;qA zL%Y+T9w#~^LvKP2Ps1+lJc z-<)9;7bx0f=Wx7C?Jj2T7*izT_WucKUu`$&3g5@>9lBzthnJ51rWq8&;d2$Y|E18p zjjJ8x8U;Cf;6cFsl}>)D*Arn~s739TVPJn6FZ#ma-Ma5vW?>+|@`}{?PX?GNl+XJr z`Kzd54W56L?1=mC%8KYFi&yO|o*@i;$-?RO7v2}bR9AS?@felwjA}|_Jm<#((d1u` zE-q@Bk{Zxtktqt(TlFXRJ?g){#bcA!i@Yp}5!a=!R30m#Jx1e_b*S+}^O2;m&K+#u z+Bqc*Nm6}s?_f_wio=FDkX%_uyT~$m!*GA5OG6zsnkmdTXoq!80!j8fZnCOa=F~oO zxqV}%)z{UE$2Nv(36g=>ZNmuR3IK4m%1h^p&AI@i&6^1yZE0F`CR(X2EU@2%8d?wd zU>68h5oZ(poHbwOdYeP{*=X-{A4!O45xze+dh;iDs~)4h&@1~iH?I&$n=OeZS_Ura zcCpW~$5c&rIjf)LvrjiO@_U3gCL2bI(IJUEEGG@LYWz-4gJYV-bK;88Qm?kZHXxR| zOCe1gd_8nQP-oH$Iuy*EPl0Dr3I{3jh=NhM$4nuD2Sy}HWhT!)g*v3iNDWez_^_^U ziDHAwIF!Svy1f|8urv#<4&+V0tZ)jl@rjBrWE-*+%|Zk;gQ{A^S1=l^dDM|ILVFN4 z)e~g39R~zrVWjY{hYbm!DQohkM0jc9`lgyoIa6r?LI<46iRS60VJwvJM>UlaJUd0P z;N@aF8&wC$*+b#8T8ZM?OcPU@SY;kX8A5K+sb%KjqLLN4=8i+wiS?)$x_S^oX(3lj zUaXNS0xxFtQAgm(uygh&w__s7HN$rEn^iB;59H2w6Q~DY2GN)0hP3a6PoBz=3Clzc z(4?M9rA8cUf4dHyEB&mm!?HrlIV&yH?sf@-m7;}EO*2J6%cu6h!gh&^?ayTPy`+Kab ztE;~66@`M@{S!9}b|o7s9JA;qp2_V`+HkQ}uJE9ENHy&b>@>i9imPErIIHBdlw&C) zRvx5*s65JCX)dP1UZDk)m7YnQ7_lRY6KmHr;Z_UYfm4F($D;BLg-Vs(VK^zta2gys z$M^zEmXVn$LJq@l8CN@tmJLd?NzvjZTeO5uHP^$!%A-(Z2%Upe1y&(sW>tKa`dMC2 zRE@mKV%||YceF*>2fWF;i~n0+LL9oM_7tT@^~Y{9mexuL;e>VU2ya}jw21gX2Yaj_8k{h@D5$k-AVFMY#&8ZRwpg^u1d->;_p487`@ut zdbo3Ez3tL1@zzC+y57+Zt8zA2e`WgBwWs@JtVf>1-_pf74zdZAEc$#RxduT>`7pM1 z)#9W||0b{5#K9JjQz_QD@gm(sN0d}?({+(}1-;K9n)F|!eWHt|sg?Jv0FVW|H|&7H z#aN>1EqW2_mF15efLz^RZc7`^5~Lo9tIb;?S7l+GLvrxaqb6d#E4fUJEz}=fTf4x? 
zwKBIomb#7-r`$F&@W0jGmp`@zs0h_EJC_}9md+hJ6*J;bj`#ocOJrI`_1%6Gpum_W z?(S+o!YB>n6}y_*L1u4VklE$+^>xwd?}nfao*8lZ`J@mV6No-gH<|KvJisMLAecHA zWb5Yvsmr{o9Q;Z=!OFt)^`ZBH-{lY5uXxzWj8My=C=MdDpxZwlH-|Id?w)YkMKY-z z7gVSoMe+{wgWrV?^DK|-^<}R{DTA>p<2QWYoTdzTxjd(u;Eegb9U+~&0;cv$KR)nR z*7GvIRk5UcDffMa2%BXk?!TfVVF(xme@Sx)AGn}h3H}sfYDp5g*E}jBx~EpmOj#wQhUGH4GHBRgGj(rdQy8r%tchkd?SZ`6zpZ3BjN zWg(3s$j(*BBES^eX~m6mWpUd@&3iJF#p_3rAxB%ZQJZ3DiE8J2?#R-N2H);PH}`!L z@f~Z>vV6wHz*%fRMUA4kF?VI7DrtBuF(r&hMqi6Q5E4fG{scS_7O@Eh0!Ryct|7e-Co-}h zwp5D7Iz*2;+66_fZ!yD~1gr0n1&wuW)h&;%9|fc@L=bjS(BYXXGMlKsQ*X-o$5uU}x$+!WVq z#oZ}?{Z_+Q)19ufZqT81nNV!!R*=_l>q=SKccQ4c23DEvf`Efe$UES;)#E+=3v;dA zNmqw1k%;g7P+Qr@QUBwr`{JI06NM=)L_J@ZLGZ&M6A!=6pv&rEGt5im0f%=S`($iKrXO@S^$b|^`L`K7`lCIL?1z$$ zlauFY5}+p7r006n?@7pShci8U<4YBjVgK&vP*}-SZhJWQ=~V|N#d^2P#rdb`7BJQKR^GXeQPS15t2T$N z(()rchkg_VESJonSFHLW4vs?aIjA^=tN^&||K+5X&IIl5AV#B;xPHJsWa?%uuv!wM z#N!eeTjLL||Kl~TPM294T(ytF)dp!Zg%-(z%G?4;xhLJ?6w7SNeNN}Be+eD-ah?rJ zy*(!q2|t2~DW#br!1EBFhs57JH$r1h;N6L~&*qD6p#&t|nypn@%hZf!S&1+cgTc#T zsZK_zz8`y-?{PF+x4W9d(Cbz$<7tT2fQ|~hSKRdO$=^PfXZr*tK zGs|>u;!JmBAO4mLs0BIN?X_rk($Fa$*KR0u5gFwWFCXpu*u&Z4`TZ$-6*I@1+tW<+ zMEF}&Vg<0GuG)U5r~Y8oZulKiVUQL7huv1EdhnW7IefyA5bw-W1h81L^oS%#3rn*A z;MQ};P>-)0~AI6Vp zIGXj=)jLTewjTh@NI^b21=E&=KizF0g@5w_=*$ z)gCJXQ#2|6Pdd4qEDfg8oz-{5Qi{PDZ+o_%WXdTg$% zbQr!^&w6kNOx}Ixyj9L{Nq7DU)`wh0`ICdagJ0-<#S2?hlQL2mG|mzmu1_cQ+n)mk zDPxlFlA)W*Zf8T@ZA8%r4YSN6=$Kf$ zj60~vu4T6OnI3- zkwC2$W{^XYJ9PDt2c(B+p^pix%yHYzyCaEqsG(VGH|+313~$kxsdfm{NU~tNd#t?F zqP=PjTDUnr*4OLK|3wxS8OT2Ab_>)OhB)JSifOO7dFZTFq}uOF;}{9U#!-5qge1;} zrA9_Y^MbWp8Dcc4STuNX0AA%4w)-~Lwk>vZ@39O8Ze4i;?k(m_aXJn^1A_|9Quyes zv8p(hY>AbOJ4ERie$-Y*q%b9mWd410RdDZo?Xt(tU>DvWDlyhS2F!@KOgtSHN?47> zOkA7E$wk#g^8v60<*mJP_BKoy4!2pquPl^#ohF_N1h}f|mItxw82=ZVCghMs@u~5&`=YYbU+lsNfGIY-;k3;QVquK5agW zipgz-srmVN<1T#08pe6FKYMo&1)$5HG<124C4YTeC-p|2DW~VWOm3;m0`I9@GO-4U z|BTdaUm-$A2tMl{_o!7Dv8qcMIw{LGcUD0R!j)}_b4z2v=RDqOo3^BV$vTxpTU%b=aX!*5uHrr z)>7%YH7)kyb1^eemOuiF7n|*c96ZhX>3Ia5$I96drF(QaE}Bf# zCv`Bl9f*K@sIM0uj{%~L`XnJy9}ulmZhe5A|LfQIU4W=xBKN1kE<3?}ybabt<|f*4 zY^fqUWm`pKb%%Qj<_yN*>>#kHxC_LJTXSlEez{#hc^neXE3Adq zF%yzgJ15mo670UGQ^14rVUNFo7rQ<2eea;+oMiEL--98)Tu8+8X2c15+DV_clz6a9 zSTX6OLP^8%G)t^MxBYSLsd>x)A#?Xrq{Lf?FXVA5p>0CxcMzWN&cM&J%LBP<>1J`&ej!GwsqA6k_#xXKP1*IZ@K9)U zy^!40TWnknwpQdY!u$`in*y}mU6w1o|iFLk53g<7k z649-=^B#EmDhvTJ1y|f)8nEkuWn-KmXY8rubtkaWI`fmT_=nn+`nIA^uBs%2P^0b@bnnzNCbs-`F{g%Axhs=$s&HZ75`bl{7ORm9T%TL8SlF`SC5kv&x9c`6t`jzv_&Oa^ zAZo~7Rhul+B|>a;%A7)%BJC6<{gv(iq|_k`*t12RB0^AwoVt?s!62cjul>WcG0dJ8-+Jqu4ZGFvjsx=;QrwV`nFS%zPeUJ; zqgO63>wjB(pRX={_x@Z;PjL-L>{1jIbMt?-eO*DO4HG+m>q!53U@4Id8%$Yhz?$|G z`2S)7!Elgx-g_vNjuZe3-%v{q7`1N~c8C@|z)5ScZYycW3^wI)Gue>y7> z+xB0g10ov6EP+mC!BT%!*nYgg%~pBwJHdoA;$)gC(hh7S{w_c(h~3O zx~Bk7KyiR+deN*3^+`DQpOrhzI8?rk|8!e?VG2~1&;jeS+M%4la0a9(zJX#zEMXNR z8vTG*LOX2%#!MQ(7#pyOKyi;n_cj4LH^(>+h?k7aJ}A~>H5z^#K}y7NOYSz^vb92w z6P=!OnQa^kh$}fm9`vOsgtm^xnUQ^^3?e_YyG41}A#|50__Md-ZFKMCnc->{Jk(Ck z_x=YtqKw^qIidF{_;P zxu6%GNBm7%V9IiV{Dhj-aW6&G&gwT!^31PCOY`h87X%mmiehHw!8o!yqXFmRvaS!2 zdH8d5?PbLz%fYjL5UNQ2_@5!|3PX? 
z(nw`~q?Vp?_K#SJ)tW;aLg$#(n1UpJf^4`m#`Oe$5Hho>+(s%mdhy$A#Y(wpH}di*6LR6Z2TgOU~s()?0#`(MZzNdyLXfaE6}iCqj6 zc5034?q%q9-jDPvR)F|&CjROuAqoxTxo^B+>BdR!OJSylgt}ASNjo%Z8Fdn2%#zQ6 z2bg8(c%HaJ(bv{g$3^bp50!Y^f_5TgvmCJfZkDjLerOw<)a>gRV>*$1Rw}6NNZk5V z`Xu|%nxE!i5X;glm`1cx4AilJY6|vs=t#EnMnF&>gw_02i>AK4_Z78`5@n5?^Ry2( zy6@SaT~I``-2-LYJRI%|l1Y8cDVU2^Gutu^A%BhX(zTc_4Q-4y_rll1_5xETnUZ4j?MjA%@a1$aA1SmAmI?_H2Jp;?4c33z#n0Y>*T~dTQ|!Dy z-1K}RdYH(*c=;P*DCi{HWv#SxnUjnGk-QvTKCryaj+S8r-k$o5M$52p@OjKKpJ+6zNLGXzHD3{!|mMs;1Cswldu=r-2@ zae{-o%2HhpTZFA*;;J4F$Qe3}Ss-K$0)wq>ySDA=#=6OMzfG9IS&QCP@~>h83bJsMQ4PYHBOA zNG*qZ>LroHe+Iyc(P%6jj=&KNg^P|t^kd2_=$gRz`tcjK5@{Le!oF|w1l$J?@!y6j zvN(Dhy1vw9x()MT^3l@Cao^9xAtntvR zp8tI5{BORP`T5K*7A$}N&VJ3kxOoR8CgB78fJAH>8tZN`<>`Nn_tixd%zzS$gd>uY zaE7T$Bom>>Vy*7I{;Y3beEEo5q##Oi^hF~|pBvMn>xlbi)euS~cETg($ByrKQ9xuE zE6ux)e)rXep5v#mveLjL?Rtv(Z=uOD2%6M&nGuBa{1=~m{Q3KCC@wi1VL8T=IcnTR z!v^$IYDySW2D#NB(fE>IBzA6DKWpy1XjG?Y&O+7hB!D2uLV{@))Hu}cbkOPW4Jv4g z6tE#Ew$S7(%HS=U&u!FX()nXrwP{2#Y*EqSSDt%TQ{_-h8PI>gjn`lC-M8OOHw=58u zGUbASgSyQ5`pZR2e&jvrf;;QHG3N~J(~Z=%+CbeWpMJJ=C+g)`!WIxLsGw{hQ1jK- zZ-2jd-bWvP>J(f8%^ILg1M51yqsE*+eDE;Pr(-oGU(EclG^k9Ra&_ybjTuv;3=7)O z_AOiA{qUo6&YLuN=n$ueCgn)g!NTXBe?g(dMENujINTm6;-u!tC-3y8dugenv~QH|+Vt zPLXisw9R#y(9uXWWrqVgD7%J7hl(FWoFa21d)`r~OFT7wx>YlIO-W1JnP%vVu{{^H4!kA{V0^#tVGh)QHWs9U(#9%pSkX0pz zcWz$4d;6B{+qZ7tzGK_=9hVH+0ypEjy)P$YSGjlum%ASVgCU!Kf6Bm|9fe zB^-a{gh)_P!%8R^2rDv0kWQYDY37*oCS|3&7JWB&!LoHjM~<^XrAPK{z2drCCr!Sr z;%Jd5Fl*MWXGM>y)8SAg77Y-ZL>UTAaVF)As8&dk)o8#{Vljfd`R;pSD|< zn`*MIJ zd*1=rMUnl#y?kqWASCqOiy$CHI*1@B*u{puqo-#t2R1xU&tA?7A|ky*=)L!ngg`=i z|Mu;>|M&KrAPAPb^Zz??_x-SZvt{Lq<5S8;r%7A4tZ=tTzZnz;c$o|kDfxg=7X+%;L$%%?BDIO zkt2IGqQm*w`Q<+E=$Bp^-@fCZF(Zc$9X$HBo4V9&T(jA{YG&q34OV};s+3^}HnH`D zXa073ldRQi^AwIDbj{ncSC1x+f@6E~{ggxGIgU_6y`RmT-uuMTb;nXhJlZet{a0J6 zT+O>Z_}b@p4Cpjq(4c;eB5SrERxJ)&RZ~~vTlMad!Q~y@cRE0EK_k82;H2Vpn-3Q{ zhW>f#{Z|j_-*>>Z4-7PZSiO(v|L{8#I&z6kJNNJ2#vMF$MDn)l*s*2f9#>2pJL;P8 zz2p2RkL8p}mffSQO2Nw~ztJM^lZCqi7G*WLZrZdl%^gWiS~hk1jvbYn4!^8rgG(lk zA9eXvWBWJb%Ma`<&lvsi%l8kE3w9qq9`;=G(x*=iMjwuzu6xB@4TEX{9JWcS6Ib zo_%5RZ8xvaT)lE-=3`T)2u@aTa?Ut$_s%Wr)~wvRZO_jAN0Jia;+)K~<;$OX=J_gr z090d5*CG+gV)>_@duG$xl^a$s{q&Rf+q7?A?UzQ49J6TQy!GqWy#4Om-TU-p+~T&a zoA+$pwr=C@1IP2av~NdB!G#OwzwqJPZ36z}NymHar z-8**g-D{GTQxx*Dj_%#PXV>0?<&{;1Ma7#pZq-$tBJ{N@m&2oO+_L#-c8;u_v$jDD zG5p5?ZL-(n0SbFEzw|{WUkil2o|Bvc+{d>1-Dn`4o zAy-_2V3ii;q4QRX&iUw$F`peEk9_&SZLch=cEmMuv5|xCjUPO8+@x3c&>aSj=+AEb zr(VV3cP=g1L9MP~*!v`nd^5DI9PW{W&BUvYgju>(EsG*C#`0Sp$CQp0!J)a(q zOB7hvL6L5rjjOKkck0l_<)n^hA3U9($8sV?iiSqhB%PQPC#i~t{hF+%sjMftZLfjD z2K8S&BZ|ypI=VTlPEyfIk?*jEv!rKs~Tr0XUTe z2OpsZi7pB4Nhucmi4I4?T6d<1!#)->owQ7xa8i5r44iIn|2{pRfBNx*M-MGrv@{fv z`VSbQ8(dLo>8ceA%8LuOZrzo+W^Gck*OTA~Dj`XcK%sMr4vMlAMZysQCJ?lt7+^h8 zU~F0hU0Gezu|sFKD}K+;gQp7$X`XG}zU`^ptWB$zl@*odl~hW)9#LepaV;himPC$M zB7Rx%QxwQkX^OEd3hZEWNuAStO=*G|QSh5Pp%s;&w^HE?^x zHEWh0KbBosRy}{+#u>4!u(FyRSt>z)lA{pQ2l|Y;$Snb_cg_P{To& z+j-xEkBpyq-SAPP8>Xaf+qCBV@rM^b>WqaL{tE#$x~#0?+wXq(^owu5{ASjkeFu4# z1*5F?^7#Oh&6igk>e%}-E-anl2B>YoN;FPBJBxCGk)j4L@)4dUNm`O6hs(uM{O%vV zI@I#YNu3fJ47j{W-qH=Z4(`m54T|Qp-1VQo`pD$F?zwmJZI6Alu_Soz99$493XY)A zc-Y`Um-p-1)=|8DO-{gjrUQ*+K*E|&rapYvU6b#)=63kY(B@ptnw2L6U5na$Nk+Of zu_oZZ8^9o%Qny4+2H1ft{K&W*s?G) z4{C316gDV^6P+Hf4Yz|k%S^UGGt{9S2f{U*-PxLz%YD$@qi*R;}9N zr=_0(*x3;m*QjxZ<*U}bjaxT&YvrW{MPXN)u}_R`zH7}C6x?QgGA26b- zWz~_E9eT8F)4F~8_MJNR`1q4g1%}gQJ>XYrY66O^xOjnOxQfzB(&9J~R}mLQrcrYI z>C;7u-NOS6j&p5b$F2tgNgm#RxNWDN4^4UI_DOe)8#`W7wfNLz32Lye^ORvqid*n< z9HSAks`{F@N>Cl%CHLsy83stCus6&+}pXq=Hsm}*s7g=O)NKlwt_ zCM{}eYTkJLRj0F{|1Dx~{D9 z^tfE)?Jt`)=-uB@eFFf`O1d 
z`q+49H~=a)p(*Og+_F~V9(wwLNu#)SV`t)a%={rHVN z8B$jxt@`z6cWgQg+Due*4F3-5L#qmf#RSvUK*N+IRaKQW6|_=G)-_pEY{)i%7bQs& z9Ud^zkhF_p@B|}prMN@TCAx%B#50;)P zfyB3J-+?9r?J^uE^9llHTvCG;0zn0;s|!x&OL9onR4{r;5)}@HFj$%tX_M4^S*K6= zgp`&&hZ3B-Q>#`+O?l4IC}ugs!o)itfZ_E$tfmnP=?=E4ccmh#JKh?o8{-` zL^G!z;b%5O4D}9BeZW@QymkBXm6^*|ty#Jg~wUa)Kr|+X%I9KDBio|`yV!C1u2RwJ+SV(uV?Sf_gScuvxWP&=lN?? zlBwGGl8k zI9J}H*FV|h69u7q(-+gf-cn5wx}6C9%=ZhL;i`{sUc78gc7EAu$otc~b5uG%`zE;;>oxE+qdO!DX3>A*y3@-k8t|(n1G>dif@fjaZe|_1P zf6dF!Rd_cPR|?%C{#5HAdc`ywG;V(F_1C{M^|_+lT$&T`%jj7M>uh;>)vi4|n>?;0+KRs$x9T_+Ns-VfPTrvN>ci$}VD>CDL`I#v_SSm}C0=*Km}j59KhDYR z-+kcuzx@4lVddu^{PmJc+A$8{>8GAvHgB1WcU4FsmZ$#m;&a*A$3FkZca$T+V&leN zG3=!$@8=!Fl9kI}f8!mFasBnR*L!s9q?=}8QSm*KC+Fs7{o|VtHmut?bH)-vbhhqw z$%}t_;(;r#iYSqRS5KJs=3jwocdc9T-s>-%D)U7s{`nW5sHrUcYQ~IEMB!+!*A+K& z&NmHGl37Dg0{&+nzir*7jg})Vnm_dgF%M1k2p62^3V;^`^b`HEXqcwg?RosM`?9lh z&AP?}-XHu)N=(QvC|sSn-u(6XejrX)B5G~lD6S{T2^?+vbWrUtnIz3}XqCwU&)C;K z99;O9sbB3biLf+9GJ?oarW_0?MAHc$e|xuY>Mc)hjKm3NlZ>f@6f`x@GCd7d(g+Yr zo3KOj5FKHex+2vk0y!L=I@t#omH;p{P$29tSYRB}hDnJ$SOun`%8G80K28k$r3@TBybNT$^_QHaCK3` zAC;k0(ZR5S$21xA7kIAaYC%n205R{GKHc1K#OJ3O%tWjEG~S5SM9OQ1V@{c zMX8`n!&SHiLt~n02!@N?k)UHjycD`MCUt`WV;JvdER6w5q;O<1Z5rqdiUK7bXqPg8 z{|OBR;Zg*o?YR;3RhA^sU%x#H(1J)oAncH&hUAK_q!@e_(qIjsci1681SYfoAV#kB z2)~D10r0*wU57ul$3FqAP}>dtAM=_lL!(pMbxM>@oGcD1P&=TKqsu{|^}zm(87Xvn z-@4;Lw^+~AO`u1WVE!d$jUp;Q;Qm!+1z-ahBiA}y^d6dU`VEP04Goi}!S-SVUK9oR z;d%Jai2~1ZV8RJJ7+@j?JA%{UVi*DFN}xFStsQ3m6Tk+>J{u%3f&r`2pb3tnI<#q$ zn{}+LyqX|cmc(I3gvDC~Pgu|+(HjEokBd8ZZlJ-R$ zqy&5kIRe~lk+v&wc<+QaY#@z{MaFcpIk&LtxBQ3kVhx!PyRx z^A=wC36ejlO&{(wx5d1#8_-+3AT7XX%k_hKkI$MCX@V;uyJVpxi0IQ;0q zkno?y9Vi(T&B8BDGMs5Jnnvj8@VjoH-WlL>7Vx%#rtG!cNHA~pU{%R0FFd<@?_r7+ zp)3G9g4t>p3LN2p_WsSp+xDz(v<6o>M8{o3m<<4d4?R^kX1DWKo-U zfQkv#<970_V1c0wnzd~}0)H}uLEAErh|vUQO@gOP*f``@IDrQ4na#Fk(agmSq}ygF z1hWqb8mI4&U`k>?tW9(1&=)o&IAl1qHPP~oK9&7MG=7zZd$F@P%9M7bz>`rjwhthH z&A~Lvz`KD;qa+GZ!9xJkn~)W@K!^}H9(fQCajX63TEkD~5M2io1E`%t}fUGEsAv*y#0yWyvp|GgEX)~Q2C(nHhpW zy6~a!xoYzR3Ar{JTbO-6Y$z1(fWHt9lLT_2Y6l9$z_qdy^Ou)0Ze+odYa1%bEeOUX8v}r#|-80!| z>Vn#wuW1?%#lS1Icf}Ax3^BxT@!;YnSNof5)V4zd3>nkxeo6PiLxv2z9R3Z=$jH!j z93@1O(eXYuq6Q$`4J><}fA_PFVfRVHKt!dcB%jcvUgP%E3ck!n)F~kr<3^Dxw2Q=3gHSCX$ByB0CVGx4* zx|?skcKkKhOl;Ajg)B?x>#PnysOas)CMN2&#S5`2q57)8nCq~0Wr~jZ47T8z+bpt_whI?K%j|(ay3H^ONty(R8`k? 
zj$uifGAu$?WJwA`c$&el$c9PE7%C#G5Ty>$(U>maSV$T&BG~94Ld0VgxGt{O@ttjUtBXo{}0a0*_frs1L?iVle}4VC8@JKgmO zv3aib2)}clYYhJ#h|P12qCWT*2KlKjcZiYo&B`}Mp1=80?8uV0e62`9c=3Tdb;%K~ky zx&p&kEsyOc>U8-+s6%bLmIl%2g3n> zAQXuxCNXy64Lm0oJ&AK5YN49I*CQ{>jV#<_TZ=GZjM%(snpa^Of)ueUD-U9~h z+Ou=S2pJ6%FJ6b1u26!cYARhk;9hK*1tVwid)9IUFYFg29~vz~G{ZQS;X z5u^6+*|&7rDxPJo7%?o)lhD0af0xs{Hgo05Rm(ec>fE7yS4(9k-7=}HtaRD(<(Zjl zZoK}Q<{8-V)i0d)F+GMDVu;})Lj53D`xM>ATCYnl zdH3DF&7S?u!g(_$-8`OiFcDHH3zEwgEMGKx?)&e*CwSZhFEmU~4ON|(H~Zt2D;KO> zzeXWwXJQKLaOdaeEnhfy;rBD-V6{6XDIu+4lf<}%UwyKE^^)bw7w6^`b?Dl&Hh1a| ze#O2$SBh zVL}{TRbH@b+vcRCBm!p$=}I`*tM9-Tt=ql%>T9pPI`#8UKPf0I8Zc<+@Zm$(tzGr< zD=&Wg?YEGqR8{j_uQ4e(~+M{<3k)y09vjhe9{rI{A&iOn>{ex4;U&@6ks>hLV`% z6o?RM`dnUCBM8AA?@nm2Y{kmXojN`F*khl4@OObGHgDhNZIA@21FQJGPO!O&Kv+&{ zkY1ZR^#{M%o}brYDjHh3N4 zya{7Q4uAEPm$q+MyI}r&Mbj?nd1<*XTv}bdc;%uahYqb>xnbMp9T_c~HfWUW5Luq1 zAS#k|Sd@bXLf3SHG{7(pOA!kFiNh&MH;k6eTLt|7)vMMVK6F$y)y~~J`2GGR^A{gG za_o4{@sJb-Di8&RrA&(DLyGQlxM)Jh1@JgVl@x|%Ov|EKS`c{KBeErO4yQAY=U79F zBqYSArle&ZKC)%grlKmp#xU^?ddr%{hYlUxu|I3xvgPf&bajb>SL6sKOeiu*(zwx# z!{Iu9{J1L1x8HW_*fFD5XRa(PEua~SFv1+E6Nb(^1kJFRe|;s57-EPah8Qk@`b)0B zR=`IFIswF~Dz-O3AY63IL;|_ih50!gg2Pnh1+%|9m3LzEmM!hO_v+HCAAw%_t#HWC za{_5OD$0YJW;vW*IUJLBu zxOLN}VMB*f6z%qU$}7utMHe{%!ZEx)+(e}r;y!1OJI54 zAqpaxWgN$uCi+dTt|@==$v?Gh*?RY$LodDjhH6l{X$XuQt}bL*p5UD|x~_mqK^aOU zBrqaRIRjOa8rB(9FCw?yI;o(fWZcAwH{3jF@bHmChYe;Jo#W_;BIzdZx?vhhoZA`A zoET#GzXX(XP16)by}+ib_B!gZ{)rgsE9xb=MkmVv8{xDXoZ@MKHHjN8J7ZWlu{8Rr z^?bvX&{bWF1g;%BYWRT5CR{VIQ{Un5e(`OC#01vR8I#coQ6}6DSE`rtR2Tah-q|>< zfrrusT1`rhD=n+GbWx>v30WK%oYF{MA-GUw;elP-Mvfl+?uYN)bo2Fthm0tzRve;} zr-Ajf<4@y-KTdk!7%+q;_E+wtjst?cK9So7OFHE?$gZe;-U$ipYBH!5;ve zM4`xP?MhxT{PUVe=L+-sm zy(}#&jmXA*4?cG4bZ%i`QGQXCqFSmPZj_oh@bZDDi%!SIIlFc4R&u)h)QLQQrN41T zQxHaya<8794;;*@2xt@|217uv6j-e$&C;gPtVxC@hjViCjvhZz69`7aQvRvDPMtb6 zY2LKBq_m)*urM#Lrn0haySC+(RjjFZ>(qMP)~&SQ(G6U}#_rHI8G#9f{KcnFaRMtk z9hK$ft(!M4FD+MOQ{)}RMa6~1TG@87Y?P=IFc-pM;`%Ko^rhPXh zthQ{@T#iV)wrtcTzhM|PLAk23niT|pRYiW@$(+-r9WNVLR!}B-JYBkWT)lc#ZSK?` z#JIJ-1Id-5246nlsxhMy;^Re8bkrfOVS}s2jO=r1Phj7Rn_6`xB5OFYPnW|H1r+=L zFR%<DAqm6F_a$C3P+ zqJrc5cI`NFAb_np?G+pzugfqj-co5p8a;B<>HOT{^719i*Ho1&YgevbI_H};YgT@@ zV0pDavUd6M%X@TusHvL$-4_$C8($fyh$zyi%P*fZ>&J*pJ@M>Y z4?X$1WR~sQyXNI-Z&U{(wYgJ&5aZVW23VG&qK}qPI1&nl>$Xrh9F}B7RaG2+@cRKR z1#n!j79bONulCX(0WjpGGtvPONOYq?ldtUTpp{^g(U0GGS)B6g{Q5Hqz<|H_e86t1 zY^NWXl}c*SXV~aLZQ_54U-gJDZRArAj!f0T>L8hpW9}c`_}}AK%Q7i3q1%X?C-h7X z$IRD%3H8B=1f-9p0t#X9-r<7>PJQw|f}|saNRyX{imt4zdi$?$@7%F1 z91c3%Ua)7Qm!gPBs_tl5RaTY4gyou&xWq)0k89ld(kTz${^QrLZ^&G!8LUY;oSsyM za)24^3sr|BKAxdLp$6reW*A5hWJlOnqZ)czqYN+*1HquLx<+(6NRxEATn>l8i$Wj} zsI07HcouKuIFcfLHGWmoNy>1$T!J7%Vn`aY%?woguz%L;fu|rz8cg!C(h@~e;}a4^ zPSiA2(REo?4MPT9KR!N=VK~1(z_K*N#W1uY!ByGmcG5I$m_{%Vpe%Gi?2hwNBxx9C zI2_V+1!Tj1oF|?jS(4?7%gPzXWLeYAQ*oX+MXhIJnw)MAl((1@iF$zxI*~x<*ff2| zq#ILGlUei|hOdAkDURbT!UAT^IdN+4{KY_$Ec-hOC$NDM2qX-9{NcgznucCeBb7(i zf4}fRSYZDE69RZKu66GL0|vEDv`Y)kIm*_(^*Qsz)@x5*J>{drmwZzF!s+3cy`JZB zu@^2&AR(d6WdjBbXqkW#Xo^0y=HvN$D;1MtewIM&d=tHG`wgPE|G3vr8$!F0ci%k9 zz4e3V7UjAHwl>rm7;2E|{Nj?=`3J9l=9nTnT{phBy~oVP_pMFz{I-{}UjQUQ=MsBe z|L1#KZK4V47AXam&Y#nx*JWLMT-v35n_k^J zcS67~Y+a&ThYlTw3>&t7<3`OiNQO~mB`W0%98R7vgK6m^>#$n1X(ftoiWUPtnI!~5 zr$oW+iBC*SOyXG@{X=kOe4M*!lcp(2DWV`kK>@l#hh78`pOli(q#0=0Q1j#B<65CM63D3*}4{9P#n-2?=qD@reQ_S|%zq(1?IGkdm6} z@p^3{pj1@>EwM@S7LA)W_jtU#Q{a))h2-S;^z_CV85s_TQxNzD4N?I-FF>gk9U>4j zLKEko|c}Jl;ZMu<#Y9q z7-IOp1yE`2u%@aS->lj5X3qL?!^W)*Qj-Vv@8fhibp5x>r=^AbZEt$})j#(nb{{yB zody4L^GX8-1$9D|LVg>8u&nB~t!RIsl%-HG7{CibNypKVaxf5*Yppai9i>oMQlqvd z*pHs3-TL=v%>_>#Id(kz`0-IqyAUp{^8Q8<&VC|`hsz=2b 
zN;oVlQpiq2AP|B!2#2f4;i5x_N|X9r)=W`jEyO>Mi`%y_Fe|!@Aixp^ZoBVOWx)(a<~#re(4`ud6C!LbWj{hBHV4LiOu+N!H;# zrwfZ%-Vuq&rb*B&4fWk#iN~Z_Mv@{(uD0zC+8Nl0nx;WCP$}UoPOb$r5elo_4FDk_ z2;3WqggKsvB)}O^)qx1@UsUM8+=N>gmIYfAI4~RvaXbftP&hOv_QzxZgmk;X2tx^x zNCX~GlJG^qzOIAm$w1T$%hc4=2)sZuH1?!Hw%N&mV32x>CI!*Kajc>!V7wAE!Q<>; zA{-2gJTFNq8vN)XxjrH0Teuzp>d5)Ng`-8ZTbE8ipV5vdcnhIOWZ~lFg+(QXX=Y?J zib!&k#u>%MCFpJK7rM=C7_#3vYU;<6^iLmt<=f5svX372HrPH?BRQaFj_n z5-z#={u{dxhxg{IEN$tUw?(hc@qzX8zL>Y}K=!f22Tztp!l{?tFuY;q{$qt{qaN)a z`2L$M)g-C+xbL%l!AWb@*Usx6)K-UMsniOt#<8xQk%>H0g3C^>lm^MRix|H0%shDXn?Ebf(xb2E7 zuey3*+qg)<&f^7%-LJak{>SgS_R6cq4@@mOm|YT(G$nr6l)Ex~o*Q3z;-)LdjvU%1 zxOr8s?qDsIW`!2rhsRf}+Hwl)>)%^PV!nl=xacicT;c^wwq+O`X;?bY9_W-NdblS{ znlONt;QgHqc$Yvhx@N!zyjUyeCs2f9au&yM9!@#!CzQ7^;vMhEC z@jTCQNZ4=_#O?*#*UlM80NhK`_UZwkJz+X&!=`=SA+Q2Rkd$u9h8l5-w59`ji!_UU z)G@^H9|zPZj%A_t0V=DiGuLiZB&GjlecV_(&acaLFci7bsM`l>%f9&LSUBF}s@1Y- zlTK|n;M$u9JF=I}oH1i*u5kHHldkBRMudr^)`RYQaDcRD!@O_5TpRBB*xw)R8!6o4 z&zN-mWuBm-8>G|Kw(pR>&cM+i)}>lN1=Bzsv!R0Kt0Z+CKA>YV3+;og&>EJOHeu>> z{Q|4M_eDgLLzS0x2Zg5ie`UUgm&Ykt`%<^WBO?|kF z-*;^5q65Wh$=;>27OdKlQ%wmP+;2_?5Iqjd4-382(9I^yF%f z7NkjoCh_wsE*wJ`H0%HeB70z|21LUmpp3&W=t{q6i$qgg|k)vFk;bTj`KrDD{pNmrLnfNendjK$=stt#i?JyV$cYvXRwC&GP@M~91 zyb+Xqd@?&&EuweYQbAHw!Wm#o^blPD03MS`L_t)lAhFRI3-%RT5&#aOapQ(}!3D4& z@X!Er9^FBK5sSx+1zJiBFeKrbP?qFI>EE{)^w_bnTI%DyI)`I$2{@=WNT|xbtXXkYgQ6<*EH! z=Vfl(xbTN{Cmo4R8zehHc?Y4ObTmtEM5fa(kaY?=Z)5Ps@5jH{L#ZCoevtzi3)pSiE1CnA4|Ea})oA!C+&Q)2PX1;b^XWHjVY9Lzv!lH7WbvQ-BS5vLg z4gv1Rbw6r~iZuMtCTRW|nYDY&2!dzXqy#TKR;}0={ux}{mMf5hO$ESJ25=z7StMh3 z3B#L5!_WfbkFrOj*;ImT0tE$4TeNQJsF#VK!Io(Uw9QN0XF(82vS7p6eXMeKHD&A^HX(E5O8tBQV2lddBl`)v$ltKU3j0HK0 zH)D?BW5f`{MFUOKOUo-tO3O;i%0r4>(G~FiGSpydM2-1UjcCrKY4;RTP1#7v8pTD>3%r$r)YSbSzmhYrP?!;Z~4G zS+cJCNoC2&@2A{y*Fz6KbpPFxZoKWGPqs+4A6@MfS#nKL zUe58XtmDTEd{93PDR}AA-@MRh-D3~jd;i^&Cw-pn7HX9Ui!jWeBETE~9*pilWk1sf z!J(`uR(g{bpiV?jAt0m?KDj;f-AAr|;N4ZBe)m2*sU2CVnT#liHWBNJqJc3%*JdmQ zWNzOEvDAu&Uy|e)9MxeT$MDbLV$U_CN6OMIG%*Q^Hw;GBt+1>qny%VCs$hIUiGqt@ zf5K){j7giY8So1fS1afzKmn(4X>{yiCTWYIaMTmB66$~yjn*R;T=;`s(YDQp^oN2> zlOlBzl4Bp+rw~Z2QABs)3>I@pxZRHp5}{s@1c-wKLi`XdDg~fyLo!Sn(gU;z3Ae?f z9tmY0Hq;v+O<3U3x&;9g$cXw4wm19rL3|hq=8qi+qJ#4!n4~s+njjKnp|1)I#~CK0 z0~K?GZLGs-$S6=$`_P< z<>gr=bc><=dQ*ib3zCNp9eZhGUrupph%!{Q*M#d@FmzR3Ui_W+UL$Q?aDWjxR@*!M zhk_wDzBr*n-o`b@P2t>GGEk3c2EPX*aOyzq`6$3JB@Zm$n{>y01N^(U?>%_%(6OBS ziZJM~KZOGcOsxdQcDiaB-bPJ*$JQJ;uy@-w;)Xj07&u=5!ns^xN}Q*vqADo;e6s$z zpeQ=8sGw!DmI{ucjGlv*h-JBq2A+~rNA_*nar97yk&&jI%E@I~^y%HQiCS>-bgMh> z9M@1eng!y2{vc5Rx$_+Dam7_ul%ky+!#|0OJsSXfew*hW`kFX&g?6$L)@8E|>GSdGx{nL2ml{n_Iko`k5u+vZ7OY z1%-z7Oq?Ea3#q{BJiVPMg2wbXDl!2c70m znR<}IG4z%ip8Ng`vU!tjvpzn>JAWF>L=d##fHVr{Ok?F-30@ZuS}*VMCb&3;wD!C< z?!$)v$f~XZGZxJ0C@Ct<-@W{AS0$a9-9s>f6D06wOW*`y`Ny9Iub+3iuu8JN|KP4}lF7L}E+IPIm*SmXOh;WWIm2^Vi&vifoHm8=aanf77YmUrqn|aHFS|xA8=kz9e zgjyAV-5u}h-J&dE%L;jbr=v+j3zD?Q27?%zu#sQ!<0kSZ zvNB_vp}>t`w(67xL9v*!mIjYyQVx(un`(5d5yWd!3{WKqf@=_uNozW(MTr(fKuT=> zC1qKzQ|gfzX_ID6#vl!q!Fe^Di7tx`q*$<+4TF*lMnS!RAa#p{EJ0lXpOj^A+KvGh zyMZaMPl!2@s7JV<6A7$ZmZj-Bv_DV^;Wy6fzHQPC@4WY^rfJyKgMqMW_PMlYN=nlD z4O^g+{NkHJVfIMe=tNd{r)XsI3^YtSI!48wP&~!345@~^V_*AVP{Gt`-|Q|4<2Oy3 z<2kTIEmJcLe*E9o-&^@euNQVCyZO4aNI zT_PYpn&mibBTzp+UxF>zw8tMo`k?)xKt%=)1~HbQDT;<|5~P_Xakwt9KN`|-Mb&L7 zQNcLJw=(tNO`K=Uh((L#KD&kb0SfPi=JEo=t7rJ4AzyH z@%J}g-?nm2e!)q_B6)|SZ=ZgJ1*hlCo%_?*eUc1?f|@J|ZXS$J&0tvT9QQ6zouPi= ztQB}_tiX5`3Y-DyXP`&}k5MekW9hR@MU$ag%5sEZ;3VsGc_5^ssP>#u6&}~+j)Rh} z>N>}9AP0>>WHZzsqj?4OgMk{su&QQ6A|W1(N=suY(;;xC#mJUZQDsJ#1(474I>|(I zQ&)9WktovScoyCZMPc_C#0A-loh3~<;2rz=2ZM`VpZ?wc 
zGWm>yB=js{P(A;$`pcA$2j4s=nC$pj(@`tvtTg;wI9xd|5kCdLkOaW_DZD_cZP@o+ zNQzvQ-{<|nk4>HyMtqSxXzKhAJIuc3x)~MTbNnh{_zT5#wnW&+9m~-9v^Mv=_GO>- zcMSR9SOX6>Z@&!?1c;Gq6!n5!355ifbO{NK-hX}Cn@`+Tn13Qnv!pv|^tc-a_U>Jt ze+rmIw+#GX32aFbM~>{$&i{(F(G|^~`hpm_)+789x&91LeqzGf+o+TisBvcIA-q~E`^ z>b0(=hrfKd-)s9C#r@0P4*_7~O5vc!xZ<3@k68gotub<~L%pz9fcQB4sz?ZiMLTHT z$?@on&@>Ki-?V7n?A?2J?%A_@KVt8$J-dr)YKDv*@AM?6}hac zA|gfL)mx?^NirCtk#N}O^92LJh$L&8K5X#N4sBYM7Z>b3l-0BUur3{13x)(%(|HBbtq8K9Qggd>Y6HlpxPg-F--$BY$>8vRr(^42-v0}O&@&4*z}b6g6u=P z_8l5~?JX^vW)MpF&f9LkbG>jIl+w||>H{dHfajKv})6OG@4^MJg zS}5SF2?v6bEXzKhUlbjoU_djpva<3TU$q>nR%BVyh2#e5W3RkCSW}XdbuuL>VbsWe zo)pLBqlcy>r<^{1GB>Mm^p!VsZrh!R$oJiTgM&AY9yp$pbNc2zhm9PS zb@b2=vuAJGf4pVeP6=cv_sCw2aqK;s?cniq#g_G3ERx_wanh}~-h0nIV@8kc+4GXZ z!jj_Rs($_ZKlbngx+^}Nt>K#XVUp|xy79T&fpYf#a*|)!8 za$L|?wSWHs-WA`j>m>;u@x#AOU%%rB?MZ9i&~tpxx>jvF-1EqvuDx#JHRHyCL45M$ z$w5N~9XfPq%41Wmy6URFm-Wp$QRokI9ox5Pp2oia?%((9&Ek2gVIyx@DAcKE-$Prs zE&Xop&O^D)+jdS*P07nTc-t-G-kA2%wyj4G96T~)c)!Bpg5tu8+6<~Epjn=#V(t&? z2h0n(Km3n^Vt9e)&K|}KWq{&Dr_k*(Bv%-J=5n}ESqo@~_E5VowZD_Qm z;qs+GmN+B_zm1}oBbF0*&}&6r)qs`S_Dl8Z?amdd3g? z_8k86l>40`tI*!zBgXaa)N1zEpKsW{yYHpFZWuGf8^<$VZy>A_D&ypx)m2rko3@H~ zxQ`w`uw%!D_{4aIVNFw`D5ldTy?gc@TLUhtpgwgP;kVz=?x^ z&xJQWrSE{t{`Srr%U7=XZsrehPQGW)Zb6ln6!Y-TZCf|4S-5b?zTLZ9H%s$+9V&=p zc$}dMwlht8JszGFXu>j0l_YU&5k$<=j5jW|Nt0$Lva*&gSg>d77AX?$+`W4!>{~c@ z*4B+1ae;9JU)~?bUhxi*1NC8H~5zS7BC59Mch#`hQ4)w!w zB?$C%V3LdprYO$%G%cL(f)`*1{~(J-c@%!ZAmb^EVohLW``lS@xIHeH=dI~)Z{NCg z`SPXd8BKch?8CC0s%w?L3b!b-no)4NP-E!C2B~r+M3E-LA#b}xF&qi&rovd9646b9 zGAJ;k3CbjPu3Gob>(f8|<{z^bd^d1#51!Xk6BA2{szO0a;Jvz{tCDOP2E~A#4R$p= zf)32A+kPG3zda|1M8}ReEX-sWisJ}|GigB=1xpF|UwCR-r?&k!uHX6m3okkxVjvJS zOtZYA4D5B$<0z{sR!xP~X>OvFgHd z%A!?G5u7fPacBl((4>0a5Rw>Th#`g;{vBNWlxqi93Kix`1K9QSPQd)NpW;l zrH|(wF1O3Xsjo5^hBdg_qRH`sQ}6)C1La~LwPl4up>^xm-Fep?U(WdA`kSvCGIVeN z7c+O_sALFWDxqMAWkud4N@~cmq-ISU78aDDZHa@cKv@7$lpyfBW`x6Gx7T6PGRvw0 zW%;XWsw*P_pYhU*Z%)4b?(|0KQy%|QN=mY!8|7uCGy;91nM%aVQemH8qnR)WG)a%> zV)0odRSxXlzw`OOJpcX7Z!;1S6o2US=_1ja6d^(P;Al=riuaV4oUYBC`h#B@91uec zF~o2&pgvfxmc=`rIv9`We}s;xQbd(ioEWa@s%9vvp`vN1Ns1~fiY)7qFh?7T90I01 zzX7hPt_B12`WtTCw{zRU1N*abPKG0@X&Nn>w7g=()g@&WLL$|tU)PAgI_G$9VL@eb zgC;Z$#%8EvyNnep*82lAMGB0iJ4BJtOqOC)!g4feSglHjckIjFcf6)jHC48zq^48H zE=}9EEGRvpC{@+vl?C}_&0DmPjUdAYyL4{8VciyyPX$%LG%ZyDPNc%2h^i{ZB_*6! 
za5ZrGtMfXfCYR&~tAZZh5ubnZ$c}Am5A5HcoS0-<7RxZ+I8PuDU`U3g83!b&>Qv$B z9K(~^sQo3CrR5nZZbPa(m75)o=)sEc$^FM}xqh72OK;r1{#0@Cp@NzYmyIl}3VU46 z&dt)ctz1}}JM{-KZmoBKa%&i-5yO8o@cq`l@BevR{PSEblVBu0ppmZlw6>qW_wGk8 zKYAiBM`d`17i`bDXsp5`K-}mO6;Q3oci#TYpC3-} zk~_C=ef;TnDtzjUuij|aI)!w`O@HlgKYTWep*g>%a02tjtJCuHPfvSex+keYSS4?n zbmfar-_4r*hP4Nue(o)UWZr)B`GI{qONtbh^;>VbcmKZKbLM}ubm_v+KKVMKK}K5h z4)4AG+T<&S5Io)Qx(QECdB#IBhxe|V_S&=C59f{=ck|=-+}$c8BNz@pIOU0*yY_|^ zEwx$NocZ(P++I#o-g@c2AAb0Oa5SsUpn8JXJlA@J=sZ`i$5mP5r_RdiShoN70jgZ{ zCXMDTTITv`o-3TL4_4eCjf>y?A&@H_VN8~Hhh$>>sL^jeH$^jKl4DduCvWAo@FU9PSzDemLTukZVx=5Mhg_J5N5R& zlsP6&;N5aW)(wea&4?6MR4YBLIT+}=<#T&PRiPM@XDG48U*QCzq)cBhBC;-4rA0wC zX`dk*EFp3%84QI@%3+cM?PPV*qTn@?1Q^9mmtZL&mKP{WsLh~yf*85hBR~t|ak~=Y zJhjIpydxYIXxRQ+VDC5HQw;S8Bx&e+PVOl|5LhrBVjh|qHsKnB3H{Lj^i6rgOO|WyG zwrdaE#S~Dwogx}+UW>4ooH+|PV+V)~ITo%`s;0xUCnq(0>dD6!%$d1$>sD59;80+6 zm5nal3*f*laFxcC6DG$p@Ziy;*h#|WWKnGWL5y7MHGpWLqC`Ky!Vy-M z82)|rY*Ou36~h6$ z{fu0ZXaMm3AuQ<5EQ=O!Fc=hg7K~%CsmjtV8qw7qr1-+s_4j9!)mF-u-I2d2G|3Y022=F zju*&&WX-4`$Q9EvNt1@(I?9hO)Klve4Mf$QKj*F4ISInxH=2RxAgT(GxIjWw75H_4 z1dwxvvu#xXj!6*n%eTVopni(GRu$kF9oN{ZRRth*a7Gkkq2$@&5n=_dt?56)$N30000y2jIyE>A{;Ip2nYzGjI_8a2nbjN2ngsP4A^%G)+hPo_YbI>s+1^5{Ve|J_YH)V zh=K?RNK+F0y9wm?J*=~|jvELF($Iew=$KQbIS9yCnT)uIy0_s)9%S&6B=*?;#Oqb3 z8~)XmqDu!BUBvI-nP*yDXR=!cRem>c43N69-PWDPxH%nAzFKU761a2Tal=xrKRLbVuPnP{C@>uJbvOI`_dJM zCR^_Rs;X)I!JxTJn6u$y?#}Wtpxju*R;o4hR{w^^kJljunIJN%0Ly1ATPDo+eDW=H zlFiA$j&ehSz`pHt27`Y6)O25-H)no{E!LsWJUBR*V(hLp8%tCGvV#gFj{Y)c{?7v} zVj&+^@IPjG5KnP1(f9eHfm$l6n~5Ki2t*sLHjCEu-^K=vnXS-e(jzEazn=o8`=N5a%xdfHnn$Hsi4ar>djoqFzvOJz3W&3& zAd8GDx5f>(J^38rA|dv(*qgvWz?E@5U+XH_VCp&IF?4Bgv80)?V#b}-J$w4WJI#L% z>yszn2?mW*QN{tmTRV%JYo!`y>-0L}9r@Goop#Ub-uw-DAZ}|Gd54NX*IOM~7IT{P ze?aKnac9L0^!}kbl5PXfx(7=WzA!iYc3DOQ8OOJ*REHz9SbFfX-hQny(9~4aeZPo) zn$94ou!p##xpe%v9>JSGFlp@g@fBWRTfxHOe5)*8*mC`XUP1!hIC0nXj<1RK+nPLL zk*IBac?xk+k@L^m9(H^ri4OdbYMbiHXbTZt4I5Ds4jnCXT`h0XmfxjPXhJhPy}#9P z6B4Qtu$iQPNwD}~9Em*zT?DVN&W+&i`t6`+LF6jN= z(O@sftjSnyOj-jlaFGDnzl96;7I`D2KX+b}i^}iv{PKzLw4kfLU^|By7 zk&8~^%8_4SkSaXGO=ZX=*N)vg$!s&bKH0H1uaP(0Wsjr;01toa3uxCzkli3l>9~D~ z?aT&V+(?wya|o$pUihqX9I;%c|}yzWn`LWEN3b+ zP!R=jtIO?L75|k+-e=3Y=BVi(S<*13n!rR-0HAV@n9FE}7b+^Uj#-0CZU- zd*^T5$CKq2n=M`T7)Y4^CJhxlL%~-i>@>5l*arq9M1< za;Fv%nmm?NC(>9ap|}=fEXAFDpvYgyZD@S+?H+H_V*k8fQWOk2vzN8cp@!3evHS8x z1GLzF03(dRTaMS!=>aeHsw;5EO)*YE~i<=s0=Fl=X=P$m!o z$^YxbkZ2|6uVwT3gmnu)jxQs!vPN+oJC1K$!|u zB%R*pt)Apkx6RgVUn&S2O?)A#qxauH%c|EUvsqEa9-9hNhia!(p187K7sTgwe)7dZHLi)??chh?Gw!<3MRC^`(|xv_gRF$d_p{3H zL2v4A*^x`&X?O^|i%W(B^Nabkvsg2YDDZNBj?%H;EuZs!V(JHK(}yF_MNdf#Eh+JB zj>_r7H_T@@K}pa}FG|R%Z$GbpN`z4DRp8%+TSM>5U+bOH)BZ3;C@g-~f1EBCQ(nmD zvyQKy*piLR!-+~(47!)O|MqH=oD?^D9F84}T~~Xoj12Rb506Y3YW3OQXftQ<`%h!} zm{`nva7ko%EbaaWo!R>+0i0EnS$z3Q6(`k$C|$y5V2}l@Wi-IiMtqSLprWrvC(reT zgW^Hb8Bx%e=*_?Xc9P2HAE9@GkD?Iq)-j|{jK`BX`h-RZTt>r;vv<-?W0 zO@bcZSC_ArOfWHiy}+tIkIA1kD=qZ5#|RjUx!0YX8q4j~lDY91Gbs0ZGMiG}l-|=- zubrUx*%~VOp7RjwZJnPDOJj|v1W@{%Hl}YFU9af+hfen%GpzydFVjcQCWQY1a4t>q z@8B`(o`<#Fwb)t}Ak-wCGEmr#A5%Sp)u>W*g_a_tble11#;ao4j(V-Q37O<6=Y|e0 ze>&EK2j5fgsbclo-^R1)>df~cDyW~&bqi5vhUv#mR>xhh)6CaVb1a@71ww#tRlojY zq~F%d4b_hS*GPrjYJ*=5m&~h$XhtH7CVeabspiHri>J8Y;XH~Wq75>rm(}BQ*s1Tw z*CKL9Ih1+Myl8Xkb~$J4;ArWp%rd(u+*^Q7Kso=@%iK=j=iRGE!Oj-nHa|thQbeovc`t#8Cy}$$^s=+N{hBiMoGWZ=IR><9#a&?$sTaQIenn{m1S>w$YPs{^n1n zG3Q$??Uj_|%$%H*{DnrDohyOqBtb5}#l<}7!r4|xpMY{sBhR^A_Gd~2 zZGc{b4TIbdDb9Y}H+^UfP{4aeJ#zMj$Le8Y;MC57zY1URNr#3~YbiBG#lwM)9$Q7w zoqwH12YFGjPQ2D;wk*K|j4QbZS5xXM7URv2n zb+xZ3BHXmDMk*)s<@*&O**J`jO}zB#9TdjreZ9RY!Ia(<_dgo&Q|roq>l6I`ER1Dk 
zB##|G#dtGbhZ!FO{zEvJmm@y-8GG;LvqR7BPL-0#OTJc{`9i~QhQQzFcj5P+|S5OqXQICK71RPJ|K4uh*>0Pj-HWBL>Qj`H0|` z6v!3q{jW;Ig@pEOOvqgc#!PGZDQboE2hy?f@oO=79Ls+Zi(g7T7C3y0Laq4#m%3B@ ztl%1rPQzI3n5?Vbnp`bQTrZ_jSc168i56xaDlIv~SxNS)X}~6Oo2)N2y!I?h#5fvU zbn84qE#YbrMKYA)5cc6GWB5-xm^OxBtk^UI-SkLD>e2+p&cwJ#X!zj&k1d*%iXQT{1 z2|i~6{-dc9(FG8*u8o0=xxVg1MB zzCX6YpWip4*%DK6r^cYBo6&21eiC@Q1H5`OiH`nf0m{viWXJnhOY#Y2lsqnazM5}Yx_Urvyi()VK7>~`b2!= z|Fe+`Pc3lE3cGdEk{JHoQ{OMWczyflgG&8fE5ti@Ei|Wn(2pP}P zIQJaHf_TGL%~rqs-Bo{t`(Ca{WB;Y#`qqTkM;w3gUx)i5+^{3|hKBA#q_Rtfn=lfq zD`;R!WQr@OiR3b($wcxhs6a4f>tNY(T+x2_>lf!P(U&ATp(>e&EcBQ6z|F^g^tJG* zWy_F(3Yesxmd2l%01{9k<{@VGp$+_|!m(yRJ>x{m4$+3$325h{I~j>u*tjh@J+L42 zU7*eYfTLE}H2Uh2PssdO#Y!)w5LQU!SoBvmsd#{T+S2_Z8HPP-iQ9u(9SsLupneR8@^bW9EYzdMUH_ znnPrdN%MAH2T!1-Vnyg_7v)5AN3(vWdFjYl^1-+wPNaoR`EP@2S~yzJ{|#QUQ*TH1 z3O}$u1m}0_KbLni^G@khE1#qW(Uj8Yy5Us+zo;qH&*nTUws9)v2TYRpXygnOby!Q8 z(z%Nexmq%4Q*m+1eHy+O&Ohlj`KptxKvIjVZc{~R=uZz8__j?dFcj3t5AV7-Fw^l? zzen56v2Z@YOglcX7kwKJif*JamzInr5oIg5GdTLQmApz8PGO-D4R)1d2D8Aqd>Am# zo=~pSGK4y#yb76?!krZ+;j-m}keIG@76e zpymAnq+tA9z-`yT=KqIY<9iU5__@McbcgTf&k|WzT|LoEN(YDg;6-{Ed7(r33Wxrz z1io~${a|y@Q2dUVUfnAmEy;@!`BzcdKnCozKh}=pdq0#FjGso{r3KSI%`9p4i&P?^ zgvFT8l&%Di`71z?I5T+JR#8qtQ9<#58_DMRCMXaM*E?#dbKjcO91_gFO;lkIBB? zi0L;du55K|{Tmr`@_!w$Ys^C4_6=Oy4Q#Vpw%zQqILDeHMYfhee;<NHYpF7B-i!ws{a?;)Y3yX$ z7cf!aSWyavux|c&_uO6qo8D*mlOz}-LaP=(cSBx(&9@q(ks3kf zQ;{X%<#8srkjp6Gad{s ztGtr-pjuYle#`fU-PaT9fjTq>LQY}SNHGrby;A=2SQkc@>uMe)5fA9Mg3jmZc1s1QDg<-h5Heo=6_~|%W)cM-%%V_&E5(vA|_ynMZHbltAnRGlQ0d1O`5OEpDrZS;n))-?s!A~#N2fgMRmmm6at8d`id z4wb5l=XNGKzPJc76;DM)o@|n2qUJX~Lhwd?RX7DB&$f-J`%in6#(`)M^~-O>&NWiD zMu7KS3KtJ-ZX(=LHCsyVruG_Zci7zhTG@&aZ0y@Eg!r^3r!z3Sggs>m9UUVlryO4Y z)j|xT>P${-n#-981ux=atjvHFw~T`vJNtWlkeIDRC6tU)kVH_BkW>$cIea|*A2kbH zY=R3+tL%L4az?Nz1$oy6m|Vz6sl2CFZ4-Pq$V+7{8R}%=Eh9`!Q*&~66>S~Mqo0^TV{uZTh74DfN8O4=pO#MrT?(~MQt7c05 zAiweO@X+^NDXA#6r^}_;6V2v^Pok(}JG{T@rR%o4I6Cx~#e;m5A+_3Uu`R7cQ>Vmp zOwsnt2rQv&IfiudBs1JL@XK)vA_X~$ds1SyY$LDN1oKFVyEJPorlSYguKEsF@EWSLIyj%3 zssH@k+H6znhQ8{>xss9#1j6Ckbcp4}(RS9~&fd3@^8xRMp(+wfq*w6M(y^Ra?zPDvtw{AT zEOBF6CbRL8@bod1Q`l@NHmnWQxTSo3*?E?{@_)ll9>_||$OhB>!u)|jWOj@L+~>QjnE(FM19hat~#>{U66bCKQaI9Im9Rpk$HK3z&i4*l_sL)r2 zX-jIRS~%qMJko#=pTxo@vdf5TaeKt{qxms4i*8adK0@B*Z6xAoXwqrKqFYBi*2Pg0 znA#rnmiaM~p&TZWaR^}zNG*c%(k6q2+o+?|MSAH$JC}_%33z$CiY?&Mml|c#)+ERi^KtE!tLzt70(50Px8J_@?HdHOSeXkDMi@nrVIo@? 
zc4mWp(S+z%u1?w910WC2dr};h2%--g$44e}t96A6FKsvht#Xuy_ete%34Y})VDL0** zh}^)btG0Z*cATN^nZ~an&vA(nP=Y^BqRZ|pSZODXu#~c}5$V$4AZ{=m3uZ9Lk%JN= z6f_euRWvd(us6h`DgSCW+UT*^YUgskrdD@mL0`g!e@SaeAl?K1NLD(fQO2M8$vi0% zYy{WT+30SEfU(bUDhx1odD;s34jU^h7XqX-4c7K$&YMPH9n0z`=UK(YejgMvN%N@@ z_+}8wK}H0Iwv3R&n%Y=I7iw?si3W^en7I_oUb3?l6Wy=btOigx_SdEq1}|)-#gd=5 z|HN?iB?yITlHN9NTd$%Ou&hIZFCi8V1ZvX@1YgI zpxMWZgZjCn7^p#N=}P9xs9ES_nnThELdabegF{b#;EKSq$j~%};@+oYVP^P+@!BQG zM0@3Ei)LA%{vMC5p2{EmW2$sth3CAX90O|rtATZht^Udp+cr!|$qhLPfD3c1!7;O& zhbyQ2<)omtRKMkCKGkdt;RmL8^JyqTuD^O^Fl|6hkNxW9puJh%UinIu_yh)KnNS&j^*3cKB~ZRAEqc zOgU|8v~3$D^wvn3#XMYusu+b-3Q{@51Qn)}%66YBk@*B~+TwoQ0?StlZPXE1bh{@q zc)en@h)Q*XbC6>dc4CGLr4{)aHM_E0s`!2=oX?b;99^jEfjk~hv0=Hfgo>d{8FdVh zqtg?Jj+hm!j=S_&jrqNSm=#%c)A(3an$X6@=Kl#$Npy52ftxD!!4rPCFU0m&A4%(X zBFgUW2~CCT58nWb0w+w^jl~|6K7TsIilQb&grb7YFTpgDx!MM&uwX@2WD#R6y`ZC; z-cmbR@QxcQh6LY@iOEkL=x3E^ie~_ z)euu9ZC#Cjux+x7)0fr6>d+t*k<$;3jt-BZ*TcE#U}L3mhZe|n4k21bOi61nHq2XG ztoKx7+=oY$13QRP@4!&$Bew9lcdgL(XTtGe!qrLjbv#5ey24mDIHu;EQH%cW&`+tP zcJKcB?Cq9~4#$8lv90Ey*;1r~kl5TytHa1N|K3xJA>;96EJ>7(@d7|!O0S9Fv7!($ z8V9LJsh}Wjas?(PB6|smPNm~01T6X*X>H3(TOqmyC^Hc=)S)y6GjlW0&ST}I(o}c;E(_kXAi(oScFz6A}^XEi~%Nv?D9F=_v=XNl8gZ1q08aa zY<67VvHkM%XX$+kN7zP^1oW?Y<+{DDVVCiCkcTo&X#0nY-iyH8a&Zh9$Dmkh0;hY7E_NdMK zk3?XKBosngsk-J)VIDKX@;As16t2?cm@+`L%H&ZyiZ+WeY#5!u(zV}H>LlZz)lbPk z$HGmDrOL9-4o{bIV{f!!;${>jSGsU!(NQuo&X&GN`}x|+ejhdZn~K(=Fs^B#sT1HR zwAhF2!Hs07FiF6qYlPPrcz?4Uec`^30W{sG)KW?7(&~sKhTIl5syFRxFR&6Ys4nIp zCIvDn2vKTaL}V{x)coCO4_RtV{orR9Ndij_geH0zYkIg; zM>>Tw4ieVkp18(x_%OQ)f*mk65KKcxVNSGvk|p?+cuMwi<8*&Co|=Z{!(>*pW6Y1A z1xi{KE^1OPP6UHPp{4jnjd7f+YZ;_36xnxVnB_;u#k{(M@RwoO>r(=q*r z+u>Dj=Po{xht#|>H6w2!?FmY;$a3&BPG-c4u!Rol^N?h;qzh%p3WbVlv4`*~`{)IV zaIqMinM;m(5?5^+27`7TSXqrSasWw2M0vOlNCAc|M|CtwMiWdrsjPBF^=Z?ru0iHF ziWNclDp9gWtm!W%PM3LnWeXfeiGQ{dDi-qe`U=*~eCjF7jdM)4%KC9rlM;$4l+Xcz zF~Us4y9%6XE+QiNr0n__d4G#!?+}(O8l-X24@D@Qdn5SsnMH1}sD7^Z1yAc-7Df(JQ1Cy*oWkRz&&=62q9+mz*ZbWupd}RIr2u`c8 z1umLok;AHkw*G^tXoB)ZsI0bR`SBhCGVH>eM>7Fi2`?q(tFNw)1{Y~_lwFNXtEZo? zJX97wtaV-I_kM@TTr9pQ-cnioEr!c#Oo!nU2KNzWqA-irNYu8&sc*srA$@YdIj5*y zW+fI&ei4FmJF!dn{0cybTTt`!N7A6$op(~P2FCsgkIXEdJKk&;@LN2({L${X)8bV? zEjxy427-87Jun|FhIu~DtBi@UWr<&{qJ^?>z2F4X`e3~-&L$My#Dd#~ON|OAKsqNa zf`m5lHg7GtXKNDWz|kB7p`j!cpDvV&M z&_BOf6)1jv5+9yRxeKhS#=rM136)JX<))4hO=qb;ix*aD-PvDBdc((9vRD#OzF`dJ*_Fg z7^vaIHgD)d{9mcW^zTu)VBjjf1@??XhuYHaCwL_zj8LkG-j>c#oe)^P}k9| zg_yF1{fVV@lG@_-tr6w?kQD(dNJN z3rLWc*)chtEpW#sN#*bTK|@6@zgtq2DRTvsM2hC7iscGFnEDJ3STeBohdc6uSg({dYQMYD5c}3WX zv!jZTkNM_+4d240{WDn!z6X>7ri84d7jJlRCmu1X_`1y+i^6NGXlZpf%N3-$V8pte`OGb+53#THS zM3bsc3bh>#{TlZvPRYU&4&W3uRtJ|Rh-o8%FNMqO1}-PoO6-$Zx01o#h6E6qY>7M| zFPmX-kt(L3j$89yp%<%dOx)Mz_XOv;kXnHSXnybsTJv+hGN_CQpGTRzCoVkT2IESO zLBf3tp4pawmJ!ca9TP^{LeX=ep?3Cwx0ZgvTxwSGDt4)jjMg+_Sj{g1Iy z43C{<}`S$SD$DLKXH2#ArJN`dZ0cfu#Q&Sg>j8Cf;wPO)O9jgZjd;3!h&~DCQx) z_*JtTsyTC-WDZw~w$7Pg3~o}c_>s9!8GpImYuBNe*X4e(RBO|+SG@NQ{$hNrLJ*mU z)>SMG5yGlsj_J=Lv8Y}8;~%C!wqj}+f*g(PLBMc&tq8uV3WjqR37S8d3{-nntn^td zZIm!BAX5VdZn9iNXCC_2R)n+Axw+VxpD7k4C-`7M8K8@d%kJdyER5?k9Usb!G{#Nb z`iN{TDk0WyLU4$$GPln-`o?QqM_GvOoo@|}U*b|rMd|AN4;%+tpI|A=fj%l1MSC~? 
zR|ug;2!WIu5gzrXh=3#{vtYu6K!`MREV=<7ql&<&u_9I;3< z8A{lZy(!2==v`73pUrYo$b|g6oySI>b}u{hV+-VEDnKNnlYExz472=z z)5}`+Zu(5NYU@3ne>x&CTJ^dUAyT2`mg*KdrZ&B;+)f|Bgtxmm;nV>w$S1%bdlgGu z4oCe8&poDoh9z}HDjuhKtqRYTUC@99yaSKo;^|)Vo7B+M$;`_QuhX^oB7gcar)}Wf z*bW~ar_D~Qj6+NC)fX#!x91kGHTt)BAJUJ71TEhqlX>v*@)AV_P- zx_Y%mr}aqy1MwFDFv`D2CS)Tnhph5qduE^+uQ<>Y@(`C=FEIhMV~>A z+D`f+#F>oQ6=dITdNZ0!zblG?eXS%jBT8VO9RwL$43BgnBR|1Re-|Nnu;mju!YdO7 zVTts<(gO~ynwpj$bTsk#@mid3ry(&}k4Lb=TXoU}aYYF_IkQ0DH6^zYPe*BaGH>z$ zjc|he1&u_=X}>m(N;aB=xAW75%phe!N79kzEVn&-dMGOKr|IoMZEY6{1{$WF{>J?e zCncA)2aunGBsufu!0WZTq41^;g`T~;*LM;M0OgiQm(N?i0k20Rb1MBS4ltw`CAD5Q zp4Mj7h=*}~M8;IPtZIV+n20OHIckdiVdC7J;DSRdrluT74J{$c@@r2jl2!tn41vLU zRG3TgXRdpl2vsug6THYNt~UPuqZlHVs8|~eu8b7@$-_nz!-2^guRRYAHL#gSAFCT) z-@J0Lt9}6koUwT;M6@{NKL*tkMaX$8D~r95Ao?$bqx0w~=`Z?{*Uo289#8j1haH>k zLF2cIe35GqTb)b}iDL_~kGL2Bg{0^VTNov&#M#ADX$z#MYouRIVGo76&N0PXa#*V}8{7MB+A`APvIUiiY07rK6L>e%p@h@_I=7?cu-0g?YM9f0 zf3hyf66t?CA*I!@-s-wM8t_IN+Et?Vw0#xIP|*MMe%r6rpxZX9r`hST*xs*czux0= z`4iQ^DBxwC2>Y|WqOsCp&Jpo^#qYHbI+N+mZ{%$G?e5XG3*nRfxwUc*B!(r`c*(L# z_t$~c>fdyNu2XIJ-+=oHDB;%Xi@8fMM6xBysvERZYrze)b@yji@7gnX|6CaE8+@wZ z*Jk?bcm?61m+G({Lr#p_nr8cV@iFinq0wB@=g*t|*@&Po08i<7pM zY13g~nM6%FpQruv>yLGZO;5LfrD<6K6cJ?<6hmY3Fp(PH5@Kwr^~PtYs+GU$U3*>+ zNsfv3j~Gnq1Tsu#R}v--%%S3}a*Pl&h=Ea9&ox*wW=j!c_hxUe{0O!V+a1Z}P%unW zX@v!AnW93OLbO&FoBC zEK1rWlgXq6#<-I&+paX97RUvD`YaHI1aH*1y&3Bm`iNYSAq&k{Y3=Cj%=#QuRa_$Z z{W>=6@V)FQ$dvCpD^C)9^SrNAvDDQFwdlIIUhdtbYv_60e!C8On_2D23HzqBUevN- ztSu)vgdwJGT-WcPQw^hv`S;ruMDm@ksWHbe^CoEPE+sX%8JEl=f*G0cM;9Tw;8HIV z#Low<1pj5w(A+nwsKE*#Kr#F_>b)Z`5X!W`Nq6}3lf`St<@{(y&&c=Tp{K_`8id6? z6>eO=KSV4omtSmhSEeM;o_zX?zR;0fra8WzZtwfl)9YeQjI~ufIWXdE`!01V((reU z-dJE5qr*m}O=yBP^D$p+OL}Ie<7TUcQXoq~_Cm#1p~1NprJ=yyf7RNYx?uH0rT9`oM^41JGh8D0o7fA`KVbA>#MJ_^4D1Mch)mccZ=8I z)g%Lr(?XM9uWpV6U&lmorCQ+@s|0mrZspqtQL$8@>QYZXF8I=}{s6Mcz1BJX)CN9F zBifcS+a-H&Bw`B4)RsrgC`DwQS-2~>#1wpIOy zi^U|TKdpzee%CvVCg_s~F!0i(*H0(#8$_|l@ZMviOY$c+G;MNGoY#FHvt&sE50g_& zhI}8u*MuLs_>7q)<}5vab5};#UpF6vJ87)8PnQb`DB$7b4(qLlcTD~3jV@+UNOh@; zss1KGDl&h8ZQqH;e_Fj4&~(&=O+NyWnVc)>fk=S@RaNt{!Cybx?LPzmD#d9vIQ?$b zS*(?p)Tn-)TcF|vK{%qeu134n@a_3*L__><&9z~dfP4K5M3R9kiY4i<_>GNm7$gPt zi8BQUtkgO+RRE!2gB{HHJ_~-j+Qj~BJ;~g>T5GdI{)w9sRfJ$5)roVP2=Na9k0uog z;iImy!%?f6UK_o&!8sAl=Cs}DuvnhE)!osc+hP8ZD1ctynZP<0ORJ$`SPnsJ23VL) z?Ucxv{YlP%F&|2jT4=gwO1G7f5rX0nhsr^VfsmDvrRhB>+N`BQkNn2GM zWeKQISn%Khm@X^Z_E7BB)6-Ya(?I3=MU-m9E+l>#BOMKjrHvF~)2c{FlxU56QEQIS|qNXw4SH#wU^A1o(9&j{C z{MMInE;#k;QXm=E7p)1x0D5Xbg@D~OF2dY3!TP~o^ zNsx2aO)mDY172o&)_3~e2GY3PWgFGTlFchAFq}MiYOy}4(+Ubu4ANlTWLM7(eLu?U zHR&g?p@8_Mc3y-}!^bT_pBJACb*d-xr!vj7FX8Q6efQH}j1!H?+NsFk!`dY;l3f1x z`+u&5zW$Ci9M8Vpq+u6&`17XHyI>WNNg|^*E0u5$*n8%a*lOk#D1)opru-xs~1?d3wa zN+-}Gf!SziXmLc6W-=JEpYJQ(#>K(_HP|^=<>fVqK_?oC&(UOF3Kd9`2dwYbV~rLNL8-f@^IvI+13$c^FwO8PdkSF}{h~om&{FQE3+;{P%ON^6mqv@%C0SMZ3_2J8*NI*KHFah9 z5sQP1j?XwEaNjV+7_wyot3dEyeMjVcf6&KZUV$SgJpa^>8OlUT3>FLo34YvImYh1U zsBKGJmfO^ z!Oli?a`|NccF_L$j+f8Ox8git3_^>_8p>|hwBapq0S{r0nV<}c{L7S+B|P>B4#R=~ z9{@y`rwmyX%g~RqN%}M2wtM+z^cvt4?9oYY?~XeJkNcA_tO&(ARtBXS8I;Fe5QTuB zJJ!j0nJ($kl=jJC>QP5tcKcB%=>0ZHRW~ivWF3Vd^{;{@I`UO9nmhC$xf5ksVSK$f zFs!{54dMlAAe3&ybdN>>4$|K+hV_nKqx(7|jZ!Jz=wVH-;JvF)yMjAzW^l0-_t94j zZh|k@^2mMpFUq$jTu;Y(a&wW#v(Dk)kIrf`N|oDiLIeH-3U5K0A=y0E(i`?Zd1cFx zXpT*K7YR~;GnR4^qF1y*Yfm13Nj!S=rd*ks7#*m?{6b4thoz^DAXX)PUoLgfipx`U z%3;#5d`zF14iU^5>Y|Ji`B{>({10;rWu?djB_^!BC~ruJ%v6IxicK33Ljz`k2Fs7w zNLWCAESrSMlDu*=Igv&+R@=9~t`pbpOAj^$8;@)6v#IrzP}~0$VTe4N73RBg&m;sG zXz+akp1QS`mt6U@6Lt6;3;NxW)J?yA<-T40*m>{EvSoIES4ey7kZ(<{^*few5_n$z 
zEueoVlO&(N{gXuu&GnJ#Y5VIRP`S?0?5f|ypBAr>o~22C;EZWI@G1;5kbdF|GjZO` zAVbt;=FPxm3u9{K5L48(W4id*@Wa=~lZT+43+TBYk9vt#OFxlO8?d*4*9VUj@N$E5 znWiC&m}UQNazxkQ0K0U?^ImQj@6^_f9|HMg z@HwfgB2qrLwjTkjAD@wNrRUhSa{d_jzOIE7C16?~Vs>&PcG(+e)O{K9g2@?248xDT zSvwOwRZp-?zHGl*8M|fF`?iN2beOKu+I1NBCPmUzc3(DUXd;kzj}y7Q@Bv6r;ta~k2`(a`UEHoRu>6sWV{GX2bwV@K3|mnZau z-0n>2<1`3mu4k2v8{O;kdZmo7dPfX4l0Ck#7*W)7en=%;{4TV(3+d(M%*g8FyemrznlK=mcJcc$>wP&}<#B${tfIe=fpIWpH zJLZ2%o|J0){T6GF^8@Yb8kXA@>(5p>e7=Agd~sVque;Dy!rtTj<9?k6VxqsIobtxG z0fAo`X;kt3?g#gF4fq_U(!?m@o^NX>xHhisgkGz1PJOS>@ju?$qtWFi?^BBnf<9hu z;faEJi~sN(oy7>)QnkbaT(ht^x*z^dheq5TIoxJawFX(QyoFU}xCL#9=>i|{U6zxd zjipnEzU7hJ^J)WC1ht>G%SV6o%Qd<;Fv=`F?b1qV^(^PsN13fw7Gn+HPb3Ca4td#v z@x#tU@|8VI4HQ*UlWsNjN)IcAxG`?!Qm0+$XOvq(rMbN8G~*!eH4);PzjNR*gcqrS zjd0|J8dEpSrH?0RbVRW!8V}l3@*$nc#@=#&uUo8w-v6GV6`gxD^t?{)gR}0PkX;dm z!HBWV!jRMGR;_((b|dTiU855P+}<(T`TF|G#_m47mHk{vu&cQihG*XW;;g^lTm7%W1Yb-R|IS{T_>jt~dqipzb&4mz*Uw2mdetS)0L?1o#!{di;L_p`SmWDrXpIAR)Jh;(CUXs?3 zXL2TkKFwbGKJ?Ey+p3_pHcIaXcdq->toCx)=uj7Gh+^h*#<6MP@DZPLPY{*HwTpO~ z!3>3bmxFJh_RfOU)yN%K*DpM?mMtR+Kvf6VqjQn(e^co!U}EZHz__m2D{BHpo-!8l z-JOj~;Lk{ea2K{fP7)?h&}Bg!U;iFf((16|fMslHGh<^a>p}kLd7IPe*z(_ODWGR; zZtHqU6!ActwjE}H@~fe(j@`a6Kuk6DZke%F(HOb*IA@=_&}Dx*=8|oki-!7;3-z}2pE;YUw z#&Kfwe#rc5qe@L4aoOf8@8kaXWYqVyd+4y=6LBb_VOC^X=;w`r=rL?vZ!n$#f#0$% z(4`1u_I%pbPg;eRd#o4WQYhGH_n^}eJOVJpQi4;Q8|Lv>?)-9D75w+PlfR~$Hzss* za||E&e*c4uFz(^xd^R* z!j=%5Cc~m{-0o4Mst?d%JP-zI zr)&2=p%l(n+@$gPfGyr*pRQ4KNVUY-x(55XH=a?;Df3;9v8o)_SZ5S-o2s_UF)pPP z6lGMr<}#)BF~1)*m_DwzyM^8@W42xNaq@#0Je+IE~SblXs)EP+YT3HsSDRl1ms z8U@U>aq&Mc81Z1@PX*JI$aB^Vownx84|AZJd5a9*{kaZ$_nG*mvQKH?=6t#ybO7|g z}w_cab9nHv#MLeJD8WL4Nd$*3^|`8(CQID z4_ImS8R`lJt3LE_u;ZyL3##2!5t3-rpM!|%%B;xaQAankv|TD>V-*a(-N@5rQjvN*+|oGghE-KA^=!gPbyEN|JbKWHWQ_oVuZf7 zC+OCMO@YFvm)nLgjsGkz{%HC21p}=WJl<|wvy43*5mheRWV2Pi^KyZwBy{^6cFoV+ zoWj=QxEZK2wbj(t+S7c~dVB@E*)>p#cP4;g;}5I1S!yZH$*sDS6#O^|&@$W@_&)#! zLHWLww{hL35_89Cch7wCj`2O3rkOPVzCu5}cEJ}*x8~NF`soXIUU9|5d+vGq%~x({ zDTQTC^Hhecsco;E@!;LpUDhGXBBN&tF#kEw-8ox)LgQAA+1wpVKYQ!V_db5_-S=Mo zV#S^VLeo~6Sp%-Sf5wv!-!!CodJGe;*tTt7=9s77nEB}cCUi`-vO!NpK~8ST@`ZDY zVmgn#T?pQb;OLC&NE6(POso#kh_^L0}7dVi)NN z%`lN}iGNl}hO_QmGJk6kYe^E6l}lEk?yNH{#{@lm)K#Q0UnZcca(2#+@`?&*KNwq& z!eR4e9tj$njI=Jy9ia*-s3+Svbd*8lSitv1=LUpXMk;4$Dg3FD)@VZz zf>A^`DWr;067bug`L$3E=*5SS(%3DUmr|n$*qPB?4jYYWv{5MJWr#7j-bN}TQ}h{G zL1AcLXvNqiBC^>Kb5y9RfU4l50!56jRR>CNv|!B9slmB|^L!TO%hWZQ5;T@42}RX| z-h$2FfBf;%ypSH=wWE@@nlz1KSkGS{z4td++g`SL#o7Yho{*j(=l=QUx_~`JsNAw_ z(fprQ@Ak2=$k2{ySzH(t z>}FNfHD6KQuC05@Rl`7F{tNswU#6yMtRp79e#7|6T^l!T-QiX(W}XStwp1r)7J}Jp z^5s}_LS|YmR@t?3-qUX`VfmQ&rg4c0NwMMLr3+_&zF=EeEZ?(nH|dH`N=%B4v3fka zR<7HyzbsT-5(Fzs(3GZ=>VegN=TySV32XI*3S8HR&X=i^v?aMtie9{H%et*a3QN-z z;rE5zUe)4sCL}Sw^(!|Pd&8l;jazn?5s8fw`EcQ;#hddL#gx>rj>WfU)8DK2NoI3Q zYF)D@XZxmg`D%PZQW8zdYl_41km3M|K& z{uLt6J9$8FKoV6sG*%2RWHK|%9!w`}r6%^zf z{RM{CO3`QnEKMY}5Ctowppg_vE*e4&hf1oE=UAX^gPutSQKJ=5VnP>SKboe3;uE9< zQmN@IdR~f9IbCAmI7l5mqO3EN!VsvhQPXKOaEyU{LLot~(Rqwma9Du_DK_d~M^5@d z%O2T;9&v_DqG44~3Pe2^vkK&wA9-@{^G+v;@iQF<^h2GE=bZpe5hYQIJhlk+4m6)* zQA1gkLSYetA&}uXN)cs}Gg~-9k-{MvY8S1kvZjMvL+hS~*kMsZ&%2{GxFCQpjj|Ap zEYMycnC1ivPwSehh+*`YB+8iQ1)e=#X))&&;9pdG-bq&#Neqc9#qojwl>mXNjU;On z$C0Y6M1~eLpg1RD0aL(X_lN7ka$UHD`idkI>ge8^Y(S!h$77hd? 
zjpof39%4wLfTY7uV`f36;HQC~krya64B6;3&sv?au@-BsK37~nAmh&$|M%YdFkHmQ zmpr2&J*HJRh#%_1mBVw89`CH79)k%(gRj7%fxa45q`DqU1Q{aDX|?N*9WRSF{%^tf7pfpPF??AYO8Jw8d!p&nx}HRez~#{3%`p~wFQ zAl5PMMqfRAP`CQ*p0%Gn_2SYZkS-9t$mTN#68M9e8V+AgtrFNT5Vg3t*woY%2{qoT z5~7u8PNJw?F==X_zL&(u$7a^9omMNeS<6-vCS3upGb@;(DIy4FhT#-R4tV|0v_xr* zY)CX`2Q07(2hAY!3q}=ru;{)sGM%8Fs0jfvC<@w>;eZqj1cG62SQAxHdIl_YNb>tc zpEn#1Ng}x6oT?!;mdJ1@tjKc2{-PZsjF&6I^`+^u47MEv0Vy;(1~@&{2H6;qjfBxv z)FP3M=cZB7tD<2r7!2mZ035=Qw|rO5ss(eud+Xz+MVfl%yQ&T7;iINfI=J2_b5%Qu z`^O+f9Lo+FJm~7HrsVG1pP!!(nu#`nKZ$P9LW_|URny=hqhsv66!Pj61kfQ-6a)*) z3r(6fYtXQ9gGNmnH)`CtVV$ISS2zrkDx%rEXc8P*67@@i*n;|M48wp(Dk}J-EW;Rg z2+&Ppbnh7y)XcGMTes@oqgRVoZR*ud4fslmE8Vfl8GU
    J* zu@+;Tt0P6E`v1LtO)o2V)c*;Kq&z)Tq$@wPyDcL3@n+1EN*!g$*q;o{ZyF*<9n9! zZ0FANH$-iF>4eT{s0ik#)vcDD{dR9xybZ0fKV-?BvW42o`W*chy*nM7SS!(S2#Z-i zYA+Exq7}CK;4#3Wfsg*yz0nEfYtoG=p3M~+yuU+rCnEC4M<{jhi07Q2D4HE)Rn2qt zt3#Yc$5Y+e8U|-kd)D;quo1s*As+9mx^4o4(%zy|10wl;B1AIg;*sit99b$7ylx46 zLC0edjk_fKoxYew6yex=tA$d?fT0eFVMh~Ak>UDI_~UP)a$GMB2C$`0e{%MTtDF( zS3hXrBitpfbWy#Dg0-E9Mo*%k`f*9T4#1Vs5tuZS>K`T7eEeJFDalc0s%FiRat6+#P|3R5w^n}oS&#R!us+Q}yx3K~fEvK^=@+-J-Z znyKn?D~eWl40zdv6~&7~oo*w#GAh@)#6;v``9*lW$6|j?MUiw?MxITl?dmj8jQg0xU}XV@7As_q~%g#Bp$dZ*!ks5-`7u*p9>1xoS% z&^H+1MQsbsa-*1sRcqO2Dzjr>XR&h=OD6=tUIi7>sq6z&2Td~)9ZwoH(t$C|u ztLwQv757^1XaO&4+q#`A?jw(>g;ZMWgb3dNqS?O*#hzr;LC7t*MWe&rCabZvD*AI> z^U+a4DDpp!Y6+KA^~AHk25XQ9@w{iZ2s)y>&%jU!D~-A#ET~uAt3KdPrjXUz(7x(x zo~xSE_T-zir;}3?wHboQO$(WA9xTkt2KF4xAkh5LMWb0--Om3s^w|LB%y-bN$d^;y z;rV+HniXZ0u9BRz6JKXD9j`HyXQB*Vc9!)Fx2SEnp$6kAaiNs<#Y-+9 z>}1RRP3%B@yg6&?!q1V5{Df*q)@4bBK}A4Cxh|c~v;!IM6{kc^xOX}ke3~%z&46Ab zqP{x+l12QvmafQ|OQ}T5NRTOYIc|F(F^Dro^c6_lfz>&S)tCSj=QiF8Mdpg?b;zZO z=Y_rF$wnx`C9!F%6M0Dd`wq(k6i~7NnibHU(nPIR7%k7~HNOqWRcQ%1+U5*;O;?0( z&Fu9&L5Ub_s@Msh1-adf%&myaF@GY;w}u7}&QO=UPTBe9MJHR&Rgap=O*P-GBn)Sn zd6*61n%W|}6i~@==d*e2w0xc_xB*UO&y+evi$HVUPs&i~Eore>o0MXe^7o0oJ8U=q z=-Su6`Mq`d9c!QM!qrc^CkbyQj2E$=8)8UhVNLJZGC3WV$q~2S@=B(;p5X?|xK5}w zJ5X-*vhRUpk$jSb1Mc<_l?+$|`jA{_t=}l@2zgzqSeLeZ9IU{#9ir z0X3M>Eo#*`II96=cWR6j@qCe3bxC=?USQsLAQ@FC!+`Zi%c4TfdUcYd56u!*@z>_P z_q{6vq5C_%KvS*_aYpSwi-KuoR&C6%0p9v#g#$%?owiw7a#5d{FzT)w%-TmG?Rg^M z0}p@@uF{@iW@5+yn7$-(Ot-G07noe1P8?-XBh&ZbE5Z@j-YQA`rGo5?(rldN zFSA!!bt^@x&RH^U0a~dPutMOsgdeSQ8`*?So7o`;@map)mfXjR-O}PR zOXU%@vzIPtMV1pa29Pt3f>*55glT)j_m}HnwV5>5DSZ^^33S}-5p?sYWA$m~RD#^a;a!SchhktY6 zj2f>aiS84?Mm&$nVfouOs-Wj%BVV}DlkSSL*pbZc9 zsTHj0iaOQDfhStD1)U>~O=}yxT!IpB?2DMQf{Zk)KY)H%cQOnKZQ(@5rQb#lArL=5 zogviRC%-b*{IX|m2FO5PU4(};T{_sQ79KYNzs0)`!q1?kjOo{#fU z1OKErkvSm1VoNJoMbA1C86Z(Ryvw0~N1`eOq6Vj?OEPklqG#)!_$3uS_r$zh{`!R1 z(BD7HXtVG9S6|s71+}MY6ojkW75d1Ax3Gn!5}zND5wZ8>ct)mHH0dXQ z;$*Mzv4Y>}%j^~d={Qk_)O{A4oRzxZy`6fu^yWbI_LG)F_zxjBgRPY_J#9VBLWWBQ zHgiLkGq}1;p%+o2dD$pZw(yUwPR3r@Y-DH4-XGj5S6b=)= z#Y>7~M!L%DJFow&+K<>z)gQH&7GSDWd-L?`>TB9zfpN@FQSfeKCd?tr0Ia?)jLU?f zlHmiZVr~Ml&S+6S$<;<*-mnPg-Dz{%kpeMxvvYf|!=IxCx|DPWsMgP^r~q?}Dy9&i z6{pFfGrF@6Q&K~x2f3C#F#;>i#|jevwy|o0VlJL&IKDKGQc__N$wffvKUb)wws-I%_<-&4ded|tgdH#8xa2X;* zJQxoQGZx1(;ob0Irdj_Rm2UcTzFo#CMCu0dm8x8rJd7eM_D>LWFIW-9!-C*h-!gd| zq|`omfEMNOJIA?B#78IdW~Ms2N9B8@VA91BqsY|b6xenbYmgB{cdXXNI+K{hb13(K zuv6AY9=FESaFf&T_ZQ|hQcj9rg?lO_MYAtFk~1eYb>|9`Q)NhR75IB-C0E?@Yoxgn z7{|n@e2D2^wMU9d@@Sf>e8j9DJ!1WPE^(3>FoE1le(EDR*e2p`G{Pz~1@I<N9J@~D8wNc>}{|h0edV%$wyeh9Sis1 zd=8v!_lv+sb{NTxk`JY@l{gxi={@*?3q~$purj67j26M^XV$2J>sh<{j}7J@XV#ws z1rqXezd$je@=kX*H>a#@>tR{=zGlYA5_Ha8-`DBI_HMklXyP3KH)FvyuNA;oBH??V zV*)O!juR)>ONk!4kyk2-10Dx%#QjWqT@@~sWmCMd%}QdVpr4kKEC=YeHoVjx z4*N(G%WuS8+#+uKw1l|E{)Wd?Kj(HdWXBuAPFsBmm(TI4^tH0StGGmbNPA_DwNGN~ zRY1IE=}azrVsI%F9fCCa4O5aT&&SfMNt7%y-zzwHWDX z$((FXv6k1_ZmEjO?o`^*m?|@N>L><)xD|@lvT?Dw;$h38Az7P^m6&{B1qRo)RomIR z?^LUc57~9_a<=Uh3i0$R`n)4&D(Rg)VDF0*^z^GhP3C#WnFs1}T#HA)3coViX*A@F zw{uSF2Q>ZOxIGRbpA;0RTYueze0YvRnJy?`r4&vB0`3U-DIrhSpLe-S)Kqnz(o%%Q zx$U2$$C_f4m2$J^8scB@z3r>U?aL3aAAz5r0CyQIGd~4`<=zziFn#{ibN&r19J_Ma zcl{=Jb%eSUu*)h%@!CrbHrAAaE3qQo@$0EldSCdoJArg9PfWPUJnPtZzb6AN<0P+a z`w+*nAEl;>NJK(K#3jHp$o|QG3L7Q$z{4$$a93)J)|lrIxeX;)5^$wZOGpA5@YQra zQTUGCiLYh~j`7l;>dSl|q76hqR?0tegi12Y71|ZKR^?C}dRCpc1+Ts0qUTx7$ra2t z=N$=jW4Q|`@fw%~+1sv4HgG_?rYkAQ;ZOoEYB!-D=EAdFA7`(~lszbvRKUIR)OTmn zy(*Rc${>hiFOiNh)-mfG$&)_`4Ixz#C<(45EP;1V^OUF3;l@=Jbz$?o+)u=z_v$># ze{da<#aOSK%hjP<{^R9^ehq~L9=sC9q>Qa}cm8?7;56xqV9I_io9&aw08Y##P<;#6 
z%z@0mK3F2gaaIh=$^@g%`3U|^FC+2IFsgD;s$9ra zlTD1@Hnx_DA;Nbz-AZ`s+p|VY0;`vXa3+Sx(#n+p;BR(SU%hvWS^rYV>bptX*;j0m z0f*Ji@10uLvJQ8|{${!4;-P>Y?IJ;W0_Q(zo;1jMam{)jcX|Tf9X^RM*x*yBe5`uB z`&#j?TzJyLz^x=~h?~cEmA}?UZLdh z)p|~{QRJsO+TXtt*$(;=m3cO8Ss&L|$hHcDCGzFGbTX;!37d*`9>Xd3swobh1qSB3 z=y@)ae0BzCsO+G3Ctved9PKki4L^`JlUMDGixzC0ijmh}o==KJZXzjMCDZkGZ~yhr z%~hQ0V!Ic?JErLco=5m171vhbe$*p@U*x#8Hv%kkZONR`D6s7RntWFjn`81BRzG3d z$ew#m|8_TMFy3vR{>@<$dyN}UXE4uh@eYeQ zv+Bv!57Sq#KEbkBkHti)UQ&wm9kEz5SB)ds0~w`M(7R%=VXLO;G54=>@|XsZ6BOOOI!D<_ zN0PX_8P6xJ$7Gyu52BPxSXWeaXq_evRNGP-GVO+@+xS7r59({N1oNi}p)`2{8dsZ{HL!>{_g}1VhU9dWu+hwda z#&N-?)Z3M;e7`u!k%|+X5*kK(9|)0~S792uz=+IjH6`c@mamA)|CM_hSiD9jW;XC0 zz{L6snr)$HzHnY7MV_62_QS+Hdxs) zRyVUn@6O5>?sg3du&xg4)vdOA@GBbYnaTFr+P`2jlH#~Vt`l4Q+XEX3k3O=m3LtRe)Kk1vx%+mahVH~d7?DOa}8ixSBWL!Rko>L z?VV&+l9VpU<~M&hWw-Zl3min`y}JcMj!Y>P93$;^Rqw+x4wJ6PO*!S;{D^@3Uj)4G h4w$h~HlI-ykxRZzOD<8p{Ql~dytIl`g~aEe{{vKj-<$vd literal 0 HcmV?d00001 diff --git a/website/docs/assets/unreal_openpype_tools_manage.png b/website/docs/assets/unreal_openpype_tools_manage.png new file mode 100644 index 0000000000000000000000000000000000000000..af7b182842f7046776a32dd3e5dc70e6101ed3b0 GIT binary patch literal 27475 zcmZs?Wk6d^8?{S|7l-2R#jUuzLvar-#oeXFr8op=Da9%7?hxGFEfCzDVkbQBcYYlC z0ZCwIX7=onYu)#ns4ptAXdj3^z`($u$;(Np!@#_=fxf>%LV*4buyuNZzPxi&mz992 z7$-S^et@$UR~CnXsg6f^GKGhJMs}9dbAy3F?|u7uH{kTw0tUu*UtUUF(;INoj+SCM z=dt0l?x!`kHt8RET9Vs}mBeBYHMjOTvcWtahrY?+o9rl`N@n_2?{M&tf{>nF0vt$J zex5HGwFgg*iit1?)RPE9%r=5U{~a#haa&jaq6{@O-qFlJ-ku)ovsSFVp1+q?JMZg8 z(4VVyJ2-b-_Zj7zmrjAR%-RW&$4tMCfY(;L4Y#u+B~?`n1*$|j8WlcwyDT{x>?j;m z`N=3LDwkj?WP!K8f1wel$BqIaN7L*}Q6=(;C&K-PUPk{(VB^+d12}Sw9VG>Zle!DT z2YkwhULcpLJBTU!_L?mhtnyag$NyJ(?mSNG$66#snXb|Bl@WoR#`BfNoy=?I?^vxw z?pECjR>rQNEz*6~<(Uy5=i=fTh|PRw&V{&}1g*A|bcD`EXb%UTe%kP@VqMcExBwyH zJsq&rPeo{@BuvbL2<Z{ErjX%Xqh-jccZ=C_V6`hXxDGuH`b}lYYVbX5hFOdKFF(_Z?O7%61*=zWC05wXn_V^5#ZCrYkvR)!uGmiYx?kCHfQfC4; zWru4qUTg`OS)wE9757p+EDuUmfiF*7mglifDVM8mmgl)byJ^O(kV%PyYr2tHtyhhV zf8%FmGH_T;8gCD7htC$S-RkJU_ZK5u0Avbtkd6G@_n}}~kGkjg^q2NH8BN#0-Z;O5 zs`TwjqmQoa1S#W5f+J|=- zsRetT0CeGJL?XSKxMjJjhh>I4F2J}J`CF~CNG+K@3VQ#R4a>iYAE;gjh_kC#mOV6( zLP*m92IGr2)Z&xvS(lN#7%%&(nIUoB!fylHlY~3?R5d!m*t=r>eaKLUkU3h;XWbR1 zH8d{EvBoTVCtn8!92_c}A;HV~*JqvK&80m%HmvJ^8g>kCUk8u~2M0drwGV`e=4lL1 z@2KnPFEBiNP=PgkE!8Sq$Ww>7B`2C+uV*YTtY4c)(AvHJ7%0BA_7V*3BXH1bWkF_O z)1F|uK4XYRYeA6h#MdoV^pw53pyIZs*AYlqL|~rKqoP*|zT=CYt%l}%>9Srz49rS7K?zLDS2g#+(=#yfRhXQ#pfXT_%KBqsNyra78 zwfnUo^Rn?pCiO3hYUkq>ernjpzY|9J|1}{tyE)bGf;XsD$^Y)5;__R5UsRgZWJpb& zWA(N42-yfKgU)1o zpM#C3(&^v&I^#{?fwD(%R?x{U_4f^WC1?ZZewbeh?H6~x=!|T7y%^}-U%nd9R>rOW z^Gdl9WMi@Gym}mt>3m#)5Z|pc0LcxIF9il8XldVOr1@>vOO)uHk>!|IkgDE4jMr-B zmvtS5*Z5btofWn<4HdRzt}|x4jn>+-=Xp}g^Jzs0vGI$)&(;OoT-JyrwmWqBI^nZ| zLle-@u)J*TC{+r0oad}RFi>zh{4p&|lNET|T;si23Mts|TbzIpkoXqg>WU;~<3xdM z2RLZ=A>ih;`1ShXxAhk5V!HOKzRsWd7vWW|A#|3s)$8(zi{7nnO)NoIohVRsUk!50 z$_en{;o&+T2f8M*^wgM74q)4<{wgkjmZ1BauaM(y>6UAAxqf`+BtXps#{9RId!tm< z)$P<_C#n3mP&dLvpp!Dj#Khd@r~Tg-oQDh9u%eSqpoG4ST+Yo{P`2RA%*@Dwh)lZh z-pYS7L7V>NTvYfJGTaWBD4sc(1upcjBxNr(-4y-ND=aLm>%Q@se*5;1sWUcy;M3|K z|90l$HtaU6;J}r>Y(J_UG-B(K<@%$fOz8a^eBeVoU1AruV-=Y9UV+Xwu5o74-(hcS zz4trx;FvgWf!4swMg+h$$7UrLs!1cK18Ms|M;)rcliP}6xOO!$uUVcF4e_)W?XnxW z^7UtM?08zp!IJU;KXkF(jw=Z46ymzmEGH!lx{o_!YMX)}Ib`FhCJAdF0ABT|j727N`<;4dQ)7F;Rt;sC;`4&qF z&{v7_oA0`9D6L)>u?MqQ&1NV`bFGH9%SCiWO8x0peB+1-o`@@kcuRA`f>y&iILm_ zcPE+m(<#@-a3Cs*r=%)T>I`~GAkfj%8S!$Issvy7{g%h!6@@$EIY#mx^Bm*Yxs^_( zbSn8l`&iOk@qsZ4R0{4X9Vrs^88JODCtuYhj%W1*xCJ!ZyOq;m55Tz0v0?(auq_n8 zCK5yF9NT@%$awJ*pzSTh!H*b(i+wz22wPyEUyBZ!;r7{C@HB_=gujt&Cu1?AI9Iwc 
[... base85-encoded binary image data omitted ...]

literal 0
HcmV?d00001

diff --git a/website/docs/assets/unreal_openpype_tools_render.png b/website/docs/assets/unreal_openpype_tools_render.png
new file mode 100644
index 0000000000000000000000000000000000000000..377dc2951eb1040153b5ada28254b06cb9a89fb9
GIT binary patch
literal 27453

[... base85-encoded binary image data omitted ...]
z?`j%7V%*r&^zWBmuB+W6>8ch9g*5|uAG=2p84+0$)q{?ZDK_*_izixHXWOc^{OOtWIa})+x76-^{q@() z45ESD=v`cNi;f`v;?~>$`rtoZ3q9sJq&`yrgYU3~L*ZBpxW7-XJvtoh%%Qh>@BTy4 zp%wel^%WhGckf8LdhPl;lznm?%IdWvn-MF##|*fAj#bMcSV-=b*I)kkt1rL!(u*v1XUONkGRdPbxJO6b!&b7N|MaJx>wf3k z-`>7+CzLzzT`HBj@vAp(-Lmx$_udO;QzThaNTtE-CW0a-sj>tTgpNm=AuAgU$b9HG zmtGc%e3GfHo40`PL`jhhLk$_aECrzuQ6AaBWH_>v{M7*TJ_^-z{F2fEhaNh=cEg4T ze)5fbZoc83Z(j4KAKduFufJ1X-GAC)^F2~5T^GEEq>=+Ol@%&UfiZjb(SP~--(Gp` z>BA-ue(IrnGjv;vNT=*zb$d29ZAy%CLoS0ymt{>SyIL0ASV%V&ecqfoKmO6J<>jTm zi#(YCvE`{cTvD!tjF6-XvL+}doyE&_1i(9m?;^bSu02?DXicIpk+wube1AK|`u=ICST4ehYHA@hdkrHa5YGz541e)bFi* z@PP*^%1ae?+d8|DSpa(agkFM#qu@fy#Ycwhox-S5AV5{xvU$t0B}>P|mMqa7NC+w&MQ2xt;X_&wAC5VKIQo!74?p(;Ins|J5JY3ttm8lPSv6eZT5iY) z^FmqcCD}t~AzLl$_`91j2pv13pB>-z%0Hic`n7-W%-DfynTbvJ{OPXdWHWSV#W!#J z`nRsV_L^50t)|{|15K9$*?+&b;r>57=4ZWKJJMuU{;2(v+6f{|rf(L$m z_gAj_{L|0Boyj$m4<_>#02S(uJK*S0-hYZaaOhA_kFHPXkoKT;0L|A<9P|iDkuf=P zQ@t?N2X=+X??SU-s#cB!nWMjsGwHx05)Iw?yWf_Vmw)Fw-@Wd->mA2_=%K&FN+P+# zoq<)FKzzk0i*Ao&?b@(mGO3G{7RwFUwJaOn z1N7sOwr2{C9nvQqcgDyg&k{uTk)UYJ-19zDF=&h`>z)$?9&|c3GQJ}Cs^qh$<9tz9 zrHbk>f;Vm1(672Iq6kku^Zff8x5$G0)?5F0>8;l+EiCEsAAbL9SlF&4E}g>SvIYWbpy@-oecdLq<1`?1F#s^7j{N}6B!(obvmW*Ob0 zx}5d$u81NIZivE9e)Qu@F1qN7%P%ihqdGnFa9>?)Q zA>)=?zJKpM_x|QLxBu*CKMO}f2;@ge&FckFYaVm$+O@0RThY)!Z->`3gYJ5x`-Q3Hz6WrqC1VBPedFt&Jm-_Lpt!Eo zuX^xRU;ENe{_qF0W$&K%>tzr;orNSvwJ4&9{}2otK5WwD$rC0`Ds&SkO&ULb!l;p> zD8~L}A3m`Q0ELd8asv1vc2v>XY4ZaNdTBqqMC*CJs>;b^@=G^-`Pyr*{pL5nb@f$O zx3t6&82Ey1AsWD*>gpCmYOE>z&3X(Xe=9WrTBFxAin&*?gDTiB-!RGpd3YWQvQa6^ zm0N73eE5`#L8Ar17(8UUtPO;{@3;X}GD%YDv;_xK7h#k5CpLD}a6w2}X7bdt&g13Ql&i`5oswjfM9LIV1na4=ppD2` zmtJ}7f>W>g+~;on#Sar?fQRPITM#KPMUs9oNo+~7vZ>TouI8?YA`Wf@!uc0m@QrVN z^Q$-B*uj1M8(+WX+Ak8{k?N9*z8_FHGnfbZkIvo;K*lUT3S*D1fYQ+e3Ui3lE=S}1gFr+M2g8`(@Wz8 z-_@%tMtu76&tCWcE<5Vb>FlbCfBLaAGWAQo|MMS}Pd+48GXi>~u6zG_?g^IAd->(p z4n6u5#}ysdRYV_rCqWzalwl*rfBFm8o^jThn>TG;4?*|(g^C{g zWvBMQcl4nOSqx-XkTS7;(@y!!u(4BZx=>1ISsCZiebPA)3xbU$%hz_tGx1C|EqLi5 z0lm+2{qUdx!^ijAuxr!!2~*T?B`B{cQNMcayEDd*D=RCz?rXP|4-J`)v$wh4hXUzX z@nk&F(sJhyZksrG@D0~~v7(}K_|V}k$)-d)L7zSngm`m1nan9*a-JLkMyqyqzb85Hwe^Mx-A89MYk-~LW@RW<2Z zL7-{c%{SkC)X_(M;uB{h0+=zAF}WSr8}MvNi!8*HspB0VrMlJCRd2oawkx;_d(Rz! 
z1ZhE7RiC=+`K{UTU@K7QQMW);B*AXl6@|?ism}Ta%3~|&`k|`Y9(WAy}+PtyDJ7UEdQzyEE+dk>#O``14%lPOfJs9IK z#M#}^)Hk<&=X1km9ew(l7ixm5+Lo$ycO80P&>Ib?p{$lfbKQ(ak@AWtqFX@jyCnPw%H`y5eW} z39~!`kea73YV%|(an#uU69$Ip3ybs#GX&Ce7>tktR6`=_Gah{851+d7n)5z+*)MdQOy=3bPG4JCB1)2mR))eFkULqiM;0cZ_xaCq17K9Q(2nC`pidw5PL@^?WF;OfRg(^WP7dYR6D2e4H_R@1O z2(J&j^bM~n(&y)6VGVvn^Fnf;_^yZ|iYVfPgDg*gt`Rd2f$c5t~^t>HWmzNKrsES?Yc17EVN=@27L9N`gqb}VuZ?CLgc@ZSko6x#4M z@;Y|!4t^B0HWyMv5k+)|fS#lX{2av6u&#$A<>eLWY-Y!fU6x}iGJ9WL5V-9A7uhGH zm);ED2^a?v=sp!v0VNMHp$1YoJspA{WRfRJ3Z;iBMD`9s%97^*Es}nE2hpbsEXWm9 z#$OZDd0I)41J|e92z#G*U+MvLyqI&KfhB{~Vro8pR)o0xKZ2->DB{3?i+|xXBxV3N z&}VrjPnh!l+V>Q?O^(3ygg`70=~_5aR#u*|&F$NF(gCtpxnfjA5k(a7p9kBrHC-Dy zbi~#j+cowcK>BL}O)Ts-eH|uNPg{lLeU`dHYb{$@2y!(4#uk7j2~% zx|L_!vZlgdx3Z?H8%*Ea&p?hJ{mS6e)79(~AJAe|Ij?3HQA80Op6AJ`Tv=W@ecJSm zTQ;c@z2%gh3-U?JD+}qNaKwm2MvNNOZ_t2DCPUA2xb&3o2e;bz%Q|y!Rl&AL0!2w= zME4S;pCSp_+Ps$gR5TD&O;cq#l}@kUuzvW+F?1i_0RRf`3srwupe)y;x9B%CHibe) zMMb6S(j&%26j20+D2l06DijLAvv=>F<*Sw}BBwm!J7SGWBLr>8@x942r;i*tl06C` zitI%#t*6^QB)~^K=F&IH>k*V4eT&y~Ja7H_b;CxC`tP7T0CbNvsiC1kQIxW>GAQb~ z14R^31cO9SL@`*sdhN26OW5}y>FHSLM|77-xvCTSN#e}8vxkiyMVD|vcKeg^dV~E< zdEf>5+^_Gr?)vrXhm9Ns`pX^Le?XD{X zBJ@JkoT*FqfY3VzkcLd$j|_b&t;l*s6j4NHXr%{w&lJyaMDNS1Tbu_&|bKY)|4!wh$8-zz{revjNf^}RXLCy{6u-A%>}-MV*&fV57fIH zAr^6vqDc8g6wwX9cbFpF0mN!KFF}G9NO$rxW+V!v5Q&(v6IaAR0iVCDfgu;f^!VPp z;#d~3-$9C1Pc9GFb?pxU`p}u{7T&IaJSl(fhp%CX=&qEwrTE!^O9yxaWxJi=(NwHj0RMbwiI(jgc-obZZATti> z9a!7C!GAI3p-~uwmUs(Rxo|kF$cm<6d{rhr2W)VPDB{BdGbIp9OG`(O9%WfpHk-vf zhTPJ*w%*MnnsE}5qhs5;u9uXQ?AWn$+qSI;)HEGi9?kPzRiV$Jty;ATl-Geu#c|N` z1kdzsXnEr&PYx8-vRqN=qHo++ziTN!!LmN zblp(ZcCUUdqKJbHTy(*A)z#G-Hf-=bk5e8C@9k<0nU`c4@v+$V?_c9M-qx*Kvu0L? zp4j>dJlFH`THf+i%R6d$<4Vc{*|Yp$#F()YCQla>)wFDp;&+_N^j^=tD0!3xn$&u& zq+1{^H6zb)=mSNPQ1v`>_X+z_-u{33f}JU#M^$qB9}#kfEhx`%75V@u2x}qF`>c2fX{-8;W8%qnKS8+0)bR21>Q@iQ?BixwqSl3Yaf{M0O&vH0`ziYV`D{W z1$qnZ*)I-5hdLo={L?Kqyu6PJ=vr*ho37`gi}*Rqpw(yywiPcn0}^hRn=5`p5N5cgkzZD z&Yf|Am^dR@FL)WZI3uGKi_=ot-t^M@6e$_;gIDx;A$SZPe2ic zTHeUv`%TM((u(H5sI)CdHw+3PJ*#6!9XPNHQ({w7lWp4us{jX@tSE9c7Be)%b`JO# z=8GtzufTkSNjqV}#1$)6Fv`oF%Yt1i(k;OqClP5CjJYuDJkS75Y8gyNnEOkXEtcD#G?T)*0k-w1NX|qGNVF?z;}q4S)?)a&N(L%iO~Czpy;(;j@%FEP{j}I zSU0JfriFA>#i+5Mj}rXa7UQ_qAtRv6Ez1Pan~sGDMHF$cAt$N(2~1^U$WH>y2u>HR zq(P^;_kpAv8V-1z@#qdc&!?|j@Tdst;Cc;EvMiTZR#aA1MoMDgXfzxn$Cxk1C|-`V z1{#s@@KGa;kkP+?f0Pjohb2iSz3hWpwFeFuIaua&oe->!@W@=I!a-sY%XCC^YSS+j)aAKwD*g7ZKH0=nfsj0;=tw>l_6|y}&Np#uM z7#{m@PoaZ4NyYX8C>NEpOtWEc-PQl^v!|VU+9%FF`x9r8JLjykPd)kMmtT76x#ym{ z@PZ4@I^#?p4tK!?7c5w?Kvtl=@{f5_qj`o06&%mI;KB><{{8RAjUQ**uyJ9I(?XFu zT!8$TeFPBcTbU7*Xc*c@S#A+U^c|oiA{%H|q+pSzC2E?XD5U6MM$w5Q+m68a^E@aP z-~-=wVBh+V3%idc|KN}u*LFQy@+J0dZV}Z0+tH;+Iu%HHa|iUX98x^NU!|p`l@&RZ z(?7vrC@U-D;T-HpIZ~oCIA5bbu;tkuBtYFlhVjQg{_)*Ki>~?n7eYDgbCy4%$YN5*6f)Kvu4d2GjinA>C+FHI~TjK z|DaU3o+m3RWSpn}V345V34tfd&?zuT~W2-xBE=l&v!D53-S)WG!FwPS~w&4S8QT~lGFFsJHrG!jx2C6!9&FF$_x z*(A{?xjnilA_b1%d6I`r4=&<6*Z_T4)l^B8e)-E^zW3hyH{5Xj*s)_VFc=FsTDcf< zu^I6dS+9s93Xq!)e5PP_n3lD5+qQTj0aY&W>Bc(Ow^dbE6)~Ag?A^Pkwzih$ouZIM z)mMYFJtSES>49Q+qUr?54}6FJ+`%HcfGp9C)4Hzx^rt`Fwsq?bH{4KFRmrcZ^HBsO zcDWf__<5=#iYTHLG)1`c*%b-Pu~)8MlTN#yuhNMkKgeXVX4>4feMd`UgJIB{j73Gp zjLI|S2ZKakZ(zC!%J2mg4#q1|UUvYsIkp2Xb3Hd2jW#wkXqpy@L^?1Vx{-Rd*qKEX z(KRqZWOl(4JSL0Mp1rj#@r0r(kw_T2-lol)TH-A-Oeo?=c1Q7t)*mEjk&Yy|qKqwU zHtY%l99_!DMJb{H=y6aUOjy^pBhkoJS6wx6(xe~#=!ZLZ?#zz{Mv*gJ;rQeOizuQ9 z4onEl5d{`3BQ@7`8yg$5nT#g{w(VG!6;H%fN#^#`{(>aQf-8wxAut8Wm65xv@aNNu z)3Y;RkeJ2lmseC=e9^@N2MxUQcfZ@RWsBoD7(C8#+yiiL`;a1vD1yK=0Hz1P9^$(J z=@Whscn;m9;<|MIif$P6yiqREet@Khj6hK=A+QA5QDjf>HBA;s*DInEfVM2#E-fv+ 
z@WKlR4;k{iJMY}MVS_BwBWTdW^PrKVdyZ(E1H3f8gj`-l6!AX_qBxk6>`JN6-}i|S zNg*2-_NyXE^fexEpX+AL&Yzwng3SH&Z9++*kSeK)FUbxH7iCzr^vTDM+p0a7SpoDl z_mD$sYHI%Sm%qTCRTZe-^bxsmI1*+E(FXn~3y5MDE)t1onubBn82+S4-%2cW{ACK{Y1OLohkA59L6+q`yZ1c!;DhhK z_a5^0XtX5PbuPN-!Yi-5@`@|2!2hQ|4Tpc)Vi)fJedaUA%$w)f_J=fYJ5-Qb$&lMG zqKJbSTuw0q_MHWEM5ODniA18gxw-WXMQj##He*;GBICO@rs1x8Na?JV&N#M*3>13# z5PklFn2$bz(cYnwJ~_WPj$R9ZRElJ|Rx*{yS{d##RyLVV`T-0*kG)MQAD&5NQt3<@ zb&@q`O`_+!q6|9mKY$ZZ2U1$uV|(alcybk^@Ar1^dh9b*QC6*5xo-VBdUI6k(1D|O z?b@|t$BymWx!blK|J$}=i(MAJy|J;8Q~8GhilahFYPhF$w}>MCGtiNjxDa#NQIw#$ zxpw#Nou*}B<}$+wY{6G`_PRDqZluXgYj(E@W;}hWOoXl`k;c};3>L>#hw2k8CLIRJjG*zBX6kNxhIB_DP zY}&Nh^<0{}EL>AHu={DJopJ58*O->+xK1b(GBf4}ozLv$QI4~I{rXX(Mxh`Uh^%@b z9eM(#rvlOcyjC<2h|c_`Y4Y8!0QC?Q5z5>OJ>%TRk>eP74(y2B4L^iecgiSr~SMXMa=He zqenM2HEr3l1q?%Ck0>I$&yKx3YV)Q|?<{_&Yu{Oxr?NFb&(VPr$wqCsj*zNx#Ju#!sIycg`WMoz&@-DiU2E z?g*x0b`stRpkAW(wqu9GVUSn8a%hzddr%TVd{lAxS)iiGU_}tKnYG{trIVQ+(EZVf z&C~V;NZL-z1DUdNkX3i}r|R`W%lnwog_eglS60&N;EP&b5eFgiTHeZ)(DFKe-;WFR z%!8#WgcL2EO+}25D9VXsG7^pCVleQu$Z+Jykk!hF<8ftH8~QJ378M)NW2%MW6vAE%O;;S~JoAUkrY0smWYw{4n&kU9ZmfU-9D36&bRHM_ z3uCl4!v)8|o6wga?3m!N4iQGzKY9l#sw?HCLYUM>*pPItE}*;f7*Iq9*J)s;9)~NUh=UK9(WYr8lgY}; zO6-ER_J!LD<_%nj5n%4#e!4)HqXqgE!{M-HTX3$6gT4I(jf9yJjc8FtkUUEc*eeWj zr`On~gX=V~Q^(OEu-6W})gh#Bt`+~S4hx4ujMusaJI9u~$(K0K|Y^?gZv{TxSDa zz8H(XW7YX@xO@SotE;QiG_|a(3~k(JaigK{<%(BC5g!d8l*~RAEX%B_sv0?JRKI@x z5C}1atO77l5G5D8Js`_a;<%rmD@&zP*=%OVj-3E@`Jwz==>GDBc>2sTS&pPKf*2_q zGi8P!DoH!GOz*5BBR3a`Uko6Fn%@Z6Td;S_=lmZXsH#j4^6Ac<4I4I$95s?r9)K?C z&gTD5p}gAKT2Nj^MS0$gMRU0a-~(So5eE;5-9!<3o8SwQ0)FeLlyxt(`N%MFVTj7I zB8uR?T(+F-dP#Xw%G0D!g&c~hT1XQm!Oi$qMnl>_mi(L(QA8k%fdZEg?9m|yq>}0N z@*l0Es^%+zl?BFL?vO;6g8zF^C^H!BJaR!r6mif1VhKuuPC*Y=IgV-4i&Fd`U?=T- z7_YYNbm|-j(P7m3WCyeFChB^Ka-(Y)ALJ9#lI`fK8dGHOogqt_j9g7y%*`eIv^n~g zcl(fTTmZfy1?1y#(A3z?DfFTS80fq)MHErQ#|W}OMVgPQuE%0g!!T4u)l@)MRRyXa zdsS#VAc>4{1zMei2&5f?C0&PI)}<96o6gSdbO)&{R2r5dl_dkBYRpduUPg@SF*0c- zPXNiabFL(LqU86+Nun>x!N=#I(FMAuh=S+&Htcd}yUY=77g0nJ9}9@P6j|_n%eJ7y zIj%#OC`kPiBvFC_$leU!=D>nT1zy9R%x^;hy|IHthqRy5AA3)H;%SFWjg?e8wkYZ0 zVPhs_1vhCslB&=Hwe1nGXKZ^(ryyYEV?8D2i9$FOO{Y`K-hF4vm{CxjVf2&oiQdXZ z=>(|uf6%$S^73-F)D=<0e;6ROy(cLMIi0ub3|#&@3~A{G50wIwj$*fQN)1;q;|hhUZ?0ldEh&+9zR5scly!O)lhkJ+V%C4pWpR=4VkPb zYl2MHDXUm+LlDV=?j;*wCSqphc0LvazO1T^4fWr+=JSV+9xiw=Z+nG<{{jc6JYCm7 zd61i2Xc0yH=Kzz8$fVbl*R5^V9wL1cGVlZ4&^K+|xM;~D_HY6v&uz~*WM;IiDxP)g zGTEDMy)AB9cBKENBN|J&P#fV+38%@O@3wH>}^Vc=@|f z@mi-gm{SzzPn-}fuV~3;#ZWXbLN>dBjlLJjBhf|_JGn&=1}?a>OFiym(>>@#SVRJB zRq$m+i5iCNx$Vty?6lj@1EDf>*{%*B1d!;;71a?9bm_6ggSMtc6!CE&5HwxKkLP*~ z4fSum^A@*iL1@qwpfHHjhYX2TR>o85s)0jx*VTt&VGoSQE$4vF`w8p;w0%Jr0&?C? 
z0Nvx`nTz~%rX;zprK+0ixuASm&esO^c9-?yYBo1)KW3}EYe32OeNSPa82IJjFOWQ6@;utQzKAHT4s#8Gxn0b; zE}ci03_d;wZ4DK`mJ_xVFvqC4YoHH!#~u7=t#A=V6ww}}wdCeGd|buje21;9O1h%R zj$_sIuYsKu3Wq$;5oNhm!K0*%H99)v(;=jT^Y;nq*5y7<);rfB?Y<6iB z)NtySu0UE|w-JgcqKJbT^jrqLIy3|Sp4vV1y+lwzE9FV18BZje{D58sPl5FI6B!0C z`@)$l(IfiM3CYo!ec}-~89c~7kl3cv?d$kk<$LWw$h~){Xout-9H%(M1gNpxk1Yih zQA82{y`ag?ras?tAds`3B#Vw?XMNA5Z@&^3f*>Wvcf@V2XU4cA)?@TXs#1>*d9`!M zu|r5Z$3K?aOV=Uoz79;y6+YT65?v8R{Ewk+&hxYMIT8e|g$_v{r`%MHErQ z{}gf@2hhEE>}x?x4GZM*m~7K+DZ+q3HLmOFA;a~3Spq{*#6p^3*-$c`Pmc~tBDP>o z(hyNDC8jOopnY9O_;e2qyGyQqE zpn?FCKy1JCd_H3?&=y?;76gx}T)lc#azK%RUOeQF7$}VsTpm<@&f5kALUlwXa{K#& zGU`qBI4D2-!xey3B=EsKmoo~B>q9};a*w}#WlI2TO zi9VT+d?0Ur;ERV$>z7QM&B^qXX|o#}noBAwY)8~Vc>&DPcC#1hdr5hy4rorIr=q}I zM6I0uh{d3|b;c?A7UYh!qrA3*tpq}T{Nn&hU{%AOQjkqS%`rh}O<-}6nts_9X!~Vg zx#z0K4)DVAM_YebAMjmjBnn`D2GNIE-FlRHNL>XH=31R=40%~jd9>}d4{r|~W1pd; zo@jm94bPFI%Y+9D&|NY_LYBdFsJTmEFHQ5jcA{0Jy#IO7Q%3auP}rRGOR5{QPci;{ z!gmTBQ4|T}*cIi;g9n6URnd)btfVBHwlrN20*OBV43o5j!a@U&q3?~H(G?InqbI?- zC6*-n6qOM@WZRlYTRvz4F#;n>I*52E8A-^nCC1_^8;gd6684F$sT9f%2P8UwhlrR= z*eY}eFJSK*q;HH+N*v`^W{^+Liyyl7%<`o|iDX%|86*#ZFz+Sw6^wIA!m5b5%DRDW z!J#{O=oMKCvhyhq{K4ry{{kea2XV^P93OCW?#f^Gi*T)$d8#9G?L>-M)XifR>IZ&2kfnH zswk_m+`!Nc9Fw`i(h9H)F&RgpbU=O!;*qcgC=;we6Oes1h~CVOBd+9%f+`Ct1U7)a zN@x3s<)k3@76Slm;?mc{iS=-dX?e(;@~4<|^9uXyJIA;vrdMWHrSa89pOpfxf!K?L|wEsq*QB~Z~w?)frN9$oFB zr}n^%$N@SFtEGn`kOq{83K7j=hnv_)Lmfzk_##vqRFzu5k4YYCLVAu2E+8gFU}3Me z0=c0nXgVZ7H? zj^#M6@1ik~2fASZ>7{^HV@lKkBr7;T!-Zo$E%>Cqv(<(QL%$#v`VmV!`T{b5UScXX z#u@{PI#344hzW7lADGG7hG7t00fi&oI>Mw& zk(`hRnW6?94{kC4kiM`kh1a=#uKQQ z0yc0ZkO~-tSb$hjP?&5Z0%=y5)v_x4o@ZNWN%B-p0UPmmL-~RNN(GgYzD2?g2RXMz z)d|VEkPaO1gd+Q{MU+X?P()&InMhBP&_!zs!R(-CHi)bk{ecSUK17-%=x#|8-9Uws zf+|r|P%f)1%_Sl}{E&)DtOyS%6X`)ii02T)fyp@?(=ect*g|W|NWR5ti|PxJa5$5q zjM($D)GiKEJr?=#;ucwynF!2*8BQWHRu8^4O^u z{tA0Ks;RnVxn$sBhC<8{+0B}E#&!ZtEAbo!DxL~e4huT50Z53xJw~KPb}TK=Bu{KH zCc#4(8M7b=`otPCMa=`>Rb`T3#AB8u`H-HeT$GjqL{YK! 
zKc#>02sW0LMISnjg)o%a@d`k9;Qf=^*^fYnWla%Xtth4>IOt+Uf&dcrV`ZT~NS^Ou zFwmoP4FkPLjvdq^z9%`EW=(KZEkx%e(BDihumw~sewG+F)0X5V%uGW=gDUv3hyhlD z(nrf`;AuLRL_yNa;%OHvm+xeKH)DCAFoRAYVsxSMkw%CznPN!XhhHp&4rCf#2rZI# z4ez0@`iVNccMjmPP{aoY7k#YpuyA{nb9OST&9J3472S_fW`ik3BBjwtEE+2*E31$t z)$<)$1|dMoC~-%$jv>mQIdkU47hinVS!bPh-gy^Zc;N*XT!8H-Klw?Nm&v9z9W}#p zsyI$)`t+IKyXC9U#zK)$Dx3Yn4cC3)|6Q#+vgbm%#Oxr-6nqCv7Kj;FGzA-zMHM^^ zGsW>e7s{DyqevG@7FJ!)SL6VyYsSug`+GM}nKC(WX&DbNxuADR^cfJEOH{t(d%={k z6MlW$t+L{)u}G7fz3{3lZus)o^gs(7=#zmLSYD77=tD*-7JUb7>e~oJD_vNd;73GZ zhA$>1Y;p9vDvKdQNhVV_ef#DkkDd?B0MrS?2h*C8GiA}1Kwh%!snGL$A2Do?u1vxx z)}ivki#CJz12G#2CK$j+3|Dk~F|36X&8fFudlL$rYDlVPVEGZ1u%d-EL)Q&ShnlX) z(CZXg4@C^HvTmT}F6xazYaOBP9glVZ5D1w}`l~nHbm$R>d&!nkT?-@w`V-T#*w>?2 zxenmkWT>bhCr^|eS{G#1aU+USGHcqD@))#rj5cRO8cMpND~lN+hzIiglh1hm<$rHp zvu4kZtq8PXUFI&5L?wpwp1PALR zfDsq53X+zu+P;kOcLm!gbA$#B!w#{dZ(Y%;32G7teml4#K0;tk$Fzpvq|;dbAb?Z~Tbq4&QdJep%0N}3xUe*x zAd`-#&A5ZLU6zwp$~JAyR}D=MhxL-ulCttLBcx#RJ21obNJ%VI7L9t)spLpSd1XmS zG^E4wgcKSPXryJaXb6k7iA6h`b$tsgNG1y4i%MC5MBno|n&iV-=))G*j?n{xd7Anz8%S%h523QarDr-hK5-y8| zG)eP4B^t%CiqeWwtYS`9H00<}^QP3t_taVyjl>SZLk5BCt8|W; zM2W6s1d`?IdutnSxZ%rx{L2$jT`^4y${@rLTl%F1LP%i&JLagHe|p>UMQ=@*JYmM{ zSwH*L&*|;9suo6fmz9^4mBvDb=-JVdGE)$erh~y2vU{`1l#9hs4_A~SQbdDF2c=FF z^;oE~th6Ljiskg~yMBAs=RQ+W5eY|NNw8DU1Z+v9u-CyAS??nQlm(J*IyVdTriT-G zJ%U+UFaWXCI#6|({m=^%MJ4DnlI^C>%8IUPnq_Ach0a0Jg$Y#O zm&|nbtJhwC-+lM|^@&HHc>2+w|NK_4UMy1co8R64z@L75$FF`UtD#SxfBDmoJ^lE@ zPu=jvYhW4<9ya_3x83sBCm(+Jp$Cqgf28AJZI+EtNJSg4I(yK3bXY5{Wm?&UXJ^T7 z3{+SvW-5gR9JULc!$CcK+00m&2N7D#097P;MSksf8Tur2M+NZu9bYuP!x63 zmUUNL{nhJk{MM*db((z6D)`alq%?A(7JrVc6`h7q)@bK(5Rulm<8SR zE9+lXQ3n0av7iFepdl4Zd96G$dH&p8wcGx3*KbNZ*AS%@Yc_6Zj30jTX^%Yh>_1<5 z<@s0N{OXslligIhsZNJ!lgYR?Oax07{b*_Vx4!fJ7ytSEi+_9an#(W4YBJ%FIrlvD z=jUJj*X=*Q_46yJAP<+O{HQ)V9bi(QIsS|$z<9z9kvcYF`(04s1B^aP*?-ek>MO5 zDj77QuY~mQ+i$*c!bzVTJN|H(*fkYpnqf>HKk}Pj|LQUGj+{1Q*7=uQJa7Jq*MIp- z{rdMiZvMR0YgT{l>)$x%>`y-O;Gaee?O#z*=FtUMnO&ZNl`PtU*OG`c)lzUIG0~C^ z97zw!nuR&7fiIXOubmEC;owDG@uY4Jb=ds5fH*fy2&56XJM;vq9aSOiq z`RiW(_Y2?r&KFNU<&^DPcFHQf{F*xWV@6OKD|_v=*M9NSpMK-!TSko>Wm%3SL!~Rc zYm&g_M3yz^v>A#=2^t>|!&$|0>&%OBC+ozp=TGli@I&(>YvZoqKP5-jG z9h>X6Z;}HiX=WY0B+`G_=Rf{Lu&iNNz_|BrYXHJ}Q!FdsqSft>HdK_Z!G6GmuY$Qt@WABL4nB#_(B0Rls3>|u5)3XHoF zS<#4w=u2ZRWYJgDWMk9fH(s-BGgeu)XK$Tr+n(e!Wxbu-_Kq4huA;KKs#5#E|GW3A zH-0^kRjl^M9e3OvcmC?vcm49@Qx}N71)fS~vVlu&0FT08>_2euEGSNg9Wi>$D39zZ zxQ6~B{vvh|LaVTAUKuaYL9Y`mI=t>Uw zQ1R8k^|x%=@ZQR$u}HLTPvhndyGzTeo8s{e8#XwWHEZ_NjMJ1#*Hu?ni9yLfo_*`^ zBW4=9lQpeGOMFoO>Y+8&cmDp~o4)r$EdpaU9B*n^_~skn?tudaR#w3t4@aXBY(Whu zGr#J5i~z?pB;sQxjJxpMv!D6%eLHs4`m&A@vK<@M>nJ49<6zf{s-*h=_}6n!zxw{} z`es2?H~;gw<*)vG)22;`f`y&Og#iLKwA0Ojml6WgkpfduQi4`iKG15ZTlBA|L#n)f z`>utrzH#iyXR6@}3k#1Zc@PcH3mc*FlP317DZAwd-@E#YSFhW$0SkRti)?y-o#qGS zWtDm~W;#|lrq}eVkrZ0RP$C2+clNP}j~fu4{UzQK3mPKXuXh8#Zq|;rQeJ{O5;e&pG7J4?JX=>=ql* z28YraFS^IUnKo_m_1Asz`s;6)J!`h>@T(mbU8q&7YpRE5Q}GhruZ)Vele7fi^q{2) zv1o}TODWgN299mpfgKoHB$aU-RebgBH&0x!;N;^@xajh(9>e9>i}Ub$jZ?}1w^ofr!V%rI6M zvnZ4q?*A~52M(-3CYbHIuGiPsMItd|+Me=qr!xuxM(fwFUs+ih2u@M35)=6CFq=1T zrfQLmObNXJYFu{Mu#txzdSoOTJ7K{wfJjVr9eCV#M2io zUEbhZ7oL0GL%+Sp&{bCpPCjA&z_Rk`Go}t2F*KgY_Ny5@U`YP~gR732H$T;4Enc_INqNc1{v*w|P{_^m{e_pd@tzm@t4PVe5YidRwb@aS&SUCRJ!w2=N zcIQHK5V2BiZ;|YfZ}J&OB>k|I%On;5$xP|I0pk_VX{j5|ZrG&pYFh zho5+VSMzbl&YL!7IM$WXqsDD-%9ivWG;7L;LDiwss;XV}^~W4NZ_J2sSRc1;+3nJE zoupP0_py_Z`*sygkzRKoU0ON-z5TxfHUf0f7CfVA>=wGN!6!o7O)9N7wa}S1L6j>h zN+43%Y_`0-s-d9)hLY!d^!yeL5g($?^ zW9Nhk6Q@p}s%hG@e|u)h;uX4K#47Y{JGQ2iHWa(^vav zgp3X7P-q>$e(q_m~B5lkH@#-gQ*jyQ7m>^V~h4;pmGuYVJ7aa_+o@8VM)f9$c^J@G9Y z)?RS#*+WK+hUNACdmA=x* 
zaw{$lF*{SBn^ksg-?eh(ssV!sPnkBkUsYAz-klpZtXsHfNoje%xihCB|$4gD))b7xI!Ntw%+ zuc)uB9W`=9P0fIX3*T{Fx?vy>Cg#%SCl2)_EKROhq4yM89y+g`hgAUb{9!=Y)jP? zO_gEZnx>7aLZ@(T2RWlXX{aE+5Tr7&nte$Og(FZ*95?HFHfD7+9PtGulgUP-QO|{C zZJ|pfE$Tt5G~*sv%W)O8L@~-oj~;sUmFIrxOJ8-xFxsZ8zN{IrT@_!5>V_a2Y12%m zn+;7y4P%iiMXJQI@xs6FKj*wNEmKt_#Yx8%I#qzU;XpPl)3Pno&^2@|Liw2lRD&d= zw`ZQvHFWdtftZ9;QFMh!w@?`&1G*=Lu#)m@41uw2i`~yeC-_<=7{e{dlI(ddhRJnJ zo`6DGwv0{{?3h`Lea(`tlB2_5TVIP6NA<@=76)?xc9SSBM`3_vCWB_bM&(R~cmOQ=BSd`CS9y3XzO1d*P; z=xyX8TETgS1RGlO5Gl1lfTv9O_Up>^NwKC97zWgaqIkY>^%Ymg|Db=$Qd@CEXC^vGd1efjgB|J)ZGsKuHVcn+%IBWtV{ zbpIpe;5Zi62~a9pIeF5YV~;y#+=PK&`21%AU)A)mLoXU}>DoN?GnNV&yHK$aj#i+l zn5D>%-T94bwxj(nfK&&N^4RkdSmeN5I(1#E?DE>e4-b0Dv@A25g{6`gCq$tWc9t0m zf$}ioc~ElXK^IbKB*3UN9F$PbrFAf-0$nzu#!*x~t47zO}8+thkJ=jW5SYk@ioyl1K={OZZ&-Vjd2y^x3L3CCc6`&Ub zu=+4-n~orNl!OTEzGWE3f(A|wM4zS_sMY1K1fYu~>}(D?g!Dd1QlSD9Ns}%^bq8O% zbQhNxfX4$@km(E*T@+DMI|WdJP(@nTz6RyJ(d z;Kv?+v~F*mrfYOlGqhhtrRmO=3#3IlK0t-wgbxq3x@R01!ib-EDV%{F@7EVT-5Efc*FdX#Q4*G&_OTq!V#^FmUTSt%s zJ#c|WIsx6XB(hquF{j?cI*2|+CeRhp$)wXk$7C-lqHd;%M&)+qsA9bDc#C=l>P6WX z(Fd?@#(cq0Lyv&Xq(UtK5K1H6t((a2Tswu|~42@aSX2)BaE32xJ9R#U$LSUrY zv^-uIwmj2)0b%5|;c>W3AS4$~oJ2GM4-W8K0I3!r6-Hp}SWZ4r4#_jhfr^GTo2bv0 z1DL)P1Q90oAaSBBStcxdo*{zxJ4K)Y@`g}U{PPsVlkj1-K+8i`au8-lm5pXQEIWax=YHcr?9^BL5rAw)NsqQV9rz5QTbp^ zdTLGz3{lVm!6B8JY6I>>2Z^w)V1`2>grcGA$b>Xasw==oj+#b6tWgEXH?$W(4hKrh z10bXYGC(}erG=VFWm4y3=&*((CUiX)O9c)ih}7y%z?KU9lETO;PmxKhr#B?Z=zl@P zSYQd%=^8C{wCl?j;?rYe7&)p8dv^qNn}_sBDtoeuYEno*k2-?NNXDolkP0dWwd@=S zr*vF;dkIvdh`s{jK%GoRn5c(B7$OiKu?kK7E_)vX*Mngj3De6wpes(FI>UCH{{8!t zO+hz9aRLEylt;4ET2fa<779vh93Iww%!!Y%WWp%r0;KR$HjYffLGLd?6zF&K6_ufiBJPa9K6-IolXD*q!fg;%0rPtx0hdoK5 zzIAO`v=JgG{EQo~9%+PKg4MBLI&=@@2|+5NZ@~OkFk~b;7#rTzjQ#lGE&(y?f%2j{ z_9e@zD(x3zSE1v?BVDVTK_k zJHvp8NU*QgfWy58>on@g{SM+dHqdmNCk7hk2xNfy1z06);7RjER74{nI%1PtQ3LQ> zC`&lbcM%E|nk8qeWgxlqGAI=~vJ_zLaoI>{)T=ly`JR$SW>`&V0mT48`x7Nua8=)g ziVfut%||>a49MG)Wg@}W^Mai0P$H`u522cHe=9ofcLS0^(n)& ziJ*0kub{lwNpP5_6Chd-lotc#iE^IuYWnwg97~cQJv2;(86!PrBb_*RoQ8%5%d)sc za^xZv0Mnda(-hLedf27qG>3d-@{pg7cmm8lI>iG&Zx7qTwuRV&?{EOsap{T&YC%ng z^i!Mw?LkbI4Z4TIkQ)`3n_78vTFHGuSs-%jmH;0%G~rl5IgpPFGG`AispO}!88dQQ zj3?rZ)ebCKL5_J)CKNwcf3}wcJai$N(QzTj4DS%s9fV|3<@0Ey}_WB>^~K+;I|GFEx$br^MD)_qZtM4PQ7JQk!; zQ3M0A1362tGk_cjnk#BPeY_Mlwyg$cz|I0A3UWiXbV@>mc8*1WxRi?8hw=cXu5#q0 z9UJcJz8@(97@EmLZ-8MbUfu=J_YeklyRR%u_`}5KXGkz^gj~FRfHCQb@(cybp<~;V zrcH%-GRlLR4@0jbB^nxbGIVP_J8iBxf z=!+MwZQIZRahx4tD>)mlJorEp6@s8BCD_=zo0tjNX zX?*^-!3GFg9)tuE5X7QUbT?K_x)j+hAPPNdd9?OAwlihw6i~*1{sXL>mKTsR&W4E% zF>RZho9pZA;fG9f35F~poJ%Bu4=eMttNsqO!bl|2MtM}F+}P(m09Irq;_GT_amXLC z+&b+^b>cqAC!Hzdsty)>@lS0MYbq10J67RA=S>U?zgfI`4X z-r&bHApS&T5*VgB%LQ@-a-(sl3%?Lc$S~rw0${a*#^}^{;9^CT=&}_b?gE> zTNF_ioyCQrz!qSRHu_@{j9=82dD_bE1|+V%#=d7Q{)fQ^n^qxTh3Q}zkfz*`E`UMl zp7P|d4AGP!1pNmL#PCINDTe1uC5Nd7;HY?0M82o14{$r3Mqp8 zkXZ+7ftGqI8(JP)R|!;VPFC8$CSN=4ibbI4FnqYrb79re<0ce607tv19Muasvg%Q6 zM1&)Q5W-Bh2`>gEvfo|+>3bcwuRvvbz0z}Z!rGl8mve93fovoL4KW$4U^zOeryGXv zxj68 z)o3)DTj$~J1t@YaqyG1U`k9^Uz|5!DT*7WJLIq|)=fDVJ+vQCvVY(kU zumgYAv{`=O!LW55$Ct@GAnx$}OeTZ18vF_sZ|~l{jC68>*b0(xc3K&wF{O;&I2*vO z!2#q@R#t|vd=g%v4)Ew@!n_%6c_TU9HN`zp2iOxQi9|wEv{*ETmg5kXM92pgaZDZh zOabWu#L<+5nYYMPrZ%^$aaTa(yl%Vy0ZpP0<|pdXg<-od4i3YXgQOup*7T}sF&qhF za0|wr9&k)&GB||5_OjFg$c+7h&qTp-P180*^f05WL@MK^mr3rajLE8|)3pjx&s9Yz zbzQHnd2>lgD3H|p`lgy#1R5v!ERbXyHn8X0W-1m5)2dFlh35p$@!U`-g!&*jZCaQe zV*U;P)L9U5a`cxwkIdut24KuQxAp*3rFeO_Bd{ncikYQ{og$$KEj?WWquG;|N4H)I zffo$y-_IAkNVL=ebudArfs#P0WRs>+soL6F2q2e1?r87&mqdpK|Qb2nGlJz3i2Lv^?54ratHgfJw>so=FX=n}*Fa!l2db|uH 
z6$*v1<%ewBG0iLlBUcFowkFvJoRCPM94C9&QHTEa?%!H1@hz)1X57+gN1k%uJ-@P( zo7b*gCCKa%Nnf>6mHn!RFIoPtw-!G)aNxMNUww7ivZcGWZ7D5@1(Fshp)Y*l^Os(9 z!OJiGQ^D#jt7JG}ipWYR8ft86qNgpfOZOSzmtOM+r@~Pbx+ffKE8V|GmygM)fR>q4 z<+^n&Zg0WNlOo!I3kmrI`Z70-b=Lq_VB3|JCzTj_NjMUVgeZVAZPP4p7-9;t!!~!h zZ6BZxILZc|ZE3V9@6JFa`l6h)oMbASj3?9axZs)@JJpzMXlZVQ`UbttbseaasZ_kF zxjEk093Qf;2$qvaH+_LX5@elCERZL(K?)&>)-%9v7rGkRTB#A&z79+Occz z*6q8eOq!qvrsrkHOrBEL)Ua*K){O1fCsPgarg$<*_gB%Xo>nCDg@3;hs~ooYtv6)B zZAm0|*X~Iqla`suB${FFrk$X^DP33F+}v23$;Nl=sQtrz51)7DX=ZbM*0h^aW?fx< zV_jXUx!$$1zGHc|>(hr$@JILG(29;aaZ>Eb74acKa=;|VH0eeWCK4YIQp6X-uBUlH zX*di`fe9McR-c~jVgv?lAGAuZ?QWGpgrjW0w(XXd790Z45HgPKWF6atT8CvZA3qP+ z1##2)+P8oB+q>?6`MHIrjA9WkJd|rWTxd{NLYre&dGq3zsh+J7ScZ z%D%UL-5KYewR+R0RsVV+6K_C6U=N|bFem$X-Jpp51Bi-F(AtE7op})G19yUp;}{eO zv?Qgo*;EEDlgg%&8LWw^tdpUK0imih%?dhK?cVsZm7B+nn>1ulX*imlKDTDm<_#OS z#6NxQH`cA&wr#`CmtJ{Aj=6R5b%LOI8FRrI=Or7rO&mM6vNZOq-~4v_mTjw7FF)gq z1tHlqLgCZT`}BJo*KFCgY1NV?QzwkL@!Pi@dH9^0Z~NK%OJ4PLF!@iUJBMQi9Og3dLVtkDR3O0y*|S2(Sh`f_%Pr^gtbG1BpLZPo_s(^l27MAU?1`e z?pSIWHGoLjirg8YA`m|)vtW@>#=Zb&z^0! zeD52VoG|}{Q%)&~m5W%3efnG^)XU!AB2dJB2a{MbymR{i$r}lhrjQiHlktE3>!r2p z-e0?J_1d*-*R5NJ?S&U!NDFQ5u014|GM2k~&1PRx#!nnKebVp&vC!)G)+SByrI+5A za@66GVMBabJ?^+;s>&jQP$vamT|-NwCxk;KCmeVD(TB}C_tJ|d&N-~MzCNr=u9JQ7 z&9|q{IlZK8_`-#6oPNp)zx&;tE7mst__`}+j2Ts3GvMZHKXvA57tNVCe$wRe-~IMC zUDJ+*Ly9QUN7-b0??A5|R6$Y(dkKo#hv@$UAXQqyhGYl}=ZBuhWL&V*G*g$>-pl)a{aE`Z+UC&`er49 zzET85^$o?>+3DsYK4x@~CG0|w=#D5%aY=sjtv6P#d~eyZmCG2mZr!?L`%XWgNA&21 zbUM|G@ zdC(aTdJ|n6AV=M}o&Z~xKc+=M?mSJdFb1N%4oft04g#>As;b}r{`ZbO_PFEb zk2_()_(`w5_G;jJSl@C{+XMOdfslwdT4u8zmK=}HkX&%VB{c*4mG&Rw7ofF?#g$xBm5_ zrs>I4(s4~FWmYO%U*7^Of&7s@X{m-T%8DMCYnHsWZr$dqzj)Ib zpE&=-1q)`)nh}b?Dhbm!c_bkki5iC9+-$w|_S(^tW}b8TMMoYzW89F^GAX-d&5CV% z>(0LH>I=^Q)VXJ#d-$Pq`t>g(LJb7jP;I(*o(iO=iFnyeP3WB~V*db=1vznD$8id7 zq#_|BH3rgw2y|wQ?!NROOJ!wcm6a86<>lq2rKQl**&Rc$J0#zytMHoV#RPZlhE4jA zks~Kf-MnUnV5Tyuv}-ws&YAOt&s{ZiU}vtWmjETczkPZR?($Exvf>S?7)#I%LzD6}z`@dEw>PB{g*R$tMi1Dqp*1*{XGG zw(P8vMd|Q4GpA3P^}t`BU%G7FIj7G*_0$E^XH9+fZ_hS1rZgjr>Vl|AKv1E!YW8r; z54(XX=kFQx7u;@!o9ON+zIWqBVvQ4XIG;w$i8jrBW-KbnRwI zQw1ezV|~hIO2QE+2C`&mdPE7NL{nYHX$WK?q7Nu79pKq<%Sno{fZp$JYxHI(c8e~S zTJT?MwL3xE0j%8xPqcKIfOQM~6!t`qpj(mqav25dN4^S$$@1082eMm{dkrL`iIUsv z7Wu`mmwCsj$-qg9?FHYe)rY-2@<95G_P2fHLB$j}ey$z3uYU@fx> z_U86-%%OtO>H%~gf6m(@&_C#%UPq_+eW(fV|6Zf_+4c<(5}3EyY&H^(#7bh&^zvf1 zFEBQ$LNB9hYHaX3xfNLuCxkQ~l;=92yv9@pl;$vu9mRZ+k&}V*R8FqV=qua zavaZ<%7hz^@)bo=LFu!20iP|N42aq5^=upA1s>#0V2x$-Is~7fMIUH6!&$fcNf|=%P zFx62bWJ?bSQTbgCNxK8A-rU<|0@nVVkMeHwcMCMGn=JPM)w>miCFJuc2<+n(X;M;h z-fmIY%=VS&95lFW&{;xJH#B%cp^(Sk2+tuYG7OYtB7rKiXdS9fqt!ZC6oo6W*mxqn zLQ!;l&6iafcB^CwsX(_uoQkX=9J$$wpRp`0FhF?_7#KxllS1oPG+7aRJ1c_Y$lAm> zs|LgiLKZjzszh(>QC%yD>W1yu^v*IvOQsqP$u%_1a}|1V3!PMAZ^=gQQ3tY0@|yO5 zp1Y0%nf2-Ei$2gJL$76(&?jkSlG2CFdq#omp_#txM@wRnaM*SnCN`Y|lB3YK#)1a+ z^u^NUOBfUJW3&+`8M-G)X~&&B{A^N$Fu7^E^$aH{A*4G~~L_wxAdf*4phDoN)V_dVK%_S3F_@^ZhO zf-)znV>{jRM{6u}A^+_i=-PZP+rlwsJxRz1;)h;)*;+~uKwZ(`UI;TE`g|$W^99xD z6p(w^d5jKdkjhHRp!F40lSYGOEYL*_k5qVU+0i!OD6NQ8(RM~IKyN|uNq*pG4?_5= zqSEYO4_32Plj><(Rwx{j0>xpJ$5&`xdB|jr+^|D)iquk89GencO8KMWduHAIx3G+JlG0M>Em&_0CLD>f{L@E`CnUj2Owuwd3b5dTf`3)0;RU2 zyh1IniyGy!g5bAii*4>$XW-?c8+lbbt;W6(N!M=m7v!H$o)5&2$NHS__6X{?Ud$C9 zgO`F0wYb}6GQg}*!71!Kgv8D*0BxywX87mgXFDzgqTqNA?7>G=vyk#g{OA%XD2!1a zWC?zi-k48X9{j{)EZnwjPI<0vx%47_vLRUIvA()Ys*$0HJ}acWTXyQke%}wh8<+BVbq{gh>ObR*)nV0|7;bEG_|NtfCU-@z#+2 zLrK_+X}bfHKKfx-CpqAux9H{fJ0MqCHgath-Unc;f}-2FfZjddbsFT!wORbKzpl|b zLb=(}OVIa@S`)G@M~52a4q=7ip56fU>Fe#;y%zpJqi-8QC_wP^n+s)<+E!b#GPswI zPDdav1-F}^AX0%n?nPQ2@D>wIRC 
zzyS}Wgy}vIO2{ANj5{zL$yaVmXHoDA4{uv{pyNCan>`#<9WKh9Ixe(GfZS0Xl(LWG zcTS=BJm|G++!X}3i$Q3u4LX8X1YOgmqZm1~)xA%k7O*n$n+rngcrCD{uI@|D=oS+F zKDb^2G6IX01{$48m#K1Ky@gK9IrtF+(tshLUThE6y*wE^Wd_IIx~w5D$b?IA1|NOh$j9?p;O z9To?_TyjW=N!5lz!M1d4Uq{`CKBdSUyt~+o2(TzXOTiRnWww^a(fSMUya66Yt@df+ zu(TA6qq***8ez+0<~*z`a-f5Jhqn07#UMqRU&6_uz{AI@J@SY1Xbqt};4g^4_wp`> zb~}9mkpde4_S;tF+=%4C!~s3uvFiH`)agLmguy=DUqU^}I+Oa9{qo~DNk;S<0*?wm zpF99RLBPILAc<=(o4h}tBbQo)(zX>i&Z^4U4FMDZtU(`~=4(5-Tnsu24;hYV2+NoP zSdMghI7b#NK8YXWcE}vHoP{F^&xA`H2A9Pm&V?RJHDLrxb)d;bd;5aksE7Q;fb4e; zRs%Y@O(n7ud&r!MBp)#$DS!}r#-yx7N<)=kHDCoYVL~hD7XkE!8mcxePW%{hB0fX} zpFZ+UkH#^QmFaXjc_>RyR9tRYi>fH;fvh`<;>q+WOi#j5*ln^etAV5hlA7zo918sa z%97d8B3E=Ry}1PCu-ft#?6ts~p=%bIQs2nI39}+fF zIW?Ol=E&8bLfeJsY6RqRX%Dmj<)V98i84)iDmZtH#27XX#+Je`09=NkN3i3pICd%l zwI|gQzZ6Dg^5g6XcmlaD7Q}=cCHY9 zCT0|pOGfSRF+7}iu}cS7^Vk&2rKV6y%4Zj;$t)aOYCo?J&wz3-aIBYDX5?5OwL4s} zcK|@SQcscyu}Y!>XaSuCqbY~_u#uo<<{R5b@Zrt@e%8XAKaWB`heo&4Yv>eNfm~z$ zk{RO#o}wzKs%2WXhlz_fAtj4X?obfi&;k%-r5{J^D%q6ne`9#HkL@2fVwI}cq$ck-#KOkY=| z0-gkfVh*R!)xqUSig4E8M#@-XM-oaln6 z&_t158SO}cqXs_8q7G!!G4D_+Fo{{bps;u5DsmWK~8rA%F_|CU7d7ku+6O>C&Dp ziWWV_tJCB`Ll8|syL1)uq{(!F1${)V!v>2UBu`US1Hwz+N=5^ifQf-igM)tYeA{u* zq3rAo4LqGZq-6?H#BNcR%~sZ<6C>zv2s=ic_zyX?_8}$3ZWll-`alOfjI!aUD|?g* zen=|h9lNgLqgGf{=~-Wrr9f5{Lskvc#S2^-H;_A(ids-%G_R>tj4ApVbCx=vtq>R+ zlH5GUfmwVJ#ve7AF-!p{ew*uX@WTW7$yjtmEa$Q#DuSX|t|=)}xFqC?jz=fx5DR6a zo}N!1-KTqqdD`AXt_G6`k{x)}0|#Yo+h~2@3@Z~nnD!7)v=rbc{{VzIj+0C!R6uoKWL#R3qYPxpelbmJ2_5?^|`1YHne*EHy+6^~ve!1jI7#lLU) zuIG3V95HKq*c5-QuR%?c^dmJy0N zo}XxLi6@c~df1sR2;{OQPcIWg=;*o} z#3tSnPp4DaY!>o^4TLI@NJ7jIA05mFP;_iNn@wZ&Gr}S0ys{RuvZ=rYvmr-JQ{<@H zn6jeM(y+IrB(keM0RlE?k@H*-lz`c;Se6-h7CpK~U-IK8w!ym)B9cj&-mXm4$cB^% zjJ|b714r3{Um(@Z@g^%{S!u^MGtsC4{v?KzF$0l=%mw;5IEflrE5w%6bF@lf%SRs` z`auzNh>jLGMtQUhQ#dF9zw8b*TB2$7z?_7rQHRjS+rW6p%JbFcmUKMPluoypsF`Pn zv>0_+t~|<_6^EXLW~`>YZ5Ai>{}?EJr{%wd*E#E-aTx6B{12; z>d%w|4r&5Z7f^Zb=qTw5WUU_Xbr~dgmv9eusA7-rbE&wf~Lwk1tFcT=?VyC z*r;*e`R>gnx~O1$>BXa}hSbz3Hf6A`yPEGRzy8%PD$0$pp^1u^34+J|_IJ&dB-6J| z3r!-bWve`77rGMD7zC^)pd9)_pzmQ$(xnV`i5A2Q4M^}!-^~mdIOtbDzg@6`xihA| z@XTM%I_20*OVW2$R|+38Z^3Wwy?55^nJ%>yxR0<0n4(%yTPOt^58> zH&#Ss&vbTgul>J!es}l(-H}Mv(KoM2n^u`rv9a$7SOTxU_PU|NhFkH5GDG)8jp-EB zwQP#C0j!5c!CJt|@$+B*eCf)i%U3O@cSkX|bm@{si{5$WnZJPnA#bFl+4ihylW+aj z*9Z2mHf`ThO6+j;_1Ay-tOZADqD9LSwOS0EkY{Rt{p(*^5_?CC9rgETpRFzlD>SpA zjmeJd9Cpa;%PzYJt1?CaQo-sWC>kVG_OYgBJt(L?ED718D}Q8IpbH=uUCSsw5}*OQ_1(U<4Zq4Y(1 z^aOgA^g!Mvxk<>!jNgw2nc*{!5>hZ4NTd$#Q$$Y}eFK?@qUmB4FztlteeU`j-+uj_ zmCM>k_V+uRLRUX%afbA)T3&d}=Gb9Md5dhGh)vimCpXN0b%S2NDdU2&yR*Z(2Zz z1*vEvML5)v>4HoVMCSV$ydo;@&QU;paxBab-jur_zp5%Y8%s+Mg?s5ok;ID(ON z{Fauc=9cDk3d@h5OePu|8=IS2EZfC$lS(HWn;L2x8tUrn&6Wl^z{;*CEU&(9Z(Z%) zcq&s`UPd}u;3Sf%`uc`seY2Cbu^^{2abyeaAeqXPM5C~A95+krayV2|T^5rgLdx1* zyQ`rQ8P#EHX3|*9Q|W9|Q&VF@Lpp1kj?>)KP*=OFxvAE(O|0`RnCSJ5b+z^J_+BwE zp{taZRR*pCD-#koV)(HB)#VVhRK^}TdUSPVWstQX8wu0e-5js4uT3;HKr+I5?>4`gRUmgF3TK^+@8a^&R6lVh?<{)bjW^*(504%_deWpxQKpgurKP3QXG|Y6b_^**4A2V4j2%0C z_;9|<0mW_N#EE0ajx~%B?UqI_{eM?H{mk>{o^twx(c|YGH7}7&uUfTg<;s*-~8jOo}kZZ}CtlIX5c1+p|^_=sYCjvY-+@w%qu z?&kW2L=&B3u-vNBaNv{1cE>Kk?^TN(v2FkCP2Dl7UOb;Qw&7QK7y{A12P?TnxN z;FcYGciW!b(9%?2+Z=C9x@1*Al)Sq7#!@n%JE97##oc70tP3P_|gt}noh+S}M%+nlh> zj22jA$O?4UqolgxN4MVksnbtA>-3W@yYw<}Lvu3K(%gV~XZr#K8*`_jsVKHv5 z&vtUrYa!_t;B@c?))+ z(gXMX;kjp?{mKnDR#sO2?l*TVTD6m%*AV!)R&Lq=VWmC3op$48bc;KLgi{5(f<$tbNwR*|oMQ0v=MBut-Ty*9A4?nhe z;oE1Nc546r1D}2NA1hbA_w-XwR+N{=Q^_Cy;+LydEnD&4%10l(x20t-6cpQ)WF;(z z2ef$Q&1ru?O*EaY8#ZQmy1Bur-CZ3CyOw#)7q3~f_>JYuS3LXN^XQqdsyzGT6OQi= 
z88Py?=bwN3&DRz$eedlz-#+og1(0E^kf9J%UD)cX$?nu0IB?+CzV@{nZ@lr!E3f?W zm%lu0*s!Tnr(SZ&C4c(U{nuW5{eAb{f8KfLLl-{r#1qdy|ANxevdb^O9MLYi=%Vw^ zI}aRp!U-q*;upU>z##f zE?xKOtG}`G-Nmbxt-AN2yZhIKom4|u(|t!MD~;Uu$3L~CO$hE~7hU|`vUk?6Ui7W+ zd>iXrOEP}SnPfM#gm%Z?>*YCgcmz7~{?h!})>Q}cbFsU7P-f5?P;Y(i*Y@wpG><@qZ19aP0UU_Bs zkl~K${^}P$d-nNfS1gApEx+xX*Vk{`)M$yvo_g-`l`EF4T=|1<--?;=#cRK~V&$T@ zm%n||i3`leIxmwD70*^Of?*17x?$IzY+Pw<@COeWeeb>Z{OkFDy#28e#%UG=$p?tftAiWSd3@nA(nh+FQE$+K51gWb0B zcX#}vp?0sPg;p$kch!o;OO`EOaO^RH=Lqzoj(|=?6*xAs9~@l7RH*NMMh&u>ZAX6g?m0NR~ohS+7;8>xkZRHdiYo2s51wh5|%e z7y;N4;|Z`jjmnDt<+13OZ@B)qci(fy*`Ijsg?|j}SN*GB-FebUXZGtiprX9=?KfYa zeZ;(}GY_41%)GL)(n*si-~NNI9e4E6a}SyJ_*0LLnK(`km7K8Pl)4?;hYzUv<_%vy zY|i0BhYlYzcGUatuQ=nhv;Y0dE1DWIG=uXs)ME6446B*WeJgkT<~P%4&HB~PetO19 z$3>%&;UmYkG$$^);JmlqcCaZ~@{<7P}BuPW{l zN6e*<2e|^BEE2=DO>tNyC!TVmWo3>z>M$%qn>TJSQ)yW1cl`R-gQrX@8Qi}v-hATx z`F1L6XEScrF~TL1rr!F!Z_k)G;?~=KaQtz{LA_HT9CY^5%~+-{DTZM{m4o7W_0?D3 ze*5j&YJ?|tv)dGn4z2nRGZC`b3)a}W6I4}bUr=yJ}SId|Uq z+wXtx``2B2{j{l5M~xaadGe%@BS&0+{g-aL?S~5&E<~z|ii+8@XTR~r8%Tv!YS5rT z2pKnK#DLNgMHY-mIBEGqhmKJddZHfoCY`V3U+DxymkcAW@b>~B24PnWoInROD0x5)#_7%d{TaLPA49>@pO7WHJ9WMFu9)*pTrjZ{4-Ww%sY?Cyp6d z**~H*H8#s77613gKi7<$K5+cl`o{Q-851J9V5QP!;c%v<#V~XwT7LMv`DdJb{Md#+0X~Nh;X3l`N?84}RA}YFjcJFXtn%6WpLa~wbk05J{2_w|UwrPVBZl=KHhf5V%>Zb1=bd);4WGZ}kV6jr^PeA@ICJ)n zts75XaNK~wL%;Ri+osN#UJ;T)vUKGYSDthJdEffMFUCxsbJUUZ7A;(;(Iv0K0cu5O zVSNG8Lqa?)fTC{0X(}`X>#?U_{K+qV`-}g(XTtE2Ru-!l-J%Swj9pXuAVqS@>nl;V zfkYM+pzj0mfE-X6Sl)E4HPGFpvu4S%4QtmLC8a}0jQi%dez3c?{u|%=(u|o?fe}t4Es%two?+T2P@xunMUcVz{WdkMj+N-ax+p<;E!vlv8`1Dm*Y~53L+Z}gJ zn>(kXqO7j2_O(~vtlN_g>6LW9vzO*K&10@&>g=s+Y)&OMZrt#%7oNl7Tw7bWZu1UB z)3WL2cNe|0ZTn6w9Ep~c{@-19?`le%bIGS>O`8g9`zJsB`QEMTmMve~f9wcRG$jpl z)llV#>Si)~)@)e2D(jUUb>xXdDyy2~$z2&oDXqwu<~P53!_HMJ5y{n=dg1v2H8tsUdhhN%VNH?HRkoSQWI$_vAcK`_YHE%?`sh3ExZ|r| z{VIql7K?&(VB>%X!3$frZh;xg*AgI+NTgCJ=!0jTc_vim^73-*rc!Af+O>1n<}F+M z^{eSWV8Hh6JFp7FvO^ExP&gbOFkrw17hHf+KKt3Lrc9Zt>qd1=e>d48yJ;9u?Cmsk zZd(!*dQ4U{*?YjSnj;WG*m~Loqm2>I%M0xmIEbw%wwPJR_b>hAIqTkebKSaa4h#@g zh82b^$do0pwC#cH>B1~P%N7gj08;h*^=sDFw4$bRAlbmQ>ViTeCb&msq8jUU|Ktv0jpcQ%*l~-KxdA zHmtqik}GSf%96hBwP3f9ab`)-QZF z>&lK3Sax92NpROrJ19cc)PX~XeD{_gZQH%$x*M;aIAx+^dbPE+Pd)XxE(@v@G;H4- z7@xpXn8w!E_DnSVULW7p0HAAh!H&;UR1o14??)~!pnv@|v~ zy|ZL-6V?V@T}$6s$kbF;4eA$Kxomk;)*m3Lea1uk!89zLpQ9*^o?GFy|Y`BB|}!-tY!Po#*OQK{N1n4nK=5eDYMQz z`OHO&-Z4TEBWxHE&kIr&mHjKrhxo4Sx~?8%pi!vM=*p{I*Ee(>v9xg1h(!ZQrh8DJ zP9R-bIqa5`PFOH?c)!CZ4Exlj7hZb#WdjEf88~{(qzS{etyvq0VJIBXpaav_j zY}2M)yBqCejy|phytsaiFKa2sJMGM~CQTfBMYI0WMWg8^0Y-FM&p#V>yG)1Uqn7VE$N{ck$s+zLsO z?ba`i0I5{+kw+f+w3MA`$c><)NS1A@`+VpV$>Z_(bI(18QhxNK z+pfL#noXNF?yhZ0XIv@J(kU|>QMc{b;Mz7j=?2Pk>FN%+PmnxjeUmMUo(AhsrK#Gi zN^O$Ffxu)_)58zm-@m%%3)fz!M`K1bmd&J9RV!f1PJm-vmU5B|ur|@d>dY_tK}6TB zbbQP99rI5;=e!Fqv056}u32;Lc^4je)SQ#fy`bO3na{npZ0}y!o3rOHj5$ZnJNC0*xw&-6nEUSeZF55d3c)fEh2+Vqplgr4ymaJ{ zQL`tHo;<$dcX!atWasfV;=G8>J>F1+xQ>o=|)F?`skE<9uV#tm5bwGfsc zDDNU{BTdz0ERVJmHX^nwl$3^FeDT$B<0c$3b=>q}{qFkh9f@>$(jg};S-N7;J8zz_ z;5e+>Z0Pqvcf-36y2!w1_l;p?vOx!>v4&*)uYb5F-B5q+jbAeZ(R6&e+T@dt*P5cw z;KJ1^V(hvv^TU((cLFkf1iGRH1;eteB-1ny?%4F+CsYI`cS#tQrYjh9rO9a69561VD>q(q;hOiC z)F%_Ezz762mB@Gw-OA`G8uSP=kz}_X$j~&W&75({i3?^-9ye>+gaOs%E%Erqt=lF} znY7^KQ-)1A*7IVhx(YeC zJK~tbrc4k*QVMFEulbH(TJdDIIhDaoQRsO)C{C~&9oNu9l8jY??jT0*L2;|9s)E>V z*|G(G-t9w&4xKbXEVFUOJk&zsV-mqcARZSZ;dKCO? 
zRLC|h zbWTh24%1KiqL;NyOi~xVAW%bcBpO!eEv=#v3HeyPVo^P0pbI_G_vp4Uy4aX^V7Fh~ z`rY!7a@nV@)Jv*Tp6Swk)38D43L)K+Q{dSBCq&rjE8lez$pqM+i4w^YrD_9Y+!ws8 z;+xA>t$TlOsy=DGyYThEH5(dhcWhieq%wTy)N#`$k14OHFe2qy#Yov$-5{q{bxcUM z)Q+2e$gnZv`&WccI_9w9g9kZ|*U+4fl~o^g^l?WWHLt3yA{s)=N{0=pDKV5So3?!N z*qJkDOdCID+}P2xYbuA?rtNxmG@?~jL~ClQMi1_P)U+wprLo<6c1)Tv>+s`G9y@Ku zxJhFNYf^Iarcl{{!P5>?%gawX`_w@bMmc`Ap|P%_tYqZisw_~9qapEPb1Y^8{sP`vDkr+;F~ z>_di+8#7?|C_#&q8Ksp9xw2F#SSVR2> z4tVOBfBg8zzaCIs5%~1zITV==qnR71UZYTK;9#dh?%Cbzk}IjUtnIZ@e}3@KQap9z z=dW|a*2J-3vY7D08lH)o6H1B%B z;2}e9z4Zskg|j}E@#)j2lZURJIvz|rQG~sjN;YMzkShr3ruskr{tsWi{+h?0cnZ-T zxbM%8J@!;xZJpx^Aw6tml1=dhBz@!hwby^?`+t4r8DzV2+xE9!e+^6P-=2P|q%`)> z0;y*OO0e-?jbe zhaUHQ&9xKFjkSU3HYej?uSh7iXZMbOJontj&D%<&WvOI5nMu6%@;~P;tY;&R+(pFp7h-Ja`&8FrCD3ll_+jbLK+ty2b7>jO@%)~{yECLNqUc7km#EBE3 ze>si=t?l;Pf7Q^?0Nv`7g!3>|x7_kQw3?%`vf_#>u875AAUm+$qD70Kv%=23@PZ4& zk?`+-e=pc=_3DZ-oee2b8CQh01rR%?( zJ3xoTapT9slT&z^Y$8yR<%UKeE+o=B_SPC9{oS|TJo}_$et-A>HOG^$z5aS*!(LZ( zp}^X{P!cOKQwhP%zWeSwp^)*&V~_iey<^L|7ypiKdw0;}xj*>U7qTg9{nCY7cJE1> z*599c;)dIP*0O2C376k+_T|?<`M1BKWHX)q%9p;hYV|5zm&z;3uDa^#Bab=)7230B z&(D7T^M!A}y>;8x@7{d#)z^Fxk$?8RTmJp>i=X}cwGaRC0gJB5)V;rSQ8=RS-o0b# zs^xb+@o-af-IC4kFMoSUOS0wHcl`SLuio(Z6Hg%^M%M@6kU!{J57%{Oy_Ne*UXpSCo}M`d6yho3Fp|o7-=%-MNQ8CFD7EJGRv~ z)|n75StU6)Gkf;#%$SZY%1{6GiANuO?4r|8TfK7SE3dp+5{;>fkj5`hN zWf0p9u<&1T`Q?AT|2}3q*k8K(^Jg4$`b8I64c9?brz9}61-m^y@ zg$a`f{&XE&x3rTzLL1@vlUOILzPkEGjXp(DUt;u?kq2^-Xora3l~#O;ZFb?O;t5 zLpC@w-iY~w`GT1o3CBDi+DF#RLXd=LI3|lqD$@kwRuma(x-5lKNxP}3E)s=iXAKxI z$a6H)GLxysit@6oZKCd`mGwPZ$W-Vo64@AllZxwfi;|#*qGq7jfoNMP*d~>+Qc=+y zQGn$2FROxzW;^MU%CeMc3YJqAir9e2Tqp}2uL>ucGV6UfOFql>q zR!~`K3GBwSIeg@}fi*Q|vMHiNFx|SHTUxB#@(6_vnitd@$FZhPopR=xXWw_&&x&c%%8X#KVcpwrY*fg^UG2OsbwQOzN zPo+vKL*ep>kyJ7TB974M0}nMZLIy;_bG>Mk^h8+P`OybZPwZl! z9+2=n*olx63gFf6Aek(_sw$x4TW+~^&YU@Pi+5g@1kte^FzGnW+rLdCTs;oR7#Afs>-fpLbH#AqfJdM;c%396bcF(Is^vlsTz)9 zxy)p3$Ff3d5HSo@Gcu`+kWGfdk*ug$fn>!S!n)@Ac1?9nDx1~~h?dyc1hqZQSc9Gz zLO-CT$QeaIjnx9by1IO?nKm&wy%OI{|Lwuwef7&aW8wE!5t! 
z98IGeqm!x3|7Y(l;Or{S{_%VC+WKbW?nVN^LIMPLhvF`6DHWg;s0$UKNO^%$q(CVx z6nCdMfh5F`5Rwqr_3Ov~-|x)1ckga?BZVLI{eRwPXV0B8^2{^OJoC&mbLPx}P2u^z z25W`;rnnyyH8L>iLqBjVz8Z(wwfKf0e3%A8?jbZtjNu59XKPAS)4^XLDQJ`7jhlw3SDbS0*bVmLS_iug>#vM)RD1}BA8ifuq#_o#F#3sY6C!>q0Oo8><;5y}EwYU&kA@{^5@iFO zL{v^n7|697VrNI>y9WktP!%A;J^#=|e1F0V6qg1o!&pxjlRefMebR->5(s(eQX<8l zA#k$u-(Zv}6z+f& z<%wkkW_3N)t+*bG>^?t!i1p$_0D`kBjCL(}o$1R_OMkb=XUMy`rh zU{yN`Vn}CTZ6HQHPO2c!>|d zG^mA0K%eH5lZxTlv5G|W>@$wK=kB{;GzSWdGH7GUJ?P}Y%glyZ97yGh5nLT5%^`Ll znu+f}pfpGzWl|JZRu^2s0LeGObJUg-7HDm@B3KkQfEX^g>dK@5u9q<}+MdObsUS=2 z{@|lf_CBaajN{_`1SCd3LLhk$0|Q-1c*$2us(>#|ZpFWsNU{2>>Jj@IjQs3Q$a01}}V(P!y=kwW_ZJRP-eF zC~^SxbznwtX0U{^wJ zQ3GfvA*1vQ5ZnlXRLPve0|yVq7!?ywIK{#~%0nyjQ9SgWOO?v4QYfF&2vgFJ#C-P3 zylz5qMZ_3R_iq zvH<{sTjBMKL8!LC7}AZQ5>ljcP^AW>EJ;S;r&vRp79w1yPN#5t5kq80BSY};PjnOm z3Lo-Ig@yclQ&vb17{eo0SRDpZV=_db6@@Uba1QuuAG&Ky&xWq7oK#Z@~d*XkS>7?Q~_C$WnB?{ zT_A)?G7KE^xZhKuTywwx<>Dk7DHD`uIPNhN4b4Ca@CX}^?=7K~q43e%qQwz~Kw3rI zqDXe?m~cp(G9&-P78Mf6#)IqtA0q^x_`<&|k-8SMX2*_kyGqRVY#x6i8(cIWM;7|o zoah$9%O8ZJlZav<7JRCJ_HZSIh!%Z{EGRmrV?GFp*wroM16LMS1o*||3`q&J;RO!y zD?s38LWZEH@xz%&k9&v-*oFScLy*WTNGyjUpGih&zH%!7uf@<9E`dxpq_0N@id zP#SJZL7>n$q=}3`9SUQJS%UOK?VzuS8&Q%7Bo(qb!y85xeZ`#Y72NGig$^XhODPn^ z(1}FAEK;%<4*B9%6eAq_OPE>|F1#|Q{4L%UL7*3oF@nSrRUseD`DRdpf( zrW6=Uft@RH+yx>wbg0m$;*BK)iZLh*7Pkn-kFN0(MSMh}a}5nCMKHY`!1Ow=Oo|ln z3m2xADda`oD(JaC@Qb?jky5p(q6&$S(b~t|2O1 zkfG{P0~NxBc&YM`jRH*+l2im?!X*qnKp=)?whoYSYD^(U`-G;IpooB6ELey+SPY^_ z8)pzq3Tz@d8&>d{#jz}=houG~R$O83QSlL{@CbhgP^}6@1Q5uOg2tnC$U~r=5cWV^ zNomQ6KnWax*@@^CE(Cdl2&4zVh$}%cz8pdvtQAcM22q|=54^Q$>|$D{!$5(3i>iW0 zbfG2l-vek<*y9nyS263+VYE^nWL5EF1k`#V*M-*WhITXyngh{do<~Qq;&cb7q@|B| zh#Sw@;jUO_f+7opIUL*0pghxvgXP><%;t7!xYNXv zEenry02akUct^5?RVBn2891}q41gr`p&SBkOM(bL0zoTKz|R7Rm_emnfRbON@*PdV zD99ReSf?Q@X%G$>CHKC3K#6EBaeN2p)N~)t*SP0EO#Q-CUjl%SVA21JDhM9ScBP-( z5*G@D2u8z7KU+Y^is=6$Jz~R5f;8E33@FBjjE;7TIULMw@kY(F+O zgo)UYloBs65Pn|Q9@IbNo;BwdI)2Bs&bJ_A`}D?LD5J0I)Idv zIT0lCfq3XsB&F`ALx1bQ0BoXI-~ve+*XhN6OBxfREJ=SkP6;HfgHz#!c+j^+2KEFn zh#DLN3?qmTzs$%CqHKY%M9$X@e2T(@S%opq1y<;7FMRNSE-{fzYrqgf+SF#zi* zQsP%oNRx^j5j{u@0wNO&VkSvONR|BkaslOWgRlZQNBjDU7`?f|AQ(>)zXU*=Y!@Q3 zV5{JX7jD^CdjugBi>Aii^tfF+u<*!bA+izyxwAh?qgZT@l>*3pQb7 z2o*WS7x8QvrlkanCZzn)Iv}OI;9vOZWd+3z2Y(0$6dNfGnZy)fLn9T-2aq0K=|?PO zg^?^*2ph97Wtg!O03quHf;p&$PS}Bh7G@Flhj>!L$vTI6R$pcVWz_e@9!U`Sglb@K z!>x28Gz8>EoF0I;8uTmFhY@jL@xhW7CCcRj0wakuy#0R!MH{Ds#*!bua_}R74)FvD z%;fpfu(x^OH9IHgne3|~DGWUyQz8UGk9Pp;K_Dw-aiS2iRsP_@5dI^Hys-MABjc$A z02~WBhlNDS95CMy!=`}M$Xy8t1VJc>C;%XqH&Sr%L+S`p_+=c6EIMMv4}@9LDXu+$ zKv4qV1yW)c#KSLdWqB-D6d-S@n*kkH{6pm7K@8~@m7u08ItlOyIs9?~_|Tjfv3L$B z3U&Fl15DF>m7^L<{BmG(M6xn$n{Gfb2Vj_n88yWs054&v2-kEj7-4#a+c4LE3Lrvk zP2eV%@R1=dYzH(oqyd8x3?kjo;~ZW!-!OFOyu58g=kjbORRcX3BseP#1%X6`|J{KI zFH1loIK4n&RHhnVI2D4%BV@R#l{$!NK`OkApj`1?UT!1hI*U+WojD;epiyZ-DuJT} zm_wm1LelBbX&g1M(SbCGppQCoW-m+|=HgC7{F4(Qogjj*b47GiT2sUTfMQ_mpae}d zOj8amWEv!xGg}%O1wUeQSinri*L;|JsG99KIW9h-`ADcx)^OVl%mogxc8X;cWMEG~ z&WMf;#)3Q<2#{Qb5RpA0uLwwuiesYXI9A5CayIJWMOCpw8H^H2MTtaBBLUM@(>?iJ zaK~|6c>+}t&~^qt5kNsL0!ZOADFo*8+)G#lQX@b^@gM{(3T&FrU7uoO0~sWS6>yv9 zYKl5BOgLQRAc%Cbx=_+8O2>^{f(R3eI5tqgM@--VoX359jOqylNtDcOge_we&?y1Z zvpK&);CKca*qTZq&@w!fpKrJe$^UXd>t$fPhk@kv?pHbpo^pmF5iM9R#jdzxg%RAg zlFRehcdKCK^92czm28%;ILBf!hy~JwE%Ki_c5yY5&ZKK%3CMcEbv$$gHVPL;*t7U5 zLL6Nt-QF05?E8U(nwV-jldVry@gWwM5=9@B)!hat*G~}2R4NsVaVsX@+!h->$p&!1 z^H5{1I}5IWCh$sJ$)DD?R-}wYqn6_!Lx`>nT$c+v5dH2*>xNjA@}(aHAE?n>!OOH} zf`a3Exn!k*is@o)yRD@)Sy932u*ZW&U=BrM%Yat1a(OfYw7qRRtY8li@=D;$W^vD{2pK+;bx>t?QE& zvF6t1L?Vg2QpHGA(12)G+`@xfcmP5ncpg7-%ibk)F3lUnb2UWR`$*)uK`b7Fw#??T 
zww*_V136#h6+8HlOGpDvBykH69$bn6L?}KnF2=%AvJhk)P<4nLrAq**=mChXqpYn1 zvxw`qg5cS{%rsz54{4Y=@2HT-4ITrHD_UfGa}S_Yy|Ron}CfND4QPykA_BF zw#3k$0Q6Xv^_>eZeB_Zw9((N3C!Tyvx+kA{{K+Su_}9N4Xm4+aWay!W_|F|!P~WHb zH_y8uD7bFkrd7qa@r9m|6MTTD`>y)qpZuhoBMr0KyK3mO?jx3nP1i@=RL+CGSQ?jNI{0 zH{bm96Hi@w@pqI++H(u3O#X_?ulUVXS7kC8l!~@yQqiwAugK;A!8rA_QwIzf07fM% zlCsZi0iZ_G>FgChzWn|N{(kR$_ug~g-S^yg&)xUk{kQw?{l|m(|- zGtW9TnT%UjUObRRI$ePQ%5EuAmxI6|0c1e7vbmgX*#dKyqzVj(%P+sYp;s^NQQu4_ zb^#$5(z~r7`LI=B+l1b2YWFV(D7ToWQ5g4lt-H`2xiK**2*-SQ_ zN+TRnm4h(0w!(VCV2cF#Tpm$vu!~w-v-tvNVhZ2*Yinz3X>0RUW$5ssal?c(LDpJY zT3g%NY}-OtfjOBu^92hf z=CYagHkO6#j%}forlw{fMQ6Y;p3S721hl6z5M`KYqeqSM0@sM@g9h~vA2)vd1at%1HCDd8t(7mkx2M1=-86pw^It-y)9vjWHmpSvJovt~4c0Kk9h^iO zp;N&{@vQl4Itae1uHpP|etW~p_G_=Y{-FK$+Gn3#VNyKuuZK5m+&E_RXfTBGj&#M2 zCk2C|AnPE0OSjUYyRTcj_RqK6GGO2!Fquz)nFgtg8H$c$JUql1j2Sg%?6|QzOxmFY zV8ys`<1CBslS!5Vk!KZ#59zn-F1uv1PTtZcO`bM+^5k5mxjnTB!qU>*l4{Rj;^Nre z#2qFeFErP0fBRdmEGsZ1GtHu9A(egl_T$%@G8tq-h>$1;*E3E(-Hb*v*&K#D+sY%V z89389F5QW)Veo5Dk$(Mp@4WNmNs}fH8#WXI0LEle+2*G9=BBo6rhw74tu@`;(h8Bo z0DyMJJg5cG+M0v?n#&IxGI;v*X)TZk%ZA8FcA$?yEcwZR#>Sz;hMCbAQrd+)&roP- zLGPe^(irt%5hDoVR47>JN-d0U&SbMFm+b5S!L`tZAS%&0RSysvY$9#w*|L++C2$K) zBo5`GC>;vl|8UX!S*9PgtY}Mn?%F^6X6S(4xxB?CZe$|;%pd-A2SQf)%e0*VAsBR= zfM0q=G zHJiTl$KU_+UvGZnoi`A8<%OrZ?Q7d2@!?bUp7Y+!*{{EP-8EM`mYpwT6ivPG+ZQeP zVD@uQKQ(>&K8dQjv(7o~=KuM_Q;$6`W%9U!)dm(hnyq1o&a>aQ7t}sOhCcl8U+=y5 z&mS&W`1BKx9<=99P+`ZN_N_nNddCxwKYGv+N7VHna?juISup?I2kyTknJ`*%)P#3S<@a6~0#*Lp;tLMGk>TwhM&sp%{`}3Bce8y!jzxu|U zxgXs6*P9f>_Wg|SrOcQgO(rkA@OxH!&Wyy4JMOeObLYPC)|=ls?_49@>U;L|BaeCD z#h2zST=?+)_YSCxpY)AWPCMuP7oK?Fk^ApxE~tYh>@|1p-1ld`{JSg9bz4_dV#@A0 zmg=;7Zhile?FDV!hPL4Y2bf-=xuyNW%dU9!%~$5mf9L!QzL`l|5C7xtS#Q5MXUo-~_n?9I{p*1b z=FNTXx#vezS4Z008y#=#jD6>PFlW{quRZb1BQ*_mftS#N_yy;m`@y`~&%gNCjD7by z^}Gw-ocY1~bLRfyr&J-MMkM?6@q@#Zetq1d$3zg{J4C_PRG-eD1-wKK|*AcT5}@uP&@N#Nh%I zBbMf=P+XhoJ?J}r2?D7CHyY)87m!{~0=Qm*Sn;e6^%se0pMyJM;etvYx2%q=GQCCU zC?*&reehSHVrpR z`7|w@Kri^-4&%nY`s#~^A9MUuPd<6zzSA)&>D_<0qL{z_)vp$QxajQjzjOcn_wPMp z_fJ2YfA~=c_3c0CmfP>zd*9;^J?xNiqX$pfVGKB{tA?gTW6@-5Ym4jU_uXd?yWmWl zwy*DMt5>WEY$vAbPd)MU@!vRY_x<-m*}LqxbE~Ir^xl zpML6q{SJsY9%kGeKD?licLL?>JP3aD(1BG+&4@dJ;kT!A1Nx6dq05$kZaF-%0lA88 zPZ^=)VCKkeK)kYrDWjpk}tA%itDF?H8HPCD_#lTSV8dq23mwW+CJpK3I~d-E5bcJ6nk&p2e! 
zM;}j{y5l?Vzq?@Jf*)Rd(G@@ZLH|J`Fa62S_up^-L-*OMS7r3@gJw8RPLG;-jQEN^ zaM!%==z$Fl2OM-@TOoJOIcJ^vjc@$(?|=L5cfb4k8?PUAWKZ+I`RI-~9HuLkIR5GHCGQ&%g1Hhn~FXq6=Ps<0U7sUC2OH?N5L9lc%43 z{D8v`z3=aTPYxOETUkes{QCF5zu@$9FF5C$_y6nR-(CAB!;Bw#$f0}eI{Dl)j{E*a z-_F~PVOC!B{fiGhr9I>od3G#N7pG#$D*8;y>AQ2=|T00!btv}yMa3_=4T z%eLq{Iwac+$rc=QPyv-Y6z3^M5JWM`MwCKBWgO<$eGfhT&&OW4?H~8=G-go5wc_zO z#!0aiNucQtSV3{6)+*l0s^Ch=77@5F&WSoL@R2TF>7_?vk$^Z%xWbzo=@lxoTx5SE#P4@f~OW{P~|Q zUYtx+j~_Sc+TUOM=9{yRKIYs#r%zkEX8rfS_oH`Td;G;0UhF?$sQ3^#Od8b>I8q=pG^ z>&i7tAARhJmbM%$l#NX(!_^F5)dIC(!6uEk8qd%8x(GWBS@_XrEz2`+3>+b*ol4wzVI3!jUh&c>mmY-r051t~K#q zg>)_%PgM7>TCbrefl1E{`JmmTYEm0`NzHYoO#~2|8md$bKiX@+q7xGkinBj z4xBUVy?;FP%pu2~H+J|aC)FJB?8Y{v&R7vQ1IG6o6tM~$KA9J!3oktX+UH9aZnWDD z+IL2(sde6mi#}VIe)Y}eib~{Tb4wO4X=?Z0|KQWQx;}rt8cwV;Ib;Ym+N7RIJ1XP_azdH%J;xl;1cPpZ% zy=l_{Q>Ug^tzNWjHH^AkF3Yu2S*;S9?x56ViV&6p?FLuImo5teJp&Pv{LXlb3Kgss zW49L3d_CgDkr0#Oh_+6n&ph?qbt~5=j966DYAPxd$rw+}Hg!9f z-ned!5j9Q2#8{KfW(-YRw|31Re*McGc9^)+?t2`0+;Q_4E~u@pi1>NvXD1T$ZWt6d zD_!3}iB2|+ADB;pVI~X%HU#FgsHWxcu&}6s+IpJ7y&JE;J?HeZPC00wUH6+d>il!g zzTjKm-f`ziJ5D`t{MZSbn%guZ;o6q#z|OEu-D{{ztbkHXxqI%tkDJ=Ke$8q#*()Cy z2Oo0i-cxru;rL_6PuXqWqNSBJHEpdeb~@`hg;>l_R`641Ak5{_;$4*2|+SuWZPQStlzlb5yzc++NnyW zeet42M;&+EUNfeja{5_gr%ikI?OBDK?OUF1n$3-^0S|3()2SS$HSOspGp2!Eo)WP< zd(S=hJoV&LZu-+bqlWGL$OC`NqoplZtOLiO$SN!255D~7^xgM7c=|4fOda=+e?4ea zRYWUl@>bBdcf+ZN9sKM+?tA;C7Y{r38+`^2LQ9^1_Su(Sa_JKH8jf z&2_iVU(&wcKKo#7yZ-)%Mo-@1fPMGaf3Mx|y6bLJj~Irc8wP|iW*Cq#{I<5WrdwMv z4&_~bR=mD^f%y;|M^{a z-jPVeVNjD#MRtO*CEiLuK;Hg00tAiwc|4CV>OuHoo^C8%{@Fu!+&ZGZ`s}mLFGMie ziyNwm0ov!MyP4%b0BO*12bijH`ipsQX$gD_fOPpB8q3o#VJQPM##=)(3kA9G?5ERh zuJ2iv=ev<~CN*g2ps7=*dY+rjWITtP(V%^N+mG0u&m)dq&x|Zww_)m(aVo5ZbZVb{ z_T6>o$w9WIwzg`weGZIO*6`IoC-7|U5z!22(X67|AI$ma+ZX;cl{c5HSarokXRKWD z`P%ljtgq>sQ44+NVt9yX@tAG9zFmmHD2`~$H?G}v-#x~T9d9>h;)~#F7vSD3a zZT-ZtBW>5_an)ubs>gH=R2j>I8LGlyG-7cxS>y3&4h2?^#zj)c`uYxnf9=h;YetM7 zI%cQlP1*BLIXT<9fzOE}dGc}JILYz34i`~PeiI90DYrLeDc|{O+O}i!CFKT4k znkVcwW0UQ+IGGuH?>(^3ASjM?O>5S)t?xUe?+z0uBu&^8@d5n@Dz24nZ&|i_{mBRJ z?I}(s)#}(eO)<2fqBW-_2M!rHY|!1e|8erTq2nh^_~mbYIdSj3MhzVvv#oh^<{W#> zkzh)FT~&K~=7r~8{?4gK9C^gipMSpO{L@dYGh^9Iy4`8_l16o}{#BLrR(sk_<+Xg! 
z$24;5myH}dp<&dHAFo|LZSN@~$4wfNta|CCSEi01nQLnf+BQrcW=!94;EPW^V@7-L zf7oe3U}T%u#SF`=@IU?Rlka@z!i}362M-#8=7P>Z@BNQd!Pf?$mJUM=t)zk$u`u%p z{MZKDfAZh242&yhA9Dygm94~`Ky^5ULvj^^GlsMd1;5Q81x^-9?a5E|!_zUeb_38k z=s&Q*Fo8PhgcHY)n>c=~aN{O&yBSnolz)Y67WBkpoEVuEwXupS&(7a{`&|Qv4S!?S zJMYeW=Q}^RFjf(Dpb^ZdsmCIGDiXoyw`}R+@BQ?5Gv|Hq-rNP}pLgE8c?$~p>_6`N zN2ZXQ{qEbd-h21zKm4MhPlX$Js#b*>*H$Kf@rz$a&C0jld1mRd1*aT$$|)y*!;BjW zhHN#e20E-fZXm$;fU03o<#D!JTid3~F8Thq&imHuA1;4(&f?S0IQOO-Z`^yvuAj_) z_n!Oie&wYXqlt=$rkS{hX4DW z_uqc``8n?{R55m#QCE*E(F)8IVN_}f6YPveg18=YHC0W5sAw*ick))1X261*GiMHF zg0R2-_P4*?d+)uw@4g%8^u$5%x88c&uwldQzWcA_yMTIe5Ip9m#~*+EiWMtn&6@SX z3ojt$U3cAa^)PqeCxu~K3TS4edDHg-kr(q0YQ+A$6eQjc>-~mHg)Q(neRb?UV6zT_uhNY_;KUz z|HlKEfUCM@>fmZufx`^?=38z$>afGP3~ENX?4oiS%y&x4pd?@@0!TLBz9)|SrKEup z-ZQzcMO7oFuKawNS5PcVU9+J%mW;po*2{JEHBUYIyr20d;gsem%Tdo^I_x0d5#)4wBbYh3>?_|)fZptT~|M#f4}!; z&u(mPJ@vHH-h1=S4;Ll<3f8ZZ`&Di_nd2cWM?4v*b^O-At`m>cQKfms}>ydKi z?2lf3^PSP-#(p?=?ykG+`tr<$Cw}8pEut%)mCv?x$n;WXx4Rq`qO#moSMtEz4g``t?jwjX3l(f&b(j#@)r+1_|K*DKX~TJ z$3OY#H)*%sT65Wo>N*u>SLqGEGT@fABK}M!d(Aa}*mvKKW%Y8H zJb9=3`g$HV>qOdJO>rIFPE}P``hn&KN-ouI*;%wsB2ft|DxKSuOvZi3tgfnSZ{En0 zAQE+$g%xrw71cG(?d_F%tZz-d?N})%D|WF&s_J8AG*W2G#e;;RRVFK{b8aS;$)wu3 z@3*qTtgoxd<}9~$ zuegd^@DeqNL~UYy%ch#TD&6+d*-T|^gQ>=Xywld6uIt+yW=kQHLYL;o1S{|Ap-l~g z4uP2DBJ`^{2p+Lk;J3H5JK0<>RHLRUn{#yCfLZGKzOL()$z(c}hQXCgBr>@y)O2M< z1!oniR##t#aU+}0L-LBVYJdj`L|%;tR`Y0LNx2f?r=IZxDnJZE5RUBfirdy!?&lza zG64O3`}UO>a0R(AsQExFZRwWIy#uhzYxo7C_Ea*NZQ4-ZtJZg;xqLp=zNxlX zwPRa-2K3LQEsXIIzu<7)AnNf1TLZ*{Y|FN@xojequpPTsZCzVyYs672;z`fX8r2m! zXmiI?Qf;s%lvu2yvetJZR;D!)DO6O|8)nkgBKblNl;=}9MN?oAt9Ghy-vK$b5)*`U z>$=LS>TJ%gOvZKJF?J=Z<20Zd_ zvlR%zQXZPnHM|6vXm0yCiQ5aMrbp|F(U8+xw0WTV8efs7DucH0?Oas#^93iUsF(FZ zgi1MriAR0MT?Ctv2!jlRXk%j|I+T2TC(8n$TxDb|PNqlv9d_C+l}lGv^`!Ecm`hA7 zmw%Z|#>!iecc3duy#kxXbKPOXhqbo0C1a+CnWq^GTuc#Q%vlx51g1B69!|~iJMb0A zff*R6}+BAYsPhstK{+IcbJz73_$f47rRsc@Jpcl$P?3mhQPXl9bU~oOybo;2**OSmTrpvEs|h3J z6x@gxkHrnw@?EgsR3i%P@|^7+bi`plzT(mnXK%gkh9_QpKD_CGujn9n7^Id{z;sgu zTO2Y05ol&u#h&A$!iuJOwi}I_XjU%Zsan*4#$cU@5;-Arog{_NjnIS$$`$O&sw%GE z3Z_#)sq)BdnG^*{599}aSSaO7mQc;OMW9g4LO;T8^Id*j#HfftYw6JfNKx{-?&S-% zA9zsKx@x4`+Uu%oP;LMX2tzcQ&=eKwF0Ls#$5KqwwQW5T7-*b`>svM?%uK{nxk6OK zIBK@!(usJ&fJRdkOrFgcr%txx#G*0JO8ZWRpJO*FUG8qTt12rX*N|sa4&#mE*zqVc zMC?eiGTD~RU~sTBCx9wURvLy`u-u5M2k0VR!4Djd=KvYeWYo2-m=O;w9+QaHaa=2r zsIVNx0fBzr)Qz0&SH?}4gUP62pfd%E6HyC3=6T>v5QU8AQ{asmtqu(ICXoEUOyxnf z6a|=1Q88|!ds>-vB=Dk@F^s>C${B;k*Dk5`AWpc2zJp)n>I7R+d4uB#tun4A5(9<~ z%cQc^HPr_CKXG+IMA~F9*^GiI!%V}wA}~zi?14mts?7(QY_sE~N^lo+=qQb9jI z`law#exT&sO2(ix_96%l6SH=#BH!l$i(Dp!AqdzxsSoC<396(OrRq zj0GZKM-LGgnN8*d4@E!bdM59AXbvbB6o9?3T&UK$@kW#dS>wWkcpLDs4vF_ zDiNSj17D+sBgx_(8rX`k+JFT4(bUOgGTPk>6<^Ik@Q^erG5~1dut{A=!w`fDeu75) zf+2kHEFj=Si6X)=@eGuzgp$dFMo=^vSR!fK3Qe3o$ow_qcviS znYyEID=KeTO|_DUW_@qe*zws+uC}UL-Bv! zls<7!gluEc@G=;lqR2~J1Q8(aD23=$sH&7f@R4YdLa-DGapl3E5+zhDmm`^pDak3Z z79~q$5X33OFR}sz@$gc)OAduAS2B-KlMH3Iq0oyz3N=0DOZs0e(DV`>!A3&yw-baF z+fE<@*ntUTVyTHX14>~Ju{}hEcuQ2{7A_QO`e}P|0OAx$Q+Ng<0);9Rd6+=}B5dIvLK1f zFT0iSpw1j!;TOb}CRTtB{G}G-*erhd&#fKgJjwoFr1H9es9+}m31Z$Uy<3MZt2{Ld z+nt}V%ym=L7XXlkW)UI@b0MtU?!OI-H72ZxD(nF3(Oqdb{p&oQHb(b6nl~_wop8gcxgcll3M8g}%ym)I` z)B!@rJ_*tDU=pLHJk1N>s3ehw_6ah=Qbc~tJTYM;FNlp-;3zo`3?*RZ5YXLG6;Jc? 
z5zlo4BT!BDC)Ex#3kfBil2}3NmjH$jAxdJDBr^g2%X<%^q%LDh z>N_Tr=~!dglDsF#aJCD?udIg){LmnwzGFzSNErtWNx#HK)xkRvDZ(WO$OJedr{oG@ z5+M7Ayb)L6kx~}k;{q|FWlArvGO@Umm(cMY7@&iXKs?gd*}oZpCVB12$z)$Wl-EtV zB9wW*0C3nTZh%J{e3|tW187C4HMfN^%)1;0dv84&8(z zsQa`C2k{hpN7JlTvcn7EhWL!587FPsV6n?~H z5im?#B8e!j60v(G^nX3biV5x3xsGId=>S}04R?YH2< z99u5E1LS~A$A%YD)wPr`WoKYVDjzhvb79i|e}QUQPCjeJqA~Ig0kXq%&!z1JunZ&B ze+VH7_}La>WHJs;#nd8%H$u^r8on4^gs{FKRmLC>q%$-U#B@d2Y+bXJpsYjz-6v!y z{^3MHoaQi_BoIK`j8VeW;KZcZix5v+k9rIyAy31Ux{p_&C;`!hUI=;Te(oU`ruKZw z@^@f^lcLoHkWNmmw&1$U_w_;mBmbwQf3vGuyD+haOJ50gWP>fIhYY8=@n2TLB#aqXb%0 z4*byUWD)Vm!aAH_KxZx>SX@a!Iq;b&#e`^i0hxmNxTP=jmK?Ckz|J57#qz!=o^@p) zh8~tEiIUb17c(%LQQ5%$K@w)cSQmj^BsjwYnG6|lA5`=<}KtW2F28cOq2$e~MU?L(h-ty#rt}PQKFd&pTh>^gm2`$Uc+s!&Ia`|F8 zok*eYxb7pKr))2THNubdiCO`GoI7v00z}&A1d*guRFc(`PSCu~4p@Y}K{*g5711-U zYhvMKDIz(Xgz#2g#q(Sl8-_q?$b7i4S^|S`WnP&|He3mT@Z(3+7cn9MpHkB_Dse-s z!a<-q98fKr|dXhhVvEJ_N(VIag?! zAQAePc&;e2baGn)bUdvp7W0jWqbR-l^tQOMh{wANoy8K%9k5X-L@P@vL|b#kU3^83 z{g#SKJQwvmRnuJ!b19fBoR5WuBy1P4uY^UH9P`kyq!!>0gYbhKP{+0(M58)O;iRsl z8$=3{L}dk)h54GUsh%r7pch0;Lw6mUTLz0_Eiy>;Dl*e%56hFbt%wm-;tkj>X!W7C zcqKfRda<~lZwGefN&-gKpT+cl{<7yskJ*Ck*R1N-4N{+rWB}1437aLRm|!7 z9`_GBe1|L%PdK)tp^l<-W+~>~ngf4SO^f*$m~2_TRS!G;D!qJj`uo=V>mPZ&yIIQ-Cg;N%NY zkjOp)A!avV4MKJaK_PP>h?G?kju`Tjw?qzA2}Q{LV(CoAjK*BgOJ~{@4I0%|G&7=b z|2=ArjL`Rnc*oi2=kD=~()>-Wo8nN_T9j|+Qd@#|u4Eq@#D_Xy%{9=7sIDNY*BQ_# zU;aVx5jE#n*-WO8Nu$CsQ@0AXt{H_q#y;0HOw>*~fq7dTaoR3)iKd&-czNgvuvOF> zfN?8hT|W(ozq$6!Hv04U!EcdbNHoU1Hy340L{%{#YW;F;5if#g`Xu6 z>I_Our-?yjhOXgqgn`Ja{5l-MFccC!ixDCvTt_nr-4R9$Zf*e(Pvx=APGH+I%Ksd| zBI4dg$1a7RZ35X8;f(}hpbCTVcoCf+@7L8-DrM?uNik;vL&52=2w{`4Gs%z+hd`*E za+TMmEM-7#tsm7a#UD3uhg2q0S6OXnn&)r>eV9Z5nn8s$iTypu4~abS++ z>iRY8zt^6xKKiilKmcRVCThfnI%17kS4gQ5`0goZ9rVg8FEp-obFQJp^|>8O@RRpnUcYvQk*x3o*f24^p~8`g(;&7W_Z4+5G(g8Q^wPe(a;lIQ z&CGs}#-`$eGUXJb3kozIT<8J7hY@)1z4zLOshhVscl1iuA z)9JL4i0!(|E=k?MXqvU1w)Rvum2zwgQ}03{n@(r)IexZf@9BFb5+-?uO2o|l58S6V zTB+MXTYGCJpJ{1rYj10J3I%kkoK;9?((Uc3JS+;=Ln&>oO_@})@7mDB*+QYE9kDI> zY;zE?6BWs6)Aj;eYisLf?6c>T9e0T7n(KJ&nf#S;YJkb>!UDcU*DWk1)r+`h(yTmDQ)7d7iE% zUw;18BMv!uhlyig@!fdiO^IY;=bd+!U&Glx;1Cx$f%5BL|Ef=2uUvaZ=cEIw2F3zM z-!33!3D(_Sd+v4MLHiwi$blX1&;t)VX#aec9~+a&@B=%#|FBVYefrIw_1-&gy*hT> z@FNdCD5=I^lcG)A+S+i%1O-H-Qz_iGrCRe24D)Eg_EN35Ys_U^eem4(F%R(b7Kn-{ znw8J{mhF394w9J3Wl|YGfRC2zN`d!VVYxeFtU&{O#TBM>b@vks#XPsC8w%dfa{(0~D6 zE@!|lr@aMIkP3>6L)7+?1#!}0R_Us$nFj4g*VluHmC5U-fk7*5B3dQVhxDO3l)C?= z7hl?W%FgI}vg@Mb9ew06Kl|lRG5Zux6z~&OjEV{4CLFNWq{01%%z6Lg=bwM#yz?*I zXT|}`m#@9;&wp9=`Df=}aQ47~1BMSD6HipU^yI_$|NZZFyw|AlJOAd&OEPw@*>l_1 ztkU^7fz4>;&?n zAyVSxZU`97Z84_yD-d_FgO*4&$Jxn;(_ zdwu_V-*4Qs@v+AqdF=7Wjy(ME)6YCRnM}-_IrEW+A8BlA)Ocvq=JhKFIicBlFWrva zAz&(TZUtp`2nAoeQ_`^3N|qfn7J(MnwvzRq0NY*(G~a^-4cijxDCVaA{nGP;h7JGj z&#t}oS68$&H3e!ER<#GpDIlZ=|7Ak6O`y^scLUYIbQ0#jk2y3ficTmx#xIma0R@O# z-s}pPy6dz(r@>9wVL~ygsDHW879BZ!?4DD1f8n|3=gj-)h@+3q=d+hxcG;4}pYO59 z-idf}=bd+oC(PUL{re3!{?FNGo)T4qx;}${`kUY1dB<(H{?CnTSFad4Y`Ch&r%#_= zTb;c0``>%_?U_68I&DDzA^YyPS3Isg`p84eKVPnC{Q4OTD22xdaE|A|Xp3kw_CN6R z6)XR6?H`vc{^Y>@_r!F0{DfVG4I6<8(W+Iek2&F_ciwsDiXZ;K&1VlfaKF0xJ}Xkpg>bJlB)$Y6RFnZ)b%Z15eVybQGm7AJjWqNz=H9cR*PTg&1JDXj-bXf#^Q}tQ1 z-o5s^Yp?j_&-xA-GGW5lY+GuVopuB>W0f_#?7G{hA1%24n%{l8@(Ydi>k_}~Uzabfb9vE;f9?fv?XG~-F6$z(E{pXRiU03`*h{m*~-^XqTVy!XC; zjvhQDu=A*kYD6~|O-PF`7jgxl6SDgXSCTQ0FWVHQO2st9;__oldm)qU-M4S1;CQNW z<~irS{L(8IUve=r9&+#@C!caM8uw>Ex#I74+_GxP#|{1aoqGCNx8HEvop<~->jx*C zdR8H44<9^WQ14pLc74YfIdoXBno3^@4n6EhXy3p7<<@vb)y;R_b;oUgzT=i#o_hAV z(c{MMxBosry6gv!JpAAe6DA&a^znm-4S(eENB{8K-@W|o^O;nds+kY1M91wBsAE{g 
zt+_Dywn3M?-MTPo7Z56Ma~H+}(A&EW|9=H6LW#J#4_)NwzSXPB%xANA{PXd*=X`p@ zt#^(aHY{+RSQL{v63;JuljGY>7}cET*tu*@gRRFWUz<;Cw}dvF{y@EmwG0VdM}}?6 z6*o=uqLG5X^3zY>ck#wYL51;Rj!R_0^YO zeJ$0}7L6yyj~yM=)EA$7Y3`iaPd)K?dwb5PY#6`8~f8V|DzVpt)`5#7HHCa)KF1}#l zhi||0u5QMr?J?!DAAI-5Ki|6FVTbNLeOhBv)1gNmb@NTvU;oGJ`}eJ`u1T;%vNmeq zWE)q1yzKL(dZO2uF%w4(8j#Lq*R*GXcxAp|&6vL1wZFUS#vA`MYV??zimId*;kQUe})^^8@Xol`MR<|}nN#998ExSlxhY7>)VECCf z`_Myj4MuH)Ed}um1a0Sj68v@4N^8 z`qx5p*S0nxI4TXJFAZ3da8977i;lXU|!hD4RQF3?l>{}Vt%gNZuy2DCJEwHMLN zL}JtWHDgB%takE^Yu6wwZ{;zr$|pvGvK|zA&!A%vfZ9qIaq*T1=7?kBol;aL904I4DgunS(YqQcX-xQ4EXe!gbq zN;75}QPnM2sZ^V4Mxo&Dy7~HNo_{$Ot>p)E)W`vc?1qV(#_`qbRo^FSRQW+0h;ni% zu2e*Q1Ec9f)Q#{08ByyKl0Q)@qW?R;Y@ck)SR?6vpa z=UsS_msmoWo0^@a$v$a9<+xMjoFrK@a$;CwrutbkQ)i0gM8P|7kFF}YB78E z>_;Dc6lI|oH8(fHQX>}yU?hTmEU5;05d54ubIv&9jN!wF!-GnRO+Ey95PUpN29yJ` z%Y)#B(dVO-4I4Jp_OFg*Gx@xwtATCh)~{XV`~0|SDT^!=+P35oF5RiAFUpE9bI`V#2>{$!8GXUe$H{xMw8t_|uO)aR1-qX57v> zO`BTuWOV)~3%>o$Z)Q(8>CboFv9@`WT0Hmg|KN)OZA$!LrUs!|xo+JZf4+NU-`b15 z^DRuW%xI;Jc`2lb^T=%z$IU=>R3-1&!-fqBxE)V*49(X0YCJc=Nryzjp}TUA2zQ2h zD3OQ41xmPrE>lDzrud4Qs_J&35YvsQ5oE0vU%`lKX|PIjnL+&r4IVJ+v(G;LaN+EA z>(R#|rV;15pX1wh-tY@rV6RxSZuco;a)nH)t$DW{CQTeQBAacDCF4Uzj9V~m)Q_~JRPDz-tkKcc9^(UXStlv;uU$=DW@`Vc*D5_e~XR>1_Vf6;StLc7J zjU=?_=Sw#h{KN(4ofpkFE}H+YqG~PKbiAf&Q%mayv*tFfUq5o-fI{HpFppD`ni`8} z78IK2!gP*vt0U|uLn}Cz?eh3QP1Vt6*4K9s{Cl(J4jepw@ZgCXSGOL0+@UR*H9nV( z6&EvZ9wx{`4dH2SZiZ@*51q2vY*R~9I+NzPhA?$D8)Ff5?`=09w%=ag`Q8uKH?`+c zQ=ws@%cb5d2ZTTsQp&}iB^cAH2Fat#=fclCA^`*rDMfbIgi6meHEvpc#{-W$y+-bP z(2;&?)5KB3Y{&k5+2`};&BxGa8s^(?y>;h3_uYE^pQi77z(*f`I&ReDd~@Js3sc7Q z8BnLqeD_T+n$R2iZOo>l33G=@lQc7)$yhO{G+j&T+Ja9PA93g*x^AvoxjdW6=F*v6 zci#20PnXS{Idk!nPutS1>({M$bz3f@CmhU5p?n|-Dj$f+g}m(rHRsv=1`P-lgX@$0He|7} zgt1O1jALOWqbMnP5$HY5P!sX^(MKOMaooi5W5&b5vvI>l#G0mo{+vsvVLuv%2MthN z*DG!&3c2J5Kpv-h%lbF1+Zw7sg}psF^g3L{(*F!iZJZ_riQ~>5`>a{prp* z3qGE`@Uu%V{n6Uht1`LNBac54OICgG;ld9V&c6EUt9v!{HuZQSnN-w5MV0oeUtBe0 zz=W9}{CoMT&n~{`o9CQ$N+hOh@mNiDFBRr-)YMHgQB{rF#0#n=} z$3LFAV&kie*PMF#8F$=r^MU*B`uT_RAA0by1@Fu;%^F?N>T3Bt9rSqvHlwGkTep7Y z=gT&(U#CEICX+S2>Rx*N)lp-|&VKK`nQy$YV%btHq3elQMNRK`v;tkIrn)j3O{h@4 zW~{2V&WpzNWR)I`!5H!ELY1Myg8RA-f^TeU`Rm<(|KX+IT)g&UUsGnzni+_Fl8Iy@ z5lcY3L8Qk-IpF0iZ^_WK^lGjp?TP z(b5&KzxVMsFZj-+v7@iM=IZONyLR6E4;Fqne~&%(IOn``R;*mMc*$oMUH;=g-t<2$ zO>KAl<*#!;T{!Q<56(XIgumQ!19r|Nn)I6TfSQde7x5e2%w zxv}YQ4?Xhs%$W<9EV=on>mPsgk$?aDwfPGceztV^LytUsF}ZRH#wYQT22t*DFzlo?CFzS|SFU%}uAOpwyEUP3>v$qpr3lm&>EpFn(bQ#h{0y zaXX*WqGnDHf}B;aCsadEM{L+UF(YXgvV~MzG759HP!ZFj_5CE%zWRgUEyXQFOuxOL z<*bTY)2K8vDGMf&?F6P~ShiJJRSDIMeykgM!2)Ro!vN=~jH;@t&>W2Iti?bo?lH^5 zv|ZI$E|eB2fJP|JzDt0_KmtgmQ%F~i3Q?GC*45T=ULnAZ@Kqn*^$g#M>v}e#Sb@q9 zUa5hVFXZxh4`aWsRaI62m7yywEv=4i_wL)rjTm;uHehKd)V!TjG)-01G$cmV5{U$C zI6Y8Ij8hdAZSAd54Enn6#G;xWivVjT1{Jd=&iA3q4s<6FE&{_TjY++v>K$$RpbbC6b1nR^cC$C?(0V)s0 z@F=6=iXJZlSWdCls;Xe(n9t>MGmzVupcPwCy>zmrW$&u-9zp7O?_E)fv%qP;mSrEhf#V zPRz;`P+cRLaA2m}P7(u_s^nC^J?DJm?{1Vrp5( zin)R5s|6?~Ljzm5suhc(5iz^R6w~6nG*QRr$0yOT!5Hct(c(g-idjHs5IQbm<)|?+ z-()g&Omv5?DTzmk04=FkghoUEMfD(m8d}Oj6PE%}4kox7q72O`SZFNQ4{ST1h$noH zo37Eo1-pQTgUZuXei5;@t{yUz&Se8Ph(dTUXZKw+SuU655~-o_I4(?&f&jTRRfFb) zq_{jFmraJLN;DQrr&5$LScaUk7@!AC0AV1W%jiJ>>FtgUWl-9R9Nupk{zH}5GxGl( zDi0v5Ktm2cs*=v;KuBdp1wwqTxro>zX<=e_lbfl4ama}&qykj|v*>+8{q zVI%=_ozU6?A>7d-vhzu1vFEUB6aj-eMO=sghxPz^(+jvk9oYoo0)Ry%0pD%@vO91G z7E%HQdLF|yS`(-ug82-GAT~7cX)wf}JKjYRK)_mpIHDF`M?#3_*_coZqNuD9g4{wd z&~gg=D&IzuJ4RR~B;Y3+gxvw=!fYm4(2v~b4UI?DNkl%ZZr6{v?3FA_JRKI__7@!( zqdtpAw+Fji_Gj2pz^3rClB!zBS(!`*nx2dKK~PmyX~biK#!m3n90bp&+lo)an?CVL zZqy<0bnqn11e~lNY8c)N*&u&~UwXO#>Mq04rqbV&%99{*0?6SaNUe$x`6Z3=K+)(( 
[GIT binary patch data (base85-encoded literal blob); not recoverable as text, omitted]
zpz#!xcW@|eUYIay)Q(+S9)04;%^TMrKX$ln zSzUVeZPTH{S6_X#bNAlU&%SWp{CRiWag%N&#!eVnR95ljx3liL_o=`9`SHQW^y<=~ zrADiBiLQeHF%XT`8Fpsm$bp)c8#H)WR@1j{*^1Cq=A@q%E`9vTr~mlSJuTa`=+&)j zCK(;jzpLYFYN+6tV}@>AwdR?>KDK4k#^c9N;7yyJHiAhMg&FV&5&QgB3&FQ*)lA@M zEfq1qRaMuuY|&Bi2j|V3n@(qF-9yL|1uzf>3L)~o7W38!n7;G0UQ>WXWp(5E7hnEy z&g{ukU+>Vmm1Sr?IO&RVSYDDQ3f@SN=oG(@K5uIfBp=-g&7UAua79{}%eDA!UAuMc z*lxyG)1Q6npFO*FI;LOu1&bHO)8_mCeq+kZFEnf3?#gR^ci+7aJ^ARvo!c}YGkWBJ zfy2~b(IbyOxnbohm2=juU)OK&kfMUZPu_d4zCN~W^_s_@{O5!B-dEY8bl@?)e_XtD zPr`ow*?-OW^2e8u0w=SLFA0pDHMJhN^Gji}Q5h|I6yKw%J z>ixSv`0(9#-uhToOD;I?yhRJgDHMa8_|yL7xHr%F8NVdgi&;Cckp*u|r1=KTh_E!-fu0 z{M>7AOnv6*=dZfrQoqc#>Dc9ptFM3b{(D}2`tg3fyA2sKq@cWf(2#);q{knB!nEDv zMvdRJaXaA4Z$5wj{dbeeB$(ob_N@2*GeEyPK4>(BJ@eK}LyDfsOn&R*c}vzk`LE}D zbZE}mhN`G^ca5IZLHrJvDFA2E^kc+1qG4F7KS0ah*p?*H3Luv2h>>szicY5YPxF!m z$Avw5&&%A;gXiBtYO&9krvn65oMp3*QGz@pw`=3dIp2L3D(l{>HBn=Q zMc=p)eS_f;2JWiWt1+-!b?h_j*#4EL4bP@8C@U=v7ne?bf7TV(U3tlwqoL&dZhg3* zG?`;zJ_eqd*}r?krfnO$b?Jb>uv_yM+qQ46ispR&2%?PRhYdLUf^!vBs3l!O1zUVwp=AUFI}8zS9w5<~+)Y+zDj_4O@UwRUn^eLN2TE&x54 z%Sy<6n236k(+GLpb|gtg8oWfC?l6fW`qnU)&7b|{7oTf6=d_8VWx>CD_gX}{t5&RD zy>d;zG3N~FUE1~8fAUD1kh*Uz2tP09@f3yZJ0d=i3pnfGnq zwMTB=T8W4&=700U^v?n%MQKyfH0$rrJTzz4H&Z9Sy?6KCLVuBMXu9NZqF7g5ZDuo$ zVj{#8>|C>wf&uMY#`f=+`Ta~+Es}~mcW>FDWy><#jLw_??YyPyeL}Nd=e1hBbm@k) zwR?YUr{=}Ws@K(2?|iOb1~8q5ZON*-zq&dYP$k39+w{eKnyCvj6^eu%`X)Ms$@v0D z)|~`R8}ai^q6mmwMmDj;H917hv{;Y0W5g>YGpw%wQNZBo1CK~ zXpz)_$VaR8g(8KDU&v{CGLe9vTvuN|`Crd|_Vsij7%^yJ7UztyeM~(8>=wtB6gO97 zs55%9qvsMBkD?;;lH#*%IFUruu^Ly`!I6=(m+Wx(}9Q0JfLk1Lqb|I;tj;0ub4u~HV=h8zp zR_7s(9?qiGHBbKi@xJ|fUw6}u^gScLhz8Qy3Jv9&!XabhaM5dW6$iKuznkMZ$<3L% z8%4ZkI8Hiaz_)<^YrA$PZ3NXoMYE!><}Ul!Ums{u;&3x{QR2_-kBl` z4n3sLe&0+Iz&kODR@X5-bx zA+bJ+AYKa>7TGYevdnY5M0dus2%a>}fMB?VWrhVUp~s_9_5ejlBr}GkR|M5BzWedt zFFp;A54vo}zF3ceL$iqlB#0Iwbld=~^T)}82#X2x%*&i*S;)zJbJl5jIILfpVC4@F z02BtjF->BX1ZwkW*$#%q&?Bgv}e6A>6VJpPV^l?28I2S{*fS#PKg zMG;HO%CfmsthPpAAD!fD_V3@eZ5uBLs$aoygi}Vi4yM-w=uVpRN^`hqf(6DQ#L>Hh z&g7PgcrzOil<8ShQv9ejlSx4o9E%<(=5pEQEt*%hXt`y>hBa$e@7uQ*dQg^Rgf~98 zo|*-((GaaQd-wM5)j?0j;?>oCdLIM#$4J%&{c`hm-L~)9zis2jeY^M4C){ld(GoA_ ze1frI!;XtDxw$^gZ`ro{`b$o)-M_CYol3g`T$m8NFM6iHagq!|1lNY?5EON9w6@E@ zK8mcYU$|`d+O5%jb)m9?bvrh!T(c7PprCn2wyYUVBEaWh1&p zyDdfJM4zqyb_>BTSiHEjW0&S_yHxFsoqfhBiR6BI0m+Aujo!6txE}Dp?cFCD8q@4VkC4n%DzSY-HTi>BP$~y)b_4*c)&9U3G1pVOsRi+SS>6j_eJW zCSnRH@;pk1y(q)c^DcVP0h3K(r2@q)3nqjWC}6>sT63w2xki!!!2vx z+K&-!XSeR$a?*(YJ9h3~wq(_+73=Dw@zRoTRn=~2oMFRnc}0uToP%*Kh4|R6jmx%d+0yYR{fz61^vj0t8ou4v<2&5VWi7 zqobWoUr}304FOL*!{WB-3$oWo4zJ7|=5q!!(DEO+{p}IT<-` zfB1H|h9PO=eY)Vn!4+-C52x zKmGLX z-Md380dRm3g{ugTpy{*86ryNJgu0c(Ma@Jdpl1{BzWsLZ0Rw(owS38%r8nGoqYU+< z(vscbaDiV^A$*a-f=!z@KJ?7XOIEF3wrbrSfB4<*T|07_)JGqGtb~JWmM>kkeCZR9 z{i(dHKvd;Gun3MrK}db*p+C2-?6h#nw>x&P``z_7Uoq(-UKV)2TpB5nTwaw`Nsl$yvf!uX3+Mc>d+Rs1pNUukjZ2qyTgVLU%qTf_ntj&yY<#LUY~-Ve&gS7-*)@$oWMy?G$e0y zPlatHBeJ4?_W8$`UUX3vj4C)r6G-UYEKhD&rh|agnd24~(bCYK)$~A7XjM8=b5>p& zaSfs_bE52*bDXV)0s$q&y8?YIN)GtLA&HZJnm2dqr=NX4XZG6FtL}c{pR3odT(W-S z!Zlm_4>)$?n&mB9lq{MzZ}F;4&%f|WNkK_SRC!6XL}BKv8J#<{`QfYg=FOeGuc{W3 z6OYH{tXMZ|@#2?Wdu9Gl3qJn%qm}E{t=zWrtFOOl-KI4N1&b)w3j~6}0$C18k_rz( zl%&OrR=oTETi<^-y|^eSG;0pgvvu?R4?eu=(ku3C*>&d~f7r2ky`S^VpSNJpu#t<` zt^aB9s^P^#v&YU@-q7)aDDLyqIi_klPu&M^t zbsN__^u)gwu3Wuj)9wWe7x(MiM-k*g#Dy*`0Sli=RRzsRt=q8nfqNgC{@u5gm6gnQLVJ<_nR)Fbk54@j*Fm=6LKW$ed}L&{k>$%GtJ(ImMPO zUmzGR%4#VLo_NX>1u@{~DBi(XH#OHWpiEQgoT{q2o>ru=>q=QIDJcjZcnyT)(j}pY zYrBz>f_;fZ&d?(PDP5Nih9iDa!T3+5atN_~T+XnfsuHjaK9fuM{ZMN~iMWOiE-VUX z^)$UX3}X@k!;-v24#N{v^D`8 zI(S})oc?PI!CO9E<2WOgb2UA}CNHJZCTs&3CkwI;Rz$+NOcwr|B++zP01jmOP6dY$ 
z3|cai%@TGGH4{cq3Q^$&-mz#kY6Lho%(Bl$uQ82=(Ac{>O6rUPawPIT1c|7$iN?!c zEi5XeA!MFZyd6D|x&KGU^$ zzf>4SSNZ7cJa#tcv&@hmL9Yz=7xF19lI3@^j;=s6`4FNSV4^Gq&__N#M~fQRA;W;| zr8z{YuCE|i5(yQ=>h?Q)4t->59I){|A(N{S6&u053lCk-1|tQCmt9BYb82as^2k4a zcj09h`kYd9NT`T>ZqZj@n6yA5nRCktphnwHrQ^Y%-_SFGU?69w;m0e1igZd7e7WKX zTCD{F;T(oDoEV=07XkvP<)Q`j#UC-J+c^yZM;I|X+&`K_*U+1gw5G&camg}SSfuOiy!-j6% zJ4mt^RD^h4HD|eeK|x74SU~U5Gg+kt{PMcRD8K~ z*e`*KjAr;HIaE*p1;a~xK;|Q%5L{{tVNWgt*U)hM0VSYil8`vd(MyX0=+8*F5Nh7B zv#J6`pkma+Y!YHgp*c9NB_T%jxj|8?C<>L7l=~Fb=T~FNY(c=^zInN+nURukC>Tm+ zG8lFKK!7|Iddnh|%bIXWE6SP)Y7qj5U{EN5F^BVk@LJ#_!C)!8%Az9bZ~CxW*ss8p zMyPE*2NqkB_|wlld*R|A*Q{T;WW|!@>sMVf>8dJ-qOhJM57gMFo1EX9+wMgXRmV=s4FQx7{C# z;8&G|0>4;X-pn5=fHP1S@#^z}Y5D2RoTLdg#&QdUBRh^2$DR1yVA@JbNP(3nP>X-#I! z zm`;3x)61wldA#%Q7- zEQ@Xu1l`j8GR&ueh!R#4ss*y0)Aejl6L@%k1>gkG(wF=g>0U=45sr8biKOoUyy+A4 zT&zBqPTFWjFa)jXP>k%!PhOA?2-6V(mtBmblq4e@;ov&JGl!ILSB~rJ+Pq;4WgWXM zTCpZvPy!q6(hCEJhPN=g^du&KMt0bf+9qjvKq8cdWy8M~psV4a!3v7xYd{mx{TfCr zh9bS5f_Cxp(-(!x5gQ?_gxpi#p^8KrbN|vnsr8 z8v=c7nTpZ9G#rwuPJ z&qCiahk&)3c_jeqX?m=K()4C7HKE~Y68fE_k=~P|CqGc&xQhIsiFt z4j!&Cy`$#%EKktbQz?WqXeY!S$Vl293$;MjU>fO$8k^frLd;kP{)-gKY zR)ALtPEiLT^QvKK-b%BqNIsG2Qcrr&iRdSK+63ZB+k4XX8v&ah+yKNs=Cgq0gPZW5 z02QG}s$e%%9{PtkLMrcwoy*h#Q+dcveQdBG4*ssR2PQ&)^w>K@1d zsWyyoC?2RQb_~&h%u=@_pVzYh6lYyY(n!A)8?cd2$PnCsJD`PzENdfb2aDkzf@$>B z5OE0jM5|gJGV}-oq=beE5@{_1955spQa~%%Ad6Mv)k^}EFHQ6yXC{+FSOz4BHw;rp zhmiTk)dN}wfO7egpayJVyhMi{5m<2oROwaboqEUA1zukq4Un5LFn@XkXkeoUEF(!K z@ekUS5^xzP@-#OBof}N$MeF@RY2$|Vrkfo-Y9zejY$j^z8qWz9B1K6CBP~auCyey+ zADRqj7qZ&0vDLJHt__C+<7gAAn*gz=2{MmcM+8VTn#Z<$32G7%2tgqmMUAIH<@L0e zt0AvffPUb3&}G`t6>JIM28{VkM}@q;BO1LDO$!`yOvI2iI-o4r4?Wq+U&WrY;XRU! zlb(h-g`uX!u`D`<>Zc9WVL;Pzt~78cyTe(ukv~KF8eL7ttk1ELQv!qxAZ@6H0!9KB zBheRpVB5hm*@s|{91epmhKg2^#iosw`ac7v=17icG#**s{oes&?ZJ?5jDwz_dEeN0 zNh~QXEh#Q;-MY1>@4zCRp7}NaF_bA##tmW!`=h?6D5C*q{ug^-#U6b2HuM7nhydgs zXQVhtTjkS_1o>-3;=$;T42Pp)DNH+HBQy zNf%#R?*ii4lJ3bWXmKoiCqP@Y^&MPb^ShZ@U zq$r#qlQDMKy4R%YEPDBqtyhV17$gU8|64{75SeiQu(4x0VB7zo^+zsq6hueIe}pTI zNi+!Zhev~-yWZp)4!v&njt~4`R6^BYPb-cyQSmMzsevFW@qcF^2O<@_?>k zx(VRPYg4IZJ_pd#?E~0G(yq{Y*bW1xA0RIlZB2mH_-nQUmHs~g4NUSlBM69nDfl(% z@xO^Z;!5u-DLc5>D2o6LeBvUVKZ?ocfHhBksIdOpn~%E{%djAqmNA}O6M zLaJJ{jz5LVhi*sb=`TXlqU^Yl-UC=kD%p;pAtnFYur(lJhom7I@?gheSC*L1)Cfpp zc@GC<^)4Q`ijH7yr9_IL8;CdryhIu)2dc!*+wv!bf*q2pI^eZ<2qYV^#G#>c>jd&mVeSbr9e(4HH&?j4*VWD8SzfMrNrw6PLoWKG5qJ&UFr zk)#AkZBZKiH_83;KviPH&a0Jh)6GG#iOOrFfyiXEd*uH`Q?N?AIJSbp+8UdL-%2{47&(8wTOgKrR=#Cp%ZYCjK z6Y`}F0XBxo9mfGe8=gbtY8OU|0O~*$zdUuro4Nz9Pe;H@pP{2K){8Pd3nn%mvJsE+ zfM5q?c%cwZy|9P`i|!aAQYLEPMPe_qRK zM7juZ;z5*vN~jdg?I5x9iByo4r-03+m-L~$Ecy^GxI_1F2m@^pP2}^M&;aa^@a2Ix z)TFIU-?7M#J!PYJ><|XQR?ssB+TeWI5)lj92{(oU#ipc6=r$p)9h zc!_=@7iFj<^&{O#ZP+ZDm&BU_eIT6J%d{SyN-n)H$GCXVMo$~z%y~Dl4{TH#O^_s+ z-BHhrILFgB5lJlQ-9ff6ugI}ianSp8dxMDLM2@4+jj-ks4ZjAcf%Ga8-7)b=G}!`_ zgDGsP2yihjGy(7(NuD4H0-0js7-JfxaP;7q@(|T5Bi*tlW5jl_&o=w=9brZ`n{#yA zB(q>ss~c-$rK$0awG%25X0V-d)07Mf`GMc2wKh1g-Di;yTdvO{FWajC6_Gg6KPY4} z17OdZSp(?lE3KwMuj@gYNZE440oF)XEpdT_hY>;dZ>dSZ4R+Iq6VUba5C-k0(@dRS%t`ktxqeV}eX+|KEUSbM0)02DVc;=!ybw8aj?_%g$+8+caHTYZ5Z;Xj&$l zNz=;(z(mYtlF4SYp+ZehI}Qke<|1*Yt02Lnl}M)A*~|;#=dS^bsgWa(JL{}-^5?$} zWLcgtVZvQ^-IYH-7*M}yS?OFRok<#2*2kHUA=9ySy3vKPN?DF5Qbz>z4&#K+c0|XO zcp2jq3J62jfdM0xb1-=MNjI_Yvv{(^R)tC90L=9sr{-s<0H8lyU(TYnvK{&qjET;H z;$!%|Nz|rkqqFFaC4Cu#)xr2pN(u+)0>VO?i&+Cts`Dxj8OTO*KHc>hP-1kSk0weR ztPGI~bU+1#%(BrW6U#C)*=#zKPNq_50xr?uq)&p=lXj+#K{8bf^h6Iu6rN8=Cm@87 z^8y&oX6U1v=~I%h+9tJ&Ueu%4S1emkXGBV&=P}IR2Tw=`a%`YM2DMy9*D_?BNsI~W zukX13X8=3`Ptb9^%}FLyo}^?n17nRIzk!or2kSv>G_uKl9N8EXe+e0J0NsVI4*7#& 
zzn`Wb+m0}zPd6p#7iA?_P*4|-Nq$u~Og7!=An=5SszQy-%NsXt+{B3!k8J`&81(Dc zZ^42E7;I##;eYU=1sQ>xN5x?>NqPte$q$J<;RaGNpd^o=7x`%FDt)rd7b+|pGU%AP zJ=>sY;GCeHBp8H>#12lRZA+Hro^tAORW-Xz4dNx~KK`n!uHCd^nE+|Qco3j4Q5CV0 zei?4!ny2H~kN~HtQN$}yK-!MY60r-5ibjtbvw8E@wryITd-kb5$JoDbKgUU$<+g3# zVf=*g=|mWL?|1!HekJxl=(wWcBJ*%#0rfXasULErePHp;( zG))XnjAatdytCwGWq|x>S1y~Ic-pBWM~>>>Z@|C-#~d>Nf5!~y*MHEl1Lw}2r^t$D zq@eW;$8FoWOTV7oYj^HI2hr3&W~(GYpb5P`A4J!$_~`{TNwi3*f#E1hm!NEzkt5zw z|H99O5&>f&q#h~6L0l&h1GyHxMb6RWIV#1dZeRc*;YC=BPF=g7e#YrThYdS+=#Zg< z`q%8=T~}XA&qyf1p<5;7?2%r>9}1E5l6aFWLSsRM;S1OfCmuUwgwN5^sTj{IwA7p6 zh!o67KA);PlABS7pFxBBl#|E1u8~Z}ndwEvVEa+J zjM5KFJSzNRW@=aduqi;wf2k5S=nl%nx{EcPO+1FXGh)OrJ6V%W*0W)PHi@Jih?~?p zVk_Hu#Tx-u06v+fX|g2l+q+8=g^G&Owyi3Yi3CN0sZ^q_F1oLJzh=^Vmx3sQ#S~o7 z)Vu>5@sXI72bf~$yA~~4M5EDoEFNnBRCDR2mnIT%m|2+|ZfI*&b9`Ano7FTM-Q&A3-VW!!?>kkPbwBA!gAbVG-Kpz9ilhgsHi zS}`P@NhK4pY&rqI68b8W)8eQ#5z{hp*VY2T@WhE{A%1F6QE|y7=M5V=P@=m7l5Pq^ zMvT7tn(JCLYlet7YdSGFDaklWG1bFI>+5MTYDG<_vbmIo`rt~@N7vv}kfNjEfS!Z6 zd;Elj`0pu4kyz^D=#?R2>dX zX}~TyCC!@s;nqJCg+p+Rd71% zL>$R0~04j%j)Z0uL$7r8Q_7!>|&WTq2dqrD;|4SRy`l!nkY> z+8An9(hVCzkx9iZjb7IRC5SJu>)Wq?r!HL(O&h2l@l8B-+{ocjkhu(qLD^gyyQ~IB zBctg_c!Q~op&9JvfeoiDosMR*2^fhHqsEt2w1lPxehdN8!hnO?l{KH=62gNeOr+*{xI{at0!+@nR zjYd9_lhcjLR&8&%{<`wgLSHT?u?4A^$-s31w#o~hSkZ$4vef{_JBQH)BT1i)2?&LJBU+`Luus@l45XMD4D{TdFcP(=+egM8MsMBz7E2tMc2i;5y2fArac z>EAr`r$4su)@SM3oz*pa7tEh~&9&F^oXWe1p5XE1JN?&yB#X1Y|F&1puD9NC?~{)_ zoQ$Sbf6(^6DoF-8pRExflW_Ftg-s?@B%=&0(-CQ{QUXu9a2NM zV@9u9yZWV9-yb$~$VF$LmQ6)xe>Zd1_p?KV#jrrGX-Ph7_RO!o{%V@0n^0w^pLW&_ zw_JJZt{s~<@0|Mj>nBef7YYYerRc?1-@N4V%er)GrN`qx&7L*qhgq^eG~tYigNF{R zPt?8p!MC)KB43zEx|1$DgBP4nzn%u0im(-d1_H9;7};ayl$@~p`_|FvEBe+owlIS+z^Y4gtD z5WsVRQ$XRNk@)J1Pul(N_mghAd&*OfBoY~)49z04dr8Pi#*o5W5~3pmuF*Srt_c;g zziO|p=ja{_EEuGbZcG?9;w?BnCM{e6$vSLfi6psjso5P3AU)}%lg>W-?DH9%a>^-Q zO_Y;Sg#*V`diUsm`pGBlhnA|U8#iG>X-V<4FQ>r)96Wf0tSA%4jp^2->(Vvr7A{(H z)uc;9ics33^_}U!b-EsmVy}Sf6UPCUAlhr`DeLgV(78M%gbAwbmI8- z?OSZ#vJSS|2S%8%olwN|d=rt3W%yJ%eDNjca{@Pi;UZO*PCI^#EXsq1A2(popskxX zW-{rs&p9uhN_{i!i*}VQM-CsZsDV{$*G-@P?aY}oPCa#etI7(E7RIpIq9FdNXc9Jh z)TkkFFpnKFD3eTWTem47Dxx4l)BP}a&c|O(8!~)E@1DKjR-AL@skv-A5GuR);tNE< znKyS96x<2NPw-i~fKH_jb`9HxIsXk8g74bCQcp$U)5?MH=56~+D%v0>d+V(?_wB1D zedkkX0vg#=3JjS*{xS$6pNvPJdio!`t7`uK^s}wY%Q@TN1&NpBBac23BKik3)rj=$ zl7V**7<+i=^sIoMW2wCDAKY)yh@pe_?%OwK&cfqHjO^dL`~KS6tnFL3V#WHE%gdU# zzW#T&PMh)d+#kO?_L$y-1`g=ccVNdZy=Kq;k>{)~omAd>bnDi2>!#I)rdL%b zfB0e1v}xZAIksQ7ZXLF5-^+`kIdi_>wqt8)W&80bpE2#LY3o+6JZ{*Kj_unTi2og0 zA_+D@@@brtaUDZY!<%+j8>YQx^@=4c))$mCzy8MSZ6m#6eXZZlNqm;C;BKbl@% z)~bKE4n{iOzC-u16HZR%(z9mIZr!fqm{Fr6iacuY;MUEW^Xwia-3+8#f^4AvHb4sL zm$yU3flBeNEjgCt8ls~YC{Dbt_NiCj-Cmn}@X;qbRBQFA}0K-Ls(E7I|4i-h(zqB5{#!?ldY^x@$|?l)y`A1TdWt z+A?qP`?l}+@PiMw?A+O_Z~yV*C!99%)X}3yo_NBEC52`2Wa5MO-g)P}ci(>J-wDkW z)KI6kt%^g+l-FK;_w9E-|NOIzZb*@$UVVC;c*5|BXPh>A!YQMU8(&f05`n>wKhF8+ z<4<|^~`h6Jmuu$D_b|Os3^AS$tE}>NIp9q+r4kkj!0RH zR+a5KR95D)M#|z%PD&+H9on@z>y#7DKJ(0WZQGWVm572Hi`RmWmh0QLqjt*V*FXN? z(>e3z0+UR)#SoVpS`GmX-DpHe>n#NT?6c3_amO7{OyO{d}(2>!IwPAe%XIpRX_ z&6+oFx)3}rc?-eAETNWbuDP~XuilEHcIeo#SXC4rP8hnE3k3^!PVp-NfgXmNE@v@Y z4RJv9{c}>)6hNi!xSO|bm3&U?@(Q>=hUJK|h@dT><1qLc)Sg_yJdnMne+nlNDbun? 
zrf=EYC2zg?_J^N*Tw7ZkP}L1DkGJpQ*$8%@T>)G#^6UH2K=IN)Nc=GYbjvPLqcP|KqA}NBXWKH}0OD=o$&8hFc z|K8oV-CDbAd$fAbS6_WKW7dMC3BKjtee=axvu1<*-rc*NdFI69#*K}Xw=HkgsYUaO z4aKk*voo{Il{90F6?W}O}N#o`(p1peO=7lSk zjvqCY*V3+;Ua@q+zu$QCtq(r^VEQc4A81xuVCAx$iLRn`y?j0so(y|Tl4s*@0U+ZS zw?hErLc<6&IT%M69HJ}*f;)F?gWB(0QJ#&(5jGm|WpEuTj~xCZ;vvXuBPbxjr$|z1 zSqUxZB+^1(w16FjE*y*}X*J4A^D3I$2xx*6X$Njg9#mJp29i(0K{f^~XRF-`uI1>d zlx-NYtn8_(s`_H{#toY}S4k$d`ucj5ipOGpMMi9%%<7zoQ76Rft98TjheNU2XtX{q zsB%0No%Z>MOO~wWxo{$D#^Z6+l*^?M)5C}F*rB5$2OS5euARxD=cQl(lB4I0L?Rwk z3lJluG?T~RHm!6jfrwQNM^>%dv~A^rY;@mOpL|$VT|0X0*wfEC^OYB#=B;SUqDymz z&I>_--Hetw+wW?-w(n}zyz7|J6Wm;Ce|44BZ=miIjvYGm^ixloHgm@A{nf#ea=O)E z`@*4Odb9#h&gY~O>3}~ZDWYQ=V7dv0Tb3yLeY}!3=+O?s&);Mr_~K^aNTeQKzoqN& zjO%M^9D3GCcLU+cvInGe?-knC^+92IPQyXH-q7y{0O$45`aAx3$FP9|Z@=|-x@k*_ zpO&yj$%dPEhrl5i^T1M31b_op4CUzw6Wtl)1yi@<(S3+7)lj%58p~!iRh7sE6y$=! zlEQ-UcPrO^@aF5~g^HkMHgDa}7k3?f!^lO8W^-=llh5B|UwPDWn(kP_G5!0WGV!FJ z7S7wfbxpbdJfw0wulg0gq-d@X3J0doUHa{NQwk)XYni+ERdcdpf;GGV&HLk$CCisB zL60Y+CU56NdTuWil{FLnL0`6xbBst)>9`Xn_Uh8{g@3)&x8IyQ7^L<*ukWyyBP&V(|0MJlU|U3FF7NFDt0qyo48DqgAxywYh92 z6e+YhnNuWkT6{blOb77;FZy_k&AeezpU1`{aQ)ZA&lkqkBX{7hf+o00k5K}J6;RfW z;LADu-kPchA9}2Brw;etep`}0q91~T4H-a{h=3+&^3*Uvqu?575qT6A6~=5$ zk;CNNAzFZzf$mJCQkF?8N5h|{J2=pU-qwJRdbz;`|SY2IJRU+<4K{Q1YWqQyL7Af?JBS?gVDg`l?O1JOQHJONg`{k$KfBDhM zWedA>@492}{x81#e8c+9%__Tbz7U3mZstrhLV%;EtlwNymr*Xb@FLkvY+k<%?1*R6 z;o{;{D*ffBpD$jpu(G^dmIaz_V+9;rG@PtqCBc4yCOv5M?ITgLVZqT`IF?1Q|2JF+ ze(AEc1d<^eM(Q%q|qA$Z$kqm^lL}2|eS;GN*>qrhwR-cVGwi*X-Ugwoi{4-+uea$De=s<@603 zw+0noGG2B29d}%R{qNp?Z<-C&ami;9A@riok<;VjKG zwN!omp6*>c&;9nhZ$J5L{_NRR@t7|2S)ZdjrXYzsckEp7(}MXwE!?nWQzn-yEpG;r zYpeI^*;p!)x()q@cJ?vvf&xWXYi^p>S4YpNn>Zbrk2qPS9kI{C0rDID$&}`R$O-J<{fB7!R7F z{AD1wsL6&h=wTcR4o!5>1U|)^Pd@$RiGv0{e*gW;x9rSH?8|2^w7G%c-J42@b8il6 z(~Uh!pc;HW3syzog@Ono7(NgPD*gbU(M)n8U@s(vDYXVuj8e3*W9$eyrFd5%?9iR6 zs$O@^^(P#E(u4^oU_0rgQ)00=u44E}BAgnCt8LgxaJ;;tIV#I$65r1FX2i(R%U5k! zxn}Lnw_K;F3Wg;vDUu9QBf$WI&n=s_K0Ec3Wh+I6pDzP z0MAyGq(GRovqB5OhebiItFN2#>g1blzIoB=6|>f?KJ~OS-hTVv#|-GRdd=#WCcn0R z-8Mxnk~pETFhVy#=~H!uhRIjg#OiA6_wC-5Nyp(c2f~4Q3+A?J-Ddv0AHV$MlX&%h zSy2R84Fro71!@H*ftG$mPC*j650=l7FW;Q6&YtO&cLh$>lYv2Fmt81^n zeABMg!EkWd(!~yjhmUSAhJqpJIB3cK1NwjYT3J;s3|pR{-cyU2VVPudZb+zHuQ+h~O3+f;%lvu~MWhrL;f` zg%&MTfZ`M=UZll?JAouX+>`ZeR$m+cpXbi(W;fXgU;CH#`+9bE-kCdB&pqe4=gi!> z^VH=(y1KBS6e6c_T^Z%WkKXOqzx0*AJ@w*q&t*DtiW)| zmUER%Hvjh*pa0Iu$1eJ6{)OkCRlRmOjQvmM%-wOuUi0QJd-2)lcbYIUA*+Rn;ul|f zwZqO|bMtkoBEcKTMeKUpm@arSqO{nA1!7y%X6#baP+!rn(uIw}5`&6`9xfBZWcO6w@ry*@SN`dqyCzPY z2+b6WcJ-i16DC%am-Cjh8q3;B--5W|XX@8)aFiJQg=Acd$5dc%HsiFkq$S1I4LP58 z;!&eKr8RD7Yx854Ck-e~B3(;Mi^++xkbJkWurL;j!*+LUuOppP;P5%=yqQTRin3|1 zxTv2mnVJUI#%$iu?wBzz0$tK<+sx#wf&$>3lgl~rSV1Ju;x2Hn4SA^42z zJ6gPt8cRZa)O_0M$adJ8qQY-1D=V$3sVOKZKr~p6aEf7(F-ax^gA^@Al@NkDbm3GG zf;}u^R1D$Z1F(rkMi?dh=|)gOzV;D|bDa-LT4$6Xw~llN9BsY_fiXZPII3|Y=BJ!? 
zi%S42xenXzFulS=)OGFFyjN0G9CO1rqZXpLicjfsJ>)fwXRA7iu+(N z?#Q-vSWa`sg6n|cupnhvRtAF#%~xN8GSb{kBr=v`oB8rSQe$0{C-s3#R8pFP-oWn5 zccl5rWmSn4#nECIH0>>UT}i?iG|g;5p<=|Pj@E26R=7Ux4y_v4ylSBZe@ctXss@+9 zvX!Yj@3qsEv5!A|7rZ>n)7o1*tX#UhG}YG81R+u+eq4c!k83}FoXysj)LMe7!JFUpSR&b zsp+;2z?sDh=k+ZsnmT22Lt~xi!Zwtofd$EUL8`o>yx*X~%T})JQ(j7l70*3I-pEco zVB}E@y6W-J~xZFq_v!u^P2Z-$Ai(JYw;ZOLCs)(#Tox+ICSbTX3{8!;wl!;-mWamM zThm%12KOzdX&xHpI;L+%qcGhJ2fe_h=Ur7n<0L*ZhA&}6p_B1M+|HW$TvkiOO?Yg% ze0j1EN(+CiEoaX<`ov#cym9gOd+zwnlP|szo@^v@)MpCm*k^tqsF^(57Py zI|z;3tnaxoY?JU1{ybIB*|rLmcO_Q=hI5)LB7^PVcF@0YoWXoR7@CHb0@-nZ-Urig z8UzNpK`Mgd-~c=UVg~Elcg5L+kQ~wl?$eZ$C6tA!55Zy8!5HJ``6Nz@MRg~iL*V5`Je0}0hyX-h&3=({0VMsJh^*IfMIGPMk2NrKQQ_w_YS^%;5e7#f9(@h7KF{)#Ak^efsd7 z1tf%(3@?W{2DKNR!msKE7b)%((B3FmtQFS?b{Oy@ii-eFuXOnssWTCp0OS%+e|f7M zsa)(loTJe$pQs2Dg={P+M0uoPPYMp8`Q%eDmqPLB@grvwt~YO^@dYGCh51D;F(#)x z@XZhiD$WTOA|>D8#5Iu|6^3-v*rsO%L_~9eiZFi}f?D#`9#n#MO`y`Or{qHf0~JLv zXd9ZOaS{-3DT80ISD>X(Rn!vgQ_%oO>t@vNU$PK9IE{v)df+!O4Bi6HAzk#t1qIL+ z9Cy(so!%YJx-KCQ$I#*KnAjVMiFQPAF;;I9k&?m;MGnXs*|7lLyg>lpLBIu``CMmO zyflVCS%DlV;$;k%T?DjnU{)P+^f4Em`#q6w*m*tyLrj31;13cD zc>-dJhKqYr%mbsScv#MRQMe3U;XArA-vg0=^p0yeXygHh9Wi0dsGDxMCZ0$+d?gZ@ z0o9NmM}Z3Tdh7V!#@E@Y0cnui*`6%uj6uL=+n#A{|a zqFi$BImLoGOBT#;zCRf$ z%N38som4704n-z>-yQjK4?k3B1+H;MJ*U$$g6MCSp^B`&6(Z_Fm)Tp?Q->;sdb;Fb zw=VA9<7^`@^0a$+uhc#A7KrM&v2Qmkv|ABE{KV}Re)ojGR2V+ICFG$mz(#-Je~05`YHz~ z5b8rAu!VgK9nnyKCPzBihEzaOFjT9AI?aZYA^9p_+67BBbU(6(NYIU;u;G;DazP_% z8)XuZ7qA`?HQ0=3p16C3K)$!AM>Bb7tq)5!Y3O_&0rL_&@W#^t3sfN4=~4u-Colfx zkQW?poe-@DMn&hNBe{Y+PK9a#EC?{T zgd-F!?m;pXXhJY2Uhu_f-X;?-W+E<7f|5b|BP_ITH zX72oB1<^Jk$p}J45t{->QxGj-0<;Lxm>p`1CZjgU6fn%o&>|Mgmp>utLDp;l4=^_1 z3b@5xSZa6)f+kEVR)9?)Ou#^+w~okcEUZrCtN2P()XTRyO<%m2?up6zl48RkLo+c5 zh?cMcorK)NBXQhy+C6+rwCy;oY)D~YU49aXANNj1qnaKqDlSH6AQMNd6UrNof=S%avz~*TDS_Y(y9q4j zSfFA|paYE>0qkIa0eOg7f#?#mD8~-(juV<=z&hfdY|9aghCk(*{f%}AyH|JzYz`8_ zD5UfTHAW_YJ_N(SMzs|9+(P)!pJqOI>x?vFQSRK@vniYv?Tn&u6&^3Z?1QZq< z9`+!xtJm-@>VZkyMO|59zm*Myyn};{1Z5W%13%0LH72(q+u<}3Fk{pBTO!!kV7F!F zG+k+JSwDQ}z>fBI_s3GY+X{u>h8OX=bI7GkKun8xlv z&r;<$JOed&ji#Dt9Dn!~Asiqn%N6ZnAiP2R5W+kgT%i7v0hDgWz#bBU;&9Ov*8(WS zjyLs1BoL(@E$vPy7!?!vg?LmCfjtODZX!%X>qaOBSE+PxRw%TPNhD!e%u1Bbgdn61 zw%}X{C8F_%YC&mdF-VGy*F>R=FJgj375GbwfSKbz$dAA%P65_En-mbLxfy|loZ-R8 z2z(5LoX{5<-&wZ!tywk%dI`W40>=EibmkwW5U~D1LYo9fAT#8j7zxNY$_k}{k^*L; zhdLPtws|ri(y_5LqW})FD$A~8J07e$n-7ZOv19@m7!~;;FF-$7N-GJ{f(@zI$28T^ zim-ozZV(j!=7il!3QO*yAQmcy?7{2JtR;?gXb!nA;-H$Mm%)FLl|b;$Y=hM&6iGw3 z!3^*`ej6LX!Br5E4nM3^5bz&LLIniV*c}4$Y2gZ6yNes!N6=L8iQ{)f(sbQ+a?k*r zaiFo3xZ`Vin_u(vT?by64$~eY0PgXn_sx21>x5q9V`ib-Z~<5kNKnth$PM0G?Yei?{T>`x zfG$RF!qP$A;Wk7#Q4YSlgTcvCC6o8R#KMLlOaIM?00FV*T@b#kD>*sMws690K5w#u zhkzi+7r{<(_(^!E4jTeF$0(nHNa6U3NnJo}S{4XFIQuM+KZ-C;$!(Dd za^MG3xoiM*5>fbWfy>qLd>Iu6Itu)yljOl=(^O3?HHDlI8MQ?|c0@vns0qSRjeoyR z+!nDhu&CkaDG*Urw>6mGP&eo>%pIcMc9ZBdl28S(N-gQrCjdc0UQ{x5$<`&OIFT^G zQP0uU7}!qC0zdyJ6cohnfRrFh?18TEn{W(CLcZ7&Z;1sQtaykp?4aj71}X#sEh4c0 zgSv1RB;uJ69mqRH5cV-Y6U67)BckR3=!W(D!N6XOu#P%K1<7;%A0T=tcs zD|khD=D-k9Sv30(g`z&<2^sk8GaN%bQqsO;6$EvTjXYQl<`I^0xN&SUd&ULHF`r0h09KA*zFZ-Ha%?v@K!`66!WcmmHj=iY$O)B!r1AneBmP4kAPy9jYjZ{-w9(+9 z@pcTF9o%CPNf3>q#b0m`A-%H@H4#ui=Y}0XtYdDdU;xKKRl{GAGu%c|cjS%m9$BI& z1S1)fiV()@Bz7yZ5lO=42E>eb&a^av8ILuh0tRWs#h%n#$AA!zZGj>&fk-K`P=ZWw zEl?y{*Q_VAVP$bqr0=9ni~FY#S}vFto_42zGa*}0L;i5j`cv&`i;9#y#U&9xOo$zu zC4`433}#~RNC3YG2|`15@C*@{UFQ}2h817|v>R|P^5!L+q2RE3qF|9$Jg^YgF|M$~ zf^2zP3THxsg(NdGijk;%Pyz#aAodVDaU=+(Q1VHry^sJ=XGRgS$Pz^a5xp1cmRS!H zb>13nL8!Ju+sv|y5N)%VY!%urlTW}q-s=ZU@P2#P?Cs24($FbpJ{yhdz|G3Cl1#Q; zQPg09r#!SGRaKT6+P6?)ih00+jwC=9jG+wsN`UXnjr<@ 
z8%Z=!2#_d>z)B_sr-Y>7(sof;zfKZT1;mgICj$SRh^oRJ^ler*VnYP@S_PRav?|!X zL2;y@Ipoili5w7o2@zIsEF`}4M%HoK9Wx0U&Wd{~kwkHMx1ZG_*U3fJxWku1C@s54^F&Vaa#391LTpIr(*{jX_?pK1RO}@Yr<25h zj37!Fm}?2jWHVqq`rZ*_nt2=t^HhUA*ly~jlOa`Eiyxzl*baqRs0U}n1yUyt;K;%- zGp$TCiUTfYYcrNFIi7~`f!3M;3+%UxgxGcx6i>{>)wn9XZ2=3!ByDOPMRTK3=3QQ)oLkda~SvTP6BIoQlWN2%%NU zY!Du9jK;WZZMTg~k`+l+HTE~+1^;va>5jP()pC6+V>x_n!_suwb}X0;s;XP2&G`?Z zrGnCs7t?N?&?@X5F>e`7GxK>3=*gNgeDfT@5TwDc4YJ^TfX!m(d0Fw1>71FvDARRc zl7^@vIkv5OU|s0wAs6%kk|3eVcLUiCL3}csCO?bsGLQp^VQvTpmv|I}U}Fn_UXH`h zUnq*4%jLn1ZuwzPEf5o^B9?9D@_9(9%{PMCscPUo`Mz!#2uFde3^gYdDp4k5;ukf> zFC@qb?lBQH*|8K~=F*)|9D0z>FWqR6C)(k2WhA6U08*j1*pklbb`f(84|`C^h%Hlq z^xuTGnPnFt+Gh0>yg^^#f{*byB4_ia%*EEAbR1U@r+b<$6~=z@Ju{!vHMO#$6uJvu zLnSrUAQqgu_bAy&q%F6&Y~b9*OAGo`NSf|D?5!|^kdS|Hdt3Ca?@yR8VesHVHfI_O z5I_u8*N61J^wLYZu5$ncv76adLS96*5o=RIhY4dgJYfvVEt0`{tW(>pCEkH_s8DO^4Sk;1A3I8nO_Xk3}{kA=y0%>ag2xdz6)x zJ@d>{?1)WCKwcLIL-KW9+h^w9Q>RY5_S$R1FjAPmDaGUQ;;CatmKLRI8#w~YjYb)r%ETu0Qq!J<);W$ddi0pJ>(*sD+G09DpWxIftYmKxGawFX zz6`LNF?~iwWd(T`M!3>-YLB%WISqXH1NaqHqN% zdu|&E5!>3^PCM`?PS{+EM---T~x--nRYH`dydUF!4wV6v+{_QD=n}1&Ua2t8Zi}~3lipG zI^?;AM+9c3nM0bq1GPE#ymJalQl8EAjvQUS^71QEdIIindnTQ;&9)9MDQD+%kc?d3 z%BC|}t_z>H94C#V_BQNLLs%CWRoXkU?H!qXw!?PvC4KswarRk`;}5MG@{=F^WZ!-E zg*1ZlroG1=dtUyd9}le>>^p#c7YQ@jEGh)s!?WE*ja}ian9LigSQkuG5iD|u?cM|7SurY(YOf{PdojLm|-}!$+=L}Z$JgDtj$u; zf=g%8-@oYlhs`>4{5S~FSZw?mJ7(PYaaUdSv!mU#!lLOPyWIoXHq-?NQJ=&Sgx|Ru+L5ED!%4YeQCTIiV!XzIvW^_DiWHKG#Hf%;T z8#0&A<#8%<>xF_)?6K5a!XRe3TR>?Y{9pz(vGXb&VhH=5lQqI7zB&`K|dj*@*VVFxlzLFx}=9RoHqEbX<$aV>jJ+)5=vVgqn!K2l-3M%0VSNjOy2?xX-GUwR7ivxzAqv zj2%0wv8iG98*er?uHS3#-HVHh%S#7FHGScncRu*=qin2iMc>Mk58gX(XT3zKrKK5w z2?^MF`iz~X?by=N^4|L&H8#|ZnJ~0JY=e@aFTMJDd%DH6GjPo!o(7*R3-`*^5~-pS z4%kmNjEcg(9W70tefZ9r`j(M9>{KD>7>wT*|*+@oK4$@<2I7ykZAd(Pf% z&;2KjA0>PKs>PrF?d4Y;e@HrG`?h9V+Pb>+n&c0w?6ac2MJ?!u@xOI_ot)1{OmF=3 zX=5jkE70Zo+O;n}^GYfapF97f_FQXm;n2emm{n0|B#RR5t*xJb_UW2ct0A&9(z)6i zeJC-KRdqf75 zBc!mfa7fj#OeVW>j$^G|yA~ryMOop{k^LTi=)w12 zefj3wuO2gc{O5C5_3Pj7(Z?P;_uTLCwo2y+8ji>!v%rerCQb=O{0#^aX1qH4t)E@- zlMBv2_s-wl0R&NFNeo~ZPJ%aVHYXZ8a^zRnU5~m!5@;B5`Ge?fzy0m9WlI9bjnQam zDS1bwE9v^9$BgPfdU$bRNl_xXZq>q9Uw>uNl$|cT8Br7*JR#$^WD8?PADvhsS)}47hXE>=vjmNRRW0S&iQoi zCtt)1N_O3QukkyK=;&yE?X9=k+S^@6Nw-^vAF&r4;y2&=2!@#=$6!dFaQp#)!hZb* z=Q8b|eD=Xti&yLMqUlpdPoFRfjQis2uRs21&ZrS%&;8z+P3s$$Em->EORt-1>?b_#_f8H>3zS+2u!fx0?Z~P{9Rdg88I^Vv2_OJbDtB)}A1s!rOhTw(IVSXMOHx3GMDRfYkEArqh(k;@883zG$& z_)sF`Hbm1!^}J8n4|aKe`YL|P&M`Pi_TMAGge%2 z-o-ymB~rSqju^GvCCcu9(Ker1xS`G+V7CV2MijR&$sTm z=k%R+nxMxEOi#_5FbV9_PCBx0SrQFZxopjPk#~pVZtfc+--~ZNIZ<{r1)(yYD{_>yvI9Q7T2Nzy={+VZ; zdCDo@!6^tf_?1^)dGg68Uvb5ir=4~>&~sqll9txB?Ya7b;^>!OF03pc5{)HqxbcRy z)$4SalK^I*9=9!xfxUz)vfQ+x<<~dfRFFzu{fn!Miwa%WmSpjv+%04hkY$?}UG)7+ zF8#q}KfDCn`4^l=M~xZ6*nvNY)DUFFl?_+Yfdaekx+^TKzU7sMtR8p7A)|-&ll5q_ zq`b5!39lv=?{m^=7v%G>3-bpbyw9jHBgc=Mw$I*&T9!L`!o-7S?wg3k4w!ks{xkP0 zEJ$Jafl0^}Z1UM-4?keoka9Vi@bx%s$!J9CJ7Beml$kS-gl z95=MMswgnr;34pJ3s*_Rje_EYEGMJU(w|>(Mb)564Mw8lw8B=itb*c#Lk~Wvw4`X4 zsWT2a;9#**8dr6Nd^(nV@(HKxzyF@9?z(=C69@dLEJZK6@ceP(M?mog4X8TlxZ|Oi zgNKcox!-|tRZ7P70}no6!o(KbafPYWpaBC9nt5<}VR^oz zKrtLg^w_RtX3Q~Rh zR!-k_=d!+iJ=^@`-8cVm*PW|t>vy{zHFR#D;#TWlpR9wVW z{JhA#dGqhQ^UnJErkOMM9W*s6g z>u$g8wp(wy^{;<>B^oP~9f{w6lq3s2j988%`t5!DqN;ztBGriNx+A%8atn(3KxGoK zc$W(h^mH(sV#nt!eD9bc6ew}JDREGB7cN{-T~j-K+BAhv8yv8`Q>=TVK%%6GSda$5 z#;uEjWQf5M>GnV~RK+zkPx^As$G6^c^DnNup}wK1ysYp0@4s7LUw7Z#_x|pVyM_)M zcKFQk$#}9)pFU$JjN5tV8G{E8TeGJ2hd;XV#phmF^ws>G-3>S2eDgWyd~e}`1?#F;J@oKH4?Xl3pkc%@@4xd8_df6-AV-&^ zRAF&ZNm<#zp=HBHRgD<5=!*|-xaOx1J#gRJwY7?@op${g?%o5!acYtiI)!esupZ4 
zPuJ9B)L6A}Nk?-V%p}|7^Ap%e?Rm0g z+ih)axjYv>3rswSTyI%zMq7&{oHxXP2@1%y^VtoJww2dn$)@J!r3=4qs%d!r?{EC= z*_U#yImvQs)~<>fdMpv!(3;ltSdpQwTd@KLTOyTeXn^nAr0LOo-dz0kmupw8diC`; z9)05R+J-vM&b2nx@&T@A)w)mLsMn z<(95mQI~dh-fg#-lWnSPaO9+;CPs}NckJN@=UVDlE?bkyNk%m0nK^?mTiCLkbKERi z5KWZiJhiz!tH5dmZ#;Oin#T7fT~zYh`u2rwTG_Uom3LvU=N+r5xsfX+29KhIKoO^S z$As3LFtUh0s7DBS;$(9vXl>qb`WdI~vHR{n|JlznnH(DGz|a%Xw?N|3X4f|dNd(Z~ zgGfhoKWjDBtoB@6iN#x5b1<~I=8^~BB@$1hDk_V*g)^>VbzIT)cD%hXe?hvbh{Opl03L`|r8;-ap>^+AFiu=`3_! zlN6m(>G-ahuS+W%8;6e_>g7xrfMdsu8&WkuvN8bn+Vw5>J^1hg_uv2W-(OkZ&;)!| zRaa6>PlDvN9(dq!t!ZgZP5mKzO#_Ixm}X8=V}_A3qHq(~B=F1gxQsabdRb{~Z%LN* z`QnRt5B%<~C+>UXz1eS%o;bF?dHo;%^ru%|c_~#~;YSSK+VuIpCalAP0&PuoP0oqK z<#cj&3m49pwPah?8B{em;+hZN`=`eqdt8xXk}BshobV+eSy!PyHo6anI&33ZGZZ5s zrWx@$5nt?!aIMX}`1ZxEx88cwO*g?~ee12ax{D^<9&592U)*-vZBwR9`R;eWTUb~K z{os>3LBY2#I5ngvA$joa3zYKfUtfRGMHjAEv3$0B;z(S;YBe%h&b{psHK zKl)g(L3ZtY&;^?a7K(s0ikaM~xmJ)WU?9vVfx7UKc?u3N0ypLWA<=}JjM8nJ3mXH0 z6mT}`OL}PqyiQl~%!Up11BMKE?6Ifr{nLYwKlGQy3l?)eRjva)d-t7xeCC;FJl~DR zb)?K@%%MYvnzr@8V^2K!9{ z@pu$tr4x}iq_g!K+Kb8uWNk_F?On!INp|~&=BBc~19T(d=UcPu*OvF|_w?Uh`{UjB zJn+E%pU?Rm&KaZF1z@+nY%xD+pj zyz|a?&p73^*>CjkTe;w?d19)Yf@skw7ej|`OqekK&b#g!SXFiG(Z^iz!^;QuA8^@E zesc7&$6%yYLwj%|1@{3u+FNeF<(79ocu&=%4krY1NcJIFwDfui7PsHh3Ef1`^}eMeEp3F9{Fq4z@eF3OJ(2U(@y{H`QN|j#FI}!wUf_QowY z-%?pw;Ygl>VN>ICq-4AR9!5N##J~;JTD!XLcX!-z%gxur;#3OIe91ALw`ae5^zp~P z`09&ioN-b|V~v!{F8pfg#A&-f@xsfu{qD|j<3=m0lZd6BeEbDfQGa~-Wica856sWI zzFAmU>ew&^q-YERl`viJoew_B+mXlN0YCTLfQmlQht`gCP0jlA&pG$L-``}mRzLB~ zU;lL9<4-;J-1E;rKX&ZcbVnL|4=fq6kplkzn@~_2?9suB?<&5n5nYssrmd@AchmJh zUo`J?Enes-F;CJsP6}LOY)ar}P$fmgr1S`^I}93}VySQv3gT%B>_1uBtzU`hS*_XJ z%J?}UltSFc{feCWoC6jEfsfB_RGOsc4; zfXSBkwGKzAC@v_EM`{}yOi#y{-Ls?lhDb4w2HZvA@OcZ&!1t*Gn^bFU@cRKLEuJdre8GV5FISgKEJN6W~O zqY{Y%1^DEo7cE(6+CW}qVE;ZSB9rw?`xF^*Z9`*gS@}RSpN19SMWZknMh)mYu&fOJ zeqBS|=POq3Fmc>~J|!)U>*2MuwY8@^a^>ay;4l^z8zm*N)obhG@jgXG(QL>1Y*s4i zGqA0t*|9Q1hYbSOWIODphUQGJHDk3889ik2vc+*N)~|2>RcmYEbf;pem=SAeY=HF% ze^XXq_qYileliKi7WUeXJML)PHmtTg?)Ys}Qxnuak%)t#VAQqO{-V0N`lzFhuBsaH z#1oGJ4pONUzz(Q4d-m*IcG(4fF0>_{h~IP1eY4+~4RpKc2j4&R&_j^V=bwKLulCw& zuN^sZrs&$RVZ-Lln^#;^ z7}cXyLxwD0z9f-I=&DvzS36|r&`c%+w*clR4DkSuC>@Y0y28G0=w?0-j0$H6(`W?T z(NIHkD6AU^QX@Avx4ih`bH&9aVm?YpmgN)|7nYQiOrA6a{<{z@ITul5C1oYiWb4`$ z1Ir2`l2zN1$y(l!%8Gn8UE9*puWC?HO0})rqP2CpYV5w_PVF1oBDU0?PtQE+z=rir zU(cP}Cl#MQVchc7t3I6bc_LoW2GPmqEBf{uGpH(`Zcpc%TeIyeYnw+78#5rOTeknf zq7~?rNy7(6HL0bcp{BO3sUxkbamajSWxrfL*WTViE1S2D?FtBJ|FYIhrnYYN*bxK1 zoV&>L3*fkp96uDUMY_4QwQ+quol_N~|Db`%MAWjfOKMgag#~7NHr>=VaA0LoQSq`B zt9&2c1)aE2V+XBVvBHE+r^$l`RKkzWW!=KU;t|7#D!RXJ?eY!vO;+BiC@bwhuzxzA zU0uB{?Rw*P7@yF!xMnO^v|z)A4bV{Xj~#`2(kJNbv~P_dCy)(P0|%@SD~4eFx}3J{ zX`D%GMAV2Qbu@#wcD#U->BD*xYy{wlVHCeTNi3NztzX|TW9O-BRxRb!DVr;l!XqK zZ8n=t#-f(2+Dg=O9EtbWxJWvhnaO0iZUi4QXlOVbDR_^rB}X+QpUx5pQbjg=p?p>| z^qlRaj97u5$mTMRY`-H~It3F=84^%!P@}r7&`z<%W{LWi~qcn|SHNbQHmwo#}=KATTYr!CfwZ)Hy(g6ZjR24a+ zH#JqWOcUN8Z-J1Pe%y#f_^yDbX=sz|z}QEEKnw|aoUsh{qUgNoCR0fy7jh9y$U%J& z6(e)l97MBC8^u*tRsvFG44sU?1Tb=NfNHE*u_Ab!g7i*A%e%5?=VP*y&}5wp=BcLR zaq(^2k|ZS>Gn#U3S=%ZuDvCI+;z$uw%4giMlSiHYgHvy~>8AM~&mUDa`06Wuw0!0A zd+vWwkHc4pTAX=o>n=JrA{82KxpaXZhwIV0p{by#qyvHmLCSR`3|J1FEG5gj3u73N z!#jE~6@l%+!wk>yEZCgw`5atPh$38dKWBrF?Y40LLP*I!%b0Yip99iYHv#Gc7moaOOKC7lN$IvTvE_)~xTu3kpr! 
z%Vsh$9X%u!6&GbWPmoVAfGo6ic-MmCs3_uFqwbJL%M1O^uBV;~YYVmXoP5sr>}WD- zJCdWu_>vgh5MVZ#CKwqUBUPAbCvAkhX;ap$T@J|e98+>!Xe)}7_Lxvr7>~8Lr7MPv zSh}vUtg;kZ!kOMZ^FdXJv0_pdjpX2_fRM}XQ3K_K>H-sK$3RHXDPT9#0OOFH3rG>0 zsrDibna?j6K!#+_eh0`iBALRBkq77R;|zh~rWgdrl{^R^phJ-&DxD_2a)pE%?+x*e zpliE%Xe>1t7Bccd!p{5eC|lsJqS@pw^DA*ulol9fnnYfC1-`6KUt z(YG(y2=*6r2NBRdu$63v`6jv%p%g)KL!6UIBCDX6!(4e?9Efln;u`kkiKcM!6F4IR z1>^-0LqbA|J#W779kn1V$Wh#D=k!nD9d#8sBhcg0U0e}?t48pCDDn#{!w6^x`;{-F z1DOzvq0on+5Rnae|HBTv^rDMI%*fxKf9ChU|9u8ysuq=Gl^FovDJoDcR0%!~^5y{_ zP$LJ*D;L@#J6Ju4GpjA+lOyF`Loh<2EyQXr=*6gkSc62e9&KTUFiJeP zv9W&VX*;f46+#}$gCy}D!MI^%!z;=$yre3JEL~SqR#^rWCAD66dT!6o#+$Q22msVg)oD(kMe7VeZTVNs%nb zjG#u=i^>P<+$jON3fW@M@p_O%zzlLjVKSFL5az+A^so8$1ue9IjjWew0pmix*z_=1 z?89>$iyrLiPG0Z4v_x{y(BL3vn7Hg$%rST@fd27%cVUSpJpEZJ0wCZnaB8?UC#o7dAb_(jX#8R0{HkEYW&g;2D3#szGc5uZx%b zL}n-?H0MQT1apyhkg%(aK-fef8;^JxLLMUnKsJ4}^{o(U*9CdBIdQ<&1lKrhgvX4D zYKvYb$2ru9Mt4RQ7rGNN1mvN~%?iSP_dOqKJc28fAj1VMstshj?5p6c%TYC&Z4cK?Zn75_MtT zVhK~?ItHq*~od;cv3R>NDvX$(iwSX%qI@Ncpv@w@Xu7;<*5PcgE)tRM- zn!sb81YOV02!CmGllSn~+uPck8XD^AYHbVSyA4qX*a7*F8~O&qAkrZxjP_!bMF7PY z`D4Ta2XGPiBvy3gAw3v^79tBqaUsG$Pl1tQ!y)xq9Pl3(LK5`__k)~(aKKIxh8(bQ zEac!!>WmCI5sVZv7RWi<$65u=7EyU!q(E?376XCiVgqfnH@*cDwePHXcqW=BHqm4R zhT$8dhg}qc(F<@hh#TfxB zlywmuyKad(BX&Ov4@HxB+AT%rZ%AFILH_&?f&%#qgEpriUbqDv6`XYL*puJ|oG~a5 z-LKBe9NK`8y>C!dbO{FNU_&Kft73yi9G-@o&*$t=uWrQ5yAg&*7alvbQV>F1Dt1Ug zZ8}f8g8yOa-ZNVw)-|Ydk2-g%Yq;+U4URiQu@8fSQ}Nq*E{KfheK*xG44@_2&X>D| zPBIj@F)xuhv7+AXeZ%R z#R$?FC-`NY&Xdkq8&5W&-b4RAX=6KtfP{gYGI)EbdopR~$yG%%A06l=Lzh(7RP-%# z0^g1|-8M!=v0aYsb`dJckqUWi&%i=wiocs@zEQxoN+D0gPrw&($epM+Ff4c`gODDX zk(&S`)Dwg&J|~LdkRgDtPF(03446S+69gx`pDB6*;cUXD`3Hnh>_kn(Ig%kmAY-T9T*o}{I_## z14#%k24wjq#z5N_(K0?jeX0bm8_O1wyRt1)BfQ6tra<1i_==iU(8Z2-JRSsPQkM&Ms!x2L}NmJeXAz5U!%C!Q@R4 z=ZEx+zbT=e0p!)!%$T<0+BGYBKpr}Mzum^id`EF5t)yycbxnCi83f05siYfqh_pq> z>#BNpqBiqptQ$Ub-jupaDus}fDWF;i?ipG5Uy>}=P3USVAtQ*#JRrfr<4_#3EPnSA z5E6Wu#bpTZjBrG!(4$gB8xb7^^Hw3VLOoFfPW=Jo@sfZ%ei%&PCB{ekqlkoK)`J4X zbCdvS3U3d?8^wRuJ@Wu7dGi}~1uQEhz<4Htd4m!a5}A`#Vz3MO&C~*P00=A$=c2*k zoo^ z!Y=OY@$S?;Bshlf0QC&RfNUe-=1DdZvCUI_d3{4|@c1HtJSvq+K~{!U4b~)8k_|Oc z(Aw4}rbmGt%oH_3#@)9~d5a(+ubYqurBPJQ8-nbhevC-K?2L-v?x{E|Lmv@H16N!M zE(6F2MF^#Zgsaq1Tnj4($)sD!MIgBfq#}C`tz{XaOKs=e3siu15;LQKv&sR)grzVS zW+~bNY2YQnPiO&}hWt6l1z>}ML?*nq##$;YLBz!#jO0@^`V9)}iIk8lpcYV;m7}T? 
zc3BGJ^B^ddbzrVRD(;0UAzvDgLUqv{A_^l02do%cM#eE~#z!#sY%7yUoSlP{pl!Su zW*{0RQlTKWkF6FF#AQk$$Nx~Og0#4ZhKa*%ZR8uOtD^Eq1C)g`De@P2g+&D!M@-WM z&h|NhYyoD)o1_q=9`s>>$z;}4J6HUO60|>Y)oFmgH5U2#)lU$DL5o`<*_hN`%3M@95 zJAhk$oC2~AffX}1!1o^PhUtQ4(axf-qEl#*IaE?+mvdcRSAE;&=NJeBNDRqDd?Z2~ ze&`CaZ+Ypcp`rkXr8yel4@m$E!3`fMAk|K6A~9LQJrRbb0-AtQ1PrpD^Ac$3z+QCo zrm1Q=YhvaV4GLySDlTbgoMU3>kf1+?C%(QxV~Dy2b0cr(bVG9;E~*ZRqXMF$*aZHu z#$2t7c6x*#S7d*%LLrIgfMU@QfEp^rW|AR@Bcf45gWh42NIXU$>kPza6#OFvSPDep zV)TKOupd}Fi->SiE7)Y`v2$HpH`IJKhkl4gqq%HW2ONleSYgh;3C^~yeI3v!Uo>*yK$G<_2t}+Vjimpb}*_`7>lBr}%OLHQX09zm%y=6-63&K@J9a1%9`0(L< zNtKnA72#G%&F^SF};04P)d0*b~DToX7v{Pv6*k0N`s zwFRn3=wScS{bX&(DTJuVedy3~+s$~U%4KT}W&D_N*_IagdTxZP+KN&*i1Sw{yZ{dY zCd?5RSW}TDuMqjfv2H~B_8-vRmM$tvjv7ds;a8~{rfjG zG;F-30?r57Z)>w%zp~$uOy2aY7MK%7Ky7x5JB4NkfU zP#Tf7yakbsjvPI*s%rS4L4%8njrP{Yyltv_9ELY?vecNO8&k#&wAz|&3uB6okqARO z8-QFnz+qs3?G8g3odO#gjpR570TB+xf9c*WI-aG&J-4c+~LAzW@Et-kR;XQqG6@X-N(Q8JNWN5*$0?x%`C3ZMXmO^En@GXvo;U z>c@>|9)0ldk3H*|xppfZQKf7Sh77QW_+z?xezo5Ms`K+emX))uyvNVnpinNQ1zq$U zTlVs(b>)CTS6=b+zd!f)Y11d(eB;f&)ArfNpGV|m#*z*@{D>d@=!Z+b{<3EEYFk!w zV4{_kTo@pFE}y^lx}Uv1`%Tk|WHLDfx=tR31xlbwabgf(R715h`J5rvp6N*ep9O?; zZqVJ4BS#b#7JT*9ms^9p?ujCNjxl8D5a3sDkSE!)e&C@;pMC0Sd+oN{9=l9h^z|1l zt=Y+wr=NG;xrZEb$b%0)7>ji-lM_JRHp!3VO!KG{P8~UF#LBNf1>$*r3_>8o6-AYK z2c2SNf#Z3uQI0&p4@fZgA>3fU2?7R$>4kLUuvbx1Auk8Qqn_Mo_zh9+gW?{Yq-yGZICw-t4MI|M&qQIjm z=~HZ3b|Mjn@(K~#ggAs!?gZJEefX?dzy9@aUVr0_rAwC0`E1VTb3Xt2s|8nF@zWn% ze9<3%|A*p&lz0fDSxOA>x5LP)(W6F3^@5hx4dW+{oj!f%%E|#7nwm4&%((Gmh7BJ& zVf>^qV|Oqlx2e9?QuI_&+5Y?PHFWsUfy0LuM5CX-{VpJ~YSif6cHd<%k(;0Osj3<< zeE8t;<0m&awanOg()%C0olV1h({j3U{7EOi@#kkXB{FX334@0Zn>b<0!2bQLY+E{S z_8UBGi7q3{g(nyp}o;-QnsA1zLj2$p=pkCQIFi%Pi*BIdI!iOJR-;9y}P>l*{E1 z4N-;^ytyJnog(D(@jkq8XMQQwY3^XY{-xyu&za9em$eGuyD+nv8jTBwzf9J zLNF+ueu(&9b$zsqeD=(XM)Ul5|_}DLgcGcc{?+*X(v(LW% z!4H1;QSjGYA)5R0=J} zKOir-i^uptQ@~tA%4V~G9tWe5gK>we`rxVt=bGas9Z3MB8PRs#@4!ngJp0|}pZeh? 
zmptU(|4Wwr@_GWU%?RCEBLp zxs|m4dk9h;>>&x^SE#l`aN;CSmE?4~W5n>GnRL5rnYLvzF_2A`RG3z-gRxNDpHSqy z<;W`EGh{#XBvCRL9G(&aW%-(`uO2mOL>Lr_JpJ@D;+KvaHwe6xn7ZSP(@vPVe95X$ z<}67R#%AuftDz?g3o8}bc>VR4&OGDvSSsG!Y{KF@^@#mXIQbN{c))kQcfs(|Xltgq zV9?-PbDL%RLxv5VdB}bR1&Kb%qVm$g?|k^dcfNCMVTsY+q5x_Eh_Lw32v`)^oQY9A z-Ja>&x9C^D`OU{vJiREP&;PXK{zsob;Gkos@3=$tszq7n{ZZql>^5zp=41v99GK4J zKAOLz-@u_eO`W7k-r&P_|N4UOJK&ORxQ?nDrOjC_XGNw@o4D+`*Qe~ZUt3c{!}8@R zRg*oxZ{LbZlP0BN%B0bwmoB-uwQa*KH~xC}{SHbL^||V*YZ_O6-I41^6csI6u;9_V z@8OeGfPkh#sR0mRR=`@(fyvwH1>i4(r~;)|Pax&?r8`A>daSy2IF zs{ergOO`A_DdWbCJL#m8(C^6blTSW={PDl^FHh91Ti&$3sj#H-t&hH{8ZsnVSaR{j z7r}B*B?`p-!%o9z`|u8=Dr#-rnx9{N)#aC*eSSgVgTKEkZ|B@-3}cb-=R=GOd=tvC zc)=F5+wQyd>(^HVa>T`@dGqG2S+fR%K_KX^kAWt>!_@KBs~7(5(FalyGcU#d@W|gs z?SAmMNxK#)$&%s%SLr)_$KlHtf0B=w`y6v@TSwvKVO0$a$3mQ{X3RX}jC1vtrann| z-Q=M+-2K?T2c59zj2Zb2)%o^@Wy`+=KZ6%(wucapZuB>xZQlN4mf>L!f`n|nBwYPw zRr!ymZF;pJB9#~^Q>j#)IM6Y8Mva_Uc3a#@LMU&c<4h9I*g`ID4=*zruE)!}Vcq%^ z<>4P``Mkl=qe`rGed7(k`qk`rKG=Kae#Iq4pUwFU&dvV&9oVO&9BTR3XP>^{;)~BX z^XxvQrSVvN#E4Nd_uci}v(LWdqKh89|Gs3Z*hrP_w#Oc$M^=6I=||12ty5-9>)Wp{ z(B_@D-?`@MUoBs@1}%Wc9FbsGy5iX|{5%!@N@Gp+?RVUE#+hgR`JspUSN0Wdb9CXN zMK|1V!^%}_4n6GPT)Jb<+&S>}4?XArS=Fj*>*s#C;OnpE7Zqv4hV-*t8~7w)C~%#I z>SgQKHw+p&rnsbh(wLEel{NK^mao7c->|-R>C%N?%%A5Q(c!~}xS4FWBaQZ>4t4bn zzxmZKzklx84?g^GX-R3s=Gp_OAq;9*Xr82eiwnUI95f)AiYDSp#Ixh7US3?ONJ=7+ z0+UepPzSf9iCZ9aJ)Fk5^X7i>=_f}Vb6hG}0E^;K70h5dvH6JYwm;tOID_zXoYj5cjp-&EHSaV15LuW!uNH>6AZRKD`c->Pd@ zKKR%JS6uyzjO9fm+LHO7pK{Xax7~KH(jT;)K?J{-D*zuXP zS6q;I_Nj+|dgbM7*VW3Z0fUraynXZyqHb^8&7y5Po$`(9@gGOQEIb?#!?t@sA_2ts zu*&1wkI^W6;i#cwi$@J{kA@o`YSPmMc!)dsUE3CEYZc&)U~!usHCA2IZmz4((@@_a zzLF(bxwfjo12(j@y#L|*aFBtvwRH^uXEF(8i1$@#Pnvef0i&BZiMW>zwZm8a$v+X$iu+ zf3ZP{SccQSa^;Gatg^>$`=*o#6wlOj--r*a8glXp$Bh{|v~T}Gax`IR1{Yq|baK-3 zv-$km)ob8jI*!}Y+{`=aG;?^DjZenKGOOR#Lhz<9wPo5ZJF7~LZ|Bo3EfFpS5)#@T zP}gw~zD?TD+IraGvkus2@84egi<~g98qAz{+kg+m8l?OQD9v_x)IZK zY`kI5dne+SG6WD2A=}_167a4QGYARk`Ai?01pJI}ngyZ-el_pV2zygi`6I~+vgBM2 z+0QvyPj;y15if7%yomD2$8+KC{rqP?fAq0O+S)pRyqXr%6b+`9E_L9e5p6f~!^ez< z-+ROLH(Yn!ukx9!WT^1N9ShA>6wL*PWy$beE@Lw-n z5-%uEr7AmGvg0R?sc%~CMz~m_E8hJIhE^N^P{B8JC@fG6P&h?}T~3gLi@qri=FR+x zM;(62h37u^+zWsG+e>_2rE8Ebw4OsLghvdc=m6x2{NV_rAV|?0Z@h8uefQpX|GoF! 
zcmI9&J#g1uf2zk&$60jYQ)38|({8Ssw_-rQQTrcxyvfOz9j6_;`_$oOi|5U4Z_dj~ zOp&>^w#3_;(THR~CN)itxK282HPyFf)9rWOar=+IfBqv6J-VT}SyrT6y4|<(7||py z$Hv#)zQOjdp-l?ieEZ){kQ0zllyLMF;;=P>TrnE$39hOS_ZDUZ%spCsx~@WbUh(ST2#}$NTx6mOGcwhR;>E;i@ATi z^AEX9Ti*eLE!$0JHq86_%VUl?`k_Z2_Z<1759ZDJ>~kr?ZvsTrbTkpY=k7-?`N5Ur zcbL-AksdNK_1jyn&3cZj>alo%E*lY`ucqp$Bn;0OAS97cRjqkL%Yyk|o^i$*{)vYJ z(H?y0FSB2Jb+^6uzxVFDBrDxe3%e<$NSS0j%C{0-z_HQNmTzv&l$I1_n(JHF*J0#P zqWYJM7tT8V$Xjl_rMb0Dc5D|;VLX~hl^S|NGa|7lR2sSmxKm@OfMKvJ6eXL>Noq8v zNB|KSZoxwEuzbarYRUCrA^7#{*KfWMd}U?7U?KRiW5*(Sun_$6Wvh@RSO~sfzrLrQ zc4|>^F*fPJ@my#A4rnEc+uLEj{WYyzPmmC&=at0O*NLy)xhSC#0ni( zfA7r?cAq-&&rdv7SGNju?_d5Vt2X3c@m0q$m6*nwh!v7Tos0748Ej%`(xmt$B@ z#uJ`PU!%7k0O|?(jYKTdI`W8F2OWG+Wo6}%p;beN4aGKS(7@x4I}VC>=kNcJNW|f| zxjx2rH=k~+UcIWRwFBahp?gD9Vpwb8LfAr>?vzwYWG&DA>Z)$>fTvJ;Mm1|sIUtPUs&6-sm?de!FRlTmUWkYLI zb7NCu-Iw#fc>C@5VO!;^l&Y3gk{rBGo>YZ}RDMuV}_(2CB_~$=Ax_I&8 z#>U2e{rg?{v!Cv@_ny_&)fjE*5_MW zI+{1U_4e$gD^}#PHjF9Tao{7P;qgQq+5{xRvEw?ZC%Ot2+4`pS=}a2gg0X=-gv6Q9 za&`5(gxE0#)i*USUA(Ae zea+f+)ytQ!TE22+LqpxN1@mgE*Ey2vaBB88(Pbm`rX5?SZKBeavfNfV->@G4p#&KK zjE1)bi_W$zzGiE~H1nlcA_@6}mQEbM1M(^>D}yX2`$3(WK+KRt`R|0ab_M#p4MmHgbTH*p?eL;KIO6upG~dMFHUe zZ4_HFNwjx#O9zW`%Rgxrdq-bsQgcxjV2{brwXMNRE;Yy>v;;CcC%mLPoXso?GjUE9sIF8E?R=B>I z%|=tnyzINVe324G2K~%9+(~U%BEyj;nN-&8P*#6>eElooy02=t?wqwI$LvkcU zec&;G1*kxPN8JhFh)dvhDw$%cZj}ex1@emCY-?`=V)7GZd_g0$^){p6y<=R0tIs#E z0UeqOAHvS2J75AvHA5kWD(HsxY@5v+){%HD4)$V@)3qoh+%v6Ysxa%?@Tqfct&sJ2 zDk19zzpkfvNr(qm+*LW%y(gcyMksS5r?*KEYH^==ON90bLpD6^LA_{VPeMJmyW0Pm z1la&0K{XPIL_tA8uTr;>{7CSQZC%aUiZ&X>Ms=Z)MVsWkW7_y9+(-dhUcB z0D01uA&+7iI2}BR_=pDJ3&-tFLOzIxnxR@;haFxFE_z&{!1MW~2#N}CR3Q`_%C%v6 z5;u`GPz(q3gb;v3-b;cALnS~gOBBQq907Ew$q-jr-1tHg2f7N+l6ET!vrUCl-Y5+s zq0j~r_o${S^HocS50ynE@j@u{9O%yLO?ij<$F|oyNi!a zMAhhHsHhr%-s6`PfFDR{pg?2`j(L&N(u!e2sx+Xc=V_9^X4Q(?W_}Z{C;h7yf*0@L zsIITT8UizAHWr(vsN(e-L_zh*f4+^*0ti^>Ul6acpL&6+;3_0R*K|%6rwsI{FniN; zvl-a3S&S2bunDOM({`ior+glU2#Ak821VwJP7rDMV2^K<(OQFKL==@hhAW5?!Qkri z#v`LRK4b>f;Y%?N+7rB-gN}rFAs;r1%?^v)3W<2(@iquCxn{&xVsknd$_~#q(sq`y z{Z;u-A}B1n0~AOI4q*GhG61&VIEd2=am6=ZlOyZatUl-LGjQOU8OzMMww23dI3ZNk zVJHSx+gQlTRv?d>#wQlygc2HR4pE?!Aj+r=g$jTq9BQP2ZgD{-2?H({$dp}Z7a! 
z6#xLRMn13$I}?dYW8b-)RKWj0;BdS($rA*5z`nbLAXR{yFaxc;1;Gn$=b^udV9dMM zJQ3{Duo4x(zj$YoJw>R;uMZO(`GVu@c{ifrl2|H;GJvaHnR2x}>>$5Fc7(tX9Vr}; z$^bO^e#SqMw$sW368}?+{STy|n*uHX-5?$SIG_z&J&caf7YI)Pg(6Dt1Sy6ywr16; z3(r4WiZ~J5@;LOm5!=mM7UxO;e~?Rgc^NW-frew`8>a3-LcTgH)pg^!`xq|NKvW!~ zF|i5)PI2ek-vZ>qnsU7jWW|w{fF?{f5uLAqdlI08QPFY;FeBohB~nTmmBPUkloZG` zIRx>h!XRs^59iyc$b*tjvl1aJ1@VPLg4|T1G3U?#+#u>eC@26Th>yvjWvIy%;_ zT?_9oJl`&2(Yk!5d|m( zxP<-IdkQ%ylHAtXGIggNb-v!jT=|Ztrs+mZl2rpr4HO(Pa%3`*C@n2@9C(NuhviN} zpY^oz2rHD=&8$4cKrmOW_F*ScgfeMix@5l^Fu zCtG|TjbkDa>IUzV!#*;0B07K)@Wh6w;Hjqx&)rclY|ypiNR6^#`9W3SohiJphdcsg z69z)q4O~(I)g&6DUi7^Il%n#2j~HWEXY`%GQ79+W6IJyhI)R5KB8nF`uM~d#lF=<@ z&Q@b1hsD-KINsO`-G)IJFp3JL_bk`f9682$GQbGt=IbteZ+;~ic;|8%8c*RXsmOkl z-bd?0EAN{Up{kpeM+p{EivxHG9Ev)L5+kVl2)dELV=x2kXDmboHjCkrqY;Xr)rNji zFj%`O$`5dlAFQ%q_opx(;;UrR6-(3>l8-m(Mhw;%E- zA|f2WLSsdb6CFg~(s@Eg02!z!4K$c!D*Ou+07<3lNiM&W3QR*o6)DGu2A=K^9as$1 zhAW>U4|u^@Xr$~GL_9sB0s#@9#i3QxRJVtgbC3| zagPt@g1LyoIM_oJz)|W1FXI}9RX7%qFu1e;@u0p0+(68^q7LV0Cm%=0;cI4q#T=~-<{piOd zhL3=-wzjrzExT_>5DCbl(0pN0QRW!PF*fXbgo$77#JgD3>MjXka3I!brHC9RyU=OL|vAQK*n4^siDfMD2&57<~XDuF#?inv1TLkJ!|1V;k*MNA~_)EV$R06dgFrnXEbpHJ_GKIK#LWbv1{w>cr=iO!OJEw0P-ge-e?Vb9L`_&3 zb^&w3aaVW{vx^WI9tr9?W$fUTM+_9l;F9=ZkMb5C-E&y(>~{pZ@DB)Tg3ZF>!!Z$0 zBt{k@GyVvcBAh+iRobQ$Cfax@JnA)aNX)&HMC#xWwBt%t@>mCTaXCy&_+C?R9(Zv1cxqEhtFAE(@MrY^^W5QCBn^ z^m+`!5l;{jUO;src>olbU`X474j?2`jL91tBeKAb5s32~usgFwE*J&)iCf6J#tkCN zZxq4kqc4pO?RA9w<6o$|C@}ysKo3q)2VCvUvXdANZGm;ki4Oec5!_NyR6v_lu-iFF zG%lbnnde%3VT2-v__k!Ax=63{X)uwWJubHH)FULwCm`e? zG6At@A}G4w*WoL`baWM1WEMPM#r634EHU078g^aV|Clz`1a<4oY4aL(Z+oxA*hoU3 zoMtu0OK=Z;3jPqPc>{o!J%%nJSYl9-f=>x|)qpa`Cufjta#QFLF5*X^KXD!@!FEFC zCd5nKXlr%-Fa1M=u}6cw08K%%z7C1yln@9d&2xl@+5#JVOJ?}i2t%w4ms8?m*a{zA zL}1y)W3fy&t7%#)l?rV5trPXRn?P+&Ku<_Evf37fae}x(66`76um~vP6fn(;U<4OX zO~4i$K+9ppIdH`y5raajxQ^>!>s~8lhkC+rL8AfXBr&74$Hq(80_tizm6W%Au&4JO zW(xTiw;6&58`}_mXbUn9bq4ta2R#!Z5KRtGx(KDw^@e(5C$yPO5xn?j6jmEQ!4X?S zLgp&$f3yqAu6e#H+4K}S@Sw%paM`w#wkMOV#Ob=!JxbR$5ISEV%ZW)gI^}xJH|jm#O+K^avj-r@(T4g z;@WP;HY1vB=S;<6iwP(Y=Af8ulc;<+p0F;tTn@V9I()+vnV_+#Cb$P_cWiz}1Zj|@ zi$v8}L*3Ak1-~%Tjfm?4;K)Asl;A&aPrAZr<%VNFV*3%xmn=@ppt@pI0`hV~k0=U_ zLY~-=Pq!`5;T{`8SfZxELPW}N6FtNC{Ij;7v3)b=I=P&c%|>J^o9oD#>1aGA*6iy^ zw<XX#rWX_x z6c!f5DKhMo2vEp3l)|DSo%0WZhk61ukUrq3_}PpCc92&v1<=ZX zYcPY+EXUz19EF92s983f;p8AmGNMtmpY;Xk3RZzRK?cy?cp{$9=i~7>LO4%CFbx+) zOjd<(K=&Nyo7bn88lqqR=?lTTTGaKl;zUtFJef?wi^u^!6j_TJdO=ZfHl0DCK};Er zpJ3r;C2UG;dn_JIF95`VJz<+W8{eG}B4$VWu|y21_>xULfejLcnqwda{PE2Xn716^ zxELS85v{zuyiaKfQiF}Ws|lgteH&CUcnr)j2OAAzQ8){N7eEgHoaa-1*w#S3f>c2Q zg(jnhp_P@D*tS_-R)JBR69>ejc{G^~2=Wqqr+~2M(4lM-|5!D$MwGvidYpFS-8Q+% zA$nsJ9BgF+iAw!1A;_7HE`nyelIltcO;Mt%<(L?c)j+>QXYzo3!vD>rikye*CW%QZ zQ{r*HP>W4Cj^a7NE3dq=V8MbpbLJrJ;>C-h*g!1|q?>02PMp-% z-l3^{r$StTAEyHN#*LdWaNt1C66*zFfTU#vBPFzP#3=%l?6daRYmck0z2?zB|M?GpxFZ%zIIeg6amSr;`We5x?w22Y_<^eGY?7Fd zqbYhioq6Srmrgq2B$FTUvZ*>W&SjY|8U$>m8iSwNbS|FcN>@DzB22tZh~7JsbWcQsOWSAW-czSeyY|{^!$4A)e|tpRcb_9JI{k#o%A!@vKD+MM zS5?=fi;DVQ|LfmC4EEn||Kj2z#1=hEho84Yk$)fqogQ+JT1Fz-Z1$2%FIm1~#cMCW z5{<=Ri-{C$HMk!}>LdY+Qgd_j)6YILq-szY+KnJceJlG0(Xa<1Pn|aHl8b*(QBjVo zAAkJmop;{V+`PduEm+(N7qjr3@O=+{4GUdJ;|}2Nx#%@uWqW(8qDtr%jD*J=ed4L# zIk~7XA)&Lqd^(-K_>xO6|KX2sxc>UJwKY%@3JE0>@~zsQtN1<_2jX|E#FTPQCx8s2 zYA{PUe52ezq*-;*At7_XDZ0!&!(qV1EfFH{PS)0660`#pAB=!!OOsk;>QJ+;!@z<_ebM6IamzR`qEqL2T zwu%zr#~dGi_~DmdemR{^WBkP6jD47LQ=-^)5!?;fP8Xbi;Za8(b?SFdKjoC~e&>|$ zo^kqFW5(>jsKiE8-}YqBmbe6R|Ej(tM-MA6uXKI6-@t(rrc4|*dYGasrejz1uNXXh z(D)t3PM$b%KzWg5XUvGMCX05QFlo|+aRUYnY+By{#}W<0*gkpkj-yA9NhDIfr1mK- 
z9Wr>pgz@9zF{7oe$#L^yhOdgIwR*4!;<^?cI=pJ+$ic{YhfyPolN$82sHAf6;9;Xi zk18lCk<{3T5uw2rB1H#;=tl!A-lO{}>Fl@wNU}!-qdHQK5Hn!G9l=y%_!^VspjfRXK zJ*+4d6O+j}T0v*{U9V0M0NaNS9Xf8@xKt`70=N)-QBe`R5CVJ|F>+*8RaI~rP$H2SJ9g~Ikt3tg7*6|E z^f~a5eQvwu_u~hT&9tU>+Hq=2OUtjW|Mh+M-dnw{nid#!i5tX)cqa(84~JXgI3;x> zi5I{B`uyL{I_JE@4n82))*?G_&pGPCN(v%~cpC|_gsP_7xd4xlnlOrG@%3Sd8HXtW zB7l|Rva+c&c76W&7e-VKnTWCQM;E8_xyyd|p!4s=+Ge(Cr=zTYE=Ysy@NB1dxc(wz3Z;KMvNF121O#nhK>x6cuy+tL=vu?nzrkV@1DG0du!VV@6Mh7<$@DVJhp0Z zf6sUBz3=|F-g@`4A70R>v`q2gTtp)cD^5A{47X_LvEM!I_}wP9bTmg&$?D~+O)FnH zp#PEIIcfZaaqu>tfBvQSKm6dF(@z-Gze0%={rbk6JU?gIX<7ADTd~{hm>g+tGb{U- z{pr5nFJ8H%DAr$cGOz#rFONL^`mE!=yZf|>*|z$po_jU#DkmLtP@kj>zvk*+-tfi3 znqBrh;K&1Z183CCy3@}2US5W$7j+#y9xY1qegDkPlgB*u>c?Yu*}ramQ|;;%MKMiw zyvb8`-Sg0yWpSmrSU>y3^BpVq#8VIKy4OL8l74@@^N;xr)xPFnFn)LTn}2@jVc4|% z(xRdP#dr_t+gk{}Pf2p=(mB-)i~IB~c;)qX2MiumSXg%SF-H#@I-;bPuafe=>}a%-$eXPs8im!7sGaV zqKmH}LE?n34G+MV6(wuib#1M~hYa0gw;Ai|)@EIA%=jI~j32klq>)R$o`2=dcTU-P zuVW6{OV^ZA!U%zgNu#XufO)#Q!gl7njxyGNy|tW zQLSHbqJ4uO(F<)l&ljKdgG+{&rVJ}{#nsoQ{t(dwZ3Xo> zb%d48-}IXs7cX3Vz&;0+7niwa{)N9jdH4y(@3ZfI<->-i;;A76`X7Dd-h1!A?;(dA za?jm&!#0ejie}E-zr4KkhTq)y;U}N$IBk00{sW6r(f8ka<@^iIo%huOnw|7@g%S!= z7ualDc2j%9Z*IPE_kH&M-S2)oY{+0#%P``Lmn^yL(#yX5YT?mGAN~2~pWb-$Z))q- z&N||dM5^%JcR#xA_Pg)=!ygmL_~_9iY{wNk&6&ClwJV!i8%InS3tMUGxbd##ty{SLb0cX#$5I7D`Q z)#1En%W?9i=_%jlLh!=}4Xh|gr4muwvyG@xQQjXzVPPr-ORY-=@Y97;7$m_ASkVE} z1=}VB`N6>X;YS}m|J-wDe*cn+iVDlj!}jJ~%gw|Jk>MY9mp60}w(bN|I5>d#rrX=! zd-qL<^QmWFaOe?74IMi8_19j7<$L`ve)0Wt&uMIG`e4pyxBv0M+i&}W8u2HO9cz0| zG_L>Q4|iSo{qH~Vkr(2uV=sQ=8T|c6;dmU-||F{)6p~43JV>&RcVelJP`TpZDq9w`ae5+9enFDJlq7 zf}n2;B}eSd#4jKrQ4}hVPOts>*O_5u>}(&AH2I)1>Qfy>vf&g3&~&5gCImcwXLqA5*{!7@V=!7t0r zqYjYcxUMf*w0z$DuS!aaZ3ixDCZ0&TR(|!WRr$Qxr?kRzo#|7joORZjc0Rjm#fp-W zvP&=h(aGOA_J|{oN~B68B}TwTGbMPVIXAyy@yey^^Zu^8?p~@&?M*GFpNJSG#TAuj zoqp2kryOH+T=TqrIbP$&0@7I!-@Q>(Ro&2L94-IS2%wTtI5J$S(|&d~6Bnf~vvLgYe)?mc?uB zVVv!tPy+%&mSQ3J;NBm^GLXz5LujnyxDsdmDO?j>@*saGH_wk6W}kiLsWVSI{p?fE zy8YHWnwvIoeP&&cDw?=83}0SqXl?0JQL0Ij>)8GK4J;}w^sQ_*-SPKV-uUjRr=NZ1 zSvUXY=GCj$CNKo(7!QE_@Z_W^Q})POYI94+q|sIRTt4S}rmQ7nTxMD2vl>-Wblzw2 zEV@aOv?1Nr(4M{R);rEV;iMm&b>4k{ynEjR_j45g_><4L=z{YlHLj>pm^FN10k*Lo z=~I$eS5x0%8pj-SjGM1tv3!XhEyel$4%{jc{ zPCPReAeG5w`4Yb6xOvmIt+biR+xZBeIP=L5F|TujSXq|2&mMdH?8+-X`uLOk9{w}@ zTvgXBD+jrtO7|2~)Cxtg5D_Tkli&tfa$VK|5mY{^3kVh;tYhAterNXV8?U=Msmp_^ z1}m{72B%!s-gD30lL_tI3obkR%WlaP`B6_#`W#Wf zAwEj?ATC?u*2YW05$!zy6C7LgCB`%nxOL)4&*xHN;~{WZRiA$9=?CtA@ZNjw!}gcI zJdw@uvuT>j8&#f|7U$S`6sBr%MbYxvj@p`<-FDyenqORV<xz z7O}XY#?`M@tXQ$G`tr*zyWpI2PdMSEcr2Q0->_=siV@=`{pvS2U3>M_vkp11AQc0Q zMB_0@He-hT$RkhgH}j|qF8RTE7hL$0i_Uxao;%xZ(^A#Ak%-A$aYj)!J(fbPA)Tp2 z0(#ikvc9&d_N0@)d*#osz2+Cco;GFY4?q1-FHo=f`Q<U?F($ zaoF%-!9wtB)~wlKhaHNFi&LqB%@=}4DTzeVFpPyu7A>i%J>>Aie{}g3J58FndgUU* z06)>8X#D0R5pD3`L8qO0dU1K_^qr<2ao{0Eg$4T^deG$QQ~1(ih;%@`aDx;&j=j@P zliz#$?Yr-}SC7W{LN_O}!4`KpUJ4A|gdj*p_yrY+q!>Q=P@ZEaC5_j)gTN&Ui}ycd z)}@#J;F_OacFw6Mw>H+-)-~jv$bm;4|J^grUcGXq?ATXadf_jxzKTl@Nm|CVFWDxzZ0pe>73zNKwm;z=Q%5@dw|816A*A?uD2@jIT;% zMFq|=zQIdsZEX!60z$~d;JXQN91p^|5d4v|rcRp*n*s(L1cHxBeShZ6nG+^V_~V^- zCK7Qpz>{PP;_5jq8`f9XuXlZ^wx$}=TUuGJ=<>Rn>e|M-oYlUndTllb0M#SDvtZGZ zj;ym_{`@HuchIByf~8B}o&9Frn(B46wHe3k-@mV-NX>2abq%Z1Z7tQS>zi6~@o2%a zrAsi97ZjxoJ+@}m%EzC45^C%FW-i^fdeu5dyj#DaWkXF(oo#w+*RHN#-{5%ul0^&q zB$B0Nm9iSEu5b9@)6a_wj*w&t&rH_Kr2HS9!kE(A2nQb$u>vEnoW8 z*9*SP8M>6I0$m$hd*a-O?j@iO?ZnfCS-tJk11vg@@p zHLa?xh9`-EA#di*Tuy_COT3->?JNY}NWS(dh?SKTzWvthd0&2=%|h`N^jI#J|LUu+ z;<0!#ksLK*BoMZ!s1U%ku4dh^VMFt|TvJmM1~#tKNr8Y!1k%F0!)w>9{`8ZN0h}s2 
zGA22{{UR$~4(u-_-c>?AhxYHncRiq+(IW%D?^o2ePE}FD&j@-~93$udJ%BY0H_d&FdF0 z{>qNXd30DlQ<#d?H#V)VZSXloi7Pb7ZnmYVX4RVNoQ)6|)E%o=uK;wxtysKh5unR< zy#`=weH{iZ+jdv1UAtuUvWVkU_3IDsp}MwaZA~rQmvt+aXEPZtP0HJrn<*m3hRwpZ zOy4MSFQxql5t~YD4TCLrXbIPb@OWb$y1~? zR#j1*Nfz3+0~=JWDV5- zSi@j+IPhD!OgiGC@Y2XbS{}ndYmPgfr!Hzb$r`bBAQ}k(>bU;61JpUF-k-w z9hK8lF-P+wM^yAQ*J8 zy#AF7!7H+#b4>tn#B`!Y+=wSk%Yg;z0fTHCETqX;-QZzc(w(BO~=y7%pKkWn& zvTR!noAUf%YRxU0CmglzvKO~Wpk7Na7k z*g+b2pBvc6ix6JRb4??v+KyyHa|tzks4s4kz^#Cb3SsJ@8iFLp%^SM0Zr!@`FT9|n zxKKC>uBM3j!>T7u96t%MAesT>Ra90Y0&)*h_aJCch^gE|2cUAuLXQQw9WqT0ws=DcK8joPLii$>v8+jbVXMsUY9R2fBa?KTw47YPu7 zvqr%@f(D;NqRFZXB$Z*{z_U~6d)k`DHRXj)6Yt@qX@EUhVTo*!FRNs~!GaAKl^lb^}2v{-RNJm5InF?tZ2{0 zTq7QrJrzI$KIr6?u*U>4DghE90q=xRIz*2XG+1uucn4xMG#&xA7p!W_g&xp3A~L9F zO(+6VaJLS&9?I$gdEmCj53QjL)Pce$6vA^{9UKLpxd@|=LCCXgB!|rK;hf{JTQwzX znJRB9AUlYWV{if%s>X$dIJ1F12+DGRj}S=Mqy%aSl`s{RYsNZUA0Jqa`1}?qABQ0; zPjXe?RU*JPSg;URu3o_>h+)(RL6r-8hE)=MdS^*-F71 z3Q{*7QEolkqHt=o#q9`e9DpLD2z_LX-tZ0tq{(iCyrKdQrV_?S2roL=mnM#zjHI@0 zjTkv{^{Q3n<(0?{ecTHl1#WeQi^5s(Ch~y_!Ug!G6eLJESrkT&XYY=gPGXN zjnse5Lhxb?U@|g~CSmYd2@Is7N;FJBK|8}t5Qjkl&qRBBX3r#C$&oE22rxE#Dp<;8 zGh*J7%3BbGJnRWBh!0T#8oB&@O7QgzKwelli$nIc1=I-(~6L`c@XlWoevt-$98Z+Qu zPzraOlBnwTQNZ`@B;+d!{SPDXh{jz&A#Vak1|-;uz5)e1A6_jwfx^+H*qxDk5W2y0 zGXdmXaNz|-MFs3<0eOHUg>Onz{ky4j7a_~;+optKc_G+$+k$I6AW{jj;02*7o!4

    PDET*K`=0^gNVx#dfC1iZIXCPTM+XEjA%bkWdAJ0(o)n}7p%a5*1)&%vSgrvlh=!}dUTAQz3(NiI5Q-6f z)Y(&Ay!eMnaCzf_INm%gJQcKg%fikO9TCI{dP~4%S85U^tTVZY6JBLXv2{fkyX~T` zwB5H&XiMS$*C^ob_VadgqL2r2E_?`hh8Wwp1=qq4&Qx-JAR}8Aoh@DI4Cn7gr)4wRa>hNb;V@7g9E8C=f3}0R6f+2C zH*RF4=*KRCGV`G)HXe#-BKoEjM&FD?8QZh{4}0<(5X%u|3Aq;MTO`E5o(&VHn=~*8 z;f1Z{1038O3VS8Q3NqSSc8Jyu>AJS=gnYWT|58X>VQ8-;`d9o4vpk)E! zcjJj5;cZbX*t9MZ@xsI4mT3>-Wvco{Lg5IMCE+`FGPV~AqlfmHINGRy+uT5+WnXd4w+Kh2VM*LKtUwF*4r{7IDCnt_B9RrJEy`;uX#fdWv5F zK-3;Nb~Q}&MR2J{gf8mNh^iEK5Y?hPi7;_xBZa}6MGkH%1uipWiO>G|G^~s0SZyReMB}qU-bVJh0IhJb zm@R&(Jp4WeAE7~lsr-D91`);v=?kCq`QoDp!inL8kf5r$~fW#r?mvYV% z7Q|Fy<2MkP1H%BAs0EG*D?lj1C%>k_`)Dqr;u31a<=zQ@Ap(lRxTZ0`?$}MP`NJ1k zdCWrj>tyif+}$Nmz>UHdNTR>Oc8 zoil3Cy5M6jC<)?&a4~Fv37u@l6_n53M?g5NL%4#9uoSTc*~9b`wL`yReB)~rz9zf; z1iQG-2e^pJ(Tnbni)iRS^oPjgzw}Qg_6pd`Z!4=p@en|{pumTnE7;B;c8LhtA_)Z8 zC=-M{3W2~0W-E-pzyK6LvDuIzCJD%b0jHPl*DbQLTB$*wB}-)S^pgF#LfGUmTC3HR!H|#GZK9n)9V-1u`TH#gZTjD^I{>HBf&+ zIA*T0q-h$FNc4cAY?W;jo9afQ z@j}f=C`rS1EH1z8d8+Fw1m!?lNfZ$D1Z4(t6iy*hIOmas)0~-8Ic?N)Y_WJ01ROk2 zG)<3Tb2&K=U@Aqdyq!ynIlzD;3sOWtB$762uS_%HpF2t6e}VXC;YVJlJa zIihGTluTFnSyJdPVUJ?$$&Ra7!RPwI#2bk^#UE(c#>r9Wb$&Dqs;ZQil~etEzoew3 zqoX5{OuDW^qpUX)WfC6~i3GsTG)>#KGnq_QP(Gi}<#Gs*$K#IcSdL8_DiX0BTOcWG zz}cHPftXWl1y+N2uiW7*d!QWGYq9}&FlZc4$~m56T2Ykjx;DQo?@|f@IK*Ed80x74 z-K2;KSyZJ2a7RoFQ)0Tt`FLE7hUkV2y5{ELGLoVpgsKp8$w;yfQ5Xr>1I9&Ejqlh4 zErF_%Z*wNB2SkNNDe3mMbT+F}`o-&@&?Nv6vqk;cQ@K1BQU^u+u8$2u9fOKX8Z?Y{ z8^{N%APaOwPzKnms#+$Sf#g7t@+O2RpUr16KG6Sg{Nmyw zNEDMwlI==a({*u5lg@=)#SJ5>>KUt@eC4Xs24YeR__;fT0qcqU=t&N8v}4)#950Ai z!Z%=T_~%OsZ@)J`8%0hvk{N_l7?8QE+SI;Ioco<>)t6)y;E2z`_E$ae_O%K%`+8i{+pYz{#^c zdjYu$uTXAnYu#zelz2RN;Vi<5$oQc`26+hTOxh!hUa6s>;kMgud+)vXKKkgR&pT<( zoH^Kk^2sN)wY3JuVp%gJ!`Bk79`oR^Dhl7Dp!on9#Ktcfs45|YbJqbJbZaFI*R?1t z6I237IBewjn{T}(0l`ykjI)q8&4X=#p&7=P=UYlXtJ$t`-vf8`?Hjkv7N}CjQ{H;} zHQja$*MeFAiEKM>dM>b(K0PEzdFP#X&N=-!T-20Ad&WKblrum4?DO4r-_6PAorso&4mg(VX29FFOz!pB z?-dpmJCX~q=jWjv@dXTy0I-;ViK0|kL8!zQ2@Ke{fn)r*NEH3QRqCE-8zePna})R7 z`;9l>`|Q(sf4=XJB{A8z_!7onpMCm;7oTixYv4OCG;9bQVWKS)QOxs!T9;gY`P7}K z+nM$vBkD=IBdKCyws4%gQ6O~?iq_`VzdZJrPd@o*&YVv_`|J~Jb3Xg@!;jv7@7;Ia zdH2l~D_4Z_Nnb*SQ9EkX=m#IX|K0ayzyID_4?XreL%8`$5q+oDFww$*n& z{zB7Lx(u#wq649H7EExl)Hfa8UIwfG(mzNTcwDh0BSwsdw*2)s+&XRCSlMdUZBwTP zp|3obqluyjIOx3#0+WU^MtsBQ1Q9}r3drG|GeIIE0kZ2?jKLeJ>smm-O3(!tTrhd^ zqa!LB~pb2 z1tke6oSn(Jk|h~MMFoWg2{;gK?HzE*p-!r%6&DsJxq1q3p&7c8h#7^2#f}rnW-UlE zOff@`TH>{glu@X9iV-&wg^6TQaWWBuV}qePq8ZUxER{^4`LHDuiD*$lG8r>`55VUc zi9}(l1l5ZfiszcJda`*7I-ky1>({S~CE~?JG0(SEBhjb0q_wFz-_}95G#Wz@g++yN zV6)>E#1eb$x>F{bP8O6!)l@PbFDgnUlS%k(06W-?>`<=J5sj$f5Cn7VU%wE1dEcVr zPCDuOU;k#(*sI7vnPECwOxsDCu08eav%k6FXS?mX zM{`q)CRRW5g~LrtrZ=)Zn9tH#XPvX-6bccxou*EQNjrQvSJ>7yz5y*F`We$4JAV9C zS6?~%rB}v}8b5K|jz=DG%%UZ$((PF%Z-beVnoJgyloTct3DtJ8@PCYCvQJ@ANnza3 z;X&9KIN}NRMPX49d=W@Kh9a8U$X=E#6P`wmM$UI+Q6skk=%DT*X`53ft( z=bq6MB4HQdGQ2z9W)DskD+h6c*vcY|ngST@WY0$t#O3JTH$gcIJ9W(-0m2Gigf z#gk7yIby^J5rx#LW586(5nm_Q6Z`Hn^R(k;8d~z@m)@WK#@{Z!^m|jL?y_Nh>-E?D z=F`tU`t9vE^&dE(sJH}ms#);y>1Uqp7FHa4+=-W-d7SI#*0g6Y2bX)!J|9J05AI-V={If?5AEqQKfAX`dVaoyTd@jysDvA%3QoTfN{ zfp>06dCLUueOn8`YlgdM{+!jT*D8A9OS3;It*BI^%9JVNh7B2W-~op#k)-VF5ry|I z`BJKYg>90m$;*~~ec{DdTz}0~hHkw7+M5{BY&8nImXZ_3+DJlvIq2qdxp&@rD^(DJ zIB4vcaoF1~yjn;S$Vny>J5HavV9{3(KYU+hMG5*m;zcovMU_Ov)+}4yY3CU?U30M} zoB3?ZzWeWQ#1cQb^2amxobFofpS<(>mDk;}`#!UN{KFqu`IeI6;)Y7o5s^7qlbms9#AA8~Hr{P8CII#Yg z{)q%Z@L^~uu*>=0qimSX3%>aD{Ht#N(M4Bij{n7{AEYHu)#6#pjvI4 z0p{^YjzoRL)syvWIB zPy5cP_don+m=&v5tUC0dga7{GEBo$yU`cT)8u^FEwadFX@F*}@p z#<9Cj-*L*6$qzjEKxO{{iK5a24?f6Hy;%qE|Ih;u&zQFBpn*gCloY-8+Dk_qb;MU+ 
zEr7vm!ztxmdDMp1Y$l&$1tjm6*IqMf!uXqSym``uaWRb7#YJDt{rvF54t@XK_s>7? zf@hw3@`%F@Ub1N65l0-JOcdO5>u--a=C~t|JPg*{*s&w>mIZ5Cyt5E#tXpKflfvCW(wPa*l$S6wx3 z=+Nta^{cTvj1f;#`JOGtn4E3O7Y>Eo+pEOg2?<+UTXDb#aoy}Eb|QaR zpJ?0w%B@(v%5g1zbe@kTR4=0H{2sH@r@Ze^et!9dKe*(80}fiXZq3D)UYs*?H{E>m zAqO9J#pORLEiD*5VmSD^V)^Rtoqf*GQNvz&>&-!f28pj9M@)1lm#{Un*&GU@!%HP0 zs_98Rn{$43&Glo)jKBPfU+l8;E`$4*q}$f_)Pw)hKTZ@jbTc;Si0s*L1*5UVyB~e} z=wpxn<*Fzs5)ux0V>~c#5GSQCXy)+Un#qJcu>m?UK%qK_} z25gPs&iL?Wl!#%a&9%#xXVUGd;$qvAX70D&{rBIy{{ed&hNdcdOIzE!@4TIg>qSM$ z73=CODcZLrvGD7~C?gp!`1;GQRwbOXz4zU9`|oZ`6{q-cPebOKs>1(tBy;`pWlI`6+({E>L_M#$ zxm}7EK5+j%RRagM=h|b2?)aXr={YO6bj9j{z1J7 zMiAJw5usYs=@Fwx9zSc=OOO9~O-;QoX&8#dBM1(^-Ki^!ty3UykX6?M5P8W&l((S8 z8+g9>-hkugJwI1cTHM^x{G02qd+d?N%1U72`h7}^|M0up|NQVnKmF;og~esIWu`mY z7JT_t?dr7^eJfhB=}fvK0a@~V)3H6*;b$LX1yCmjxLK4*%~I{f3%^|Y)rtWXqncan zFTeV_qAbBhMYmn?EmOd)u5DB5ahdlZd)dNt!Tvvx!q#=$NM1yv`wA?RB3VbydhUdg zRmUE*?^_T2Y2}io7~*pIoVZa33BVRA#iqH?jU~aXbLqVDfhlqfNGAJ&e^QV|n9-&r zZUP2}I0%x62@o4`5M_uxq>VXTwF1H*p9ZXcBEUY`WM(v#8K6(>l=C;`FlaTxmuKJaArir^?b|s z*VH!s;0G5SwEK+XX6}CLudh4m*rVgA#IAenarlu(G}dge@@ODh#bHNGxN*Es|>@iEGLzyaABcKX1u_N83q`P z!lO~0i%qdAt`XI>sLpA9-`+y-Uw*Z?Bj@XCD$|-9FrcEYcD3W$Ko%6v$H_sKXoHE; zc{5S07Ul$x={+%%UAU?}He4DvdgLuP-mrT0>Kkvn4FiH2jbb=aJLfQMLLv+CCrL)n zj|v1$<}DaO$UWDQZfR`XVf5%hg9f*DbY$~6$2Gwo+s<3I>C0waP3tkUzI*t-haY_C zA-}upjx)|Uqq=(Cq)C%6`oVb}>9iGb6~!|QIcgZ5EUPg-590R*KxmHZVBj1xc3j@^ zIU*CR*r^jWL z4?i*5aWF=_kO|%R6}B-t|jlDcKR7d95(C3BaR$UR?^YBzNw{k^3>hW_}+!5pLxbEGj=w# zs0-ELN}m~B&;IG2M-Dsmv|~>=>EQim{^;WG{PoFu8(Z6yL?NOmFal9+S<(|R)JT)$ z7z`ZEXl!Z%*G@X|_|wn$?srZ-W$4hsD^{+^+0Ln_oqE<8-zzR0WT<@rPk?I2v*4}i za;l~w+tB8W*iMUiF?>K@LsxxSjkqyIhSh01ZW@IG zLEz0Qz&<645kOaU$K`WdL-D`Wh2U2`BM~0L?eMBazo$fBlWuXTSQ&Yp=cf`kQaQ6MUb5VkC%pSmx*Nz2Dr> z^xZSgI(XI*vyMD`r)gsg3L{*o-H5^JH&o}0<4*q038$TY`l)@&3zjWgQc{xo&gsV; zdgP&gECHikRa}GX@S|ixu&%1G7c3Kog`!IG-(GlO<^hKsJZskBM;~(8W#>Ns!V8%W z)3c1?g7S0EI_K!4588Y0Nv)aY<#o+)cY|))65$$$w$81K5WZ16yKO!4qGcU!oO{#D z#&ICndRC%)nZ0fyGrx60i-Wmg-8Tetaw*@Ik)d*yCLMp$317ee?p?RvAs6+@>IvKR zF{X;CP+iRdo5YmVZku2yJUwy|L>PN^uXfK)k-Q&v_62Pu(AKsP9hn-h|P z*#Zz@m>hA$5rOWlTeq&RuCBJW7PpnE@!T-UL!@EwbiTEZ)k!uGXIN373KXW?=*JA@S)Ak zb$8tU2URUl^oVZAfB@73wjOMO6)V=jxN2-{Si5c=(CF*0=8YdcVyB%Z4;((Crgr@+ zv)>szZro0j$AcZSUwv~;O-)B84@+R_!iBo7=56ciCChBrZK_?naQ?j3bS{=CsIRMD zwsi5J0Rwm1X$l}K+H1fNS>sBh7Hmyfj%wQcd2=xsOrAV()D9zCTGroo`)~Q}lSJvB zJMX%ode!8qFaWbpJpEKdV+%~u2tN$8X^Xm3(BL3OcXjFt@4odQ=;tPvH`zcmT7skQ z6a;Q*18oM9-x>*9RmetH1p%z&bg52=(!>+2Sk{ZofUQ~`)nKq4gxVT0i99-AU`K}j@ zCILq2Y`U;0Wjl5v7SFXeMw7Q7qR%@>s<3_T&1I(Kb5S5R1BI*zKyxm;S)V(`f<)6%25SZ&qMwYSA} zHR5}T!or*@bhOUMDqvF{4+l^nRHrhW3sCyp953em!Ny!U6r_`_gu^G&y;QmNny1pt4L zo-FHcz(Ljd#s&s6bg0d*d#g@I-gn_!;;wu|T$2+iFxK^{1LV)IC7%2raMBL60kCu$N{W1U2R3H)0yfUh6<5mj+) zJE`dC5xxf`xGhqI>cAwuHOFynL)U{8(IERCvXJJXxABZlW(B4>DI-daq@Sr=05SCeG4p0VnMeTF036U6R0(?E z3&yxE)t&_iTNlURf(HjvIP;!(aZOcpE(8x-MOE@y6Ka6Q@=h6yZJ?Uxpa_*;s^rZd zH`MSgw_-bRy9CS)d8NC6v?{om@yOf-)|p7c7dC^D$RcF<@rmN zBZr>!FIos54!G^Py5oUCHdkce9YHWw*U%B8&#KrlcE zglvd!UpU<(Y)-DjrJ(_=p5L2ZyiWZ^|QAz}0jWG_9 zR9$l{escp|hn_^OxWF=cmlzE!(?tvDnd6tDAe?6r=EdT1__=Hs+$YzQlvt{?|IOF_ z^2g_&W?C690b?J1t#KWKUOl1S2$}yE@_Le}Nl(ZFAru6x<319xTuPmQ3v|vl1gSeY zjt%Q!-MY1ayt1Mqs4DssD$N%&RY{sWaYyttv~>9J;oF3~hu05Ql=Ge7NnxOhm3 zfhgS-&t@n+06B7(6A`&G3L*jbXhO4Mh!m0nHDF9ye>NxBNMdF_m@O2WH&7&9V|Zmv zw~TGuwr$(CF|nOY>=WCzHL)jlCY;!q*u3X^@9*>M(^_47?bX#)8V~5kVd^|EgUeXT zfJE+l6G4}8yxr zU?hYB5>yCskiL2%dj&;)%Q24jg`li?4ZvD25Ns+SHAT?w;C6{I4Z~{QKdAox1?sp0*8YP;ozeNQ2_!KFwO{J2qe%G!6=j|?^gn!EgeQ4n_As?mLAhc^iKN41fkatW8gV3Aw?xS=aY__yC}IcJzAVVN 
zQW7zluuSzKRVp(`epOHz_5>BUM&cJz_sFP-9+D^+QKEUuV^~--E(Xd!yHRwS}VC3{CE@sN?^jo#8tS*wff zqsZTq_05Z!YqF@+;3%QW@1pJy6{)c6|3=`Rq@uu~v2Dpuv~?UuW}6`{&XO4wSJj#q zApqZk7${QN&!fpm=v9Rr^s!(nc2fAQw}eCD3jfZ;LZ*W%F{F%A*YaZ@!p{KV0OfC} zF=|7LmVe2-dQ)To7=1YLDq2mq=wL1k*IK{9({LztT>NBAbzHrO5t1xW&`#4`!axCRDId0I2h zkLb+M0Y&%5)1jbL3D#m)GwNB%wQp6l4N+Ezv7&s0BKL1(Z?sYqCr={y(SsgPq5P#? z#xXP2%z*tF4&{YqmAWruziUtuX_vADud9g`A55(p1{Df9FxY5&@bQwFU}!W~SBLYj-@VoO>+9rV-0cGUf z$1zeb@Zf!xo09W9Dmvb-U=G44mbzLHsM2&)McCu+6WwGBxk7=y0Sp$Jqg#-!V^nc< zL0b)(FHLAnDNCLEeU$jA-vm#Z8NGS4^`;<9@l=NDD*Qjx+4cs_c|^UWC3G=RGyA#} z@dWaI%OZ%K zXPAzv&ZDbH&r4W@#Mh~^uy|jiq|Fzal1E0sx`J5z%qKPBXl%JmN6I^Pj4K=u^n8)D zH8pUCgsVzY6pdDYTQlufXiY*5B#FXl<|*~q)M0~(5evOO&5mniph%EPm3{~YO#np{ zMsQ4ai<8fXp$DUsKPL#^)zuDG;gRQcjYN@15(*-Pq9?T+yQ7MfLpd@{bd?5^NfvLb zQOq885^%@MPW@BBPlwQKp7cRBFVFRI6ke`26zf7UT8L9r6+!G{02+QYOsuWy>4c+y z>85;+5}3C?9Sl{)E1dc< z_fC{TE*=X1zQO56Dp)M#fwedSSGpwaH#<|(+hOLV)et8}gqPK~x zFM#17;(VUMYk&#H$k4Fy5di&yBbyYbgMWxf*ueT$ql87;wqGW;;U`Ovy>=E#u0`;# z^sFWRG|h-3VIhgYH>7#heY1A4)r0TWL#F$8DuR(3|3e})_DHgfiad0Y7%!FDjl2lY zrt!dfjJ{G=wzvFoY9g#SErzgF$Z+*^JD>QHCK&W*SIMw}tQJtFfkSbxosnDC1%XdkEt{N@X?ODZclwJg?UvyLM1GM`y-D@q^&aT0vs z+&QK>`14WuT9!{~qJ25h3YY3E0xjq-NIn5sWnfdk3K4md~S>;1o_0e>7=P6wk(7Q4Y_*L<(QeQ3IQ?v0N|N!eJD1kIqp=G@zErykH&qDP;@9U*8a>xQj(|0j0`gL_6C?}S>VB)k>FSUz_}mCd8(N@TQI zz0U7Oz$q!Zk2lWmNDVwiyi2?Dxi>2F+h6$JzWj( zfY?-zl`8ZG3oKmCYXkjg5b?PLUJp}p=+}lCT@Ivj|F|2bVH&1zBFb3^LlI~oX-Z~b zAOZlkyMvKN+M#G1UJFClLC@cm!>ZxiX@zAf5(c=cK05gJ8ZiJe`90(BH|)F=zIZj< z(8a~kvDF4;nXW6}2s^(R`}GGv&OhVwvzV(5s@|RzWWFgeZ@|CoxS+vy^{$HV(+c6= zk3pk%mI)03A6*yQ1{T?bOye1bo)d&Yz2--}Q-n3O^A16HD;!sbv>CI)z2^MA6xNw2 znC_&yrV4a6crDiQmxfG+$e8z*+s)#^k20g8xe36KS#R#Vd2l20vHz38-{F74Z>;#c zcjfryD**c2bu2IF!PoEj1mUKm z3CT9qW9|8Seugl>r5E(K567aw#tjqp+ud0RSo)+G!!(UCr;ahds^x|Gj z8NWYGU%^^6MY>^Zx!sNtA`Qk}=F5y$?AtJy#7>boQ7!U1?*$jjY$QYX8mb9OdoYfg z7r+<8b9RlaendjJj_yXh-futmb=1_@0jtgI^gO`GLml8?@XZSu*Ju|&6lc>O7o%m! 
Date: Mon, 25 Jul 2022 12:28:43 +0200
Subject: [PATCH 0097/2550] nuke: refactory Missing frames validator
---
 .../publish/help/validate_rendered_frames.xml | 17 +++++++
 .../publish/validate_rendered_frames.py | 48 ++++++++++++-------
 2 files changed, 48 insertions(+), 17 deletions(-)
 create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml
new file mode 100644
index 0000000000..434081c269
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml
@@ -0,0 +1,17 @@
+ + + + Rendered Frames + +## Missing Rendered Frames + +Render node "{node_name}" is set to "Use existing frames", but frames are missing. + +### How to repair? + +1. Use Repair button. +2. Set different target. +3. Hit Reload button on the publisher.
+ + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index af5e8e9d27..f8e128cd26 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.api import ValidationException import clique +from openpype.pipeline import PublishXmlValidationError @pyblish.api.log @@ -36,7 +36,7 @@ class RepairActionBase(pyblish.api.Action): class RepairCollectionActionToLocal(RepairActionBase): - label = "Repair > rerender with `Local` machine" + label = "Repair - rerender with \"Local\"" def process(self, context, plugin): instances = self.get_instance(context, plugin) @@ -44,7 +44,7 @@ class RepairCollectionActionToLocal(RepairActionBase): class RepairCollectionActionToFarm(RepairActionBase): - label = "Repair > rerender `On farm` with remote machines" + label = "Repair - rerender with \"On farm\"" def process(self, context, plugin): instances = self.get_instance(context, plugin) @@ -63,6 +63,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): def process(self, instance): + f_data = { + "node_name": instance[0]["name"].value() + } + for repre in instance.data["representations"]: if not repre.get("files"): @@ -71,7 +75,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): "Check properties of write node (group) and" "select 'Local' option in 'Publish' dropdown.") self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) if isinstance(repre["files"], str): return @@ -82,30 +87,33 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): collection = collections[0] - fstartH = instance.data["frameStartHandle"] - fendH = instance.data["frameEndHandle"] + f_start_h = instance.data["frameStartHandle"] + f_end_h = instance.data["frameEndHandle"] - frame_length = int(fendH - fstartH + 1) + frame_length = int(f_end_h - f_start_h + 1) if frame_length != 1: if len(collections) != 1: msg = "There are multiple collections in the folder" self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) if not collection.is_contiguous(): msg = "Some frames appear to be missing" self.log.error(msg) - raise ValidationException(msg) + raise PublishXmlValidationError( + self, msg, formatting_data=f_data) - collected_frames_len = int(len(collection.indexes)) + collected_frames_len = len(collection.indexes) coll_start = min(collection.indexes) coll_end = max(collection.indexes) self.log.info("frame_length: {}".format(frame_length)) self.log.info("collected_frames_len: {}".format( collected_frames_len)) - self.log.info("fstartH-fendH: {}-{}".format(fstartH, fendH)) + self.log.info("f_start_h-f_end_h: {}-{}".format( + f_start_h, f_end_h)) self.log.info( "coll_start-coll_end: {}-{}".format(coll_start, coll_end)) @@ -116,13 +124,19 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): if ("slate" in instance.data["families"]) \ and (frame_length != collected_frames_len): collected_frames_len -= 1 - fstartH += 1 + f_start_h += 1 - assert ((collected_frames_len >= frame_length) - and (coll_start <= fstartH) - and (coll_end >= fendH)), ( - "{} missing frames. 
Use repair to render all frames" - ).format(__name__) + if ( + collected_frames_len >= frame_length + and coll_start <= f_start_h + and coll_end >= f_end_h + ): + raise PublishXmlValidationError( + self, ( + "{} missing frames. Use repair to " + "render all frames" + ).format(__name__), formatting_data=f_data + ) instance.data["collection"] = collection From d3e982ebcf757a07d731760734ac51315454449b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 16:49:11 +0200 Subject: [PATCH 0098/2550] nuke: [wip] validate script attributes --- .../help/validate_script_attributes.xml | 18 ++ .../nuke/plugins/publish/validate_script.py | 156 ------------------ .../publish/validate_script_attributes.py | 129 +++++++++++++++ .../plugins/publish/validate_write_nodes.py | 2 - 4 files changed, 147 insertions(+), 158 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml delete mode 100644 openpype/hosts/nuke/plugins/publish/validate_script.py create mode 100644 openpype/hosts/nuke/plugins/publish/validate_script_attributes.py diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml new file mode 100644 index 0000000000..96f8ab5d38 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml @@ -0,0 +1,18 @@ + + + + Script attributes + +## Invalid Script attributes + +Following script root attributes need to be fixed: +{missing_attributes} + +### How to repair? + +1. Either use Repair or Select button. +2. If you chose Select then rename asset knob to correct name. +3. Hit Reload button on the publisher. + + + \ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py deleted file mode 100644 index b8d7494b9d..0000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_script.py +++ /dev/null @@ -1,156 +0,0 @@ -import pyblish.api - -from openpype.client import get_project, get_asset_by_id, get_asset_by_name -from openpype.pipeline import legacy_io - - -@pyblish.api.log -class ValidateScript(pyblish.api.InstancePlugin): - """ Validates file output. """ - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["workfile"] - label = "Check script settings" - hosts = ["nuke"] - optional = True - - def process(self, instance): - ctx_data = instance.context.data - project_name = legacy_io.active_project() - asset_name = ctx_data["asset"] - # TODO repace query with using 'instance.data["assetEntity"]' - asset = get_asset_by_name(project_name, asset_name) - asset_data = asset["data"] - - # These attributes will be checked - attributes = [ - "fps", - "frameStart", - "frameEnd", - "resolutionWidth", - "resolutionHeight", - "handleStart", - "handleEnd" - ] - - # Value of these attributes can be found on parents - hierarchical_attributes = [ - "fps", - "resolutionWidth", - "resolutionHeight", - "pixelAspect", - "handleStart", - "handleEnd" - ] - - missing_attributes = [] - asset_attributes = {} - for attr in attributes: - if attr in asset_data: - asset_attributes[attr] = asset_data[attr] - - elif attr in hierarchical_attributes: - # TODO this should be probably removed - # Hierarchical attributes is not a thing since Pype 2? 
- - # Try to find attribute on parent - parent_id = asset['parent'] - parent_type = "project" - if asset_data['visualParent'] is not None: - parent_type = "asset" - parent_id = asset_data['visualParent'] - - value = self.check_parent_hierarchical( - project_name, parent_type, parent_id, attr - ) - if value is None: - missing_attributes.append(attr) - else: - asset_attributes[attr] = value - else: - missing_attributes.append(attr) - - # Raise error if attributes weren't found on asset in database - if len(missing_attributes) > 0: - atr = ", ".join(missing_attributes) - msg = 'Missing attributes "{}" in asset "{}"' - message = msg.format(atr, asset_name) - raise ValueError(message) - - # Get handles from database, Default is 0 (if not found) - handle_start = 0 - handle_end = 0 - if "handleStart" in asset_attributes: - handle_start = asset_attributes["handleStart"] - if "handleEnd" in asset_attributes: - handle_end = asset_attributes["handleEnd"] - - asset_attributes["fps"] = float("{0:.4f}".format( - asset_attributes["fps"])) - - # Get values from nukescript - script_attributes = { - "handleStart": ctx_data["handleStart"], - "handleEnd": ctx_data["handleEnd"], - "fps": float("{0:.4f}".format(ctx_data["fps"])), - "frameStart": ctx_data["frameStart"], - "frameEnd": ctx_data["frameEnd"], - "resolutionWidth": ctx_data["resolutionWidth"], - "resolutionHeight": ctx_data["resolutionHeight"], - "pixelAspect": ctx_data["pixelAspect"] - } - - # Compare asset's values Nukescript X Database - not_matching = [] - for attr in attributes: - self.log.debug("asset vs script attribute \"{}\": {}, {}".format( - attr, asset_attributes[attr], script_attributes[attr]) - ) - if asset_attributes[attr] != script_attributes[attr]: - not_matching.append(attr) - - # Raise error if not matching - if len(not_matching) > 0: - msg = "Attributes '{}' are not set correctly" - # Alert user that handles are set if Frame start/end not match - if ( - (("frameStart" in not_matching) or ("frameEnd" in not_matching)) and - ((handle_start > 0) or (handle_end > 0)) - ): - msg += " (`handle_start` are set to {})".format(handle_start) - msg += " (`handle_end` are set to {})".format(handle_end) - message = msg.format(", ".join(not_matching)) - raise ValueError(message) - - def check_parent_hierarchical( - self, project_name, parent_type, parent_id, attr - ): - if parent_id is None: - return None - - doc = None - if parent_type == "project": - doc = get_project(project_name) - elif parent_type == "asset": - doc = get_asset_by_id(project_name, parent_id) - - if not doc: - return None - - doc_data = doc["data"] - if attr in doc_data: - self.log.info(attr) - return doc_data[attr] - - if parent_type == "project": - return None - - parent_id = doc_data.get("visualParent") - new_parent_type = "asset" - if parent_id is None: - parent_id = doc["parent"] - new_parent_type = "project" - - return self.check_parent_hierarchical( - project_name, new_parent_type, parent_id, attr - ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py new file mode 100644 index 0000000000..2411c7fe4e --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -0,0 +1,129 @@ +from pprint import pformat +import pyblish.api + +from openpype.client import get_project, get_asset_by_id, get_asset_by_name +from openpype.pipeline import legacy_io +from openpype.pipeline import PublishXmlValidationError +import nuke + + +@pyblish.api.log +class 
ValidateScriptAttributes(pyblish.api.InstancePlugin): + """ Validates file output. """ + + order = pyblish.api.ValidatorOrder + 0.1 + families = ["workfile"] + label = "Validatte script attributes" + hosts = ["nuke"] + optional = True + + def process(self, instance): + ctx_data = instance.context.data + project_name = legacy_io.active_project() + asset_name = ctx_data["asset"] + asset = get_asset_by_name(project_name, asset_name) + asset_data = asset["data"] + + # These attributes will be checked + attributes = [ + "fps", + "frameStart", + "frameEnd", + "resolutionWidth", + "resolutionHeight", + "handleStart", + "handleEnd" + ] + + asset_attributes = { + attr: asset_data[attr] + for attr in attributes + if attr in asset_data + } + + self.log.debug(pformat( + asset_attributes + )) + + handle_start = asset_attributes["handleStart"] + handle_end = asset_attributes["handleEnd"] + asset_attributes["fps"] = float("{0:.4f}".format( + asset_attributes["fps"])) + + root = nuke.root() + # Get values from nukescript + script_attributes = { + "handleStart": ctx_data["handleStart"], + "handleEnd": ctx_data["handleEnd"], + "fps": float("{0:.4f}".format(ctx_data["fps"])), + "frameStart": int(root["first_frame"].getValue()), + "frameEnd": int(root["last_frame"].getValue()), + "resolutionWidth": ctx_data["resolutionWidth"], + "resolutionHeight": ctx_data["resolutionHeight"], + "pixelAspect": ctx_data["pixelAspect"] + } + self.log.debug(pformat( + script_attributes + )) + # Compare asset's values Nukescript X Database + not_matching = [] + for attr in attributes: + self.log.debug( + "Asset vs Script attribute \"{}\": {}, {}".format( + attr, + asset_attributes[attr], + script_attributes[attr] + ) + ) + if asset_attributes[attr] != script_attributes[attr]: + not_matching.append({ + "name": attr, + "expected": asset_attributes[attr], + "actual": script_attributes[attr] + }) + + # Raise error if not matching + if not_matching: + msg = "Attributes '{}' are not set correctly" + # Alert user that handles are set if Frame start/end not match + message = msg.format(", ".join( + [at["name"] for at in not_matching])) + raise PublishXmlValidationError( + self, message, + formatting_data={ + "missing_attributes": not_matching + } + ) + + def check_parent_hierarchical( + self, project_name, parent_type, parent_id, attr + ): + if parent_id is None: + return None + + doc = None + if parent_type == "project": + doc = get_project(project_name) + elif parent_type == "asset": + doc = get_asset_by_id(project_name, parent_id) + + if not doc: + return None + + doc_data = doc["data"] + if attr in doc_data: + self.log.info(attr) + return doc_data[attr] + + if parent_type == "project": + return None + + parent_id = doc_data.get("visualParent") + new_parent_type = "asset" + if parent_id is None: + parent_id = doc["parent"] + new_parent_type = "project" + + return self.check_parent_hierarchical( + project_name, new_parent_type, parent_id, attr + ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index f0a7f01dfb..48dce623a9 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,10 +1,8 @@ import pyblish.api from openpype.api import get_errored_instances_from_context -import openpype.hosts.nuke.api.lib as nlib from openpype.hosts.nuke.api.lib import ( get_write_node_template_attr, set_node_knobs_from_settings - ) from openpype.pipeline import 
PublishXmlValidationError From b4b4725b5790b5ffee3b258375ef1bbfe7d17d45 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 17:57:13 +0200 Subject: [PATCH 0099/2550] nuke: getting testing values from script dirrectly --- .../publish/validate_script_attributes.py | 33 +++++++++++++------ 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index 2411c7fe4e..d9b9a35ece 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -4,6 +4,9 @@ import pyblish.api from openpype.client import get_project, get_asset_by_id, get_asset_by_name from openpype.pipeline import legacy_io from openpype.pipeline import PublishXmlValidationError +from openpype.hosts.nuke.api.lib import ( + get_avalon_knob_data +) import nuke @@ -45,22 +48,32 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): asset_attributes )) - handle_start = asset_attributes["handleStart"] - handle_end = asset_attributes["handleEnd"] asset_attributes["fps"] = float("{0:.4f}".format( asset_attributes["fps"])) root = nuke.root() + knob_data = get_avalon_knob_data(root) + + # Get frame range + first_frame = int(root["first_frame"].getValue()) + last_frame = int(root["last_frame"].getValue()) + + handle_start = int(knob_data["handleStart"]) + handle_end = int(knob_data["handleEnd"]) + + # Get format + _format = root["format"].value() + # Get values from nukescript script_attributes = { - "handleStart": ctx_data["handleStart"], - "handleEnd": ctx_data["handleEnd"], - "fps": float("{0:.4f}".format(ctx_data["fps"])), - "frameStart": int(root["first_frame"].getValue()), - "frameEnd": int(root["last_frame"].getValue()), - "resolutionWidth": ctx_data["resolutionWidth"], - "resolutionHeight": ctx_data["resolutionHeight"], - "pixelAspect": ctx_data["pixelAspect"] + "handleStart": handle_start, + "handleEnd": handle_end, + "fps": float("{0:.4f}".format(root['fps'].value())), + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "resolutionWidth": _format.width(), + "resolutionHeight": _format.height(), + "pixelAspect": _format.pixelAspect() } self.log.debug(pformat( script_attributes From 62a7b1f713e636d9ffbf66de6d33d9ea0c6c32a0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 18:04:01 +0200 Subject: [PATCH 0100/2550] nuke: adding repair action to script attribute validator --- .../publish/validate_script_attributes.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index d9b9a35ece..605145149d 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -4,12 +4,27 @@ import pyblish.api from openpype.client import get_project, get_asset_by_id, get_asset_by_name from openpype.pipeline import legacy_io from openpype.pipeline import PublishXmlValidationError +from openpype.api import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( - get_avalon_knob_data + get_avalon_knob_data, + WorkfileSettings ) import nuke +@pyblish.api.log +class RepairScriptAttributes(pyblish.api.Action): + label = "Repair" + on = "failed" + icon = "wrench" + + def process(self, context, 
plugin): + instances = get_errored_instances_from_context(context) + + self.log.debug(instances) + WorkfileSettings().set_context_settings() + + @pyblish.api.log class ValidateScriptAttributes(pyblish.api.InstancePlugin): """ Validates file output. """ @@ -19,6 +34,7 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): label = "Validatte script attributes" hosts = ["nuke"] optional = True + actions = [RepairScriptAttributes] def process(self, instance): ctx_data = instance.context.data From d4f96ae720c258c7ec6895d5398ee2a0c3e96812 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:30:23 +0200 Subject: [PATCH 0101/2550] change order of some collectors --- openpype/plugins/publish/collect_datetime_data.py | 2 +- openpype/plugins/publish/collect_machine_name.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_datetime_data.py b/openpype/plugins/publish/collect_datetime_data.py index 1675ae1a98..0d21490d8d 100644 --- a/openpype/plugins/publish/collect_datetime_data.py +++ b/openpype/plugins/publish/collect_datetime_data.py @@ -9,7 +9,7 @@ from openpype.api import config class CollectDateTimeData(pyblish.api.ContextPlugin): - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 label = "Collect DateTime data" def process(self, context): diff --git a/openpype/plugins/publish/collect_machine_name.py b/openpype/plugins/publish/collect_machine_name.py index 72ef68f8ed..8c25966031 100644 --- a/openpype/plugins/publish/collect_machine_name.py +++ b/openpype/plugins/publish/collect_machine_name.py @@ -11,7 +11,7 @@ import pyblish.api class CollectMachineName(pyblish.api.ContextPlugin): label = "Local Machine Name" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 hosts = ["*"] def process(self, context): From 0b88bc1fcd689d8096fe294e48951b2663d49aa9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:31:50 +0200 Subject: [PATCH 0102/2550] added collector to stored current context into publish context data --- .../publish/collect_current_context.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 openpype/plugins/publish/collect_current_context.py diff --git a/openpype/plugins/publish/collect_current_context.py b/openpype/plugins/publish/collect_current_context.py new file mode 100644 index 0000000000..ebcbc6a4aa --- /dev/null +++ b/openpype/plugins/publish/collect_current_context.py @@ -0,0 +1,44 @@ +""" +Provides: + context -> projectName (str) + context -> asset (str) + context -> task (str) +""" + +import pyblish.api +from openpype.pipeline import legacy_io + + +class CollectCurrentContext(pyblish.api.ContextPlugin): + """Collect project context into publish context data. + + Plugin does not override any value if is already set. + """ + + order = pyblish.api.CollectorOrder - 0.5 + label = "Collect Current context" + + def process(self, context): + # Set project name in context data + project_name = context.data.get("projectName") + asset_name = context.data.get("asset") + task_name = context.data.get("task") + if not project_name: + project_name = legacy_io.current_project() + context.data["projectName"] = project_name + + if not asset_name: + asset_name = legacy_io.Session.get("AVALON_ASSET") + context.data["asset"] = asset_name + + if not task_name: + task_name = legacy_io.Session.get("AVALON_TASK") + context.data["task"] = task_name + + # QUESTION should we be explicit with keys? 
(the same on instances) + # - 'asset' -> 'assetName' + # - 'task' -> 'taskName' + + self.log.info(( + "Collected project context\nProject: {}\nAsset: {}\nTask: {}" + ).format(project_name, asset_name, task_name)) From 477acd1d5ef55d71117d89b467831347b449989e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:32:05 +0200 Subject: [PATCH 0103/2550] create context plugin makes sure that project name is set --- openpype/plugins/publish/collect_from_create_context.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index d2be633cbe..78bd821bfb 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -19,6 +19,9 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): if not create_context: return + project_name = create_context.project_name + if project_name: + context.data["projectName"] = project_name for created_instance in create_context.instances: instance_data = created_instance.data_to_store() if instance_data["active"]: From 9ce6ea6f363eb24ef79c730a671c119b18ee92c3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 18:57:23 +0200 Subject: [PATCH 0104/2550] make sure legacy io is installed --- openpype/plugins/publish/collect_current_context.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_current_context.py b/openpype/plugins/publish/collect_current_context.py index ebcbc6a4aa..7e42700d7d 100644 --- a/openpype/plugins/publish/collect_current_context.py +++ b/openpype/plugins/publish/collect_current_context.py @@ -19,7 +19,10 @@ class CollectCurrentContext(pyblish.api.ContextPlugin): label = "Collect Current context" def process(self, context): - # Set project name in context data + # Make sure 'legacy_io' is intalled + legacy_io.install() + + # Check if values are already set project_name = context.data.get("projectName") asset_name = context.data.get("asset") task_name = context.data.get("task") From d585ae526cf1d9306091f242c039e2efa5b29d00 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 19:20:03 +0200 Subject: [PATCH 0105/2550] get project name from 'context.data["projectName"]' or 'anatomy.project_name' at obvious places --- .../submit_maya_remote_publish_deadline.py | 12 +++++------- .../plugins/publish/collect_anatomy_object.py | 11 +++++++---- .../plugins/publish/collect_avalon_entities.py | 12 +++++++----- openpype/plugins/publish/collect_hierarchy.py | 4 +--- .../plugins/publish/collect_rendered_files.py | 16 +++++----------- .../plugins/publish/collect_resources_path.py | 6 +----- .../plugins/publish/integrate_hero_version.py | 6 ++---- openpype/plugins/publish/integrate_thumbnail.py | 3 +-- 8 files changed, 29 insertions(+), 41 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 57572fcb24..6e53099162 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -5,7 +5,6 @@ from maya import cmds from openpype.pipeline import legacy_io, PublishXmlValidationError from openpype.settings import get_project_settings -import openpype.api import pyblish.api @@ -34,7 +33,9 @@ class 
MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): targets = ["local"] def process(self, instance): - settings = get_project_settings(os.getenv("AVALON_PROJECT")) + project_name = instance.context.data["projectName"] + # TODO settings can be received from 'context.data["project_settings"]' + settings = get_project_settings(project_name) # use setting for publish job on farm, no reason to have it separately deadline_publish_job_sett = (settings["deadline"] ["publish"] @@ -53,9 +54,6 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): scene = instance.context.data["currentFile"] scenename = os.path.basename(scene) - # Get project code - project_name = legacy_io.Session["AVALON_PROJECT"] - job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=project_name, scene=scenename) @@ -107,8 +105,8 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO replace legacy_io with context.data ? - environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"] + # TODO replace legacy_io with context.data + environment["AVALON_PROJECT"] = project_name environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") diff --git a/openpype/plugins/publish/collect_anatomy_object.py b/openpype/plugins/publish/collect_anatomy_object.py index b1415098b6..8128221925 100644 --- a/openpype/plugins/publish/collect_anatomy_object.py +++ b/openpype/plugins/publish/collect_anatomy_object.py @@ -1,24 +1,27 @@ """Collect Anatomy object. Requires: - os.environ -> AVALON_PROJECT + context -> projectName Provides: context -> anatomy (openpype.pipeline.anatomy.Anatomy) """ -import os + import pyblish.api from openpype.pipeline import Anatomy class CollectAnatomyObject(pyblish.api.ContextPlugin): - """Collect Anatomy object into Context""" + """Collect Anatomy object into Context. + + Order offset could be changed to '-0.45'. + """ order = pyblish.api.CollectorOrder - 0.4 label = "Collect Anatomy Object" def process(self, context): - project_name = os.environ.get("AVALON_PROJECT") + project_name = context.data.get("projectName") if project_name is None: raise AssertionError( "Environment `AVALON_PROJECT` is not set." diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py index 6cd0d136e8..0a7afc086f 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_avalon_entities.py @@ -1,11 +1,13 @@ """Collect Anatomy and global anatomy data. Requires: - session -> AVALON_PROJECT, AVALON_ASSET + session -> AVALON_ASSET + context -> projectName Provides: - context -> projectEntity - project entity from database - context -> assetEntity - asset entity from database + context -> projectEntity - Project document from database. + context -> assetEntity - Asset document from database only if 'asset' is + set in context. 
""" import pyblish.api @@ -15,14 +17,14 @@ from openpype.pipeline import legacy_io class CollectAvalonEntities(pyblish.api.ContextPlugin): - """Collect Anatomy into Context""" + """Collect Anatomy into Context.""" order = pyblish.api.CollectorOrder - 0.1 label = "Collect Avalon Entities" def process(self, context): legacy_io.install() - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = context.data["projectName"] asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index 91d5162d62..687397be8a 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -1,7 +1,5 @@ import pyblish.api -from openpype.pipeline import legacy_io - class CollectHierarchy(pyblish.api.ContextPlugin): """Collecting hierarchy from `parents`. @@ -20,7 +18,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): def process(self, context): temp_context = {} - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = context.data["projectName"] final_context = {} final_context[project_name] = {} final_context[project_name]['entity_type'] = 'Project' diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index 670e57ed10..8c5d591148 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -1,7 +1,7 @@ """Loads publishing context from json and continues in publish process. Requires: - anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.4) Provides: context, instances -> All data from previous publishing process. @@ -21,6 +21,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. """ + order = pyblish.api.CollectorOrder - 0.2 # Keep "filesequence" for backwards compatibility of older jobs targets = ["filesequence", "farm"] @@ -122,19 +123,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): "Missing `OPENPYPE_PUBLISH_DATA`") paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep) - project_name = os.environ.get("AVALON_PROJECT") - if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` was not found." - "Could not set project `root` which may cause issues." 
- ) - - # TODO root filling should happen after collect Anatomy + # Using already collected Anatomy + anatomy = context.data["anatomy"] self.log.info("Getting root setting for project \"{}\"".format( - project_name + anatomy.project_name )) - anatomy = context.data["anatomy"] self.log.info("anatomy: {}".format(anatomy.roots)) try: session_is_set = False diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 8bdf70b529..00f65b8b67 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -13,8 +13,6 @@ import copy import pyblish.api -from openpype.pipeline import legacy_io - class CollectResourcesPath(pyblish.api.InstancePlugin): """Generate directory path where the files and resources will be stored""" @@ -58,7 +56,6 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "effect", "staticMesh", "skeletalMesh" - ] def process(self, instance): @@ -86,11 +83,10 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): else: # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." - ).format(project_name)) + ).format(anatomy.project_name)) file_path = anatomy_filled["publish"]["path"] # Directory diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 5f97a9bd41..735b7e50fa 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -71,7 +71,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): template_key = self._get_template_key(instance) anatomy = instance.context.data["anatomy"] - project_name = legacy_io.Session["AVALON_PROJECT"] + project_name = anatomy.project_name if template_key not in anatomy.templates: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" @@ -454,7 +454,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) if bulk_writes: - project_name = legacy_io.Session["AVALON_PROJECT"] legacy_io.database[project_name].bulk_write( bulk_writes ) @@ -517,11 +516,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." 
- ).format(project_name)) + ).format(anatomy.project_name)) file_path = anatomy_filled[template_key]["path"] # Directory diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index fd50858a91..8ae0dd2d60 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -39,9 +39,8 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - project_name = legacy_io.Session["AVALON_PROJECT"] - anatomy = instance.context.data["anatomy"] + project_name = anatomy.project_name if "publish" not in anatomy.templates: self.log.warning("Anatomy is missing the \"publish\" key!") return From 2453892f3fe12f1eee9615f94ac5c88ab6414f94 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Jul 2022 19:21:09 +0200 Subject: [PATCH 0106/2550] raise KnownPublishError instead of AssertionError --- openpype/plugins/publish/collect_anatomy_object.py | 8 ++++---- .../plugins/publish/collect_avalon_entities.py | 9 +++++---- openpype/plugins/publish/collect_rendered_files.py | 14 +++++++++++--- 3 files changed, 20 insertions(+), 11 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_object.py b/openpype/plugins/publish/collect_anatomy_object.py index 8128221925..725cae2b14 100644 --- a/openpype/plugins/publish/collect_anatomy_object.py +++ b/openpype/plugins/publish/collect_anatomy_object.py @@ -8,7 +8,7 @@ Provides: """ import pyblish.api -from openpype.pipeline import Anatomy +from openpype.pipeline import Anatomy, KnownPublishError class CollectAnatomyObject(pyblish.api.ContextPlugin): @@ -23,10 +23,10 @@ class CollectAnatomyObject(pyblish.api.ContextPlugin): def process(self, context): project_name = context.data.get("projectName") if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` is not set." + raise KnownPublishError(( + "Project name is not set in 'projectName'." "Could not initialize project's Anatomy." - ) + )) context.data["anatomy"] = Anatomy(project_name) diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py index 0a7afc086f..3b05b6ae98 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_avalon_entities.py @@ -13,7 +13,7 @@ Provides: import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, KnownPublishError class CollectAvalonEntities(pyblish.api.ContextPlugin): @@ -29,9 +29,10 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin): task_name = legacy_io.Session["AVALON_TASK"] project_entity = get_project(project_name) - assert project_entity, ( - "Project '{0}' was not found." 
- ).format(project_name) + if not project_entity: + raise KnownPublishError( + "Project '{0}' was not found.".format(project_name) + ) self.log.debug("Collected Project \"{}\"".format(project_entity)) context.data["projectEntity"] = project_entity diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index 8c5d591148..8f8d0a5eeb 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -12,7 +12,7 @@ import json import pyblish.api -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, KnownPublishError class CollectRenderedFiles(pyblish.api.ContextPlugin): @@ -20,6 +20,10 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): This collector will try to find json files in provided `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. + Note: + We should split this collector and move the part which handle reading + of file and it's context from session data before collect anatomy + and instance creation dependent on anatomy can be done here. """ order = pyblish.api.CollectorOrder - 0.2 @@ -119,8 +123,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): def process(self, context): self._context = context - assert os.environ.get("OPENPYPE_PUBLISH_DATA"), ( - "Missing `OPENPYPE_PUBLISH_DATA`") + if not os.environ.get("OPENPYPE_PUBLISH_DATA"): + raise KnownPublishError("Missing `OPENPYPE_PUBLISH_DATA`") + + # QUESTION + # Do we support (or want support) multiple files in the variable? + # - what if they have different context? paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep) # Using already collected Anatomy From 907500c9e9354c47d945a1abd785f5520be26bc2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 21:26:13 +0200 Subject: [PATCH 0107/2550] nuke: validate script attributes finish --- .../help/validate_script_attributes.xml | 8 +- .../publish/validate_script_attributes.py | 107 ++++++++---------- 2 files changed, 49 insertions(+), 66 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml index 96f8ab5d38..871fc629ce 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml @@ -6,13 +6,13 @@ ## Invalid Script attributes Following script root attributes need to be fixed: -{missing_attributes} + +{failed_attributes} ### How to repair? -1. Either use Repair or Select button. -2. If you chose Select then rename asset knob to correct name. -3. Hit Reload button on the publisher. +1. Use Repair. +2. Hit Reload button on the publisher. 
\ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index 605145149d..ef89d71c5b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -37,11 +37,18 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): actions = [RepairScriptAttributes] def process(self, instance): - ctx_data = instance.context.data + root = nuke.root() + knob_data = get_avalon_knob_data(root) project_name = legacy_io.active_project() - asset_name = ctx_data["asset"] - asset = get_asset_by_name(project_name, asset_name) - asset_data = asset["data"] + asset = get_asset_by_name( + project_name, + instance.context.data["asset"] + ) + # get asset data frame values + frame_start = asset["data"]["frameStart"] + frame_end = asset["data"]["frameEnd"] + handle_start = asset["data"]["handleStart"] + handle_end = asset["data"]["handleEnd"] # These attributes will be checked attributes = [ @@ -54,39 +61,36 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): "handleEnd" ] + # get only defined attributes from asset data asset_attributes = { - attr: asset_data[attr] + attr: asset["data"][attr] for attr in attributes - if attr in asset_data + if attr in asset["data"] } + # fix float to max 4 digints (only for evaluating) + fps_data = float("{0:.4f}".format( + asset_attributes["fps"])) + # fix frame values to include handles + asset_attributes.update({ + "frameStart": frame_start - handle_start, + "frameEnd": frame_end + handle_end, + "fps": fps_data + }) self.log.debug(pformat( asset_attributes )) - asset_attributes["fps"] = float("{0:.4f}".format( - asset_attributes["fps"])) - - root = nuke.root() - knob_data = get_avalon_knob_data(root) - - # Get frame range - first_frame = int(root["first_frame"].getValue()) - last_frame = int(root["last_frame"].getValue()) - - handle_start = int(knob_data["handleStart"]) - handle_end = int(knob_data["handleEnd"]) - # Get format _format = root["format"].value() # Get values from nukescript script_attributes = { - "handleStart": handle_start, - "handleEnd": handle_end, + "handleStart": int(knob_data["handleStart"]), + "handleEnd": int(knob_data["handleEnd"]), "fps": float("{0:.4f}".format(root['fps'].value())), - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, + "frameStart": int(root["first_frame"].getValue()), + "frameEnd": int(root["last_frame"].getValue()), "resolutionWidth": _format.width(), "resolutionHeight": _format.height(), "pixelAspect": _format.pixelAspect() @@ -94,6 +98,7 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): self.log.debug(pformat( script_attributes )) + # Compare asset's values Nukescript X Database not_matching = [] for attr in attributes: @@ -113,46 +118,24 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): # Raise error if not matching if not_matching: - msg = "Attributes '{}' are not set correctly" - # Alert user that handles are set if Frame start/end not match - message = msg.format(", ".join( - [at["name"] for at in not_matching])) + msg = "Following attributes are not set correctly: \n{}" + attrs_wrong_str = "\n".join([ + ( + "`{0}` is set to `{1}`, " + "but should be set to `{2}`" + ).format(at["name"], at["actual"], at["expected"]) + for at in not_matching + ]) + attrs_wrong_html = "
    ".join([ + ( + "-- __{0}__ is set to __{1}__, " + "but should be set to __{2}__" + ).format(at["name"], at["actual"], at["expected"]) + for at in not_matching + ]) raise PublishXmlValidationError( - self, message, + self, msg.format(attrs_wrong_str), formatting_data={ - "missing_attributes": not_matching + "failed_attributes": attrs_wrong_html } ) - - def check_parent_hierarchical( - self, project_name, parent_type, parent_id, attr - ): - if parent_id is None: - return None - - doc = None - if parent_type == "project": - doc = get_project(project_name) - elif parent_type == "asset": - doc = get_asset_by_id(project_name, parent_id) - - if not doc: - return None - - doc_data = doc["data"] - if attr in doc_data: - self.log.info(attr) - return doc_data[attr] - - if parent_type == "project": - return None - - parent_id = doc_data.get("visualParent") - new_parent_type = "asset" - if parent_id is None: - parent_id = doc["parent"] - new_parent_type = "project" - - return self.check_parent_hierarchical( - project_name, new_parent_type, parent_id, attr - ) From 06f338a95bf33644d84e365d01a3c4f6a68ac344 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 25 Jul 2022 21:47:47 +0200 Subject: [PATCH 0108/2550] nuke: making validator code nicer --- .../publish/validate_script_attributes.py | 24 +++++++------------ 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index ef89d71c5b..d16660f272 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -1,10 +1,10 @@ from pprint import pformat import pyblish.api -from openpype.client import get_project, get_asset_by_id, get_asset_by_name +import openpype.api +from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io from openpype.pipeline import PublishXmlValidationError -from openpype.api import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( get_avalon_knob_data, WorkfileSettings @@ -12,19 +12,6 @@ from openpype.hosts.nuke.api.lib import ( import nuke -@pyblish.api.log -class RepairScriptAttributes(pyblish.api.Action): - label = "Repair" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - instances = get_errored_instances_from_context(context) - - self.log.debug(instances) - WorkfileSettings().set_context_settings() - - @pyblish.api.log class ValidateScriptAttributes(pyblish.api.InstancePlugin): """ Validates file output. 
""" @@ -34,7 +21,7 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): label = "Validatte script attributes" hosts = ["nuke"] optional = True - actions = [RepairScriptAttributes] + actions = [openpype.api.RepairAction] def process(self, instance): root = nuke.root() @@ -139,3 +126,8 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): "failed_attributes": attrs_wrong_html } ) + + @classmethod + def repair(cls, instance): + cls.log.debug("__ repairing instance: {}".format(instance)) + WorkfileSettings().set_context_settings() From b5fb016331e3a86397f6337f44e4c885caf9cff1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:10:08 +0200 Subject: [PATCH 0109/2550] moved abstract template loader into openpype/pipeline/workfile --- openpype/{lib => pipeline/workfile}/abstract_template_loader.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/{lib => pipeline/workfile}/abstract_template_loader.py (100%) diff --git a/openpype/lib/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py similarity index 100% rename from openpype/lib/abstract_template_loader.py rename to openpype/pipeline/workfile/abstract_template_loader.py From b1f2831868001431ab5b949cf2a85729a9adfb04 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:12:04 +0200 Subject: [PATCH 0110/2550] moved 'get_loaders_by_name' to load utils --- openpype/lib/avalon_context.py | 15 --------------- openpype/pipeline/load/__init__.py | 2 ++ openpype/pipeline/load/utils.py | 14 ++++++++++++++ 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 316c8ad67e..86902cac56 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -943,21 +943,6 @@ def collect_last_version_repres(asset_entities): return output -@with_pipeline_io -def get_loaders_by_name(): - from openpype.pipeline import discover_loader_plugins - - loaders_by_name = {} - for loader in discover_loader_plugins(): - loader_name = loader.__name__ - if loader_name in loaders_by_name: - raise KeyError( - "Duplicated loader name {} !".format(loader_name) - ) - loaders_by_name[loader_name] = loader - return loaders_by_name - - class BuildWorkfile: """Wrapper for build workfile process. 
diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index e46d9f152b..b6bdd13d50 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -16,6 +16,7 @@ from .utils import ( switch_container, get_loader_identifier, + get_loaders_by_name, get_representation_path_from_context, get_representation_path, @@ -61,6 +62,7 @@ __all__ = ( "switch_container", "get_loader_identifier", + "get_loaders_by_name", "get_representation_path_from_context", "get_representation_path", diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index fe5102353d..9945e1fce4 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -369,6 +369,20 @@ def get_loader_identifier(loader): return loader.__name__ +def get_loaders_by_name(): + from .plugins import discover_loader_plugins + + loaders_by_name = {} + for loader in discover_loader_plugins(): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {} !".format(loader_name) + ) + loaders_by_name[loader_name] = loader + return loaders_by_name + + def _get_container_loader(container): """Return the Loader corresponding to the container""" from .plugins import discover_loader_plugins From b2b6ffe0e4290840fc1ca1b5c98174f2bdfcbfaf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:13:56 +0200 Subject: [PATCH 0111/2550] updated 'collect_last_version_repres' with latest develop --- openpype/lib/avalon_context.py | 68 +++++++++++++++------------------- 1 file changed, 30 insertions(+), 38 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 86902cac56..4b552d13ed 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -847,7 +847,7 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): @with_pipeline_io -def collect_last_version_repres(asset_entities): +def collect_last_version_repres(asset_docs): """Collect subsets, versions and representations for asset_entities. 
Args: @@ -880,64 +880,56 @@ def collect_last_version_repres(asset_entities): ``` """ - if not asset_entities: - return {} + output = {} + if not asset_docs: + return output - asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} + asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} - subsets = list(legacy_io.find({ - "type": "subset", - "parent": {"$in": list(asset_entity_by_ids.keys())} - })) + project_name = legacy_io.active_project() + subsets = list(get_subsets( + project_name, asset_ids=asset_docs_by_ids.keys() + )) subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - sorted_versions = list(legacy_io.find({ - "type": "version", - "parent": {"$in": list(subset_entity_by_ids.keys())} - }).sort("name", -1)) + last_version_by_subset_id = get_last_versions( + project_name, subset_entity_by_ids.keys() + ) + last_version_docs_by_id = { + version["_id"]: version + for version in last_version_by_subset_id.values() + } + repre_docs = get_representations( + project_name, version_ids=last_version_docs_by_id.keys() + ) - subset_id_with_latest_version = [] - last_versions_by_id = {} - for version in sorted_versions: - subset_id = version["parent"] - if subset_id in subset_id_with_latest_version: - continue - subset_id_with_latest_version.append(subset_id) - last_versions_by_id[version["_id"]] = version + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + version_doc = last_version_docs_by_id[version_id] - repres = legacy_io.find({ - "type": "representation", - "parent": {"$in": list(last_versions_by_id.keys())} - }) + subset_id = version_doc["parent"] + subset_doc = subset_entity_by_ids[subset_id] - output = {} - for repre in repres: - version_id = repre["parent"] - version = last_versions_by_id[version_id] - - subset_id = version["parent"] - subset = subset_entity_by_ids[subset_id] - - asset_id = subset["parent"] - asset = asset_entity_by_ids[asset_id] + asset_id = subset_doc["parent"] + asset_doc = asset_docs_by_ids[asset_id] if asset_id not in output: output[asset_id] = { - "asset_entity": asset, + "asset_entity": asset_doc, "subsets": {} } if subset_id not in output[asset_id]["subsets"]: output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset, + "subset_entity": subset_doc, "version": { - "version_entity": version, + "version_entity": version_doc, "repres": [] } } output[asset_id]["subsets"][subset_id]["version"]["repres"].append( - repre + repre_doc ) return output From 9b4b44ef3bdf490fca2a4df0f3451143a09e555c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:17:26 +0200 Subject: [PATCH 0112/2550] moved build template code into workfile --- openpype/{lib => pipeline/workfile}/build_template.py | 0 openpype/{lib => pipeline/workfile}/build_template_exceptions.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename openpype/{lib => pipeline/workfile}/build_template.py (100%) rename openpype/{lib => pipeline/workfile}/build_template_exceptions.py (100%) diff --git a/openpype/lib/build_template.py b/openpype/pipeline/workfile/build_template.py similarity index 100% rename from openpype/lib/build_template.py rename to openpype/pipeline/workfile/build_template.py diff --git a/openpype/lib/build_template_exceptions.py b/openpype/pipeline/workfile/build_template_exceptions.py similarity index 100% rename from openpype/lib/build_template_exceptions.py rename to openpype/pipeline/workfile/build_template_exceptions.py From 6462bf15d04ad53eaed484069e70f2c2312f0a2f Mon Sep 17 00:00:00 2001 From: Jakub 
Trllo Date: Wed, 27 Jul 2022 10:24:16 +0200 Subject: [PATCH 0113/2550] fixed imports --- .../workfile/abstract_template_loader.py | 24 ++++++++++--------- openpype/pipeline/workfile/build_template.py | 4 ++-- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index e296e3207f..e95b89b518 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -4,23 +4,25 @@ from abc import ABCMeta, abstractmethod import traceback import six - -from openpype.settings import get_project_settings -from openpype.lib import Anatomy, get_linked_assets, get_loaders_by_name -from openpype.api import PypeLogger as Logger -from openpype.pipeline import legacy_io, load - +import logging from functools import reduce -from openpype.lib.build_template_exceptions import ( +from openpype.settings import get_project_settings +from openpype.lib import get_linked_assets, PypeLogger as Logger +from openpype.pipeline import legacy_io, Anatomy +from openpype.pipeline.load import ( + get_loaders_by_name, + get_representation_context, + load_with_repre_context, +) + +from .build_template_exceptions import ( TemplateAlreadyImported, TemplateLoadingFailed, TemplateProfileNotFound, TemplateNotFound ) -import logging - log = logging.getLogger(__name__) @@ -289,8 +291,8 @@ class AbstractTemplateLoader: pass def load(self, placeholder, loaders_by_name, last_representation): - repre = load.get_representation_context(last_representation) - return load.load_with_repre_context( + repre = get_representation_context(last_representation) + return load_with_repre_context( loaders_by_name[placeholder.loader], repre, options=parse_loader_args(placeholder.data['loader_args'])) diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py index 7f749cbec2..f4b57218fb 100644 --- a/openpype/pipeline/workfile/build_template.py +++ b/openpype/pipeline/workfile/build_template.py @@ -1,6 +1,6 @@ -from openpype.pipeline import registered_host -from openpype.lib import classes_from_module from importlib import import_module +from openpype.lib import classes_from_module +from openpype.pipeline import registered_host from .abstract_template_loader import ( AbstractPlaceholder, From 5dfb12a217f24e5551ec3f4a982823254efdb00e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:24:44 +0200 Subject: [PATCH 0114/2550] logger is created dynamically on demand and is using class name --- openpype/pipeline/workfile/abstract_template_loader.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index e95b89b518..27823479cf 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -72,6 +72,7 @@ class AbstractTemplateLoader: """ def __init__(self, placeholder_class): + self._log = None self.loaders_by_name = get_loaders_by_name() self.current_asset = legacy_io.Session["AVALON_ASSET"] @@ -91,8 +92,6 @@ class AbstractTemplateLoader: .get("type") ) - self.log = Logger().get_logger("BUILD TEMPLATE") - self.log.info( "BUILDING ASSET FROM TEMPLATE :\n" "Starting templated build for {asset} in {project}\n\n" @@ -112,6 +111,12 @@ class AbstractTemplateLoader: "There is no registered loaders. 
No assets will be loaded") return + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + def template_already_imported(self, err_msg): """In case template was already loaded. Raise the error as a default action. From 764207d033fc049f6726f901a99732c928595768 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:25:04 +0200 Subject: [PATCH 0115/2550] fix missing import 'get_loaders_by_name' --- openpype/lib/avalon_context.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 4b552d13ed..e60dbb9e8f 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -992,6 +992,9 @@ class BuildWorkfile: ... }] """ + + from openpype.pipeline.load import get_loaders_by_name + # Get current asset name and entity project_name = legacy_io.active_project() current_asset_name = legacy_io.Session["AVALON_ASSET"] From fe38df50bff954993570cd113371044dde4a5e43 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:26:18 +0200 Subject: [PATCH 0116/2550] removed 'get_loaders_by_name' from openpype lib init file --- openpype/lib/__init__.py | 2 -- openpype/pipeline/workfile/__init__.py | 0 2 files changed, 2 deletions(-) create mode 100644 openpype/pipeline/workfile/__init__.py diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index f4efffd726..fb52a9aca7 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -135,7 +135,6 @@ from .avalon_context import ( create_workfile_doc, save_workfile_data_to_doc, get_workfile_doc, - get_loaders_by_name, BuildWorkfile, @@ -307,7 +306,6 @@ __all__ = [ "create_workfile_doc", "save_workfile_data_to_doc", "get_workfile_doc", - "get_loaders_by_name", "BuildWorkfile", diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From c9ac330e2ebedc6e9900e0d2e6207a20326d0139 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:31:18 +0200 Subject: [PATCH 0117/2550] fixed imports in maya --- openpype/hosts/maya/api/menu.py | 6 +++--- openpype/hosts/maya/api/template_loader.py | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index c0bad7092f..833fbae881 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -8,12 +8,12 @@ import maya.cmds as cmds from openpype.api import BuildWorkfile -from openpype.lib.build_template import ( +from openpype.settings import get_project_settings +from openpype.pipeline import legacy_io +from openpype.pipeline.workfile.build_template import ( build_workfile_template, update_workfile_template ) -from openpype.settings import get_project_settings -from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py index c7946b6ad3..6b225442e7 100644 --- a/openpype/hosts/maya/api/template_loader.py +++ b/openpype/hosts/maya/api/template_loader.py @@ -1,11 +1,13 @@ from maya import cmds from openpype.pipeline import legacy_io -from openpype.lib.abstract_template_loader import ( +from openpype.pipeline.workfile.abstract_template_loader import ( AbstractPlaceholder, AbstractTemplateLoader ) -from openpype.lib.build_template_exceptions import 
TemplateAlreadyImported +from openpype.pipeline.workfile.build_template_exceptions import ( + TemplateAlreadyImported +) PLACEHOLDER_SET = 'PLACEHOLDERS_SET' From 1e8cf2a6ea87ded1131d5d3012cdd5980dc2f183 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:36:04 +0200 Subject: [PATCH 0118/2550] make sure '_log' attribute is available before abc init --- openpype/pipeline/workfile/abstract_template_loader.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 27823479cf..3d942a0bdd 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -71,9 +71,9 @@ class AbstractTemplateLoader: as placeholders. Depending on current host """ - def __init__(self, placeholder_class): - self._log = None + _log = None + def __init__(self, placeholder_class): self.loaders_by_name = get_loaders_by_name() self.current_asset = legacy_io.Session["AVALON_ASSET"] self.project_name = legacy_io.Session["AVALON_PROJECT"] From 361ba53f26d89e94758ff8f32e48444ba1715771 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:54:57 +0200 Subject: [PATCH 0119/2550] use new location of 'get_default_components' function --- start.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/start.py b/start.py index ace33ab92a..08e0849303 100644 --- a/start.py +++ b/start.py @@ -1113,7 +1113,7 @@ def boot(): def get_info(use_staging=None) -> list: """Print additional information to console.""" - from openpype.lib.mongo import get_default_components + from openpype.client.mongo import get_default_components from openpype.lib.log import PypeLogger components = get_default_components() From bfbb1225d0ed7a7acccf900e42bdccad60a05ced Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 10:57:19 +0200 Subject: [PATCH 0120/2550] Use 'Logger' instead of 'PypeLogger' --- start.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/start.py b/start.py index 08e0849303..e83589d160 100644 --- a/start.py +++ b/start.py @@ -1114,7 +1114,11 @@ def boot(): def get_info(use_staging=None) -> list: """Print additional information to console.""" from openpype.client.mongo import get_default_components - from openpype.lib.log import PypeLogger + try: + from openpype.lib.log import Logger + except ImportError: + # Backwards compatibility for 'PypeLogger' + from openpype.lib.log import PypeLogger as Logger components = get_default_components() @@ -1141,14 +1145,14 @@ def get_info(use_staging=None) -> list: os.environ.get("MUSTER_REST_URL"))) # Reinitialize - PypeLogger.initialize() + Logger.initialize() mongo_components = get_default_components() if mongo_components["host"]: inf.append(("Logging to MongoDB", mongo_components["host"])) inf.append((" - port", mongo_components["port"] or "")) - inf.append((" - database", PypeLogger.log_database_name)) - inf.append((" - collection", PypeLogger.log_collection_name)) + inf.append((" - database", Logger.log_database_name)) + inf.append((" - collection", Logger.log_collection_name)) inf.append((" - user", mongo_components["username"] or "")) if mongo_components["auth_db"]: inf.append((" - auth source", mongo_components["auth_db"])) From f7cb4cd83a4fc107b2960903ee8b87fc28c0052c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 11:01:54 +0200 Subject: [PATCH 0121/2550] added missing default settings --- 
.../settings/defaults/system_settings/modules.json | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 9d8910689a..3ed41c7a49 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -131,16 +131,17 @@ } } }, + "kitsu": { + "enabled": false, + "server": "" + }, "shotgrid": { "enabled": false, "leecher_manager_url": "http://127.0.0.1:3000", "leecher_backend_url": "http://127.0.0.1:8090", + "filter_projects_by_login": true, "shotgrid_settings": {} }, - "kitsu": { - "enabled": false, - "server": "" - }, "timers_manager": { "enabled": true, "auto_stop": true, @@ -209,4 +210,4 @@ "linux": "" } } -} +} \ No newline at end of file From 4c849e8d86e7665cc4ee3e235403f2baf41e8b84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 27 Jul 2022 18:14:22 +0200 Subject: [PATCH 0122/2550] :bug: fix environment resolution this will fix environment resolution of general settings in one pass --- start.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/start.py b/start.py index e83589d160..cbf8ffd178 100644 --- a/start.py +++ b/start.py @@ -270,8 +270,11 @@ def set_openpype_global_environments() -> None: general_env = get_general_environments() + # first resolve general environment because merge doesn't expect + # values to be list. + # TODO: switch to OpenPype environment functions merged_env = acre.merge( - acre.parse(general_env), + acre.compute(acre.parse(general_env), cleanup=False), dict(os.environ) ) env = acre.compute( From 52314b0bf514f58c042c2a7c7bdd9d45a24ae2e9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 19:03:08 +0200 Subject: [PATCH 0123/2550] update ftrack api to 2.3.3 --- openpype/modules/ftrack/ftrack_server/lib.py | 21 +++++++++++++++++--- poetry.lock | 20 +++++++++---------- pyproject.toml | 2 +- 3 files changed, 28 insertions(+), 15 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 3da1e7c7f0..947dacf917 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -7,6 +7,7 @@ import threading import datetime import time import queue +import collections import appdirs import pymongo @@ -309,7 +310,20 @@ class CustomEventHubSession(ftrack_api.session.Session): # Currently pending operations. 
self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True + + # OpenPype change - In new API are operations properties + new_api = hasattr(self.__class__, "record_operations") + + if new_api: + self._record_operations = collections.defaultdict( + lambda: True + ) + self._auto_populate = collections.defaultdict( + lambda: auto_populate + ) + else: + self.record_operations = True + self.auto_populate = auto_populate self.cache_key_maker = cache_key_maker if self.cache_key_maker is None: @@ -328,6 +342,9 @@ class CustomEventHubSession(ftrack_api.session.Session): if cache is not None: self.cache.caches.append(cache) + if new_api: + self.merge_lock = threading.RLock() + self._managed_request = None self._request = requests.Session() self._request.auth = ftrack_api.session.SessionAuthentication( @@ -335,8 +352,6 @@ class CustomEventHubSession(ftrack_api.session.Session): ) self.request_timeout = timeout - self.auto_populate = auto_populate - # Fetch server information and in doing so also check credentials. self._server_information = self._fetch_server_information() diff --git a/poetry.lock b/poetry.lock index 0033bc0d73..33deab003e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -221,7 +221,7 @@ python-versions = "~=3.7" [[package]] name = "certifi" -version = "2022.5.18.1" +version = "2022.6.15" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -456,19 +456,20 @@ python-versions = ">=3.7" [[package]] name = "ftrack-python-api" -version = "2.0.0" +version = "2.3.3" description = "Python API for ftrack." category = "main" optional = false -python-versions = ">=2.7.9, <4.0" +python-versions = ">=2.7.9, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, < 3.10" [package.dependencies] +appdirs = ">=1,<2" arrow = ">=0.4.4,<1" -clique = ">=1.2.0,<2" +clique = "1.6.1" future = ">=0.16.0,<1" pyparsing = ">=2.0,<3" requests = ">=2,<3" -six = ">=1,<2" +six = ">=1.13.0,<2" termcolor = ">=1.1.0,<2" websocket-client = ">=0.40.0,<1" @@ -1885,8 +1886,8 @@ cachetools = [ {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, ] certifi = [ - {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, - {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] cffi = [ {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, @@ -2152,10 +2153,7 @@ frozenlist = [ {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, ] -ftrack-python-api = [ - {file = "ftrack-python-api-2.0.0.tar.gz", hash = "sha256:dd6f02c31daf5a10078196dc9eac4671e4297c762fbbf4df98de668ac12281d9"}, - {file = "ftrack_python_api-2.0.0-py2.py3-none-any.whl", hash = "sha256:d0df0f2df4b53947272f95e179ec98b477ee425bf4217b37bb59030ad989771e"}, -] +ftrack-python-api = [] future = [ {file = "future-0.18.2.tar.gz", hash = 
"sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] diff --git a/pyproject.toml b/pyproject.toml index 1627b5e1c1..5785c7635b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ coolname = "*" clique = "1.6.*" Click = "^7" dnspython = "^2.1.0" -ftrack-python-api = "2.0.*" +ftrack-python-api = "^2.3.3" shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} gazu = "^0.8.28" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) From 3e7a9d3e468ebb7b9149fb3b5d7c1fed200732b6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Jul 2022 19:04:22 +0200 Subject: [PATCH 0124/2550] use master branch of appdirs --- poetry.lock | 14 +++++++++----- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0033bc0d73..72e5763c9c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -92,7 +92,14 @@ version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "main" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/ActiveState/appdirs.git" +reference = "master" +resolved_reference = "193a2cbba58cce2542882fcedd0e49f6763672ed" [[package]] name = "arrow" @@ -1827,10 +1834,7 @@ ansicon = [ {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, ] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] +appdirs = [] arrow = [ {file = "arrow-0.17.0-py2.py3-none-any.whl", hash = "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5"}, {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, diff --git a/pyproject.toml b/pyproject.toml index 1627b5e1c1..4361c8c9f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ aiohttp = "^3.7" aiohttp_json_rpc = "*" # TVPaint server acre = { git = "https://github.com/pypeclub/acre.git" } opentimelineio = { version = "0.14.0.dev1", source = "openpype" } -appdirs = "^1.4.3" +appdirs = { git = "https://github.com/ActiveState/appdirs.git", branch = "master" } blessed = "^1.17" # openpype terminal formatting coolname = "*" clique = "1.6.*" From 6bb28d16df22e4d5c4cf6e763a85a545ba6da833 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 28 Jul 2022 11:53:51 +0200 Subject: [PATCH 0125/2550] fix build template and added few comments --- .../workfile/abstract_template_loader.py | 30 +++++++++++++------ openpype/pipeline/workfile/build_template.py | 9 +++++- 2 files changed, 29 insertions(+), 10 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 3d942a0bdd..00bc8f15a7 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -7,6 +7,7 @@ import six import logging from functools import reduce +from openpype.client import get_asset_by_name from 
openpype.settings import get_project_settings from openpype.lib import get_linked_assets, PypeLogger as Logger from openpype.pipeline import legacy_io, Anatomy from openpype.pipeline.load import ( get_loaders_by_name, get_representation_context, load_with_repre_context, ) from .build_template_exceptions import ( TemplateAlreadyImported, TemplateLoadingFailed, TemplateProfileNotFound, TemplateNotFound ) log = logging.getLogger(__name__) @@ -74,18 +75,25 @@ class AbstractTemplateLoader: _log = None def __init__(self, placeholder_class): + # TODO template loader should expect host as an argument + # - host has all responsibility for most of the code (also provides + # the placeholder class) + # - it also has responsibility for the current context + # - this won't work in DCCs where multiple workfiles with + # different contexts can be opened at the same time + # - template loader should have the ability to change context + project_name = legacy_io.active_project() + asset_name = legacy_io.Session["AVALON_ASSET"] + self.loaders_by_name = get_loaders_by_name() - self.current_asset = legacy_io.Session["AVALON_ASSET"] - self.project_name = legacy_io.Session["AVALON_PROJECT"] + self.current_asset = asset_name + self.project_name = project_name self.host_name = legacy_io.Session["AVALON_APP"] self.task_name = legacy_io.Session["AVALON_TASK"] self.placeholder_class = placeholder_class - self.current_asset_docs = legacy_io.find_one({ - "type": "asset", - "name": self.current_asset - }) + self.current_asset_doc = get_asset_by_name(project_name, asset_name) self.task_type = ( - self.current_asset_docs + self.current_asset_doc .get("data", {}) .get("tasks", {}) .get(self.task_name, {}) @@ -218,7 +226,7 @@ class AbstractTemplateLoader: loaders_by_name = self.loaders_by_name current_asset = self.current_asset linked_assets = [asset['name'] for asset - in get_linked_assets(self.current_asset_docs)] + in get_linked_assets(self.current_asset_doc)] ignored_ids = ignored_ids or [] placeholders = self.get_placeholders() @@ -270,7 +278,11 @@ class AbstractTemplateLoader: self.postload(placeholder) def get_placeholder_representations( - self, placeholder, current_asset, linked_assets): + self, placeholder, current_asset, linked_assets + ): + # TODO This approach must be changed. Placeholders should return + # already prepared data and not query them here.
+ # - this is impossible to handle using query functions placeholder_db_filters = placeholder.convert_to_db_filters( current_asset, linked_assets) diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py index f4b57218fb..df6fe3514a 100644 --- a/openpype/pipeline/workfile/build_template.py +++ b/openpype/pipeline/workfile/build_template.py @@ -1,5 +1,6 @@ from importlib import import_module from openpype.lib import classes_from_module +from openpype.host import HostBase from openpype.pipeline import registered_host from .abstract_template_loader import ( @@ -35,7 +36,13 @@ def update_workfile_template(args): def build_template_loader(): - host_name = registered_host().__name__.partition('.')[2] + # TODO refactor to use advantage of 'HostBase' and don't import dynamically + # - hosts should have methods that gives option to return builders + host = registered_host() + if isinstance(host, HostBase): + host_name = host.name + else: + host_name = host.__name__.partition('.')[2] module_path = _module_path_format.format(host=host_name) module = import_module(module_path) if not module: From 628833be97308401e3279929a9866da03c6d8d9d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 14:48:37 +0200 Subject: [PATCH 0126/2550] flame: adding timewarp effect scraping --- openpype/hosts/flame/api/lib.py | 153 ++++++++++++++++++++++++++++++-- 1 file changed, 145 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index d59308ad6c..02481a1d2e 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -1,14 +1,16 @@ import sys import os import re +import sys import json import pickle import clique import tempfile +import traceback import itertools import contextlib import xml.etree.cElementTree as cET -from copy import deepcopy +from copy import deepcopy, copy from xml.etree import ElementTree as ET from pprint import pformat from .constants import ( @@ -266,7 +268,7 @@ def get_current_sequence(selection): def rescan_hooks(): import flame try: - flame.execute_shortcut('Rescan Python Hooks') + flame.execute_shortcut("Rescan Python Hooks") except Exception: pass @@ -1082,21 +1084,21 @@ class MediaInfoFile(object): xml_data (ET.Element): clip data """ try: - for out_track in xml_data.iter('track'): - for out_feed in out_track.iter('feed'): + for out_track in xml_data.iter("track"): + for out_feed in out_track.iter("feed"): # start frame out_feed_nb_ticks_obj = out_feed.find( - 'startTimecode/nbTicks') + "startTimecode/nbTicks") self.start_frame = out_feed_nb_ticks_obj.text # fps out_feed_fps_obj = out_feed.find( - 'startTimecode/rate') + "startTimecode/rate") self.fps = out_feed_fps_obj.text # drop frame mode out_feed_drop_mode_obj = out_feed.find( - 'startTimecode/dropMode') + "startTimecode/dropMode") self.drop_mode = out_feed_drop_mode_obj.text break except Exception as msg: @@ -1118,8 +1120,143 @@ class MediaInfoFile(object): tree = cET.ElementTree(xml_element_data) tree.write( fpath, xml_declaration=True, - method='xml', encoding='UTF-8' + method="xml", encoding="UTF-8" ) except IOError as error: raise IOError( "Not able to write data to file: {}".format(error)) + + +class TimeEffectMetadata(object): + log = log + temp_setup_path = "/var/tmp/temp_timewarp_setup.timewarp_node" + _data = {} + _retime_modes = { + 0: "speed", + 1: "timewarp", + 2: "duration" + } + + def __init__(self, segment=None, logger=None): + if logger: + self.log = logger + if segment: + 
self._data = self._get_metadata(segment) + + def _get_metadata(self, segment): + effects = segment.effects or [] + for effect in effects: + if effect.type == "Timewarp": + effect.save_setup(self.temp_setup_path) + + self._data = self._get_attributes_from_xml() + os.remove(self.temp_setup_path) + + def _get_attributes_from_xml(self): + with open(self.temp_setup_path, "r") as tw_setup_file: + tw_setup_string = tw_setup_file.read() + tw_setup_file.close() + + tw_setup_xml = ET.fromstring(tw_setup_string) + tw_setup = self._dictify(tw_setup_xml) + # pprint(tw_setup) + try: + tw_setup_state = tw_setup["Setup"]["State"][0] + mode = int( + tw_setup_state["TW_RetimerMode"][0]["_text"] + ) + r_data = { + "type": self._retime_modes[mode], + "effectStart": int( + tw_setup["Setup"]["Base"][0]["Range"][0]["Start"]), + "effectEnd": int( + tw_setup["Setup"]["Base"][0]["Range"][0]["End"]) + } + + if mode == 0: # speed + r_data[self._retime_modes[mode]] = int( + tw_setup_state["TW_Speed"] + [0]["Channel"][0]["Value"][0]["_text"] + ) / 100 + elif mode == 1: # timewarp + print("timing") + r_data[self._retime_modes[mode]] = self._get_anim_keys( + tw_setup_state["TW_Timing"] + ) + elif mode == 2: # duration + r_data[self._retime_modes[mode]] = { + "start": { + "source": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][0]["Value"][0]["_text"] + ), + "timeline": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][0]["Frame"][0]["_text"] + ) + }, + "end": { + "source": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][1]["Value"][0]["_text"] + ), + "timeline": int( + tw_setup_state["TW_DurationTiming"][0]["Channel"] + [0]["KFrames"][0]["Key"][1]["Frame"][0]["_text"] + ) + } + } + except Exception: + lines = traceback.format_exception(*sys.exc_info()) + self.log.error("\n".join(lines)) + return + + return r_data + + def _get_anim_keys(self, setup_cat, index=None): + return_data = { + "extrapolation": ( + setup_cat[0]["Channel"][0]["Extrap"][0]["_text"] + ), + "animKeys": [] + } + for key in setup_cat[0]["Channel"][0]["KFrames"][0]["Key"]: + if index and int(key["Index"]) != index: + continue + key_data = { + "source": float(key["Value"][0]["_text"]), + "timeline": float(key["Frame"][0]["_text"]), + "index": int(key["Index"]), + "curveMode": key["CurveMode"][0]["_text"], + "curveOrder": key["CurveOrder"][0]["_text"] + } + if key.get("TangentMode"): + key_data["tangentMode"] = key["TangentMode"][0]["_text"] + + return_data["animKeys"].append(key_data) + + return return_data + + def _dictify(self, xml_, root=True): + """ Convert xml object to dictionary + + Args: + xml_ (xml.etree.ElementTree.Element): xml data + root (bool, optional): is root available. Defaults to True. 
+ + Returns: + dict: dictionarized xml + """ + + if root: + return {xml_.tag: self._dictify(xml_, False)} + + d = copy(xml_.attrib) + if xml_.text: + d["_text"] = xml_.text + + for x in xml_.findall("./*"): + if x.tag not in d: + d[x.tag] = [] + d[x.tag].append(self._dictify(x, False)) + return d From 2998253832daf43f62fc901de6dd11eccb2708fd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 14:58:43 +0200 Subject: [PATCH 0127/2550] flame: adding property to return data --- openpype/hosts/flame/api/lib.py | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 02481a1d2e..a02acd85a7 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -1129,7 +1129,6 @@ class MediaInfoFile(object): class TimeEffectMetadata(object): log = log - temp_setup_path = "/var/tmp/temp_timewarp_setup.timewarp_node" _data = {} _retime_modes = { 0: "speed", @@ -1137,23 +1136,34 @@ class TimeEffectMetadata(object): 2: "duration" } - def __init__(self, segment=None, logger=None): + def __init__(self, segment, logger=None): if logger: self.log = logger - if segment: - self._data = self._get_metadata(segment) + + self._data = self._get_metadata(segment) + + @property + def data(self): + """ Returns timewarp effect data + + Returns: + dict: retime data + """ + return self._data def _get_metadata(self, segment): effects = segment.effects or [] for effect in effects: if effect.type == "Timewarp": - effect.save_setup(self.temp_setup_path) + with maintained_temp_file_path(".timewarp_node") as tmp_path: + self.log.info("Temp File: {}".format(tmp_path)) + effect.save_setup(tmp_path) + return self._get_attributes_from_xml(tmp_path) - self._data = self._get_attributes_from_xml() - os.remove(self.temp_setup_path) + return {} - def _get_attributes_from_xml(self): - with open(self.temp_setup_path, "r") as tw_setup_file: + def _get_attributes_from_xml(self, tmp_path): + with open(tmp_path, "r") as tw_setup_file: tw_setup_string = tw_setup_file.read() tw_setup_file.close() From 7f9948eaad87d144db2fc58c5083798ebf34482f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 15:09:06 +0200 Subject: [PATCH 0128/2550] flame: adding timewarp class to api --- openpype/hosts/flame/api/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 2c461e5f16..76c1c93379 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -30,7 +30,8 @@ from .lib import ( maintained_temp_file_path, get_clip_segment, get_batch_group_from_desktop, - MediaInfoFile + MediaInfoFile, + TimeEffectMetadata ) from .utils import ( setup, @@ -107,6 +108,7 @@ __all__ = [ "get_clip_segment", "get_batch_group_from_desktop", "MediaInfoFile", + "TimeEffectMetadata", # pipeline "install", From 42fa3dd2097cf7d1b9c9442b042600981be64bb9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 15:09:25 +0200 Subject: [PATCH 0129/2550] flame: implementing timewarpmetadata class --- openpype/hosts/flame/otio/flame_export.py | 25 +++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 1e4ef866ed..a111176e29 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -275,7 +275,7 @@ def 
create_otio_reference(clip_data, fps=None): def create_otio_clip(clip_data): - from openpype.hosts.flame.api import MediaInfoFile + from openpype.hosts.flame.api import MediaInfoFile, TimeEffectMetadata segment = clip_data["PySegment"] @@ -284,14 +284,27 @@ def create_otio_clip(clip_data): media_timecode_start = media_info.start_frame media_fps = media_info.fps + # Timewarp metadata + tw_data = TimeEffectMetadata(segment, logger=log).data + log.debug("__ tw_data: {}".format(tw_data)) + # define first frame first_frame = media_timecode_start or utils.get_frame_from_filename( clip_data["fpath"]) or 0 _clip_source_in = int(clip_data["source_in"]) _clip_source_out = int(clip_data["source_out"]) + _clip_source_duration = clip_data["source_duration"] + _clip_record_in = clip_data["record_in"] + _clip_record_out = clip_data["record_out"] _clip_record_duration = int(clip_data["record_duration"]) + log.debug("_ first_frame: {}".format(first_frame)) + log.debug("_ _clip_source_in: {}".format(_clip_source_in)) + log.debug("_ _clip_source_out: {}".format(_clip_source_out)) + log.debug("_ _clip_record_in: {}".format(_clip_record_in)) + log.debug("_ _clip_record_out: {}".format(_clip_record_out)) + # first solve if the reverse timing speed = 1 if clip_data["source_in"] > clip_data["source_out"]: @@ -307,13 +320,17 @@ def create_otio_clip(clip_data): # secondly check if any change of speed if source_duration != _clip_record_duration: retime_speed = float(source_duration) / float(_clip_record_duration) - log.debug("_ retime_speed: {}".format(retime_speed)) + log.debug("_ calculated speed: {}".format(retime_speed)) speed *= retime_speed - log.debug("_ source_in: {}".format(source_in)) - log.debug("_ source_out: {}".format(source_out)) + # get speed from metadata if available + if tw_data.get("speed"): + speed = tw_data["speed"] + log.debug("_ metadata speed: {}".format(speed)) + log.debug("_ speed: {}".format(speed)) log.debug("_ source_duration: {}".format(source_duration)) + log.debug("_ _clip_source_duration: {}".format(_clip_source_duration)) log.debug("_ _clip_record_duration: {}".format(_clip_record_duration)) # create media reference From 009d7fc1fb765f18cadf1782bd66a5c3b95c38ee Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 15:18:07 +0200 Subject: [PATCH 0130/2550] flame: speed should be float --- openpype/hosts/flame/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index a02acd85a7..a5ae3c4468 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -1184,7 +1184,7 @@ class TimeEffectMetadata(object): } if mode == 0: # speed - r_data[self._retime_modes[mode]] = int( + r_data[self._retime_modes[mode]] = float( tw_setup_state["TW_Speed"] [0]["Channel"][0]["Value"][0]["_text"] ) / 100 From 8eb5c1ccb30c6fb7bfb6cddd2eb82d3697a652c1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 15:37:16 +0200 Subject: [PATCH 0131/2550] flame: more frame debug printing --- openpype/hosts/flame/otio/flame_export.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index a111176e29..6d6b33d2a1 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -289,16 +289,20 @@ def create_otio_clip(clip_data): log.debug("__ tw_data: {}".format(tw_data)) # define first frame - first_frame = 
media_timecode_start or utils.get_frame_from_filename( - clip_data["fpath"]) or 0 + file_first_frame = utils.get_frame_from_filename( + clip_data["fpath"]) + if file_first_frame: + file_first_frame = int(file_first_frame) + + first_frame = media_timecode_start or file_first_frame or 0 _clip_source_in = int(clip_data["source_in"]) _clip_source_out = int(clip_data["source_out"]) - _clip_source_duration = clip_data["source_duration"] _clip_record_in = clip_data["record_in"] _clip_record_out = clip_data["record_out"] _clip_record_duration = int(clip_data["record_duration"]) + log.debug("_ file_first_frame: {}".format(file_first_frame)) log.debug("_ first_frame: {}".format(first_frame)) log.debug("_ _clip_source_in: {}".format(_clip_source_in)) log.debug("_ _clip_source_out: {}".format(_clip_source_out)) @@ -315,6 +319,15 @@ def create_otio_clip(clip_data): source_in = _clip_source_in - int(first_frame) source_out = _clip_source_out - int(first_frame) + log.debug("_ source_in: {}".format(source_in)) + log.debug("_ source_out: {}".format(source_out)) + + if file_first_frame: + log.debug("_ file_source_in: {}".format( + file_first_frame + source_in)) + log.debug("_ file_source_in: {}".format( + file_first_frame + source_out)) + source_duration = (source_out - source_in + 1) # secondly check if any change of speed @@ -330,7 +343,6 @@ def create_otio_clip(clip_data): log.debug("_ speed: {}".format(speed)) log.debug("_ source_duration: {}".format(source_duration)) - log.debug("_ _clip_source_duration: {}".format(_clip_source_duration)) log.debug("_ _clip_record_duration: {}".format(_clip_record_duration)) # create media reference From 4a18e50352cda46e3b8de09bd7a40df15ea1384d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Thu, 28 Jul 2022 17:42:52 +0200 Subject: [PATCH 0132/2550] Update openpype/hosts/nuke/plugins/publish/validate_script_attributes.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../nuke/plugins/publish/validate_script_attributes.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index d16660f272..3907f40991 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -26,11 +26,7 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): def process(self, instance): root = nuke.root() knob_data = get_avalon_knob_data(root) - project_name = legacy_io.active_project() - asset = get_asset_by_name( - project_name, - instance.context.data["asset"] - ) + asset = instance.data["assetEntity"] # get asset data frame values frame_start = asset["data"]["frameStart"] frame_end = asset["data"]["frameEnd"] From 6a51b4e7891829761b2c49353d17a7fc3423edcc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Jul 2022 17:49:29 +0200 Subject: [PATCH 0133/2550] hound suggestions --- .../hosts/nuke/plugins/publish/validate_output_resolution.py | 2 +- .../hosts/nuke/plugins/publish/validate_script_attributes.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index 710adde069..fc07e9b83b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -89,4 +89,4 
@@ class ValidateOutputResolution(pyblish.api.InstancePlugin): if cls.resolution_msg == invalid: reformat = cls.get_reformat(instance) reformat["format"].setValue(nuke.root()["format"].value()) - cls.log.info("I am fixing reformat to root.format") \ No newline at end of file + cls.log.info("I am fixing reformat to root.format") diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index 3907f40991..106d7a2524 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -2,8 +2,6 @@ from pprint import pformat import pyblish.api import openpype.api -from openpype.client import get_asset_by_name -from openpype.pipeline import legacy_io from openpype.pipeline import PublishXmlValidationError from openpype.hosts.nuke.api.lib import ( get_avalon_knob_data, From 2d601d051a9b59509c6af159c06f8424591af444 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 16:42:54 +0200 Subject: [PATCH 0134/2550] give ability to query by representation context and regex --- openpype/client/entities.py | 108 +++++++++++++++++++++++++++++++----- 1 file changed, 94 insertions(+), 14 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index dd5d831ecf..57c38784b0 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -7,6 +7,7 @@ that has project name as a context (e.g. on 'ProjectEntity'?). """ import os +import re import collections import six @@ -1035,17 +1036,70 @@ def get_representation_by_name( return conn.find_one(query_filter, _prepare_fields(fields)) +def _flatten_dict(data): + flatten_queue = collections.deque() + flatten_queue.append(data) + output = {} + while flatten_queue: + item = flatten_queue.popleft() + for key, value in item.items(): + if not isinstance(value, dict): + output[key] = value + continue + + tmp = {} + for subkey, subvalue in value.items(): + new_key = "{}.{}".format(key, subkey) + tmp[new_key] = subvalue + flatten_queue.append(tmp) + return output + + +def _regex_filters(filters): + output = [] + for key, value in filters.items(): + regexes = [] + a_values = [] + if isinstance(value, re.Pattern): + regexes.append(value) + elif isinstance(value, (list, tuple, set)): + for item in value: + if isinstance(item, re.Pattern): + regexes.append(item) + else: + a_values.append(item) + else: + a_values.append(value) + + key_filters = [] + if len(a_values) == 1: + key_filters.append({key: a_values[0]}) + elif a_values: + key_filters.append({key: {"$in": a_values}}) + + for regex in regexes: + key_filters.append({key: {"$regex": regex}}) + + if len(key_filters) == 1: + output.append(key_filters[0]) + else: + output.append({"$or": key_filters}) + + return output + + def _get_representations( project_name, representation_ids, representation_names, version_ids, - extensions, + context_filters, names_by_version_ids, standard, archived, fields ): + default_output = [] repre_types = [] if standard: repre_types.append("representation") @@ -1053,7 +1107,7 @@ def _get_representations( repre_types.append("archived_representation") if not repre_types: - return [] + return default_output if len(repre_types) == 1: query_filter = {"type": repre_types[0]} @@ -1063,25 +1117,21 @@ def _get_representations( if representation_ids is not None: representation_ids = _convert_ids(representation_ids) if not representation_ids: - return [] + return default_output 
query_filter["_id"] = {"$in": representation_ids} if representation_names is not None: if not representation_names: - return [] + return default_output query_filter["name"] = {"$in": list(representation_names)} if version_ids is not None: version_ids = _convert_ids(version_ids) if not version_ids: - return [] + return default_output query_filter["parent"] = {"$in": version_ids} - if extensions is not None: - if not extensions: - return [] - query_filter["context.ext"] = {"$in": list(extensions)} - + or_queries = [] if names_by_version_ids is not None: or_query = [] for version_id, names in names_by_version_ids.items(): @@ -1091,8 +1141,35 @@ def _get_representations( "name": {"$in": list(names)} }) if not or_query: + return default_output + or_queries.append(or_query) + + if context_filters is not None: + if not context_filters: return [] - query_filter["$or"] = or_query + _flatten_filters = _flatten_dict(context_filters) + flatten_filters = {} + for key, value in _flatten_filters.items(): + if not key.startswith("context"): + key = "context.{}".format(key) + flatten_filters[key] = value + + for item in _regex_filters(flatten_filters): + for key, value in item.items(): + if key == "$or": + or_queries.append(value) + else: + query_filter[key] = value + + if len(or_queries) == 1: + query_filter["$or"] = or_queries[0] + elif or_queries: + and_query = [] + for or_query in or_queries: + if isinstance(or_query, list): + or_query = {"$or": or_query} + and_query.append(or_query) + query_filter["$and"] = and_query conn = get_project_connection(project_name) @@ -1104,7 +1181,7 @@ def get_representations( representation_ids=None, representation_names=None, version_ids=None, - extensions=None, + context_filters=None, names_by_version_ids=None, archived=False, standard=True, @@ -1122,8 +1199,8 @@ def get_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - extensions (Iterable[str]): Filter by extension of main representation - file (without dot). + context_filters (Dict[str, List[str, re.Pattern]]): Filter by + representation context fields. names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering using version ids and list of names under the version. archived (bool): Output will also contain archived representations. 
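
A minimal usage sketch of the new context_filters argument documented above; the project name and filter values here are made up, and plain strings can be mixed with compiled patterns:

    import re

    from openpype.client import get_representations

    repre_docs = list(get_representations(
        "my_project",
        context_filters={
            "asset": ["hero_char"],
            "subset": [re.compile("render.*")],
            "ext": ["exr", "dpx"],
        },
    ))
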
@@ -1140,6 +1217,7 @@ def get_representations( representation_names=representation_names, version_ids=version_ids, extensions=extensions, + context_filters=context_filters, names_by_version_ids=names_by_version_ids, standard=True, archived=archived, @@ -1153,6 +1231,7 @@ def get_archived_representations( representation_names=None, version_ids=None, extensions=None, + context_filters=None, names_by_version_ids=None, fields=None ): @@ -1185,6 +1264,7 @@ def get_archived_representations( representation_names=representation_names, version_ids=version_ids, extensions=extensions, + context_filters=context_filters, names_by_version_ids=names_by_version_ids, standard=False, archived=True, From 5c8eac6b6357fa80859ffbed45be41cf8ae106da Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:07:57 +0200 Subject: [PATCH 0135/2550] OP-3405 - replaced find with get_representations --- .../modules/sync_server/sync_server_module.py | 32 +++++++------------ 1 file changed, 12 insertions(+), 20 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 4027561d22..81aff9368f 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -25,6 +25,8 @@ from .providers import lib from .utils import time_function, SyncStatus, SiteAlreadyPresentError +from openpype.client import get_representations + log = PypeLogger.get_logger("SyncServer") @@ -344,6 +346,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "files.sites.name": site_name } + # TODO currently not possible to replace with get_representations representations = list( self.connection.database[collection].find(query)) if not representations: @@ -391,12 +394,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ self.log.debug("Validation of {} for {} started".format(collection, site_name)) - query = { - "type": "representation" - } - - representations = list( - self.connection.database[collection].find(query)) + representations = list(get_representations(collection)) if not representations: self.log.debug("No repre found") return @@ -1593,14 +1591,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - query = { - "_id": ObjectId(representation_id) - } - - representation = self.connection.database[collection].find_one(query) - if not representation: + representations = get_representations(collection, [representation_id]) + if not representations: raise ValueError("Representation {} not found in {}". 
format(representation_id, collection)) + representation = representations[0] if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") @@ -1808,18 +1803,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): provider_name = self.get_provider_for_site(site=site_name) if provider_name == 'local_drive': - query = { - "_id": ObjectId(representation_id) - } - - representation = list( - self.connection.database[collection].find(query)) - if not representation: + representations = list(get_representations(collection, + [representation_id], + fields=["files"])) + if not representations: self.log.debug("No repre {} found".format( representation_id)) return - representation = representation.pop() + representation = representations.pop() local_file_path = '' for file in representation.get("files"): local_file_path = self.get_local_file_path(collection, From c65dd9747f5197868a9153fc109915ed654122ab Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:15:13 +0200 Subject: [PATCH 0136/2550] added new method 'get_representations' to get representations from placeholder --- .../workfile/abstract_template_loader.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 00bc8f15a7..0a422f5cca 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -456,8 +456,25 @@ class AbstractPlaceholder: @abstractmethod def clean(self): - """Clean placeholder from hierarchy after loading assets. + """Clean placeholder from hierarchy after loading assets.""" + + pass + + @abstractmethod + def get_representations(self, current_asset, linked_assets): + """Query representations based on placeholder data. + + Args: + current_asset (str): Name of current + context asset. + linked_assets (List[str]): Names of assets + linked to current context asset. + + Returns: + Iterable[Dict[str, Any]]: Representations that are matching + placeholder filters. """ + pass @abstractmethod From da8e25f4a1b7ea89bf9c7cac62c8a3ea10fbb9e6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:16:23 +0200 Subject: [PATCH 0137/2550] use 'get_representations' instead of 'convert_to_db_filters' --- .../workfile/abstract_template_loader.py | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 0a422f5cca..a2505c061e 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -280,19 +280,16 @@ class AbstractTemplateLoader: def get_placeholder_representations( self, placeholder, current_asset, linked_assets ): - # TODO This approach must be changed. Placeholders should return - # already prepared data and not query them here. 
- # - this is impossible to handle using query functions - placeholder_db_filters = placeholder.convert_to_db_filters( + placeholder_representations = placeholder.get_representations( current_asset, - linked_assets) - # get representation by assets - for db_filter in placeholder_db_filters: - placeholder_representations = list(legacy_io.find(db_filter)) - for representation in reduce(update_representations, - placeholder_representations, - dict()).values(): - yield representation + linked_assets + ) + for repre_doc in reduce( + update_representations, + placeholder_representations, + dict() + ).values(): + yield repre_doc def load_data_is_incorrect( self, placeholder, last_representation, ignored_ids): From c944ae35c9848045cfb73ccfc1b93f30f7af2989 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:17:03 +0200 Subject: [PATCH 0138/2550] OP-3405 - replaced find with get_representation_by_id --- openpype/modules/sync_server/tray/models.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index 6d1e85c17a..a97797c920 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -11,6 +11,7 @@ from openpype.tools.utils.delegates import pretty_timestamp from openpype.lib import PypeLogger from openpype.api import get_local_site_id +from openpype.client import get_representation_by_id from . import lib @@ -919,8 +920,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): repre_id = self.data(index, Qt.UserRole) - representation = list(self.dbcon.find({"type": "representation", - "_id": repre_id})) + representation = get_representation_by_id(self.project, repre_id) if representation: self.sync_server.update_db(self.project, None, None, representation.pop(), @@ -1357,11 +1357,10 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel): file_id = self.data(index, Qt.UserRole) updated_file = None - # conversion from cursor to list - representations = list(self.dbcon.find({"type": "representation", - "_id": self._id})) + representation = get_representation_by_id(self.project, self._id) + if not representation: + return - representation = representations.pop() for repre_file in representation["files"]: if repre_file["_id"] == file_id: updated_file = repre_file From 0e0cec5e0146a3001a4a349360324346fd0ab961 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:19:47 +0200 Subject: [PATCH 0139/2550] pass asset documents instead of just names --- .../workfile/abstract_template_loader.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index a2505c061e..96012eba36 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -223,10 +223,10 @@ class AbstractTemplateLoader: Returns: None """ + loaders_by_name = self.loaders_by_name - current_asset = self.current_asset - linked_assets = [asset['name'] for asset - in get_linked_assets(self.current_asset_doc)] + current_asset_doc = self.current_asset_doc + linked_assets = get_linked_assets(current_asset_doc) ignored_ids = ignored_ids or [] placeholders = self.get_placeholders() @@ -239,7 +239,7 @@ class AbstractTemplateLoader: )) placeholder_representations = self.get_placeholder_representations( placeholder, - current_asset, + 
current_asset_doc, linked_assets ) @@ -278,11 +278,11 @@ class AbstractTemplateLoader: self.postload(placeholder) def get_placeholder_representations( - self, placeholder, current_asset, linked_assets + self, placeholder, current_asset_doc, linked_asset_docs ): placeholder_representations = placeholder.get_representations( - current_asset, - linked_assets + current_asset_doc, + linked_asset_docs ) for repre_doc in reduce( update_representations, @@ -458,13 +458,13 @@ class AbstractPlaceholder: pass @abstractmethod - def get_representations(self, current_asset, linked_assets): + def get_representations(self, current_asset_doc, linked_asset_docs): """Query representations based on placeholder data. Args: - current_asset (str): Name of current + current_asset_doc (Dict[str, Any]): Document of current context asset. - linked_assets (List[str]): Names of assets + linked_asset_docs (List[Dict[str, Any]]): Documents of assets linked to current context asset. Returns: From ef674857f85f360954b4d6e2c6f6c0c4acf3f711 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:29:34 +0200 Subject: [PATCH 0140/2550] implemented get_representations for maya placeholder --- openpype/hosts/maya/api/template_loader.py | 80 +++++++++++----------- 1 file changed, 41 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py index 6b225442e7..f553730186 100644 --- a/openpype/hosts/maya/api/template_loader.py +++ b/openpype/hosts/maya/api/template_loader.py @@ -1,5 +1,7 @@ +import re from maya import cmds +from openpype.client import get_representations from openpype.pipeline import legacy_io from openpype.pipeline.workfile.abstract_template_loader import ( AbstractPlaceholder, @@ -191,48 +193,48 @@ class MayaPlaceholder(AbstractPlaceholder): cmds.hide(node) cmds.setAttr(node + '.hiddenInOutliner', True) - def convert_to_db_filters(self, current_asset, linked_asset): - if self.data['builder_type'] == "context_asset": - return [ - { - "type": "representation", - "context.asset": { - "$eq": current_asset, - "$regex": self.data['asset'] - }, - "context.subset": {"$regex": self.data['subset']}, - "context.hierarchy": {"$regex": self.data['hierarchy']}, - "context.representation": self.data['representation'], - "context.family": self.data['family'], - } - ] + def get_representations(self, current_asset_doc, linked_asset_docs): + project_name = legacy_io.active_project() - elif self.data['builder_type'] == "linked_asset": - return [ - { - "type": "representation", - "context.asset": { - "$eq": asset_name, - "$regex": self.data['asset'] - }, - "context.subset": {"$regex": self.data['subset']}, - "context.hierarchy": {"$regex": self.data['hierarchy']}, - "context.representation": self.data['representation'], - "context.family": self.data['family'], - } for asset_name in linked_asset - ] + builder_type = self.data["builder_type"] + if builder_type == "context_asset": + context_filters = { + "asset": [current_asset_doc["name"]], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representations": [self.data["representation"]], + "family": [self.data["family"]] + } + + elif builder_type != "linked_asset": + context_filters = { + "asset": [re.compile(self.data["asset"])], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]] + } else: - return [ - { - "type": 
"representation", - "context.asset": {"$regex": self.data['asset']}, - "context.subset": {"$regex": self.data['subset']}, - "context.hierarchy": {"$regex": self.data['hierarchy']}, - "context.representation": self.data['representation'], - "context.family": self.data['family'], - } - ] + asset_regex = re.compile(self.data["asset"]) + linked_asset_names = [] + for asset_doc in linked_asset_docs: + asset_name = asset_doc["name"] + if asset_regex.match(asset_name): + linked_asset_names.append(asset_name) + + context_filters = { + "asset": linked_asset_names, + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]], + } + + return list(get_representations( + project_name, + context_filters=context_filters + )) def err_message(self): return ( From a6406f72d36d8eb748404af8fc6e6d61c6c6b451 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:39:57 +0200 Subject: [PATCH 0141/2550] added logger to placeholder --- .../pipeline/workfile/abstract_template_loader.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 96012eba36..d934c50daf 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -397,8 +397,19 @@ class AbstractPlaceholder: optional_attributes = {} def __init__(self, node): + self._log = None self.get_data(node) + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + def __repr__(self): + return "< {} {} >".format(self.__class__.__name__, self.name) + + def order(self): """Get placeholder order. 
Order is used to sort them by priority @@ -436,9 +447,9 @@ class AbstractPlaceholder: Bool: True if every attributes are a key of data """ if set(self.attributes).issubset(self.data.keys()): - print("Valid placeholder : {}".format(self.data["node"])) + self.log.debug("Valid placeholder: {}".format(self.data["node"])) return True - print("Placeholder is not valid : {}".format(self.data["node"])) + self.log.info("Placeholder is not valid: {}".format(self.data["node"])) return False @abstractmethod From 292d071f442a494cabd2161512012b13e391a9f8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:39:59 +0200 Subject: [PATCH 0142/2550] OP-3405 - query is required for updates --- openpype/modules/sync_server/sync_server_module.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 81aff9368f..6a3dbf6095 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1611,6 +1611,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} + query = { + "_id": ObjectId(representation_id) + } + if file_id: # reset site for particular file self._reset_site_for_file(collection, query, elem, file_id, site_name) From 8b7531b97775d3facefde41682dd19e9dd3e11f6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:44:03 +0200 Subject: [PATCH 0143/2550] added helper attributes to placeholder so there is no need to access it's 'data' --- .../workfile/abstract_template_loader.py | 50 +++++++++++++------ 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index d934c50daf..5ecc154ea4 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -231,11 +231,11 @@ class AbstractTemplateLoader: ignored_ids = ignored_ids or [] placeholders = self.get_placeholders() self.log.debug("Placeholders found in template: {}".format( - [placeholder.data['node'] for placeholder in placeholders] + [placeholder.name] for placeholder in placeholders] )) for placeholder in placeholders: self.log.debug("Start to processing placeholder {}".format( - placeholder.data['node'] + placeholder.name )) placeholder_representations = self.get_placeholder_representations( placeholder, @@ -246,7 +246,7 @@ class AbstractTemplateLoader: if not placeholder_representations: self.log.info( "There's no representation for this placeholder: " - "{}".format(placeholder.data['node']) + "{}".format(placeholder.name) ) continue @@ -264,8 +264,8 @@ class AbstractTemplateLoader: "Loader arguments used : {}".format( representation['context']['asset'], representation['context']['subset'], - placeholder.loader, - placeholder.data['loader_args'])) + placeholder.loader_name, + placeholder.loader_args)) try: container = self.load( @@ -307,19 +307,22 @@ class AbstractTemplateLoader: def load(self, placeholder, loaders_by_name, last_representation): repre = get_representation_context(last_representation) return load_with_repre_context( - loaders_by_name[placeholder.loader], + loaders_by_name[placeholder.loader_name], repre, - options=parse_loader_args(placeholder.data['loader_args'])) + options=parse_loader_args(placeholder.loader_args)) def load_succeed(self, placeholder, container): placeholder.parent_in_hierarchy(container) def load_failed(self, 
placeholder, last_representation): - self.log.warning("Got error trying to load {}:{} with {}\n\n" - "{}".format(last_representation['context']['asset'], - last_representation['context']['subset'], - placeholder.loader, - traceback.format_exc())) + self.log.warning( + "Got error trying to load {}:{} with {}".format( + last_representation['context']['asset'], + last_representation['context']['subset'], + placeholder.loader_name + ), + exc_info=True + ) def postload(self, placeholder): placeholder.clean() @@ -398,6 +401,7 @@ class AbstractPlaceholder: def __init__(self, node): self._log = None + self._name = node self.get_data(node) @property @@ -409,6 +413,17 @@ class AbstractPlaceholder: def __repr__(self): return "< {} {} >".format(self.__class__.__name__, self.name) + @property + def name(self): + return self._name + + @property + def loader_args(self): + return self.data["loader_args"] + + @property + def builder_type(self): + return self.data["builder_type"] def order(self): """Get placeholder order. @@ -423,12 +438,15 @@ class AbstractPlaceholder: return self.data.get('order') @property - def loader(self): - """Return placeholder loader type + def loader_name(self): + """Return placeholder loader type. + Returns: - string: Loader name + str: Loader name that will be used to load placeholder + representations. """ - return self.data.get('loader') + + return self.data["loader"] @property def is_context(self): From 2d7910a26410936f1d23282b9011780cccfc8680 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:46:14 +0200 Subject: [PATCH 0144/2550] renamed 'attributes' to 'required_keys' and 'optional_attributes' to 'optional_keys' --- openpype/hosts/maya/api/template_loader.py | 8 +++-- .../workfile/abstract_template_loader.py | 35 ++++++++++++------- 2 files changed, 28 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py index f553730186..ecffafc93d 100644 --- a/openpype/hosts/maya/api/template_loader.py +++ b/openpype/hosts/maya/api/template_loader.py @@ -98,11 +98,11 @@ class MayaPlaceholder(AbstractPlaceholder): """Concrete implementation of AbstractPlaceholder for maya """ - optional_attributes = {'asset', 'subset', 'hierarchy'} + optional_keys = {'asset', 'subset', 'hierarchy'} def get_data(self, node): user_data = dict() - for attr in self.attributes.union(self.optional_attributes): + for attr in self.required_keys.union(self.optional_keys): attribute_name = '{}.{}'.format(node, attr) if not cmds.attributeQuery(attr, node=node, exists=True): print("{} not found".format(attribute_name)) @@ -112,7 +112,9 @@ class MayaPlaceholder(AbstractPlaceholder): asString=True) user_data['parent'] = ( cmds.getAttr(node + '.parent', asString=True) - or node.rpartition('|')[0] or "") + or node.rpartition('|')[0] + or "" + ) user_data['node'] = node if user_data['parent']: siblings = cmds.listRelatives(user_data['parent'], children=True) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 5ecc154ea4..56fb31fa0c 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -377,15 +377,17 @@ class AbstractTemplateLoader: @six.add_metaclass(ABCMeta) class AbstractPlaceholder: - """Abstraction of placeholders logic + """Abstraction of placeholders logic. 
+ Properties: - attributes: A list of mandatory attribute to decribe placeholder + required_keys: A list of mandatory keys to decribe placeholder and assets to load. - optional_attributes: A list of optional attribute to decribe + optional_keys: A list of optional keys to decribe placeholder and assets to load loader: Name of linked loader to use while loading assets is_context: Is placeholder linked to context asset (or to linked assets) + Methods: is_repres_valid: loader: @@ -395,9 +397,15 @@ class AbstractPlaceholder: parent_in_hierachy: """ - attributes = {'builder_type', 'family', 'representation', - 'order', 'loader', 'loader_args'} - optional_attributes = {} + required_keys = { + "builder_type", + "family", + "representation", + "order", + "loader", + "loader_args" + } + optional_keys = {} def __init__(self, node): self._log = None @@ -459,15 +467,18 @@ class AbstractPlaceholder: return self.data.get('builder_type') == 'context_asset' def is_valid(self): - """Test validity of placeholder - i.e.: every attributes exists in placeholder data + """Test validity of placeholder. + + i.e.: every required key exists in placeholder data + Returns: - Bool: True if every attributes are a key of data + bool: True if every key is in data """ - if set(self.attributes).issubset(self.data.keys()): - self.log.debug("Valid placeholder: {}".format(self.data["node"])) + + if set(self.required_keys).issubset(self.data.keys()): + self.log.debug("Valid placeholder : {}".format(self.name)) return True - self.log.info("Placeholder is not valid: {}".format(self.data["node"])) + self.log.info("Placeholder is not valid : {}".format(self.name)) return False @abstractmethod From 736123d1c2496df1604d1b0c84df5a2646cc51f9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 17:48:47 +0200 Subject: [PATCH 0145/2550] modified 'is_context' property --- .../pipeline/workfile/abstract_template_loader.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 56fb31fa0c..a1d188ea6c 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -458,14 +458,22 @@ class AbstractPlaceholder: @property def is_context(self): - """Return placeholder type + """Check if is placeholder context type. + context_asset: For loading current asset linked_asset: For loading linked assets + + Question: + There seems to be more build options and this property is not used, + should be removed? + Returns: bool: true if placeholder is a context placeholder """ - return self.data.get('builder_type') == 'context_asset' + return self.builder_type == "context_asset" + + @property def is_valid(self): """Test validity of placeholder. 
From 0f5ec0f0c4cbd4db8c4968db75f6375b6bdf7f59 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 29 Jul 2022 17:54:51 +0200 Subject: [PATCH 0146/2550] OP-3405 - used get_representation_by_id --- .../modules/sync_server/sync_server_module.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 6a3dbf6095..71e35c7839 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -25,7 +25,7 @@ from .providers import lib from .utils import time_function, SyncStatus, SiteAlreadyPresentError -from openpype.client import get_representations +from openpype.client import get_representations, get_representation_by_id log = PypeLogger.get_logger("SyncServer") @@ -1591,11 +1591,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - representations = get_representations(collection, [representation_id]) - if not representations: + representation = get_representation_by_id(collection, + representation_id) + if not representation: raise ValueError("Representation {} not found in {}". format(representation_id, collection)) - representation = representations[0] + if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") @@ -1807,15 +1808,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): provider_name = self.get_provider_for_site(site=site_name) if provider_name == 'local_drive': - representations = list(get_representations(collection, - [representation_id], - fields=["files"])) - if not representations: + representation = get_representation_by_id(collection, + representation_id, + fields=["files"]) + if not representation: self.log.debug("No repre {} found".format( representation_id)) return - representation = representations.pop() local_file_path = '' for file in representation.get("files"): local_file_path = self.get_local_file_path(collection, From ccdff822a54c6bf146ad1a8a9b2206c319967719 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 18:11:11 +0200 Subject: [PATCH 0147/2550] moved 'get_project_database' and 'get_project_connection' to mongo --- openpype/client/entities.py | 30 ++---------------------------- openpype/client/mongo.py | 25 +++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 28 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index dd5d831ecf..0e94b99ae6 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,38 +6,12 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. """ -import os import collections import six from bson.objectid import ObjectId -from .mongo import OpenPypeMongoConnection - - -def _get_project_database(): - db_name = os.environ.get("AVALON_DB") or "avalon" - return OpenPypeMongoConnection.get_mongo_client()[db_name] - - -def get_project_connection(project_name): - """Direct access to mongo collection. - - We're trying to avoid using direct access to mongo. This should be used - only for Create, Update and Remove operations until there are implemented - api calls for that. - - Args: - project_name(str): Project name for which collection should be - returned. - - Returns: - pymongo.Collection: Collection realated to passed project. 
- """ - - if not project_name: - raise ValueError("Invalid project name {}".format(str(project_name))) - return _get_project_database()[project_name] +from .mongo import get_project_database, get_project_connection def _prepare_fields(fields, required_fields=None): @@ -72,7 +46,7 @@ def _convert_ids(in_ids): def get_projects(active=True, inactive=False, fields=None): - mongodb = _get_project_database() + mongodb = get_project_database() for project_name in mongodb.collection_names(): if project_name in ("system.indexes",): continue diff --git a/openpype/client/mongo.py b/openpype/client/mongo.py index a747250107..72acbc5476 100644 --- a/openpype/client/mongo.py +++ b/openpype/client/mongo.py @@ -208,3 +208,28 @@ class OpenPypeMongoConnection: mongo_url, time.time() - t1 )) return mongo_client + + +def get_project_database(): + db_name = os.environ.get("AVALON_DB") or "avalon" + return OpenPypeMongoConnection.get_mongo_client()[db_name] + + +def get_project_connection(project_name): + """Direct access to mongo collection. + + We're trying to avoid using direct access to mongo. This should be used + only for Create, Update and Remove operations until there are implemented + api calls for that. + + Args: + project_name(str): Project name for which collection should be + returned. + + Returns: + pymongo.Collection: Collection realated to passed project. + """ + + if not project_name: + raise ValueError("Invalid project name {}".format(str(project_name))) + return get_project_database()[project_name] From c429a41188c614570e9d1d39cd6605897fbfaf38 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 18:12:43 +0200 Subject: [PATCH 0148/2550] added initial variant of operations --- openpype/client/operations.py | 249 ++++++++++++++++++++++++++++++++++ 1 file changed, 249 insertions(+) create mode 100644 openpype/client/operations.py diff --git a/openpype/client/operations.py b/openpype/client/operations.py new file mode 100644 index 0000000000..365833b318 --- /dev/null +++ b/openpype/client/operations.py @@ -0,0 +1,249 @@ +import uuid +import copy +from abc import ABCMeta, abstractmethod + +import six +from bson.objectid import ObjectId +from pymongo import DeleteOne, InsertOne, UpdateOne + +from .mongo import get_project_connection + +REMOVED_VALUE = object() + + +@six.add_metaclass(ABCMeta) +class AbstractOperation(object): + """Base operation class.""" + + def __init__(self, entity_type): + self._entity_type = entity_type + self._id = uuid.uuid4() + + @property + def id(self): + return self._id + + @property + def entity_type(self): + return self._entity_type + + @abstractmethod + def to_mongo_operation(self): + pass + + +class CreateOperation(AbstractOperation): + def __init__(self, project_name, entity_type, data): + super(CreateOperation, self).__init__(entity_type) + + if not data: + data = {} + else: + data = copy.deepcopy(dict(data)) + + if "_id" not in data: + data["_id"] = ObjectId() + else: + data["_id"] = ObjectId(data["_id"]) + + self._entity_id = data["_id"] + self._data = data + + def __setitem__(self, key, value): + self.set_value(key, value) + + def __getitem__(self, key): + return self.data[key] + + def set_value(self, key, value): + self.data[key] = value + + def get(self, key, *args, **kwargs): + return self.data.get(key, *args, **kwargs) + + @property + def entity_id(self): + return self._entity_id + + @property + def data(self): + return self._data + + def to_mongo_operation(self): + return InsertOne(copy.deepcopy(self._data)) + + def to_data(self): + return { + 
"operation": "create", + "entity_type": self.entity_type, + "data": copy.deepcopy(self.data) + } + + +class UpdateOperation(AbstractOperation): + def __init__(self, project_name, entity_type, entity_id, update_fields): + super(CreateOperation, self).__init__(entity_type) + + self._entity_id = ObjectId(entity_id) + self._update_fields = update_fields + + @property + def entity_id(self): + return self._entity_id + + @property + def update_fields(self): + return self._update_fields + + def to_mongo_operation(self): + unset_data = {} + set_data = {} + for key, value in self._update_fields.items(): + if value is REMOVED_VALUE: + unset_data[key] = value + else: + set_data[key] = value + + op_data = {} + if unset_data: + op_data["$unset"] = unset_data + if set_data: + op_data["$set"] = set_data + + if not op_data: + return None + + return UpdateOne( + {"_id": self.entity_id}, + op_data + ) + + def to_data(self): + fields = {} + for key, value in self._update_fields.items(): + if value is REMOVED_VALUE: + value = None + fields[key] = value + + return { + "operation": "update", + "entity_type": self.entity_type, + "entity_id": str(self.entity_id), + "fields": fields + } + + +class DeleteOperation(AbstractOperation): + def __init__(self, entity_type, entity_id): + super(DeleteOperation, self).__init__(entity_type) + + self._entity_id = ObjectId(entity_id) + + @property + def entity_id(self): + return self._entity_id + + def to_mongo_operation(self): + return DeleteOne({"_id": self.entity_id}) + + def to_data(self): + return { + "operation": "delete", + "entity_type": self.entity_type, + "entity_id": str(self.entity_id) + } + + +class OperationsSession(object): + """Session storing operations that should happen in an order. + + At this moment does not handle anything special can be sonsidered as + stupid list of operations that will happen after each other. If creation + of same entity is there multiple times it's handled in any way and document + values are not validated. + + All operations must be related to single project. + + Args: + project_name (str): Project name to which are operations related. + """ + + def __init__(self, project_name): + self._project_name = project_name + self._operations = [] + + @property + def project_name(self): + return self._project_name + + def add(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + if not isinstance( + operation, + (CreateOperation, UpdateOperation, DeleteOperation) + ): + raise TypeError("Expected Operation object got {}".format( + str(type(operation)) + )) + + self._operations.append(operation) + + def append(self, operation): + """Add operation to be processed. + + Args: + operation (BaseOperation): Operation that should be processed. + """ + + self.add(operation) + + def extend(self, operations): + """Add operations to be processed. + + Args: + operations (List[BaseOperation]): Operations that should be + processed. 
+ """ + + for operation in operations: + self.add(operation) + + def remove(self, operation): + """Remove operation.""" + + self._operations.remove(operation) + + def clear(self): + """Clear all registered operations.""" + + self._operations = [] + + def to_data(self): + return { + "project_name": self.project_name, + "operations": [ + operation.to_data() + for operation in self._operations + ] + } + + def commit(self): + """Commit session operations.""" + + operations, self._operations = self._operations, [] + if not operations: + return + + bulk_writes = [] + for operation in operations: + mongo_op = operation.to_mongo_operation() + if mongo_op is not None: + bulk_writes.append(mongo_op) + + if bulk_writes: + collection = get_project_connection(self.project_name) + collection.bulk_write(bulk_writes) From cc5abb15142a7c9d31d5602ba6434f9f534a670e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:20:47 +0200 Subject: [PATCH 0149/2550] few minor modifications and changes --- openpype/client/operations.py | 125 +++++++++++++++++++++------------- 1 file changed, 79 insertions(+), 46 deletions(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 365833b318..517a53c27f 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -15,9 +15,14 @@ REMOVED_VALUE = object() class AbstractOperation(object): """Base operation class.""" - def __init__(self, entity_type): + def __init__(self, project_name, entity_type): + self._project_name = project_name self._entity_type = entity_type - self._id = uuid.uuid4() + self._id = str(uuid.uuid4()) + + @property + def project_name(self): + return self._project_name @property def id(self): @@ -27,14 +32,28 @@ class AbstractOperation(object): def entity_type(self): return self._entity_type + @abstractproperty + def operation_name(self): + pass + @abstractmethod def to_mongo_operation(self): pass + def to_data(self): + return { + "id": self._id, + "entity_type": self.entity_type, + "project_name": self.project_name, + "operation": self.operation_name + } + class CreateOperation(AbstractOperation): + operation_name = "create" + def __init__(self, project_name, entity_type, data): - super(CreateOperation, self).__init__(entity_type) + super(CreateOperation, self).__init__(project_name, entity_type) if not data: data = {} @@ -73,32 +92,32 @@ class CreateOperation(AbstractOperation): return InsertOne(copy.deepcopy(self._data)) def to_data(self): - return { - "operation": "create", - "entity_type": self.entity_type, - "data": copy.deepcopy(self.data) - } + output = super(CreateOperation, self).to_data() + output["data"] = copy.deepcopy(self.data) + return output class UpdateOperation(AbstractOperation): - def __init__(self, project_name, entity_type, entity_id, update_fields): - super(CreateOperation, self).__init__(entity_type) + operation_name = "update" + + def __init__(self, project_name, entity_type, entity_id, update_data): + super(UpdateOperation, self).__init__(project_name, entity_type) self._entity_id = ObjectId(entity_id) - self._update_fields = update_fields + self._update_data = update_data @property def entity_id(self): return self._entity_id @property - def update_fields(self): - return self._update_fields + def update_data(self): + return self._update_data def to_mongo_operation(self): unset_data = {} set_data = {} - for key, value in self._update_fields.items(): + for key, value in self._update_data.items(): if value is REMOVED_VALUE: unset_data[key] = value else: @@ -120,22 
+139,24 @@ class UpdateOperation(AbstractOperation): def to_data(self): fields = {} - for key, value in self._update_fields.items(): + for key, value in self._update_data.items(): if value is REMOVED_VALUE: value = None fields[key] = value - return { - "operation": "update", - "entity_type": self.entity_type, + output = super(UpdateOperation, self).to_data() + output.update({ "entity_id": str(self.entity_id), "fields": fields - } + }) + return output class DeleteOperation(AbstractOperation): - def __init__(self, entity_type, entity_id): - super(DeleteOperation, self).__init__(entity_type) + operation_name = "delete" + + def __init__(self, project_name, entity_type, entity_id): + super(DeleteOperation, self).__init__(project_name, entity_type) self._entity_id = ObjectId(entity_id) @@ -147,11 +168,9 @@ class DeleteOperation(AbstractOperation): return DeleteOne({"_id": self.entity_id}) def to_data(self): - return { - "operation": "delete", - "entity_type": self.entity_type, - "entity_id": str(self.entity_id) - } + output = super(DeleteOperation, self).to_data() + output["entity_id"] = self.entity_id + return output class OperationsSession(object): @@ -168,14 +187,9 @@ class OperationsSession(object): project_name (str): Project name to which are operations related. """ - def __init__(self, project_name): - self._project_name = project_name + def __init__(self): self._operations = [] - @property - def project_name(self): - return self._project_name - def add(self, operation): """Add operation to be processed. @@ -223,13 +237,10 @@ class OperationsSession(object): self._operations = [] def to_data(self): - return { - "project_name": self.project_name, - "operations": [ - operation.to_data() - for operation in self._operations - ] - } + return [ + operation.to_data() + for operation in self._operations + ] def commit(self): """Commit session operations.""" @@ -238,12 +249,34 @@ class OperationsSession(object): if not operations: return - bulk_writes = [] + operations_by_project = collections.defaultdict(list) for operation in operations: - mongo_op = operation.to_mongo_operation() - if mongo_op is not None: - bulk_writes.append(mongo_op) + operations_by_project[operation.project_name].append(operation) - if bulk_writes: - collection = get_project_connection(self.project_name) - collection.bulk_write(bulk_writes) + for project_name, operations in operations_by_project.items(): + bulk_writes = [] + for operation in operations: + mongo_op = operation.to_mongo_operation() + if mongo_op is not None: + bulk_writes.append(mongo_op) + + if bulk_writes: + collection = get_project_connection(project_name) + collection.bulk_write(bulk_writes) + + def create_entity(self, project_name, entity_type, data): + operation = CreateOperation(project_name, entity_type, data) + self.add(operation) + return operation + + def update_entity(self, project_name, entity_type, entity_id, update_data): + operation = UpdateOperation( + project_name, entity_type, entity_id, update_data + ) + self.add(operation) + return operation + + def delete_entity(self, project_name, entity_type, entity_id): + operation = DeleteOperation(project_name, entity_type, entity_id) + self.add(operation) + return operation From f39623d99138bee79021e87f476c7abca14e1bb2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:22:14 +0200 Subject: [PATCH 0150/2550] added helper functions to create new documents --- openpype/client/operations.py | 126 +++++++++++++++++++++++++++++++++- 1 file changed, 125 insertions(+), 1 deletion(-) diff 
--git a/openpype/client/operations.py b/openpype/client/operations.py index 517a53c27f..db3071abef 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -1,6 +1,8 @@ +import re import uuid import copy -from abc import ABCMeta, abstractmethod +import collections +from abc import ABCMeta, abstractmethod, abstractproperty import six from bson.objectid import ObjectId @@ -10,6 +12,128 @@ from .mongo import get_project_connection REMOVED_VALUE = object() +CURRENT_PROJECT_SCHEMA = "openpype:project-3.0" +CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" +CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" +CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" +CURRENT_VERSION_SCHEMA = "openpype:version-3.0" +CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" + + +def _create_or_convert_to_mongo_id(mongo_id): + if mongo_id is None: + return ObjectId() + return ObjectId(mongo_id) + + +def new_project_document( + project_name, project_code, config, data=None, entity_id=None +): + if data is None: + data = {} + + data["code"] = project_code + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "name": project_name, + "type": CURRENT_PROJECT_SCHEMA, + "data": data, + "config": config + } + + +def new_asset_document( + name, project_id, parent_id, parents, data=None, entity_id=None +): + if data is None: + data = {} + if parent_id is not None: + parent_id = ObjectId(parent_id) + data["visualParent"] = parent_id + data["parents"] = parents + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "asset", + "name": name, + "parent": ObjectId(project_id), + "data": data, + "schema": CURRENT_ASSET_DOC_SCHEMA + } + + +def new_subset_document(name, family, asset_id, data=None, entity_id=None): + if data is None: + data = {} + data["family"] = family + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_SUBSET_SCHEMA, + "type": "subset", + "name": name, + "data": data, + "parent": asset_id + } + + +def new_version_doc(version, subset_id, data=None, entity_id=None): + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_VERSION_SCHEMA, + "type": "version", + "name": int(version), + "parent": subset_id, + "data": data + } + + +def new_representation_doc( + name, version_id, context, data=None, entity_id=None +): + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_REPRESENTATION_SCHEMA, + "type": "representation", + "parent": version_id, + "name": name, + "data": data, + + # Imprint shortcut to context for performance reasons. 
+ "context": context + } + + +def _prepare_update_data(old_doc, new_doc, replace): + changes = {} + for key, value in new_doc.items(): + if key not in old_doc or value != old_doc[key]: + changes[key] = value + + if replace: + for key in old_doc.keys(): + if key not in new_doc: + changes[key] = REMOVED_VALUE + return changes + + +def prepare_subset_update_data(old_doc, new_doc, replace=True): + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_version_update_data(old_doc, new_doc, replace=True): + return _prepare_update_data(old_doc, new_doc, replace) + + +def prepare_representation_update_data(old_doc, new_doc, replace=True): + return _prepare_update_data(old_doc, new_doc, replace) + @six.add_metaclass(ABCMeta) class AbstractOperation(object): From 8b482a0a1f88f7c9931b8ce4f5ad08096c7f896a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Jul 2022 19:22:54 +0200 Subject: [PATCH 0151/2550] update oprations in integrator --- openpype/plugins/publish/integrate.py | 176 ++++++++++++++------------ 1 file changed, 98 insertions(+), 78 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index d817595888..b7d48fe9cf 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -5,8 +5,16 @@ import copy import clique import six +from openpype.client.operations import ( + OperationsSession, + new_subset_document, + new_version_doc, + new_representation_doc, + prepare_subset_update_data, + prepare_version_update_data, + prepare_representation_update_data, +) from bson.objectid import ObjectId -from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne import pyblish.api from openpype.client import ( @@ -282,9 +290,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_name = self.get_template_name(instance) - subset, subset_writes = self.prepare_subset(instance, project_name) - version, version_writes = self.prepare_version( - instance, subset, project_name + op_session = OperationsSession() + subset = self.prepare_subset( + instance, op_session, project_name + ) + version = self.prepare_version( + instance, op_session, subset, project_name ) instance.data["versionEntity"] = version @@ -334,7 +345,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Transaction to reduce the chances of another publish trying to # publish to the same version number since that chance can greatly # increase if the file transaction takes a long time. 
- legacy_io.bulk_write(subset_writes + version_writes) + op_session.commit() + self.log.info("Subset {subset[name]} and Version {version[name]} " "written to database..".format(subset=subset, version=version)) @@ -366,49 +378,49 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Finalize the representations now the published files are integrated # Get 'files' info for representations and its attached resources - representation_writes = [] new_repre_names_low = set() for prepared in prepared_representations: - representation = prepared["representation"] + repre_doc = prepared["representation"] + repre_update_data = prepared["repre_doc_update_data"] transfers = prepared["transfers"] destinations = [dst for src, dst in transfers] - representation["files"] = self.get_files_info( + repre_doc["files"] = self.get_files_info( destinations, sites=sites, anatomy=anatomy ) # Add the version resource file infos to each representation - representation["files"] += resource_file_infos + repre_doc["files"] += resource_file_infos # Set up representation for writing to the database. Since # we *might* be overwriting an existing entry if the version # already existed we'll use ReplaceOnce with `upsert=True` - representation_writes.append(ReplaceOne( - filter={"_id": representation["_id"]}, - replacement=representation, - upsert=True - )) + if repre_update_data is None: + op_session.create_entity( + project_name, repre_doc["type"], repre_doc + ) + else: + op_session.update_entity( + project_name, + repre_doc["type"], + repre_doc["_id"], + repre_update_data + ) - new_repre_names_low.add(representation["name"].lower()) + new_repre_names_low.add(repre_doc["name"].lower()) # Delete any existing representations that didn't get any new data # if the instance is not set to append mode if not instance.data.get("append", False): - delete_names = set() for name, existing_repres in existing_repres_by_name.items(): if name not in new_repre_names_low: # We add the exact representation name because `name` is # lowercase for name matching only and not in the database - delete_names.add(existing_repres["name"]) - if delete_names: - representation_writes.append(DeleteMany( - filter={ - "parent": version["_id"], - "name": {"$in": list(delete_names)} - } - )) + op_session.delete_entity( + project_name, "representation", existing_repres["_id"] + ) - # Write representations to the database - legacy_io.bulk_write(representation_writes) + self.log.debug("{}".format(op_session.to_data())) + op_session.commit() # Backwards compatibility # todo: can we avoid the need to store this? 
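
The integrator above now batches its database writes through OperationsSession instead of calling legacy_io.bulk_write directly; a minimal standalone sketch of that pattern, with placeholder project name, ids and values:

    from bson.objectid import ObjectId

    from openpype.client.operations import (
        OperationsSession,
        new_subset_document,
        new_version_doc,
    )

    asset_id = ObjectId()  # stand-in for an existing asset document id

    op_session = OperationsSession()

    subset_doc = new_subset_document("renderMain", "render", asset_id, {})
    op_session.create_entity("my_project", subset_doc["type"], subset_doc)

    version_doc = new_version_doc(1, subset_doc["_id"], {})
    op_session.create_entity("my_project", version_doc["type"], version_doc)

    # Operations are grouped per project and sent as a single bulk_write
    # when the session is committed.
    op_session.commit()
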
@@ -419,13 +431,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.info("Registered {} representations" "".format(len(prepared_representations))) - def prepare_subset(self, instance, project_name): + def prepare_subset(self, instance, op_session, project_name): asset_doc = instance.data["assetEntity"] subset_name = instance.data["subset"] + family = instance.data["family"] self.log.debug("Subset: {}".format(subset_name)) # Get existing subset if it exists - subset_doc = get_subset_by_name( + existing_subset_doc = get_subset_by_name( project_name, subset_name, asset_doc["_id"] ) @@ -438,69 +451,79 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if subset_group: data["subsetGroup"] = subset_group - bulk_writes = [] - if subset_doc is None: + subset_id = None + if existing_subset_doc: + subset_id = existing_subset_doc["_id"] + subset_doc = new_subset_document( + subset_name, family, asset_doc["_id"], data, subset_id + ) + + if existing_subset_doc is None: # Create a new subset self.log.info("Subset '%s' not found, creating ..." % subset_name) - subset_doc = { - "_id": ObjectId(), - "schema": "openpype:subset-3.0", - "type": "subset", - "name": subset_name, - "data": data, - "parent": asset_doc["_id"] - } - bulk_writes.append(InsertOne(subset_doc)) + op_session.create_entity( + project_name, subset_doc["type"], subset_doc + ) else: # Update existing subset data with new data and set in database. # We also change the found subset in-place so we don't need to # re-query the subset afterwards subset_doc["data"].update(data) - bulk_writes.append(UpdateOne( - {"type": "subset", "_id": subset_doc["_id"]}, - {"$set": { - "data": subset_doc["data"] - }} - )) + update_data = prepare_subset_update_data( + existing_subset_doc, subset_doc + ) + op_session.update_entity( + project_name, + subset_doc["type"], + subset_doc["_id"], + update_data + ) self.log.info("Prepared subset: {}".format(subset_name)) - return subset_doc, bulk_writes + return subset_doc - def prepare_version(self, instance, subset_doc, project_name): + def prepare_version(self, instance, op_session, subset_doc, project_name): version_number = instance.data["version"] - version_doc = { - "schema": "openpype:version-3.0", - "type": "version", - "parent": subset_doc["_id"], - "name": version_number, - "data": self.create_version_data(instance) - } - existing_version = get_version_by_name( project_name, version_number, subset_doc["_id"], fields=["_id"] ) + version_id = None + if existing_version: + version_id = existing_version["_id"] + + version_data = self.create_version_data(instance) + version_doc = new_version_doc( + version_number, + subset_doc["_id"], + version_data, + version_id + ) if existing_version: self.log.debug("Updating existing version ...") - version_doc["_id"] = existing_version["_id"] + update_data = prepare_version_update_data( + existing_version, version_doc + ) + op_session.update_entity( + project_name, + version_doc["type"], + version_doc["_id"], + update_data + ) else: self.log.debug("Creating new version ...") - version_doc["_id"] = ObjectId() - - bulk_writes = [ReplaceOne( - filter={"_id": version_doc["_id"]}, - replacement=version_doc, - upsert=True - )] + op_session.create_entity( + project_name, version_doc["type"], version_doc + ) self.log.info("Prepared version: v{0:03d}".format(version_doc["name"])) - return version_doc, bulk_writes + return version_doc def prepare_representation(self, repre, template_name, @@ -696,10 +719,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Use 
previous representation's id if there is a name match existing = existing_repres_by_name.get(repre["name"].lower()) + repre_id = None if existing: repre_id = existing["_id"] - else: - repre_id = ObjectId() # Store first transferred destination as published path data # - used primarily for reviews that are integrated to custom modules @@ -713,20 +735,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # and the actual representation entity for the database data = repre.get("data", {}) data.update({"path": published_path, "template": template}) - representation = { - "_id": repre_id, - "schema": "openpype:representation-2.0", - "type": "representation", - "parent": version["_id"], - "name": repre["name"], - "data": data, - - # Imprint shortcut to context for performance reasons. - "context": repre_context - } + repre_doc = new_representation_doc( + repre["name"], version["_id"], repre_context, data, repre_id + ) + update_data = None + if repre_id is not None: + update_data = prepare_representation_update_data( + existing, repre_doc + ) return { - "representation": representation, + "representation": repre_doc, + "repre_doc_update_data": update_data, "anatomy_data": template_data, "transfers": transfers, # todo: avoid the need for 'published_files' used by Integrate Hero From 23866fee29fd3eded8a9c6c5e82442f20ca5a596 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 12:43:27 +0200 Subject: [PATCH 0152/2550] added some docstrings --- openpype/client/operations.py | 180 +++++++++++++++++++++++++++++++++- 1 file changed, 179 insertions(+), 1 deletion(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index db3071abef..908566fca6 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -29,6 +29,24 @@ def _create_or_convert_to_mongo_id(mongo_id): def new_project_document( project_name, project_code, config, data=None, entity_id=None ): + """Create skeleton data of project document. + + Args: + project_name (str): Name of project. Used as identifier of a project. + project_code (str): Shorter version of projet without spaces and + special characters (in most of cases). Should be also considered + as unique name across projects. + config (Dic[str, Any]): Project config consist of roots, templates, + applications and other project Anatomy related data. + data (Dict[str, Any]): Project data with information about it's + attributes (e.g. 'fps' etc.) or integration specific keys. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of project document. + """ + if data is None: data = {} @@ -46,6 +64,22 @@ def new_project_document( def new_asset_document( name, project_id, parent_id, parents, data=None, entity_id=None ): + """Create skeleton data of asset document. + + Args: + name (str): Is considered as unique identifier of asset in project. + project_id (Union[str, ObjectId]): Id of project doument. + parent_id (Union[str, ObjectId]): Id of parent asset. + parents (List[str]): List of parent assets names. + data (Dict[str, Any]): Asset document data. Empty dictionary is used + if not passed. Value of 'parent_id' is used to fill 'visualParent'. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of asset document. 
+ """ + if data is None: data = {} if parent_id is not None: @@ -64,6 +98,21 @@ def new_asset_document( def new_subset_document(name, family, asset_id, data=None, entity_id=None): + """Create skeleton data of subset document. + + Args: + name (str): Is considered as unique identifier of subset under asset. + family (str): Subset's family. + asset_id (Union[str, ObjectId]): Id of parent asset. + data (Dict[str, Any]): Subset document data. Empty dictionary is used + if not passed. Value of 'family' is used to fill 'family'. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of subset document. + """ + if data is None: data = {} data["family"] = family @@ -78,6 +127,20 @@ def new_subset_document(name, family, asset_id, data=None, entity_id=None): def new_version_doc(version, subset_id, data=None, entity_id=None): + """Create skeleton data of version document. + + Args: + version (int): Is considered as unique identifier of version + under subset. + subset_id (Union[str, ObjectId]): Id of parent subset. + data (Dict[str, Any]): Version document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version document. + """ + if data is None: data = {} @@ -94,6 +157,22 @@ def new_version_doc(version, subset_id, data=None, entity_id=None): def new_representation_doc( name, version_id, context, data=None, entity_id=None ): + """Create skeleton data of asset document. + + Args: + version (int): Is considered as unique identifier of version + under subset. + version_id (Union[str, ObjectId]): Id of parent version. + context (Dict[str, Any]): Representation context used for fill template + of to query. + data (Dict[str, Any]): Representation document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version document. + """ + if data is None: data = {} @@ -124,20 +203,59 @@ def _prepare_update_data(old_doc, new_doc, replace): def prepare_subset_update_data(old_doc, new_doc, replace=True): + """Compare two subset documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + return _prepare_update_data(old_doc, new_doc, replace) def prepare_version_update_data(old_doc, new_doc, replace=True): + """Compare two version documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + return _prepare_update_data(old_doc, new_doc, replace) def prepare_representation_update_data(old_doc, new_doc, replace=True): + """Compare two representation documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + return _prepare_update_data(old_doc, new_doc, replace) @six.add_metaclass(ABCMeta) class AbstractOperation(object): - """Base operation class.""" + """Base operation class. + + Opration represent a call into database. The call can create, change or + remove data. 
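A minimal sketch of how the skeleton helpers documented above chain together; the subset name, family, data keys and the `asset_doc` variable are placeholders, not values from the patch. As the docstrings state, each helper creates a new id when `entity_id` is not passed, so the documents can reference each other before insertion.

    # Placeholder values for illustration only.
    subset_doc = new_subset_document(
        "renderMain", "render", asset_doc["_id"], data={"families": ["render"]}
    )
    version_doc = new_version_doc(3, subset_doc["_id"], data={"source": "..."})
    repre_doc = new_representation_doc(
        "exr", version_doc["_id"], context={}, data={"path": "/some/path.exr"}
    )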
+ + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + """ def __init__(self, project_name, entity_type): self._project_name = project_name @@ -150,6 +268,8 @@ class AbstractOperation(object): @property def id(self): + """Identifier of operation.""" + return self._id @property @@ -158,13 +278,23 @@ class AbstractOperation(object): @abstractproperty def operation_name(self): + """Stringified type of operation.""" + pass @abstractmethod def to_mongo_operation(self): + """Convert operation to Mongo batch operation.""" + pass def to_data(self): + """Convert opration to data that can be converted to json or others. + + Returns: + Dict[str, Any]: Description of operation. + """ + return { "id": self._id, "entity_type": self.entity_type, @@ -174,6 +304,15 @@ class AbstractOperation(object): class CreateOperation(AbstractOperation): + """Opeartion to create an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + data (Dict[str, Any]): Data of entity that will be created. + """ + operation_name = "create" def __init__(self, project_name, entity_type, data): @@ -222,6 +361,18 @@ class CreateOperation(AbstractOperation): class UpdateOperation(AbstractOperation): + """Opeartion to update an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Identifier of an entity. + update_data (Dict[str, Any]): Key -> value changes that will be set in + database. If value is set to 'REMOVED_VALUE' the key will be + removed. Only first level of dictionary is checked (on purpose). + """ + operation_name = "update" def __init__(self, project_name, entity_type, entity_id, update_data): @@ -277,6 +428,15 @@ class UpdateOperation(AbstractOperation): class DeleteOperation(AbstractOperation): + """Opeartion to delete an entity. + + Args: + project_name (str): On which project operation will happen. + entity_type (str): Type of entity on which change happens. + e.g. 'asset', 'representation' etc. + entity_id (Union[str, ObjectId]): Entity id that will be removed. + """ + operation_name = "delete" def __init__(self, project_name, entity_type, entity_id): @@ -389,11 +549,23 @@ class OperationsSession(object): collection.bulk_write(bulk_writes) def create_entity(self, project_name, entity_type, data): + """Fast access to 'CreateOperation'. + + Returns: + CreateOperation: Object of update operation. + """ + operation = CreateOperation(project_name, entity_type, data) self.add(operation) return operation def update_entity(self, project_name, entity_type, entity_id, update_data): + """Fast access to 'UpdateOperation'. + + Returns: + UpdateOperation: Object of update operation. + """ + operation = UpdateOperation( project_name, entity_type, entity_id, update_data ) @@ -401,6 +573,12 @@ class OperationsSession(object): return operation def delete_entity(self, project_name, entity_type, entity_id): + """Fast access to 'DeleteOperation'. + + Returns: + DeleteOperation: Object of delete operation. 
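As a usage illustration of the session API documented above - a sketch under the assumption that the session needs no constructor arguments and exposes a commit-style method that triggers the queued bulk writes; the project name, documents and ids are placeholders.

    session = OperationsSession()

    # Queue operations; nothing touches the database yet.
    session.create_entity("my_project", "asset", asset_doc)
    session.update_entity("my_project", "version", version_doc["_id"], update_data)
    session.delete_entity("my_project", "representation", repre_doc["_id"])

    # Assumed commit-style call that flushes the queued operations
    # per project via bulk writes.
    session.commit()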
+ """ + operation = DeleteOperation(project_name, entity_type, entity_id) self.add(operation) return operation From 7de3d76075937309b4e07da3c7383650ebdd5c0a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 12:46:44 +0200 Subject: [PATCH 0153/2550] removed unused import --- openpype/client/operations.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 908566fca6..dfb1d8c4dd 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -1,4 +1,3 @@ -import re import uuid import copy import collections From 5f5aba7ae3a37ee27db59f4b651f7f85d1316a51 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 13:38:00 +0200 Subject: [PATCH 0154/2550] loader plugins can handle settings on their own --- openpype/pipeline/load/plugins.py | 49 +++++++++++++++++++++++++++++-- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index a30a2188a4..233aace035 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -1,6 +1,7 @@ +import os import logging -from openpype.lib import set_plugin_attributes_from_settings +from openpype.settings import get_system_settings, get_project_settings from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -37,6 +38,46 @@ class LoaderPlugin(list): def __init__(self, context): self.fname = self.filepath_from_context(context) + @classmethod + def apply_settings(cls, project_settings, system_settings): + host_name = os.environ.get("AVALON_APP") + plugin_type = "load" + plugin_type_settings = ( + project_settings + .get(host_name, {}) + .get(plugin_type, {}) + ) + global_type_settings = ( + project_settings + .get("global", {}) + .get(plugin_type, {}) + ) + if not global_type_settings and not plugin_type_settings: + return + + plugin_name = cls.__name__ + + plugin_settings = None + # Look for plugin settings in host specific settings + if plugin_name in plugin_type_settings: + plugin_settings = plugin_type_settings[plugin_name] + + # Look for plugin settings in global settings + elif plugin_name in global_type_settings: + plugin_settings = global_type_settings[plugin_name] + + if not plugin_settings: + return + + print(">>> We have preset for {}".format(plugin_name)) + for option, value in plugin_settings.items(): + if option == "enabled" and value is False: + setattr(cls, "active", False) + print(" - is disabled by preset") + else: + setattr(cls, option, value) + print(" - setting `{}`: `{}`".format(option, value)) + @classmethod def get_representations(cls): return cls.representations @@ -112,7 +153,11 @@ class SubsetLoaderPlugin(LoaderPlugin): def discover_loader_plugins(): plugins = discover(LoaderPlugin) - set_plugin_attributes_from_settings(plugins, LoaderPlugin) + project_name = os.environ.get("AVALON_PROJECT") + system_settings = get_system_settings() + project_settings = get_project_settings(project_name) + for plugin in plugins: + plugin.apply_settings(project_settings, system_settings) return plugins From b2d5146288a6b4c9ca9e0c3fc0adf339a902ec35 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 13:38:18 +0200 Subject: [PATCH 0155/2550] LegacyCreator plugin can handle settings on their own --- openpype/pipeline/create/creator_plugins.py | 13 ++++--- openpype/pipeline/create/legacy_create.py | 43 +++++++++++++++++++++ 2 files changed, 51 insertions(+), 5 deletions(-) diff --git 
a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 8cb161de20..4a1630d8ef 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -1,3 +1,4 @@ +import os import copy from abc import ( @@ -7,10 +8,8 @@ from abc import ( ) import six -from openpype.lib import ( - get_subset_name_with_asset_doc, - set_plugin_attributes_from_settings, -) +from openpype.settings import get_system_settings, get_project_settings +from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -439,7 +438,11 @@ def discover_creator_plugins(): def discover_legacy_creator_plugins(): plugins = discover(LegacyCreator) - set_plugin_attributes_from_settings(plugins, LegacyCreator) + project_name = os.environ.get("AVALON_PROJECT") + system_settings = get_system_settings() + project_settings = get_project_settings(project_name) + for plugin in plugins: + plugin.apply_settings(project_settings, system_settings) return plugins diff --git a/openpype/pipeline/create/legacy_create.py b/openpype/pipeline/create/legacy_create.py index 46e0e3d663..2764b3cb95 100644 --- a/openpype/pipeline/create/legacy_create.py +++ b/openpype/pipeline/create/legacy_create.py @@ -5,6 +5,7 @@ Renamed classes and functions - 'create' -> 'legacy_create' """ +import os import logging import collections @@ -37,6 +38,48 @@ class LegacyCreator(object): self.data.update(data or {}) + @classmethod + def apply_settings(cls, project_settings, system_settings): + """Apply OpenPype settings to a plugin class.""" + + host_name = os.environ.get("AVALON_APP") + plugin_type = "create" + plugin_type_settings = ( + project_settings + .get(host_name, {}) + .get(plugin_type, {}) + ) + global_type_settings = ( + project_settings + .get("global", {}) + .get(plugin_type, {}) + ) + if not global_type_settings and not plugin_type_settings: + return + + plugin_name = cls.__name__ + + plugin_settings = None + # Look for plugin settings in host specific settings + if plugin_name in plugin_type_settings: + plugin_settings = plugin_type_settings[plugin_name] + + # Look for plugin settings in global settings + elif plugin_name in global_type_settings: + plugin_settings = global_type_settings[plugin_name] + + if not plugin_settings: + return + + print(">>> We have preset for {}".format(plugin_name)) + for option, value in plugin_settings.items(): + if option == "enabled" and value is False: + setattr(cls, "active", False) + print(" - is disabled by preset") + else: + setattr(cls, option, value) + print(" - setting `{}`: `{}`".format(option, value)) + def process(self): pass From acb4b28b975c8e276602a32237de7ce37773342b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 14:14:33 +0200 Subject: [PATCH 0156/2550] moved filter pyblish plugins function to openpype.pipeline.publish.lib --- openpype/pipeline/context_tools.py | 2 +- openpype/pipeline/publish/lib.py | 93 ++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 0535ce5d54..c8c70e5ea8 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -18,8 +18,8 @@ from openpype.client import ( ) from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings -from openpype.lib import filter_pyblish_plugins +from .publish.lib import filter_pyblish_plugins from 
.anatomy import Anatomy from .template_data import get_template_data_with_names from . import ( diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 739b2c8806..d5494cd8a4 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -6,6 +6,10 @@ import xml.etree.ElementTree import six import pyblish.plugin +import pyblish.api + +from openpype.lib import Logger +from openpype.settings import get_project_settings, get_system_settings class DiscoverResult: @@ -180,3 +184,92 @@ def publish_plugins_discover(paths=None): result.plugins = plugins return result + + +def filter_pyblish_plugins(plugins): + """Pyblish plugin filter which applies OpenPype settings. + + Apply OpenPype settings on discovered plugins. On plugin with implemented + class method 'def apply_settings(cls, project_settings, system_settings)' + is called the method. Default behavior looks for plugin name and current + host name to look for + + Args: + plugins (List[pyblish.plugin.Plugin]): Discovered plugins on which + are applied settings. + """ + + log = Logger.get_logger("filter_pyblish_plugins") + + # TODO: Don't use host from 'pyblish.api' but from defined host by us. + # - kept becau on farm is probably used host 'shell' which propably + # affect how settings are applied there + host = pyblish.api.current_host() + project_name = os.environ.get("AVALON_PROJECT") + + project_setting = get_project_settings(project_name) + system_settings = get_system_settings() + + # iterate over plugins + for plugin in plugins[:]: + if hasattr(plugin, "apply_settings"): + try: + # Use classmethod 'apply_settings' + # - can be used to target settings from custom settings place + # - skip default behavior when successful + plugin.apply_settings(project_setting, system_settings) + continue + + except Exception: + log.warning( + ( + "Failed to apply settings on plugin {}" + ).format(plugin.__name__), + exc_info=True + ) + + try: + config_data = ( + project_setting + [host] + ["publish"] + [plugin.__name__] + ) + except KeyError: + # host determined from path + file = os.path.normpath(inspect.getsourcefile(plugin)) + file = os.path.normpath(file) + + split_path = file.split(os.path.sep) + if len(split_path) < 4: + log.warning( + 'plugin path too short to extract host {}'.format(file) + ) + continue + + host_from_file = split_path[-4] + plugin_kind = split_path[-2] + + # TODO: change after all plugins are moved one level up + if host_from_file == "openpype": + host_from_file = "global" + + try: + config_data = ( + project_setting + [host_from_file] + [plugin_kind] + [plugin.__name__] + ) + except KeyError: + continue + + for option, value in config_data.items(): + if option == "enabled" and value is False: + log.info('removing plugin {}'.format(plugin.__name__)) + plugins.remove(plugin) + else: + log.info('setting {}:{} on plugin {}'.format( + option, value, plugin.__name__)) + + setattr(plugin, option, value) From cf42e8fa44bb61fe1d6f80f8e122b52fb8cc022b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 14:15:26 +0200 Subject: [PATCH 0157/2550] mark functions in openpype.lib as deprecated --- openpype/lib/plugin_tools.py | 101 +++++++++++++++++++---------------- 1 file changed, 54 insertions(+), 47 deletions(-) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 1d3c1eec6b..c94d1251fc 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -1,11 +1,13 @@ # -*- coding: utf-8 -*- """Avalon/Pyblish plugin tools.""" import 
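Because `filter_pyblish_plugins` above now prefers an `apply_settings` classmethod over the name/host based lookup, a publish plugin can own its settings mapping. A hypothetical example (the class name and settings keys are made up for illustration):

    import pyblish.api


    class CollectExampleData(pyblish.api.ContextPlugin):
        """Hypothetical collector illustrating the 'apply_settings' hook."""

        label = "Collect Example Data"
        order = pyblish.api.CollectorOrder

        @classmethod
        def apply_settings(cls, project_settings, system_settings):
            # Read from a custom settings location instead of the default
            # project_settings[host]["publish"][cls.__name__] lookup.
            settings = (
                project_settings
                .get("global", {})
                .get("publish", {})
                .get(cls.__name__, {})
            )
            cls.enabled = settings.get("enabled", True)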
os -import inspect import logging import re import json +import warnings +import functools + from openpype.client import get_asset_by_id from openpype.settings import get_project_settings @@ -17,6 +19,51 @@ log = logging.getLogger(__name__) DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" +class PluginToolsDeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", PluginToolsDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=PluginToolsDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper- + + if func is None: + return _decorator + return _decorator(func) + + class TaskNotSetError(KeyError): def __init__(self, msg=None): if not msg: @@ -197,6 +244,7 @@ def prepare_template_data(fill_pairs): return fill_data +@deprecated("openpype.pipeline.publish.lib.filter_pyblish_plugins") def filter_pyblish_plugins(plugins): """Filter pyblish plugins by presets. @@ -206,57 +254,14 @@ def filter_pyblish_plugins(plugins): Args: plugins (dict): Dictionary of plugins produced by :mod:`pyblish-base` `discover()` method. - """ - from pyblish import api - host = api.current_host() + from openpype.pipeline.publish.lib import filter_pyblish_plugins - presets = get_project_settings(os.environ['AVALON_PROJECT']) or {} - # skip if there are no presets to process - if not presets: - return - - # iterate over plugins - for plugin in plugins[:]: - - try: - config_data = presets[host]["publish"][plugin.__name__] - except KeyError: - # host determined from path - file = os.path.normpath(inspect.getsourcefile(plugin)) - file = os.path.normpath(file) - - split_path = file.split(os.path.sep) - if len(split_path) < 4: - log.warning( - 'plugin path too short to extract host {}'.format(file) - ) - continue - - host_from_file = split_path[-4] - plugin_kind = split_path[-2] - - # TODO: change after all plugins are moved one level up - if host_from_file == "openpype": - host_from_file = "global" - - try: - config_data = presets[host_from_file][plugin_kind][plugin.__name__] # noqa: E501 - except KeyError: - continue - - for option, value in config_data.items(): - if option == "enabled" and value is False: - log.info('removing plugin {}'.format(plugin.__name__)) - plugins.remove(plugin) - else: - log.info('setting {}:{} on plugin {}'.format( - option, value, plugin.__name__)) - - setattr(plugin, option, value) + filter_pyblish_plugins(plugins) +@deprecated def set_plugin_attributes_from_settings( plugins, superclass, host_name=None, project_name=None ): @@ -273,6 +278,8 @@ def set_plugin_attributes_from_settings( project_name (str): Name of project for which settings will be loaded. Value from environment `AVALON_PROJECT` is used if not entered. 
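The decorator supports both the bare form and the form with a replacement path, which is how the wrappers further below use it. A small usage sketch; the decorated functions and module path are made up:

    @deprecated("openpype.pipeline.some_module.do_thing")
    def do_thing(*args, **kwargs):
        # Forward to the new location while warning the caller.
        from openpype.pipeline.some_module import do_thing
        return do_thing(*args, **kwargs)


    @deprecated
    def helper_without_replacement():
        # Bare form: warning only suggests checking the function content.
        pass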
""" + + # Function is not used anymore from openpype.pipeline import LegacyCreator, LoaderPlugin # determine host application to use for finding presets From bb10fdd041c499f30e5ffa7dd4069828b9f42239 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:00:14 +0200 Subject: [PATCH 0158/2550] :rotating_light: f-strings and cosmetic issues --- igniter/bootstrap_repos.py | 10 +++---- start.py | 55 +++++++++++++++++--------------------- tools/create_zip.py | 2 +- 3 files changed, 29 insertions(+), 38 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 08333885c0..8888440f90 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -122,7 +122,7 @@ class OpenPypeVersion(semver.VersionInfo): if self.staging: if kwargs.get("build"): if "staging" not in kwargs.get("build"): - kwargs["build"] = "{}-staging".format(kwargs.get("build")) + kwargs["build"] = f"{kwargs.get('build')}-staging" else: kwargs["build"] = "staging" @@ -136,8 +136,7 @@ class OpenPypeVersion(semver.VersionInfo): return bool(result and self.staging == other.staging) def __repr__(self): - return "<{}: {} - path={}>".format( - self.__class__.__name__, str(self), self.path) + return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>" def __lt__(self, other: OpenPypeVersion): result = super().__lt__(other) @@ -232,10 +231,7 @@ class OpenPypeVersion(semver.VersionInfo): return openpype_version def __hash__(self): - if self.path: - return hash(self.path) - else: - return hash(str(self)) + return hash(self.path) if self.path else hash(str(self)) @staticmethod def is_version_in_dir( diff --git a/start.py b/start.py index cbf8ffd178..37cc4c063d 100644 --- a/start.py +++ b/start.py @@ -187,9 +187,8 @@ else: if "--headless" in sys.argv: os.environ["OPENPYPE_HEADLESS_MODE"] = "1" sys.argv.remove("--headless") -else: - if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": - os.environ.pop("OPENPYPE_HEADLESS_MODE", None) +elif os.getenv("OPENPYPE_HEADLESS_MODE") != "1": + os.environ.pop("OPENPYPE_HEADLESS_MODE", None) # Enabled logging debug mode when "--debug" is passed if "--verbose" in sys.argv: @@ -203,8 +202,8 @@ if "--verbose" in sys.argv: value = sys.argv.pop(idx) else: raise RuntimeError(( - "Expect value after \"--verbose\" argument. {}" - ).format(expected_values)) + f"Expect value after \"--verbose\" argument. {expected_values}" + )) log_level = None low_value = value.lower() @@ -225,8 +224,9 @@ if "--verbose" in sys.argv: if log_level is None: raise RuntimeError(( - "Unexpected value after \"--verbose\" argument \"{}\". {}" - ).format(value, expected_values)) + "Unexpected value after \"--verbose\" " + f"argument \"{value}\". 
{expected_values}" + )) os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level) @@ -336,34 +336,33 @@ def run_disk_mapping_commands(settings): destination = destination.rstrip('/') source = source.rstrip('/') - if low_platform == "windows": - args = ["subst", destination, source] - elif low_platform == "darwin": - scr = "do shell script \"ln -s {} {}\" with administrator privileges".format(source, destination) # noqa: E501 + if low_platform == "darwin": + scr = f'do shell script "ln -s {source} {destination}" with administrator privileges' # noqa + args = ["osascript", "-e", scr] + elif low_platform == "windows": + args = ["subst", destination, source] else: args = ["sudo", "ln", "-s", source, destination] - _print("disk mapping args:: {}".format(args)) + _print(f"*** disk mapping arguments: {args}") try: if not os.path.exists(destination): output = subprocess.Popen(args) if output.returncode and output.returncode != 0: - exc_msg = "Executing was not successful: \"{}\"".format( - args) + exc_msg = f'Executing was not successful: "{args}"' raise RuntimeError(exc_msg) except TypeError as exc: - _print("Error {} in mapping drive {}, {}".format(str(exc), - source, - destination)) + _print( + f"Error {str(exc)} in mapping drive {source}, {destination}") raise def set_avalon_environments(): """Set avalon specific environments. - These are non modifiable environments for avalon workflow that must be set + These are non-modifiable environments for avalon workflow that must be set before avalon module is imported because avalon works with globals set with environment variables. """ @@ -508,7 +507,7 @@ def _process_arguments() -> tuple: ) if m and m.group('version'): use_version = m.group('version') - _print(">>> Requested version [ {} ]".format(use_version)) + _print(f">>> Requested version [ {use_version} ]") if "+staging" in use_version: use_staging = True break @@ -614,8 +613,8 @@ def _determine_mongodb() -> str: try: openpype_mongo = bootstrap.secure_registry.get_item( "openPypeMongo") - except ValueError: - raise RuntimeError("Missing MongoDB url") + except ValueError as e: + raise RuntimeError("Missing MongoDB url") from e return openpype_mongo @@ -816,11 +815,8 @@ def _bootstrap_from_code(use_version, use_staging): use_version, use_staging ) if version_to_use is None: - raise OpenPypeVersionNotFound( - "Requested version \"{}\" was not found.".format( - use_version - ) - ) + raise OpenPypeVersionIncompatible( + f"Requested version \"{use_version}\" was not found.") else: # Staging version should be used version_to_use = bootstrap.find_latest_openpype_version( @@ -906,7 +902,7 @@ def _boot_validate_versions(use_version, local_version): use_version, openpype_versions ) valid, message = bootstrap.validate_openpype_version(version_path) - _print("{}{}".format(">>> " if valid else "!!! ", message)) + _print(f'{">>> " if valid else "!!! "}{message}') def _boot_print_versions(use_staging, local_version, openpype_root): @@ -1043,7 +1039,7 @@ def boot(): if not result[0]: _print(f"!!! Invalid version: {result[1]}") sys.exit(1) - _print(f"--- version is valid") + _print("--- version is valid") else: try: version_path = _bootstrap_from_code(use_version, use_staging) @@ -1164,8 +1160,7 @@ def get_info(use_staging=None) -> list: formatted = [] for info in inf: padding = (maximum - len(info[0])) + 1 - formatted.append( - "... {}:{}[ {} ]".format(info[0], " " * padding, info[1])) + formatted.append(f'... 
{info[0]}:{" " * padding}[ {info[1]} ]') return formatted diff --git a/tools/create_zip.py b/tools/create_zip.py index 2fc351469a..6392428f58 100644 --- a/tools/create_zip.py +++ b/tools/create_zip.py @@ -61,7 +61,7 @@ def _print(msg: str, message_type: int = 0) -> None: else: header = term.darkolivegreen3("--- ") - print("{}{}".format(header, msg)) + print(f"{header}{msg}") if __name__ == "__main__": From a9f910ac227fd0f90a589ba9035d232c0c62e6ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:01:03 +0200 Subject: [PATCH 0159/2550] :recycle: add openpype version env var to deadline job --- .../deadline/plugins/publish/submit_aftereffects_deadline.py | 3 ++- .../deadline/plugins/publish/submit_harmony_deadline.py | 3 ++- .../deadline/plugins/publish/submit_houdini_remote_publish.py | 1 + .../deadline/plugins/publish/submit_houdini_render_deadline.py | 1 + .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 ++- .../plugins/publish/submit_maya_remote_publish_deadline.py | 3 ++- .../modules/deadline/plugins/publish/submit_nuke_deadline.py | 3 ++- .../modules/deadline/plugins/publish/submit_publish_job.py | 3 ++- 8 files changed, 14 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index de8df3dd9e..c55f85c8da 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -80,7 +80,8 @@ class AfterEffectsSubmitDeadline( "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index a1ee5e0957..3f9c09b592 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -274,7 +274,8 @@ class HarmonySubmitDeadline( "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index fdf67b51bc..95856137e2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -130,6 +130,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): # this application with so the Render Slave can build its own # similar environment using it, e.g. 
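Adding "OPENPYPE_VERSION" to these key lists means the version the artist published with travels along with the farm job; GlobalJobPreLoad (changed later in this series) reads it back to pick a matching build. The keys are typically turned into the job environment the same way the Maya remote publish submitter above does it - a condensed sketch, assuming the variables from that plugin:

    import os

    keys = [
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "OPENPYPE_VERSION",
    ]
    # Only keys that exist in the current environment are forwarded.
    environment = {key: os.environ[key] for key in keys if key in os.environ}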
"houdini17.5;pluginx2.3" "AVALON_TOOLS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index aca88c7440..beda753723 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -101,6 +101,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): # this application with so the Render Slave can build its own # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9" "AVALON_TOOLS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 145b6d795f..f253ceb21a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -525,7 +525,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 57572fcb24..9b1852392b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -102,7 +102,8 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin): keys = [ "FTRACK_API_USER", "FTRACK_API_KEY", - "FTRACK_SERVER" + "FTRACK_SERVER", + "OPENPYPE_VERSION" ] environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 93fb511a34..a328c3633d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -261,7 +261,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "PYBLISHPLUGINPATH", "NUKE_PATH", "TOOL_ENV", - "FOUNDRY_LICENSE" + "FOUNDRY_LICENSE", + "OPENPYPE_VERSION" ] # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 43ea64e565..5c7998465b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -141,7 +141,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "OPENPYPE_USERNAME", "OPENPYPE_RENDER_JOB", "OPENPYPE_PUBLISH_JOB", - "OPENPYPE_MONGO" + "OPENPYPE_MONGO", + "OPENPYPE_VERSION" ] # custom deadline attributes From 0e126a2d829e814d39747b4073cac2fb2cbc7b45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:01:25 +0200 Subject: [PATCH 0160/2550] :recycle: handle multiple versions --- igniter/tools.py | 5 +++ openpype/cli.py | 23 
++++++++++++++ start.py | 83 ++++++++++++++++++++++++++++++++---------------- 3 files changed, 84 insertions(+), 27 deletions(-) diff --git a/igniter/tools.py b/igniter/tools.py index 57159b5e52..a9d592acf0 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -21,6 +21,11 @@ class OpenPypeVersionNotFound(Exception): pass +class OpenPypeVersionIncompatible(Exception): + """OpenPype version is not compatible with the installed one (build).""" + pass + + def should_add_certificate_path_to_mongo_url(mongo_url): """Check if should add ca certificate to mongo url. diff --git a/openpype/cli.py b/openpype/cli.py index 9a2dfaa141..ffe288040e 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -443,3 +443,26 @@ def interactive(): __version__, sys.version, sys.platform ) code.interact(banner) + + +@main.command() +@click.option("--build", help="Print only build version", + is_flag=True, default=False) +def version(build): + """Print OpenPype version.""" + + from openpype.version import __version__ + from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion + from pathlib import Path + import os + + if getattr(sys, 'frozen', False): + local_version = BootstrapRepos.get_version( + Path(os.getenv("OPENPYPE_ROOT"))) + else: + local_version = OpenPypeVersion.get_installed_version_str() + + if build: + print(local_version) + return + print(f"{__version__} (booted: {local_version})") diff --git a/start.py b/start.py index 37cc4c063d..5cdffafb6e 100644 --- a/start.py +++ b/start.py @@ -103,6 +103,9 @@ import site import distutils.spawn from pathlib import Path + +silent_mode = False + # OPENPYPE_ROOT is variable pointing to build (or code) directory # WARNING `OPENPYPE_ROOT` must be defined before igniter import # - igniter changes cwd which cause that filepath of this script won't lead @@ -138,40 +141,44 @@ if sys.__stdout__: term = blessed.Terminal() def _print(message: str): + if silent_mode: + return if message.startswith("!!! "): - print("{}{}".format(term.orangered2("!!! "), message[4:])) + print(f'{term.orangered2("!!! ")}{message[4:]}') return if message.startswith(">>> "): - print("{}{}".format(term.aquamarine3(">>> "), message[4:])) + print(f'{term.aquamarine3(">>> ")}{message[4:]}') return if message.startswith("--- "): - print("{}{}".format(term.darkolivegreen3("--- "), message[4:])) + print(f'{term.darkolivegreen3("--- ")}{message[4:]}') return if message.startswith("*** "): - print("{}{}".format(term.gold("*** "), message[4:])) + print(f'{term.gold("*** ")}{message[4:]}') return if message.startswith(" - "): - print("{}{}".format(term.wheat(" - "), message[4:])) + print(f'{term.wheat(" - ")}{message[4:]}') return if message.startswith(" . "): - print("{}{}".format(term.tan(" . "), message[4:])) + print(f'{term.tan(" . ")}{message[4:]}') return if message.startswith(" - "): - print("{}{}".format(term.seagreen3(" - "), message[7:])) + print(f'{term.seagreen3(" - ")}{message[7:]}') return if message.startswith(" ! "): - print("{}{}".format(term.goldenrod(" ! "), message[7:])) + print(f'{term.goldenrod(" ! 
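As a quick illustration of the new command (version numbers are invented): running "openpype_console version" on a machine where code version 3.14.2 was booted from an installed 3.14.1 build would print "3.14.2 (booted: 3.14.1)", while "openpype_console version --build" prints only "3.14.1".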
")}{message[7:]}') return if message.startswith(" * "): - print("{}{}".format(term.aquamarine1(" * "), message[7:])) + print(f'{term.aquamarine1(" * ")}{message[7:]}') return if message.startswith(" "): - print("{}{}".format(term.darkseagreen3(" "), message[4:])) + print(f'{term.darkseagreen3(" ")}{message[4:]}') return print(message) else: def _print(message: str): + if silent_mode: + return print(message) @@ -242,13 +249,14 @@ from igniter.tools import ( get_openpype_global_settings, get_openpype_path_from_settings, validate_mongo_connection, - OpenPypeVersionNotFound + OpenPypeVersionNotFound, + OpenPypeVersionIncompatible ) # noqa from igniter.bootstrap_repos import OpenPypeVersion # noqa: E402 bootstrap = BootstrapRepos() silent_commands = {"run", "igniter", "standalonepublisher", - "extractenvironments"} + "extractenvironments", "version"} def list_versions(openpype_versions: list, local_version=None) -> None: @@ -686,40 +694,47 @@ def _find_frozen_openpype(use_version: str = None, # Specific version is defined if use_version.lower() == "latest": # Version says to use latest version - _print("Finding latest version defined by use version") + _print(">>> Finding latest version defined by use version") openpype_version = bootstrap.find_latest_openpype_version( - use_staging + use_staging, compatible_with=installed_version ) else: - _print("Finding specified version \"{}\"".format(use_version)) + _print(f">>> Finding specified version \"{use_version}\"") openpype_version = bootstrap.find_openpype_version( use_version, use_staging ) if openpype_version is None: raise OpenPypeVersionNotFound( - "Requested version \"{}\" was not found.".format( - use_version - ) + f"Requested version \"{use_version}\" was not found." ) + if not openpype_version.is_compatible(installed_version): + raise OpenPypeVersionIncompatible(( + f"Requested version \"{use_version}\" is not compatible " + f"with installed version \"{installed_version}\"" + )) + elif studio_version is not None: # Studio has defined a version to use - _print("Finding studio version \"{}\"".format(studio_version)) + _print(f">>> Finding studio version \"{studio_version}\"") openpype_version = bootstrap.find_openpype_version( - studio_version, use_staging + studio_version, use_staging, compatible_with=installed_version ) if openpype_version is None: raise OpenPypeVersionNotFound(( - "Requested OpenPype version \"{}\" defined by settings" + "Requested OpenPype version " + f"\"{studio_version}\" defined by settings" " was not found." 
- ).format(studio_version)) + )) else: # Default behavior to use latest version - _print("Finding latest version") + _print(( + ">>> Finding latest version compatible " + f"with [ {installed_version} ]")) openpype_version = bootstrap.find_latest_openpype_version( - use_staging + use_staging, compatible_with=installed_version ) if openpype_version is None: if use_staging: @@ -800,7 +815,7 @@ def _bootstrap_from_code(use_version, use_staging): if getattr(sys, 'frozen', False): local_version = bootstrap.get_version(Path(_openpype_root)) - switch_str = f" - will switch to {use_version}" if use_version else "" + switch_str = f" - will switch to {use_version}" if use_version and use_version != local_version else "" # noqa _print(f" - booting version: {local_version}{switch_str}") assert local_version else: @@ -913,13 +928,24 @@ def _boot_print_versions(use_staging, local_version, openpype_root): _print("--- This will list only staging versions detected.") _print(" To see other version, omit --use-staging argument.") - openpype_versions = bootstrap.find_openpype(include_zips=True, - staging=use_staging) if getattr(sys, 'frozen', False): local_version = bootstrap.get_version(Path(openpype_root)) else: local_version = OpenPypeVersion.get_installed_version_str() + compatible_with = OpenPypeVersion(version=local_version) + if "--all" in sys.argv: + compatible_with = None + _print("--- Showing all version (even those not compatible).") + else: + _print(("--- Showing only compatible versions " + f"with [ {compatible_with.major}.{compatible_with.minor} ]")) + + openpype_versions = bootstrap.find_openpype( + include_zips=True, + staging=use_staging, + compatible_with=compatible_with) + list_versions(openpype_versions, local_version) @@ -936,6 +962,9 @@ def _boot_handle_missing_version(local_version, use_staging, message): def boot(): """Bootstrap OpenPype.""" + global silent_mode + if any(arg in silent_commands for arg in sys.argv): + silent_mode = True # ------------------------------------------------------------------------ # Set environment to OpenPype root path From 9205d4bde12baf8901a2ba675157cc0b4ad65919 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:02:24 +0200 Subject: [PATCH 0161/2550] :recycle: changes in bootstrapping for multiple versions --- igniter/bootstrap_repos.py | 196 ++++++++++++++++++++++++++++++------- 1 file changed, 158 insertions(+), 38 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 8888440f90..47f2525952 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -380,7 +380,8 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_local_versions( - cls, production: bool = None, staging: bool = None + cls, production: bool = None, + staging: bool = None, compatible_with: OpenPypeVersion = None ) -> List: """Get all versions available on this machine. @@ -390,6 +391,8 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. + compatible_with (OpenPypeVersion): Return only those compatible + with specified version. 
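Taken together, the version picked at boot by `_find_frozen_openpype` above follows this order - a condensed illustration only, with all names coming from the surrounding function:

    if use_version and use_version.lower() == "latest":
        openpype_version = bootstrap.find_latest_openpype_version(
            use_staging, compatible_with=installed_version)
    elif use_version:
        # Explicit version; may raise OpenPypeVersionNotFound or
        # OpenPypeVersionIncompatible as shown above.
        openpype_version = bootstrap.find_openpype_version(
            use_version, use_staging)
    elif studio_version is not None:
        openpype_version = bootstrap.find_openpype_version(
            studio_version, use_staging, compatible_with=installed_version)
    else:
        openpype_version = bootstrap.find_latest_openpype_version(
            use_staging, compatible_with=installed_version)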
""" # Return all local versions if arguments are set to None if production is None and staging is None: @@ -406,10 +409,19 @@ class OpenPypeVersion(semver.VersionInfo): if not production and not staging: return [] + # DEPRECATED: backwards compatible way to look for versions in root dir_to_search = Path(user_data_dir("openpype", "pypeclub")) versions = OpenPypeVersion.get_versions_from_directory( - dir_to_search + dir_to_search, compatible_with=compatible_with ) + if compatible_with: + dir_to_search = Path( + user_data_dir("openpype", "pypeclub")) / f"{compatible_with.major}.{compatible_with.minor}" # noqa + versions += OpenPypeVersion.get_versions_from_directory( + dir_to_search, compatible_with=compatible_with + ) + + filtered_versions = [] for version in versions: if version.is_staging(): @@ -421,7 +433,8 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_remote_versions( - cls, production: bool = None, staging: bool = None + cls, production: bool = None, + staging: bool = None, compatible_with: OpenPypeVersion = None ) -> List: """Get all versions available in OpenPype Path. @@ -431,6 +444,8 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. + compatible_with (OpenPypeVersion): Return only those compatible + with specified version. """ # Return all local versions if arguments are set to None if production is None and staging is None: @@ -464,7 +479,14 @@ class OpenPypeVersion(semver.VersionInfo): if not dir_to_search: return [] - versions = cls.get_versions_from_directory(dir_to_search) + # DEPRECATED: look for version in root directory + versions = cls.get_versions_from_directory( + dir_to_search, compatible_with=compatible_with) + if compatible_with: + dir_to_search = dir_to_search / f"{compatible_with.major}.{compatible_with.minor}" # noqa + versions += cls.get_versions_from_directory( + dir_to_search, compatible_with=compatible_with) + filtered_versions = [] for version in versions: if version.is_staging(): @@ -475,11 +497,15 @@ class OpenPypeVersion(semver.VersionInfo): return list(sorted(set(filtered_versions))) @staticmethod - def get_versions_from_directory(openpype_dir: Path) -> List: + def get_versions_from_directory( + openpype_dir: Path, + compatible_with: OpenPypeVersion = None) -> List: """Get all detected OpenPype versions in directory. Args: openpype_dir (Path): Directory to scan. + compatible_with (OpenPypeVersion): Return only versions compatible + with build version specified as OpenPypeVersion. Returns: list of OpenPypeVersion @@ -514,6 +540,10 @@ class OpenPypeVersion(semver.VersionInfo): )[0]: continue + if compatible_with and not detected_version.is_compatible( + compatible_with): + continue + detected_version.path = item _openpype_versions.append(detected_version) @@ -545,8 +575,9 @@ class OpenPypeVersion(semver.VersionInfo): def get_latest_version( staging: bool = False, local: bool = None, - remote: bool = None - ) -> OpenPypeVersion: + remote: bool = None, + compatible_with: OpenPypeVersion = None + ) -> Union[OpenPypeVersion, None]: """Get latest available version. The version does not contain information about path and source. @@ -564,6 +595,9 @@ class OpenPypeVersion(semver.VersionInfo): staging (bool, optional): List staging versions if True. local (bool, optional): List local versions if True. remote (bool, optional): List remote versions if True. + compatible_with (OpenPypeVersion, optional) Return only version + compatible with compatible_with. 
+ """ if local is None and remote is None: local = True @@ -594,7 +628,12 @@ class OpenPypeVersion(semver.VersionInfo): return None all_versions.sort() - return all_versions[-1] + latest_version: OpenPypeVersion + latest_version = all_versions[-1] + if compatible_with and not latest_version.is_compatible( + compatible_with): + return None + return latest_version @classmethod def get_expected_studio_version(cls, staging=False, global_settings=None): @@ -617,6 +656,21 @@ class OpenPypeVersion(semver.VersionInfo): return None return OpenPypeVersion(version=result) + def is_compatible(self, version: OpenPypeVersion): + """Test build compatibility. + + This will simply compare major and minor versions (ignoring patch + and the rest). + + Args: + version (OpenPypeVersion): Version to check compatibility with. + + Returns: + bool: if the version is compatible + + """ + return self.major == version.major and self.minor == version.minor + class BootstrapRepos: """Class for bootstrapping local OpenPype installation. @@ -737,8 +791,9 @@ class BootstrapRepos: return # create destination directory - if not self.data_dir.exists(): - self.data_dir.mkdir(parents=True) + destination = self.data_dir / f"{installed_version.major}.{installed_version.minor}" # noqa + if not destination.exists(): + destination.mkdir(parents=True) # create zip inside temporary directory. with tempfile.TemporaryDirectory() as temp_dir: @@ -766,7 +821,9 @@ class BootstrapRepos: Path to moved zip on success. """ - destination = self.data_dir / zip_file.name + version = OpenPypeVersion.version_in_str(zip_file.name) + destination_dir = self.data_dir / f"{version.major}.{version.minor}" + destination = destination_dir / zip_file.name if destination.exists(): self._print( @@ -778,7 +835,7 @@ class BootstrapRepos: self._print(str(e), LOG_ERROR, exc_info=True) return None try: - shutil.move(zip_file.as_posix(), self.data_dir.as_posix()) + shutil.move(zip_file.as_posix(), destination_dir.as_posix()) except shutil.Error as e: self._print(str(e), LOG_ERROR, exc_info=True) return None @@ -991,6 +1048,16 @@ class BootstrapRepos: @staticmethod def _validate_dir(path: Path) -> tuple: + """Validate checksums in a given path. + + Args: + path (Path): path to folder to validate. + + Returns: + tuple(bool, str): returns status and reason as a bool + and str in a tuple. + + """ checksums_file = Path(path / "checksums") if not checksums_file.exists(): # FIXME: This should be set to False sometimes in the future @@ -1072,7 +1139,20 @@ class BootstrapRepos: sys.path.insert(0, directory.as_posix()) @staticmethod - def find_openpype_version(version, staging): + def find_openpype_version( + version: Union[str, OpenPypeVersion], + staging: bool, + compatible_with: OpenPypeVersion = None + ) -> Union[OpenPypeVersion, None]: + """Find location of specified OpenPype version. + + Args: + version (Union[str, OpenPypeVersion): Version to find. + staging (bool): Filter staging versions. + compatible_with (OpenPypeVersion, optional): Find only + versions compatible with specified one. 
+ + """ if isinstance(version, str): version = OpenPypeVersion(version=version) @@ -1081,7 +1161,8 @@ class BootstrapRepos: return installed_version local_versions = OpenPypeVersion.get_local_versions( - staging=staging, production=not staging + staging=staging, production=not staging, + compatible_with=compatible_with ) zip_version = None for local_version in local_versions: @@ -1095,7 +1176,8 @@ class BootstrapRepos: return zip_version remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging, production=not staging + staging=staging, production=not staging, + compatible_with=compatible_with ) for remote_version in remote_versions: if remote_version == version: @@ -1103,13 +1185,14 @@ class BootstrapRepos: return None @staticmethod - def find_latest_openpype_version(staging): + def find_latest_openpype_version( + staging, compatible_with: OpenPypeVersion = None): installed_version = OpenPypeVersion.get_installed_version() local_versions = OpenPypeVersion.get_local_versions( - staging=staging + staging=staging, compatible_with=compatible_with ) remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging + staging=staging, compatible_with=compatible_with ) all_versions = local_versions + remote_versions if not staging: @@ -1134,7 +1217,9 @@ class BootstrapRepos: self, openpype_path: Union[Path, str] = None, staging: bool = False, - include_zips: bool = False) -> Union[List[OpenPypeVersion], None]: + include_zips: bool = False, + compatible_with: OpenPypeVersion = None + ) -> Union[List[OpenPypeVersion], None]: """Get ordered dict of detected OpenPype version. Resolution order for OpenPype is following: @@ -1150,6 +1235,8 @@ class BootstrapRepos: otherwise. include_zips (bool, optional): If set True it will try to find OpenPype in zip files in given directory. + compatible_with (OpenPypeVersion, optional): Find only those + versions compatible with the one specified. Returns: dict of Path: Dictionary of detected OpenPype version. @@ -1168,30 +1255,56 @@ class BootstrapRepos: ("Finding OpenPype in non-filesystem locations is" " not implemented yet.")) - dir_to_search = self.data_dir - user_versions = self.get_openpype_versions(self.data_dir, staging) - # if we have openpype_path specified, search only there. + version_dir = "" + if compatible_with: + version_dir = f"{compatible_with.major}.{compatible_with.minor}" + + # if checks bellow for OPENPYPE_PATH and registry fails, use data_dir + # DEPRECATED: lookup in root of this folder is deprecated in favour + # of major.minor sub-folders. + dirs_to_search = [ + self.data_dir + ] + if compatible_with: + dirs_to_search.append(self.data_dir / version_dir) + if openpype_path: - dir_to_search = openpype_path + dirs_to_search = [openpype_path] + + if compatible_with: + dirs_to_search.append(openpype_path / version_dir) else: - if os.getenv("OPENPYPE_PATH"): - if Path(os.getenv("OPENPYPE_PATH")).exists(): - dir_to_search = Path(os.getenv("OPENPYPE_PATH")) + # first try OPENPYPE_PATH and if that is not available, + # try registry. 
+ if os.getenv("OPENPYPE_PATH") \ + and Path(os.getenv("OPENPYPE_PATH")).exists(): + dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))] + + if compatible_with: + dirs_to_search.append( + Path(os.getenv("OPENPYPE_PATH")) / version_dir) else: try: registry_dir = Path( str(self.registry.get_item("openPypePath"))) if registry_dir.exists(): - dir_to_search = registry_dir + dirs_to_search = [registry_dir] + if compatible_with: + dirs_to_search.append(registry_dir / version_dir) except ValueError: # nothing found in registry, we'll use data dir pass - openpype_versions = self.get_openpype_versions(dir_to_search, staging) - openpype_versions += user_versions + openpype_versions = [] + for dir_to_search in dirs_to_search: + try: + openpype_versions += self.get_openpype_versions( + dir_to_search, staging, compatible_with=compatible_with) + except ValueError: + # location is invalid, skip it + pass - # remove zip file version if needed. if not include_zips: openpype_versions = [ v for v in openpype_versions if v.path.suffix != ".zip" @@ -1304,9 +1417,8 @@ class BootstrapRepos: raise ValueError( f"version {version} is not associated with any file") - destination = self.data_dir / version.path.stem - if destination.exists(): - assert destination.is_dir() + destination = self.data_dir / f"{version.major}.{version.minor}" / version.path.stem # noqa + if destination.exists() and destination.is_dir(): try: shutil.rmtree(destination) except OSError as e: @@ -1375,7 +1487,7 @@ class BootstrapRepos: else: dir_name = openpype_version.path.stem - destination = self.data_dir / dir_name + destination = self.data_dir / f"{openpype_version.major}.{openpype_version.minor}" / dir_name # noqa # test if destination directory already exist, if so lets delete it. if destination.exists() and force: @@ -1553,14 +1665,18 @@ class BootstrapRepos: return False return True - def get_openpype_versions(self, - openpype_dir: Path, - staging: bool = False) -> list: + def get_openpype_versions( + self, + openpype_dir: Path, + staging: bool = False, + compatible_with: OpenPypeVersion = None) -> list: """Get all detected OpenPype versions in directory. Args: openpype_dir (Path): Directory to scan. staging (bool, optional): Find staging versions if True. + compatible_with (OpenPypeVersion, optional): Get only versions + compatible with the one specified. 
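The net effect of these path changes is that versions are installed to and discovered from a major.minor subfolder, while the flat root is still scanned for backwards compatibility. An illustrative layout, where <data_dir> stands for the directory returned by user_data_dir("openpype", "pypeclub") or the studio OPENPYPE_PATH (paths are examples):

    # <data_dir>/openpype-v3.13.0.zip        <- legacy flat layout, still scanned
    # <data_dir>/3.14/openpype-v3.14.1.zip   <- zip compatible with a 3.14.x build
    # <data_dir>/3.14/openpype-v3.14.2/      <- extracted/installed version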
Returns: list of OpenPypeVersion @@ -1570,7 +1686,7 @@ class BootstrapRepos: """ if not openpype_dir.exists() and not openpype_dir.is_dir(): - raise ValueError("specified directory is invalid") + raise ValueError(f"specified directory {openpype_dir} is invalid") _openpype_versions = [] # iterate over directory in first level and find all that might @@ -1595,6 +1711,10 @@ class BootstrapRepos: ): continue + if compatible_with and \ + not detected_version.is_compatible(compatible_with): + continue + detected_version.path = item if staging and detected_version.is_staging(): _openpype_versions.append(detected_version) From de70521f562084bf5a0cef20179ad2b73efa3bb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 1 Aug 2022 18:02:53 +0200 Subject: [PATCH 0162/2550] :recycle: deadline plugin support for job specific OP versions --- .../custom/plugins/GlobalJobPreLoad.py | 87 +++++++++++++++++-- .../custom/plugins/OpenPype/OpenPype.param | 11 ++- .../custom/plugins/OpenPype/OpenPype.py | 86 +++++++++++++++++- 3 files changed, 171 insertions(+), 13 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index bcd853f374..a43c6c7733 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -6,13 +6,29 @@ import subprocess import json import platform import uuid -from Deadline.Scripting import RepositoryUtils, FileUtils +import re +from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils + + +def get_openpype_version_from_path(path): + version_file = os.path.join(path, "openpype", "version.py") + if not os.path.isfile(version_file): + return None + version = {} + with open(version_file, "r") as vf: + exec(vf.read(), version) + + version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) + return version_match[1] def get_openpype_executable(): """Return OpenPype Executable from Event Plug-in Settings""" config = RepositoryUtils.GetPluginConfig("OpenPype") - return config.GetConfigEntryWithDefault("OpenPypeExecutable", "") + exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "") + dir_list = config.GetConfigEntryWithDefault( + "OpenPypeInstallationDirs", "") + return exe_list, dir_list def inject_openpype_environment(deadlinePlugin): @@ -25,16 +41,71 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Injecting OpenPype environments ...") try: print(">>> Getting OpenPype executable ...") - exe_list = get_openpype_executable() - openpype_app = FileUtils.SearchFileList(exe_list) - if openpype_app == "": + exe_list, dir_list = get_openpype_executable() + openpype_versions = [] + # if the job requires specific OpenPype version, + # lets go over all available and find compatible build. 
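For clarity, the `get_openpype_version_from_path` helper above executes the bundled version.py and keeps only the major.minor.patch part, so prerelease or staging suffixes such as "-nightly.3" are stripped before comparison. An illustrative call with an invented path under the plugin's default installation directory:

    get_openpype_version_from_path(
        "C:/Program Files (x86)/OpenPype/openpype-v3.14.2")
    # -> "3.14.2"   (returns None when no openpype/version.py is found)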
+        requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
+        if requested_version:
+            print(("Scanning for compatible requested "
+                   f"version {requested_version}"))
+            install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
+            if install_dir:
+                sub_dirs = [
+                    f.path for f in os.scandir(install_dir)
+                    if f.is_dir()
+                ]
+                for subdir in sub_dirs:
+                    version = get_openpype_version_from_path(subdir)
+                    if not version:
+                        continue
+                    openpype_versions.append((version, subdir))
+
+        exe = FileUtils.SearchFileList(exe_list)
+        if openpype_versions:
+            # if looking for requested compatible version,
+            # add the implicitly specified to the list too.
+            version = get_openpype_version_from_path(
+                os.path.dirname(exe))
+            if version:
+                openpype_versions.append((version, os.path.dirname(exe)))
+
+        if requested_version:
+            # sort detected versions
+            if openpype_versions:
+                openpype_versions.sort(key=lambda ver: ver[0])
+            requested_major, requested_minor, _ = requested_version.split(".")[:3]  # noqa: E501
+            compatible_versions = []
+            for version in openpype_versions:
+                v = version[0].split(".")[:3]
+                if v[0] == requested_major and v[1] == requested_minor:
+                    compatible_versions.append(version)
+            if not compatible_versions:
+                raise RuntimeError(
+                    ("Cannot find compatible version available "
+                     "for version {} requested by the job. "
+                     "Please add it through plugin configuration "
+                     "in Deadline or install it to configured "
+                     "directory.").format(requested_version))
+            # sort compatible versions and pick the last one
+            compatible_versions.sort(key=lambda ver: ver[0])
+            # create list of executables for different platforms and let
+            # Deadline decide.
+            exe_list = [
+                os.path.join(
+                    compatible_versions[-1][1], "openpype_console.exe"),
+                os.path.join(
+                    compatible_versions[-1][1], "openpype_console")
+            ]
+            exe = FileUtils.SearchFileList(";".join(exe_list))
+        if exe == "":
             raise RuntimeError(
                 "OpenPype executable was not found "
                 + "in the semicolon separated list \"" + exe_list + "\". 
" + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") - print("--- OpenPype executable: {}".format(openpype_app)) + print("--- OpenPype executable: {}".format(exe)) # tempfile.TemporaryFile cannot be used because of locking temp_file_name = "{}_{}.json".format( @@ -45,7 +116,7 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Temporary path: {}".format(export_url)) args = [ - openpype_app, + exe, "--headless", 'extractenvironments', export_url @@ -77,7 +148,7 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Executing: {}".format(args)) std_output = subprocess.check_output(args, - cwd=os.path.dirname(openpype_app), + cwd=os.path.dirname(exe), env=env) print(">>> Process result {}".format(std_output)) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param index 8bd6dce12d..b3ac18e20c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.param @@ -7,11 +7,20 @@ Index=0 Default=OpenPype Plugin for Deadline Description=Not configurable +[OpenPypeInstallationDirs] +Type=multilinemultifolder +Label=Directories where OpenPype versions are installed +Category=OpenPype Installation Directories +CategoryOrder=0 +Index=0 +Default=C:\Program Files (x86)\OpenPype +Description=Path or paths to directories where multiple versions of OpenPype might be installed. Enter every such path on separate lines. + [OpenPypeExecutable] Type=multilinemultifilename Label=OpenPype Executable Category=OpenPype Executables -CategoryOrder=0 +CategoryOrder=1 Index=0 Default= Description=The path to the OpenPype executable. Enter alternative paths on separate lines. diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 451d71fb63..b84560f175 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -1,10 +1,18 @@ +#!/usr/bin/env python3 + from System.IO import Path from System.Text.RegularExpressions import Regex from Deadline.Plugins import PluginType, DeadlinePlugin -from Deadline.Scripting import StringUtils, FileUtils, RepositoryUtils +from Deadline.Scripting import ( + StringUtils, + FileUtils, + DirectoryUtils, + RepositoryUtils +) import re +import os ###################################################################### @@ -52,13 +60,83 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): self.AddStdoutHandlerCallback( ".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress + @staticmethod + def get_openpype_version_from_path(path): + version_file = os.path.join(path, "openpype", "version.py") + if not os.path.isfile(version_file): + return None + version = {} + with open(version_file, "r") as vf: + exec(vf.read(), version) + + version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) + return version_match[1] + def RenderExecutable(self): - exeList = self.GetConfigEntry("OpenPypeExecutable") - exe = FileUtils.SearchFileList(exeList) + job = self.GetJob() + openpype_versions = [] + # if the job requires specific OpenPype version, + # lets go over all available and find compatible build. 
+        requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
+        if requested_version:
+            self.LogInfo((
+                "Scanning for compatible requested "
+                f"version {requested_version}"))
+            dir_list = self.GetConfigEntry("OpenPypeInstallationDirs")
+            install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
+            if install_dir:
+                sub_dirs = [
+                    f.path for f in os.scandir(install_dir)
+                    if f.is_dir()
+                ]
+                for subdir in sub_dirs:
+                    version = self.get_openpype_version_from_path(subdir)
+                    if not version:
+                        continue
+                    openpype_versions.append((version, subdir))
+
+        exe_list = self.GetConfigEntry("OpenPypeExecutable")
+        exe = FileUtils.SearchFileList(exe_list)
+        if openpype_versions:
+            # if looking for requested compatible version,
+            # add the implicitly specified to the list too.
+            version = self.get_openpype_version_from_path(
+                os.path.dirname(exe))
+            if version:
+                openpype_versions.append((version, os.path.dirname(exe)))
+
+        if requested_version:
+            # sort detected versions
+            if openpype_versions:
+                openpype_versions.sort(key=lambda ver: ver[0])
+            requested_major, requested_minor, _ = requested_version.split(".")[:3]  # noqa: E501
+            compatible_versions = []
+            for version in openpype_versions:
+                v = version[0].split(".")[:3]
+                if v[0] == requested_major and v[1] == requested_minor:
+                    compatible_versions.append(version)
+            if not compatible_versions:
+                self.FailRender(("Cannot find compatible version available "
+                                 "for version {} requested by the job. "
+                                 "Please add it through plugin configuration "
+                                 "in Deadline or install it to configured "
+                                 "directory.").format(requested_version))
+            # sort compatible versions and pick the last one
+            compatible_versions.sort(key=lambda ver: ver[0])
+            # create list of executables for different platforms and let
+            # Deadline decide.
+            exe_list = [
+                os.path.join(
+                    compatible_versions[-1][1], "openpype_console.exe"),
+                os.path.join(
+                    compatible_versions[-1][1], "openpype_console")
+            ]
+            exe = FileUtils.SearchFileList(";".join(exe_list))
+
         if exe == "":
             self.FailRender(
                 "OpenPype executable was not found " +
-                "in the semicolon separated list \"" + exeList + "\". " +
+                "in the semicolon separated list \"" + exe_list + "\". 
" + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") return exe From 8a55a83d7dc835da2d5f6416aa66686aedb922d4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 18:38:54 +0200 Subject: [PATCH 0163/2550] added settings to be able fill empty intent and define it's label --- .../settings/defaults/system_settings/modules.json | 5 +++-- .../module_settings/schema_ftrack.json | 14 +++++++++++--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 8cd4114cb0..a3cf98f3ed 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -26,13 +26,14 @@ "linux": [] }, "intent": { + "allow_empty_intent": true, + "empty_intent_label": "", "items": { - "-": "-", "wip": "WIP", "final": "Final", "test": "Test" }, - "default": "-" + "default": "" }, "custom_attributes": { "show": { diff --git a/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json b/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json index 654ddf2938..7c5774415c 100644 --- a/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json +++ b/openpype/settings/entities/schemas/system_schema/module_settings/schema_ftrack.json @@ -50,8 +50,15 @@ "is_group": true, "children": [ { - "type": "label", - "label": "Intent" + "type": "boolean", + "key": "allow_empty_intent", + "label": "Allow empty intent" + }, + { + "type": "text", + "key": "empty_intent_label", + "label": "Empty item label", + "placeholder": "< Not set >" }, { "type": "dict-modifiable", @@ -64,7 +71,8 @@ { "key": "default", "type": "text", - "label": "Default Intent" + "label": "Default Intent", + "placeholder": "< First available >" }, { "type": "separator" From a591ea92efd534baf14d5f9fc549ba65dabc9894 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 18:39:45 +0200 Subject: [PATCH 0164/2550] changed model in pype publisher to use new settings --- openpype/tools/pyblish_pype/model.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/openpype/tools/pyblish_pype/model.py b/openpype/tools/pyblish_pype/model.py index 2931a379b3..31aa63677e 100644 --- a/openpype/tools/pyblish_pype/model.py +++ b/openpype/tools/pyblish_pype/model.py @@ -86,7 +86,7 @@ class IntentModel(QtGui.QStandardItemModel): First and default value is {"< Not Set >": None} """ - default_item = {"< Not Set >": None} + default_empty_label = "< Not set >" def __init__(self, parent=None): super(IntentModel, self).__init__(parent) @@ -102,27 +102,39 @@ class IntentModel(QtGui.QStandardItemModel): self._item_count = 0 self.default_index = 0 - intents_preset = ( + intent_settings = ( get_system_settings() .get("modules", {}) .get("ftrack", {}) .get("intent", {}) ) - default = intents_preset.get("default") - items = intents_preset.get("items", {}) + items = intent_settings.get("items", {}) if not items: return - for idx, item_value in enumerate(items.keys()): + allow_empty_intent = intent_settings.get("allow_empty_intent", True) + empty_intent_label = ( + intent_settings.get("empty_intent_label") + or self.default_empty_label + ) + listed_items = list(items.items()) + if allow_empty_intent: + listed_items.insert(0, ("", empty_intent_label)) + + default = intent_settings.get("default") + + for idx, item in 
enumerate(listed_items): + item_value = item[0] if item_value == default: self.default_index = idx break - self.add_items(items) + self._add_items(listed_items) - def add_items(self, items): - for value, label in items.items(): + def _add_items(self, items): + for item in items: + value, label = item new_item = QtGui.QStandardItem() new_item.setData(label, QtCore.Qt.DisplayRole) new_item.setData(value, Roles.IntentItemValue) From 23601cb2448437be40ac215ef1584080de2a5205 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 1 Aug 2022 18:40:28 +0200 Subject: [PATCH 0165/2550] unset intent from context if empty item is used --- openpype/tools/pyblish_pype/window.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/tools/pyblish_pype/window.py b/openpype/tools/pyblish_pype/window.py index 78590259bc..e167405325 100644 --- a/openpype/tools/pyblish_pype/window.py +++ b/openpype/tools/pyblish_pype/window.py @@ -523,6 +523,7 @@ class Window(QtWidgets.QDialog): instance_item.setData(enable_value, Roles.IsEnabledRole) def _add_intent_to_context(self): + context_value = None if ( self.intent_model.has_items and "intent" not in self.controller.context.data @@ -530,11 +531,17 @@ class Window(QtWidgets.QDialog): idx = self.intent_model.index(self.intent_box.currentIndex(), 0) intent_value = self.intent_model.data(idx, Roles.IntentItemValue) intent_label = self.intent_model.data(idx, QtCore.Qt.DisplayRole) + if intent_value: + context_value = { + "value": intent_value, + "label": intent_label + } - self.controller.context.data["intent"] = { - "value": intent_value, - "label": intent_label - } + # Unset intent if is set to empty value + if context_value is None: + self.controller.context.data.pop("intent", None) + else: + self.controller.context.data["intent"] = context_value def on_instance_toggle(self, index, state=None): """An item is requesting to be toggled""" From 3d7e1953075809af9323951046fc3d321da8352b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 2 Aug 2022 11:26:33 +0200 Subject: [PATCH 0166/2550] :recycle: skip non-existent local path when finding local version, stop crashing if directory to search doesn't exist - this will allow to just use build version --- igniter/bootstrap_repos.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 47f2525952..750b2f1bf7 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -514,10 +514,10 @@ class OpenPypeVersion(semver.VersionInfo): ValueError: if invalid path is specified. """ - if not openpype_dir.exists() and not openpype_dir.is_dir(): - raise ValueError("specified directory is invalid") - _openpype_versions = [] + if not openpype_dir.exists() and not openpype_dir.is_dir(): + return _openpype_versions + # iterate over directory in first level and find all that might # contain OpenPype. for item in openpype_dir.iterdir(): From 89bd23856c30e39f2493d99b2c743d3b918cccda Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 Aug 2022 12:25:51 +0200 Subject: [PATCH 0167/2550] OP-3405 - refactor - updated methods signature Renamed collection to project_name as when we are leaving MongoDB, collection doesnt make much sense. 
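The rename is mechanical: every "collection" argument in the Site Sync API becomes "project_name", with no change in behaviour. A call site then reads roughly as below; sync_server stands for an already obtained SyncServerModule instance, and the project name and representation id are placeholders:

    project_name = "my_project"              # placeholder
    repre_id = "607f1f77bcf86cd799439011"    # placeholder representation _id

    # Queue the representation for syncing to a site, or drop it again.
    sync_server.add_site(project_name, repre_id, site_name="studio", force=True)
    sync_server.remove_site(project_name, repre_id, "studio",
                            remove_local_files=False)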
--- .../providers/abstract_provider.py | 8 +- .../modules/sync_server/providers/dropbox.py | 12 +- .../modules/sync_server/providers/gdrive.py | 16 +- .../sync_server/providers/local_drive.py | 12 +- .../modules/sync_server/providers/sftp.py | 16 +- openpype/modules/sync_server/sync_server.py | 71 +++---- .../modules/sync_server/sync_server_module.py | 189 +++++++++--------- openpype/modules/sync_server/tray/models.py | 2 +- 8 files changed, 164 insertions(+), 162 deletions(-) diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 688a17f14f..8c2fe1cad9 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -62,7 +62,7 @@ class AbstractProvider: @abc.abstractmethod def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. @@ -75,7 +75,7 @@ class AbstractProvider: arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): name of project_name file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -87,7 +87,7 @@ class AbstractProvider: @abc.abstractmethod def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -99,7 +99,7 @@ class AbstractProvider: arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index dfc42fed75..89d6990841 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -224,7 +224,7 @@ class DropboxHandler(AbstractProvider): return False def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. 
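The abstract interface and the Dropbox handler around this point only carry the renamed project_name argument through to progress reporting; the reporting pattern itself does not change. Stripped of provider specifics it looks roughly like this, assuming server is the SyncServerModule instance handed to upload_file/download_file:

    def report_progress(server, project_name, file, representation, site, ratio):
        # ratio is 0.0-1.0; handlers call this repeatedly while copying chunks.
        server.update_db(
            project_name=project_name,
            new_file_id=None,
            file=file,
            representation=representation,
            site=site,
            progress=ratio,
        )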
@@ -237,7 +237,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -290,7 +290,7 @@ class DropboxHandler(AbstractProvider): cursor.offset = f.tell() server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -301,7 +301,7 @@ class DropboxHandler(AbstractProvider): return path def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -313,7 +313,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -337,7 +337,7 @@ class DropboxHandler(AbstractProvider): self.dbx.files_download_to_file(local_path, source_path) server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index aa7329b104..bef707788b 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -251,7 +251,7 @@ class GDriveHandler(AbstractProvider): return folder_id def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -264,7 +264,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -324,7 +324,7 @@ class GDriveHandler(AbstractProvider): while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) @@ -333,7 +333,7 @@ class GDriveHandler(AbstractProvider): last_tick = time.time() log.debug("Uploaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -358,7 +358,7 @@ class GDriveHandler(AbstractProvider): return response['id'] def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'local_path'. 
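The is_representation_paused checks inside the GDrive transfer loops tie into the module's public pause API. Driven from a tool it looks roughly like the snippet below; sync_server stands for an obtained SyncServerModule instance, the project name and id are placeholders, and the method signatures are the ones shown in this patch:

    project_name = "my_project"              # placeholder
    repre_id = "607f1f77bcf86cd799439011"    # placeholder representation _id

    sync_server.pause_representation(project_name, repre_id, "gdrive")
    try:
        pass  # do local work while the sync loop skips this representation
    finally:
        sync_server.unpause_representation(project_name, repre_id, "gdrive")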
@@ -372,7 +372,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -410,7 +410,7 @@ class GDriveHandler(AbstractProvider): while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) @@ -419,7 +419,7 @@ class GDriveHandler(AbstractProvider): last_tick = time.time() log.debug("Downloaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 172cb338cf..4951ef4d1a 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -82,7 +82,7 @@ class LocalDriveHandler(AbstractProvider): return editable def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False, direction="Upload"): """ Copies file from 'source_path' to 'target_path' @@ -95,7 +95,7 @@ class LocalDriveHandler(AbstractProvider): thread = threading.Thread(target=self._copy, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, direction) else: if os.path.exists(target_path): @@ -105,13 +105,13 @@ class LocalDriveHandler(AbstractProvider): return os.path.basename(target_path) def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download a file form 'source_path' to 'local_path' """ return self.upload_file(source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite, direction="Download") def delete_file(self, path): @@ -188,7 +188,7 @@ class LocalDriveHandler(AbstractProvider): except shutil.SameFileError: print("same files, skipping") - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -204,7 +204,7 @@ class LocalDriveHandler(AbstractProvider): status_val = target_file_size / source_file_size last_tick = time.time() log.debug(direction + "ed %d%%." 
% int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 49b87b14ec..302ffae3e6 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -222,7 +222,7 @@ class SFTPHandler(AbstractProvider): return os.path.basename(path) def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -235,7 +235,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -256,7 +256,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._upload, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "upload") return os.path.basename(target_path) @@ -267,7 +267,7 @@ class SFTPHandler(AbstractProvider): conn.put(source_path, target_path) def download_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'target_path'. @@ -281,7 +281,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -302,7 +302,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._download, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "download") return os.path.basename(target_path) @@ -425,7 +425,7 @@ class SFTPHandler(AbstractProvider): pysftp.exceptions.ConnectionException): log.warning("Couldn't connect", exc_info=True) - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -446,7 +446,7 @@ class SFTPHandler(AbstractProvider): status_val = target_file_size / source_file_size last_tick = time.time() log.debug(direction + "ed %d%%." 
% int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 356a75f99d..9cc55ec562 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -14,7 +14,7 @@ from .utils import SyncStatus, ResumableError log = PypeLogger().get_logger("SyncServer") -async def upload(module, collection, file, representation, provider_name, +async def upload(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Upload single 'file' of a 'representation' to 'provider'. @@ -31,7 +31,7 @@ async def upload(module, collection, file, representation, provider_name, Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source db file (dictionary): of file from representation in Mongo representation (dictionary): of representation provider_name (string): gdrive, gdc etc. @@ -47,7 +47,7 @@ async def upload(module, collection, file, representation, provider_name, # thread can do that at a time, upload/download to prepared # structure should be run in parallel remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) @@ -55,7 +55,7 @@ async def upload(module, collection, file, representation, provider_name, file_path = file.get("path", "") try: local_file_path, remote_file_path = resolve_paths(module, - file_path, collection, remote_site_name, remote_handler + file_path, project_name, remote_site_name, remote_handler ) except Exception as exp: print(exp) @@ -74,27 +74,28 @@ async def upload(module, collection, file, representation, provider_name, local_file_path, remote_file_path, module, - collection, + project_name, file, representation, remote_site_name, True ) - module.handle_alternate_site(collection, representation, remote_site_name, + module.handle_alternate_site(project_name, representation, + remote_site_name, file["_id"], file_id) return file_id -async def download(module, collection, file, representation, provider_name, +async def download(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Downloads file to local folder denoted in representation.Context. 
Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source file (dictionary) : info about processed file representation (dictionary): repr that 'file' belongs to provider_name (string): 'gdrive' etc @@ -108,20 +109,20 @@ async def download(module, collection, file, representation, provider_name, """ with module.lock: remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) file_path = file.get("path", "") local_file_path, remote_file_path = resolve_paths( - module, file_path, collection, remote_site_name, remote_handler + module, file_path, project_name, remote_site_name, remote_handler ) local_folder = os.path.dirname(local_file_path) os.makedirs(local_folder, exist_ok=True) - local_site = module.get_active_site(collection) + local_site = module.get_active_site(project_name) loop = asyncio.get_running_loop() file_id = await loop.run_in_executor(None, @@ -129,20 +130,20 @@ async def download(module, collection, file, representation, provider_name, remote_file_path, local_file_path, module, - collection, + project_name, file, representation, local_site, True ) - module.handle_alternate_site(collection, representation, local_site, + module.handle_alternate_site(project_name, representation, local_site, file["_id"], file_id) return file_id -def resolve_paths(module, file_path, collection, +def resolve_paths(module, file_path, project_name, remote_site_name=None, remote_handler=None): """ Returns tuple of local and remote file paths with {root} @@ -153,7 +154,7 @@ def resolve_paths(module, file_path, collection, Args: module(SyncServerModule): object to run SyncServerModule API file_path(string): path with {root} - collection(string): project name + project_name(string): project name remote_site_name(string): remote site remote_handler(AbstractProvider): implementation Returns: @@ -164,7 +165,7 @@ def resolve_paths(module, file_path, collection, remote_file_path = remote_handler.resolve_path(file_path) local_handler = lib.factory.get_provider( - 'local_drive', collection, module.get_active_site(collection)) + 'local_drive', project_name, module.get_active_site(project_name)) local_file_path = local_handler.resolve_path(file_path) return local_file_path, remote_file_path @@ -269,7 +270,7 @@ class SyncServerThread(threading.Thread): - gets list of collections in DB - gets list of active remote providers (has configuration, credentials) - - for each collection it looks for representations that should + - for each project_name it looks for representations that should be synced - synchronize found collections - update representations - fills error messages for exceptions @@ -282,17 +283,17 @@ class SyncServerThread(threading.Thread): import time start_time = time.time() self.module.set_sync_project_settings() # clean cache - collection = None + project_name = None enabled_projects = self.module.get_enabled_projects() - for collection in enabled_projects: - preset = self.module.sync_project_settings[collection] + for project_name in enabled_projects: + preset = self.module.sync_project_settings[project_name] - local_site, remote_site = self._working_sites(collection) + local_site, remote_site = self._working_sites(project_name) if not all([local_site, remote_site]): continue sync_repres = self.module.get_sync_representations( - collection, + project_name, local_site, remote_site ) @@ -310,7 +311,7 @@ class SyncServerThread(threading.Thread): 
remote_provider = \ self.module.get_provider_for_site(site=remote_site) handler = lib.factory.get_provider(remote_provider, - collection, + project_name, remote_site, presets=site_preset) limit = lib.factory.get_provider_batch_limit( @@ -341,7 +342,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( upload(self.module, - collection, + project_name, file, sync, remote_provider, @@ -353,7 +354,7 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, remote_site, - collection + project_name )) processed_file_path.add(file_path) if status == SyncStatus.DO_DOWNLOAD: @@ -361,7 +362,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( download(self.module, - collection, + project_name, file, sync, remote_provider, @@ -373,7 +374,7 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, local_site, - collection + project_name )) processed_file_path.add(file_path) @@ -384,12 +385,12 @@ class SyncServerThread(threading.Thread): return_exceptions=True) for file_id, info in zip(files_created, files_processed_info): - file, representation, site, collection = info + file, representation, site, project_name = info error = None if isinstance(file_id, BaseException): error = str(file_id) file_id = None - self.module.update_db(collection, + self.module.update_db(project_name, file_id, file, representation, @@ -399,7 +400,7 @@ class SyncServerThread(threading.Thread): duration = time.time() - start_time log.debug("One loop took {:.2f}s".format(duration)) - delay = self.module.get_loop_delay(collection) + delay = self.module.get_loop_delay(project_name) log.debug("Waiting for {} seconds to new loop".format(delay)) self.timer = asyncio.create_task(self.run_timer(delay)) await asyncio.gather(self.timer) @@ -458,19 +459,19 @@ class SyncServerThread(threading.Thread): self.timer.cancel() self.timer = None - def _working_sites(self, collection): - if self.module.is_project_paused(collection): + def _working_sites(self, project_name): + if self.module.is_project_paused(project_name): log.debug("Both sites same, skipping") return None, None - local_site = self.module.get_active_site(collection) - remote_site = self.module.get_remote_site(collection) + local_site = self.module.get_active_site(project_name) + remote_site = self.module.get_remote_site(project_name) if local_site == remote_site: log.debug("{}-{} sites same, skipping".format(local_site, remote_site)) return None, None - configured_sites = _get_configured_sites(self.module, collection) + configured_sites = _get_configured_sites(self.module, project_name) if not all([local_site in configured_sites, remote_site in configured_sites]): log.debug("Some of the sites {} - {} is not ".format(local_site, diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 71e35c7839..c4d90416bb 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -130,12 +130,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.projects_processed = set() """ Start of Public API """ - def add_site(self, collection, representation_id, site_name=None, + def add_site(self, project_name, representation_id, site_name=None, force=False): """ Adds new site to representation to be synced. 
- 'collection' must have synchronization enabled (globally or + 'project_name' must have synchronization enabled (globally or project only) Used as a API endpoint from outside applications (Loader etc). @@ -143,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to reset existing site. Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists @@ -153,25 +153,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") if not site_name: site_name = self.DEFAULT_SITE - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, force=force) - def remove_site(self, collection, representation_id, site_name, + def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): """ Removes 'site_name' for particular 'representation_id' on - 'collection' + 'project_name' Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site remove_local_files (bool): remove only files for 'local_id' @@ -180,15 +180,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: throws ValueError if any issue """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, remove=True) if remove_local_files: - self._remove_local_file(collection, representation_id, site_name) + self._remove_local_file(project_name, representation_id, site_name) def compute_resource_sync_sites(self, project_name): """Get available resource sync sites state for publish process. @@ -335,9 +335,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return alt_site_pairs - def clear_project(self, collection, site_name): + def clear_project(self, project_name, site_name): """ - Clear 'collection' of 'site_name' and its local files + Clear 'project_name' of 'site_name' and its local files Works only on real local sites, not on 'studio' """ @@ -348,15 +348,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): # TODO currently not possible to replace with get_representations representations = list( - self.connection.database[collection].find(query)) + self.connection.database[project_name].find(query)) if not representations: self.log.debug("No repre found") return for repre in representations: - self.remove_site(collection, repre.get("_id"), site_name, True) + self.remove_site(project_name, repre.get("_id"), site_name, True) - def create_validate_project_task(self, collection, site_name): + def create_validate_project_task(self, project_name, site_name): """Adds metadata about project files validation on a queue. 
This process will loop through all representation and check if @@ -373,28 +373,28 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ task = { "type": "validate", - "project_name": collection, - "func": lambda: self.validate_project(collection, site_name, + "project_name": project_name, + "func": lambda: self.validate_project(project_name, site_name, reset_missing=True) } - self.projects_processed.add(collection) + self.projects_processed.add(project_name) self.long_running_tasks.append(task) - def validate_project(self, collection, site_name, reset_missing=False): - """Validate 'collection' of 'site_name' and its local files + def validate_project(self, project_name, site_name, reset_missing=False): + """Validate 'project_name' of 'site_name' and its local files If file present and not marked with a 'site_name' in DB, DB is updated with site name and file modified date. Args: - collection (string): project name + project_name (string): project name site_name (string): active site name reset_missing (bool): if True reset site in DB if missing physically """ - self.log.debug("Validation of {} for {} started".format(collection, + self.log.debug("Validation of {} for {} started".format(project_name, site_name)) - representations = list(get_representations(collection)) + representations = list(get_representations(project_name)) if not representations: self.log.debug("No repre found") return @@ -414,7 +414,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): continue file_path = repre_file.get("path", "") - local_file_path = self.get_local_file_path(collection, + local_file_path = self.get_local_file_path(project_name, site_name, file_path) @@ -426,14 +426,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "Adding site {} for {}".format(site_name, repre_id)) - query = { - "_id": repre_id - } created_dt = datetime.fromtimestamp( os.path.getmtime(local_file_path)) elem = {"name": site_name, "created_dt": created_dt} - self._add_site(collection, query, repre, elem, + self._add_site(project_name, repre, elem, site_name=site_name, file_id=repre_file["_id"], force=True) @@ -443,41 +440,42 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.log.debug("Resetting site {} for {}". format(site_name, repre_id)) self.reset_site_on_representation( - collection, repre_id, site_name=site_name, + project_name, repre_id, site_name=site_name, file_id=repre_file["_id"]) sites_reset += 1 if sites_added % 100 == 0: self.log.debug("Sites added {}".format(sites_added)) - self.log.debug("Validation of {} for {} ended".format(collection, + self.log.debug("Validation of {} for {} ended".format(project_name, site_name)) self.log.info("Sites added {}, sites reset {}".format(sites_added, reset_missing)) - def pause_representation(self, collection, representation_id, site_name): + def pause_representation(self, project_name, representation_id, site_name): """ Sets 'representation_id' as paused, eg. no syncing should be happening on it. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. 
""" log.info("Pausing SyncServer for {}".format(representation_id)) self._paused_representations.add(representation_id) - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=True) - def unpause_representation(self, collection, representation_id, site_name): + def unpause_representation(self, project_name, + representation_id, site_name): """ Sets 'representation_id' as unpaused. Does not fail or warn if repre wasn't paused. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. """ @@ -487,7 +485,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): except KeyError: pass # self.paused_representations is not persistent - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=False) def is_representation_paused(self, representation_id, @@ -518,7 +516,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): happening on all representation inside. Args: - project_name (string): collection name + project_name (string): project_name name """ log.info("Pausing SyncServer for {}".format(project_name)) self._paused_projects.add(project_name) @@ -530,7 +528,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Does not fail or warn if project wasn't paused. Args: - project_name (string): collection name + project_name (string): """ log.info("Unpausing SyncServer for {}".format(project_name)) try: @@ -543,7 +541,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns if 'project_name' is paused or not. Args: - project_name (string): collection name + project_name (string): check_parents (bool): check if server itself is not paused Returns: @@ -942,8 +940,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return True return False - def handle_alternate_site(self, collection, representation, processed_site, - file_id, synced_file_id): + def handle_alternate_site(self, project_name, representation, + processed_site, file_id, synced_file_id): """ For special use cases where one site vendors another. @@ -956,7 +954,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): same location >> file is accesible on 'sftp' site right away. 
Args: - collection (str): name of project + project_name (str): name of project representation (dict) processed_site (str): real site_name of published/uploaded file file_id (ObjectId): DB id of file handled @@ -980,26 +978,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule): alternate_sites = set(alternate_sites) for alt_site in alternate_sites: - query = { - "_id": representation["_id"] - } elem = {"name": alt_site, "created_dt": datetime.now(), "id": synced_file_id} self.log.debug("Adding alternate {} to {}".format( alt_site, representation["_id"])) - self._add_site(collection, query, + self._add_site(project_name, representation, elem, alt_site, file_id=file_id, force=True) """ End of Public API """ - def get_local_file_path(self, collection, site_name, file_path): + def get_local_file_path(self, project_name, site_name, file_path): """ Externalized for app """ - handler = LocalDriveHandler(collection, site_name) + handler = LocalDriveHandler(project_name, site_name) local_file_path = handler.resolve_path(file_path) return local_file_path @@ -1286,7 +1281,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return sites.get(site, 'N/A') @time_function - def get_sync_representations(self, collection, active_site, remote_site): + def get_sync_representations(self, project_name, active_site, remote_site): """ Get representations that should be synced, these could be recognised by presence of document in 'files.sites', where key is @@ -1297,8 +1292,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): better performance. Goal is to get as few representations as possible. Args: - collection (string): name of collection (in most cases matches - project name + project_name (string): active_site (string): identifier of current active site (could be 'local_0' when working from home, 'studio' when working in the studio (default) @@ -1307,10 +1301,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: (list) of dictionaries """ - log.debug("Check representations for : {}".format(collection)) - self.connection.Session["AVALON_PROJECT"] = collection + log.debug("Check representations for : {}".format(project_name)) + self.connection.Session["AVALON_PROJECT"] = project_name # retry_cnt - number of attempts to sync specific file before giving up - retries_arr = self._get_retries_arr(collection) + retries_arr = self._get_retries_arr(project_name) match = { "type": "representation", "$or": [ @@ -1447,14 +1441,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return SyncStatus.DO_NOTHING - def update_db(self, collection, new_file_id, file, representation, + def update_db(self, project_name, new_file_id, file, representation, site, error=None, progress=None, priority=None): """ Update 'provider' portion of records in DB with success (file_id) or error (exception) Args: - collection (string): name of project - force to db connection as + project_name (string): name of project - force to db connection as each file might come from different collection new_file_id (string): file (dictionary): info about processed file (pulled from DB) @@ -1497,7 +1491,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if file_id: arr_filter.append({'f._id': ObjectId(file_id)}) - self.connection.database[collection].update_one( + self.connection.database[project_name].update_one( query, update, upsert=True, @@ -1560,7 +1554,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return -1, None - def reset_site_on_representation(self, collection, representation_id, + def 
reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, remove=False, pause=None, force=False): """ @@ -1577,7 +1571,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Should be used when repre should be synced to new site. Args: - collection (string): name of project (eg. collection) in DB + project_name (string): name of project (eg. collection) in DB representation_id(string): _id of representation file_id (string): file _id in representation side (string): local or remote side @@ -1591,18 +1585,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - representation = get_representation_by_id(collection, + representation = get_representation_by_id(project_name, representation_id) if not representation: raise ValueError("Representation {} not found in {}". - format(representation_id, collection)) + format(representation_id, project_name)) if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") - local_site = self.get_active_site(collection) - remote_site = self.get_remote_site(collection) + local_site = self.get_active_site(project_name) + remote_site = self.get_remote_site(project_name) if side: if side == 'local': @@ -1612,42 +1606,44 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} - query = { - "_id": ObjectId(representation_id) - } - if file_id: # reset site for particular file - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, representation_id, elem, file_id, site_name) elif side: # reset site for whole representation - self._reset_site(collection, query, elem, site_name) + self._reset_site(project_name, representation_id, elem, site_name) elif remove: # remove site for whole representation - self._remove_site(collection, query, representation, site_name) + self._remove_site(project_name, + representation, site_name) elif pause is not None: - self._pause_unpause_site(collection, query, + self._pause_unpause_site(project_name, representation, site_name, pause) else: # add new site to all files for representation - self._add_site(collection, query, representation, elem, site_name, + self._add_site(project_name, representation, elem, site_name, force=force) - def _update_site(self, collection, query, update, arr_filter): + def _update_site(self, project_name, representation_id, + update, arr_filter): """ Auxiliary method to call update_one function on DB Used for refactoring ugly reset_provider_for_file """ - self.connection.database[collection].update_one( + query = { + "_id": ObjectId(representation_id) + } + + self.connection.database[project_name].update_one( query, update, upsert=True, array_filters=arr_filter ) - def _reset_site_for_file(self, collection, query, + def _reset_site_for_file(self, project_name, representation_id, elem, file_id, site_name): """ Resets 'site_name' for 'file_id' on representation in 'query' on - 'collection' + 'project_name' """ update = { "$set": {"files.$[f].sites.$[s]": elem} @@ -1660,9 +1656,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _reset_site(self, collection, query, elem, site_name): + def _reset_site(self, project_name, representation_id, elem, site_name): """ Resets 'site_name' for all files 
of representation in 'query' """ @@ -1674,9 +1670,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _remove_site(self, collection, query, representation, site_name): + def _remove_site(self, project_name, representation, site_name): """ Removes 'site_name' for 'representation' in 'query' @@ -1698,10 +1694,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): } arr_filter = [] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _pause_unpause_site(self, collection, query, - representation, site_name, pause): + def _pause_unpause_site(self, project_name, representation, + site_name, pause): """ Pauses/unpauses all files for 'representation' based on 'pause' @@ -1733,12 +1730,13 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _add_site(self, collection, query, representation, elem, site_name, + def _add_site(self, project_name, representation, elem, site_name, force=False, file_id=None): """ - Adds 'site_name' to 'representation' on 'collection' + Adds 'site_name' to 'representation' on 'project_name' Args: representation (dict) @@ -1746,10 +1744,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to remove existing or raises ValueError """ + representation_id = representation["_id"] reset_existing = False files = representation.get("files", []) if not files: - log.debug("No files for {}".format(representation["_id"])) + log.debug("No files for {}".format(representation_id)) return for repre_file in files: @@ -1759,7 +1758,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): for site in repre_file.get("sites"): if site["name"] == site_name: if force or site.get("error"): - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, + representation_id, elem, repre_file["_id"], site_name) reset_existing = True @@ -1785,14 +1785,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, + update, arr_filter) - def _remove_local_file(self, collection, representation_id, site_name): + def _remove_local_file(self, project_name, representation_id, site_name): """ Removes all local files for 'site_name' of 'representation_id' Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site @@ -1808,7 +1809,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): provider_name = self.get_provider_for_site(site=site_name) if provider_name == 'local_drive': - representation = get_representation_by_id(collection, + representation = get_representation_by_id(project_name, representation_id, fields=["files"]) if not representation: @@ -1818,7 +1819,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): local_file_path = '' for file in representation.get("files"): - local_file_path = self.get_local_file_path(collection, + local_file_path = self.get_local_file_path(project_name, site_name, file.get("path", "") ) diff --git 
a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index a97797c920..f05a5bd8ea 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -441,7 +441,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): full text filtering. Allows pagination, most of heavy lifting is being done on DB side. - Single model matches to single collection. When project is changed, + Single model matches to single project. When project is changed, model is reset and refreshed. Args: From eb2c82558888fe5650bdab4bee1a60a498b685fa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 Aug 2022 16:09:59 +0200 Subject: [PATCH 0168/2550] OP-3405 - extracted aggregate query from Loader to Site Sync module --- .../modules/sync_server/sync_server_module.py | 89 +++++++++++++++++ openpype/tools/loader/model.py | 95 ++----------------- 2 files changed, 98 insertions(+), 86 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index c4d90416bb..8fdfab9c2e 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -988,6 +988,95 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation, elem, alt_site, file_id=file_id, force=True) + def get_repre_info_for_versions(self, project_name, version_ids, + active_site, remote_site): + """Returns representation documents for versions and sites combi + + Args: + project_name (str) + version_ids (list): of version[_id] + active_site (string): 'local', 'studio' etc + remote_site (string): dtto + Returns: + + """ + self.connection.Session["AVALON_PROJECT"] = project_name + query = [ + {"$match": {"parent": {"$in": version_ids}, + "type": "representation", + "files.sites.name": {"$exists": 1}}}, + {"$unwind": "$files"}, + {'$addFields': { + 'order_local': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', active_site]} + } + } + }}, + {'$addFields': { + 'order_remote': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', remote_site]} + } + } + }}, + {'$addFields': { + 'progress_local': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_local.progress"}, + "$order_local.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_local.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, + {'$addFields': { + 'progress_remote': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_remote.progress"}, + "$order_remote.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_remote.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, + {'$group': { # first group by repre + '_id': '$_id', + 'parent': {'$first': '$parent'}, + 'avail_ratio_local': { + '$first': { + '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}] + } + }, + 'avail_ratio_remote': { + '$first': { + '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}] + } + } + }}, + {'$group': { # second group by parent, eg version_id + '_id': '$parent', + 'repre_count': {'$sum': 1}, # total representations + # fully available representation for site + 'avail_repre_local': {'$sum': "$avail_ratio_local"}, + 'avail_repre_remote': {'$sum': "$avail_ratio_remote"}, + }}, + ] + # docs = list(self.connection.aggregate(query)) + return self.connection.aggregate(query) + """ End of Public API """ def get_local_file_path(self, project_name, site_name, 
file_path): diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index a5174bd804..3ce44ea6c8 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -272,15 +272,15 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): # update availability on active site when version changes if self.sync_server.enabled and version_doc: - query = self._repre_per_version_pipeline( + repre_info = self.sync_server.get_repre_info_for_versions( + project_name, [version_doc["_id"]], self.active_site, self.remote_site ) - docs = list(self.dbcon.aggregate(query)) - if docs: - repre = docs.pop() - version_doc["data"].update(self._get_repre_dict(repre)) + if repre_info: + version_doc["data"].update( + self._get_repre_dict(repre_info[0])) self.set_version(index, version_doc) @@ -478,16 +478,16 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): for _subset_id, doc in last_versions_by_subset_id.items(): version_ids.add(doc["_id"]) - query = self._repre_per_version_pipeline( + repres = self.sync_server.get_repre_info_for_versions( + project_name, list(version_ids), self.active_site, self.remote_site ) - - for doc in self.dbcon.aggregate(query): + for repre in repres: if self._doc_fetching_stop: return doc["active_provider"] = self.active_provider doc["remote_provider"] = self.remote_provider - repre_info[doc["_id"]] = doc + repre_info[repre["_id"]] = repre self._doc_payload = { "asset_docs_by_id": asset_docs_by_id, @@ -827,83 +827,6 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): return data - def _repre_per_version_pipeline(self, version_ids, - active_site, remote_site): - query = [ - {"$match": {"parent": {"$in": version_ids}, - "type": "representation", - "files.sites.name": {"$exists": 1}}}, - {"$unwind": "$files"}, - {'$addFields': { - 'order_local': { - '$filter': { - 'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', active_site]} - } - } - }}, - {'$addFields': { - 'order_remote': { - '$filter': { - 'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', remote_site]} - } - } - }}, - {'$addFields': { - 'progress_local': {"$arrayElemAt": [{ - '$cond': [ - {'$size': "$order_local.progress"}, - "$order_local.progress", - # if exists created_dt count is as available - {'$cond': [ - {'$size': "$order_local.created_dt"}, - [1], - [0] - ]} - ]}, - 0 - ]} - }}, - {'$addFields': { - 'progress_remote': {"$arrayElemAt": [{ - '$cond': [ - {'$size': "$order_remote.progress"}, - "$order_remote.progress", - # if exists created_dt count is as available - {'$cond': [ - {'$size': "$order_remote.created_dt"}, - [1], - [0] - ]} - ]}, - 0 - ]} - }}, - {'$group': { # first group by repre - '_id': '$_id', - 'parent': {'$first': '$parent'}, - 'avail_ratio_local': { - '$first': { - '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}] - } - }, - 'avail_ratio_remote': { - '$first': { - '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}] - } - } - }}, - {'$group': { # second group by parent, eg version_id - '_id': '$parent', - 'repre_count': {'$sum': 1}, # total representations - # fully available representation for site - 'avail_repre_local': {'$sum': "$avail_ratio_local"}, - 'avail_repre_remote': {'$sum': "$avail_ratio_remote"}, - }}, - ] - return query - class GroupMemberFilterProxyModel(QtCore.QSortFilterProxyModel): """Provide the feature of filtering group by the acceptance of members From 26c4a0f8ca19eeb4faaa85ceac1524c3bed71b7d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 Aug 2022 16:15:17 +0200 Subject: [PATCH 
0169/2550] OP-3405 - Hound --- openpype/modules/sync_server/providers/local_drive.py | 3 ++- openpype/modules/sync_server/sync_server.py | 9 +++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 4951ef4d1a..01bc891d08 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -111,7 +111,8 @@ class LocalDriveHandler(AbstractProvider): Download a file form 'source_path' to 'local_path' """ return self.upload_file(source_path, local_path, - server, project_name, file, representation, site, + server, project_name, file, + representation, site, overwrite, direction="Download") def delete_file(self, path): diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 9cc55ec562..97538fcd4e 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -54,8 +54,9 @@ async def upload(module, project_name, file, representation, provider_name, file_path = file.get("path", "") try: - local_file_path, remote_file_path = resolve_paths(module, - file_path, project_name, remote_site_name, remote_handler + local_file_path, remote_file_path = resolve_paths( + module, file_path, project_name, + remote_site_name, remote_handler ) except Exception as exp: print(exp) @@ -270,8 +271,8 @@ class SyncServerThread(threading.Thread): - gets list of collections in DB - gets list of active remote providers (has configuration, credentials) - - for each project_name it looks for representations that should - be synced + - for each project_name it looks for representations that + should be synced - synchronize found collections - update representations - fills error messages for exceptions - waits X seconds and repeat From 9ed329aebe6e114d47195e6dc456898569e0d404 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 3 Aug 2022 14:26:05 +0200 Subject: [PATCH 0170/2550] :bug: filter out non-build versions and fixing the error message --- .../custom/plugins/GlobalJobPreLoad.py | 18 ++++++++++++++++-- .../custom/plugins/OpenPype/OpenPype.py | 18 ++++++++++++++++-- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index a43c6c7733..5e923eb09a 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -10,10 +10,23 @@ import re from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils -def get_openpype_version_from_path(path): +def get_openpype_version_from_path(path, build=True): + """Get OpenPype version from provided path. + path (str): Path to scan. + build (bool, optional): Get only builds, not sources + + Returns: + str or None: version of OpenPype if found. 
+ + """ version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build + if not build and \ + (not os.path.isfile(os.path.join(path, "openpype_console")) or + not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + return None version = {} with open(version_file, "r") as vf: exec(vf.read(), version) @@ -101,7 +114,8 @@ def inject_openpype_environment(deadlinePlugin): if exe == "": raise RuntimeError( "OpenPype executable was not found " + - "in the semicolon separated list \"" + exe_list + "\". " + + "in the semicolon separated list " + + "\"" + ";".join(exe_list) + "\". " + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index b84560f175..764dc4c4ba 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -61,10 +61,23 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): ".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress @staticmethod - def get_openpype_version_from_path(path): + def get_openpype_version_from_path(path, build=True): + """Get OpenPype version from provided path. + path (str): Path to scan. + build (bool, optional): Get only builds, not sources + + Returns: + str or None: version of OpenPype if found. + + """ version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build + if not build and \ + (not os.path.isfile(os.path.join(path, "openpype_console")) or + not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + return None version = {} with open(version_file, "r") as vf: exec(vf.read(), version) @@ -136,7 +149,8 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): if exe == "": self.FailRender( "OpenPype executable was not found " + - "in the semicolon separated list \"" + exe_list + "\". " + + "in the semicolon separated list " + + "\"" + ";".join(exe_list) + "\". 
" + "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") return exe From c64925fb665ee3bcb49837dcb2fff7f03a7390f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 3 Aug 2022 16:12:21 +0200 Subject: [PATCH 0171/2550] :rotating_light: I hate you Hound so much --- .../deadline/repository/custom/plugins/OpenPype/OpenPype.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 764dc4c4ba..79101bb90c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -76,7 +76,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): # skip if the version is not build if not build and \ (not os.path.isfile(os.path.join(path, "openpype_console")) or - not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + not os.path.isfile(os.path.join(path, "openpype_console.exe"))): # noqa: E501 return None version = {} with open(version_file, "r") as vf: From c4fce5fea9ad37e0706c1b76500ed21585e66141 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 17:40:47 +0200 Subject: [PATCH 0172/2550] integrate description can use optional keys --- .../publish/integrate_ftrack_description.py | 69 +++++++++++++------ 1 file changed, 49 insertions(+), 20 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index c6a3d47f66..e7c265988e 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -6,9 +6,11 @@ Requires: """ import sys +import json import six import pyblish.api +from openpype.lib import StringTemplate class IntegrateFtrackDescription(pyblish.api.InstancePlugin): @@ -25,6 +27,10 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): description_template = "{comment}" def process(self, instance): + if not self.description_template: + self.log.info("Skipping. Description template is not set.") + return + # Check if there are any integrated AssetVersion entities asset_versions_key = "ftrackIntegratedAssetVersionsData" asset_versions_data_by_id = instance.data.get(asset_versions_key) @@ -38,39 +44,62 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): else: self.log.debug("Comment is set to `{}`".format(comment)) - session = instance.context.data["ftrackSession"] - intent = instance.context.data.get("intent") - intent_label = None - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent + if intent and "{intent}" in self.description_template: + value = intent.get("value") + if value: + intent = intent.get("label") or value - if not intent_label: - intent_label = intent_val or "" + if not intent and not comment: + self.log.info("Skipping. 
Intent and comment are empty.") + return # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) - if intent_label: - self.log.debug( - "Intent label is set to `{}`.".format(intent_label) - ) - + if intent: + self.log.debug("Intent is set to `{}`.".format(intent)) else: self.log.debug("Intent is not set.") + # If we would like to use more "optional" possibilities we would have + # come up with some expressions in templates or speicifc templates + # for all 3 possible combinations when comment and intent are + # set or not (when both are not set then description does not + # make sense). + fill_data = {} + if comment: + fill_data["comment"] = comment + if intent: + fill_data["intent"] = intent + + description = StringTemplate.format_template( + self.description_template, fill_data + ) + if not description.solved: + self.log.warning(( + "Couldn't solve template \"{}\" with data {}" + ).format( + self.description_template, json.dumps(fill_data, indent=4) + )) + return + + if not description: + self.log.debug(( + "Skipping. Result of template is empty string." + " Template \"{}\" Fill data: {}" + ).format( + self.description_template, json.dumps(fill_data, indent=4) + )) + return + + session = instance.context.data["ftrackSession"] for asset_version_data in asset_versions_data_by_id.values(): asset_version = asset_version_data["asset_version"] # Backwards compatibility for older settings using # attribute 'note_with_intent_template' - comment = self.description_template.format(**{ - "intent": intent_label, - "comment": comment - }) - asset_version["comment"] = comment + + asset_version["comment"] = description try: session.commit() From d7d8d45ee5589741092a66187f42f2332296420a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 3 Aug 2022 18:27:08 +0200 Subject: [PATCH 0173/2550] OP-3405 - representation is not a list Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/sync_server/tray/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index f05a5bd8ea..629c4cbbf1 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -923,7 +923,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): representation = get_representation_by_id(self.project, repre_id) if representation: self.sync_server.update_db(self.project, None, None, - representation.pop(), + representation, get_local_site_id(), priority=value) self.is_editing = False From 8f5360d9d55efefc7bdfa9e182b279bb046ce733 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 18:28:40 +0200 Subject: [PATCH 0174/2550] added ability to keep '<>' without formatting content unchanged --- openpype/lib/path_templates.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index c1282016ef..e4b18ec258 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -211,15 +211,28 @@ class StringTemplate(object): if counted_symb > -1: parts = tmp_parts.pop(counted_symb) counted_symb -= 1 + # If part contains only single string keep value + # unchanged if parts: # Remove optional start char parts.pop(0) - if counted_symb < 0: - out_parts = new_parts - else: - out_parts = tmp_parts[counted_symb] - # Store temp parts - 
out_parts.append(OptionalPart(parts)) + + if not parts: + value = "<>" + elif ( + len(parts) == 1 + and isinstance(parts[0], six.string_types) + ): + value = "<{}>".format(parts[0]) + else: + value = OptionalPart(parts) + + if counted_symb < 0: + out_parts = new_parts + else: + out_parts = tmp_parts[counted_symb] + # Store value + out_parts.append(value) continue if counted_symb < 0: @@ -793,6 +806,7 @@ class OptionalPart: parts(list): Parts of template. Can contain 'str', 'OptionalPart' or 'FormattingPart'. """ + def __init__(self, parts): self._parts = parts From 09e68b5a257916e07fcd8824fb0695b6e032a856 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 3 Aug 2022 18:30:25 +0200 Subject: [PATCH 0175/2550] use StringTemplate in integrate ftrack note --- .../plugins/publish/integrate_ftrack_note.py | 54 ++++++++++++------- 1 file changed, 34 insertions(+), 20 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 77a7ebdfcf..ac3fa874e0 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -9,9 +9,11 @@ Requires: """ import sys +import copy import six import pyblish.api +from openpype.lib import StringTemplate class IntegrateFtrackNote(pyblish.api.InstancePlugin): @@ -53,14 +55,10 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): intent = instance.context.data.get("intent") intent_label = None - if intent and isinstance(intent, dict): - intent_val = intent.get("value") - intent_label = intent.get("label") - else: - intent_val = intent - - if not intent_label: - intent_label = intent_val or "" + if intent: + value = intent["value"] + if value: + intent_label = intent["label"] or value # if intent label is set then format comment # - it is possible that intent_label is equal to "" (empty string) @@ -96,6 +94,14 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): labels.append(label) + base_format_data = { + "host_name": host_name, + "app_name": app_name, + "app_label": app_label, + "source": instance.data.get("source", '') + } + if comment: + base_format_data["comment"] = comment for asset_version_data in asset_versions_data_by_id.values(): asset_version = asset_version_data["asset_version"] component_items = asset_version_data["component_items"] @@ -109,23 +115,31 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): template = self.note_template if template is None: template = self.note_with_intent_template - format_data = { - "intent": intent_label, - "comment": comment, - "host_name": host_name, - "app_name": app_name, - "app_label": app_label, - "published_paths": "
    ".join(sorted(published_paths)), - "source": instance.data.get("source", '') - } - comment = template.format(**format_data) - if not comment: + format_data = copy.deepcopy(base_format_data) + format_data["published_paths"] = "
    ".join( + sorted(published_paths) + ) + if intent: + if "{intent}" in template: + format_data["intent"] = intent_label + else: + format_data["intent"] = intent + + note_text = StringTemplate.format_template(template, format_data) + if not note_text.solved: + self.log.warning(( + "Note template require more keys then can be provided." + "\nTemplate: {}\nData: {}" + ).format(template, format_data)) + continue + + if not note_text: self.log.info(( "Note for AssetVersion {} would be empty. Skipping." "\nTemplate: {}\nData: {}" ).format(asset_version["id"], template, format_data)) continue - asset_version.create_note(comment, author=user, labels=labels) + asset_version.create_note(note_text, author=user, labels=labels) try: session.commit() From 3137644299e4ade30ff8e9fe1184cf0430e3a925 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 11:07:29 +0200 Subject: [PATCH 0176/2550] :recycle: change macos installer --- setup.py | 2 +- tools/build.sh | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 8b5a545c16..eab0187983 100644 --- a/setup.py +++ b/setup.py @@ -152,7 +152,7 @@ build_exe_options = dict( ) bdist_mac_options = dict( - bundle_name="OpenPype", + bundle_name=f"OpenPype {__version__}", iconfile=mac_icon_path ) diff --git a/tools/build.sh b/tools/build.sh index 79fb748cd5..fa2c580648 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -193,15 +193,15 @@ if [ "$disable_submodule_update" == 1 ]; then if [[ "$OSTYPE" == "darwin"* ]]; then # fix code signing issue - codesign --remove-signature "$openpype_root/build/OpenPype.app/Contents/MacOS/lib/Python" + codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/Python" if command -v create-dmg > /dev/null 2>&1; then create-dmg \ - --volname "OpenPype Installer" \ + --volname "OpenPype $openpype_version Installer" \ --window-pos 200 120 \ --window-size 600 300 \ --app-drop-link 100 50 \ - "$openpype_root/build/OpenPype-Installer.dmg" \ - "$openpype_root/build/OpenPype.app" + "$openpype_root/build/OpenPype-Installer-$openpype_version.dmg" \ + "$openpype_root/build/OpenPype $openpype_version.app" else echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available." 
fi From 633c7a5cde89a27c69ad24108ef802c66da02c41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 11:26:33 +0200 Subject: [PATCH 0177/2550] :hammer: add more verbose info to Deadline --- .../repository/custom/plugins/GlobalJobPreLoad.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 5e923eb09a..793ee782f4 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -63,7 +63,7 @@ def inject_openpype_environment(deadlinePlugin): print(("Scanning for compatible requested " f"version {requested_version}")) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) - if dir: + if install_dir: sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -72,6 +72,7 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path(subdir) if not version: continue + print(f" - found: {version} - {subdir}") openpype_versions.append((version, subdir)) exe = FileUtils.SearchFileList(exe_list) @@ -81,12 +82,15 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path( os.path.dirname(exe)) if version: + print(f" - found: {version} - {os.path.dirname(exe)}") openpype_versions.append((version, os.path.dirname(exe))) if requested_version: # sort detected versions if openpype_versions: openpype_versions.sort(key=lambda ver: ver[0]) + print(("Latest available version found is " + f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -102,6 +106,8 @@ def inject_openpype_environment(deadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort(key=lambda ver: ver[0]) + print(("Latest compatible version found is " + f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let # Deadline decide. 
exe_list = [ From b9703f3fda15a9999edba3ce4be1bae43f74913a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 11:43:48 +0200 Subject: [PATCH 0178/2550] :bug: fix inverted condition --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 2 +- .../deadline/repository/custom/plugins/OpenPype/OpenPype.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 793ee782f4..e0fd22e218 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -23,7 +23,7 @@ def get_openpype_version_from_path(path, build=True): if not os.path.isfile(version_file): return None # skip if the version is not build - if not build and \ + if build and \ (not os.path.isfile(os.path.join(path, "openpype_console")) or not os.path.isfile(os.path.join(path, "openpype_console.exe"))): return None diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 79101bb90c..3eba347770 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -74,7 +74,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): if not os.path.isfile(version_file): return None # skip if the version is not build - if not build and \ + if build and \ (not os.path.isfile(os.path.join(path, "openpype_console")) or not os.path.isfile(os.path.join(path, "openpype_console.exe"))): # noqa: E501 return None From b65a360ca6415269fcd90a0ab1385be87ad8bb0b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 12:25:08 +0200 Subject: [PATCH 0179/2550] fix types in default settings --- openpype/settings/defaults/project_settings/maya.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index b98506f6a8..d52dd407f2 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -42,14 +42,14 @@ "multilayer_exr": true, "tiled": true, "aov_list": [], - "additional_options": {} + "additional_options": [] }, "vray_renderer": { "image_prefix": "maya///", "engine": "1", "image_format": "png", "aov_list": [], - "additional_options": {} + "additional_options": [] }, "redshift_renderer": { "image_prefix": "maya///", @@ -59,7 +59,7 @@ "multilayer_exr": true, "force_combine": true, "aov_list": [], - "additional_options": {} + "additional_options": [] } }, "create": { From a32ca255f6edd3c1c3f0b47c212a035e6b169792 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 12:25:30 +0200 Subject: [PATCH 0180/2550] resave settings to match formattings --- .../defaults/project_settings/maya.json | 31 +++++++++---------- .../project_settings/traypublisher.json | 8 +++-- 2 files changed, 21 insertions(+), 18 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index d52dd407f2..ac0f161cf2 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -99,6 +99,20 @@ "enabled": true, "publish_mip_map": true }, 
+ "CreateAnimation": { + "enabled": true, + "write_color_sets": false, + "defaults": [ + "Main" + ] + }, + "CreatePointCache": { + "enabled": true, + "write_color_sets": false, + "defaults": [ + "Main" + ] + }, "CreateMultiverseUsd": { "enabled": true, "defaults": [ @@ -117,14 +131,6 @@ "Main" ] }, - "CreateAnimation": { - "enabled": true, - "write_color_sets": false, - "defaults": [ - "Main" - ] - - }, "CreateAss": { "enabled": true, "defaults": [ @@ -163,13 +169,6 @@ "Sculpt" ] }, - "CreatePointCache": { - "enabled": true, - "write_color_sets": false, - "defaults": [ - "Main" - ] - }, "CreateRenderSetup": { "enabled": true, "defaults": [ @@ -977,4 +976,4 @@ "ValidateNoAnimation": false } } -} +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 2cb7d358ed..5db2a79772 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -294,8 +294,12 @@ } }, "BatchMovieCreator": { - "default_variants": ["Main"], - "default_tasks": ["Compositing"], + "default_variants": [ + "Main" + ], + "default_tasks": [ + "Compositing" + ], "extensions": [ ".mov" ] From 03c648c8fd897ab374752eea1175f6c67b281afe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 13:08:51 +0200 Subject: [PATCH 0181/2550] :bug: fix executable detection on platforms --- .../custom/plugins/GlobalJobPreLoad.py | 24 ++++++++++++++----- .../custom/plugins/OpenPype/OpenPype.py | 17 ++++++++++--- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index e0fd22e218..2972eeec40 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -19,14 +19,24 @@ def get_openpype_version_from_path(path, build=True): str or None: version of OpenPype if found. """ + # fix path for application bundle on macos + if platform.system().lower() == "darwin": + path = os.path.join(path, "Contents", "MacOS", "lib", "Python") + version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build - if build and \ - (not os.path.isfile(os.path.join(path, "openpype_console")) or - not os.path.isfile(os.path.join(path, "openpype_console.exe"))): + exe = os.path.join(path, "openpype_console.exe") + if platform.system().lower() in ["linux", "darwin"]: + exe = os.path.join(path, "openpype_console") + + # if only builds are requested + if build and not os.path.isfile(exe): # noqa: E501 + print(f" ! path is not a build: {path}") return None + version = {} with open(version_file, "r") as vf: exec(vf.read(), version) @@ -64,6 +74,7 @@ def inject_openpype_environment(deadlinePlugin): f"version {requested_version}")) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) if install_dir: + print(f"Looking for OpenPype at: {install_dir}") sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -79,6 +90,7 @@ def inject_openpype_environment(deadlinePlugin): if openpype_versions: # if looking for requested compatible version, # add the implicitly specified to the list too. 
+ print(f"Looking for OpenPype at: {os.path.dirname(exe)}") version = get_openpype_version_from_path( os.path.dirname(exe)) if version: @@ -89,8 +101,8 @@ def inject_openpype_environment(deadlinePlugin): # sort detected versions if openpype_versions: openpype_versions.sort(key=lambda ver: ver[0]) - print(("Latest available version found is " - f"{openpype_versions[-1][0]}")) + print(("Latest available version found is " + f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -166,7 +178,7 @@ def inject_openpype_environment(deadlinePlugin): env["OPENPYPE_HEADLESS_MODE"] = "1" env["AVALON_TIMEOUT"] = "5000" - print(">>> Executing: {}".format(args)) + print(">>> Executing: {}".format(" ".join(args))) std_output = subprocess.check_output(args, cwd=os.path.dirname(exe), env=env) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 3eba347770..aa3ddc7088 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -13,6 +13,7 @@ from Deadline.Scripting import ( import re import os +import platform ###################################################################### @@ -70,14 +71,24 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): str or None: version of OpenPype if found. """ + # fix path for application bundle on macos + if platform.system().lower() == "darwin": + path = os.path.join(path, "Contents", "MacOS", "lib", "Python") + version_file = os.path.join(path, "openpype", "version.py") if not os.path.isfile(version_file): return None + # skip if the version is not build - if build and \ - (not os.path.isfile(os.path.join(path, "openpype_console")) or - not os.path.isfile(os.path.join(path, "openpype_console.exe"))): # noqa: E501 + exe = os.path.join(path, "openpype_console.exe") + if platform.system().lower() in ["linux", "darwin"]: + exe = os.path.join(path, "openpype_console") + + # if only builds are requested + if build and not os.path.isfile(exe): # noqa: E501 + print(f" ! path is not a build: {path}") return None + version = {} with open(version_file, "r") as vf: exec(vf.read(), version) From 53877ebe96114f3a38e428c502d05ce72ec4dc46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 13:25:56 +0200 Subject: [PATCH 0182/2550] :rotating_light: unify output messages --- .../repository/custom/plugins/GlobalJobPreLoad.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 2972eeec40..b8a31e01ff 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -70,11 +70,11 @@ def inject_openpype_environment(deadlinePlugin): # lets go over all available and find compatible build. 
requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print(("Scanning for compatible requested " + print((">>> Scanning for compatible requested " f"version {requested_version}")) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) if install_dir: - print(f"Looking for OpenPype at: {install_dir}") + print(f"--- Looking for OpenPype at: {install_dir}") sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -83,7 +83,7 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path(subdir) if not version: continue - print(f" - found: {version} - {subdir}") + print(f" - found: {version} - {subdir}") openpype_versions.append((version, subdir)) exe = FileUtils.SearchFileList(exe_list) @@ -94,14 +94,14 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path( os.path.dirname(exe)) if version: - print(f" - found: {version} - {os.path.dirname(exe)}") + print(f" - found: {version} - {os.path.dirname(exe)}") openpype_versions.append((version, os.path.dirname(exe))) if requested_version: # sort detected versions if openpype_versions: openpype_versions.sort(key=lambda ver: ver[0]) - print(("Latest available version found is " + print(("*** Latest available version found is " f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] @@ -118,7 +118,7 @@ def inject_openpype_environment(deadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort(key=lambda ver: ver[0]) - print(("Latest compatible version found is " + print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let # Deadline decide. From 97f0d581b8c390711d9c7665133824d06eb2dfd2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 14:23:11 +0200 Subject: [PATCH 0183/2550] Added reference keys docs --- website/docs/admin_settings_project_anatomy.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/website/docs/admin_settings_project_anatomy.md b/website/docs/admin_settings_project_anatomy.md index 106faeb806..dbc4d255af 100644 --- a/website/docs/admin_settings_project_anatomy.md +++ b/website/docs/admin_settings_project_anatomy.md @@ -100,6 +100,11 @@ We have a few required anatomy templates for OpenPype to work properly, however +### Anatomy reference keys + +Anatomy templates have ability to use "referenced keys". Best example is `path` in publish or work templates which are just referencing to `folder` and `file` (`{@folder}/{@file}`) so any changes in folder or file template are propagated to the path template. The other advantage is to simplify version and frame formatting with paddings. In default templates you can notice that keys `{@version}` or `{@frame}` are used in templates. They are referencing to `Anatomy` -> `Templates` -> `Version` or `Frame` which handle version and frame formatting with padding. + +So if you set `project_anatomy/templates/defaults/version_padding` to `5` the `{@version}` key will be transformed to `v{version:0>5}` and version number in paths will have 5 numbers -> `v00001`. 
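The padded formatting itself is plain Python string formatting, so it can be sanity-checked outside of OpenPype. A minimal sketch (the values here are arbitrary examples, not taken from any project):

```python
# "v{version:0>5}" is a standard Python format spec:
# fill with "0", right-align, minimum width of 5 characters.
template = "v{version:0>5}"

print(template.format(version=1))    # v00001
print(template.format(version=123))  # v00123
```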
## Attributes From d43a5388f746a4b4775a023e7356a9505a6c4dfd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 14:23:22 +0200 Subject: [PATCH 0184/2550] added optional keys --- website/docs/admin_settings_project_anatomy.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/website/docs/admin_settings_project_anatomy.md b/website/docs/admin_settings_project_anatomy.md index dbc4d255af..d11d651103 100644 --- a/website/docs/admin_settings_project_anatomy.md +++ b/website/docs/admin_settings_project_anatomy.md @@ -106,6 +106,12 @@ Anatomy templates have ability to use "referenced keys". Best example is `path` So if you set `project_anatomy/templates/defaults/version_padding` to `5` the `{@version}` key will be transformed to `v{version:0>5}` and version number in paths will have 5 numbers -> `v00001`. +### Optional keys + +In some cases are not all keys available and should be just ignored. For example `{frame}` should be available only for sequences but we have single publish template. To handle these cases it is possible to use optional marks which will ignore segment of template if can't be filled because of missing keys. To mark these segments use `<` and `>`. +. +Template `{project[code]}_{asset}_{subset}<_{output}><.{@frame}>.{ext}` can handle all 4 possible situations when `output` and `frame` keys are available or not. The optional segments can contain additional text, like in the example dot (`.`) for frame and underscore (`_`) for output, those are also ignored if the keys are not available. Optional segments without formatting keys are kept untouched: `
` -> stays as `
`. It is possible to nest optional segments inside optional segments `<{asset}<.{@frame}>
    >` which may result in empty string if `asset` key is not available. + ## Attributes From bfaac0c688cb9a1cff522783ac668610df1d6926 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 14:23:42 +0200 Subject: [PATCH 0185/2550] added some basic information about attributes --- website/docs/admin_settings_project_anatomy.md | 11 +++++++++++ .../docs/assets/settings/anatomy_attributes.png | Bin 0 -> 14753 bytes 2 files changed, 11 insertions(+) create mode 100644 website/docs/assets/settings/anatomy_attributes.png diff --git a/website/docs/admin_settings_project_anatomy.md b/website/docs/admin_settings_project_anatomy.md index d11d651103..ddce934f55 100644 --- a/website/docs/admin_settings_project_anatomy.md +++ b/website/docs/admin_settings_project_anatomy.md @@ -114,6 +114,17 @@ Template `{project[code]}_{asset}_{subset}<_{output}><.{@frame}>.{ext}` can hand ## Attributes +Project attributes are used as default values for new assets under project, except `Applications` and `Active project` which are project specific. Values of attributes that are **not** project specific are always used from assets. So if `tools` are not loading as expected it is because the asset have different value. + +![anatomy_attributes](assets/settings/anatomy_attributes.png) + +**Most of attributes don't need detailed explanation.** + +| Attribute | Description | +| --- | --- | +| `Applications` | List of applications that can be used in the project. At the moment only possible filter of applications. | +| `Tools` | List of application tools. This value can be overridden per asset. | +| `Active project` | Project won't be visible in tools if enabled.
    - To revert check `Show Inactive projects` checkbox in project settings. | ## Task Types diff --git a/website/docs/assets/settings/anatomy_attributes.png b/website/docs/assets/settings/anatomy_attributes.png new file mode 100644 index 0000000000000000000000000000000000000000..777b1c36acc734afdf43c22a9d250ba313094b23 GIT binary patch literal 14753 zcmeHud012Ty6&>JqIJNC3dj%(Ds}@R1qB3>tuoj;&^jIT3K1^x4!TF z-tYb1mHhn~`^{hO`VxYm&EKA|ISWCb@gZnU=lXSE%Lr!dJMiC{h_m*`pyGDTaq!3I zen(Fqg`iU6Myb~q;O`BgCtM;RXbV;GZw)=R{`Xm;15qP%V34nvRqU;Yr^q9gMZEx;SU zH1fo0%YqlgMn+u>a3SHp7Udl|3X@mmzc?TrY21Kr<2NpuvB^gAuQ6h?bHN?A7@~q% z27gI)JKtn@Ed-s?3lem{P8L~`GS|o%I%Okxd3XVWA{2(9E$`My)YC$@xy_W8sSiUqY+$v0KiFrET?0suK^|(AFMVOP? zQ@jy^HfALwpC%v)<7&-4Wm(rlnfl(b+iUT7Bk6GNF&NcjP?=LV=fEaoVJJ}mtPlU~ zF`RiJ!j|R`ri@P;EL2wMsj-uDzslqCZYdwYe|<^b)hx$C(EI6(%J{2eqoyg!qGJ_z zeNC@hyXT!noi)jO0`Kq<1|ipf6i-Fl(};}Uowxz5AB8s3m&R%6-*l{(kzbBWo;Ht! z`MzHR{eHsD`Vb>abaNf%;g{TuJqg?t(H=_q;2;cjbMci9B3~SO*6oZ+=7K3x1A;=T z!19sh@Jh!Y+-+u2vab&Kt3R#0;D!DbMw&9SLOIa%S}ywLCU>0dZFU$^g%Ya19hi8+ z(B=ys@MfkjxYN}q)s}j;-IXr7=u)cBM4!fpD>BEEQbe^4pBLbimFO}7oKq`Av%9Jf zQ2O*dEh&wS84{)n2u>dmoAJ}&s)FbB_qv%d@hWmYHFz0kar!>#1zcn+`E_cNpRKGI+hz5lMV z>n}QaSuqTw#3G&dPPY;|tldGvgk%uNIj$bc*Hh5_j)jEStA{Je878weN?$?={jDne zA9B)(9vPDrCurwX{Md#M*Fesx6auS0H13(jB0Z+}&1hSVXNry0p?RF^=g`kZf(Z!M31zDT4@2-s%^nw#d)nhb+_evsNXc9$4 zq(t1W+{V`(PDh(rr9P||?zT!RXLV@b7``N#S>{%_@ROxZ3H^OK?rX+lS!d1YmAHpWWsmO zl~1;z4e}c%DZ9!SriEt9QTpesuUziNtIXUZBe#&`BD83(s~KTPx@m-3-j0y(7uJSs z0yv#4`r=BPqiW(c%T$rfWun@_n$xx1(szczD0n$^i@V6b*K56qE%|Nu+&+$9b?{{h zfuvJCVw|QfHouN)#zRYB8VicX4|M}iYRC9|5=h@QsmWznx#}%#n660$O3PKF}*FhzLm)x}MiWxdE z)EgU&_mejd{Wxg9Zsl%XX;>~3{ieQAz1S^T`Q*$RO?^LhogXu4a-`5jd;^3x^H|${ z(uI|;P7p+U5|T{bN7zZP$SLZx-{B5pBlDQ(UD|GSNuN$&aBmIL|G`Q6t~ijF^*7@O zuYLwYeshjS{7iIl!<%7_pdJQq3ee0UPfA#WLi1YU+z?iHem~h_7o61+$ta3MUZGw# zxx;u8V}l9O!rq-?m>p(A#kgpwx z?-j|*B(Mf*VC{LV*Nd)uIQ}lI+HLDB;fXzh>CmgA?u#$M&y>07APzjVC_lXa$dsKk z@4-!?ja6S9q8TckC_0r-FL>`tK2`r7;g59J+VVe+SH=t!`44>Ij?Fw|&9Y zJ^(?F%}&ys&J-0Vy=-a-EW7}KMPtyZMO`$s_6z7m;8?O~fBWzB11$n@yHAqKXEWYH zkbxbF7UEfL)vGBPu?!fU8*?Yy0;G#W8h!@7?PWNEN3!f3Y^9yOf?^7!-KocL3>&jf zlo&P24(~*n`F&uC35|k}g|nQ?)cm@BEGm1Ds38LB{ZfbaTc621PF!dw(Rf?bT ztKd?sP&PmCOhg^*k(p6t)^z}@P?hqBH!8GjK}9C zt?GO}m-6UHZX)B!Oih%$LAW10&oj4t?{@q}QExhIG256_b)JPT1vZkq7~Ffj(aIjX zj}o26L{Hro4OkqZCG8bLj*7%w*jI4@4nv-#5no9zj<@_BuGBCa&bL1+keK#9&*W;R z-D6DD(J{2Dj!rfB0}IYbtF~N7^fvw{2)aL9U#QuhqXl*zTHQGmdDn%55<2$0B@#zK`rnD`A>!geS)y z$!!cZ+;m`gKO6x;w;kQrHkMWlTCPRGKxHoR0m}>-s4LjjNd*yj@gM(bvqSGh&2$Mu zw0l$ZVj1chxRFcuP{uXbBI!tW4dP^StSfW4?|{6o%aRwAhyo(xD7sg%6h#6$$EWVO(_iAV~Kf95CBfw`jx49~r@yc33eq{esPv088EZlV%MmIK*} z04+Uo$}L&6Wf+Dxv%)Ny58dk2@N2YZ*P-Qc^wikYX|9)i@rwLx7$z=nNY-L2==Gbwb&{r<7m&R3pOtTfGJ*e1wDw$)BH zG1k?{jVOxvYmDrNnJAVsOL5*!;JhDtY0CLAYW$_x;#A+s7Sdp;$GWIm3SzUs@p(*> zcz=C>un*fN3)p@YzGrTq^%dj#fS1dis_Y~k8rf`dX|8L7cUll=Qe#z;MY?a1DlF%$ zvsVrjjBlJ^0{X%S;8DSQq(5WXoW;1Z4y}obq~XP4rfF++JzX7F`7< zkPdSKTKt}3$5KUKUyA>@6-JV|TikjO3m)0qC+ef+ZxqGEFIhoM%I8*yiL~Ne5<%2@ z=*{SHa1HPKPF*cT=uoJEE&ho=U`iH7gZc4H0+DlQOFKjC`{3+y);)IAK3k07h{X(! 
zAKwXsGa4Gj`-U`l^I;+>-RCrh(HFadh3*PCh@hEKS7?V4c*wJlxA8So`gJMaH!r=6 zdCBk{kTl!V)8Vv=3l2L6v>_9U)v=VI|F!^*0D`}Nf@8q|hUZL73 zx290?hh|%=bA$EOs%OZ-KJYydvy_`K* zTM8W8FT)KWXkYeT&d9bblYr~LP=jv>@?1m!O2(x(1H3}V?UrWOK%vh$yF_V4!%TJ2 zEsy~JwDI}-&1H@qK}mv_N?s-9naf{-8`)VXz4dxJU=+@36}Wsy>zbQsj$WeYG2=o^ zq+XW|CG__B#~nz#}1!Hjk7lx*}v6z{OgcIHrh}rkQwcPtZka zWoLi|bEYSYG{4)qCEbrFkJbqVJT@2JxL$bcCsly# zT`+KXs)!ObvkVUZ@y>0=s17B1iYB>VpR=#I9$d)%+XR$vQcg129uqwHlLG5pKJ^cH zgT$lOtgKiQaH5u{pAwMR(#<;4bz9`ms7L{&?#|`2ZUPuzuWA2>bJX8wQbZiCWIU|L z*+YWn*aB>v+{a;k_h2jX5=hXCVK!2?ZxzvR0+Tx)QFIM27Q%xn;@IKUfUBJx%A3*S zJjhyEo??Q0v5rSwC4MQfW2?oMTy&}0DSSjH?@5s7(8dBMvohsv5L7;%A~K)%^uL{Z zr2L1-e!)uEXQfRelkQag~X*uTqzllw~Y?e-4_7og-FN)?Cn?I9t@Y$OO`e>-zm?6p8@T zqfxE$Rs!nl&k(}UHPFEZ7adCY)hO#t2WBq273pxE^!HI*+W~#w4{*lBfo30OhlqAj zw0jPxb8c4MSO;A+7VF>ugq4oGvl$9sv?$-1`s5a9m6%RUNhI7(Y`S$+2qp{+-!dV?Y&Ijt6Fla^RX2~bfU4Ad>UMM924&JIy=xPxx(Eaw9%f zR$byNuxK@PklPg8oN_R@o#uq3kK7`K6qHYTlp^cx4)?ZHlYNCIaJ4sY;kW4t4t{); zRV}&%b)0oQdik{jo9on=1kc0dXemKIpP`XVFYk$Iis@G6+nzw8w?^AbW|;WA&*Od( z?(*Qmzturd5L2h54P?ABa;28u_##MX)04CqgY5AJJI0c|f+_^4ej>^>o2h8ZA^y2`T1*qx)c-Ul$Q`Ug{gzKHI9}}> z-tx31$1Q+!hjHi}*7EfH+NO`Y(g-|>Qqet8$p`Hhiq*K!{IzmImqUD~$iM1O`ImMg zWS1?PCriU>Gs#8;PtrP;rcKiohvyOS-U|ll7Hbq&!y$;w62aD3H}Ii3+#ayNu9k`( z0GsS~J&9o$x|ReO>`La=>c|Fp6y?>YYRGw@D3i`Pl*(Id&(~=wv5za`!i<2B^*ch* z5I;8JjI^R3Swv5?+I3!Ll!L}VAy~;Z9edXmDQy6XRa@N~c5DC9&dW}#JA2zvU%F)m z-#Aw|*{_ha#@jUbZkA@R`sXcIs+{4Zo71a{Txv-zMe9saP7A^rWWwQ-H2PjszutAW zNyFpIUgP?;hsq;gNIbDSsUZYdK zON{xB&BZ>Jigb&MS-#Zwm(OsI){BOyio)!hEP4Z~^ug-t%%F-}E6o3@#aa?t!Ry_n>LnNq zFA0m~&lTE)-@arZ%=7(b)M6!4J}G?CGFyxlg?qnPXqw{Rv2t?m!JxgIjco)Yl%5Go zImAiA+$8C^^)p3_dc;Iu`(9^mY7^E{7eOgrJWal{ze8v~bT!zBCOoI*wyPm&MnYuh zw_h0c4nxIQwx@Rq{w#S)6q!LC&^?rFq<6=%l8rLU&cn2HacYg!{ctTyoV6yh+AczF z%F&QSH;X@NY6;E4b~Z%I^OZNhUbaulPL>;BmL>{jm+U>rKsSf+&s}YEFF4}&vwb;Z zw1fI{aoc^+C2!=bb=D994d#A`pp7(zSavZlRbW%fRoqad5xBwx=a-%8sW?guZpz}qB)V3Z{uT;xnUu8kr-@A$Ygf#FX6t{!+J5xoz6?z z;u$2IKOJG(Y_HJPROA!cp&|9tBeP4yUT`_5bT}3}mz0WN(?!22LA&ii`+Nlc{F@?@AcEGRHGNvqtvPEwF0#RR z4FKHm^K0<_UKNI`ThHdl>b~*bQ{WxOY9|oMh1j}d4MrSgG&0UZc|f3tyh%bWd%Cox2IY3a-kRm=FC0Ma-z&19R8@PuP%A>fxbxXozey%WWjpORq1wh~|16 z*z?3%&$ZA`x*X-N?J$QXhu_uY>`RQX<^&wgZH_rW8%auVr&TRZ-xB-~%b1DzL3k$< z`xYVAkrCQZy$c~LTs$1S?{V^@m~jnjlG|A_=$tp ztkcwWWvc^0pTSNWjMTcDY?3;qN}Sm@cpwq53;RLeR8g;FE2vInw^b|{rP*Q(52W;A z!hSUwF1N*478C8%xfKuQV@dqurUd;ak=mOkO4W!(A$Av|a1X-}MOQXy5^$`$ zSx)qZOu8O}z|b5%DZ{pgh9pyzrp0$iuYkQk8ELgtUAYVrJ9Cgukp-GBDpamW^jFrE zM_A5Ln$QakPFk`^oVpNN8Eci77@RufdeCVXeftD=Y6JT>SXY~*5N`qjmz&+2g6{x#`(ZP>hA*A$krDBU0(g`JAY%|_As-6ZPxpo=Epw=+ z;@d>*vGHZ4G<0q#Jgo#D1Vi3h^!iCTlP3KsMHIdh;c@_seG;nvYVyYK-eko`k=!f5 z@RvL0IfySh%H)ps3|o1(OP!BE9{x- z8}E1XcI89%GuYPnkmG!>*VYO5+TUs$-BY*22DzNF=EO0!3MyDl zlchgYXOxlx$zRNT)a)-c#cjN>n`l+fTHha8UtiG7Z(w9u-)-4zTf}GcewLxLIM`o> z@8aQfO}=*xz9h?KGOw6ubeRJO{s6-uTDkic4StyPeW8x!XOwQjqGLCZJ&*uR@9S|| zLdld#YY!cj%+~XRDuU3CKoH}{3I7IG8>@e5N#vw9|5M`?9Jq@wqO%>Nr=qe)nF^H; zbu+0d!n<5CSV_4PP-s4{O<-WRQ(F5<3XHvJESzQ@v++To zWi5f&m?Ex7jQ+&2Lfk@WRZMi=O8>n&A-PUnUVnE~kJ> z$EsU7kovr5GDYO*LuV+N2>c_Py}Mkb*Gt95MtMNF%=ez)c6`3fygnPb@YOGIGOrUt z7HZkm_-+a|gdX82F)hx#kKjx)o;C7{(Hg^akxa}plrszGdj-~47$oU-98Q21v0u~s zr@evWLc2~FRc0;br-Wi}jVJ{Elj{7-CB4T?)Ez#n%PA#)5%DPO)I(gmX=HY%+X?ay zSCXRQTYnAMOz)CBBQj2_^1*D2i4I`4d_Yguk{$!ROy%pm;O$_05Ji6*gIe=|u_N2d zUx#vdBzs-3?r(*&27Y-hrLEyV^&0=+R1N=zqvxq#!@6pPZvB2TCjF@nB?=Rtwn6A9 zd2h1B1lf5{ZN6fq$2Bzc5uAcO9mC*Q3wmHbJe`yef z1ff{~$wLzQ9Fx)kpsmmBGJRFHGZ&jkzsG%T7w%MDfF#~cx4NvrT$l}p$MbpB(DO?6 zxjiN)3{FNn_>KY*b#~{KRVt`8jSMWZjoilViM`D8HS|$#??UXmtwjs4EK^?8q6FJv 
zmf=y0TCDosP|QOhvW;D9V!BBhxJ@gwRNoX)b9mM_AMMF2=e*zTi1kF#h@83I+TQnf zlIeEo1MZGzvvaj`buBXzgCA~UT*tzhAvV!RJelW4M9hHGTSmJhVelyF8CRhri;M{= z$Go`GAoO9;T4AU}VEALkk`y)Kc5kKdW;`qGwG0TyfnS5}@UEL)BV=NaF`;ys^EM0e z6k)MdC(mYH9%|4&1w-v$Tkk6LB%S6h9}unV1T!x~r(%DHj4sV415D(_G($=e+mLMV^16^1h;ol?lYJ`W(f;CH#^OW#@g{(Dv=WshlQ=6myz#I-6b2uykXI ztxZ4R9lCCcevRn(Wd4}&v5___2EAEn$n~>;rD}BUqQCP_BF_VR=rzwgVdwB%W82#_ zVMaSnxkH^tHfK%i$mrf}^D)CG(FUI7ak)eC9arRe6TASC#KPP@s!dvx-FQcA-4MID zd+Ij%4&%ShZ}P8_R!kwAfMeph6o$(7YQHhDKXj75nTnSlX)lh+$q(NCA?#UBx&!AB z)Bdn;65~7i1pJ_Q=8S=q?)DRq87dSWRz?Nt2RORX_>QhU@5_^r2%d4VYs{L=W1 zQXdU~!9HK>QY<}9j9uEl;Z`2fsXAyC67y3f@~_*ko^;`qQm2)K_fteZZmfoGglYF_ z6@E`_@MDY!zxG|g8O8aJa0v2+?C?s8`oyRV>;3vLxae>H;FZU8aB4lu@TTw2_!36~ z!?~@IPCqqI*QkFStGuE%PLav&oS?R^+)u8$&$7Uf>Q;q<1_e-ZMe-HU*=-D11s%wAjmIQQNsW zw-u1L35H+4!uM9@!K@$u?4DPqD$zPPPla2}k4UBZT?jP-{acbM@=CX^3=CQUd;E&S zeGaT878~JmH{yqEFJ$8Kw9Xs~x5M-9yZt0nW=?X*?F)2GT3npb>&2w1BuS>cV0S-4 z+PH3+i{Vcl?K7;hzcW*TO`907PzXoHHd1%HFb(g@ob-^y5ZLqAm0mthC!P1D#eg`j zPA;FQg6nbLw9$m5^7>4EL)1`!;E1QbdbW*XYL`eL*&BK$JwzW|JB-n)pw^)4N4O#{ zucsAL{QF<)BGBy%;s@x{#INx)B&@@VIch6E;)>z98EDcG+RX}>eSu<-E)(Z?dtQ}$ZUSNw-ksa0x!WFq=;)R+g1wc~YK{{s6N z#v0gV2J!9T^5%wUJ1Mh`A64kB7+H5tPCos14ZI-=#=14|#{99jXlFVG*$&#&2R@Ct zk0a{!IfaZK<22>XBSA4KjMt~))Q9u=Q&nBvLmgk`Dc+@gDVqj@3T&VFE9(e zwo;!5v$_7#w!Mn$8!7SF%K>g_)zwJBvH$#-=e^>4JWh)URE%9Nfu=BRHDWqS|9JQo zhgTn(6@A6C{r8cVEvzCZ_C&b#D`srtTKhOoz?G3UJ-j;e7Q75ro5HBcddACFXKbw+ zeB<|3My&M#*>E4UV$z#~m0vQK7fg;GSAt&f-B^@e?Sjw2nEM}0?|<+8Sb0w+FRMam zbQApk<;I!yP(prHX@#3?I#1NYS{-O{<&lOt8knD!slZHs%4l6&uw)OP%vFwFeA765 z=Z~=#6P%vi5h-c8VaOTzN3v~xzN$Jmmn(UhmCILFPvJed zGOrY429BAnO%|ECI)&1wu_g=H5kUkq{_d1hY;4^Cc}3w$B;fN$b!5IZLqMR&9to-w zlJp)T{H-jw({vNV&Og?kb>#tp1*U8i;B1eYHuJBq2$4^rzgtZNeEww}ge5v}3T%&R z7sXfK-Iv?YvGUFI$)|W3S2Z_Fa!aIQsKQSHhKuPg&%`dp(*6;yCupQAHSdGz(M!a0 z=w2Fw-u1rGCA*XetcGAc8H@{eK0|Ed>$%FGEs5MNySDg=o9{$4KQik<^v`#%B-Z`z z3)nTl>$mDmTnVyOH^nBPJN(bl^9OGIcOe*umt1wWuDXX+comsmIyms^Lth`WP2p;O z;sT#aeA%AFM&%;1SGE9nM!|F;+uh-X zJqTzhBNfE?|L8ON^PMR0-o@mU#FG6EzeKv3Eg!*&81i{PHcvL%IKJ>6kKZPnmMuB@qvf0zd)aJKh||TR_dBWmvl?gKHhF2lOuWLX{zMlC?9D)GlT~T` z7Pct9xXe6b|rm1W%xr=3FOMn%aBAalhvOGeR=4Xef%l73H{Iw^n9OyO5 zl}_H{NeL9mZ^WoZ-opL0H5WUQS;y|A2o8kFhXf!jhPT;cHjVtymtivJzc6;)>Z(%o zw%@~=ts?Tg_L#N1*u~@?Jv_C3XxEc@8DyF#x0U|6EJ`XoihxfF@Cg6 zn|@_*&12&SuZw;iT}S*zvXXmSW2UQ{5B)N=e3+-d^x|C<+@yhL6#z2m4CPUpD0;u@ z)5|(V>H=}tFbsKw_t9#+gWA9F!Bw}sBc4HadczE{rwNDYr+AhfRb3F2gE_g2Z+tbv zgpnU)(ssh^mv~H|FWP>$S(_%mUvgS_5;s#ve87zHlp=)wvbX0xpusPB5+9|CCg9Hx zSHW*wl@H8{OWS_aHgt&V&gq#!_;t0yNER`1q>;%2`ob{XfLAvdh%Xi3C&9}(gWfMB zDMcVkD(=EicG13u@mPy*H?f+-?^HMB6ln(;Ha(cHe!j0bowPvo7 Date: Thu, 4 Aug 2022 14:24:01 +0200 Subject: [PATCH 0186/2550] at least a little bit enhanced task types --- website/docs/admin_settings_project_anatomy.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/admin_settings_project_anatomy.md b/website/docs/admin_settings_project_anatomy.md index ddce934f55..361710fc9c 100644 --- a/website/docs/admin_settings_project_anatomy.md +++ b/website/docs/admin_settings_project_anatomy.md @@ -129,7 +129,7 @@ Project attributes are used as default values for new assets under project, exce ## Task Types -Current state of default Task descriptors. +Available task types on a project. Each task on an asset is referencing to a task type on project which allows access to task type attributes. At this moment only `short_name` is available (can be used in templates as `{task[short_name]}`). 
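The `{task[short_name]}` syntax is ordinary Python nested-key formatting; a minimal sketch with made-up task data (not from any real project) shows how it resolves:

```python
# Hypothetical fill data -- only the nested-key access syntax matters here.
fill_data = {"task": {"name": "modeling", "short_name": "mdl"}}

print("{task[name]}_{task[short_name]}".format(**fill_data))  # modeling_mdl
```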
![tasks](assets/settings/anatomy_tasks.png) From 097638c9e54c6fd6cd02d88d456e820c72c6a9fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 14:35:01 +0200 Subject: [PATCH 0187/2550] :recycle: natural sort versions --- .../repository/custom/plugins/GlobalJobPreLoad.py | 13 +++++++++++-- .../repository/custom/plugins/OpenPype/OpenPype.py | 12 ++++++++++-- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index b8a31e01ff..17f911a686 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -100,7 +100,12 @@ def inject_openpype_environment(deadlinePlugin): if requested_version: # sort detected versions if openpype_versions: - openpype_versions.sort(key=lambda ver: ver[0]) + # use natural sorting + openpype_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) print(("*** Latest available version found is " f"{openpype_versions[-1][0]}")) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 @@ -117,7 +122,11 @@ def inject_openpype_environment(deadlinePlugin): "in Deadline or install it to configured " "directory.").format(requested_version)) # sort compatible versions nad pick the last one - compatible_versions.sort(key=lambda ver: ver[0]) + compatible_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index aa3ddc7088..d270a1b87e 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -132,7 +132,11 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): if requested_version: # sort detected versions if openpype_versions: - openpype_versions.sort(key=lambda ver: ver[0]) + openpype_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -146,7 +150,11 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): "in Deadline or install it to configured " "directory.").format(requested_version)) # sort compatible versions nad pick the last one - compatible_versions.sort(key=lambda ver: ver[0]) + compatible_versions.sort( + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split('(\d+)', ver[0]) + ]) # create list of executables for different platform and let # Deadline decide. 
exe_list = [ From 7de8c3394a0aa3ed5dadb6fb78e4b217956509bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 14:38:57 +0200 Subject: [PATCH 0188/2550] :rotating_light: fix invalid sequence warning --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 4 ++-- .../deadline/repository/custom/plugins/OpenPype/OpenPype.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 17f911a686..ae5f2e5914 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -104,7 +104,7 @@ def inject_openpype_environment(deadlinePlugin): openpype_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) print(("*** Latest available version found is " f"{openpype_versions[-1][0]}")) @@ -125,7 +125,7 @@ def inject_openpype_environment(deadlinePlugin): compatible_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index d270a1b87e..00292ed5a9 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -135,7 +135,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): openpype_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] @@ -153,7 +153,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): compatible_versions.sort( key=lambda ver: [ int(t) if t.isdigit() else t.lower() - for t in re.split('(\d+)', ver[0]) + for t in re.split(r"(\d+)", ver[0]) ]) # create list of executables for different platform and let # Deadline decide. 
From 52eba357d6c1eeae3b2b73d13ec99140a8801a9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 14:46:24 +0200 Subject: [PATCH 0189/2550] :rotating_light: fix hound :dog: --- .../repository/custom/plugins/GlobalJobPreLoad.py | 8 ++++---- .../repository/custom/plugins/OpenPype/OpenPype.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index ae5f2e5914..172649c951 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -123,10 +123,10 @@ def inject_openpype_environment(deadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) print(("*** Latest compatible version found is " f"{compatible_versions[-1][0]}")) # create list of executables for different platform and let diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py index 00292ed5a9..6b0f69d98f 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPype/OpenPype.py @@ -151,10 +151,10 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin): "directory.").format(requested_version)) # sort compatible versions nad pick the last one compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) + key=lambda ver: [ + int(t) if t.isdigit() else t.lower() + for t in re.split(r"(\d+)", ver[0]) + ]) # create list of executables for different platform and let # Deadline decide. 
exe_list = [ From bfa906eb62043decb0c55549fbc678575384c052 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:35:09 +0200 Subject: [PATCH 0190/2550] OP-3698 - added profile to Webpublisher settings for timeouts Currently applicable only to PS --- .../project_settings/webpublisher.json | 9 ++++++ .../schema_project_webpublisher.json | 32 +++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index 77168c25e6..cba472514e 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -1,4 +1,13 @@ { + "timeout_profiles": [ + { + "hosts": [ + "photoshop" + ], + "task_types": [], + "timeout": 600 + } + ], "publish": { "CollectPublishedFiles": { "task_type_to_family": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index b76a0fa844..2ef7a05b21 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -5,6 +5,38 @@ "label": "Web Publisher", "is_file": true, "children": [ + { + "type": "list", + "collapsible": true, + "use_label_wrap": true, + "key": "timeout_profiles", + "label": "Timeout profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "hosts", + "label": "Host names", + "type": "hosts-enum", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum", + "multiselection": true + }, + { + "type": "separator" + }, + { + "type": "number", + "key": "timeout", + "label": "Timeout (sec)" + } + ] + } + }, { "type": "dict", "collapsible": true, From c05f893333aed9a3a1638a097b15d682b886bb3d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:36:16 +0200 Subject: [PATCH 0191/2550] OP-3698 - implemented timout or Webpublisher's PS processing --- openpype/lib/remote_publish.py | 29 +++++++++++++++++++++-------- openpype/pype_commands.py | 21 +++++++++++++++++++-- 2 files changed, 40 insertions(+), 10 deletions(-) diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index 38c6b07c5b..9409b72e39 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -1,4 +1,5 @@ import os +import sys from datetime import datetime import collections @@ -9,6 +10,8 @@ import pyblish.api from openpype.client.mongo import OpenPypeMongoConnection from openpype.lib.plugin_tools import parse_json +from openpype.lib.profiles_filtering import filter_profiles +from openpype.api import get_project_settings ERROR_STATUS = "error" IN_PROGRESS_STATUS = "in_progress" @@ -175,14 +178,8 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): ) -def fail_batch(_id, batches_in_progress, dbcon): - """Set current batch as failed as there are some stuck batches.""" - running_batches = [str(batch["_id"]) - for batch in batches_in_progress - if batch["_id"] != _id] - msg = "There are still running batches {}\n". 
\ - format("\n".join(running_batches)) - msg += "Ask admin to check them and reprocess current batch" +def fail_batch(_id, dbcon, msg): + """Set current batch as failed as there is some problem.""" dbcon.update_one( {"_id": _id}, {"$set": @@ -259,3 +256,19 @@ def get_task_data(batch_dir): "Cannot parse batch meta in {} folder".format(task_data)) return task_data + + +def get_timeout(project_name, host_name, task_type): + """Returns timeout(seconds) from Setting profile.""" + filter_data = { + "task_types": task_type, + "hosts": host_name + } + timeout_profiles = (get_project_settings(project_name)["webpublisher"] + ["timeout_profiles"]) + matching_item = filter_profiles(timeout_profiles, filter_data) + timeout = sys.maxsize + if matching_item: + timeout = matching_item["timeout"] + + return timeout diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 124eacbe39..0e217ad8a1 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -15,6 +15,7 @@ from openpype.lib.remote_publish import ( fail_batch, find_variant_key, get_task_data, + get_timeout, IN_PROGRESS_STATUS ) @@ -222,10 +223,17 @@ class PypeCommands: batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) if len(batches_in_progress) > 1: - fail_batch(_id, batches_in_progress, dbcon) + running_batches = [str(batch["_id"]) + for batch in batches_in_progress + if batch["_id"] != _id] + msg = "There are still running batches {}\n". \ + format("\n".join(running_batches)) + msg += "Ask admin to check them and reprocess current batch" + fail_batch(_id, dbcon, msg) print("Another batch running, probably stuck, ask admin for help") - asset, task_name, _ = get_batch_asset_task_info(task_data["context"]) + asset, task_name, task_type = get_batch_asset_task_info( + task_data["context"]) application_manager = ApplicationManager() found_variant_key = find_variant_key(application_manager, host_name) @@ -269,8 +277,17 @@ class PypeCommands: launched_app = application_manager.launch(app_name, **data) + timeout = get_timeout(project, host_name, task_type) + + time_start = time.time() while launched_app.poll() is None: time.sleep(0.5) + if time.time() - time_start > timeout: + launched_app.terminate() + msg = "Timeout reached" + fail_batch(_id, dbcon, msg) + raise ValueError("Timeout reached") + @staticmethod def remotepublish(project, batch_path, user_email, targets=None): From e48eea04e6785a5ca96627bd32d60d5b2f3dbf90 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:38:10 +0200 Subject: [PATCH 0192/2550] OP-3698 - refactor - renamed variables --- openpype/pype_commands.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 0e217ad8a1..c18ca218c6 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -171,7 +171,7 @@ class PypeCommands: log.info("Publish finished.") @staticmethod - def remotepublishfromapp(project, batch_path, host_name, + def remotepublishfromapp(project_name, batch_path, host_name, user_email, targets=None): """Opens installed variant of 'host' and run remote publish there. @@ -190,8 +190,8 @@ class PypeCommands: Runs publish process as user would, in automatic fashion. Args: - project (str): project to publish (only single context is expected - per call of remotepublish + project_name (str): project to publish (only single context is + expected per call of remotepublish batch_path (str): Path batch folder. 
Contains subfolders with resources (workfile, another subfolder 'renders' etc.) host_name (str): 'photoshop' @@ -232,7 +232,7 @@ class PypeCommands: fail_batch(_id, dbcon, msg) print("Another batch running, probably stuck, ask admin for help") - asset, task_name, task_type = get_batch_asset_task_info( + asset_name, task_name, task_type = get_batch_asset_task_info( task_data["context"]) application_manager = ApplicationManager() @@ -241,8 +241,8 @@ class PypeCommands: # must have for proper launch of app env = get_app_environments_for_context( - project, - asset, + project_name, + asset_name, task_name, app_name ) @@ -270,14 +270,14 @@ class PypeCommands: data = { "last_workfile_path": workfile_path, "start_last_workfile": True, - "project_name": project, - "asset_name": asset, + "project_name": project_name, + "asset_name": asset_name, "task_name": task_name } launched_app = application_manager.launch(app_name, **data) - timeout = get_timeout(project, host_name, task_type) + timeout = get_timeout(project_name, host_name, task_type) time_start = time.time() while launched_app.poll() is None: From f6899fad62aa430eb1d36e18f2e170d8aba9e25e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 4 Aug 2022 15:40:47 +0200 Subject: [PATCH 0193/2550] OP-3698 - updated docstring Removed raise, already in function Added default to 1 hour --- openpype/lib/remote_publish.py | 9 ++++++--- openpype/pype_commands.py | 2 -- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index 9409b72e39..b4b05c053b 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -1,5 +1,4 @@ import os -import sys from datetime import datetime import collections @@ -179,7 +178,11 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): def fail_batch(_id, dbcon, msg): - """Set current batch as failed as there is some problem.""" + """Set current batch as failed as there is some problem. 
+ + Raises: + ValueError + """ dbcon.update_one( {"_id": _id}, {"$set": @@ -267,7 +270,7 @@ def get_timeout(project_name, host_name, task_type): timeout_profiles = (get_project_settings(project_name)["webpublisher"] ["timeout_profiles"]) matching_item = filter_profiles(timeout_profiles, filter_data) - timeout = sys.maxsize + timeout = 3600 if matching_item: timeout = matching_item["timeout"] diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index c18ca218c6..a447aa916b 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -286,8 +286,6 @@ class PypeCommands: launched_app.terminate() msg = "Timeout reached" fail_batch(_id, dbcon, msg) - raise ValueError("Timeout reached") - @staticmethod def remotepublish(project, batch_path, user_email, targets=None): From 7f6e6649cd217997bea383bdbf1a351362717bec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 17:04:53 +0200 Subject: [PATCH 0194/2550] let ffmpeg handle scales by forcing original aspect ratio --- openpype/plugins/publish/extract_review.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 533a87acb4..fe5d34b1a1 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1390,9 +1390,11 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("height_half_pad: `{}`".format(height_half_pad)) filters.extend([ - "scale={}x{}:flags=lanczos".format( - width_scale, height_scale - ), + ( + "scale={}x{}" + ":flags=lanczos" + ":force_original_aspect_ratio=decrease" + ).format(output_width, output_height), "pad={}:{}:{}:{}:{}".format( output_width, output_height, width_half_pad, height_half_pad, From a0fed43787fab4b945ea850235dde2270d0203b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 17:07:54 +0200 Subject: [PATCH 0195/2550] don't even calculate the padded part --- openpype/plugins/publish/extract_review.py | 23 +--------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index fe5d34b1a1..7442d3aacb 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1369,35 +1369,14 @@ class ExtractReview(pyblish.api.InstancePlugin): or input_width != output_width or pixel_aspect != 1 ): - if input_res_ratio < output_res_ratio: - self.log.debug( - "Input's resolution ratio is lower then output's" - ) - width_scale = int(input_width * scale_factor_by_height) - width_half_pad = int((output_width - width_scale) / 2) - height_scale = output_height - height_half_pad = 0 - else: - self.log.debug("Input is heigher then output") - width_scale = output_width - width_half_pad = 0 - height_scale = int(input_height * scale_factor_by_width) - height_half_pad = int((output_height - height_scale) / 2) - - self.log.debug("width_scale: `{}`".format(width_scale)) - self.log.debug("width_half_pad: `{}`".format(width_half_pad)) - self.log.debug("height_scale: `{}`".format(height_scale)) - self.log.debug("height_half_pad: `{}`".format(height_half_pad)) - filters.extend([ ( "scale={}x{}" ":flags=lanczos" ":force_original_aspect_ratio=decrease" ).format(output_width, output_height), - "pad={}:{}:{}:{}:{}".format( + "pad={}:{}:(ow-iw)/2:(oh-ih)/2:{}".format( output_width, output_height, - width_half_pad, height_half_pad, overscan_color_value ), "setsar=1" From 
b7c377e42288f0c7cdab55dd5d0ce6ac6e46499d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 18:07:01 +0200 Subject: [PATCH 0196/2550] handle create, update and delete operations properly --- .../event_push_frame_values_to_task.py | 57 ++++++++++++++++--- 1 file changed, 48 insertions(+), 9 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 0914933de4..0895967fb1 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -380,33 +380,49 @@ class PushFrameValuesToTaskEvent(BaseEvent): uncommited_changes = False for idx, item in enumerate(changes): new_value = item["new_value"] + old_value = item["old_value"] attr_id = item["attr_id"] entity_id = item["entity_id"] attr_key = item["attr_key"] - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = attr_id - entity_key["entity_id"] = entity_id + entity_key = collections.OrderedDict(( + ("configuration_id", attr_id), + ("entity_id", entity_id) + )) self._cached_changes.append({ "attr_key": attr_key, "entity_id": entity_id, "value": new_value, "time": datetime.datetime.now() }) + old_value_is_set = ( + old_value is not ftrack_api.symbol.NOT_SET + and old_value is not None + ) if new_value is None: + if not old_value_is_set: + continue op = ftrack_api.operation.DeleteEntityOperation( "CustomAttributeValue", entity_key ) - else: + + elif old_value_is_set: op = ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", + "CustomAttributeValue", entity_key, "value", - ftrack_api.symbol.NOT_SET, + old_value, new_value ) + else: + op = ftrack_api.operation.CreateEntityOperation( + "CustomAttributeValue", + entity_key, + {"value": new_value} + ) + session.recorded_operations.push(op) self.log.info(( "Changing Custom Attribute \"{}\" to value" @@ -550,7 +566,11 @@ class PushFrameValuesToTaskEvent(BaseEvent): attr_ids = set(attr_id_to_key.keys()) current_values_by_id = self.get_current_values( - session, attr_ids, entity_ids, task_entity_ids, hier_attrs + session, + attr_ids, + entity_ids, + task_entity_ids, + hier_attrs ) changes = [] @@ -567,7 +587,12 @@ class PushFrameValuesToTaskEvent(BaseEvent): # Convert new value from string new_value = values.get(attr_key) - if new_value is not None and old_value is not None: + new_value_is_valid = ( + old_value is not ftrack_api.symbol.NOT_SET + and new_value is not None + ) + + if new_value is not None and new_value_is_valid: try: new_value = type(old_value)(new_value) except Exception: @@ -581,6 +606,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): changes.append({ "new_value": new_value, "attr_id": attr_id, + "old_value": old_value, "entity_id": entity_id, "attr_key": attr_key }) @@ -645,15 +671,28 @@ class PushFrameValuesToTaskEvent(BaseEvent): return interesting_data, changed_keys_by_object_id def get_current_values( - self, session, attr_ids, entity_ids, task_entity_ids, hier_attrs + self, + session, + attr_ids, + entity_ids, + task_entity_ids, + hier_attrs ): current_values_by_id = {} if not attr_ids or not entity_ids: return current_values_by_id + for entity_id in entity_ids: + current_values_by_id[entity_id] = {} + for attr_id in attr_ids: + current_values_by_id[entity_id][attr_id] = ( + ftrack_api.symbol.NOT_SET + ) + values = query_custom_attributes( session, attr_ids, entity_ids, True 
) + for item in values: entity_id = item["entity_id"] attr_id = item["configuration_id"] From 7e2f7efa64b7b7869f97a86f065532748582770e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 18:07:07 +0200 Subject: [PATCH 0197/2550] handle new added entities --- .../event_push_frame_values_to_task.py | 181 ++++++++++++++++-- 1 file changed, 166 insertions(+), 15 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 0895967fb1..dc76920a57 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -1,10 +1,11 @@ import collections import datetime +import copy import ftrack_api from openpype_modules.ftrack.lib import ( BaseEvent, - query_custom_attributes + query_custom_attributes, ) @@ -124,10 +125,15 @@ class PushFrameValuesToTaskEvent(BaseEvent): # Separate value changes and task parent changes _entities_info = [] + added_entities = [] + added_entity_ids = set() task_parent_changes = [] for entity_info in entities_info: if entity_info["entity_type"].lower() == "task": task_parent_changes.append(entity_info) + elif entity_info.get("action") == "add": + added_entities.append(entity_info) + added_entity_ids.add(entity_info["entityId"]) else: _entities_info.append(entity_info) entities_info = _entities_info @@ -136,6 +142,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): interesting_data, changed_keys_by_object_id = self.filter_changes( session, event, entities_info, interest_attributes ) + self.interesting_data_for_added( + session, + added_entities, + interest_attributes, + interesting_data, + changed_keys_by_object_id + ) if not interesting_data and not task_parent_changes: return @@ -151,9 +164,13 @@ class PushFrameValuesToTaskEvent(BaseEvent): # - it is a complex way how to find out if interesting_data: self.process_attribute_changes( - session, object_types_by_name, - interesting_data, changed_keys_by_object_id, - interest_entity_types, interest_attributes + session, + object_types_by_name, + interesting_data, + changed_keys_by_object_id, + interest_entity_types, + interest_attributes, + added_entity_ids ) if task_parent_changes: @@ -163,8 +180,12 @@ class PushFrameValuesToTaskEvent(BaseEvent): ) def process_task_parent_change( - self, session, object_types_by_name, task_parent_changes, - interest_entity_types, interest_attributes + self, + session, + object_types_by_name, + task_parent_changes, + interest_entity_types, + interest_attributes ): """Push custom attribute values if task parent has changed. @@ -176,6 +197,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): real hierarchical value and non hierarchical custom attribute value should be set to hierarchical value. """ + # Store task ids which were created or moved under parent with entity # type defined in settings (interest_entity_types). 
task_ids = set() @@ -448,9 +470,14 @@ class PushFrameValuesToTaskEvent(BaseEvent): self.log.warning("Changing of values failed.", exc_info=True) def process_attribute_changes( - self, session, object_types_by_name, - interesting_data, changed_keys_by_object_id, - interest_entity_types, interest_attributes + self, + session, + object_types_by_name, + interesting_data, + changed_keys_by_object_id, + interest_entity_types, + interest_attributes, + added_entity_ids ): # Prepare task object id task_object_id = object_types_by_name["task"]["id"] @@ -538,15 +565,26 @@ class PushFrameValuesToTaskEvent(BaseEvent): parent_id_by_task_id[task_id] = task_entity["parent_id"] self.finalize_attribute_changes( - session, interesting_data, - changed_keys, attrs_by_obj_id, hier_attrs, - task_entity_ids, parent_id_by_task_id + session, + interesting_data, + changed_keys, + attrs_by_obj_id, + hier_attrs, + task_entity_ids, + parent_id_by_task_id, + added_entity_ids ) def finalize_attribute_changes( - self, session, interesting_data, - changed_keys, attrs_by_obj_id, hier_attrs, - task_entity_ids, parent_id_by_task_id + self, + session, + interesting_data, + changed_keys, + attrs_by_obj_id, + hier_attrs, + task_entity_ids, + parent_id_by_task_id, + added_entity_ids ): attr_id_to_key = {} for attr_confs in attrs_by_obj_id.values(): @@ -580,7 +618,11 @@ class PushFrameValuesToTaskEvent(BaseEvent): parent_id = entity_id values = interesting_data[parent_id] + added_entity = entity_id in added_entity_ids for attr_id, old_value in current_values.items(): + if added_entity and attr_id in hier_attrs: + continue + attr_key = attr_id_to_key.get(attr_id) if not attr_key: continue @@ -591,6 +633,8 @@ class PushFrameValuesToTaskEvent(BaseEvent): old_value is not ftrack_api.symbol.NOT_SET and new_value is not None ) + if added_entity and not new_value_is_valid: + continue if new_value is not None and new_value_is_valid: try: @@ -625,6 +669,7 @@ class PushFrameValuesToTaskEvent(BaseEvent): interesting_data = {} changed_keys_by_object_id = {} + for entity_info in entities_info: # Care only about changes if specific keys entity_changes = {} @@ -670,6 +715,100 @@ class PushFrameValuesToTaskEvent(BaseEvent): return interesting_data, changed_keys_by_object_id + def interesting_data_for_added( + self, + session, + added_entities, + interest_attributes, + interesting_data, + changed_keys_by_object_id + ): + if not added_entities or not interest_attributes: + return + + object_type_ids = set() + entity_ids = set() + all_entity_ids = set() + object_id_by_entity_id = {} + project_id = None + entity_ids_by_parent_id = collections.defaultdict(set) + for entity_info in added_entities: + object_id = entity_info["objectTypeId"] + entity_id = entity_info["entityId"] + object_type_ids.add(object_id) + entity_ids.add(entity_id) + object_id_by_entity_id[entity_id] = object_id + + for item in entity_info["parents"]: + entity_id = item["entityId"] + all_entity_ids.add(entity_id) + parent_id = item["parentId"] + if not parent_id: + project_id = entity_id + else: + entity_ids_by_parent_id[parent_id].add(entity_id) + + hier_attrs = self.get_hierarchical_configurations( + session, interest_attributes + ) + if not hier_attrs: + return + + hier_attrs_key_by_id = { + attr_conf["id"]: attr_conf["key"] + for attr_conf in hier_attrs + } + default_values_by_key = { + attr_conf["key"]: attr_conf["default"] + for attr_conf in hier_attrs + } + + values = query_custom_attributes( + session, list(hier_attrs_key_by_id.keys()), all_entity_ids, True + ) + 
values_per_entity_id = {} + for entity_id in all_entity_ids: + values_per_entity_id[entity_id] = {} + for attr_name in interest_attributes: + values_per_entity_id[entity_id][attr_name] = None + + for item in values: + entity_id = item["entity_id"] + key = hier_attrs_key_by_id[item["configuration_id"]] + values_per_entity_id[entity_id][key] = item["value"] + + fill_queue = collections.deque() + fill_queue.append((project_id, default_values_by_key)) + while fill_queue: + item = fill_queue.popleft() + entity_id, values_by_key = item + entity_values = values_per_entity_id[entity_id] + new_values_by_key = copy.deepcopy(values_by_key) + for key, value in values_by_key.items(): + current_value = entity_values[key] + if current_value is None: + entity_values[key] = value + else: + new_values_by_key[key] = current_value + + for child_id in entity_ids_by_parent_id[entity_id]: + fill_queue.append((child_id, new_values_by_key)) + + for entity_id in entity_ids: + entity_changes = {} + for key, value in values_per_entity_id[entity_id].items(): + if value is not None: + entity_changes[key] = value + + if not entity_changes: + continue + + interesting_data[entity_id] = entity_changes + object_id = object_id_by_entity_id[entity_id] + if object_id not in changed_keys_by_object_id: + changed_keys_by_object_id[object_id] = set() + changed_keys_by_object_id[object_id] |= set(entity_changes.keys()) + def get_current_values( self, session, @@ -738,6 +877,18 @@ class PushFrameValuesToTaskEvent(BaseEvent): output[obj_id][attr["key"]] = attr["id"] return output, hiearchical + def get_hierarchical_configurations(self, session, interest_attributes): + hier_attr_query = ( + "select id, key, object_type_id, is_hierarchical, default" + " from CustomAttributeConfiguration" + " where key in ({}) and is_hierarchical is true" + ) + if not interest_attributes: + return [] + return list(session.query(hier_attr_query.format( + self.join_query_keys(interest_attributes), + )).all()) + def register(session): PushFrameValuesToTaskEvent(session).register() From 49799c2d8871a52fb1fd8210b31a1e51fd5f3f2b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 4 Aug 2022 18:26:38 +0200 Subject: [PATCH 0198/2550] fix merge conflict --- openpype/pipeline/workfile/abstract_template_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index a1d188ea6c..5d8d79397a 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -231,7 +231,7 @@ class AbstractTemplateLoader: ignored_ids = ignored_ids or [] placeholders = self.get_placeholders() self.log.debug("Placeholders found in template: {}".format( - [placeholder.name] for placeholder in placeholders] + [placeholder.name for placeholder in placeholders] )) for placeholder in placeholders: self.log.debug("Start to processing placeholder {}".format( From 34dff12fb35b898f2c06c08b97f59a95c33063b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 4 Aug 2022 19:13:48 +0200 Subject: [PATCH 0199/2550] :bug: fix build directory on darwin --- tools/build_dependencies.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/tools/build_dependencies.py b/tools/build_dependencies.py index d3566dd289..d186ead881 100644 --- a/tools/build_dependencies.py +++ b/tools/build_dependencies.py @@ -29,6 +29,7 @@ import shutil import blessed import enlighten 
import time +import re term = blessed.Terminal() @@ -52,7 +53,7 @@ def _print(msg: str, type: int = 0) -> None: else: header = term.darkolivegreen3("--- ") - print("{}{}".format(header, msg)) + print(f"{header}{msg}") def count_folders(path: Path) -> int: @@ -95,16 +96,22 @@ assert site_pkg, "No venv site-packages are found." _print(f"Working with: {site_pkg}", 2) openpype_root = Path(os.path.dirname(__file__)).parent +version = {} +with open(openpype_root / "openpype" / "version.py") as fp: + exec(fp.read(), version) + +version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) +openpype_version = version_match[1] # create full path if platform.system().lower() == "darwin": build_dir = openpype_root.joinpath( "build", - "OpenPype.app", + f"OpenPype {openpype_version}.app", "Contents", "MacOS") else: - build_subdir = "exe.{}-{}".format(get_platform(), sys.version[0:3]) + build_subdir = f"exe.{get_platform()}-{sys.version[:3]}" build_dir = openpype_root / "build" / build_subdir _print(f"Using build at {build_dir}", 2) From 7d1f1bb064190873beee61c0a4eb4df598747c88 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 09:50:11 +0200 Subject: [PATCH 0200/2550] remove extensions arguments --- openpype/client/entities.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 57c38784b0..a3fcd01f80 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1216,7 +1216,6 @@ def get_representations( representation_ids=representation_ids, representation_names=representation_names, version_ids=version_ids, - extensions=extensions, context_filters=context_filters, names_by_version_ids=names_by_version_ids, standard=True, @@ -1230,7 +1229,6 @@ def get_archived_representations( representation_ids=None, representation_names=None, version_ids=None, - extensions=None, context_filters=None, names_by_version_ids=None, fields=None @@ -1247,8 +1245,6 @@ def get_archived_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - extensions (Iterable[str]): Filter by extension of main representation - file (without dot). names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering using version ids and list of names under the version. fields (Iterable[str]): Fields that should be returned. 
All fields are @@ -1263,7 +1259,6 @@ def get_archived_representations( representation_ids=representation_ids, representation_names=representation_names, version_ids=version_ids, - extensions=extensions, context_filters=context_filters, names_by_version_ids=names_by_version_ids, standard=False, From 08a9cb207385a0906cc56d063c19de3aa88eb51d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:08:07 +0200 Subject: [PATCH 0201/2550] fix typo --- openpype/lib/plugin_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index c94d1251fc..060db94ae0 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -57,7 +57,7 @@ def deprecated(new_destination): stacklevel=4 ) return decorated_func(*args, **kwargs) - return wrapper- + return wrapper if func is None: return _decorator From 6d2a869b2ebdb9a46545a1e650fe8c009f93fed3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:08:20 +0200 Subject: [PATCH 0202/2550] discover loader plugins can expect project name --- openpype/pipeline/load/plugins.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index 233aace035..7438b3230f 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -2,6 +2,7 @@ import os import logging from openpype.settings import get_system_settings, get_project_settings +from openpype.pipeline import legacy_io from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -151,9 +152,10 @@ class SubsetLoaderPlugin(LoaderPlugin): pass -def discover_loader_plugins(): +def discover_loader_plugins(project_name=None): plugins = discover(LoaderPlugin) - project_name = os.environ.get("AVALON_PROJECT") + if not project_name: + project_name = legacy_io.active_project() system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: From 0b24237bfe178270e062e3828e804edecfe6eb23 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:08:54 +0200 Subject: [PATCH 0203/2550] loader pass project name to discover loader plugins --- openpype/tools/loader/widgets.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 13e18b3757..48c038418a 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -434,7 +434,8 @@ class SubsetWidget(QtWidgets.QWidget): # Get all representation->loader combinations available for the # index under the cursor, so we can list the user the options. - available_loaders = discover_loader_plugins() + project_name = self.dbcon.active_project() + available_loaders = discover_loader_plugins(project_name) if self.tool_name: available_loaders = lib.remove_tool_name_from_loaders( available_loaders, self.tool_name @@ -1330,7 +1331,8 @@ class RepresentationWidget(QtWidgets.QWidget): selected_side = self._get_selected_side(point_index, rows) # Get all representation->loader combinations available for the # index under the cursor, so we can list the user the options. 
- available_loaders = discover_loader_plugins() + project_name = self.dbcon.active_project() + available_loaders = discover_loader_plugins(project_name) filtered_loaders = [] for loader in available_loaders: From cbfa9015b1f7a5d134a6ea436db587d8251fc324 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 10:45:35 +0200 Subject: [PATCH 0204/2550] catch failed applied settings --- openpype/pipeline/create/creator_plugins.py | 14 +++++++++++++- openpype/pipeline/load/plugins.py | 13 ++++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 4a1630d8ef..9a5d559774 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -437,12 +437,24 @@ def discover_creator_plugins(): def discover_legacy_creator_plugins(): + from openpype.lib import Logger + + log = Logger.get_logger("CreatorDiscover") + plugins = discover(LegacyCreator) project_name = os.environ.get("AVALON_PROJECT") system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: - plugin.apply_settings(project_settings, system_settings) + try: + plugin.apply_settings(project_settings, system_settings) + except Exception: + log.warning( + "Failed to apply settings to loader {}".format( + plugin.__name__ + ), + exc_info=True + ) return plugins diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index 7438b3230f..8cba8d8217 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -153,13 +153,24 @@ class SubsetLoaderPlugin(LoaderPlugin): def discover_loader_plugins(project_name=None): + from openpype.lib import Logger + + log = Logger.get_logger("LoaderDiscover") plugins = discover(LoaderPlugin) if not project_name: project_name = legacy_io.active_project() system_settings = get_system_settings() project_settings = get_project_settings(project_name) for plugin in plugins: - plugin.apply_settings(project_settings, system_settings) + try: + plugin.apply_settings(project_settings, system_settings) + except Exception: + log.warning( + "Failed to apply settings to loader {}".format( + plugin.__name__ + ), + exc_info=True + ) return plugins From e014deb411ebc4daaf031df28927b136fedaed56 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 5 Aug 2022 12:20:22 +0200 Subject: [PATCH 0205/2550] small variable name changes --- openpype/client/operations.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index dfb1d8c4dd..69d1eb2bb6 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -55,7 +55,7 @@ def new_project_document( "_id": _create_or_convert_to_mongo_id(entity_id), "name": project_name, "type": CURRENT_PROJECT_SCHEMA, - "data": data, + "entity_data": data, "config": config } @@ -290,6 +290,10 @@ class AbstractOperation(object): def to_data(self): """Convert opration to data that can be converted to json or others. + Warning: + Current state returns ObjectId objects which cannot be parsed by + json. + Returns: Dict[str, Any]: Description of operation. 
""" @@ -412,16 +416,16 @@ class UpdateOperation(AbstractOperation): ) def to_data(self): - fields = {} + changes = {} for key, value in self._update_data.items(): if value is REMOVED_VALUE: value = None - fields[key] = value + changes[key] = value output = super(UpdateOperation, self).to_data() output.update({ - "entity_id": str(self.entity_id), - "fields": fields + "entity_id": self.entity_id, + "changes": changes }) return output From fa7b7d67f94b7f8dca87088034204f3dc6f1a03f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 5 Aug 2022 16:29:13 +0200 Subject: [PATCH 0206/2550] :bug: fix aov separator in redshift --- openpype/hosts/maya/api/lib_renderproducts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index c145f92f91..295791576d 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -963,7 +963,7 @@ class RenderProductsRedshift(ARenderProducts): """ prefix = super(RenderProductsRedshift, self).get_renderer_prefix() - prefix = "{}{}".format(prefix, self.aov_separator) + prefix = "{}{}".format(prefix, self.layer_data["aov_separator"]) return prefix def get_render_products(self): From 10ff3562739d260cf0ad13817c5ee2fd4a3a7636 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 5 Aug 2022 16:44:30 +0200 Subject: [PATCH 0207/2550] :recycle: refactor the fix --- openpype/hosts/maya/api/lib_renderproducts.py | 65 ++++++++++++------- 1 file changed, 42 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 295791576d..1e883ea43f 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -309,6 +309,42 @@ class ARenderProducts: return lib.get_attr_in_layer(plug, layer=self.layer) + @staticmethod + def extract_separator(file_prefix): + """Extract AOV separator character from the prefix. + + Default behavior extracts the part between + last occurrences of and + + Todo: + This code also triggers for V-Ray which overrides it explicitly + so this code will invalidly debug log it couldn't extract the + AOV separator even though it does set it in RenderProductsVray. + + Args: + file_prefix (str): File prefix with tokens. + + Returns: + str or None: prefix character if it can be extracted. 
+ """ + layer_tokens = ["", ""] + aov_tokens = ["", ""] + + def match_last(tokens, text): + """regex match the last occurence from a list of tokens""" + pattern = "(?:.*)({})".format("|".join(tokens)) + return re.search(pattern, text, re.IGNORECASE) + + layer_match = match_last(layer_tokens, file_prefix) + aov_match = match_last(aov_tokens, file_prefix) + separator = None + if layer_match and aov_match: + matches = sorted((layer_match, aov_match), + key=lambda match: match.end(1)) + separator = file_prefix[matches[0].end(1):matches[1].start(1)] + return separator + + def _get_layer_data(self): # type: () -> LayerMetadata # ______________________________________________ @@ -317,7 +353,7 @@ class ARenderProducts: # ____________________/ _, scene_basename = os.path.split(cmds.file(q=True, loc=True)) scene_name, _ = os.path.splitext(scene_basename) - + kwargs = {} file_prefix = self.get_renderer_prefix() # If the Render Layer belongs to a Render Setup layer then the @@ -332,26 +368,8 @@ class ARenderProducts: # defaultRenderLayer renders as masterLayer layer_name = "masterLayer" - # AOV separator - default behavior extracts the part between - # last occurences of and - # todo: This code also triggers for V-Ray which overrides it explicitly - # so this code will invalidly debug log it couldn't extract the - # aov separator even though it does set it in RenderProductsVray - layer_tokens = ["", ""] - aov_tokens = ["", ""] - - def match_last(tokens, text): - """regex match the last occurence from a list of tokens""" - pattern = "(?:.*)({})".format("|".join(tokens)) - return re.search(pattern, text, re.IGNORECASE) - - layer_match = match_last(layer_tokens, file_prefix) - aov_match = match_last(aov_tokens, file_prefix) - kwargs = {} - if layer_match and aov_match: - matches = sorted((layer_match, aov_match), - key=lambda match: match.end(1)) - separator = file_prefix[matches[0].end(1):matches[1].start(1)] + separator = self.extract_separator(file_prefix) + if separator: kwargs["aov_separator"] = separator else: log.debug("Couldn't extract aov separator from " @@ -962,8 +980,9 @@ class RenderProductsRedshift(ARenderProducts): :func:`ARenderProducts.get_renderer_prefix()` """ - prefix = super(RenderProductsRedshift, self).get_renderer_prefix() - prefix = "{}{}".format(prefix, self.layer_data["aov_separator"]) + file_prefix = super(RenderProductsRedshift, self).get_renderer_prefix() + separator = self.extract_separator(file_prefix) + prefix = "{}{}".format(file_prefix, separator or "_") return prefix def get_render_products(self): From 401a04c767eff76a8981a1371c36f2ec36fc9d9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 5 Aug 2022 17:14:10 +0200 Subject: [PATCH 0208/2550] :bug: fix missing variable and handle unset Settings value --- openpype/hosts/maya/plugins/publish/collect_render.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index d1e87c95bb..e6fc8a01e5 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -205,7 +205,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): .get('maya')\ .get('create')\ .get('CreateRender')\ - .get('default_render_image_folder') + .get('default_render_image_folder') or "" # replace relative paths with absolute. Render products are # returned as list of dictionaries. 
publish_meta_path = None @@ -318,7 +318,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "useReferencedAovs": render_instance.data.get( "useReferencedAovs") or render_instance.data.get( "vrayUseReferencedAovs") or False, - "aovSeparator": aov_separator + "aovSeparator": layer_render_products.layer_data.aov_separator # noqa: E501 } # Collect Deadline url if Deadline module is enabled From 5bd2d1d3c865510e7c4c8528f579ba6ca0d90f18 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 6 Aug 2022 03:45:37 +0000 Subject: [PATCH 0209/2550] [Automated] Bump version --- CHANGELOG.md | 36 +++++++++++++++--------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 17 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c9671c8b8..15a120ec2a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,35 +1,45 @@ # Changelog -## [3.12.3-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.3-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) -**🆕 New features** - -- Traypublisher: simple editorial publishing [\#3492](https://github.com/pypeclub/OpenPype/pull/3492) - **🚀 Enhancements** +- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) +- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) +- General: Add context to pyblish context [\#3594](https://github.com/pypeclub/OpenPype/pull/3594) - Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) - Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) - General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) **🐛 Bug fixes** +- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) +- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) +- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) +- AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) +- Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) - TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) - Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. 
[\#3590](https://github.com/pypeclub/OpenPype/pull/3590) - Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) +- General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** +- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) - General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) - General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) **Merged pull requests:** +- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) +- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) - Enable write color sets on animation publish automatically [\#3582](https://github.com/pypeclub/OpenPype/pull/3582) ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) @@ -51,7 +61,6 @@ - Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) - NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) - Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) -- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) **🐛 Bug fixes** @@ -60,7 +69,6 @@ - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) - General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) -- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) - Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) - Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) - Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) @@ -95,20 +103,6 @@ **🚀 Enhancements** - TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) -- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) -- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) -- General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) -- General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) - -**🐛 Bug fixes** - -- TrayPublisher: Keep use instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) -- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) -- 
General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) -- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) -- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) -- General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) -- Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) diff --git a/openpype/version.py b/openpype/version.py index 636dff5930..3f1056249a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.3-nightly.2" +__version__ = "3.12.3-nightly.3" diff --git a/pyproject.toml b/pyproject.toml index 9ab2fd4513..66aca5e5e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.3-nightly.2" # OpenPype +version = "3.12.3-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From ed13f96a1222dbede0b8ea62268e2a8350d84ee6 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 19:44:43 +0800 Subject: [PATCH 0210/2550] fix the bug of failing to extract look when UDIMs format used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 0b26e922d5..bbd21cfa42 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,9 +429,14 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - if files_metadata[source]["color_space"] == "Raw": + try: + if files_metadata[source]["color_space"] == "Raw": # set color space to raw if we linearized it - color_space = "Raw" + color_space = "Raw" + except KeyError: + #set color space to Raw if the attribute of the color space is raw. + if cmds.getAttr(color_space_attr) == "Raw": + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From 13bc6cab8efca3d9038e76a7a6d7fb5e11663f57 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 20:10:04 +0800 Subject: [PATCH 0211/2550] fix the bug of failing to extract the look with the UDIMs format in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index bbd21cfa42..32724c64c1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -430,11 +430,11 @@ class ExtractLook(openpype.api.Extractor): color_space = "Raw" else: try: - if files_metadata[source]["color_space"] == "Raw": + if files_metadata[source]["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" except KeyError: - #set color space to Raw if the attribute of the color space is raw. + # set color space to Raw if the attribute of the color space is raw. 
if cmds.getAttr(color_space_attr) == "Raw": color_space = "Raw" # Remap file node filename to destination From 1a7164fa90be5e394ce994a07c0355937a4987c7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 20:11:07 +0800 Subject: [PATCH 0212/2550] fix the bug of failing to extract the look with the UDIMs format in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 32724c64c1..c6737c7215 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -434,7 +434,7 @@ class ExtractLook(openpype.api.Extractor): # set color space to raw if we linearized it color_space = "Raw" except KeyError: - # set color space to Raw if the attribute of the color space is raw. + # set color space to Raw if its attribute is raw. if cmds.getAttr(color_space_attr) == "Raw": color_space = "Raw" # Remap file node filename to destination From 0aeded448613e0faddbeb09a711456dada9e0826 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 8 Aug 2022 16:04:31 +0200 Subject: [PATCH 0213/2550] Add collectors for input links for Maya + Fusion --- .../fusion/plugins/publish/collect_inputs.py | 112 +++++++++ openpype/hosts/maya/api/lib_rendersetup.py | 68 ++++++ .../maya/plugins/publish/collect_inputs.py | 214 ++++++++++++++++++ 3 files changed, 394 insertions(+) create mode 100644 openpype/hosts/fusion/plugins/publish/collect_inputs.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_inputs.py diff --git a/openpype/hosts/fusion/plugins/publish/collect_inputs.py b/openpype/hosts/fusion/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..e610575e3a --- /dev/null +++ b/openpype/hosts/fusion/plugins/publish/collect_inputs.py @@ -0,0 +1,112 @@ +import pyblish.api + +from openpype.pipeline import registered_host + + +def collect_input_containers(tools): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input avalon containers + + """ + + # Lookup by node ids + lookup = frozenset([tool.Name for tool in tools]) + + containers = [] + host = registered_host() + for container in host.ls(): + + name = container["_tool"].Name + + # We currently assume no "groups" as containers but just single tools + # like a single "Loader" operator. As such we just check whether the + # Loader is part of the processing queue. + if name in lookup: + containers.append(container) + + return containers + + +def iter_upstream(tool): + """Yields all upstream inputs for the current tool. + + Yields: + tool: The input tools. + + """ + + def get_connected_input_tools(tool): + """Helper function that returns connected input tools for a tool.""" + inputs = [] + + # Filter only to actual types that will have sensible upstream + # connections. So we ignore just "Number" inputs as they can be + # many to iterate, slowing things down quite a bit - and in practice + # they don't have upstream connections. 
+ VALID_INPUT_TYPES = ['Image', 'Particles', 'Mask', 'DataType3D'] + for type_ in VALID_INPUT_TYPES: + for input_ in tool.GetInputList(type_).values(): + output = input_.GetConnectedOutput() + if output: + input_tool = output.GetTool() + inputs.append(input_tool) + + return inputs + + # Initialize process queue with the node's inputs itself + queue = get_connected_input_tools(tool) + + # We keep track of which node names we have processed so far, to ensure we + # don't process the same hierarchy again. We are not pushing the tool + # itself into the set as that doesn't correctly recognize the same tool. + # Since tool names are unique in a comp in Fusion we rely on that. + collected = set(tool.Name for tool in queue) + + # Traverse upstream references for all nodes and yield them as we + # process the queue. + while queue: + upstream_tool = queue.pop() + yield upstream_tool + + # Find upstream tools that are not collected yet. + upstream_inputs = get_connected_input_tools(upstream_tool) + upstream_inputs = [t for t in upstream_inputs if + t.Name not in collected] + + queue.extend(upstream_inputs) + collected.update(tool.Name for tool in upstream_inputs) + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect source input containers used for this publish. + + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.2 + hosts = ["fusion"] + + def process(self, instance): + + # Get all upstream and include itself + tool = instance[0] + nodes = list(iter_upstream(tool)) + nodes.append(tool) + + # Collect containers for the given set of nodes + containers = collect_input_containers(nodes) + + inputs = [c["representation"] for c in containers] + instance.data["inputs"] = inputs + + self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/maya/api/lib_rendersetup.py b/openpype/hosts/maya/api/lib_rendersetup.py index 0fdc54a068..a00be52d8e 100644 --- a/openpype/hosts/maya/api/lib_rendersetup.py +++ b/openpype/hosts/maya/api/lib_rendersetup.py @@ -348,3 +348,71 @@ def get_attr_overrides(node_attr, layer, break return reversed(plug_overrides) + + +def get_shader_in_layer(node, layer): + """Return the assigned shader in a renderlayer without switching layers. + + This has been developed and tested for Legacy Renderlayers and *not* for + Render Setup. + + Note: This will also return the shader for any face assignments, however + it will *not* return the components they are assigned to. This could + be implemented, but since Maya's renderlayers are famous for breaking + with face assignments there has been no need for this function to + support that. + + Returns: + list: The list of assigned shaders in the given layer. + + """ + + def _get_connected_shader(shape): + """Return current shader""" + return cmds.listConnections(shape + ".instObjGroups", + source=False, + destination=True, + plugs=False, + connections=False, + type="shadingEngine") or [] + + # We check the instObjGroups (shader connection) for layer overrides. 
+ plug = node + ".instObjGroups" + + # Ignore complex query if we're in the layer anyway (optimization) + current_layer = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + if layer == current_layer: + return _get_connected_shader(plug) + + connections = cmds.listConnections(plug, + plugs=True, + source=False, + destination=True, + type="renderLayer") or [] + connections = filter(lambda x: x.endswith(".outPlug"), connections) + if not connections: + # If no overrides anywhere on the shader, just get the current shader + return _get_connected_shader(plug) + + def _get_override(connections, layer): + """Return the overridden connection for that layer in connections""" + # If there's an override on that layer, return that. + for connection in connections: + if (connection.startswith(layer + ".outAdjustments") and + connection.endswith(".outPlug")): + + # This is a shader override on that layer so get the shader + # connected to .outValue of the .outAdjustment[i] + out_adjustment = connection.rsplit(".", 1)[0] + connection_attr = out_adjustment + ".outValue" + override = cmds.listConnections(connection_attr) or [] + + return override + + override_shader = _get_override(connections, layer) + if override_shader is not None: + return override_shader + else: + # Get the override for "defaultRenderLayer" (=masterLayer) + return _get_override(connections, layer="defaultRenderLayer") diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..8afa1e4757 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -0,0 +1,214 @@ +import copy +from maya import cmds +import maya.api.OpenMaya as om +import pyblish.api + +from openpype.pipeline import registered_host +from openpype.hosts.maya.api.lib import get_container_members +from openpype.hosts.maya.api.lib_rendersetup import get_shader_in_layer + + +def iter_history(nodes, + filter=om.MFn.kInvalid, + direction=om.MItDependencyGraph.kUpstream): + """Iterate unique upstream history for list of nodes. + + This acts as a replacement to maya.cmds.listHistory. + It's faster by about 2x-3x. It returns less than + maya.cmds.listHistory as it excludes the input nodes + from the output (unless an input node was history + for another input node). It also excludes duplicates. + + Args: + nodes (list): Maya node names to start search from. + filter (om.MFn.Type): Filter to only specific types. + e.g. to dag nodes using om.MFn.kDagNode + direction (om.MItDependencyGraph.Direction): Direction to traverse in. + Defaults to upstream. + + Yields: + str: Node names in upstream history. 
+ + """ + if not nodes: + return + + sel = om.MSelectionList() + for node in nodes: + sel.add(node) + + it = om.MItDependencyGraph(sel.getDependNode(0)) # init iterator + handle = om.MObjectHandle + + traversed = set() + fn_dep = om.MFnDependencyNode() + fn_dag = om.MFnDagNode() + for i in range(sel.length()): + + start_node = sel.getDependNode(i) + start_node_hash = handle(start_node).hashCode() + if start_node_hash in traversed: + continue + + it.resetTo(start_node, + filter=filter, + direction=direction) + while not it.isDone(): + + node = it.currentNode() + node_hash = handle(node).hashCode() + + if node_hash in traversed: + it.prune() + it.next() + continue + + traversed.add(node_hash) + + if node.hasFn(om.MFn.kDagNode): + fn_dag.setObject(node) + yield fn_dag.fullPathName() + else: + fn_dep.setObject(node) + yield fn_dep.name() + + it.next() + + +def collect_input_containers(containers, nodes): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input avalon containers + + """ + # Assume the containers have collected their cached '_members' data + # in the collector. + return [container for container in containers + if any(node in container["_members"] for node in nodes)] + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect input source inputs for this publish. + + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.34 + hosts = ["maya"] + + def process(self, instance): + + # For large scenes the querying of "host.ls()" can be relatively slow + # e.g. up to a second. Many instances calling it easily slows this + # down. As such, we cache it so we trigger it only once. + # todo: Instead of hidden cache make "CollectContainers" plug-in + cache_key = "__cache_containers" + scene_containers = instance.context.data.get(cache_key, None) + if scene_containers is None: + # Query the scenes' containers if there's no cache yet + host = registered_host() + scene_containers = list(host.ls()) + for container in scene_containers: + # Embed the members into the container dictionary + container_members = set(get_container_members(container)) + container["_members"] = container_members + instance.context.data["__cache_containers"] = scene_containers + + # Collect the relevant input containers for this instance + if "renderlayer" in set(instance.data.get("families", [])): + # Special behavior for renderlayers + self.log.debug("Collecting renderlayer inputs....") + containers = self._collect_renderlayer_inputs(scene_containers, + instance) + + else: + # Basic behavior + nodes = instance[:] + + # Include any input connections of history with long names + # For optimization purposes only trace upstream from shape nodes + # looking for used dag nodes. This way having just a constraint + # on a transform is also ignored which tended to give irrelevant + # inputs for the majority of our use cases. We tend to care more + # about geometry inputs. 
+ shapes = cmds.ls(nodes, + type=("mesh", "nurbsSurface", "nurbsCurve"), + noIntermediate=True) + if shapes: + history = list(iter_history(shapes, filter=om.MFn.kShape)) + history = cmds.ls(history, long=True) + + # Include the transforms in the collected history as shapes + # are excluded from containers + transforms = cmds.listRelatives(cmds.ls(history, shapes=True), + parent=True, + fullPath=True, + type="transform") + if transforms: + history.extend(transforms) + + if history: + nodes = list(set(nodes + history)) + + # Collect containers for the given set of nodes + containers = collect_input_containers(scene_containers, + nodes) + + inputs = [c["representation"] for c in containers] + instance.data["inputs"] = inputs + + self.log.info("Collected inputs: %s" % inputs) + + def _collect_renderlayer_inputs(self, scene_containers, instance): + """Collects inputs from nodes in renderlayer, incl. shaders + camera""" + + # Get the renderlayer + renderlayer = instance.data.get("setMembers") + + if renderlayer == "defaultRenderLayer": + # Assume all loaded containers in the scene are inputs + # for the masterlayer + return copy.deepcopy(scene_containers) + else: + # Get the members of the layer + members = cmds.editRenderLayerMembers(renderlayer, + query=True, + fullNames=True) or [] + + # In some cases invalid objects are returned from + # `editRenderLayerMembers` so we filter them out + members = cmds.ls(members, long=True) + + # Include all children + children = cmds.listRelatives(members, + allDescendents=True, + fullPath=True) or [] + members.extend(children) + + # Include assigned shaders in renderlayer + shapes = cmds.ls(members, shapes=True, long=True) + shaders = set() + for shape in shapes: + shape_shaders = get_shader_in_layer(shape, layer=renderlayer) + if not shape_shaders: + continue + shaders.update(shape_shaders) + members.extend(shaders) + + # Explicitly include the camera being rendered in renderlayer + cameras = instance.data.get("cameras") + members.extend(cameras) + + containers = collect_input_containers(scene_containers, members) + + return containers + From 38c35a87dea322e8fb81179cb40abd0549a905b7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 8 Aug 2022 22:15:50 +0800 Subject: [PATCH 0214/2550] fix AiImage colorspace and UDIMs errored out while extracting the look --- openpype/hosts/maya/plugins/publish/extract_look.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index c6737c7215..9974f97f1b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,14 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - try: - if files_metadata[source]["color_space"] == "Raw": - # set color space to raw if we linearized it - color_space = "Raw" - except KeyError: - # set color space to Raw if its attribute is raw. 
- if cmds.getAttr(color_space_attr) == "Raw": - color_space = "Raw" + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From d92c6eac115d0e857738c5944abe11fc2c840f1e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 8 Aug 2022 16:58:33 +0200 Subject: [PATCH 0215/2550] Remove blank line --- openpype/hosts/maya/plugins/publish/collect_inputs.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py index 8afa1e4757..d34f289e05 100644 --- a/openpype/hosts/maya/plugins/publish/collect_inputs.py +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -211,4 +211,3 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): containers = collect_input_containers(scene_containers, members) return containers - From 4721a683094acfc2fd709d44c83d3907d91a2aa2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 8 Aug 2022 17:00:17 +0200 Subject: [PATCH 0216/2550] Shush the hound - code is correct --- openpype/hosts/maya/plugins/publish/collect_inputs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py index d34f289e05..43941bde4f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_inputs.py +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -60,7 +60,7 @@ def iter_history(nodes, if node_hash in traversed: it.prune() - it.next() + it.next() # noqa: B305 continue traversed.add(node_hash) @@ -72,7 +72,7 @@ def iter_history(nodes, fn_dep.setObject(node) yield fn_dep.name() - it.next() + it.next() # noqa: B305 def collect_input_containers(containers, nodes): From 13302ca23e804ab476e1822657b91c8369bd9cb9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:29:02 +0200 Subject: [PATCH 0217/2550] mix audio using side file for filters --- .../publish/extract_otio_audio_tracks.py | 98 ++++++++++++------- 1 file changed, 62 insertions(+), 36 deletions(-) diff --git a/openpype/plugins/publish/extract_otio_audio_tracks.py b/openpype/plugins/publish/extract_otio_audio_tracks.py index 00c1748cdc..ed30a2f0f5 100644 --- a/openpype/plugins/publish/extract_otio_audio_tracks.py +++ b/openpype/plugins/publish/extract_otio_audio_tracks.py @@ -57,15 +57,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): audio_inputs.insert(0, empty) # create cmd - cmd = path_to_subprocess_arg(self.ffmpeg_path) + " " - cmd += self.create_cmd(audio_inputs) - cmd += path_to_subprocess_arg(audio_temp_fpath) - - # run subprocess - self.log.debug("Executing: {}".format(cmd)) - openpype.api.run_subprocess( - cmd, shell=True, logger=self.log - ) + self.mix_audio(audio_inputs, audio_temp_fpath) # remove empty os.remove(empty["mediaPath"]) @@ -245,46 +237,80 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): "durationSec": max_duration_sec } - def create_cmd(self, inputs): + def mix_audio(self, audio_inputs, audio_temp_fpath): """Creating multiple input cmd string Args: - inputs (list): list of input dicts. Order mater. + audio_inputs (list): list of input dicts. Order mater. 
Returns: str: the command body - """ + + longest_input = 0 + for audio_input in audio_inputs: + audio_len = audio_input["durationSec"] + if audio_len > longest_input: + longest_input = audio_len + # create cmd segments - _inputs = "" - _filters = "-filter_complex \"" - _channels = "" - for index, input in enumerate(inputs): - input_format = input.copy() - input_format.update({"i": index}) - input_format["mediaPath"] = path_to_subprocess_arg( - input_format["mediaPath"] + input_args = [] + filters = [] + tag_names = [] + for index, audio_input in enumerate(audio_inputs): + input_args.extend([ + "-ss", str(audio_input["startSec"]), + "-t", str(audio_input["durationSec"]), + "-i", audio_input["mediaPath"] + ]) + + # Output tag of a filtered audio input + tag_name = "[r{}]".format(index) + tag_names.append(tag_name) + # Delay in audio by delay in item + filters.append("[{}]adelay={}:all=1{}".format( + index, audio_input["delayMilSec"], tag_name + )) + + # Mixing filter + # - dropout transition (when audio will get loader) is set to be + # higher then any input audio item + # - volume is set to number of inputs - each mix adds 1/n volume + # where n is input inder (to get more info read ffmpeg docs and + # send a giftcard to contributor) + filters.append( + ( + "{}amix=inputs={}:duration=first:" + "dropout_transition={},volume={}[a]" + ).format( + "".join(tag_names), + len(audio_inputs), + (longest_input * 1000) + 1000, + len(audio_inputs), ) + ) - _inputs += ( - "-ss {startSec} " - "-t {durationSec} " - "-i {mediaPath} " - ).format(**input_format) + # Store filters to a file (separated by ',') + # - this is to avoid "too long" command issue in ffmpeg + with tempfile.NamedTemporaryFile( + delete=False, mode="w", suffix=".txt" + ) as tmp_file: + filters_tmp_filepath = tmp_file.name + tmp_file.write(",".join(filters)) - _filters += "[{i}]adelay={delayMilSec}:all=1[r{i}]; ".format( - **input_format) - _channels += "[r{}]".format(index) + args = [self.ffmpeg_path] + args.extend(input_args) + args.extend([ + "-filter_complex_script", filters_tmp_filepath, + "-map", "[a]" + ]) + args.append(audio_temp_fpath) - # merge all cmd segments together - cmd = _inputs + _filters + _channels - cmd += str( - "amix=inputs={inputs}:duration=first:" - "dropout_transition=1000,volume={inputs}[a]\" " - ).format(inputs=len(inputs)) - cmd += "-map \"[a]\" " + # run subprocess + self.log.debug("Executing: {}".format(args)) + openpype.api.run_subprocess(args, logger=self.log) - return cmd + os.remove(filters_tmp_filepath) def create_temp_file(self, name): """Create temp wav file From 8db8ada9642bcdf2c5f364fbf78c902344b1613e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:46:32 +0200 Subject: [PATCH 0218/2550] changed 'node' variable to 'identifier' and added it's docstrings --- .../workfile/abstract_template_loader.py | 20 +++++++------------ 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 5d8d79397a..16287bbd4e 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -384,17 +384,11 @@ class AbstractPlaceholder: and assets to load. 
optional_keys: A list of optional keys to decribe placeholder and assets to load - loader: Name of linked loader to use while loading assets - is_context: Is placeholder linked - to context asset (or to linked assets) + loader_name: Name of linked loader to use while loading assets - Methods: - is_repres_valid: - loader: - order: - is_valid: - get_data: - parent_in_hierachy: + Args: + identifier (str): Placeholder identifier. Should be possible to be + used as identifier in "a scene" (e.g. unique node name). """ required_keys = { @@ -407,10 +401,10 @@ class AbstractPlaceholder: } optional_keys = {} - def __init__(self, node): + def __init__(self, identifier): self._log = None - self._name = node - self.get_data(node) + self._name = identifier + self.get_data(identifier) @property def log(self): From 5d0cd42a8133bcf7d65bbcef0c7b093ef058d7b2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:47:01 +0200 Subject: [PATCH 0219/2550] renamed 'order' method to 'get_order' --- .../pipeline/workfile/abstract_template_loader.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 16287bbd4e..fe1f15c140 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -336,7 +336,7 @@ class AbstractTemplateLoader: placeholders = map(placeholder_class, self.get_template_nodes()) valid_placeholders = filter(placeholder_class.is_valid, placeholders) sorted_placeholders = sorted(valid_placeholders, - key=placeholder_class.order) + key=placeholder_class.get_order) return sorted_placeholders @abstractmethod @@ -427,17 +427,24 @@ class AbstractPlaceholder: def builder_type(self): return self.data["builder_type"] + @property def order(self): - """Get placeholder order. + return self.data["order"] + + def get_order(self): + """Placeholder order. + Order is used to sort them by priority Priority is lowset first, highest last (ex: 1: First to load 100: Last to load) + Returns: - Int: Order priority + int: Order priority """ - return self.data.get('order') + + return self.order @property def loader_name(self): From 7e8e61c0e4d51334d6de0d1f9cd672fa0dae5313 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:48:14 +0200 Subject: [PATCH 0220/2550] changed 'get_data' docstring --- openpype/pipeline/workfile/abstract_template_loader.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index fe1f15c140..66943eafe7 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -537,10 +537,12 @@ class AbstractPlaceholder: pass @abstractmethod - def get_data(self, node): - """ - Collect placeholders information. + def get_data(self, identifier): + """Collect information about placeholder by identifier. + Args: - node (AnyNode): A unique node decided by Placeholder implementation + identifier (str): A unique placeholder identifier defined by + implementation. 
""" + pass From a1cd1890d6db952e4feee357e204444aed0015ed Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:48:28 +0200 Subject: [PATCH 0221/2550] modified 'parent_in_hierarchy' docstring --- openpype/pipeline/workfile/abstract_template_loader.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 66943eafe7..a1629d9b79 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -491,13 +491,13 @@ class AbstractPlaceholder: return False @abstractmethod - def parent_in_hierarchy(self, containers): - """Place container in correct hierarchy - given by placeholder + def parent_in_hierarchy(self, container): + """Place loaded container in correct hierarchy given by placeholder + Args: - containers (String): Container name returned back by - placeholder's loader. + container (Dict[str, Any]): Loaded container created by loader. """ + pass @abstractmethod From 56150d4abb72d8b0025a7724e002eab792aa34a0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:48:48 +0200 Subject: [PATCH 0222/2550] removed unused method 'convert_to_db_filters' --- .../pipeline/workfile/abstract_template_loader.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index a1629d9b79..c36e489017 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -523,19 +523,6 @@ class AbstractPlaceholder: pass - @abstractmethod - def convert_to_db_filters(self, current_asset, linked_asset): - """map current placeholder data as a db filter - args: - current_asset (String): Name of current asset in context - linked asset (list[String]) : Names of assets linked to - current asset in context - Returns: - dict: a dictionnary describing a filter to look for asset in - a database - """ - pass - @abstractmethod def get_data(self, identifier): """Collect information about placeholder by identifier. 
From 56bbbdbd583b51ba07bac08e753fb5a2050a768f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 8 Aug 2022 17:49:20 +0200 Subject: [PATCH 0223/2550] removed unused import --- openpype/pipeline/workfile/abstract_template_loader.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index c36e489017..725ab1dab3 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -1,8 +1,6 @@ import os from abc import ABCMeta, abstractmethod -import traceback - import six import logging from functools import reduce From 26572719c9eb82dc6f818665c2544ef376d6769a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 8 Aug 2022 17:01:40 +0100 Subject: [PATCH 0224/2550] Added FBX support for update in reference loader --- openpype/hosts/maya/api/plugin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 9280805945..2b0c6131b4 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -208,7 +208,8 @@ class ReferenceLoader(Loader): file_type = { "ma": "mayaAscii", "mb": "mayaBinary", - "abc": "Alembic" + "abc": "Alembic", + "fbx": "fbx" }.get(representation["name"]) assert file_type, "Unsupported representation: %s" % representation From ab810691c5d4d9dc3bc314a0b6ce482260d1a4ee Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 8 Aug 2022 22:34:57 +0200 Subject: [PATCH 0225/2550] nuke: wrong key name in settings for write node type --- openpype/hosts/nuke/api/lib.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 501ab4ba93..c1f49cbf8c 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -912,7 +912,7 @@ def get_render_path(node): avalon_knob_data = read_avalon_data(node) nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -1920,7 +1920,7 @@ class WorkfileSettings(object): families.append(avalon_knob_data.get("families")) nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -2219,7 +2219,7 @@ def get_write_node_template_attr(node): avalon_knob_data = read_avalon_data(node) # get template data nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) From 61457bffde96102079c3ccfb83b9a201a3ea4b8d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 9 Aug 2022 15:19:12 +0800 Subject: [PATCH 0226/2550] fix the bug of failing to extract look with UDIMs format in aiIMage --- openpype/hosts/maya/plugins/publish/extract_look.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 9974f97f1b..ed8ada3c62 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,17 @@ class 
ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - color_space = "Raw" + #get all the resolved files + src = files_metadata.get(source) + if src: + if files_metadata[source]["color_space"] == "Raw": + # set color space to raw if we linearized it + color_space = "Raw" + else: + # if the files are unresolved + if files_metadata[filepath]["color_space"] == "Raw": + # set color space to raw if we linearized it + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From de84296711bf8420850af5b065c328c55a2c7a27 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 9 Aug 2022 15:20:25 +0800 Subject: [PATCH 0227/2550] fix the bug of failing to extract look with UDIMs format in aiIMage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index ed8ada3c62..d69eaffe59 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - #get all the resolved files + # get all the resolved files src = files_metadata.get(source) if src: if files_metadata[source]["color_space"] == "Raw": From cb5dd41fba13c7f8e6a7fd62067d4bdddee46f66 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 9 Aug 2022 15:43:01 +0800 Subject: [PATCH 0228/2550] fix the bug of failing to extract look with UDIMs format in aiIMage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index d69eaffe59..80d82a4f58 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all the resolved files + # get all the resolved files in Maya File Path Editor src = files_metadata.get(source) if src: if files_metadata[source]["color_space"] == "Raw": From b570374264f0a7cda4f5b4dc15f3c048a675548e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Tue, 9 Aug 2022 08:28:26 +0000 Subject: [PATCH 0229/2550] [Automated] Bump version --- CHANGELOG.md | 21 +++++++++++++-------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 15a120ec2a..788c915b9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,20 +1,29 @@ # Changelog -## [3.12.3-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.13.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) +**🆕 New features** + +- Support for mutliple installed versions - 3.13 [\#3605](https://github.com/pypeclub/OpenPype/pull/3605) + **🚀 Enhancements** +- Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) - Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) - Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) - 
General: Add context to pyblish context [\#3594](https://github.com/pypeclub/OpenPype/pull/3594) - Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) - Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) +- General: Python module appdirs from git [\#3589](https://github.com/pypeclub/OpenPype/pull/3589) +- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) - General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) **🐛 Bug fixes** +- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) +- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) - Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) - General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) - Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) @@ -23,6 +32,7 @@ - Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) - TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) - Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. [\#3590](https://github.com/pypeclub/OpenPype/pull/3590) +- Fix general settings environment variables resolution [\#3587](https://github.com/pypeclub/OpenPype/pull/3587) - Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) @@ -32,8 +42,8 @@ **🔀 Refactored code** - General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) +- General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) - General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) -- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) **Merged pull requests:** @@ -60,7 +70,6 @@ - Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) - Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) - NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) -- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) **🐛 Bug fixes** @@ -84,13 +93,13 @@ **🔀 Refactored code** +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) - General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) - Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) - General: Client docstrings cleanup 
[\#3529](https://github.com/pypeclub/OpenPype/pull/3529) - General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) - General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) -- Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) **Merged pull requests:** @@ -100,10 +109,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) -**🚀 Enhancements** - -- TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) - ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0) diff --git a/openpype/version.py b/openpype/version.py index 3f1056249a..5dc4c0be8a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.3-nightly.3" +__version__ = "3.13.0-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 31a6505280..13a7609920 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.3-nightly.3" # OpenPype +version = "3.13.0-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From e595dbba85733664544c4073f92fde1a1063b68f Mon Sep 17 00:00:00 2001 From: OpenPype Date: Tue, 9 Aug 2022 08:39:56 +0000 Subject: [PATCH 0230/2550] [Automated] Release --- CHANGELOG.md | 7 ++++--- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 788c915b9d..3124201758 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.13.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) **🆕 New features** @@ -44,6 +44,7 @@ - General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) - General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) - General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) **Merged pull requests:** @@ -88,12 +89,12 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) +- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) - TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) - NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) **🔀 Refactored code** -- General: Separate extraction of template data into 
more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) - General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) - General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) - Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) diff --git a/openpype/version.py b/openpype/version.py index 5dc4c0be8a..d2eb3a8ab6 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.0-nightly.1" +__version__ = "3.13.0" diff --git a/pyproject.toml b/pyproject.toml index 13a7609920..03922a8e67 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.0-nightly.1" # OpenPype +version = "3.13.0" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 9427d791ea3536dda99e591280cc415969f1e3c1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 11:19:08 +0200 Subject: [PATCH 0231/2550] moved workfile path resolving into openpype/pipeline/workfile --- openpype/pipeline/workfile/__init__.py | 14 ++ openpype/pipeline/workfile/path_resolving.py | 184 +++++++++++++++++++ 2 files changed, 198 insertions(+) create mode 100644 openpype/pipeline/workfile/__init__.py create mode 100644 openpype/pipeline/workfile/path_resolving.py diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py new file mode 100644 index 0000000000..3a51491cdd --- /dev/null +++ b/openpype/pipeline/workfile/__init__.py @@ -0,0 +1,14 @@ +from .path_resolving import ( + get_workfile_template_key_from_context, + get_workfile_template_key, + get_workdir_with_workdir_data, + get_workdir, +) + + +__all__ = ( + "get_workfile_template_key_from_context", + "get_workfile_template_key", + "get_workdir_with_workdir_data", + "get_workdir", +) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py new file mode 100644 index 0000000000..9525dd59dc --- /dev/null +++ b/openpype/pipeline/workfile/path_resolving.py @@ -0,0 +1,184 @@ +from openpype.client import get_asset_by_name +from openpype.settings import get_project_settings +from openpype.lib import filter_profiles +from openpype.pipeline import Anatomy +from openpype.pipeline.template_data import get_template_data + + +def get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name, project_settings=None +): + """Helper function to get template key for workfile template. + + Do the same as `get_workfile_template_key` but returns value for "session + context". + + It is required to pass one of 'dbcon' with already set project name or + 'project_name' arguments. + + Args: + asset_name(str): Name of asset document. + task_name(str): Task name for which is template key retrieved. + Must be available on asset document under `data.tasks`. + host_name(str): Name of host implementation for which is workfile + used. + project_name(str): Project name where asset and task is. Not required + when 'dbcon' is passed. + project_settings(Dict[str, Any]): Project settings for passed + 'project_name'. Not required at all but makes function faster. 
+ """ + + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["data.tasks"] + ) + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) + + +def get_workfile_template_key( + task_type, host_name, project_name, project_settings=None +): + """Workfile template key which should be used to get workfile template. + + Function is using profiles from project settings to return right template + for passet task type and host name. + + Args: + task_type(str): Name of task type. + host_name(str): Name of host implementation (e.g. "maya", "nuke", ...) + project_name(str): Name of project in which context should look for + settings. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. + """ + + default = "work" + if not task_type or not host_name: + return default + + if not project_settings: + project_settings = get_project_settings(project_name) + + try: + profiles = ( + project_settings + ["global"] + ["tools"] + ["Workfiles"] + ["workfile_template_profiles"] + ) + except Exception: + profiles = [] + + if not profiles: + return default + + profile_filter = { + "task_types": task_type, + "hosts": host_name + } + profile = filter_profiles(profiles, profile_filter) + if profile: + return profile["workfile_template"] or default + return default + + +def get_workdir_with_workdir_data( + workdir_data, + project_name, + anatomy=None, + template_key=None, + project_settings=None +): + """Fill workdir path from entered data and project's anatomy. + + It is possible to pass only project's name instead of project's anatomy but + one of them **must** be entered. It is preferred to enter anatomy if is + available as initialization of a new Anatomy object may be time consuming. + + Args: + workdir_data (Dict[str, Any]): Data to fill workdir template. + project_name (str): Project's name. + otherwise Anatomy object is created with using the project name. + anatomy (Anatomy): Anatomy object for specific project. Faster + processing if is passed. + template_key (str): Key of work templates in anatomy templates. If not + passed `get_workfile_template_key_from_context` is used to get it. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. Ans id used only + if 'template_key' is not passed. + + Returns: + TemplateResult: Workdir path. + """ + + if not anatomy: + anatomy = Anatomy(project_name) + + if not template_key: + template_key = get_workfile_template_key( + workdir_data["task"]["type"], + workdir_data["app"], + workdir_data["project"]["name"], + project_settings + ) + + anatomy_filled = anatomy.format(workdir_data) + # Output is TemplateResult object which contain useful data + output = anatomy_filled[template_key]["folder"] + if output: + return output.normalized() + return output + + +def get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + anatomy=None, + template_key=None, + project_settings=None +): + """Fill workdir path from entered data and project's anatomy. + + Args: + project_doc (Dict[str, Any]): Mongo document of project from MongoDB. + asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB. + task_name (str): Task name for which are workdir data preapred. + host_name (str): Host which is used to workdir. 
This is required + because workdir template may contain `{app}` key. In `Session` + is stored under `AVALON_APP` key. + anatomy (Anatomy): Optional argument. Anatomy object is created using + project name from `project_doc`. It is preferred to pass this + argument as initialization of a new Anatomy object may be time + consuming. + template_key (str): Key of work templates in anatomy templates. Default + value is defined in `get_workdir_with_workdir_data`. + project_settings(Dict[str, Any]): Prepared project settings for + project name. Optional to make processing faster. Ans id used only + if 'template_key' is not passed. + + Returns: + TemplateResult: Workdir path. + """ + + if not anatomy: + anatomy = Anatomy(project_doc["name"]) + + workdir_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # Output is TemplateResult object which contain useful data + return get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + template_key, + project_settings + ) From fabec0819beeab79cf1695d164420896254d750c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 11:19:29 +0200 Subject: [PATCH 0232/2550] maked moved functions as deprecated --- openpype/lib/avalon_context.py | 100 +++++++++++---------------------- 1 file changed, 32 insertions(+), 68 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 42854f39d6..636806d1f4 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -321,6 +321,8 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): ) +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key_from_context") def get_workfile_template_key_from_context( asset_name, task_name, host_name, project_name=None, dbcon=None, project_settings=None @@ -349,27 +351,26 @@ def get_workfile_template_key_from_context( ValueError: When both 'dbcon' and 'project_name' were not passed. """ + + from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context + ) + if not project_name: if not dbcon: raise ValueError(( "`get_workfile_template_key_from_context` requires to pass" " one of 'dbcon' or 'project_name' arguments." )) - project_name = dbcon.active_project() - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.tasks"] - ) - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - return get_workfile_template_key( - task_type, host_name, project_name, project_settings + return get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name, project_settings ) +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key") def get_workfile_template_key( task_type, host_name, project_name=None, project_settings=None ): @@ -393,40 +394,12 @@ def get_workfile_template_key( ValueError: When both 'project_name' and 'project_settings' were not passed. """ - default = "work" - if not task_type or not host_name: - return default - if not project_settings: - if not project_name: - raise ValueError(( - "`get_workfile_template_key` requires to pass" - " one of 'project_name' or 'project_settings' arguments." 
- )) - project_settings = get_project_settings(project_name) + from openpype.pipeline.workfile import get_workfile_template_key - try: - profiles = ( - project_settings - ["global"] - ["tools"] - ["Workfiles"] - ["workfile_template_profiles"] - ) - except Exception: - profiles = [] - - if not profiles: - return default - - profile_filter = { - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles(profiles, profile_filter) - if profile: - return profile["workfile_template"] or default - return default + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) @deprecated("openpype.pipeline.template_data.get_template_data") @@ -454,6 +427,7 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir_with_workdir_data( workdir_data, anatomy=None, project_name=None, template_key=None ): @@ -480,31 +454,24 @@ def get_workdir_with_workdir_data( Raises: ValueError: When both `anatomy` and `project_name` are set to None. """ + if not anatomy and not project_name: raise ValueError(( "Missing required arguments one of `project_name` or `anatomy`" " must be entered." )) - if not anatomy: - from openpype.pipeline import Anatomy - anatomy = Anatomy(project_name) + if not project_name: + project_name = anatomy.project_name - if not template_key: - template_key = get_workfile_template_key( - workdir_data["task"]["type"], - workdir_data["app"], - project_name=workdir_data["project"]["name"] - ) + from openpype.pipeline.workfile import get_workdir_with_workdir_data - anatomy_filled = anatomy.format(workdir_data) - # Output is TemplateResult object which contain useful data - output = anatomy_filled[template_key]["folder"] - if output: - return output.normalized() - return output + return get_workdir_with_workdir_data( + workdir_data, project_name, anatomy, template_key + ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir( project_doc, asset_doc, @@ -533,18 +500,15 @@ def get_workdir( TemplateResult: Workdir path. 
""" - from openpype.pipeline import Anatomy - from openpype.pipeline.template_data import get_template_data - - if not anatomy: - anatomy = Anatomy(project_doc["name"]) - - workdir_data = get_template_data( - project_doc, asset_doc, task_name, host_name - ) + from openpype.pipeline.workfile import get_workdir # Output is TemplateResult object which contain useful data - return get_workdir_with_workdir_data( - workdir_data, anatomy, template_key=template_key + return get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + template_key ) From 4a4bb22f60353bb9cfcdb64c516b2b145cb7c966 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Aug 2022 11:36:36 +0200 Subject: [PATCH 0233/2550] Refactor collect inputs as `inputRepresentations` --- openpype/hosts/fusion/plugins/publish/collect_inputs.py | 6 ++++-- openpype/hosts/houdini/plugins/publish/collect_inputs.py | 6 ++++-- openpype/hosts/maya/plugins/publish/collect_inputs.py | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/fusion/plugins/publish/collect_inputs.py b/openpype/hosts/fusion/plugins/publish/collect_inputs.py index e610575e3a..8f9857b02f 100644 --- a/openpype/hosts/fusion/plugins/publish/collect_inputs.py +++ b/openpype/hosts/fusion/plugins/publish/collect_inputs.py @@ -1,3 +1,5 @@ +from bson.objectid import ObjectId + import pyblish.api from openpype.pipeline import registered_host @@ -106,7 +108,7 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): # Collect containers for the given set of nodes containers = collect_input_containers(nodes) - inputs = [c["representation"] for c in containers] - instance.data["inputs"] = inputs + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/houdini/plugins/publish/collect_inputs.py b/openpype/hosts/houdini/plugins/publish/collect_inputs.py index 8c7098c710..9ee0248bd9 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_inputs.py +++ b/openpype/hosts/houdini/plugins/publish/collect_inputs.py @@ -1,3 +1,5 @@ +from bson.objectid import ObjectId + import pyblish.api from openpype.pipeline import registered_host @@ -115,7 +117,7 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): # Collect containers for the given set of nodes containers = collect_input_containers(nodes) - inputs = [c["representation"] for c in containers] - instance.data["inputs"] = inputs + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py index 43941bde4f..470fceffc9 100644 --- a/openpype/hosts/maya/plugins/publish/collect_inputs.py +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -1,4 +1,6 @@ import copy +from bson.objectid import ObjectId + from maya import cmds import maya.api.OpenMaya as om import pyblish.api @@ -163,8 +165,8 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): containers = collect_input_containers(scene_containers, nodes) - inputs = [c["representation"] for c in containers] - instance.data["inputs"] = inputs + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs self.log.info("Collected inputs: %s" % inputs) From 19f81dbf40bbe509506f9f13b4dcfa70133f8b92 Mon Sep 17 00:00:00 2001 From: 
Roy Nieterau Date: Tue, 9 Aug 2022 11:38:44 +0200 Subject: [PATCH 0234/2550] Add Collector to convert `inputRepresentations` -> `inputVersions` --- ...llect_input_representations_to_versions.py | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 openpype/plugins/publish/collect_input_representations_to_versions.py diff --git a/openpype/plugins/publish/collect_input_representations_to_versions.py b/openpype/plugins/publish/collect_input_representations_to_versions.py new file mode 100644 index 0000000000..03f2abf51f --- /dev/null +++ b/openpype/plugins/publish/collect_input_representations_to_versions.py @@ -0,0 +1,48 @@ +import pyblish.api + +from bson.objectid import ObjectId + +from openpype.client import get_representations + + +class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin): + """Converts collected input representations to input versions. + + Any data in `instance.data["inputRepresentations"]` gets converted into + `instance.data["inputVersions"]` as supported in OpenPype v3. + + """ + # This is a ContextPlugin because then we can query the database only once + # for the conversion of representation ids to version ids (optimization) + label = "Input Representations to Versions" + order = pyblish.api.CollectorOrder + 0.499 + hosts = ["*"] + + def process(self, context): + # Query all version ids for representation ids from the database once + representations = set() + for instance in context: + inst_repre = instance.data.get("inputRepresentations", []) + representations.update(inst_repre) + + representations_docs = get_representations( + project_name=context.data["projectEntity"]["name"], + representation_ids=representations, + fields=["_id", "parent"]) + + representation_id_to_version_id = { + repre["_id"]: repre["parent"] for repre in representations_docs + } + + for instance in context: + inst_repre = instance.data.get("inputRepresentations", []) + if not inst_repre: + continue + + input_versions = instance.data.get("inputVersions", []) + for repre_id in inst_repre: + repre_id = ObjectId(repre_id) + version_id = representation_id_to_version_id[repre_id] + input_versions.append(version_id) + instance.data["inputVersions"] = input_versions + From 97d55eb335e417102c519d10f280a28afb3275c4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 11:39:17 +0200 Subject: [PATCH 0235/2550] modified docstrings --- openpype/pipeline/workfile/path_resolving.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 9525dd59dc..07a814f616 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -13,17 +13,13 @@ def get_workfile_template_key_from_context( Do the same as `get_workfile_template_key` but returns value for "session context". - It is required to pass one of 'dbcon' with already set project name or - 'project_name' arguments. - Args: asset_name(str): Name of asset document. task_name(str): Task name for which is template key retrieved. Must be available on asset document under `data.tasks`. host_name(str): Name of host implementation for which is workfile used. - project_name(str): Project name where asset and task is. Not required - when 'dbcon' is passed. + project_name(str): Project name where asset and task is. project_settings(Dict[str, Any]): Project settings for passed 'project_name'. Not required at all but makes function faster. 
""" @@ -104,7 +100,6 @@ def get_workdir_with_workdir_data( Args: workdir_data (Dict[str, Any]): Data to fill workdir template. project_name (str): Project's name. - otherwise Anatomy object is created with using the project name. anatomy (Anatomy): Anatomy object for specific project. Faster processing if is passed. template_key (str): Key of work templates in anatomy templates. If not From 257f027d900e259d611bc70becaa1a30065ee3fd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Aug 2022 11:40:28 +0200 Subject: [PATCH 0236/2550] Remove blank line --- .../plugins/publish/collect_input_representations_to_versions.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/collect_input_representations_to_versions.py b/openpype/plugins/publish/collect_input_representations_to_versions.py index 03f2abf51f..18a19bce80 100644 --- a/openpype/plugins/publish/collect_input_representations_to_versions.py +++ b/openpype/plugins/publish/collect_input_representations_to_versions.py @@ -45,4 +45,3 @@ class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin): version_id = representation_id_to_version_id[repre_id] input_versions.append(version_id) instance.data["inputVersions"] = input_versions - From c4a932d3e2cf989b7f98e7d309b6368049619679 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Aug 2022 12:17:42 +0200 Subject: [PATCH 0237/2550] Refactor `get_output_link_versions` to query `data.inputLinks.id` instead of `data.inputLinks.input` --- openpype/client/entities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index dd5d831ecf..326c8a58a9 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -819,7 +819,7 @@ def get_output_link_versions(project_name, version_id, fields=None): # Does make sense to look for hero versions? query_filter = { "type": "version", - "data.inputLinks.input": version_id + "data.inputLinks.id": version_id } return conn.find(query_filter, _prepare_fields(fields)) From 48c94ea22b0f53108d3023f48bd3c681b108b60d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:29:30 +0200 Subject: [PATCH 0238/2550] added operations for workfile info --- openpype/client/operations.py | 47 +++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 69d1eb2bb6..c4b95bf696 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -17,6 +17,7 @@ CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" CURRENT_VERSION_SCHEMA = "openpype:version-3.0" CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" +CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" def _create_or_convert_to_mongo_id(mongo_id): @@ -188,6 +189,38 @@ def new_representation_doc( } +def new_workfile_info_doc( + filename, asset_id, task_name, files, data=None, entity_id=None +): + """Create skeleton data of workfile info document. + + Workfile document is at this moment used primarily for artist notes. + + Args: + filename (str): Filename of workfile. + asset_id (Union[str, ObjectId]): Id of asset under which workfile live. + task_name (str): Task under which was workfile created. + files (List[str]): List of rootless filepaths related to workfile. + data (Dict[str, Any]): Additional metadata. + + Returns: + Dict[str, Any]: Skeleton of workfile info document. 
+ """ + + if not data: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "workfile", + "parent": ObjectId(asset_id), + "task_name": task_name, + "filename": filename, + "data": data, + "files": files + } + + def _prepare_update_data(old_doc, new_doc, replace): changes = {} for key, value in new_doc.items(): @@ -243,6 +276,20 @@ def prepare_representation_update_data(old_doc, new_doc, replace=True): return _prepare_update_data(old_doc, new_doc, replace) +def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): + """Compare two workfile info documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + @six.add_metaclass(ABCMeta) class AbstractOperation(object): """Base operation class. From adcc7010c2f84e2cd6edc2fe01065082cb63f8ca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:31:08 +0200 Subject: [PATCH 0239/2550] workfiles tool use operations session to create workfile info documents --- openpype/tools/workfiles/window.py | 69 +++++++++++++++++++++--------- 1 file changed, 48 insertions(+), 21 deletions(-) diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 0b0d67e589..de42b80d64 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -1,18 +1,20 @@ import os import datetime +import copy from Qt import QtCore, QtWidgets, QtGui from openpype.client import ( - get_asset_by_id, get_asset_by_name, get_workfile_info, ) +from openpype.client.operations import ( + OperationsSession, + new_workfile_info_doc, + prepare_workfile_info_update_data, +) from openpype import style from openpype import resources -from openpype.lib import ( - create_workfile_doc, - save_workfile_data_to_doc, -) +from openpype.pipeline import Anatomy from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.tasks_widget import TasksWidget @@ -324,10 +326,23 @@ class Window(QtWidgets.QWidget): workfile_doc, data = self.side_panel.get_workfile_data() if not workfile_doc: filepath = self.files_widget._get_selected_filepath() - self._create_workfile_doc(filepath, force=True) - workfile_doc = self._get_current_workfile_doc() + workfile_doc = self._create_workfile_doc(filepath) - save_workfile_data_to_doc(workfile_doc, data, legacy_io) + new_workfile_doc = copy.deepcopy(workfile_doc) + new_workfile_doc["data"] = data + update_data = prepare_workfile_info_update_data( + workfile_doc, new_workfile_doc + ) + if not update_data: + return + + project_name = legacy_io.active_project() + + session = OperationsSession() + session.update_entity( + project_name, "workfile", workfile_doc["_id"], update_data + ) + session.commit() def _get_current_workfile_doc(self, filepath=None): if filepath is None: @@ -343,20 +358,32 @@ class Window(QtWidgets.QWidget): project_name, asset_id, task_name, filename ) - def _create_workfile_doc(self, filepath, force=False): - workfile_doc = None - if not force: - workfile_doc = self._get_current_workfile_doc(filepath) + def _create_workfile_doc(self, filepath): + workfile_doc = self._get_current_workfile_doc(filepath) + if workfile_doc: + return workfile_doc - if not workfile_doc: - workdir, filename = os.path.split(filepath) - asset_id = 
self.assets_widget.get_selected_asset_id() - project_name = legacy_io.active_project() - asset_doc = get_asset_by_id(project_name, asset_id) - task_name = self.tasks_widget.get_selected_task_name() - create_workfile_doc( - asset_doc, task_name, filename, workdir, legacy_io - ) + workdir, filename = os.path.split(filepath) + + project_name = legacy_io.active_project() + asset_id = self.assets_widget.get_selected_asset_id() + task_name = self.tasks_widget.get_selected_task_name() + + anatomy = Anatomy(project_name) + success, rootless_dir = anatomy.find_root_template_from_path(workdir) + filepath = "/".join([ + os.path.normpath(rootless_dir).replace("\\", "/"), + filename + ]) + + workfile_doc = new_workfile_info_doc( + filename, asset_id, task_name, [filepath] + ) + + session = OperationsSession() + session.create_entity(project_name, "workfile", workfile_doc) + session.commit() + return workfile_doc def refresh(self): # Refresh asset widget From c64578684d4d280121c30d402815934c54af6683 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:31:26 +0200 Subject: [PATCH 0240/2550] marked create and update workfile doc functions as deprecated --- openpype/lib/avalon_context.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 636806d1f4..c341b35b71 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -670,7 +670,6 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): return changes -@with_pipeline_io @deprecated("openpype.client.get_workfile_info") def get_workfile_doc(asset_id, task_name, filename, dbcon=None): """Return workfile document for entered context. @@ -691,13 +690,14 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io project_name = dbcon.active_project() return get_workfile_info(project_name, asset_id, task_name, filename) -@with_pipeline_io +@deprecated def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): """Creates or replace workfile document in mongo. 
@@ -718,6 +718,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Filter of workfile document @@ -764,7 +765,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): ) -@with_pipeline_io +@deprecated def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not workfile_doc: # TODO add log message @@ -775,6 +776,7 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Convert data to mongo modification keys/values From b89e99e8905a91deda2211138570978023c3e26e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 12:47:29 +0200 Subject: [PATCH 0241/2550] change imports of 'get_workfile_template_key', 'get_workfile_template_key_from_context' and 'get_workdir_with_workdir_data' and 'get_workdir' in code --- .../plugins/publish/integrate_batch_group.py | 10 +++++++-- .../tvpaint/plugins/load/load_workfile.py | 7 +++--- openpype/lib/applications.py | 22 +++++++++++++------ .../action_fill_workfile_attr.py | 11 +++++----- openpype/tools/workfiles/files_widget.py | 3 ++- 5 files changed, 35 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index b59107f155..4d45f67ded 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -3,9 +3,9 @@ import copy from collections import OrderedDict from pprint import pformat import pyblish -from openpype.lib import get_workdir import openpype.hosts.flame.api as opfapi import openpype.pipeline as op_pipeline +from openpype.pipeline.workfile import get_workdir class IntegrateBatchGroup(pyblish.api.InstancePlugin): @@ -324,7 +324,13 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): project_doc = instance.data["projectEntity"] asset_entity = instance.data["assetEntity"] anatomy = instance.context.data["anatomy"] + project_settings = instance.context.data["project_settings"] return get_workdir( - project_doc, asset_entity, task_data["name"], "flame", anatomy + project_doc, + asset_entity, + task_data["name"], + "flame", + anatomy, + project_settings=project_settings ) diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 8b09d20755..40ce972a09 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -2,7 +2,6 @@ import os from openpype.lib import ( StringTemplate, - get_workfile_template_key_from_context, get_last_workfile_with_version, ) from openpype.pipeline import ( @@ -10,6 +9,9 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) +from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context, +) from openpype.pipeline.template_data import get_template_data_with_names from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -57,8 +59,7 @@ class LoadWorkfile(plugin.Loader): asset_name, task_name, host_name, - project_name=project_name, - dbcon=legacy_io + project_name=project_name ) anatomy = Anatomy(project_name) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index da8623ea13..f1ddae6063 100644 --- a/openpype/lib/applications.py +++ 
b/openpype/lib/applications.py @@ -27,11 +27,7 @@ from openpype.settings.constants import ( from . import PypeLogger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username -from .avalon_context import ( - get_workdir_with_workdir_data, - get_workfile_template_key, - get_last_workfile -) +from .avalon_context import get_last_workfile from .python_module_tools import ( modules_from_path, @@ -1635,7 +1631,14 @@ def prepare_context_environments(data, env_group=None): data["task_type"] = task_type try: - workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + from openpype.pipeline.workfile import get_workdir_with_workdir_data + + workdir = get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + project_settings=project_settings + ) except Exception as exc: raise ApplicationLaunchFailed( @@ -1725,11 +1728,16 @@ def _prepare_last_workfile(data, workdir): if not last_workfile_path: extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) if extensions: + from openpype.pipeline import get_workfile_template_key + anatomy = data["anatomy"] project_settings = data["project_settings"] task_type = workdir_data["task"]["type"] template_key = get_workfile_template_key( - task_type, app.host_name, project_settings=project_settings + task_type, + app.host_name, + project_name, + project_settings=project_settings ) # Find last workfile file_template = str(anatomy.templates[template_key]["file"]) diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index c7fa2dce5e..fb1cdf340e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -12,12 +12,10 @@ from openpype.client import ( get_assets, ) from openpype.settings import get_project_settings, get_system_settings -from openpype.lib import ( - get_workfile_template_key, - StringTemplate, -) +from openpype.lib import StringTemplate from openpype.pipeline import Anatomy from openpype.pipeline.template_data import get_template_data +from openpype.pipeline.workfile import get_workfile_template_key from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks @@ -299,7 +297,10 @@ class FillWorkfileAttributeAction(BaseAction): task_type = workfile_data["task"]["type"] template_key = get_workfile_template_key( - task_type, host_name, project_settings=project_settings + task_type, + host_name, + project_name, + project_settings=project_settings ) if template_key in templates_by_key: template = templates_by_key[template_key] diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 34692b7102..a4109c511e 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -12,7 +12,6 @@ from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate from openpype.lib import ( emit_event, - get_workfile_template_key, create_workdir_extra_folders, ) from openpype.lib.avalon_context import ( @@ -24,6 +23,8 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) +from openpype.pipeline.workfile import get_workfile_template_key + from .model import ( WorkAreaFilesModel, PublishFilesModel, From 02007784faa52417e2e8bd9381dd4d7b523f1e1c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: 
Tue, 9 Aug 2022 12:56:50 +0200 Subject: [PATCH 0242/2550] moved 'get_last_workfile_with_version' and 'get_last_workfile' to 'openpype.pipeline.workfile' --- .../tvpaint/plugins/load/load_workfile.py | 6 +- openpype/lib/applications.py | 6 +- openpype/lib/avalon_context.py | 92 ++---------- openpype/pipeline/workfile/__init__.py | 6 + openpype/pipeline/workfile/path_resolving.py | 131 +++++++++++++++++- openpype/tools/workfiles/save_as_dialog.py | 2 +- 6 files changed, 153 insertions(+), 90 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 40ce972a09..a99b300730 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,9 +1,6 @@ import os -from openpype.lib import ( - StringTemplate, - get_last_workfile_with_version, -) +from openpype.lib import StringTemplate from openpype.pipeline import ( registered_host, legacy_io, @@ -11,6 +8,7 @@ from openpype.pipeline import ( ) from openpype.pipeline.workfile import ( get_workfile_template_key_from_context, + get_last_workfile_with_version, ) from openpype.pipeline.template_data import get_template_data_with_names from openpype.hosts.tvpaint.api import lib, pipeline, plugin diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index f1ddae6063..8c92665366 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -27,7 +27,6 @@ from openpype.settings.constants import ( from . import PypeLogger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username -from .avalon_context import get_last_workfile from .python_module_tools import ( modules_from_path, @@ -1728,7 +1727,10 @@ def _prepare_last_workfile(data, workdir): if not last_workfile_path: extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) if extensions: - from openpype.pipeline import get_workfile_template_key + from openpype.pipeline.workfile import ( + get_workfile_template_key, + get_last_workfile + ) anatomy = data["anatomy"] project_settings = data["project_settings"] diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index c341b35b71..a2a1839218 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1696,6 +1696,7 @@ def get_custom_workfile_template(template_profiles): ) +@deprecated("openpype.pipeline.workfile.get_last_workfile_with_version") def get_last_workfile_with_version( workdir, file_template, fill_data, extensions ): @@ -1711,78 +1712,15 @@ def get_last_workfile_with_version( tuple: Last workfile with version if there is any otherwise returns (None, None). """ - if not os.path.exists(workdir): - return None, None - # Fast match on extension - filenames = [ - filename - for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in extensions - ] + from openpype.pipeline.workfile import get_last_workfile_with_version - # Build template without optionals, version to digits only regex - # and comment to any definable value. - _ext = [] - for ext in extensions: - if not ext.startswith("."): - ext = "." 
+ ext - # Escape dot for regex - ext = "\\" + ext - _ext.append(ext) - ext_expression = "(?:" + "|".join(_ext) + ")" - - # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end - file_template = re.sub(r"\.?{ext}", ext_expression, file_template) - # Replace optional keys with optional content regex - file_template = re.sub(r"<.*?>", r".*?", file_template) - # Replace `{version}` with group regex - file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) - file_template = re.sub(r"{comment.*?}", r".+?", file_template) - file_template = StringTemplate.format_strict_template( - file_template, fill_data + return get_last_workfile_with_version( + workdir, file_template, fill_data, extensions ) - # Match with ignore case on Windows due to the Windows - # OS not being case-sensitive. This avoids later running - # into the error that the file did exist if it existed - # with a different upper/lower-case. - kwargs = {} - if platform.system().lower() == "windows": - kwargs["flags"] = re.IGNORECASE - - # Get highest version among existing matching files - version = None - output_filenames = [] - for filename in sorted(filenames): - match = re.match(file_template, filename, **kwargs) - if not match: - continue - - file_version = int(match.group(1)) - if version is None or file_version > version: - output_filenames[:] = [] - version = file_version - - if file_version == version: - output_filenames.append(filename) - - output_filename = None - if output_filenames: - if len(output_filenames) == 1: - output_filename = output_filenames[0] - else: - last_time = None - for _output_filename in output_filenames: - full_path = os.path.join(workdir, _output_filename) - mod_time = os.path.getmtime(full_path) - if last_time is None or last_time < mod_time: - output_filename = _output_filename - last_time = mod_time - - return output_filename, version - +@deprecated("openpype.pipeline.workfile.get_last_workfile") def get_last_workfile( workdir, file_template, fill_data, extensions, full_path=False ): @@ -1800,22 +1738,12 @@ def get_last_workfile( Returns: str: Last or first workfile as filename of full path to filename. 
""" - filename, version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions + + from openpype.pipeline.workfile import get_last_workfile + + return get_last_workfile( + workdir, file_template, fill_data, extensions, full_path ) - if filename is None: - data = copy.deepcopy(fill_data) - data["version"] = 1 - data.pop("comment", None) - if not data.get("ext"): - data["ext"] = extensions[0] - data["ext"] = data["ext"].replace('.', '') - filename = StringTemplate.format_strict_template(file_template, data) - - if full_path: - return os.path.normpath(os.path.join(workdir, filename)) - - return filename @with_pipeline_io diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index 3a51491cdd..dc4955f7af 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -3,6 +3,9 @@ from .path_resolving import ( get_workfile_template_key, get_workdir_with_workdir_data, get_workdir, + + get_last_workfile_with_version, + get_last_workfile, ) @@ -11,4 +14,7 @@ __all__ = ( "get_workfile_template_key", "get_workdir_with_workdir_data", "get_workdir", + + "get_last_workfile_with_version", + "get_last_workfile", ) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 07a814f616..7362902bcd 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -1,6 +1,11 @@ +import os +import re +import copy +import platform + from openpype.client import get_asset_by_name from openpype.settings import get_project_settings -from openpype.lib import filter_profiles +from openpype.lib import filter_profiles, StringTemplate from openpype.pipeline import Anatomy from openpype.pipeline.template_data import get_template_data @@ -177,3 +182,127 @@ def get_workdir( template_key, project_settings ) + + +def get_last_workfile_with_version( + workdir, file_template, fill_data, extensions +): + """Return last workfile version. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(Dict[str, Any]): Data for filling template. + extensions(Iterable[str]): All allowed file extensions of workfile. + + Returns: + Tuple[Union[str, None], Union[int, None]]: Last workfile with version + if there is any workfile otherwise None for both. + """ + + if not os.path.exists(workdir): + return None, None + + # Fast match on extension + filenames = [ + filename + for filename in os.listdir(workdir) + if os.path.splitext(filename)[1] in extensions + ] + + # Build template without optionals, version to digits only regex + # and comment to any definable value. + _ext = [] + for ext in extensions: + if not ext.startswith("."): + ext = "." + ext + # Escape dot for regex + ext = "\\" + ext + _ext.append(ext) + ext_expression = "(?:" + "|".join(_ext) + ")" + + # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end + file_template = re.sub(r"\.?{ext}", ext_expression, file_template) + # Replace optional keys with optional content regex + file_template = re.sub(r"<.*?>", r".*?", file_template) + # Replace `{version}` with group regex + file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) + file_template = re.sub(r"{comment.*?}", r".+?", file_template) + file_template = StringTemplate.format_strict_template( + file_template, fill_data + ) + + # Match with ignore case on Windows due to the Windows + # OS not being case-sensitive. 
This avoids later running + # into the error that the file did exist if it existed + # with a different upper/lower-case. + kwargs = {} + if platform.system().lower() == "windows": + kwargs["flags"] = re.IGNORECASE + + # Get highest version among existing matching files + version = None + output_filenames = [] + for filename in sorted(filenames): + match = re.match(file_template, filename, **kwargs) + if not match: + continue + + file_version = int(match.group(1)) + if version is None or file_version > version: + output_filenames[:] = [] + version = file_version + + if file_version == version: + output_filenames.append(filename) + + output_filename = None + if output_filenames: + if len(output_filenames) == 1: + output_filename = output_filenames[0] + else: + last_time = None + for _output_filename in output_filenames: + full_path = os.path.join(workdir, _output_filename) + mod_time = os.path.getmtime(full_path) + if last_time is None or last_time < mod_time: + output_filename = _output_filename + last_time = mod_time + + return output_filename, version + + +def get_last_workfile( + workdir, file_template, fill_data, extensions, full_path=False +): + """Return last workfile filename. + + Returns file with version 1 if there is not workfile yet. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(Dict[str, Any]): Data for filling template. + extensions(Iterable[str]): All allowed file extensions of workfile. + full_path(bool): Full path to file is returned if set to True. + + Returns: + str: Last or first workfile as filename of full path to filename. + """ + + filename, version = get_last_workfile_with_version( + workdir, file_template, fill_data, extensions + ) + if filename is None: + data = copy.deepcopy(fill_data) + data["version"] = 1 + data.pop("comment", None) + if not data.get("ext"): + data["ext"] = extensions[0] + data["ext"] = data["ext"].replace('.', '') + filename = StringTemplate.format_strict_template(file_template, data) + + if full_path: + return os.path.normpath(os.path.join(workdir, filename)) + + return filename diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index ea602846e7..cded4eb1a5 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -5,11 +5,11 @@ import logging from Qt import QtWidgets, QtCore -from openpype.lib import get_last_workfile_with_version from openpype.pipeline import ( registered_host, legacy_io, ) +from openpype.pipeline.workfile import get_last_workfile_with_version from openpype.pipeline.template_data import get_template_data_with_names from openpype.tools.utils import PlaceholderLineEdit From 5b559fd28d439a2e9ba2185eae428b7c63b69fb5 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 9 Aug 2022 13:17:06 +0200 Subject: [PATCH 0243/2550] create shelf manager definition for houdini in openpype project settings --- .../defaults/project_settings/houdini.json | 21 +++++ .../schema_project_houdini.json | 6 +- .../schemas/schema_houdini_scriptshelf.json | 81 +++++++++++++++++++ 3 files changed, 107 insertions(+), 1 deletion(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 911bf82d9b..5805f600c5 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ 
b/openpype/settings/defaults/project_settings/houdini.json @@ -1,4 +1,25 @@ { + "shelves": [ + { + "shelf_set_name": "OpenPype Shelves", + "shelf_set_source_path": "/path/to/your/shelf_set_file", + "shelf_definition": [ + { + "shelf_name": "OpenPype Shelf", + "shelf_file_path": "/path/to/your/shelf_file", + "tools_list": [ + { + "name": "OpenPype Tool", + "filepath": "/path/to/your/tool_file", + "script": "/path/to/your/tool_script", + "icon": "/path/to/your/icon", + "help": "Help message for your tool" + } + ] + } + ] + } + ], "create": { "CreateArnoldAss": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json index cad99dde22..bde4352964 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json @@ -5,6 +5,10 @@ "label": "Houdini", "is_file": true, "children": [ + { + "type": "schema", + "name": "schema_houdini_scriptshelf" + }, { "type": "schema", "name": "schema_houdini_create" @@ -28,4 +32,4 @@ ] } ] -} +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json new file mode 100644 index 0000000000..5a84c6d5cc --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -0,0 +1,81 @@ +{ + "type": "list", + "key": "shelves", + "label": "Shelves Manager", + "is_group": true, + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "shelf_set_name", + "label": "Shelf Set Name" + }, + { + "type": "path", + "key": "shelf_set_source_path", + "label": "Shelf Set Path", + "multipath": true, + "multiplatform": true + }, + { + "type": "list", + "key": "shelf_definition", + "label": "Shelves", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "shelf_name", + "label": "Shelf Name" + }, + { + "type": "text", + "key": "shelf_file_path", + "label": "Shelf File Path" + }, + { + "type": "list", + "key": "tools_list", + "label": "Tools", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "filepath", + "label": "File Path" + }, + { + "type": "text", + "key": "script", + "label": "Script" + }, + { + "type": "text", + "key": "icon", + "label": "Icon" + }, + { + "type": "text", + "key": "help", + "label": "Help" + } + ] + } + } + ] + } + } + ] + } +} \ No newline at end of file From bf463afc41abcb4afd25006422b17d940aee1300 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 13:50:16 +0200 Subject: [PATCH 0244/2550] moved 'get_workdir_from_session' to context tools --- .../fusion/scripts/fusion_switch_shot.py | 2 +- .../hosts/fusion/utility_scripts/switch_ui.py | 2 +- openpype/lib/avalon_context.py | 27 +++----------- openpype/pipeline/context_tools.py | 35 +++++++++++++++++++ openpype/scripts/fusion_switch_shot.py | 2 +- 5 files changed, 43 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index 87ff8e2ffe..49ef340679 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ 
b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -15,7 +15,7 @@ from openpype.pipeline import ( from openpype.lib import version_up from openpype.hosts.fusion import api from openpype.hosts.fusion.api import lib -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Update Slap Comp") diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/utility_scripts/switch_ui.py index 01d55db647..93f775b24b 100644 --- a/openpype/hosts/fusion/utility_scripts/switch_ui.py +++ b/openpype/hosts/fusion/utility_scripts/switch_ui.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( legacy_io, ) from openpype.hosts.fusion import api -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Fusion Switch Shot") diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index a2a1839218..1b2ac459a1 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -554,6 +554,8 @@ def compute_session_changes( dict: The required changes in the Session dictionary. """ + from openpype.pipeline.context_tools import get_workdir_from_session + changes = dict() # If no changes, return directly @@ -600,30 +602,11 @@ def compute_session_changes( return changes -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_workdir_from_session") def get_workdir_from_session(session=None, template_key=None): - from openpype.pipeline import Anatomy - from openpype.pipeline.context_tools import get_template_data_from_session + from openpype.pipeline.context_tools import get_workdir_from_session - if session is None: - session = legacy_io.Session - project_name = session["AVALON_PROJECT"] - host_name = session["AVALON_APP"] - anatomy = Anatomy(project_name) - template_data = get_template_data_from_session(session) - anatomy_filled = anatomy.format(template_data) - - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - task_type, - host_name, - project_name=project_name - ) - path = anatomy_filled[template_key]["folder"] - if path: - path = os.path.normpath(path) - return path + return get_workdir_from_session(session, template_key) @with_pipeline_io diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index c8c70e5ea8..13185c72b2 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -22,6 +22,7 @@ from openpype.settings import get_project_settings from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names +from .workfile import get_workfile_template_key from . import ( legacy_io, register_loader_plugin_path, @@ -377,3 +378,37 @@ def get_template_data_from_session(session=None, system_settings=None): return get_template_data_with_names( project_name, asset_name, task_name, host_name, system_settings ) + + +def get_workdir_from_session(session=None, template_key=None): + """Template data for template fill from session keys. + + Args: + session (Union[Dict[str, str], None]): The Session to use. If not + provided use the currently active global Session. + template_key (str): Prepared template key from which workdir is + calculated. + + Returns: + str: Workdir path. 
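+
+    Example:
+        Rough sketch of expected usage; the session values shown are
+        placeholders, not a real project context::
+
+            session = {
+                "AVALON_PROJECT": "myProject",
+                "AVALON_ASSET": "sh010",
+                "AVALON_TASK": "compositing",
+                "AVALON_APP": "maya"
+            }
+            workdir = get_workdir_from_session(session)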
+ """ + + if session is None: + session = legacy_io.Session + project_name = session["AVALON_PROJECT"] + host_name = session["AVALON_APP"] + anatomy = Anatomy(project_name) + template_data = get_template_data_from_session(session) + anatomy_filled = anatomy.format(template_data) + + if not template_key: + task_type = template_data["task"]["type"] + template_key = get_workfile_template_key( + task_type, + host_name, + project_name=project_name + ) + path = anatomy_filled[template_key]["folder"] + if path: + path = os.path.normpath(path) + return path diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py index 15f189e7cb..fc22f060a2 100644 --- a/openpype/scripts/fusion_switch_shot.py +++ b/openpype/scripts/fusion_switch_shot.py @@ -17,7 +17,7 @@ from openpype.pipeline import ( legacy_io, ) -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Update Slap Comp") From 01d87ba032dc5930526f7740bdcbd4840b9fb508 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 15:10:45 +0200 Subject: [PATCH 0245/2550] moved build workfile to 'openpype.pipeline.workfile' --- openpype/lib/avalon_context.py | 658 +----------------- openpype/pipeline/workfile/__init__.py | 4 + openpype/pipeline/workfile/build_workfile.py | 693 +++++++++++++++++++ 3 files changed, 701 insertions(+), 654 deletions(-) create mode 100644 openpype/pipeline/workfile/build_workfile.py diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 1b2ac459a1..b32c9bce6d 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -777,661 +777,11 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): ) -class BuildWorkfile: - """Wrapper for build workfile process. +@deprecated("openpype.pipeline.workfile.BuildWorkfile") +def BuildWorkfile(): + from openpype.pipeline.workfile import BuildWorkfile - Load representations for current context by build presets. Build presets - are host related, since each host has it's loaders. - """ - - log = logging.getLogger("BuildWorkfile") - - @staticmethod - def map_subsets_by_family(subsets): - subsets_by_family = collections.defaultdict(list) - for subset in subsets: - family = subset["data"].get("family") - if not family: - families = subset["data"].get("families") - if not families: - continue - family = families[0] - - subsets_by_family[family].append(subset) - return subsets_by_family - - def process(self): - """Main method of this wrapper. - - Building of workfile is triggered and is possible to implement - post processing of loaded containers if necessary. - """ - containers = self.build_workfile() - - return containers - - @with_pipeline_io - def build_workfile(self): - """Prepares and load containers into workfile. - - Loads latest versions of current and linked assets to workfile by logic - stored in Workfile profiles from presets. Profiles are set by host, - filtered by current task name and used by families. - - Each family can specify representation names and loaders for - representations and first available and successful loaded - representation is returned as container. - - At the end you'll get list of loaded containers per each asset. - - loaded_containers [{ - "asset_entity": , - "containers": [, , ...] - }, { - "asset_entity": , - "containers": [, ...] - }, { - ... 
- }] - """ - from openpype.pipeline import discover_loader_plugins - - # Get current asset name and entity - project_name = legacy_io.active_project() - current_asset_name = legacy_io.Session["AVALON_ASSET"] - current_asset_entity = get_asset_by_name( - project_name, current_asset_name - ) - # Skip if asset was not found - if not current_asset_entity: - print("Asset entity with name `{}` was not found".format( - current_asset_name - )) - return - - # Prepare available loaders - loaders_by_name = {} - for loader in discover_loader_plugins(): - loader_name = loader.__name__ - if loader_name in loaders_by_name: - raise KeyError( - "Duplicated loader name {0}!".format(loader_name) - ) - loaders_by_name[loader_name] = loader - - # Skip if there are any loaders - if not loaders_by_name: - self.log.warning("There are no registered loaders.") - return - - # Get current task name - current_task_name = legacy_io.Session["AVALON_TASK"] - - # Load workfile presets for task - self.build_presets = self.get_build_presets( - current_task_name, current_asset_entity - ) - - # Skip if there are any presets for task - if not self.build_presets: - self.log.warning( - "Current task `{}` does not have any loading preset.".format( - current_task_name - ) - ) - return - - # Get presets for loading current asset - current_context_profiles = self.build_presets.get("current_context") - # Get presets for loading linked assets - link_context_profiles = self.build_presets.get("linked_assets") - # Skip if both are missing - if not current_context_profiles and not link_context_profiles: - self.log.warning( - "Current task `{}` has empty loading preset.".format( - current_task_name - ) - ) - return - - elif not current_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any loading" - " preset for it's context." - ).format(current_task_name)) - - elif not link_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any" - "loading preset for it's linked assets." - ).format(current_task_name)) - - # Prepare assets to process by workfile presets - assets = [] - current_asset_id = None - if current_context_profiles: - # Add current asset entity if preset has current context set - assets.append(current_asset_entity) - current_asset_id = current_asset_entity["_id"] - - if link_context_profiles: - # Find and append linked assets if preset has set linked mapping - link_assets = get_linked_assets(current_asset_entity) - if link_assets: - assets.extend(link_assets) - - # Skip if there are no assets. This can happen if only linked mapping - # is set and there are no links for his asset. - if not assets: - self.log.warning( - "Asset does not have linked assets. Nothing to process." 
- ) - return - - # Prepare entities from database for assets - prepared_entities = self._collect_last_version_repres(assets) - - # Load containers by prepared entities and presets - loaded_containers = [] - # - Current asset containers - if current_asset_id and current_asset_id in prepared_entities: - current_context_data = prepared_entities.pop(current_asset_id) - loaded_data = self.load_containers_by_asset_data( - current_context_data, current_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # - Linked assets container - for linked_asset_data in prepared_entities.values(): - loaded_data = self.load_containers_by_asset_data( - linked_asset_data, link_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # Return list of loaded containers - return loaded_containers - - @with_pipeline_io - def get_build_presets(self, task_name, asset_doc): - """ Returns presets to build workfile for task name. - - Presets are loaded for current project set in - io.Session["AVALON_PROJECT"], filtered by registered host - and entered task name. - - Args: - task_name (str): Task name used for filtering build presets. - - Returns: - (dict): preset per entered task name - """ - host_name = os.environ["AVALON_APP"] - project_settings = get_project_settings( - legacy_io.Session["AVALON_PROJECT"] - ) - - host_settings = project_settings.get(host_name) or {} - # Get presets for host - wb_settings = host_settings.get("workfile_builder") - if not wb_settings: - # backward compatibility - wb_settings = host_settings.get("workfile_build") or {} - - builder_profiles = wb_settings.get("profiles") - if not builder_profiles: - return None - - task_type = ( - asset_doc - .get("data", {}) - .get("tasks", {}) - .get(task_name, {}) - .get("type") - ) - filter_data = { - "task_types": task_type, - "tasks": task_name - } - return filter_profiles(builder_profiles, filter_data) - - def _filter_build_profiles(self, build_profiles, loaders_by_name): - """ Filter build profiles by loaders and prepare process data. - - Valid profile must have "loaders", "families" and "repre_names" keys - with valid values. - - "loaders" expects list of strings representing possible loaders. - - "families" expects list of strings for filtering - by main subset family. - - "repre_names" expects list of strings for filtering by - representation name. - - Lowered "families" and "repre_names" are prepared for each profile with - all required keys. - - Args: - build_profiles (dict): Profiles for building workfile. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list): Filtered and prepared profiles. 
- """ - valid_profiles = [] - for profile in build_profiles: - # Check loaders - profile_loaders = profile.get("loaders") - if not profile_loaders: - self.log.warning(( - "Build profile has missing loaders configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check if any loader is available - loaders_match = False - for loader_name in profile_loaders: - if loader_name in loaders_by_name: - loaders_match = True - break - - if not loaders_match: - self.log.warning(( - "All loaders from Build profile are not available: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check families - profile_families = profile.get("families") - if not profile_families: - self.log.warning(( - "Build profile is missing families configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check representation names - profile_repre_names = profile.get("repre_names") - if not profile_repre_names: - self.log.warning(( - "Build profile is missing" - " representation names filtering: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Prepare lowered families and representation names - profile["families_lowered"] = [ - fam.lower() for fam in profile_families - ] - profile["repre_names_lowered"] = [ - name.lower() for name in profile_repre_names - ] - - valid_profiles.append(profile) - - return valid_profiles - - def _prepare_profile_for_subsets(self, subsets, profiles): - """Select profile for each subset by it's data. - - Profiles are filtered for each subset individually. - Profile is filtered by subset's family, optionally by name regex and - representation names set in profile. - It is possible to not find matching profile for subset, in that case - subset is skipped and it is possible that none of subsets have - matching profile. - - Args: - subsets (list): Subset documents. - profiles (dict): Build profiles. - - Returns: - (dict) Profile by subset's id. - """ - # Prepare subsets - subsets_by_family = self.map_subsets_by_family(subsets) - - profiles_per_subset_id = {} - for family, subsets in subsets_by_family.items(): - family_low = family.lower() - for profile in profiles: - # Skip profile if does not contain family - if family_low not in profile["families_lowered"]: - continue - - # Precompile name filters as regexes - profile_regexes = profile.get("subset_name_filters") - if profile_regexes: - _profile_regexes = [] - for regex in profile_regexes: - _profile_regexes.append(re.compile(regex)) - profile_regexes = _profile_regexes - - # TODO prepare regex compilation - for subset in subsets: - # Verify regex filtering (optional) - if profile_regexes: - valid = False - for pattern in profile_regexes: - if re.match(pattern, subset["name"]): - valid = True - break - - if not valid: - continue - - profiles_per_subset_id[subset["_id"]] = profile - - # break profiles loop on finding the first matching profile - break - return profiles_per_subset_id - - def load_containers_by_asset_data( - self, asset_entity_data, build_profiles, loaders_by_name - ): - """Load containers for entered asset entity by Build profiles. - - Args: - asset_entity_data (dict): Prepared data with subsets, last version - and representations for specific asset. - build_profiles (dict): Build profiles. - loaders_by_name (dict): Available loaders per name. - - Returns: - (dict) Output contains asset document and loaded containers. 
- """ - - # Make sure all data are not empty - if not asset_entity_data or not build_profiles or not loaders_by_name: - return - - asset_entity = asset_entity_data["asset_entity"] - - valid_profiles = self._filter_build_profiles( - build_profiles, loaders_by_name - ) - if not valid_profiles: - self.log.warning( - "There are not valid Workfile profiles. Skipping process." - ) - return - - self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) - - subsets_by_id = {} - version_by_subset_id = {} - repres_by_version_id = {} - for subset_id, in_data in asset_entity_data["subsets"].items(): - subset_entity = in_data["subset_entity"] - subsets_by_id[subset_entity["_id"]] = subset_entity - - version_data = in_data["version"] - version_entity = version_data["version_entity"] - version_by_subset_id[subset_id] = version_entity - repres_by_version_id[version_entity["_id"]] = ( - version_data["repres"] - ) - - if not subsets_by_id: - self.log.warning("There are not subsets for asset {0}".format( - asset_entity["name"] - )) - return - - profiles_per_subset_id = self._prepare_profile_for_subsets( - subsets_by_id.values(), valid_profiles - ) - if not profiles_per_subset_id: - self.log.warning("There are not valid subsets.") - return - - valid_repres_by_subset_id = collections.defaultdict(list) - for subset_id, profile in profiles_per_subset_id.items(): - profile_repre_names = profile["repre_names_lowered"] - - version_entity = version_by_subset_id[subset_id] - version_id = version_entity["_id"] - repres = repres_by_version_id[version_id] - for repre in repres: - repre_name_low = repre["name"].lower() - if repre_name_low in profile_repre_names: - valid_repres_by_subset_id[subset_id].append(repre) - - # DEBUG message - msg = "Valid representations for Asset: `{}`".format( - asset_entity["name"] - ) - for subset_id, repres in valid_repres_by_subset_id.items(): - subset = subsets_by_id[subset_id] - msg += "\n# Subset Name/ID: `{}`/{}".format( - subset["name"], subset_id - ) - for repre in repres: - msg += "\n## Repre name: `{}`".format(repre["name"]) - - self.log.debug(msg) - - containers = self._load_containers( - valid_repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ) - - return { - "asset_entity": asset_entity, - "containers": containers - } - - @with_pipeline_io - def _load_containers( - self, repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ): - """Real load by collected data happens here. - - Loading of representations per subset happens here. Each subset can - loads one representation. Loading is tried in specific order. - Representations are tried to load by names defined in configuration. - If subset has representation matching representation name each loader - is tried to load it until any is successful. If none of them was - successful then next representation name is tried. - Subset process loop ends when any representation is loaded or - all matching representations were already tried. - - Args: - repres_by_subset_id (dict): Available representations mapped - by their parent (subset) id. - subsets_by_id (dict): Subset documents mapped by their id. - profiles_per_subset_id (dict): Build profiles mapped by subset id. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list) Objects of loaded containers. - """ - from openpype.pipeline import ( - IncompatibleLoaderError, - load_container, - ) - - loaded_containers = [] - - # Get subset id order from build presets. 
- build_presets = self.build_presets.get("current_context", []) - build_presets += self.build_presets.get("linked_assets", []) - subset_ids_ordered = [] - for preset in build_presets: - for preset_family in preset["families"]: - for id, subset in subsets_by_id.items(): - if preset_family not in subset["data"].get("families", []): - continue - - subset_ids_ordered.append(id) - - # Order representations from subsets. - print("repres_by_subset_id", repres_by_subset_id) - representations_ordered = [] - representations = [] - for id in subset_ids_ordered: - for subset_id, repres in repres_by_subset_id.items(): - if repres in representations: - continue - - if id == subset_id: - representations_ordered.append((subset_id, repres)) - representations.append(repres) - - print("representations", representations) - - # Load ordered representations. - for subset_id, repres in representations_ordered: - subset_name = subsets_by_id[subset_id]["name"] - - profile = profiles_per_subset_id[subset_id] - loaders_last_idx = len(profile["loaders"]) - 1 - repre_names_last_idx = len(profile["repre_names_lowered"]) - 1 - - repre_by_low_name = { - repre["name"].lower(): repre for repre in repres - } - - is_loaded = False - for repre_name_idx, profile_repre_name in enumerate( - profile["repre_names_lowered"] - ): - # Break iteration if representation was already loaded - if is_loaded: - break - - repre = repre_by_low_name.get(profile_repre_name) - if not repre: - continue - - for loader_idx, loader_name in enumerate(profile["loaders"]): - if is_loaded: - break - - loader = loaders_by_name.get(loader_name) - if not loader: - continue - try: - container = load_container( - loader, - repre["_id"], - name=subset_name - ) - loaded_containers.append(container) - is_loaded = True - - except Exception as exc: - if exc == IncompatibleLoaderError: - self.log.info(( - "Loader `{}` is not compatible with" - " representation `{}`" - ).format(loader_name, repre["name"])) - - else: - self.log.error( - "Unexpected error happened during loading", - exc_info=True - ) - - msg = "Loading failed." - if loader_idx < loaders_last_idx: - msg += " Trying next loader." - elif repre_name_idx < repre_names_last_idx: - msg += ( - " Loading of subset `{}` was not successful." - ).format(subset_name) - else: - msg += " Trying next representation." - self.log.info(msg) - - return loaded_containers - - @with_pipeline_io - def _collect_last_version_repres(self, asset_docs): - """Collect subsets, versions and representations for asset_entities. - - Args: - asset_entities (list): Asset entities for which want to find data - - Returns: - (dict): collected entities - - Example output: - ``` - { - {Asset ID}: { - "asset_entity": , - "subsets": { - {Subset ID}: { - "subset_entity": , - "version": { - "version_entity": , - "repres": [ - , , ... - ] - } - }, - ... - } - }, - ... 
- } - output[asset_id]["subsets"][subset_id]["version"]["repres"] - ``` - """ - - output = {} - if not asset_docs: - return output - - asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} - - project_name = legacy_io.active_project() - subsets = list(get_subsets( - project_name, asset_ids=asset_docs_by_ids.keys() - )) - subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - - last_version_by_subset_id = get_last_versions( - project_name, subset_entity_by_ids.keys() - ) - last_version_docs_by_id = { - version["_id"]: version - for version in last_version_by_subset_id.values() - } - repre_docs = get_representations( - project_name, version_ids=last_version_docs_by_id.keys() - ) - - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - version_doc = last_version_docs_by_id[version_id] - - subset_id = version_doc["parent"] - subset_doc = subset_entity_by_ids[subset_id] - - asset_id = subset_doc["parent"] - asset_doc = asset_docs_by_ids[asset_id] - - if asset_id not in output: - output[asset_id] = { - "asset_entity": asset_doc, - "subsets": {} - } - - if subset_id not in output[asset_id]["subsets"]: - output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset_doc, - "version": { - "version_entity": version_doc, - "repres": [] - } - } - - output[asset_id]["subsets"][subset_id]["version"]["repres"].append( - repre_doc - ) - - return output + return BuildWorkfile() @with_pipeline_io diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index dc4955f7af..3bc125cfc4 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -8,6 +8,8 @@ from .path_resolving import ( get_last_workfile, ) +from .build_workfile import BuildWorkfile + __all__ = ( "get_workfile_template_key_from_context", @@ -17,4 +19,6 @@ __all__ = ( "get_last_workfile_with_version", "get_last_workfile", + + "BuildWorkfile", ) diff --git a/openpype/pipeline/workfile/build_workfile.py b/openpype/pipeline/workfile/build_workfile.py new file mode 100644 index 0000000000..bb6fcb4189 --- /dev/null +++ b/openpype/pipeline/workfile/build_workfile.py @@ -0,0 +1,693 @@ +import os +import re +import collections +import json + +from openpype.client import ( + get_asset_by_name, + get_subsets, + get_last_versions, + get_representations, +) +from openpype.settings import get_project_settings +from openpype.lib import ( + get_linked_assets, + filter_profiles, + Logger, +) +from openpype.pipeline import legacy_io +from openpype.pipeline.load import ( + discover_loader_plugins, + IncompatibleLoaderError, + load_container, +) + + +class BuildWorkfile: + """Wrapper for build workfile process. + + Load representations for current context by build presets. Build presets + are host related, since each host has it's loaders. + """ + + _log = None + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + @staticmethod + def map_subsets_by_family(subsets): + subsets_by_family = collections.defaultdict(list) + for subset in subsets: + family = subset["data"].get("family") + if not family: + families = subset["data"].get("families") + if not families: + continue + family = families[0] + + subsets_by_family[family].append(subset) + return subsets_by_family + + def process(self): + """Main method of this wrapper. + + Building of workfile is triggered and is possible to implement + post processing of loaded containers if necessary. 
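+
+        Example:
+            Rough usage sketch of how a host integration may drive the
+            build; not tied to any specific host::
+
+                builder = BuildWorkfile()
+                containers = builder.process()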
+ + Returns: + List[Dict[str, Any]]: Loaded containers during build. + """ + + return self.build_workfile() + + def build_workfile(self): + """Prepares and load containers into workfile. + + Loads latest versions of current and linked assets to workfile by logic + stored in Workfile profiles from presets. Profiles are set by host, + filtered by current task name and used by families. + + Each family can specify representation names and loaders for + representations and first available and successful loaded + representation is returned as container. + + At the end you'll get list of loaded containers per each asset. + + loaded_containers [{ + "asset_entity": , + "containers": [, , ...] + }, { + "asset_entity": , + "containers": [, ...] + }, { + ... + }] + + Returns: + List[Dict[str, Any]]: Loaded containers during build. + """ + + loaded_containers = [] + + # Get current asset name and entity + project_name = legacy_io.active_project() + current_asset_name = legacy_io.Session["AVALON_ASSET"] + current_asset_entity = get_asset_by_name( + project_name, current_asset_name + ) + # Skip if asset was not found + if not current_asset_entity: + print("Asset entity with name `{}` was not found".format( + current_asset_name + )) + return loaded_containers + + # Prepare available loaders + loaders_by_name = {} + for loader in discover_loader_plugins(): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {0}!".format(loader_name) + ) + loaders_by_name[loader_name] = loader + + # Skip if there are any loaders + if not loaders_by_name: + self.log.warning("There are no registered loaders.") + return loaded_containers + + # Get current task name + current_task_name = legacy_io.Session["AVALON_TASK"] + + # Load workfile presets for task + self.build_presets = self.get_build_presets( + current_task_name, current_asset_entity + ) + + # Skip if there are any presets for task + if not self.build_presets: + self.log.warning( + "Current task `{}` does not have any loading preset.".format( + current_task_name + ) + ) + return loaded_containers + + # Get presets for loading current asset + current_context_profiles = self.build_presets.get("current_context") + # Get presets for loading linked assets + link_context_profiles = self.build_presets.get("linked_assets") + # Skip if both are missing + if not current_context_profiles and not link_context_profiles: + self.log.warning( + "Current task `{}` has empty loading preset.".format( + current_task_name + ) + ) + return loaded_containers + + elif not current_context_profiles: + self.log.warning(( + "Current task `{}` doesn't have any loading" + " preset for it's context." + ).format(current_task_name)) + + elif not link_context_profiles: + self.log.warning(( + "Current task `{}` doesn't have any" + "loading preset for it's linked assets." + ).format(current_task_name)) + + # Prepare assets to process by workfile presets + assets = [] + current_asset_id = None + if current_context_profiles: + # Add current asset entity if preset has current context set + assets.append(current_asset_entity) + current_asset_id = current_asset_entity["_id"] + + if link_context_profiles: + # Find and append linked assets if preset has set linked mapping + link_assets = get_linked_assets(current_asset_entity) + if link_assets: + assets.extend(link_assets) + + # Skip if there are no assets. This can happen if only linked mapping + # is set and there are no links for his asset. 
+ if not assets: + self.log.warning( + "Asset does not have linked assets. Nothing to process." + ) + return loaded_containers + + # Prepare entities from database for assets + prepared_entities = self._collect_last_version_repres(assets) + + # Load containers by prepared entities and presets + # - Current asset containers + if current_asset_id and current_asset_id in prepared_entities: + current_context_data = prepared_entities.pop(current_asset_id) + loaded_data = self.load_containers_by_asset_data( + current_context_data, current_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # - Linked assets container + for linked_asset_data in prepared_entities.values(): + loaded_data = self.load_containers_by_asset_data( + linked_asset_data, link_context_profiles, loaders_by_name + ) + if loaded_data: + loaded_containers.append(loaded_data) + + # Return list of loaded containers + return loaded_containers + + def get_build_presets(self, task_name, asset_doc): + """ Returns presets to build workfile for task name. + + Presets are loaded for current project set in + io.Session["AVALON_PROJECT"], filtered by registered host + and entered task name. + + Args: + task_name (str): Task name used for filtering build presets. + + Returns: + Dict[str, Any]: preset per entered task name + """ + + host_name = os.environ["AVALON_APP"] + project_settings = get_project_settings( + legacy_io.Session["AVALON_PROJECT"] + ) + + host_settings = project_settings.get(host_name) or {} + # Get presets for host + wb_settings = host_settings.get("workfile_builder") + if not wb_settings: + # backward compatibility + wb_settings = host_settings.get("workfile_build") or {} + + builder_profiles = wb_settings.get("profiles") + if not builder_profiles: + return None + + task_type = ( + asset_doc + .get("data", {}) + .get("tasks", {}) + .get(task_name, {}) + .get("type") + ) + filter_data = { + "task_types": task_type, + "tasks": task_name + } + return filter_profiles(builder_profiles, filter_data) + + def _filter_build_profiles(self, build_profiles, loaders_by_name): + """ Filter build profiles by loaders and prepare process data. + + Valid profile must have "loaders", "families" and "repre_names" keys + with valid values. + - "loaders" expects list of strings representing possible loaders. + - "families" expects list of strings for filtering + by main subset family. + - "repre_names" expects list of strings for filtering by + representation name. + + Lowered "families" and "repre_names" are prepared for each profile with + all required keys. + + Args: + build_profiles (Dict[str, Any]): Profiles for building workfile. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + List[Dict[str, Any]]: Filtered and prepared profiles. 
+ """ + + valid_profiles = [] + for profile in build_profiles: + # Check loaders + profile_loaders = profile.get("loaders") + if not profile_loaders: + self.log.warning(( + "Build profile has missing loaders configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check if any loader is available + loaders_match = False + for loader_name in profile_loaders: + if loader_name in loaders_by_name: + loaders_match = True + break + + if not loaders_match: + self.log.warning(( + "All loaders from Build profile are not available: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check families + profile_families = profile.get("families") + if not profile_families: + self.log.warning(( + "Build profile is missing families configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check representation names + profile_repre_names = profile.get("repre_names") + if not profile_repre_names: + self.log.warning(( + "Build profile is missing" + " representation names filtering: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Prepare lowered families and representation names + profile["families_lowered"] = [ + fam.lower() for fam in profile_families + ] + profile["repre_names_lowered"] = [ + name.lower() for name in profile_repre_names + ] + + valid_profiles.append(profile) + + return valid_profiles + + def _prepare_profile_for_subsets(self, subsets, profiles): + """Select profile for each subset by it's data. + + Profiles are filtered for each subset individually. + Profile is filtered by subset's family, optionally by name regex and + representation names set in profile. + It is possible to not find matching profile for subset, in that case + subset is skipped and it is possible that none of subsets have + matching profile. + + Args: + subsets (List[Dict[str, Any]]): Subset documents. + profiles (List[Dict[str, Any]]): Build profiles. + + Returns: + Dict[str, Any]: Profile by subset's id. + """ + + # Prepare subsets + subsets_by_family = self.map_subsets_by_family(subsets) + + profiles_per_subset_id = {} + for family, subsets in subsets_by_family.items(): + family_low = family.lower() + for profile in profiles: + # Skip profile if does not contain family + if family_low not in profile["families_lowered"]: + continue + + # Precompile name filters as regexes + profile_regexes = profile.get("subset_name_filters") + if profile_regexes: + _profile_regexes = [] + for regex in profile_regexes: + _profile_regexes.append(re.compile(regex)) + profile_regexes = _profile_regexes + + # TODO prepare regex compilation + for subset in subsets: + # Verify regex filtering (optional) + if profile_regexes: + valid = False + for pattern in profile_regexes: + if re.match(pattern, subset["name"]): + valid = True + break + + if not valid: + continue + + profiles_per_subset_id[subset["_id"]] = profile + + # break profiles loop on finding the first matching profile + break + return profiles_per_subset_id + + def load_containers_by_asset_data( + self, asset_entity_data, build_profiles, loaders_by_name + ): + """Load containers for entered asset entity by Build profiles. + + Args: + asset_entity_data (Dict[str, Any]): Prepared data with subsets, + last versions and representations for specific asset. + build_profiles (Dict[str, Any]): Build profiles. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + Dict[str, Any]: Output contains asset document + and loaded containers. 
+ """ + + # Make sure all data are not empty + if not asset_entity_data or not build_profiles or not loaders_by_name: + return + + asset_entity = asset_entity_data["asset_entity"] + + valid_profiles = self._filter_build_profiles( + build_profiles, loaders_by_name + ) + if not valid_profiles: + self.log.warning( + "There are not valid Workfile profiles. Skipping process." + ) + return + + self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) + + subsets_by_id = {} + version_by_subset_id = {} + repres_by_version_id = {} + for subset_id, in_data in asset_entity_data["subsets"].items(): + subset_entity = in_data["subset_entity"] + subsets_by_id[subset_entity["_id"]] = subset_entity + + version_data = in_data["version"] + version_entity = version_data["version_entity"] + version_by_subset_id[subset_id] = version_entity + repres_by_version_id[version_entity["_id"]] = ( + version_data["repres"] + ) + + if not subsets_by_id: + self.log.warning("There are not subsets for asset {0}".format( + asset_entity["name"] + )) + return + + profiles_per_subset_id = self._prepare_profile_for_subsets( + subsets_by_id.values(), valid_profiles + ) + if not profiles_per_subset_id: + self.log.warning("There are not valid subsets.") + return + + valid_repres_by_subset_id = collections.defaultdict(list) + for subset_id, profile in profiles_per_subset_id.items(): + profile_repre_names = profile["repre_names_lowered"] + + version_entity = version_by_subset_id[subset_id] + version_id = version_entity["_id"] + repres = repres_by_version_id[version_id] + for repre in repres: + repre_name_low = repre["name"].lower() + if repre_name_low in profile_repre_names: + valid_repres_by_subset_id[subset_id].append(repre) + + # DEBUG message + msg = "Valid representations for Asset: `{}`".format( + asset_entity["name"] + ) + for subset_id, repres in valid_repres_by_subset_id.items(): + subset = subsets_by_id[subset_id] + msg += "\n# Subset Name/ID: `{}`/{}".format( + subset["name"], subset_id + ) + for repre in repres: + msg += "\n## Repre name: `{}`".format(repre["name"]) + + self.log.debug(msg) + + containers = self._load_containers( + valid_repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ) + + return { + "asset_entity": asset_entity, + "containers": containers + } + + def _load_containers( + self, repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ): + """Real load by collected data happens here. + + Loading of representations per subset happens here. Each subset can + loads one representation. Loading is tried in specific order. + Representations are tried to load by names defined in configuration. + If subset has representation matching representation name each loader + is tried to load it until any is successful. If none of them was + successful then next representation name is tried. + Subset process loop ends when any representation is loaded or + all matching representations were already tried. + + Args: + repres_by_subset_id (Dict[str, Dict[str, Any]]): Available + representations mapped by their parent (subset) id. + subsets_by_id (Dict[str, Dict[str, Any]]): Subset documents + mapped by their id. + profiles_per_subset_id (Dict[str, Dict[str, Any]]): Build profiles + mapped by subset id. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + List[Dict[str, Any]]: Objects of loaded containers. + """ + + loaded_containers = [] + + # Get subset id order from build presets. 
+ build_presets = self.build_presets.get("current_context", []) + build_presets += self.build_presets.get("linked_assets", []) + subset_ids_ordered = [] + for preset in build_presets: + for preset_family in preset["families"]: + for id, subset in subsets_by_id.items(): + if preset_family not in subset["data"].get("families", []): + continue + + subset_ids_ordered.append(id) + + # Order representations from subsets. + print("repres_by_subset_id", repres_by_subset_id) + representations_ordered = [] + representations = [] + for id in subset_ids_ordered: + for subset_id, repres in repres_by_subset_id.items(): + if repres in representations: + continue + + if id == subset_id: + representations_ordered.append((subset_id, repres)) + representations.append(repres) + + print("representations", representations) + + # Load ordered representations. + for subset_id, repres in representations_ordered: + subset_name = subsets_by_id[subset_id]["name"] + + profile = profiles_per_subset_id[subset_id] + loaders_last_idx = len(profile["loaders"]) - 1 + repre_names_last_idx = len(profile["repre_names_lowered"]) - 1 + + repre_by_low_name = { + repre["name"].lower(): repre for repre in repres + } + + is_loaded = False + for repre_name_idx, profile_repre_name in enumerate( + profile["repre_names_lowered"] + ): + # Break iteration if representation was already loaded + if is_loaded: + break + + repre = repre_by_low_name.get(profile_repre_name) + if not repre: + continue + + for loader_idx, loader_name in enumerate(profile["loaders"]): + if is_loaded: + break + + loader = loaders_by_name.get(loader_name) + if not loader: + continue + try: + container = load_container( + loader, + repre["_id"], + name=subset_name + ) + loaded_containers.append(container) + is_loaded = True + + except Exception as exc: + if exc == IncompatibleLoaderError: + self.log.info(( + "Loader `{}` is not compatible with" + " representation `{}`" + ).format(loader_name, repre["name"])) + + else: + self.log.error( + "Unexpected error happened during loading", + exc_info=True + ) + + msg = "Loading failed." + if loader_idx < loaders_last_idx: + msg += " Trying next loader." + elif repre_name_idx < repre_names_last_idx: + msg += ( + " Loading of subset `{}` was not successful." + ).format(subset_name) + else: + msg += " Trying next representation." + self.log.info(msg) + + return loaded_containers + + def _collect_last_version_repres(self, asset_docs): + """Collect subsets, versions and representations for asset_entities. + + Args: + asset_docs (List[Dict[str, Any]]): Asset entities for which + want to find data. + + Returns: + Dict[str, Any]: collected entities + + Example output: + ``` + { + {Asset ID}: { + "asset_entity": , + "subsets": { + {Subset ID}: { + "subset_entity": , + "version": { + "version_entity": , + "repres": [ + , , ... + ] + } + }, + ... + } + }, + ... 
+ } + output[asset_id]["subsets"][subset_id]["version"]["repres"] + ``` + """ + + output = {} + if not asset_docs: + return output + + asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} + + project_name = legacy_io.active_project() + subsets = list(get_subsets( + project_name, asset_ids=asset_docs_by_ids.keys() + )) + subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} + + last_version_by_subset_id = get_last_versions( + project_name, subset_entity_by_ids.keys() + ) + last_version_docs_by_id = { + version["_id"]: version + for version in last_version_by_subset_id.values() + } + repre_docs = get_representations( + project_name, version_ids=last_version_docs_by_id.keys() + ) + + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + version_doc = last_version_docs_by_id[version_id] + + subset_id = version_doc["parent"] + subset_doc = subset_entity_by_ids[subset_id] + + asset_id = subset_doc["parent"] + asset_doc = asset_docs_by_ids[asset_id] + + if asset_id not in output: + output[asset_id] = { + "asset_entity": asset_doc, + "subsets": {} + } + + if subset_id not in output[asset_id]["subsets"]: + output[asset_id]["subsets"][subset_id] = { + "subset_entity": subset_doc, + "version": { + "version_entity": version_doc, + "repres": [] + } + } + + output[asset_id]["subsets"][subset_id]["version"]["repres"].append( + repre_doc + ) + + return output From 65268fbc09e946aaa623ed178773fa2fa2961ac4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 15:11:33 +0200 Subject: [PATCH 0246/2550] changed import of 'BuildWorkfile' in code --- openpype/hosts/maya/api/menu.py | 2 +- openpype/hosts/nuke/api/lib.py | 2 +- openpype/hosts/nuke/api/pipeline.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index c3ce8b0227..b7ab529a55 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,9 +6,9 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import BuildWorkfile from openpype.settings import get_project_settings from openpype.pipeline import legacy_io +from openpype.pipeline.workfile import BuildWorkfile from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib, lib_rendersettings from .lib import get_main_window, IS_HEADLESS diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 501ab4ba93..cf659344f0 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -21,7 +21,6 @@ from openpype.client import ( ) from openpype.api import ( Logger, - BuildWorkfile, get_version_from_path, get_current_project_settings, ) @@ -40,6 +39,7 @@ from openpype.pipeline import ( Anatomy, ) from openpype.pipeline.context_tools import get_current_project_asset +from openpype.pipeline.workfile import BuildWorkfile from . 
import gizmo_menu diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 0afc56d2f7..c1cd8f771a 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -9,7 +9,6 @@ import pyblish.api import openpype from openpype.api import ( Logger, - BuildWorkfile, get_current_project_settings ) from openpype.lib import register_event_callback @@ -22,6 +21,7 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.workfile import BuildWorkfile from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop From a006b5df63bb0b3f3935f0873a2f4537966ffddb Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 9 Aug 2022 15:43:47 +0200 Subject: [PATCH 0247/2550] set up the shelf creation in the _set_context_settings function --- openpype/hosts/houdini/api/lib.py | 32 ++++++++++++++++++++++++++ openpype/hosts/houdini/api/pipeline.py | 2 ++ 2 files changed, 34 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index c8a7f92bb9..55832abeb3 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -460,3 +460,35 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) + + +def create_shelf(): + hou.shelves.beginChangeBlock() + + custom_shelf = hou.shelves.newShelf( + file_path='', + name="custom_shelf", + label="Custom Shelf" + ) + + new_tool = hou.shelves.newTool( + file_path='', + name='new_tool', + label='New Tool', + script='', + language=hou.scriptLanguage.Python, + icon='', + help='This is a new tool' + ) + + if new_tool not in custom_shelf.tools(): + custom_shelf.setTools(list(custom_shelf.tools()) + [new_tool]) + + shelf_set = [ + shelf for shelf in hou.shelves.shelfSets().values() + if shelf.label() == "Create and Refine" + ][0] + + shelf_set.setShelves(shelf_set.shelves() + (custom_shelf,)) + + hou.shelves.endChangeBlock() diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b5f5459392..2f414020c4 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -309,6 +309,7 @@ def _set_context_settings(): fps resolution renderer + shelves Returns: None @@ -320,6 +321,7 @@ def _set_context_settings(): lib.set_scene_fps(fps) lib.reset_framerange() + lib.create_shelf() def on_pyblish_instance_toggled(instance, new_value, old_value): From 4db98639274917c908c5866c49c477779eb69d96 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:17:00 +0200 Subject: [PATCH 0248/2550] moved 'get_custom_workfile_template' and 'get_custom_workfile_template_by_string_context' to 'openpype.pipeline.workfile' --- openpype/pipeline/workfile/__init__.py | 6 + openpype/pipeline/workfile/path_resolving.py | 185 +++++++++++++++++-- 2 files changed, 176 insertions(+), 15 deletions(-) diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index 3bc125cfc4..0aad29b6f9 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -6,6 +6,9 @@ from .path_resolving import ( get_last_workfile_with_version, get_last_workfile, + + get_custom_workfile_template, + get_custom_workfile_template_by_string_context, ) from .build_workfile import BuildWorkfile @@ -20,5 +23,8 @@ __all__ = ( "get_last_workfile_with_version", 
"get_last_workfile", + "get_custom_workfile_template", + "get_custom_workfile_template_by_string_context", + "BuildWorkfile", ) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 7362902bcd..6740b710f5 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -3,9 +3,13 @@ import re import copy import platform -from openpype.client import get_asset_by_name +from openpype.client import get_project, get_asset_by_name from openpype.settings import get_project_settings -from openpype.lib import filter_profiles, StringTemplate +from openpype.lib import ( + filter_profiles, + Logger, + StringTemplate, +) from openpype.pipeline import Anatomy from openpype.pipeline.template_data import get_template_data @@ -189,11 +193,20 @@ def get_last_workfile_with_version( ): """Return last workfile version. + Usign workfile template and it's filling data find most possible last + version of workfile which was created for the context. + + Functionality is fully based on knowing which keys are optional or what + values are expected as value. + + The last modified file is used if more files can be considered as + last workfile. + Args: - workdir(str): Path to dir where workfiles are stored. - file_template(str): Template of file name. - fill_data(Dict[str, Any]): Data for filling template. - extensions(Iterable[str]): All allowed file extensions of workfile. + workdir (str): Path to dir where workfiles are stored. + file_template (str): Template of file name. + fill_data (Dict[str, Any]): Data for filling template. + extensions (Iterable[str]): All allowed file extensions of workfile. Returns: Tuple[Union[str, None], Union[int, None]]: Last workfile with version @@ -203,23 +216,26 @@ def get_last_workfile_with_version( if not os.path.exists(workdir): return None, None + dotted_extensions = { + ".{}".format(ext) + for ext in extensions + if not ext.startswith(".") + } # Fast match on extension filenames = [ filename for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in extensions + if os.path.splitext(filename)[1] in dotted_extensions ] # Build template without optionals, version to digits only regex # and comment to any definable value. - _ext = [] - for ext in extensions: - if not ext.startswith("."): - ext = "." + ext - # Escape dot for regex - ext = "\\" + ext - _ext.append(ext) - ext_expression = "(?:" + "|".join(_ext) + ")" + # Escape extensions dot for regex + regex_exts = [ + "\\" + ext + for ext in dotted_extensions + ] + ext_expression = "(?:" + "|".join(regex_exts) + ")" # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end file_template = re.sub(r"\.?{ext}", ext_expression, file_template) @@ -306,3 +322,142 @@ def get_last_workfile( return os.path.normpath(os.path.join(workdir, filename)) return filename + + +def get_custom_workfile_template( + project_doc, + asset_doc, + task_name, + host_name, + anatomy=None, + project_settings=None +): + """Filter and fill workfile template profiles by passed context. + + Custom workfile template can be used as first version of workfiles. + Template is a file on a disk which is set in settings. 
Expected settings + structure to have this feature enabled is: + project settings + |- + |- workfile_builder + |- create_first_version - a bool which must be set to 'True' + |- custom_templates - profiles based on task name/type which + points to a file which is copied as + first workfile + + It is expected that passed argument are already queried documents of + project and asset as parents of processing task name. + + Args: + project_doc (Dict[str, Any]): Project document from MongoDB. + asset_doc (Dict[str, Any]): Asset document from MongoDB. + task_name (str): Name of task for which templates are filtered. + host_name (str): Name of host. + anatomy (Anatomy): Optionally passed anatomy object for passed project + name. + project_settings(Dict[str, Any]): Preloaded project settings. + + Returns: + str: Path to template or None if none of profiles match current + context. Existence of formatted path is not validated. + None: If no profile is matching context. + """ + + log = Logger.get_logger("CustomWorkfileResolve") + + project_name = project_doc["name"] + if project_settings is None: + project_settings = get_project_settings(project_name) + + host_settings = project_settings.get(host_name) + if not host_settings: + log.info("Host \"{}\" doesn't have settings".format(host_name)) + return None + + workfile_builder_settings = host_settings.get("workfile_builder") + if not workfile_builder_settings: + log.info(( + "Seems like old version of settings is used." + " Can't access custom templates in host \"{}\"." + ).format(host_name)) + return + + if not workfile_builder_settings["create_first_version"]: + log.info(( + "Project \"{}\" has turned off to create first workfile for" + " host \"{}\"" + ).format(project_name, host_name)) + return + + # Backwards compatibility + template_profiles = workfile_builder_settings.get("custom_templates") + if not template_profiles: + log.info( + "Custom templates are not filled. Skipping template copy." + ) + return + + if anatomy is None: + anatomy = Anatomy(project_name) + + # get project, asset, task anatomy context data + anatomy_context_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # add root dict + anatomy_context_data["root"] = anatomy.roots + + # get task type for the task in context + current_task_type = anatomy_context_data["task"]["type"] + + # get path from matching profile + matching_item = filter_profiles( + template_profiles, + {"task_types": current_task_type} + ) + # when path is available try to format it in case + # there are some anatomy template strings + if matching_item: + template = matching_item["path"][platform.system().lower()] + return StringTemplate.format_strict_template( + template, anatomy_context_data + ).normalized() + + return None + + +def get_custom_workfile_template_by_string_context( + project_name, + asset_name, + task_name, + host_name, + anatomy=None, + project_settings=None +): + """Filter and fill workfile template profiles by passed context. + + Passed context are string representations of project, asset and task. + Function will query documents of project and asset to be able use + `get_custom_workfile_template` for rest of logic. + + Args: + project_name(str): Project name. + asset_name(str): Asset name. + task_name(str): Task name. + host_name (str): Name of host. + anatomy(Anatomy): Optionally prepared anatomy object for passed + project. + project_settings(Dict[str, Any]): Preloaded project settings. 
+ + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) + None: If no profile is matching context. + """ + + project_doc = get_project(project_name) + asset_doc = get_asset_by_name(project_name, asset_name) + + return get_custom_workfile_template( + project_doc, asset_doc, task_name, host_name, anatomy, project_settings + ) From c9289630e01245342a8ff5e7652301643638efc7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:17:23 +0200 Subject: [PATCH 0249/2550] moved 'get_custom_workfile_template' as 'get_custom_workfile_template_from_session' into context tools --- openpype/pipeline/context_tools.py | 35 +++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 13185c72b2..5f763cd249 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -22,7 +22,10 @@ from openpype.settings import get_project_settings from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names -from .workfile import get_workfile_template_key +from .workfile import ( + get_workfile_template_key, + get_custom_workfile_template_by_string_context, +) from . import ( legacy_io, register_loader_plugin_path, @@ -412,3 +415,33 @@ def get_workdir_from_session(session=None, template_key=None): if path: path = os.path.normpath(path) return path + + +def get_custom_workfile_template_from_session( + session=None, project_settings=None +): + """Filter and fill workfile template profiles by current context. + + Current context is defined by `legacy_io.Session`. That's why this + function should be used only inside host where context is set and stable. + + Args: + session (Union[None, Dict[str, str]]): Session from which are taken + data. + project_settings(Dict[str, Any]): Template profiles from settings. + + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) 
+ """ + + if session is None: + session = legacy_io.Session + + return get_custom_workfile_template_by_string_context( + session["AVALON_PROJECT"], + session["AVALON_ASSET"], + session["AVALON_TASK"], + session["AVALON_APP"], + project_settings=project_settings + ) From fbe1a773c016e94569913cbe8837deebea90bcb4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:17:39 +0200 Subject: [PATCH 0250/2550] marked functions in avalon context as deprecated --- openpype/lib/avalon_context.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index b32c9bce6d..b970cbf4e6 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -528,6 +528,7 @@ def template_data_from_session(session=None): """ from openpype.pipeline.context_tools import get_template_data_from_session + return get_template_data_from_session(session) @@ -908,6 +909,8 @@ def _get_task_context_data_for_anatomy( return data +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_context") def get_custom_workfile_template_by_context( template_profiles, project_doc, asset_doc, task_name, anatomy=None ): @@ -961,6 +964,9 @@ def get_custom_workfile_template_by_context( return None +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_string_context" +) def get_custom_workfile_template_by_string_context( template_profiles, project_name, asset_name, task_name, dbcon=None, anatomy=None @@ -1005,7 +1011,7 @@ def get_custom_workfile_template_by_string_context( ) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_custom_workfile_template") def get_custom_workfile_template(template_profiles): """Filter and fill workfile template profiles by current context. @@ -1020,6 +1026,8 @@ def get_custom_workfile_template(template_profiles): context. (Existence of formatted path is not validated.) 
""" + from openpype.pipeline import legacy_io + return get_custom_workfile_template_by_string_context( template_profiles, legacy_io.Session["AVALON_PROJECT"], From 939955339c46c0aa02634546286a5e6217bf2cd9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 16:18:23 +0200 Subject: [PATCH 0251/2550] use moved functions in code --- openpype/hooks/pre_copy_template_workfile.py | 52 +++++++------------- openpype/hosts/nuke/api/lib.py | 17 +++---- 2 files changed, 26 insertions(+), 43 deletions(-) diff --git a/openpype/hooks/pre_copy_template_workfile.py b/openpype/hooks/pre_copy_template_workfile.py index dffac22ee2..70c549919f 100644 --- a/openpype/hooks/pre_copy_template_workfile.py +++ b/openpype/hooks/pre_copy_template_workfile.py @@ -1,11 +1,11 @@ import os import shutil -from openpype.lib import ( - PreLaunchHook, - get_custom_workfile_template_by_context, +from openpype.lib import PreLaunchHook +from openpype.settings import get_project_settings +from openpype.pipeline.workfile import ( + get_custom_workfile_template, get_custom_workfile_template_by_string_context ) -from openpype.settings import get_project_settings class CopyTemplateWorkfile(PreLaunchHook): @@ -54,41 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook): project_name = self.data["project_name"] asset_name = self.data["asset_name"] task_name = self.data["task_name"] + host_name = self.application.host_name project_settings = get_project_settings(project_name) - host_settings = project_settings[self.application.host_name] - - workfile_builder_settings = host_settings.get("workfile_builder") - if not workfile_builder_settings: - # TODO remove warning when deprecated - self.log.warning(( - "Seems like old version of settings is used." - " Can't access custom templates in host \"{}\"." - ).format(self.application.full_label)) - return - - if not workfile_builder_settings["create_first_version"]: - self.log.info(( - "Project \"{}\" has turned off to create first workfile for" - " application \"{}\"" - ).format(project_name, self.application.full_label)) - return - - # Backwards compatibility - template_profiles = workfile_builder_settings.get("custom_templates") - if not template_profiles: - self.log.info( - "Custom templates are not filled. Skipping template copy." - ) - return project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") if project_doc and asset_doc: self.log.debug("Started filtering of custom template paths.") - template_path = get_custom_workfile_template_by_context( - template_profiles, project_doc, asset_doc, task_name, anatomy + template_path = get_custom_workfile_template( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + project_settings ) else: @@ -96,10 +77,13 @@ class CopyTemplateWorkfile(PreLaunchHook): "Global data collection probably did not execute." " Using backup solution." 
)) - dbcon = self.data.get("dbcon") template_path = get_custom_workfile_template_by_string_context( - template_profiles, project_name, asset_name, task_name, - dbcon, anatomy + project_name, + asset_name, + task_name, + host_name, + anatomy, + project_settings ) if not template_path: diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index cf659344f0..a5f2631a02 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -38,7 +38,10 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) -from openpype.pipeline.context_tools import get_current_project_asset +from openpype.pipeline.context_tools import ( + get_current_project_asset, + get_custom_workfile_template_from_session +) from openpype.pipeline.workfile import BuildWorkfile from . import gizmo_menu @@ -2444,15 +2447,12 @@ def _launch_workfile_app(): def process_workfile_builder(): - from openpype.lib import ( - env_value_to_bool, - get_custom_workfile_template - ) # to avoid looping of the callback, remove it! nuke.removeOnCreate(process_workfile_builder, nodeClass="Root") # get state from settings - workfile_builder = get_current_project_settings()["nuke"].get( + project_settings = get_current_project_settings() + workfile_builder = project_settings["nuke"].get( "workfile_builder", {}) # get all imortant settings @@ -2462,7 +2462,6 @@ def process_workfile_builder(): # get settings createfv_on = workfile_builder.get("create_first_version") or None - custom_templates = workfile_builder.get("custom_templates") or None builder_on = workfile_builder.get("builder_on_start") or None last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") @@ -2470,8 +2469,8 @@ def process_workfile_builder(): # generate first version in file not existing and feature is enabled if createfv_on and not os.path.exists(last_workfile_path): # get custom template path if any - custom_template_path = get_custom_workfile_template( - custom_templates + custom_template_path = get_custom_workfile_template_from_session( + project_settings=project_settings ) # if custom template is defined From 27a62892a02ea1a7f15c4c0bbea13988e80f44d3 Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 9 Aug 2022 16:43:24 +0200 Subject: [PATCH 0252/2550] Kitsu|Fix: Movie project type fails & first loop children names Fix #3635 --- openpype/modules/kitsu/utils/update_op_with_zou.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 8f5566e8ec..e03cf2b30e 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -219,18 +219,23 @@ def update_op_assets( # Add parents for hierarchy item_data["parents"] = [] - while parent_zou_id is not None: - parent_doc = asset_doc_ids[parent_zou_id] + ancestor_id = parent_zou_id + while ancestor_id is not None: + parent_doc = asset_doc_ids[ancestor_id] item_data["parents"].insert(0, parent_doc["name"]) # Get parent entity parent_entity = parent_doc["data"]["zou"] - parent_zou_id = parent_entity.get("parent_id") + ancestor_id = parent_entity.get("parent_id") - if item_type in ["Shot", "Sequence"]: + # Build OpenPype compatible name + if item_type in ["Shot", "Sequence"] and parent_zou_id is not None: # Name with parents hierarchy "({episode}_){sequence}_{shot}" # to avoid duplicate name issue item_name = f"{item_data['parents'][-1]}_{item['name']}" + + # Update doc name + 
asset_doc_ids[item["id"]]["name"] = item_name else: item_name = item["name"] From cdd90ad2a79de9ba0c2000a00eff65efcde30a8d Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 9 Aug 2022 17:13:08 +0200 Subject: [PATCH 0253/2550] main structure to generate shelves --- openpype/hosts/houdini/api/pipeline.py | 4 +-- openpype/hosts/houdini/api/shelves.py | 47 ++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 openpype/hosts/houdini/api/shelves.py diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 2f414020c4..f809f0ce56 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.pipeline.load import any_outdated_containers import openpype.hosts.houdini -from openpype.hosts.houdini.api import lib +from openpype.hosts.houdini.api import lib, shelves from openpype.lib import ( register_event_callback, @@ -74,6 +74,7 @@ def install(): # so it initializes into the correct scene FPS, Frame Range, etc. # todo: make sure this doesn't trigger when opening with last workfile _set_context_settings() + shelves.generate_shelves() def uninstall(): @@ -321,7 +322,6 @@ def _set_context_settings(): lib.set_scene_fps(fps) lib.reset_framerange() - lib.create_shelf() def on_pyblish_instance_toggled(instance, new_value, old_value): diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py new file mode 100644 index 0000000000..b8f6419175 --- /dev/null +++ b/openpype/hosts/houdini/api/shelves.py @@ -0,0 +1,47 @@ +import os +import logging + +from openpype.settings import get_project_settings + +log = logging.getLogger(__name__) + + +def generate_shelves(): + # load configuration of custom menu + project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + shelves_set_config = project_settings["houdini"]["shelves"] + + if not shelves_set_config: + log.warning("No custom shelves found.") + return + + # run the shelf generator for Houdini + for shelf_set in shelves_set_config: + pass + # if shelf_set_source_path is not None we load the source path and return + + # if the shelf set name already exists, do nothing, else, create a new one + + # go through each shelf + # if shelf_file_path exists, load the shelf and return + # if the shelf name already exists, do nothing, else, create a new one + + # go through each tool + # if filepath exists, load the tool, add it to the shelf and continue + # create the tool + # add it to a list of tools + + # add the tools list to the shelf with the tools already in it + # add the shelf to the shelf set with the shelfs already in it + + +def get_or_create_shelf_set(): + pass + + +def get_or_create_shelf(): + pass + + +def get_or_create_tool(): + pass From 6ef14510e161f01713150f383b172f8d4239aa07 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:00:23 +0200 Subject: [PATCH 0254/2550] implemented method to stop timer using web server --- .../modules/timers_manager/timers_manager.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 3453e4bc4c..28702510f6 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -415,6 +415,36 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): return requests.post(rest_api_url, json=data) + 
@staticmethod + def stop_timer_with_webserver(logger=None): + """Prepared method for calling stop timers on REST api. + + Args: + logger (logging.Logger): Logger used for logging messages. + """ + + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + msg = "Couldn't find webserver url" + if logger is not None: + logger.warning(msg) + else: + print(msg) + return + + rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) + try: + import requests + except Exception: + msg = "Couldn't start timer ('requests' is not available)" + if logger is not None: + logger.warning(msg) + else: + print(msg) + return + + return requests.post(rest_api_url) + def on_host_install(self, host, host_name, project_name): self.log.debug("Installing task changed callback") register_event_callback("taskChanged", self._on_host_task_change) From 29239178cba6cb3b5e6462771f301b5c104cae75 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:00:41 +0200 Subject: [PATCH 0255/2550] timers manager is adding plugin paths --- .../modules/timers_manager/timers_manager.py | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 28702510f6..bfd450ce8c 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -6,7 +6,8 @@ from openpype.client import get_asset_by_name from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, - ILaunchHookPaths + ILaunchHookPaths, + IPluginPaths ) from openpype.lib.events import register_event_callback @@ -72,7 +73,12 @@ class ExampleTimersManagerConnector: self._timers_manager_module.timer_stopped(self._module.id) -class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): +class TimersManager( + OpenPypeModule, + ITrayService, + ILaunchHookPaths, + IPluginPaths +): """ Handles about Timers. Should be able to start/stop all timers at once. @@ -177,11 +183,21 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): def get_launch_hook_paths(self): """Implementation of `ILaunchHookPaths`.""" + return os.path.join( os.path.dirname(os.path.abspath(__file__)), "launch_hooks" ) + def get_plugin_paths(self): + """Implementation of `IPluginPaths`.""" + + timer_module_dir = os.path.dirname(os.path.abspath(__file__)) + + return { + "publish": [os.path.join(timer_module_dir, "plugins", "publish")] + } + @staticmethod def get_timer_data_for_context( project_name, asset_name, task_name, logger=None @@ -388,6 +404,7 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): logger (logging.Logger): Logger object. Using 'print' if not passed. 
""" + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: msg = "Couldn't find webserver url" From 70bcd6bf9062df6bb72948b02b3344c153f242fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:04:48 +0200 Subject: [PATCH 0256/2550] moved start and stop plugins into timers manager --- .../plugins/publish/start_timer.py | 39 +++++++++++++++++++ .../plugins/publish/stop_timer.py | 27 +++++++++++++ openpype/plugins/publish/start_timer.py | 14 ------- openpype/plugins/publish/stop_timer.py | 17 -------- 4 files changed, 66 insertions(+), 31 deletions(-) create mode 100644 openpype/modules/timers_manager/plugins/publish/start_timer.py create mode 100644 openpype/modules/timers_manager/plugins/publish/stop_timer.py delete mode 100644 openpype/plugins/publish/start_timer.py delete mode 100644 openpype/plugins/publish/stop_timer.py diff --git a/openpype/modules/timers_manager/plugins/publish/start_timer.py b/openpype/modules/timers_manager/plugins/publish/start_timer.py new file mode 100644 index 0000000000..6408327ca1 --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/start_timer.py @@ -0,0 +1,39 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + +import pyblish.api + +from openpype.pipeline import legacy_io + + +class StartTimer(pyblish.api.ContextPlugin): + label = "Start Timer" + order = pyblish.api.IntegratorOrder + 1 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: + self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + project_name = legacy_io.active_project() + asset_name = legacy_io.Session.get("AVALON_ASSET") + task_name = legacy_io.Session.get("AVALON_TASK") + if not project_name or not asset_name or not task_name: + self.log.info(( + "Current context does not contain all" + " required information to start a timer." 
+ )) + return + timers_manager.start_timer_with_webserver( + project_name, asset_name, task_name, self.log + ) diff --git a/openpype/modules/timers_manager/plugins/publish/stop_timer.py b/openpype/modules/timers_manager/plugins/publish/stop_timer.py new file mode 100644 index 0000000000..a8674ff2ca --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/stop_timer.py @@ -0,0 +1,27 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + + +import pyblish.api + + +class StopTimer(pyblish.api.ContextPlugin): + label = "Stop Timer" + order = pyblish.api.ExtractorOrder - 0.49 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: + self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + timers_manager.stop_timer_with_webserver(self.log) diff --git a/openpype/plugins/publish/start_timer.py b/openpype/plugins/publish/start_timer.py deleted file mode 100644 index 112d92bef0..0000000000 --- a/openpype/plugins/publish/start_timer.py +++ /dev/null @@ -1,14 +0,0 @@ -import pyblish.api - -from openpype.lib import change_timer_to_current_context - - -class StartTimer(pyblish.api.ContextPlugin): - label = "Start Timer" - order = pyblish.api.IntegratorOrder + 1 - hosts = ["*"] - - def process(self, context): - modules_settings = context.data["system_settings"]["modules"] - if modules_settings["timers_manager"]["disregard_publishing"]: - change_timer_to_current_context() diff --git a/openpype/plugins/publish/stop_timer.py b/openpype/plugins/publish/stop_timer.py deleted file mode 100644 index 414e43a3c4..0000000000 --- a/openpype/plugins/publish/stop_timer.py +++ /dev/null @@ -1,17 +0,0 @@ -import os -import requests - -import pyblish.api - - -class StopTimer(pyblish.api.ContextPlugin): - label = "Stop Timer" - order = pyblish.api.ExtractorOrder - 0.49 - hosts = ["*"] - - def process(self, context): - modules_settings = context.data["system_settings"]["modules"] - if modules_settings["timers_manager"]["disregard_publishing"]: - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) - requests.post(rest_api_url) From 51f58340617a225d872f7a99aea8e75b514a0f87 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:05:17 +0200 Subject: [PATCH 0257/2550] changed order of collect modules manager --- openpype/plugins/publish/collect_modules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_modules.py b/openpype/plugins/publish/collect_modules.py index 2f6cb1ef0e..d76096bcd9 100644 --- a/openpype/plugins/publish/collect_modules.py +++ b/openpype/plugins/publish/collect_modules.py @@ -7,7 +7,7 @@ import pyblish.api class CollectModules(pyblish.api.ContextPlugin): """Collect OpenPype modules.""" - order = pyblish.api.CollectorOrder - 0.45 + order = pyblish.api.CollectorOrder - 0.5 label = "OpenPype Modules" def process(self, context): From e35fd6e476dd3fb1cab539b1e39aaa1704ef62b5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:19:01 +0200 Subject: [PATCH 0258/2550] use constant to define timer module dir --- openpype/modules/timers_manager/timers_manager.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git 
a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index bfd450ce8c..93332ace4f 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -13,6 +13,8 @@ from openpype.lib.events import register_event_callback from .exceptions import InvalidContextError +TIMER_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + class ExampleTimersManagerConnector: """Timers manager can handle timers of multiple modules/addons. @@ -34,6 +36,7 @@ class ExampleTimersManagerConnector: } ``` """ + # Not needed at all def __init__(self, module): # Store timer manager module to be able call it's methods when needed @@ -185,17 +188,15 @@ class TimersManager( """Implementation of `ILaunchHookPaths`.""" return os.path.join( - os.path.dirname(os.path.abspath(__file__)), + TIMER_MODULE_DIR, "launch_hooks" ) def get_plugin_paths(self): """Implementation of `IPluginPaths`.""" - timer_module_dir = os.path.dirname(os.path.abspath(__file__)) - return { - "publish": [os.path.join(timer_module_dir, "plugins", "publish")] + "publish": [os.path.join(TIMER_MODULE_DIR, "plugins", "publish")] } @staticmethod From 77d78aadf979632938cae81f94468f919490cdc8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:19:38 +0200 Subject: [PATCH 0259/2550] mark 'change_timer_to_current_context' in 'openpype.lib' as deprecated --- openpype/lib/avalon_context.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 42854f39d6..eb98ec1d9c 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1515,13 +1515,21 @@ def get_creator_by_name(creator_name, case_sensitive=False): return None -@with_pipeline_io +@deprecated def change_timer_to_current_context(): """Called after context change to change timers. + Deprecated: + This method is specific for TimersManager module so please use the + functionality from there. 
Function will be removed after release + version 3.14.* + TODO: - use TimersManager's static method instead of reimplementing it here """ + + from openpype.pipeline import legacy_io + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: log.warning("Couldn't find webserver url") From 1c133cf6b126cf4f4a0277ddd455c75455dc93b1 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 9 Aug 2022 17:46:58 +0100 Subject: [PATCH 0260/2550] FIx to use project name instead of code in update for ReferenceLoader --- openpype/hosts/maya/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 2b0c6131b4..8c3f6f071a 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -235,7 +235,7 @@ class ReferenceLoader(Loader): path = self.prepare_root_value(path, representation["context"] ["project"] - ["code"]) + ["name"]) content = cmds.file(path, loadReference=reference_node, type=file_type, From 4bb98863bd5476794faeb28fb37b9c77cc837dfe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 9 Aug 2022 18:47:28 +0200 Subject: [PATCH 0261/2550] add all keys from anatomy data to representation context even if it's already there --- openpype/plugins/publish/integrate_hero_version.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 735b7e50fa..7d698ff98d 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -313,13 +313,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): } repre_context = template_filled.used_values for key in self.db_representation_context_keys: - if ( - key in repre_context or - key not in anatomy_data - ): - continue - - repre_context[key] = anatomy_data[key] + value = anatomy_data.get(key) + if value is not None: + repre_context[key] = value # Prepare new repre repre = copy.deepcopy(repre_info["representation"]) From eb0e014beaac279ef019fa13c8213c3ff2196754 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 9 Aug 2022 18:35:32 +0100 Subject: [PATCH 0262/2550] Fix call to load file in case of fbx file --- openpype/hosts/maya/api/plugin.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 8c3f6f071a..652874997c 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -236,10 +236,16 @@ class ReferenceLoader(Loader): representation["context"] ["project"] ["name"]) + + params = { + "loadReference": reference_node, + "returnNewNodes": True + } + if file_type != "fbx": + params["type"] = file_type + content = cmds.file(path, - loadReference=reference_node, - type=file_type, - returnNewNodes=True) + **params) except RuntimeError as exc: # When changing a reference to a file that has load errors the # command will raise an error even if the file is still loaded From 6c10d4412320867ff40422196b562db2ca128ca5 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 10 Aug 2022 03:43:25 +0000 Subject: [PATCH 0263/2550] [Automated] Bump version --- CHANGELOG.md | 3 +-- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3124201758..b7ef795f0a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ ## 
[3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) **🆕 New features** @@ -89,7 +89,6 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) -- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) - TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) - NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) diff --git a/openpype/version.py b/openpype/version.py index d2eb3a8ab6..c41e69d00d 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.0" +__version__ = "3.13.1-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 03922a8e67..994c83d369 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.0" # OpenPype +version = "3.13.1-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From b74655c167aefd2d84e8cd6716d7b7b3c02783cd Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 11:56:05 +0200 Subject: [PATCH 0264/2550] set multipath to false for shelf set path --- .../settings/defaults/project_settings/houdini.json | 6 +++++- .../schemas/schema_houdini_scriptshelf.json | 10 +++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 5805f600c5..2ceed37935 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -2,7 +2,11 @@ "shelves": [ { "shelf_set_name": "OpenPype Shelves", - "shelf_set_source_path": "/path/to/your/shelf_set_file", + "shelf_set_source_path": { + "windows": "", + "darwin": "", + "linux": "/path/to/your/shelf_set_file" + }, "shelf_definition": [ { "shelf_name": "OpenPype Shelf", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json index 5a84c6d5cc..ae05cef74e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -16,7 +16,7 @@ "type": "path", "key": "shelf_set_source_path", "label": "Shelf Set Path", - "multipath": true, + "multipath": false, "multiplatform": true }, { @@ -33,7 +33,7 @@ "label": "Shelf Name" }, { - "type": "text", + "type": "path", "key": "shelf_file_path", "label": "Shelf File Path" }, @@ -51,17 +51,17 @@ "label": "Name" }, { - "type": "text", + "type": "path", "key": "filepath", "label": "File Path" }, { - "type": "text", + "type": "path", "key": "script", "label": "Script" }, { - "type": "text", + "type": "path", "key": "icon", "label": "Icon" }, From 4d477592492407e806e636175b72dd06ed7a42c1 
Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 10 Aug 2022 11:29:46 +0100 Subject: [PATCH 0265/2550] Fixed with the right type parameter for FBX --- openpype/hosts/maya/api/plugin.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 652874997c..e50ebfccad 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -209,7 +209,7 @@ class ReferenceLoader(Loader): "ma": "mayaAscii", "mb": "mayaBinary", "abc": "Alembic", - "fbx": "fbx" + "fbx": "FBX" }.get(representation["name"]) assert file_type, "Unsupported representation: %s" % representation @@ -236,16 +236,10 @@ class ReferenceLoader(Loader): representation["context"] ["project"] ["name"]) - - params = { - "loadReference": reference_node, - "returnNewNodes": True - } - if file_type != "fbx": - params["type"] = file_type - content = cmds.file(path, - **params) + loadReference=reference_node, + type=file_type, + returnNewNodes=True) except RuntimeError as exc: # When changing a reference to a file that has load errors the # command will raise an error even if the file is still loaded From a302caf6bd431b98136ce5b41c56cb0c60e49b4f Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 13:05:37 +0200 Subject: [PATCH 0266/2550] setting shelf set filepath if any in right OS --- openpype/hosts/houdini/api/shelves.py | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index b8f6419175..6ea4b4a9fd 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,26 +1,38 @@ import os import logging +import platform from openpype.settings import get_project_settings -log = logging.getLogger(__name__) +import hou + +log = logging.getLogger("openpype.hosts.houdini") def generate_shelves(): + current_os = platform.system().lower() # load configuration of custom menu project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) shelves_set_config = project_settings["houdini"]["shelves"] if not shelves_set_config: - log.warning("No custom shelves found.") + log.warning( + "SHELF ERROR: No custom shelves found in project settings." 
+ ) return # run the shelf generator for Houdini - for shelf_set in shelves_set_config: - pass - # if shelf_set_source_path is not None we load the source path and return + for shelf_set_config in shelves_set_config: + shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') + # if shelf_set_source_path is not None we load the source path and continue + if shelf_set_filepath[current_os]: + hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) + # hou.ShelfSet.setFilePath(file_path=shelf_set_filepath[operating_system]) + continue # if the shelf set name already exists, do nothing, else, create a new one + shelf_set_name = shelf_set_config.get('shelf_set_name') + shelf_set = get_or_create_shelf_set(shelf_set_name) # go through each shelf # if shelf_file_path exists, load the shelf and return @@ -35,8 +47,9 @@ def generate_shelves(): # add the shelf to the shelf set with the shelfs already in it -def get_or_create_shelf_set(): - pass +def get_or_create_shelf_set(shelf_set_name): + log.warning("IN GET OR CREATE SHELF SET: {}".format(shelf_set_name)) + hou.shelves.shelves() def get_or_create_shelf(): From f03e63502e80dc7d3a8717db54e22132d0276bdc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 13:59:26 +0200 Subject: [PATCH 0267/2550] fixed dotted extensions --- openpype/pipeline/workfile/path_resolving.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 6740b710f5..aa75d29372 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -216,11 +216,13 @@ def get_last_workfile_with_version( if not os.path.exists(workdir): return None, None - dotted_extensions = { - ".{}".format(ext) - for ext in extensions - if not ext.startswith(".") - } + + dotted_extensions = set() + for ext in extensions: + if not ext.startswith("."): + ext = ".{}".format(ext) + dotted_extensions.add(ext) + # Fast match on extension filenames = [ filename From 8858377019184f17ddf00b8bd7d3a1e8f06f0e8e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 14:32:07 +0200 Subject: [PATCH 0268/2550] formatting changes --- openpype/pipeline/workfile/path_resolving.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index aa75d29372..ed1d1d793e 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -216,7 +216,6 @@ def get_last_workfile_with_version( if not os.path.exists(workdir): return None, None - dotted_extensions = set() for ext in extensions: if not ext.startswith("."): @@ -227,7 +226,7 @@ def get_last_workfile_with_version( filenames = [ filename for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in dotted_extensions + if os.path.splitext(filename)[-1] in dotted_extensions ] # Build template without optionals, version to digits only regex From 6f68b998965893401d1679913084942e7329b086 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Aug 2022 15:47:32 +0200 Subject: [PATCH 0269/2550] Fix refactor typo --- openpype/hosts/maya/api/lib_rendersetup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersetup.py b/openpype/hosts/maya/api/lib_rendersetup.py index a00be52d8e..e616f26e1b 100644 --- a/openpype/hosts/maya/api/lib_rendersetup.py +++ 
b/openpype/hosts/maya/api/lib_rendersetup.py @@ -367,9 +367,9 @@ def get_shader_in_layer(node, layer): """ - def _get_connected_shader(shape): + def _get_connected_shader(plug): """Return current shader""" - return cmds.listConnections(shape + ".instObjGroups", + return cmds.listConnections(plug, source=False, destination=True, plugs=False, From 0528494d9e53368275754befa73bea7dcf7948dd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 16:10:52 +0200 Subject: [PATCH 0270/2550] extract review can scale to match pixel ratio --- openpype/plugins/publish/extract_review.py | 63 ++++++++-------------- 1 file changed, 22 insertions(+), 41 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 7442d3aacb..e16f324e0a 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1210,7 +1210,6 @@ class ExtractReview(pyblish.api.InstancePlugin): # Get instance data pixel_aspect = temp_data["pixel_aspect"] - if reformat_in_baking: self.log.debug(( "Using resolution from input. It is already " @@ -1230,6 +1229,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # - settings value can't have None but has value of 0 output_width = output_def.get("width") or output_width or None output_height = output_def.get("height") or output_height or None + # Force to use input resolution if output resolution was not defined + # in settings. Resolution from instance is not used when + # 'use_input_res' is set to 'True'. + use_input_res = False # Overscal color overscan_color_value = "black" @@ -1241,6 +1244,17 @@ class ExtractReview(pyblish.api.InstancePlugin): ) self.log.debug("Overscan color: `{}`".format(overscan_color_value)) + # Scale input to have proper pixel aspect ratio + # - scale width by the pixel aspect ratio + scale_pixel_aspect = output_def.get("scale_pixel_aspect", True) + if scale_pixel_aspect and pixel_aspect != 1: + # Change input width after pixel aspect + input_width = int(input_width * pixel_aspect) + use_input_res = True + filters.append(( + "scale={}x{}:flags=lanczos".format(input_width, input_height) + )) + # Convert overscan value video filters overscan_crop = output_def.get("overscan_crop") overscan = OverscanCrop( @@ -1251,13 +1265,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # resolution by it's values if overscan_crop_filters: filters.extend(overscan_crop_filters) + # Change input resolution after overscan crop input_width = overscan.width() input_height = overscan.height() - # Use output resolution as inputs after cropping to skip usage of - # instance data resolution - if output_width is None or output_height is None: - output_width = input_width - output_height = input_height + use_input_res = True # Make sure input width and height is not an odd number input_width_is_odd = bool(input_width % 2 != 0) @@ -1283,8 +1294,10 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("input_width: `{}`".format(input_width)) self.log.debug("input_height: `{}`".format(input_height)) - # Use instance resolution if output definition has not set it. 
- if output_width is None or output_height is None: + # Use instance resolution if output definition has not set it + # - use instance resolution only if there were not scale changes + # that may massivelly affect output 'use_input_res' + if not use_input_res and output_width is None or output_height is None: output_width = temp_data["resolution_width"] output_height = temp_data["resolution_height"] @@ -1326,7 +1339,6 @@ class ExtractReview(pyblish.api.InstancePlugin): output_width == input_width and output_height == input_height and not letter_box_enabled - and pixel_aspect == 1 ): self.log.debug( "Output resolution is same as input's" @@ -1336,39 +1348,8 @@ class ExtractReview(pyblish.api.InstancePlugin): new_repre["resolutionHeight"] = input_height return filters - # defining image ratios - input_res_ratio = ( - (float(input_width) * pixel_aspect) / input_height - ) - output_res_ratio = float(output_width) / float(output_height) - self.log.debug("input_res_ratio: `{}`".format(input_res_ratio)) - self.log.debug("output_res_ratio: `{}`".format(output_res_ratio)) - - # Round ratios to 2 decimal places for comparing - input_res_ratio = round(input_res_ratio, 2) - output_res_ratio = round(output_res_ratio, 2) - - # get scale factor - scale_factor_by_width = ( - float(output_width) / (input_width * pixel_aspect) - ) - scale_factor_by_height = ( - float(output_height) / input_height - ) - - self.log.debug( - "scale_factor_by_with: `{}`".format(scale_factor_by_width) - ) - self.log.debug( - "scale_factor_by_height: `{}`".format(scale_factor_by_height) - ) - # scaling none square pixels and 1920 width - if ( - input_height != output_height - or input_width != output_width - or pixel_aspect != 1 - ): + if input_height != output_height or input_width != output_width: filters.extend([ ( "scale={}x{}" From 3d62093224be2b3786823b175f1bfd1ffa3aad3d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Aug 2022 16:14:50 +0200 Subject: [PATCH 0271/2550] Refactor moved usage of CreateRender settings --- openpype/hosts/maya/api/lib_rendersettings.py | 3 +-- .../hosts/maya/plugins/publish/validate_render_image_rule.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 9aea55a03b..7cd2193086 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -60,8 +60,7 @@ class RenderSettings(object): try: aov_separator = self._aov_chars[( self._project_settings["maya"] - ["create"] - ["CreateRender"] + ["RenderSettings"] ["aov_separator"] )] except KeyError: diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 642ca9e25d..0abcf2f12a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -41,6 +41,5 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def get_default_render_image_folder(instance): return instance.context.data.get('project_settings')\ .get('maya') \ - .get('create') \ - .get('CreateRender') \ + .get('RenderSettings') \ .get('default_render_image_folder') From 7a16cb723b8329d493697c153881533808a2c0e2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 16:42:34 +0200 Subject: [PATCH 0272/2550] added settings for rescaling when pixel aspect ratio is not 1 --- 
openpype/settings/defaults/project_settings/global.json | 1 + .../projects_schema/schemas/schema_global_publish.json | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index e509db2791..0ff9363ba7 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -85,6 +85,7 @@ ], "width": 0, "height": 0, + "scale_pixel_aspect": true, "bg_color": [ 0, 0, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index b9d0b7daba..e1aa230b49 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -319,6 +319,15 @@ "minimum": 0, "maximum": 100000 }, + { + "type": "label", + "label": "Rescale input when it's pixel aspect ratio is not 1. Usefull for anamorph reviews." + }, + { + "key": "scale_pixel_aspect", + "label": "Scale pixel aspect", + "type": "boolean" + }, { "type": "label", "label": "Background color is used only when input have transparency and Alpha is higher than 0." From f74101be342ced01df8057f353123692fb559ff3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Aug 2022 16:45:19 +0200 Subject: [PATCH 0273/2550] Remove unused get current renderer logic The `renderer` variable wasn't used --- openpype/hosts/maya/plugins/publish/collect_render.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index e6fc8a01e5..e1f4efcc07 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -154,12 +154,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): layer_name = "rs_{}".format(expected_layer_name) # collect all frames we are expecting to be rendered - renderer = self.get_render_attribute("currentRenderer", - layer=layer_name) - # handle various renderman names - if renderer.startswith("renderman"): - renderer = "renderman" - # return all expected files for all cameras and aovs in given # frame range layer_render_products = get_layer_render_products(layer_name) From 74a91f4d22ebcacbab07f05ca44fd8e1dbf1d6c2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Aug 2022 17:01:42 +0200 Subject: [PATCH 0274/2550] Fix more missing refactors --- openpype/hosts/maya/plugins/publish/collect_render.py | 3 +-- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index e6fc8a01e5..085403bdf7 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -203,8 +203,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): aov_dict = {} default_render_file = context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ + .get('RenderSettings')\ .get('default_render_image_folder') or "" # replace relative paths with absolute. Render products are # returned as list of dictionaries. 
diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index f253ceb21a..13dfc0183a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -413,8 +413,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # Gather needed data ------------------------------------------------ default_render_file = instance.context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ + .get('RenderSettings')\ .get('default_render_image_folder') filename = os.path.basename(filepath) comment = context.data.get("comment", "") From b69e2e2003f768b111fa50635a0c5f3268ca7357 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 17:34:48 +0200 Subject: [PATCH 0275/2550] get shelf set or create one --- openpype/hosts/houdini/api/shelves.py | 29 ++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 6ea4b4a9fd..d89f3153ea 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -21,13 +21,19 @@ def generate_shelves(): ) return - # run the shelf generator for Houdini for shelf_set_config in shelves_set_config: shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') + # if shelf_set_source_path is not None we load the source path and continue if shelf_set_filepath[current_os]: + if not os.path.isfile(shelf_set_filepath[current_os]): + raise FileNotFoundError( + "SHELF ERROR: This path doesn't exist - {}".format( + shelf_set_filepath[current_os] + ) + ) + hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) - # hou.ShelfSet.setFilePath(file_path=shelf_set_filepath[operating_system]) continue # if the shelf set name already exists, do nothing, else, create a new one @@ -47,9 +53,22 @@ def generate_shelves(): # add the shelf to the shelf set with the shelfs already in it -def get_or_create_shelf_set(shelf_set_name): - log.warning("IN GET OR CREATE SHELF SET: {}".format(shelf_set_name)) - hou.shelves.shelves() +def get_or_create_shelf_set(shelf_set_label): + all_shelves = hou.shelves.shelfSets().values() + + shelf_set = [ + shelf for shelf in all_shelves if shelf.label() == shelf_set_label + ] + + if shelf_set: + return shelf_set[0] + + shelf_set_name = shelf_set_label.replace(' ', '_').lower() + new_shelf_set = hou.shelves.newShelfSet( + name=shelf_set_name, + label=shelf_set_label + ) + return new_shelf_set def get_or_create_shelf(): From 937ba13ea0c62b63d2d56a0f1895932089070983 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 17:47:33 +0200 Subject: [PATCH 0276/2550] remove filepath for shelf and tools --- .../settings/defaults/project_settings/houdini.json | 2 -- .../schemas/schema_houdini_scriptshelf.json | 12 +----------- 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 2ceed37935..a818f82d6b 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -10,11 +10,9 @@ "shelf_definition": [ { "shelf_name": "OpenPype Shelf", - "shelf_file_path": "/path/to/your/shelf_file", "tools_list": [ { "name": "OpenPype Tool", - "filepath": "/path/to/your/tool_file", "script": 
"/path/to/your/tool_script", "icon": "/path/to/your/icon", "help": "Help message for your tool" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json index ae05cef74e..812ab7d8c9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -15,7 +15,7 @@ { "type": "path", "key": "shelf_set_source_path", - "label": "Shelf Set Path", + "label": "Shelf Set Path (optional)", "multipath": false, "multiplatform": true }, @@ -32,11 +32,6 @@ "key": "shelf_name", "label": "Shelf Name" }, - { - "type": "path", - "key": "shelf_file_path", - "label": "Shelf File Path" - }, { "type": "list", "key": "tools_list", @@ -50,11 +45,6 @@ "key": "name", "label": "Name" }, - { - "type": "path", - "key": "filepath", - "label": "File Path" - }, { "type": "path", "key": "script", From ea37f4c3c5313e6c088e533c10c721b33d490333 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 18:06:38 +0200 Subject: [PATCH 0277/2550] get or create shelf implementation --- openpype/hosts/houdini/api/shelves.py | 52 +++++++++++++++++++++------ 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index d89f3153ea..76fe0cbd87 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,3 +1,4 @@ +from cProfile import label import os import logging import platform @@ -17,14 +18,13 @@ def generate_shelves(): if not shelves_set_config: log.warning( - "SHELF ERROR: No custom shelves found in project settings." + "SHELF WARNGING: No custom shelves found in project settings." ) return for shelf_set_config in shelves_set_config: shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') - # if shelf_set_source_path is not None we load the source path and continue if shelf_set_filepath[current_os]: if not os.path.isfile(shelf_set_filepath[current_os]): raise FileNotFoundError( @@ -36,13 +36,33 @@ def generate_shelves(): hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) continue - # if the shelf set name already exists, do nothing, else, create a new one shelf_set_name = shelf_set_config.get('shelf_set_name') + if not shelf_set_name: + log.warning( + "SHELF WARNGING: No name found in shelf set definition." + ) + return + shelf_set = get_or_create_shelf_set(shelf_set_name) - # go through each shelf - # if shelf_file_path exists, load the shelf and return - # if the shelf name already exists, do nothing, else, create a new one + shelves_definition = shelf_set_config.get('shelf_definition') + + if not shelves_definition: + log.warning( + "SHELF WARNING: \ +No shelf definition found for shelf set named '{}'".format(shelf_set_name) + ) + return + + for shelf_definition in shelves_definition: + shelf_name = shelf_definition.get('shelf_name') + if not shelf_name: + log.warning( + "SHELF WARNGING: No name found in shelf set definition." 
+ ) + return + + shelf = get_or_create_shelf(shelf_name) # go through each tool # if filepath exists, load the tool, add it to the shelf and continue @@ -54,10 +74,10 @@ def generate_shelves(): def get_or_create_shelf_set(shelf_set_label): - all_shelves = hou.shelves.shelfSets().values() + all_shelves_sets = hou.shelves.shelfSets().values() shelf_set = [ - shelf for shelf in all_shelves if shelf.label() == shelf_set_label + shelf for shelf in all_shelves_sets if shelf.label() == shelf_set_label ] if shelf_set: @@ -71,8 +91,20 @@ def get_or_create_shelf_set(shelf_set_label): return new_shelf_set -def get_or_create_shelf(): - pass +def get_or_create_shelf(shelf_label): + all_shelves = hou.shelves.shelves().values() + + shelf = [s for s in all_shelves if s.label() == shelf_label] + + if shelf: + return shelf[0] + + shelf_name = shelf_label.replace(' ', '_').lower() + new_shelf = hou.shelves.newShelf( + name=shelf_name, + label=shelf_label + ) + return new_shelf def get_or_create_tool(): From 02edebad41f26680f0f7ceb3b2b21fe6cfebebab Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Aug 2022 18:33:03 +0200 Subject: [PATCH 0278/2550] fix import string --- openpype/pipeline/workfile/build_template.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py index df6fe3514a..e6396578c5 100644 --- a/openpype/pipeline/workfile/build_template.py +++ b/openpype/pipeline/workfile/build_template.py @@ -15,7 +15,7 @@ from .build_template_exceptions import ( MissingTemplateLoaderClass ) -_module_path_format = 'openpype.{host}.template_loader' +_module_path_format = 'openpype.hosts.{host}.api.template_loader' def build_workfile_template(*args): From c4854be5c090cf63f2044fc81b8a7e33ee8c642d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 10 Aug 2022 18:48:54 +0200 Subject: [PATCH 0279/2550] OP-3682 - extracted sha256 method to lib --- openpype/client/addon_distribution.py | 149 ++++++++++++++++++++++++++ openpype/lib/path_tools.py | 20 ++++ openpype/tools/repack_version.py | 24 +---- 3 files changed, 172 insertions(+), 21 deletions(-) create mode 100644 openpype/client/addon_distribution.py diff --git a/openpype/client/addon_distribution.py b/openpype/client/addon_distribution.py new file mode 100644 index 0000000000..3246c5bb72 --- /dev/null +++ b/openpype/client/addon_distribution.py @@ -0,0 +1,149 @@ +import os +from enum import Enum +from zipfile import ZipFile +from abc import abstractmethod + +import attr + +from openpype.lib.path_tools import sha256sum +from openpype.lib import PypeLogger + +log = PypeLogger().get_logger(__name__) + + +class UrlType(Enum): + HTTP = {} + GIT = {} + OS = {} + + +@attr.s +class AddonInfo(object): + """Object matching json payload from Server""" + name = attr.ib(default=None) + version = attr.ib(default=None) + addon_url = attr.ib(default=None) + type = attr.ib(default=None) + hash = attr.ib(default=None) + + +class AddonDownloader: + + def __init__(self): + self._downloaders = {} + + def register_format(self, downloader_type, downloader): + self._downloaders[downloader_type] = downloader + + def get_downloader(self, downloader_type): + downloader = self._downloaders.get(downloader_type) + if not downloader: + raise ValueError(f"{downloader_type} not implemented") + return downloader() + + @classmethod + @abstractmethod + def download(cls, addon_url, destination): + """Returns url to downloaded addon zip file. 
+ + Args: + addon_url (str): http or OS or any supported protocol url to addon + zip file + destination (str): local folder to unzip + Retursn: + (str) local path to addon zip file + """ + pass + + @classmethod + def check_hash(cls, addon_path, addon_hash): + """Compares 'hash' of downloaded 'addon_url' file. + + Args: + addon_path (str): local path to addon zip file + addon_hash (str): sha256 hash of zip file + Raises: + ValueError if hashes doesn't match + """ + if addon_hash != sha256sum(addon_path): + raise ValueError( + "{} doesn't match expected hash".format(addon_path)) + + @classmethod + def unzip(cls, addon_path, destination): + """Unzips local 'addon_path' to 'destination'. + + Args: + addon_path (str): local path to addon zip file + destination (str): local folder to unzip + """ + addon_file_name = os.path.basename(addon_path) + addon_base_file_name, _ = os.path.splitext(addon_file_name) + with ZipFile(addon_path, "r") as zip_ref: + log.debug(f"Unzipping {addon_path} to {destination}.") + zip_ref.extractall( + os.path.join(destination, addon_base_file_name)) + + @classmethod + def remove(cls, addon_url): + pass + + +class OSAddonDownloader(AddonDownloader): + + @classmethod + def download(cls, addon_url, destination): + # OS doesnt need to download, unzip directly + if not os.path.exists(addon_url): + raise ValueError("{} is not accessible".format(addon_url)) + return addon_url + + +def get_addons_info(): + """Returns list of addon information from Server""" + # TODO temp + addon_info = AddonInfo( + **{"name": "openpype_slack", + "version": "1.0.0", + "addon_url": "c:/projects/openpype_slack_1.0.0.zip", + "type": UrlType.OS, + "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa + + return [addon_info] + + +def update_addon_state(addon_infos, destination_folder, factory): + """Loops through all 'addon_infos', compares local version, unzips. + + Loops through server provided list of dictionaries with information about + available addons. Looks if each addon is already present and deployed. + If isn't, addon zip gets downloaded and unzipped into 'destination_folder'. 
+ Args: + addon_infos (list of AddonInfo) + destination_folder (str): local path + factory (AddonDownloader): factory to get appropriate downloader per + addon type + """ + for addon in addon_infos: + full_name = "{}_{}".format(addon.name, addon.version) + addon_url = os.path.join(destination_folder, full_name) + + if os.path.isdir(addon_url): + log.debug(f"Addon version folder {addon_url} already exists.") + continue + + downloader = factory.get_downloader(addon.type) + downloader.download(addon.addon_url, destination_folder) + + +def cli(args): + addon_folder = "c:/Users/petrk/AppData/Local/pypeclub/openpype/addons" + + downloader_factory = AddonDownloader() + downloader_factory.register_format(UrlType.OS, OSAddonDownloader) + + print(update_addon_state(get_addons_info(), addon_folder, + downloader_factory)) + print(sha256sum("c:/projects/openpype_slack_1.0.0.zip")) + + diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 4f28be3302..2083dc48d1 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -5,6 +5,7 @@ import json import logging import six import platform +import hashlib from openpype.client import get_project from openpype.settings import get_project_settings @@ -478,3 +479,22 @@ class HostDirmap: log.debug("local sync mapping:: {}".format(mapping)) return mapping + + +def sha256sum(filename): + """Calculate sha256 for content of the file. + + Args: + filename (str): Path to file. + + Returns: + str: hex encoded sha256 + + """ + h = hashlib.sha256() + b = bytearray(128 * 1024) + mv = memoryview(b) + with open(filename, 'rb', buffering=0) as f: + for n in iter(lambda: f.readinto(mv), 0): + h.update(mv[:n]) + return h.hexdigest() \ No newline at end of file diff --git a/openpype/tools/repack_version.py b/openpype/tools/repack_version.py index 0172264c79..414152970a 100644 --- a/openpype/tools/repack_version.py +++ b/openpype/tools/repack_version.py @@ -7,10 +7,11 @@ from pathlib import Path import platform from zipfile import ZipFile from typing import List -import hashlib import sys from igniter.bootstrap_repos import OpenPypeVersion +from openpype.lib.path_tools import sha256sum + class VersionRepacker: @@ -45,25 +46,6 @@ class VersionRepacker: print("{}{}".format(header, msg)) - @staticmethod - def sha256sum(filename): - """Calculate sha256 for content of the file. - - Args: - filename (str): Path to file. 
- - Returns: - str: hex encoded sha256 - - """ - h = hashlib.sha256() - b = bytearray(128 * 1024) - mv = memoryview(b) - with open(filename, 'rb', buffering=0) as f: - for n in iter(lambda: f.readinto(mv), 0): - h.update(mv[:n]) - return h.hexdigest() - @staticmethod def _filter_dir(path: Path, path_filter: List) -> List[Path]: """Recursively crawl over path and filter.""" @@ -104,7 +86,7 @@ class VersionRepacker: nits="%", color="green") for file in file_list: checksums.append(( - VersionRepacker.sha256sum(file.as_posix()), + sha256sum(file.as_posix()), file.resolve().relative_to(self.version_path), file )) From 66b280796e30ad89bfc5ef2e43f3f1b677d64a4f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 10 Aug 2022 18:49:24 +0200 Subject: [PATCH 0280/2550] OP-3682 - implemented local disk downloader --- openpype/client/addon_distribution.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/client/addon_distribution.py b/openpype/client/addon_distribution.py index 3246c5bb72..8fe9567688 100644 --- a/openpype/client/addon_distribution.py +++ b/openpype/client/addon_distribution.py @@ -144,6 +144,5 @@ def cli(args): print(update_addon_state(get_addons_info(), addon_folder, downloader_factory)) - print(sha256sum("c:/projects/openpype_slack_1.0.0.zip")) From 159052f8f9555ec1706d2c565b74133c785096ec Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 11:24:41 +0200 Subject: [PATCH 0281/2550] OP-3682 - Hound --- openpype/lib/path_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 2083dc48d1..0ae5e44d79 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -497,4 +497,4 @@ def sha256sum(filename): with open(filename, 'rb', buffering=0) as f: for n in iter(lambda: f.readinto(mv), 0): h.update(mv[:n]) - return h.hexdigest() \ No newline at end of file + return h.hexdigest() From a6ddb2d44b9ec9edb76c2a41f1b471909afabde6 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 11:44:42 +0200 Subject: [PATCH 0282/2550] filter mandatory attributes for tool --- openpype/hosts/houdini/api/shelves.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 76fe0cbd87..a37ec88d64 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -64,8 +64,17 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf = get_or_create_shelf(shelf_name) - # go through each tool - # if filepath exists, load the tool, add it to the shelf and continue + tools = [] + for tool in shelf_definition.get('tools_list'): + mandatory_attributes = ['name', 'script'] + if not all( + [v for k, v in tool.items() if k in mandatory_attributes] + ): + log.warning("TOOLS ERROR: You need to specify at least \ +the name and the script path of the tool.") + return + + tool = get_or_create_tool(tool, shelf) # create the tool # add it to a list of tools @@ -107,5 +116,5 @@ def get_or_create_shelf(shelf_label): return new_shelf -def get_or_create_tool(): +def get_or_create_tool(tool_definition, shelf): pass From bbf113cac4c8dd0dde7cca18646641107a505b44 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 11:54:07 +0200 Subject: [PATCH 0283/2550] Set default value for default render image folder to "renders" --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ac0f161cf2..ce9cd4d606 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -33,7 +33,7 @@ }, "RenderSettings": { "apply_render_settings": true, - "default_render_image_folder": "", + "default_render_image_folder": "renders", "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { From f0a6a6414ea86178f0d02ed83d8816919a86beb1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 11:54:35 +0200 Subject: [PATCH 0284/2550] Tweak ValidateRenderImageRule docstring and invalidation error message --- .../publish/validate_render_image_rule.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 0abcf2f12a..a9be996e0c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -11,7 +11,11 @@ def get_file_rule(rule): class ValidateRenderImageRule(pyblish.api.InstancePlugin): - """Validates "images" file rule is set to "renders/" + """Validates Maya Workpace "images" file rule matches project settings. + + This validates against the configured default render image folder: + Studio Settings > Project > Maya > + Render Settings > Default render image folder. """ @@ -23,11 +27,13 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def process(self, instance): - default_render_file = self.get_default_render_image_folder(instance) + required_images_rule = self.get_default_render_image_folder(instance) + current_images_rule = get_file_rule("images") - assert get_file_rule("images") == default_render_file, ( - "Workspace's `images` file rule must be set to: {}".format( - default_render_file + assert current_images_rule == required_images_rule, ( + "Invalid workspace `images` file rule value: '{}'. " + "Must be set to: '{}'".format( + current_images_rule, required_images_rule ) ) From 27125a1088786f004404302485b750fa1594462d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 12:02:55 +0200 Subject: [PATCH 0285/2550] OP-3682 - extract file_handler from tests Addon distribution could use already implemented methods for dowloading from HTTP (GDrive urls). 
--- {tests => openpype}/lib/file_handler.py | 0 tests/lib/testing_classes.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename {tests => openpype}/lib/file_handler.py (100%) diff --git a/tests/lib/file_handler.py b/openpype/lib/file_handler.py similarity index 100% rename from tests/lib/file_handler.py rename to openpype/lib/file_handler.py diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 2b4d7deb48..75f859de48 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -10,7 +10,7 @@ import glob import platform from tests.lib.db_handler import DBHandler -from tests.lib.file_handler import RemoteFileHandler +from openpype.lib.file_handler import RemoteFileHandler from openpype.lib.remote_publish import find_variant_key From 4f9d1c34e22d22729fc99fc92abcfaeb16ca253b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 12:39:27 +0200 Subject: [PATCH 0286/2550] added IHostModule to be able identify module representing a host --- openpype/modules/interfaces.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 334485cab2..424dd158fd 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -1,4 +1,4 @@ -from abc import abstractmethod +from abc import abstractmethod, abstractproperty from openpype import resources @@ -320,3 +320,13 @@ class ISettingsChangeListener(OpenPypeInterface): self, old_value, new_value, changes, project_name, new_value_metadata ): pass + + +class IHostModule(OpenPypeInterface): + """Module which also contain a host implementation.""" + + @abstractproperty + def host_name(self): + """Name of host which module represents.""" + + pass From c86ab4fecfbb6e502723bb86dbbf0748a8135753 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 12:40:08 +0200 Subject: [PATCH 0287/2550] added ability to inmport host modules on load modules --- openpype/modules/base.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1bd343fd07..32fa4d2f31 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -140,7 +140,7 @@ class _LoadCache: def get_default_modules_dir(): """Path to default OpenPype modules.""" - current_dir = os.path.abspath(os.path.dirname(__file__)) + current_dir = os.path.dirname(os.path.abspath(__file__)) output = [] for folder_name in ("default_modules", ): @@ -298,6 +298,8 @@ def _load_modules(): # Add current directory at first place # - has small differences in import logic current_dir = os.path.abspath(os.path.dirname(__file__)) + hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts") + module_dirs.insert(0, hosts_dir) module_dirs.insert(0, current_dir) processed_paths = set() @@ -314,6 +316,7 @@ def _load_modules(): continue is_in_current_dir = dirpath == current_dir + is_in_host_dir = dirpath == hosts_dir for filename in os.listdir(dirpath): # Ignore filenames if filename in IGNORED_FILENAMES: @@ -353,6 +356,24 @@ def _load_modules(): sys.modules[new_import_str] = default_module setattr(openpype_modules, basename, default_module) + elif is_in_host_dir: + import_str = "openpype.hosts.{}".format(basename) + new_import_str = "{}.{}".format(modules_key, basename) + # Until all hosts are converted to be able use them as + # modules is this error check needed + try: + default_module = __import__( + import_str, fromlist=("", ) + ) + 
sys.modules[new_import_str] = default_module + setattr(openpype_modules, basename, default_module) + + except Exception: + log.warning( + "Failed to import host folder {}".format(basename), + exc_info=True + ) + elif os.path.isdir(fullpath): import_module_from_dirpath(dirpath, filename, modules_key) From 5736b9133cd8f2b2a62146cf6c9fb8310a74f4b5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 12:40:32 +0200 Subject: [PATCH 0288/2550] added helper methods to be able get host module by host name --- openpype/modules/base.py | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 32fa4d2f31..ef577e5aa2 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -825,6 +825,45 @@ class ModulesManager: output.extend(hook_paths) return output + def get_host_module(self, host_name): + """Find host module by host name. + + Args: + host_name (str): Host name for which is found host module. + + Returns: + OpenPypeModule: Found host module by name. + None: There was not found module inheriting IHostModule which has + host name set to passed 'host_name'. + """ + + from openpype_interfaces import IHostModule + + for module in self.get_enabled_modules(): + if ( + isinstance(module, IHostModule) + and module.host_name == host_name + ): + return module + return None + + def get_host_names(self): + """List of available host names based on host modules. + + Returns: + Iterable[str]: All available host names based on enabled modules + inheriting 'IHostModule'. + """ + + from openpype_interfaces import IHostModule + + host_names = { + module.host_name + for module in self.get_enabled_modules() + if isinstance(module, IHostModule) + } + return host_names + def print_report(self): """Print out report of time spent on modules initialization parts. 
From a2dadc85bd51c2fc25baf5098ec5fdcf08e00269 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 12:42:54 +0200 Subject: [PATCH 0289/2550] added 'OpenPypeMaya' module --- openpype/hosts/maya/__init__.py | 7 +++++++ openpype/hosts/maya/module.py | 10 ++++++++++ 2 files changed, 17 insertions(+) create mode 100644 openpype/hosts/maya/module.py diff --git a/openpype/hosts/maya/__init__.py b/openpype/hosts/maya/__init__.py index c1c82c62e5..2178534b89 100644 --- a/openpype/hosts/maya/__init__.py +++ b/openpype/hosts/maya/__init__.py @@ -1,4 +1,5 @@ import os +from .module import OpenPypeMaya def add_implementation_envs(env, _app): @@ -25,3 +26,9 @@ def add_implementation_envs(env, _app): for key, value in defaults.items(): if not env.get(key): env[key] = value + + +__all__ = ( + "OpenPypeMaya", + "add_implementation_envs", +) diff --git a/openpype/hosts/maya/module.py b/openpype/hosts/maya/module.py new file mode 100644 index 0000000000..8dfd96d4ab --- /dev/null +++ b/openpype/hosts/maya/module.py @@ -0,0 +1,10 @@ +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + + +class OpenPypeMaya(OpenPypeModule, IHostModule): + name = "openpype_maya" + host_name = "maya" + + def initialize(self, module_settings): + self.enabled = True From 88be0405986196894b16ae5cb98d303d3d0e9598 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 12:43:50 +0200 Subject: [PATCH 0290/2550] modev 'add_implementation_envs' to maya module and application knows that it should look there --- openpype/hosts/maya/__init__.py | 28 ---------------------------- openpype/hosts/maya/module.py | 27 +++++++++++++++++++++++++++ openpype/lib/applications.py | 6 ++++-- 3 files changed, 31 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/maya/__init__.py b/openpype/hosts/maya/__init__.py index 2178534b89..72b4d5853c 100644 --- a/openpype/hosts/maya/__init__.py +++ b/openpype/hosts/maya/__init__.py @@ -1,34 +1,6 @@ -import os from .module import OpenPypeMaya -def add_implementation_envs(env, _app): - # Add requirements to PYTHONPATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_python_paths = [ - os.path.join(pype_root, "openpype", "hosts", "maya", "startup") - ] - old_python_path = env.get("PYTHONPATH") or "" - for path in old_python_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_python_paths: - new_python_paths.append(norm_path) - - env["PYTHONPATH"] = os.pathsep.join(new_python_paths) - - # Set default values if are not already set via settings - defaults = { - "OPENPYPE_LOG_NO_COLORS": "Yes" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value - - __all__ = ( "OpenPypeMaya", - "add_implementation_envs", ) diff --git a/openpype/hosts/maya/module.py b/openpype/hosts/maya/module.py index 8dfd96d4ab..0af68788bc 100644 --- a/openpype/hosts/maya/module.py +++ b/openpype/hosts/maya/module.py @@ -1,6 +1,9 @@ +import os from openpype.modules import OpenPypeModule from openpype.modules.interfaces import IHostModule +MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + class OpenPypeMaya(OpenPypeModule, IHostModule): name = "openpype_maya" @@ -8,3 +11,27 @@ class OpenPypeMaya(OpenPypeModule, IHostModule): def initialize(self, module_settings): self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to PYTHONPATH + new_python_paths = [ + os.path.join(MAYA_ROOT_DIR, "startup") + ] + old_python_path = 
env.get("PYTHONPATH") or "" + for path in old_python_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_python_paths: + new_python_paths.append(norm_path) + + env["PYTHONPATH"] = os.pathsep.join(new_python_paths) + + # Set default values if are not already set via settings + defaults = { + "OPENPYPE_LOG_NO_COLORS": "Yes" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index da8623ea13..e47ec8cd11 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1508,8 +1508,10 @@ def prepare_app_environments( final_env = None # Add host specific environments if app.host_name and implementation_envs: - module = __import__("openpype.hosts", fromlist=[app.host_name]) - host_module = getattr(module, app.host_name, None) + host_module = modules_manager.get_host_module(app.host_name) + if not host_module: + module = __import__("openpype.hosts", fromlist=[app.host_name]) + host_module = getattr(module, app.host_name, None) add_implementation_envs = None if host_module: add_implementation_envs = getattr( From 8fe20486a91a8943b847b610d342df163dee3e1b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 12:51:01 +0200 Subject: [PATCH 0291/2550] Remove usage of mel eval and pymel --- .../plugins/publish/validate_render_image_rule.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index a9be996e0c..b94bdb0b14 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -1,15 +1,9 @@ -import maya.mel as mel -import pymel.core as pm +from maya import cmds import pyblish.api import openpype.api -def get_file_rule(rule): - """Workaround for a bug in python with cmds.workspace""" - return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule)) - - class ValidateRenderImageRule(pyblish.api.InstancePlugin): """Validates Maya Workpace "images" file rule matches project settings. @@ -28,7 +22,7 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def process(self, instance): required_images_rule = self.get_default_render_image_folder(instance) - current_images_rule = get_file_rule("images") + current_images_rule = cmds.workspace(fileRuleEntry="images") assert current_images_rule == required_images_rule, ( "Invalid workspace `images` file rule value: '{}'. " @@ -40,8 +34,8 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): @classmethod def repair(cls, instance): default = cls.get_default_render_image_folder(instance) - pm.workspace.fileRules["images"] = default - pm.system.Workspace.save() + cmds.workspace(fileRule=("images", default)) + cmds.workspace(saveWorkspace=True) @staticmethod def get_default_render_image_folder(instance): From 7cd47ff6c4641d7e78c8d3e9823f4d58fdea1135 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 12:54:33 +0200 Subject: [PATCH 0292/2550] Only update and save the workspace once This avoids saving it many times on repair in scenes with many renderlayers and thus many renderlayer instances since repair runs per instance. 
--- .../maya/plugins/publish/validate_render_image_rule.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index b94bdb0b14..4d3796e429 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -33,9 +33,13 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): @classmethod def repair(cls, instance): - default = cls.get_default_render_image_folder(instance) - cmds.workspace(fileRule=("images", default)) - cmds.workspace(saveWorkspace=True) + + required_images_rule = cls.get_default_render_image_folder(instance) + current_images_rule = cmds.workspace(fileRuleEntry="images") + + if current_images_rule != required_images_rule: + cmds.workspace(fileRule=("images", required_images_rule)) + cmds.workspace(saveWorkspace=True) @staticmethod def get_default_render_image_folder(instance): From 66899d9dd9b50c6bd9285d191fd1da116ab03f4f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 13:04:13 +0200 Subject: [PATCH 0293/2550] OP-3682 - implemented download from HTTP Handles shared links from GDrive. --- openpype/client/addon_distribution.py | 59 ++++++++++++++++++--------- 1 file changed, 40 insertions(+), 19 deletions(-) diff --git a/openpype/client/addon_distribution.py b/openpype/client/addon_distribution.py index 8fe9567688..de84c7301a 100644 --- a/openpype/client/addon_distribution.py +++ b/openpype/client/addon_distribution.py @@ -7,14 +7,15 @@ import attr from openpype.lib.path_tools import sha256sum from openpype.lib import PypeLogger +from openpype.lib.file_handler import RemoteFileHandler log = PypeLogger().get_logger(__name__) class UrlType(Enum): - HTTP = {} - GIT = {} - OS = {} + HTTP = "http" + GIT = "git" + OS = "os" @attr.s @@ -70,19 +71,15 @@ class AddonDownloader: "{} doesn't match expected hash".format(addon_path)) @classmethod - def unzip(cls, addon_path, destination): - """Unzips local 'addon_path' to 'destination'. + def unzip(cls, addon_zip_path, destination): + """Unzips local 'addon_zip_path' to 'destination'. 
Args: - addon_path (str): local path to addon zip file + addon_zip_path (str): local path to addon zip file destination (str): local folder to unzip """ - addon_file_name = os.path.basename(addon_path) - addon_base_file_name, _ = os.path.splitext(addon_file_name) - with ZipFile(addon_path, "r") as zip_ref: - log.debug(f"Unzipping {addon_path} to {destination}.") - zip_ref.extractall( - os.path.join(destination, addon_base_file_name)) + RemoteFileHandler.unzip(addon_zip_path, destination) + os.remove(addon_zip_path) @classmethod def remove(cls, addon_url): @@ -99,6 +96,23 @@ class OSAddonDownloader(AddonDownloader): return addon_url +class HTTPAddonDownloader(AddonDownloader): + CHUNK_SIZE = 100000 + + @classmethod + def download(cls, addon_url, destination): + log.debug(f"Downloading {addon_url} to {destination}") + file_name = os.path.basename(destination) + _, ext = os.path.splitext(file_name) + if (ext.replace(".", '') not + in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)): + file_name += ".zip" + RemoteFileHandler.download_url(addon_url, + destination, + filename=file_name) + + return os.path.join(destination, file_name) + def get_addons_info(): """Returns list of addon information from Server""" # TODO temp @@ -109,7 +123,14 @@ def get_addons_info(): "type": UrlType.OS, "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa - return [addon_info] + http_addon = AddonInfo( + **{"name": "openpype_slack", + "version": "1.0.0", + "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa + "type": UrlType.HTTP, + "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa + + return [http_addon] def update_addon_state(addon_infos, destination_folder, factory): @@ -126,14 +147,15 @@ def update_addon_state(addon_infos, destination_folder, factory): """ for addon in addon_infos: full_name = "{}_{}".format(addon.name, addon.version) - addon_url = os.path.join(destination_folder, full_name) + addon_dest = os.path.join(destination_folder, full_name) - if os.path.isdir(addon_url): - log.debug(f"Addon version folder {addon_url} already exists.") + if os.path.isdir(addon_dest): + log.debug(f"Addon version folder {addon_dest} already exists.") continue downloader = factory.get_downloader(addon.type) - downloader.download(addon.addon_url, destination_folder) + zip_file_path = downloader.download(addon.addon_url, addon_dest) + downloader.unzip(zip_file_path, addon_dest) def cli(args): @@ -141,8 +163,7 @@ def cli(args): downloader_factory = AddonDownloader() downloader_factory.register_format(UrlType.OS, OSAddonDownloader) + downloader_factory.register_format(UrlType.HTTP, HTTPAddonDownloader) print(update_addon_state(get_addons_info(), addon_folder, downloader_factory)) - - From eab14fc5e9204126554e8be5898010e0db0398ca Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 13:20:27 +0200 Subject: [PATCH 0294/2550] Include inputVersions with the publish job instance metadata --- .../modules/deadline/plugins/publish/submit_publish_job.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index f05ef31938..2fa7da5dac 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -774,7 +774,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): 
"resolutionHeight": data.get("resolutionHeight", 1080), "multipartExr": data.get("multipartExr", False), "jobBatchName": data.get("jobBatchName", ""), - "useSequenceForReview": data.get("useSequenceForReview", True) + "useSequenceForReview": data.get("useSequenceForReview", True), + # map inputVersions `ObjectId` -> `str` so json supports it + "inputVersions": list(map(str, data.get("inputVersions", []))) } # skip locking version if we are creating v01 From 32176ba234cf1bff28e15c4efce51cc00d641037 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 13:29:06 +0200 Subject: [PATCH 0295/2550] modules does not have to inherit from ILaunchHookPaths and application is passed to 'collect_launch_hook_paths --- openpype/lib/applications.py | 4 +++- openpype/modules/base.py | 38 ++++++++++++++++++++++++++++------ openpype/modules/interfaces.py | 33 ++++++++++++++++++++++++++++- 3 files changed, 67 insertions(+), 8 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index e47ec8cd11..5443320960 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -996,7 +996,9 @@ class ApplicationLaunchContext: paths.append(path) # Load modules paths - paths.extend(self.modules_manager.collect_launch_hook_paths()) + paths.extend( + self.modules_manager.collect_launch_hook_paths(self.application) + ) return paths diff --git a/openpype/modules/base.py b/openpype/modules/base.py index ef577e5aa2..e26075283d 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -789,24 +789,50 @@ class ModulesManager: output.extend(paths) return output - def collect_launch_hook_paths(self): - """Helper to collect hooks from modules inherited ILaunchHookPaths. + def collect_launch_hook_paths(self, app): + """Helper to collect application launch hooks. + + It used to be based on 'ILaunchHookPaths' which is not true anymore. + Module just have to have implemented 'get_launch_hook_paths' method. + + Args: + app (Application): Application object which can be used for + filtering of which launch hook paths are returned. Returns: list: Paths to launch hook directories. """ - from openpype_interfaces import ILaunchHookPaths str_type = type("") expected_types = (list, tuple, set) output = [] for module in self.get_enabled_modules(): - # Skip module that do not inherit from `ILaunchHookPaths` - if not isinstance(module, ILaunchHookPaths): + # Skip module if does not have implemented 'get_launch_hook_paths' + func = getattr(module, "get_launch_hook_paths", None) + if func is None: + continue + + func = module.get_launch_hook_paths + if hasattr(inspect, "signature"): + sig = inspect.signature(func) + expect_args = len(sig.parameters) > 0 + else: + expect_args = len(inspect.getargspec(func)[0]) > 0 + + # Pass application argument if method expect it. + try: + if expect_args: + hook_paths = func(app) + else: + hook_paths = func() + except Exception: + self.log.warning( + "Failed to call 'get_launch_hook_paths'", + exc_info=True + ) continue - hook_paths = module.get_launch_hook_paths() if not hook_paths: continue diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 424dd158fd..de9ba13800 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -50,12 +50,32 @@ class IPluginPaths(OpenPypeInterface): class ILaunchHookPaths(OpenPypeInterface): """Module has launch hook paths to return. + Modules does not have to inherit from this interface (changed 8.11.2022). 
+    A module only has to implement 'get_launch_hook_paths' to take
+    advantage of this.
+
     Expected result is list of paths.
     ["path/to/launch_hooks_dir"]
     """

     @abstractmethod
-    def get_launch_hook_paths(self):
+    def get_launch_hook_paths(self, app):
+        """Paths to directories with application launch hooks.
+
+        The method can also be defined without arguments.
+        ```python
+        def get_launch_hook_paths(self):
+            return []
+        ```
+
+        Args:
+            app (Application): Application object which can be used for
+                filtering of which launch hook paths are returned.
+
+        Returns:
+            Iterable[str]: Paths to directories where launch hooks can be found.
+        """
+
         pass
@@ -66,6 +86,7 @@ class ITrayModule(OpenPypeInterface):
     The module still must be usable if is not used in tray even if
     would do nothing.
     """
+
     tray_initialized = False
     _tray_manager = None
@@ -78,16 +99,19 @@ class ITrayModule(OpenPypeInterface):
         This is where GUIs should be loaded or tray specific parts should be
         prepared.
         """
+
         pass
     @abstractmethod
     def tray_menu(self, tray_menu):
         """Add module's action to tray menu."""
+
         pass
     @abstractmethod
     def tray_start(self):
         """Start procedure in Pype tray."""
+
         pass
     @abstractmethod
@@ -96,6 +120,7 @@ class ITrayModule(OpenPypeInterface):
         This is place where all threads should be shut.
         """
+
         pass
     def execute_in_main_thread(self, callback):
@@ -104,6 +129,7 @@ class ITrayModule(OpenPypeInterface):
         Some callbacks need to be processed on main thread (menu actions
         must be added on main thread or they won't get triggered etc.)
         """
+
         if not self.tray_initialized:
             # TODO Called without initialized tray, still main thread needed
             try:
@@ -128,6 +154,7 @@ class ITrayModule(OpenPypeInterface):
             msecs (int): Duration of message visibility in miliseconds.
                 Default is 10000 msecs, may differ by Qt version.
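# A minimal sketch of what the 'get_launch_hook_paths' change above allows.
# The class, host name and paths are hypothetical; only the method name, the
# optional 'app' argument and 'app.host_name' come from the changes in this
# series. No inheritance from ILaunchHookPaths is needed, the modules manager
# looks the method up and inspects its signature before calling it.
import os


class ExampleHooksModule:
    """Hypothetical OpenPype module providing launch hook directories."""

    _hooks_dir = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "hooks"
    )

    def get_launch_hook_paths(self, app):
        # With the 'app' argument the module can filter per application,
        # e.g. return hooks only for a single host.
        if app.host_name != "example_host":
            return []
        return [self._hooks_dir]

    # The argument-less form is still supported; the manager checks the
    # signature first:
    #     def get_launch_hook_paths(self):
    #         return [self._hooks_dir]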
""" + if self._tray_manager: self._tray_manager.show_tray_message(title, message, icon, msecs) @@ -280,16 +307,19 @@ class ITrayService(ITrayModule): def set_service_running_icon(self): """Change icon of an QAction to green circle.""" + if self.menu_action: self.menu_action.setIcon(self.get_icon_running()) def set_service_failed_icon(self): """Change icon of an QAction to red circle.""" + if self.menu_action: self.menu_action.setIcon(self.get_icon_failed()) def set_service_idle_icon(self): """Change icon of an QAction to orange circle.""" + if self.menu_action: self.menu_action.setIcon(self.get_icon_idle()) @@ -303,6 +333,7 @@ class ISettingsChangeListener(OpenPypeInterface): "publish": ["path/to/publish_plugins"] } """ + @abstractmethod def on_system_settings_save( self, old_value, new_value, changes, new_value_metadata From 0ae844401cc271ef0edf9b16a5dda4893dd7bcfd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 13:29:35 +0200 Subject: [PATCH 0296/2550] maya is registering it's launch hooks --- openpype/hosts/maya/module.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/maya/module.py b/openpype/hosts/maya/module.py index 0af68788bc..e058f1cef5 100644 --- a/openpype/hosts/maya/module.py +++ b/openpype/hosts/maya/module.py @@ -35,3 +35,10 @@ class OpenPypeMaya(OpenPypeModule, IHostModule): for key, value in defaults.items(): if not env.get(key): env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(MAYA_ROOT_DIR, "hooks") + ] From 58af54c4437d0495f2f00c7962455bd8cdbf1a1a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 13:33:35 +0200 Subject: [PATCH 0297/2550] let host module add it's prelaunch hooks and don't guess it --- openpype/lib/applications.py | 34 +++++++++++++--------------------- 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 5443320960..e23cc6215f 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -962,32 +962,24 @@ class ApplicationLaunchContext: # TODO load additional studio paths from settings import openpype - pype_dir = os.path.dirname(os.path.abspath(openpype.__file__)) + openpype_dir = os.path.dirname(os.path.abspath(openpype.__file__)) - # --- START: Backwards compatibility --- - hooks_dir = os.path.join(pype_dir, "hooks") + global_hooks_dir = os.path.join(openpype_dir, "hooks") - subfolder_names = ["global"] - if self.host_name: - subfolder_names.append(self.host_name) - for subfolder_name in subfolder_names: - path = os.path.join(hooks_dir, subfolder_name) - if ( - os.path.exists(path) - and os.path.isdir(path) - and path not in paths - ): - paths.append(path) - # --- END: Backwards compatibility --- - - subfolders_list = [ - ["hooks"] + hooks_dirs = [ + global_hooks_dir ] if self.host_name: - subfolders_list.append(["hosts", self.host_name, "hooks"]) + # If host requires launch hooks and is module then launch hooks + # should be collected using 'collect_launch_hook_paths' + # - module have to implement 'get_launch_hook_paths' + host_module = self.modules_manager.get_host_module(self.host_name) + if not host_module: + hooks_dirs.append(os.path.join( + openpype_dir, "hosts", self.host_name, "hooks" + )) - for subfolders in subfolders_list: - path = os.path.join(pype_dir, *subfolders) + for path in hooks_dirs: if ( os.path.exists(path) and os.path.isdir(path) From 7d304d0f8695775e6f3e49d2b0271ac2b8564883 Mon Sep 17 
00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 13:39:32 +0200 Subject: [PATCH 0298/2550] host module can define workfile extensions --- openpype/lib/applications.py | 23 ++++++++++++++++------- openpype/modules/interfaces.py | 11 +++++++++++ 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index e23cc6215f..0f380d0f4b 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1303,6 +1303,7 @@ def get_app_environments_for_context( dict: Environments for passed context and application. """ + from openpype.modules import ModulesManager from openpype.pipeline import AvalonMongoDB, Anatomy # Avalon database connection @@ -1315,8 +1316,6 @@ def get_app_environments_for_context( asset_doc = get_asset_by_name(project_name, asset_name) if modules_manager is None: - from openpype.modules import ModulesManager - modules_manager = ModulesManager() # Prepare app object which can be obtained only from ApplciationManager @@ -1343,7 +1342,7 @@ def get_app_environments_for_context( }) prepare_app_environments(data, env_group, modules_manager) - prepare_context_environments(data, env_group) + prepare_context_environments(data, env_group, modules_manager) # Discard avalon connection dbcon.uninstall() @@ -1564,7 +1563,7 @@ def apply_project_environments_value( return env -def prepare_context_environments(data, env_group=None): +def prepare_context_environments(data, env_group=None, modules_manager=None): """Modify launch environments with context data for launched host. Args: @@ -1652,10 +1651,10 @@ def prepare_context_environments(data, env_group=None): data["env"]["AVALON_APP"] = app.host_name data["env"]["AVALON_WORKDIR"] = workdir - _prepare_last_workfile(data, workdir) + _prepare_last_workfile(data, workdir, modules_manager) -def _prepare_last_workfile(data, workdir): +def _prepare_last_workfile(data, workdir, modules_manager): """last workfile workflow preparation. Function check if should care about last workfile workflow and tries @@ -1670,8 +1669,13 @@ def _prepare_last_workfile(data, workdir): result will be stored. workdir (str): Path to folder where workfiles should be stored. """ + + from openpype.modules import ModulesManager from openpype.pipeline import HOST_WORKFILE_EXTENSIONS + if not modules_manager: + modules_manager = ModulesManager() + log = data["log"] _workdir_data = data.get("workdir_data") @@ -1719,7 +1723,12 @@ def _prepare_last_workfile(data, workdir): # Last workfile path last_workfile_path = data.get("last_workfile_path") or "" if not last_workfile_path: - extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) + host_module = modules_manager.get_host_module(app.host_name) + if host_module: + extensions = host_module.get_workfile_extensions() + else: + extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) + if extensions: anatomy = data["anatomy"] project_settings = data["project_settings"] diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index de9ba13800..14f49204ee 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -361,3 +361,14 @@ class IHostModule(OpenPypeInterface): """Name of host which module represents.""" pass + + def get_workfile_extensions(self): + """Define workfile extensions for host. + + Not all hosts support workfiles thus this is optional implementation. + + Returns: + List[str]: Extensions used for workfiles with dot. 
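# A minimal sketch of the lookup order this commit introduces: prefer the host
# module's 'get_workfile_extensions' and only fall back to the hardcoded
# HOST_WORKFILE_EXTENSIONS mapping when no host module is registered. The
# helper name is hypothetical; 'get_host_module' and the mapping are the ones
# used in the diff above.
def workfile_extensions_for_host(modules_manager, host_name):
    from openpype.pipeline import HOST_WORKFILE_EXTENSIONS

    host_module = modules_manager.get_host_module(host_name)
    if host_module:
        # e.g. [".ma", ".mb"] once the Maya module defines it (next commits)
        return host_module.get_workfile_extensions()
    return HOST_WORKFILE_EXTENSIONS.get(host_name) or []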
+ """ + + return [] From 9b623c1dd3e335aeb48d3428f6a0cba5e5793e51 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 14:14:21 +0200 Subject: [PATCH 0299/2550] maya define it's workfile extensions only in module itself --- openpype/hosts/maya/api/workio.py | 4 +--- openpype/hosts/maya/module.py | 3 +++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/workio.py b/openpype/hosts/maya/api/workio.py index fd4961c4bf..8c31974c73 100644 --- a/openpype/hosts/maya/api/workio.py +++ b/openpype/hosts/maya/api/workio.py @@ -2,11 +2,9 @@ import os from maya import cmds -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["maya"] + return [".ma", ".mb"] def has_unsaved_changes(): diff --git a/openpype/hosts/maya/module.py b/openpype/hosts/maya/module.py index e058f1cef5..5a215be8d2 100644 --- a/openpype/hosts/maya/module.py +++ b/openpype/hosts/maya/module.py @@ -42,3 +42,6 @@ class OpenPypeMaya(OpenPypeModule, IHostModule): return [ os.path.join(MAYA_ROOT_DIR, "hooks") ] + + def get_workfile_extensions(self): + return [".ma", ".mb"] From 6d3d52c05c630b9f559ff9a86f0e8cc574007fc7 Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Thu, 11 Aug 2022 14:46:43 +0200 Subject: [PATCH 0300/2550] Blender Validators settings schemas and defaults --- .../defaults/project_settings/blender.json | 62 +++++++++- .../schema_project_blender.json | 4 + .../schemas/schema_blender_publish.json | 114 ++++++++++++++++++ 3 files changed, 179 insertions(+), 1 deletion(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json diff --git a/openpype/settings/defaults/project_settings/blender.json b/openpype/settings/defaults/project_settings/blender.json index a7262dcb5d..a596d13865 100644 --- a/openpype/settings/defaults/project_settings/blender.json +++ b/openpype/settings/defaults/project_settings/blender.json @@ -2,5 +2,65 @@ "workfile_builder": { "create_first_version": false, "custom_templates": [] + }, + "publish": { + "ValidateCameraZeroKeyframe": { + "enabled": true, + "optional": true, + "active": true + }, + "ValidateMeshHasUvs": { + "enabled": true, + "optional": true, + "active": true + }, + "ValidateTransformZero": { + "enabled": true, + "optional": false, + "active": true + }, + "ExtractBlend": { + "enabled": true, + "optional": true, + "active": true, + "pack_images": true, + "families": [ + "model", + "camera", + "rig", + "action", + "layout" + ] + }, + "ExtractBlendAnimation": { + "enabled": true, + "optional": true, + "active": true + }, + "ExtractCamera": { + "enabled": true, + "optional": true, + "active": true + }, + "ExtractFBX": { + "enabled": true, + "optional": true, + "active": false + }, + "ExtractAnimationFBX": { + "enabled": true, + "optional": true, + "active": false + }, + "ExtractABC": { + "enabled": true, + "optional": true, + "active": false + }, + "ExtractLayout": { + "enabled": true, + "optional": true, + "active": false + } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json b/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json index af09329a03..4c72ebda2f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_blender.json @@ -12,6 +12,10 @@ "workfile_builder/builder_on_start", "workfile_builder/profiles" ] + }, + { + "type": 
"schema", + "name": "schema_blender_publish" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json new file mode 100644 index 0000000000..6111ae4a74 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json @@ -0,0 +1,114 @@ +{ + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "label", + "label": "Validators" + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateCameraZeroKeyframe", + "label": "Validate Camera Zero Keyframe" + } + ] + }, + + { + "type": "collapsible-wrap", + "label": "Model", + "children": [ + { + "type": "label", + "label": "Validators" + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateMeshHasUvs", + "label": "Validate Mesh Has UVs" + }, + { + "key": "ValidateTransformZero", + "label": "Validate Transform Zero" + } + ] + } + ] + }, + { + "type": "splitter" + }, + { + "type": "label", + "label": "Extractors" + }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractBlend", + "label": "Extract Blend", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "type": "boolean", + "key": "active", + "label": "Active" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "task-types-enum" + } + ] + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ExtractFBX", + "label": "Extract FBX (model and rig)", + }, + { + "key": "ExtractABC", + "label": "Extract ABC (model and pointcache)" + }, + { + "key": "ExtractBlendAnimation", + "label": "Extract Animation as Blend" + }, + { + "key": "ExtractAnimationFBX", + "label": "Extract Animation as FBX" + }, + { + "key": "ExtractCamera", + "label": "Extract FBX Camera as FBX" + }, + { + "key": "ExtractLayout", + "label": "Extract Layout as JSON" + } + ] + } + ] +} From 25616886bff2b6fda0b4c9646ea9256389ba248f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 15:08:14 +0200 Subject: [PATCH 0301/2550] raise and error when nothing is selected --- openpype/hosts/maya/api/lib_template_builder.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py index 855c72e361..34a8450a26 100644 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ b/openpype/hosts/maya/api/lib_template_builder.py @@ -40,6 +40,9 @@ def create_placeholder(): placeholder_name = create_placeholder_name(args, options) selection = cmds.ls(selection=True) + if not selection: + raise ValueError("Nothing is selected") + placeholder = cmds.spaceLocator(name=placeholder_name)[0] # get the long name of the placeholder (with the groups) From 683468c5633a42b8c5e80510ab060f981452d02c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 15:22:08 +0200 Subject: [PATCH 0302/2550] use 'filter_profiles' function for profiles filtering --- .../workfile/abstract_template_loader.py | 29 +++++++++++-------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py 
b/openpype/pipeline/workfile/abstract_template_loader.py index 725ab1dab3..51d06cdb3f 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -7,7 +7,11 @@ from functools import reduce from openpype.client import get_asset_by_name from openpype.settings import get_project_settings -from openpype.lib import get_linked_assets, PypeLogger as Logger +from openpype.lib import ( + Logger, + filter_profiles, + get_linked_assets, +) from openpype.pipeline import legacy_io, Anatomy from openpype.pipeline.load import ( get_loaders_by_name, @@ -167,22 +171,23 @@ class AbstractTemplateLoader: anatomy = Anatomy(project_name) project_settings = get_project_settings(project_name) - build_info = project_settings[host_name]['templated_workfile_build'] - profiles = build_info['profiles'] + build_info = project_settings[host_name]["templated_workfile_build"] + profile = filter_profiles( + build_info["profiles"], + { + "task_types": task_type, + "tasks": task_name + } + ) - for prf in profiles: - if prf['task_types'] and task_type not in prf['task_types']: - continue - if prf['tasks'] and task_name not in prf['tasks']: - continue - path = prf['path'] - break - else: # IF no template were found (no break happened) + if not profile: raise TemplateProfileNotFound( "No matching profile found for task '{}' of type '{}' " "with host '{}'".format(task_name, task_type, host_name) ) - if path is None: + + path = profile["path"] + if not path: raise TemplateLoadingFailed( "Template path is not set.\n" "Path need to be set in {}\\Template Workfile Build " From bb9a16100acd9a7d94ec6ff6ea15891916eea580 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 15:22:23 +0200 Subject: [PATCH 0303/2550] removed unnecessary finally statement --- openpype/pipeline/workfile/abstract_template_loader.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 51d06cdb3f..0ed32033af 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -205,9 +205,8 @@ class AbstractTemplateLoader: raise KeyError( "Could not solve key '{}' in template path '{}'".format( missing_key, path)) - finally: - solved_path = os.path.normpath(solved_path) + solved_path = os.path.normpath(solved_path) if not os.path.exists(solved_path): raise TemplateNotFound( "Template found in openPype settings for task '{}' with host " From 6cba799c460dc3c9745bf68fc6edcd3c6ab345e0 Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Thu, 11 Aug 2022 15:39:03 +0200 Subject: [PATCH 0304/2550] refactor blender Validators --- .../publish/validate_camera_zero_keyframe.py | 19 ++++++++++-------- .../plugins/publish/validate_mesh_has_uv.py | 17 ++++++++-------- .../validate_mesh_no_negative_scale.py | 19 ++++++++---------- .../publish/validate_no_colons_in_name.py | 15 ++++++++------ .../plugins/publish/validate_object_mode.py | 20 +++++++++---------- .../publish/validate_transform_zero.py | 19 ++++++++++++------ .../defaults/project_settings/blender.json | 6 +++++- .../schemas/schema_blender_publish.json | 9 ++------- 8 files changed, 66 insertions(+), 58 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py index 39b9b67511..bfd7224b80 100644 --- 
a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py +++ b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py @@ -1,9 +1,11 @@ from typing import List import mathutils +import bpy import pyblish.api -import openpype.hosts.blender.api.action +from openpype.api import ValidateContentsOrder +from openpype.hosts.blender.api.action import SelectInvalidAction class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): @@ -14,21 +16,21 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): in Unreal and Blender. """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["camera"] category = "geometry" version = (0, 1, 0) label = "Zero Keyframe" - actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + actions = [SelectInvalidAction] _identity = mathutils.Matrix() - @classmethod - def get_invalid(cls, instance) -> List: + @staticmethod + def get_invalid(instance) -> List: invalid = [] - for obj in [obj for obj in instance]: - if obj.type == "CAMERA": + for obj in set(instance): + if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA": if obj.animation_data and obj.animation_data.action: action = obj.animation_data.action frames_set = set() @@ -45,4 +47,5 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Object found in instance is not in Object Mode: {invalid}") + f"Camera must have a keyframe at frame 0: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py index 1c73476fc8..d83ead78cc 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py @@ -3,18 +3,19 @@ from typing import List import bpy import pyblish.api -import openpype.hosts.blender.api.action +from openpype.api import ValidateContentsOrder +from openpype.hosts.blender.api.action import SelectInvalidAction class ValidateMeshHasUvs(pyblish.api.InstancePlugin): """Validate that the current mesh has UV's.""" - order = pyblish.api.ValidatorOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] category = "geometry" label = "Mesh Has UV's" - actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + actions = [SelectInvalidAction] optional = True @staticmethod @@ -33,20 +34,20 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance) -> List: invalid = [] - # TODO (jasper): only check objects in the collection that will be published? - for obj in [ - obj for obj in instance]: + for obj in set(instance): try: if obj.type == 'MESH': # Make sure we are in object mode. 
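# A minimal sketch of the shared 'get_invalid' pattern the refactored Blender
# validators in this commit follow: iterate the instance members, keep only
# real 'bpy.types.Object' entries of the relevant type and collect offenders.
# The function name and the UV rule below are illustrative only.
def collect_invalid_mesh_objects(instance):
    import bpy

    invalid = []
    for obj in set(instance):
        if isinstance(obj, bpy.types.Object) and obj.type == "MESH":
            # Replace this check with the rule the validator enforces.
            if not obj.data.uv_layers:
                invalid.append(obj)
    return invalid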
bpy.ops.object.mode_set(mode='OBJECT') if not cls.has_uvs(obj): invalid.append(obj) - except: + except RuntimeError: continue return invalid def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}") + raise RuntimeError( + f"Meshes found in instance without valid UV's: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 00159a2d36..b7687009d7 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -3,29 +3,26 @@ from typing import List import bpy import pyblish.api -import openpype.hosts.blender.api.action +from openpype.api import ValidateContentsOrder +from openpype.hosts.blender.api.action import SelectInvalidAction class ValidateMeshNoNegativeScale(pyblish.api.Validator): """Ensure that meshes don't have a negative scale.""" - order = pyblish.api.ValidatorOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] label = "Mesh No Negative Scale" - actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + actions = [SelectInvalidAction] @staticmethod def get_invalid(instance) -> List: invalid = [] - # TODO (jasper): only check objects in the collection that will be published? - for obj in [ - obj for obj in bpy.data.objects if obj.type == 'MESH' - ]: - if any(v < 0 for v in obj.scale): - invalid.append(obj) - - return invalid + for obj in set(instance): + if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': + if any(v < 0 for v in obj.scale): + invalid.append(obj) def process(self, instance): invalid = self.get_invalid(instance) diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py index 261ff864d5..cb8fa0f34a 100644 --- a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py @@ -1,7 +1,10 @@ from typing import List +import bpy + import pyblish.api -import openpype.hosts.blender.api.action +from openpype.api import ValidateContentsOrder +from openpype.hosts.blender.api.action import SelectInvalidAction class ValidateNoColonsInName(pyblish.api.InstancePlugin): @@ -12,20 +15,20 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model", "rig"] version = (0, 1, 0) label = "No Colons in names" - actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + actions = [SelectInvalidAction] - @classmethod + @staticmethod def get_invalid(cls, instance) -> List: invalid = [] - for obj in [obj for obj in instance]: + for obj in set(instance): if ':' in obj.name: invalid.append(obj) - if obj.type == 'ARMATURE': + if isinstance(obj, bpy.types.Object) and obj.type == 'ARMATURE': for bone in obj.data.bones: if ':' in bone.name: invalid.append(obj) diff --git a/openpype/hosts/blender/plugins/publish/validate_object_mode.py b/openpype/hosts/blender/plugins/publish/validate_object_mode.py index 90ef0b7c41..36b7a59eb2 100644 --- a/openpype/hosts/blender/plugins/publish/validate_object_mode.py +++ b/openpype/hosts/blender/plugins/publish/validate_object_mode.py @@ -1,7 +1,9 @@ from typing import List +import bpy + 
import pyblish.api -import openpype.hosts.blender.api.action +from openpype.hosts.blender.api.action import SelectInvalidAction class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin): @@ -12,20 +14,16 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin): families = ["model", "rig", "layout"] category = "geometry" label = "Validate Object Mode" - actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + actions = [SelectInvalidAction] optional = False - @classmethod + @staticmethod def get_invalid(cls, instance) -> List: invalid = [] - for obj in [obj for obj in instance]: - try: - if obj.type == 'MESH' or obj.type == 'ARMATURE': - # Check if the object is in object mode. - if not obj.mode == 'OBJECT': - invalid.append(obj) - except Exception: - continue + for obj in set(instance): + if isinstance(obj, bpy.types.Object): + if not obj.mode == 'OBJECT': + invalid.append(obj) return invalid def process(self, instance): diff --git a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py index 7456dbc423..737c43cc3f 100644 --- a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -1,9 +1,11 @@ from typing import List import mathutils +import bpy import pyblish.api -import openpype.hosts.blender.api.action +from openpype.api import ValidateContentsOrder +from openpype.hosts.blender.api.action import SelectInvalidAction class ValidateTransformZero(pyblish.api.InstancePlugin): @@ -15,21 +17,24 @@ class ValidateTransformZero(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] category = "geometry" version = (0, 1, 0) label = "Transform Zero" - actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + actions = [SelectInvalidAction] _identity = mathutils.Matrix() @classmethod def get_invalid(cls, instance) -> List: invalid = [] - for obj in [obj for obj in instance]: - if obj.matrix_basis != cls._identity: + for obj in set(instance): + if ( + isinstance(obj, bpy.types.Object) + and obj.matrix_basis != cls._identity + ): invalid.append(obj) return invalid @@ -37,4 +42,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Object found in instance is not in Object Mode: {invalid}") + "Object found in instance has not" + f" transform to zero: {invalid}" + ) diff --git a/openpype/settings/defaults/project_settings/blender.json b/openpype/settings/defaults/project_settings/blender.json index a596d13865..2720e0286d 100644 --- a/openpype/settings/defaults/project_settings/blender.json +++ b/openpype/settings/defaults/project_settings/blender.json @@ -14,6 +14,11 @@ "optional": true, "active": true }, + "ValidateMeshNoNegativeScale": { + "enabled": true, + "optional": false, + "active": true + }, "ValidateTransformZero": { "enabled": true, "optional": false, @@ -23,7 +28,6 @@ "enabled": true, "optional": true, "active": true, - "pack_images": true, "families": [ "model", "camera", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json index 6111ae4a74..4dab373efd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json @@ -18,15 +18,10 @@ } ] }, - { "type": "collapsible-wrap", "label": "Model", "children": [ - { - "type": "label", - "label": "Validators" - }, { "type": "schema_template", "name": "template_publish_plugin", @@ -76,7 +71,7 @@ "key": "families", "label": "Families", "type": "list", - "object_type": "task-types-enum" + "object_type": "text" } ] }, @@ -86,7 +81,7 @@ "template_data": [ { "key": "ExtractFBX", - "label": "Extract FBX (model and rig)", + "label": "Extract FBX (model and rig)" }, { "key": "ExtractABC", From 12a8307a8331334ee9700efba2127211ea332ff0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 15:40:02 +0200 Subject: [PATCH 0305/2550] simplified path formatting --- .../workfile/abstract_template_loader.py | 43 +++++++++++++------ 1 file changed, 30 insertions(+), 13 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 0ed32033af..5afec56d71 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -8,6 +8,7 @@ from functools import reduce from openpype.client import get_asset_by_name from openpype.settings import get_project_settings from openpype.lib import ( + StringTemplate, Logger, filter_profiles, get_linked_assets, @@ -192,19 +193,35 @@ class AbstractTemplateLoader: "Template path is not set.\n" "Path need to be set in {}\\Template Workfile Build " "Settings\\Profiles".format(host_name.title())) - try: - solved_path = None - while True: + + # Try fill path with environments and anatomy roots + fill_data = { + key: value + for key, value in os.environ.items() + } + fill_data["root"] = anatomy.roots + result = StringTemplate.format_template(path, fill_data) + if result.solved: + path = result.normalized() + + if path and os.path.exists(path): + self.log.info("Found template at: '{}'".format(path)) + return path + + solved_path = None + while True: + try: solved_path = anatomy.path_remapper(path) - if solved_path is None: - solved_path = path - if solved_path == path: - break - path = solved_path - except KeyError as missing_key: - raise KeyError( - "Could not solve key '{}' in template path '{}'".format( - missing_key, path)) + except KeyError as missing_key: + raise KeyError( + "Could not solve key '{}' in template path '{}'".format( + missing_key, path)) + + if solved_path is None: + solved_path = path + if solved_path == path: + break + path = solved_path solved_path = os.path.normpath(solved_path) if not os.path.exists(solved_path): @@ -213,7 +230,7 @@ class AbstractTemplateLoader: "'{}' does not exists. 
(Not found : {})".format( task_name, host_name, solved_path)) - self.log.info("Found template at : '{}'".format(solved_path)) + self.log.info("Found template at: '{}'".format(solved_path)) return solved_path From b8376b4a42a4ff333e6305b88ee94b3b13e6fb0c Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Thu, 11 Aug 2022 15:44:54 +0200 Subject: [PATCH 0306/2550] added validator no negative scale to the schema --- .../projects_schema/schemas/schema_blender_publish.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json index 4dab373efd..58428ad60a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_blender_publish.json @@ -30,6 +30,10 @@ "key": "ValidateMeshHasUvs", "label": "Validate Mesh Has UVs" }, + { + "key": "ValidateMeshNoNegativeScale", + "label": "Validate Mesh No Negative Scale" + }, { "key": "ValidateTransformZero", "label": "Validate Transform Zero" From 748dcf1ad207edd3dbf3bc98120d9e46bf9b39e0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 15:51:27 +0200 Subject: [PATCH 0307/2550] fix filter and sort --- .../workfile/abstract_template_loader.py | 29 ++++++------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 5afec56d71..1c8ede25e6 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -351,11 +351,15 @@ class AbstractTemplateLoader: self.populate_template(ignored_ids=loaded_containers_ids) def get_placeholders(self): - placeholder_class = self.placeholder_class - placeholders = map(placeholder_class, self.get_template_nodes()) - valid_placeholders = filter(placeholder_class.is_valid, placeholders) - sorted_placeholders = sorted(valid_placeholders, - key=placeholder_class.get_order) + placeholders = map(self.placeholder_class, self.get_template_nodes()) + valid_placeholders = filter( + lambda i: i.is_valid, + placeholders + ) + sorted_placeholders = list(sorted( + valid_placeholders, + key=lambda i: i.order + )) return sorted_placeholders @abstractmethod @@ -450,21 +454,6 @@ class AbstractPlaceholder: def order(self): return self.data["order"] - def get_order(self): - """Placeholder order. - - Order is used to sort them by priority - Priority is lowset first, highest last - (ex: - 1: First to load - 100: Last to load) - - Returns: - int: Order priority - """ - - return self.order - @property def loader_name(self): """Return placeholder loader type. 
From 7eaa278c741ceaa30daf056dd17ec9e4b4ceed10 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 15:54:19 +0200 Subject: [PATCH 0308/2550] removed invalid default setting for templates --- openpype/settings/defaults/project_settings/maya.json | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 9c2c737ece..e9109abd22 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -123,7 +123,6 @@ "defaults": [ "Main" ] - }, "CreateAss": { "enabled": true, @@ -969,13 +968,7 @@ ] }, "templated_workfile_build": { - "profiles": [ - { - "task_types": [], - "tasks": [], - "path": "/path/to/your/template" - } - ] + "profiles": [] }, "filters": { "preset 1": { From bc33432a57bf16245f5bbc9ca22f1f26fbea9dd1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 16:05:56 +0200 Subject: [PATCH 0309/2550] OP-3682 - updated hash logic Currently only checking hash of zip file. --- openpype/client/addon_distribution.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/openpype/client/addon_distribution.py b/openpype/client/addon_distribution.py index de84c7301a..95c8e7d23f 100644 --- a/openpype/client/addon_distribution.py +++ b/openpype/client/addon_distribution.py @@ -1,6 +1,5 @@ import os from enum import Enum -from zipfile import ZipFile from abc import abstractmethod import attr @@ -66,9 +65,10 @@ class AddonDownloader: Raises: ValueError if hashes doesn't match """ + if not os.path.exists(addon_path): + raise ValueError(f"{addon_path} doesn't exist.") if addon_hash != sha256sum(addon_path): - raise ValueError( - "{} doesn't match expected hash".format(addon_path)) + raise ValueError(f"{addon_path} doesn't match expected hash.") @classmethod def unzip(cls, addon_zip_path, destination): @@ -153,9 +153,14 @@ def update_addon_state(addon_infos, destination_folder, factory): log.debug(f"Addon version folder {addon_dest} already exists.") continue - downloader = factory.get_downloader(addon.type) - zip_file_path = downloader.download(addon.addon_url, addon_dest) - downloader.unzip(zip_file_path, addon_dest) + try: + downloader = factory.get_downloader(addon.type) + zip_file_path = downloader.download(addon.addon_url, addon_dest) + downloader.check_hash(zip_file_path, addon.hash) + downloader.unzip(zip_file_path, addon_dest) + except Exception: + log.warning(f"Error happened during updating {addon.name}", + stack_info=True) def cli(args): From 0f90ca4a7a8a856da60e345ee86a3d7f3758c23a Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Thu, 11 Aug 2022 16:09:16 +0200 Subject: [PATCH 0310/2550] fix and clean Blender validators attrs --- .../blender/plugins/publish/validate_camera_zero_keyframe.py | 2 -- openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py | 2 +- .../blender/plugins/publish/validate_mesh_no_negative_scale.py | 1 + .../hosts/blender/plugins/publish/validate_no_colons_in_name.py | 1 + openpype/hosts/blender/plugins/publish/validate_object_mode.py | 2 +- .../hosts/blender/plugins/publish/validate_transform_zero.py | 2 +- 6 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py index bfd7224b80..ea45318219 100644 --- 
a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py +++ b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py @@ -24,8 +24,6 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): label = "Zero Keyframe" actions = [SelectInvalidAction] - _identity = mathutils.Matrix() - @staticmethod def get_invalid(instance) -> List: invalid = [] diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py index d83ead78cc..4995eedad4 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py @@ -13,7 +13,7 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): order = ValidateContentsOrder hosts = ["blender"] families = ["model"] - category = "geometry" + category = "uv" label = "Mesh Has UV's" actions = [SelectInvalidAction] optional = True diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index b7687009d7..449e711663 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -13,6 +13,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): order = ValidateContentsOrder hosts = ["blender"] families = ["model"] + category = "geometry" label = "Mesh No Negative Scale" actions = [SelectInvalidAction] diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py index cb8fa0f34a..f1889e5837 100644 --- a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py @@ -18,6 +18,7 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin): order = ValidateContentsOrder hosts = ["blender"] families = ["model", "rig"] + category = "cleanup" version = (0, 1, 0) label = "No Colons in names" actions = [SelectInvalidAction] diff --git a/openpype/hosts/blender/plugins/publish/validate_object_mode.py b/openpype/hosts/blender/plugins/publish/validate_object_mode.py index 36b7a59eb2..65b0bf7655 100644 --- a/openpype/hosts/blender/plugins/publish/validate_object_mode.py +++ b/openpype/hosts/blender/plugins/publish/validate_object_mode.py @@ -12,7 +12,7 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder - 0.01 hosts = ["blender"] families = ["model", "rig", "layout"] - category = "geometry" + category = "cleanup" label = "Validate Object Mode" actions = [SelectInvalidAction] optional = False diff --git a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py index 737c43cc3f..7443e3c64e 100644 --- a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -20,7 +20,7 @@ class ValidateTransformZero(pyblish.api.InstancePlugin): order = ValidateContentsOrder hosts = ["blender"] families = ["model"] - category = "geometry" + category = "cleanup" version = (0, 1, 0) label = "Transform Zero" actions = [SelectInvalidAction] From 161ae6ef77f0ac0f2017e7b64fdd50331c03592d Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 14:59:52 +0200 Subject: [PATCH 0311/2550] change key 'name' by 
'label' for tool name --- openpype/settings/defaults/project_settings/houdini.json | 2 +- .../projects_schema/schemas/schema_houdini_scriptshelf.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index a818f82d6b..78e0d595cf 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -12,7 +12,7 @@ "shelf_name": "OpenPype Shelf", "tools_list": [ { - "name": "OpenPype Tool", + "label": "OpenPype Tool", "script": "/path/to/your/tool_script", "icon": "/path/to/your/icon", "help": "Help message for your tool" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json index 812ab7d8c9..bab9b604b4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -42,7 +42,7 @@ "children": [ { "type": "text", - "key": "name", + "key": "label", "label": "Name" }, { From 778140b388c57ef8af0c4f69250cebf673dd6e74 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 16:21:23 +0200 Subject: [PATCH 0312/2550] add tool creation and adding tool to shelf and shelf to shelf_set --- openpype/hosts/houdini/api/shelves.py | 49 +++++++++++++++++++++------ 1 file changed, 39 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index a37ec88d64..0687e2f519 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,4 +1,3 @@ -from cProfile import label import os import logging import platform @@ -64,22 +63,23 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf = get_or_create_shelf(shelf_name) - tools = [] - for tool in shelf_definition.get('tools_list'): + for tool_definition in shelf_definition.get('tools_list'): mandatory_attributes = ['name', 'script'] if not all( - [v for k, v in tool.items() if k in mandatory_attributes] + [v for k, v in tool_definition.items() if + k in mandatory_attributes] ): log.warning("TOOLS ERROR: You need to specify at least \ the name and the script path of the tool.") return - tool = get_or_create_tool(tool, shelf) - # create the tool - # add it to a list of tools + tool = get_or_create_tool(tool_definition, shelf) - # add the tools list to the shelf with the tools already in it - # add the shelf to the shelf set with the shelfs already in it + if tool not in shelf.tools(): + shelf.setTools(list(shelf.tools()) + [tool]) + + if shelf not in shelf_set.shelves(): + shelf_set.setShelves(shelf_set.shelves() + (shelf,)) def get_or_create_shelf_set(shelf_set_label): @@ -117,4 +117,33 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - pass + existing_tools = shelf.tools() + tool_label = tool_definition.get('label') + + existing_tool = [ + tool for tool in existing_tools if tool.label() == tool_label + ] + + if existing_tool: + tool_definition.pop('name', None) + tool_definition.pop('label', None) + existing_tool[0].setData(**tool_definition) + return existing_tool[0] + + tool_name = tool_label.replace(' ', '_').lower() + + if not os.path.exists(tool_definition['script']): + log.warning( + "TOOL ERROR: This path 
doesn't exist - {}".format( + tool_definition['script'] + ) + ) + return + + with open(tool_definition['script']) as f: + script = f.read() + tool_definition.update({'script': script}) + + new_tool = hou.shelves.newTool(name=tool_name, **tool_definition) + + return new_tool From ceeb652699a4dd5a2ceb2ccba15ae84f57684e07 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 16:36:55 +0200 Subject: [PATCH 0313/2550] OP-3682 - changed logging method PypeLogger is obsolete --- openpype/client/addon_distribution.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/client/addon_distribution.py b/openpype/client/addon_distribution.py index 95c8e7d23f..46098cfa11 100644 --- a/openpype/client/addon_distribution.py +++ b/openpype/client/addon_distribution.py @@ -1,14 +1,11 @@ import os from enum import Enum from abc import abstractmethod - import attr from openpype.lib.path_tools import sha256sum -from openpype.lib import PypeLogger from openpype.lib.file_handler import RemoteFileHandler - -log = PypeLogger().get_logger(__name__) +from openpype.lib import Logger class UrlType(Enum): @@ -28,6 +25,7 @@ class AddonInfo(object): class AddonDownloader: + log = Logger.get_logger(__name__) def __init__(self): self._downloaders = {} @@ -101,7 +99,7 @@ class HTTPAddonDownloader(AddonDownloader): @classmethod def download(cls, addon_url, destination): - log.debug(f"Downloading {addon_url} to {destination}") + cls.log.debug(f"Downloading {addon_url} to {destination}") file_name = os.path.basename(destination) _, ext = os.path.splitext(file_name) if (ext.replace(".", '') not @@ -113,6 +111,7 @@ class HTTPAddonDownloader(AddonDownloader): return os.path.join(destination, file_name) + def get_addons_info(): """Returns list of addon information from Server""" # TODO temp @@ -145,6 +144,10 @@ def update_addon_state(addon_infos, destination_folder, factory): factory (AddonDownloader): factory to get appropriate downloader per addon type """ + from openpype.lib import Logger + + log = Logger.get_logger(__name__) + for addon in addon_infos: full_name = "{}_{}".format(addon.name, addon.version) addon_dest = os.path.join(destination_folder, full_name) From cf0ac3f8b56c5d0f63ab6fa00966fcfe6b76ee08 Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Thu, 11 Aug 2022 17:10:02 +0200 Subject: [PATCH 0314/2550] blender ops refresh manager after process events --- openpype/hosts/blender/api/lib.py | 2 +- openpype/hosts/blender/api/ops.py | 17 ++++++++++++----- .../hosts/blender/blender_addon/startup/init.py | 8 +++++++- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py index 20098c0fe8..9cd1ace821 100644 --- a/openpype/hosts/blender/api/lib.py +++ b/openpype/hosts/blender/api/lib.py @@ -234,7 +234,7 @@ def lsattrs(attrs: Dict) -> List: def read(node: bpy.types.bpy_struct_meta_idprop): """Return user-defined attributes from `node`""" - data = dict(node.get(pipeline.AVALON_PROPERTY)) + data = dict(node.get(pipeline.AVALON_PROPERTY, {})) # Ignore hidden/internal data data = { diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index 4f8410da74..e0e09277df 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -26,7 +26,7 @@ PREVIEW_COLLECTIONS: Dict = dict() # This seems like a good value to keep the Qt app responsive and doesn't slow # down Blender. At least on macOS I the interace of Blender gets very laggy if # you make it smaller. 
-TIMER_INTERVAL: float = 0.01 +TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1 class BlenderApplication(QtWidgets.QApplication): @@ -164,6 +164,12 @@ def _process_app_events() -> Optional[float]: dialog.setDetailedText(detail) dialog.exec_() + # Refresh Manager + if GlobalClass.app: + manager = GlobalClass.app.get_window("WM_OT_avalon_manager") + if manager: + manager.refresh() + if not GlobalClass.is_windows: if OpenFileCacher.opening_file: return TIMER_INTERVAL @@ -192,10 +198,11 @@ class LaunchQtApp(bpy.types.Operator): self._app = BlenderApplication.get_app() GlobalClass.app = self._app - bpy.app.timers.register( - _process_app_events, - persistent=True - ) + if not bpy.app.timers.is_registered(_process_app_events): + bpy.app.timers.register( + _process_app_events, + persistent=True + ) def execute(self, context): """Execute the operator. diff --git a/openpype/hosts/blender/blender_addon/startup/init.py b/openpype/hosts/blender/blender_addon/startup/init.py index 13a4b8a7a1..8dbff8a91d 100644 --- a/openpype/hosts/blender/blender_addon/startup/init.py +++ b/openpype/hosts/blender/blender_addon/startup/init.py @@ -1,4 +1,10 @@ from openpype.pipeline import install_host from openpype.hosts.blender import api -install_host(api) + +def register(): + install_host(api) + + +def unregister(): + pass From 542eedb4b299aeab0a6e74a361e72e3961c17bfb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 17:19:58 +0200 Subject: [PATCH 0315/2550] OP-3682 - moved file to distribution folder Needs to be separate from Openpype. Igniter and Openpype (and tests) could import from this if necessary. --- {openpype/client => distribution}/addon_distribution.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {openpype/client => distribution}/addon_distribution.py (100%) diff --git a/openpype/client/addon_distribution.py b/distribution/addon_distribution.py similarity index 100% rename from openpype/client/addon_distribution.py rename to distribution/addon_distribution.py From cfbc9b00777073b945b1ec25e18c32b89127ed7c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 17:24:38 +0200 Subject: [PATCH 0316/2550] OP-3682 - replaced Logger to logging Shouldn't import anything from Openpype --- distribution/addon_distribution.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 46098cfa11..b76cd8e3f8 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -2,10 +2,10 @@ import os from enum import Enum from abc import abstractmethod import attr +import logging from openpype.lib.path_tools import sha256sum from openpype.lib.file_handler import RemoteFileHandler -from openpype.lib import Logger class UrlType(Enum): @@ -25,7 +25,7 @@ class AddonInfo(object): class AddonDownloader: - log = Logger.get_logger(__name__) + log = logging.getLogger(__name__) def __init__(self): self._downloaders = {} @@ -132,7 +132,8 @@ def get_addons_info(): return [http_addon] -def update_addon_state(addon_infos, destination_folder, factory): +def update_addon_state(addon_infos, destination_folder, factory, + log=None): """Loops through all 'addon_infos', compares local version, unzips. 
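# A sketch of the extension point the downloader factory provides: a new
# transfer backend only needs a 'download' classmethod plus a 'register_format'
# call. The shared-drive backend below and the 'UrlType' value it would be
# registered under are hypothetical; 'AddonDownloader', 'cls.log' and the
# hash-check/unzip steps are the ones defined in this module.
import os
import shutil


class SharedDriveAddonDownloader(AddonDownloader):
    """Hypothetical downloader copying addon zips from a mounted share."""

    @classmethod
    def download(cls, addon_url, destination):
        cls.log.debug(f"Copying {addon_url} to {destination}")
        os.makedirs(destination, exist_ok=True)
        zip_path = os.path.join(destination, os.path.basename(addon_url))
        shutil.copyfile(addon_url, zip_path)
        # 'update_addon_state' then verifies the hash and unzips this path.
        return zip_path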
Loops through server provided list of dictionaries with information about @@ -143,10 +144,10 @@ def update_addon_state(addon_infos, destination_folder, factory): destination_folder (str): local path factory (AddonDownloader): factory to get appropriate downloader per addon type + log (logging.Logger) """ - from openpype.lib import Logger - - log = Logger.get_logger(__name__) + if not log: + log = logging.getLogger(__name__) for addon in addon_infos: full_name = "{}_{}".format(addon.name, addon.version) From 98444762cd97da52e62370677762a82b25b850c8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 17:27:14 +0200 Subject: [PATCH 0317/2550] OP-3682 - moved file_handler --- distribution/addon_distribution.py | 5 ++--- {openpype/lib => distribution}/file_handler.py | 2 +- tests/lib/testing_classes.py | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) rename {openpype/lib => distribution}/file_handler.py (99%) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index b76cd8e3f8..e29e9bbf9b 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -4,8 +4,7 @@ from abc import abstractmethod import attr import logging -from openpype.lib.path_tools import sha256sum -from openpype.lib.file_handler import RemoteFileHandler +from distribution.file_handler import RemoteFileHandler class UrlType(Enum): @@ -65,7 +64,7 @@ class AddonDownloader: """ if not os.path.exists(addon_path): raise ValueError(f"{addon_path} doesn't exist.") - if addon_hash != sha256sum(addon_path): + if addon_hash != RemoteFileHandler.calculate_md5(addon_path): raise ValueError(f"{addon_path} doesn't match expected hash.") @classmethod diff --git a/openpype/lib/file_handler.py b/distribution/file_handler.py similarity index 99% rename from openpype/lib/file_handler.py rename to distribution/file_handler.py index ee3abc6ecb..8c8b4230ce 100644 --- a/openpype/lib/file_handler.py +++ b/distribution/file_handler.py @@ -21,7 +21,7 @@ class RemoteFileHandler: 'tar.gz', 'tar.xz', 'tar.bz2'] @staticmethod - def calculate_md5(fpath, chunk_size): + def calculate_md5(fpath, chunk_size=10000): md5 = hashlib.md5() with open(fpath, 'rb') as f: for chunk in iter(lambda: f.read(chunk_size), b''): diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 75f859de48..e819ae80de 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -10,7 +10,7 @@ import glob import platform from tests.lib.db_handler import DBHandler -from openpype.lib.file_handler import RemoteFileHandler +from distribution.file_handler import RemoteFileHandler from openpype.lib.remote_publish import find_variant_key From b0c8a47f0f27a734f8ba9f201ae08dabe5d1271d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 17:32:22 +0200 Subject: [PATCH 0318/2550] Revert "OP-3682 - extracted sha256 method to lib" This reverts commit c4854be5 --- openpype/lib/path_tools.py | 19 ------------------- openpype/tools/repack_version.py | 24 +++++++++++++++++++++--- 2 files changed, 21 insertions(+), 22 deletions(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 0ae5e44d79..11648f9969 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -5,7 +5,6 @@ import json import logging import six import platform -import hashlib from openpype.client import get_project from openpype.settings import get_project_settings @@ -480,21 +479,3 @@ class HostDirmap: log.debug("local sync mapping:: {}".format(mapping)) return 
mapping - -def sha256sum(filename): - """Calculate sha256 for content of the file. - - Args: - filename (str): Path to file. - - Returns: - str: hex encoded sha256 - - """ - h = hashlib.sha256() - b = bytearray(128 * 1024) - mv = memoryview(b) - with open(filename, 'rb', buffering=0) as f: - for n in iter(lambda: f.readinto(mv), 0): - h.update(mv[:n]) - return h.hexdigest() diff --git a/openpype/tools/repack_version.py b/openpype/tools/repack_version.py index 414152970a..0172264c79 100644 --- a/openpype/tools/repack_version.py +++ b/openpype/tools/repack_version.py @@ -7,11 +7,10 @@ from pathlib import Path import platform from zipfile import ZipFile from typing import List +import hashlib import sys from igniter.bootstrap_repos import OpenPypeVersion -from openpype.lib.path_tools import sha256sum - class VersionRepacker: @@ -46,6 +45,25 @@ class VersionRepacker: print("{}{}".format(header, msg)) + @staticmethod + def sha256sum(filename): + """Calculate sha256 for content of the file. + + Args: + filename (str): Path to file. + + Returns: + str: hex encoded sha256 + + """ + h = hashlib.sha256() + b = bytearray(128 * 1024) + mv = memoryview(b) + with open(filename, 'rb', buffering=0) as f: + for n in iter(lambda: f.readinto(mv), 0): + h.update(mv[:n]) + return h.hexdigest() + @staticmethod def _filter_dir(path: Path, path_filter: List) -> List[Path]: """Recursively crawl over path and filter.""" @@ -86,7 +104,7 @@ class VersionRepacker: nits="%", color="green") for file in file_list: checksums.append(( - sha256sum(file.as_posix()), + VersionRepacker.sha256sum(file.as_posix()), file.resolve().relative_to(self.version_path), file )) From 0fb8988522a328afa33cc08960a8a2a678e2b26c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 11 Aug 2022 17:35:12 +0200 Subject: [PATCH 0319/2550] OP-3682 - Hound --- openpype/lib/path_tools.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 11648f9969..4f28be3302 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -478,4 +478,3 @@ class HostDirmap: log.debug("local sync mapping:: {}".format(mapping)) return mapping - From 66ee0beaf6d0e09eae6a8a9887a90651618a73f2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 17:43:47 +0200 Subject: [PATCH 0320/2550] fix empty or query --- openpype/client/entities.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index f9d3badb1a..c798c0ad6d 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1130,11 +1130,12 @@ def _get_representations( for item in _regex_filters(flatten_filters): for key, value in item.items(): - if key == "$or": - or_queries.append(value) - else: + if key != "$or": query_filter[key] = value + elif value: + or_queries.append(value) + if len(or_queries) == 1: query_filter["$or"] = or_queries[0] elif or_queries: From 532432d81739b2996ae94e56b6bf2faf36498dc3 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 17:57:38 +0200 Subject: [PATCH 0321/2550] add docstrings --- openpype/hosts/houdini/api/shelves.py | 43 +++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 0687e2f519..bb92aa828e 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -10,8 +10,15 @@ log = logging.getLogger("openpype.hosts.houdini") def 
generate_shelves():
+    """This function generates complete shelves, from shelf sets to tools,
+    in Houdini from the OpenPype project settings Houdini shelf definition.
+
+    Raises:
+        FileNotFoundError: Raised when the shelf set filepath does not exist
+    """
     current_os = platform.system().lower()
-    # load configuration of custom menu
+
+    # load configuration of houdini shelves
     project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
     shelves_set_config = project_settings["houdini"]["shelves"]

@@ -57,13 +64,15 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name)
             shelf_name = shelf_definition.get('shelf_name')
             if not shelf_name:
                 log.warning(
-                    "SHELF WARNGING: No name found in shelf set definition."
+                    "SHELF WARNING: No name found in shelf definition."
                 )
                 return

             shelf = get_or_create_shelf(shelf_name)

             for tool_definition in shelf_definition.get('tools_list'):
+                # We verify that the name and script attributes of the tool
+                # are set
                 mandatory_attributes = ['name', 'script']
                 if not all(
                     [v for k, v in tool_definition.items() if
@@ -75,14 +84,25 @@ the name and the script path of the tool.")

                 tool = get_or_create_tool(tool_definition, shelf)

+                # Add the tool to the shelf if not already in it
                 if tool not in shelf.tools():
                     shelf.setTools(list(shelf.tools()) + [tool])

+        # Add the shelf in the shelf set if not already in it
         if shelf not in shelf_set.shelves():
             shelf_set.setShelves(shelf_set.shelves() + (shelf,))


 def get_or_create_shelf_set(shelf_set_label):
+    """This function verifies if a shelf set with this label exists. If not,
+    it creates a new shelf set.
+
+    Arguments:
+        shelf_set_label {str} -- The label of the shelf set
+
+    Returns:
+        hou.ShelfSet -- The existing shelf set or the new one
+    """
     all_shelves_sets = hou.shelves.shelfSets().values()

     shelf_set = [
@@ -101,6 +121,15 @@ def get_or_create_shelf_set(shelf_set_label):


 def get_or_create_shelf(shelf_label):
+    """This function verifies if a shelf with this label exists. If not,
+    it creates a new shelf.
+
+    Arguments:
+        shelf_label {str} -- The label of the shelf
+
+    Returns:
+        hou.Shelf -- The existing shelf or the new one
+    """
     all_shelves = hou.shelves.shelves().values()

     shelf = [s for s in all_shelves if s.label() == shelf_label]
@@ -117,6 +146,16 @@ def get_or_create_shelf(shelf_label):


 def get_or_create_tool(tool_definition, shelf):
+    """This function verifies if the tool exists and updates it. If not, it
+    creates a new one.
+ + Arguments: + tool_definition {dict} -- Dict with label, script, icon and help + shelf {hou.Shelf} -- The parent shelf of the tool + + Returns: + hou.Tool -- The tool updated or the new one + """ existing_tools = shelf.tools() tool_label = tool_definition.get('label') From cd167a9055723c941e34c15fd3d5cc8edbaf481e Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 11 Aug 2022 18:00:34 +0200 Subject: [PATCH 0322/2550] Removed submodule vendor/configs/OpenColorIO-Configs --- .gitmodules | 5 +---- vendor/configs/OpenColorIO-Configs | 1 - 2 files changed, 1 insertion(+), 5 deletions(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/.gitmodules b/.gitmodules index bac3132b77..fe93791c4e 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,7 +4,4 @@ [submodule "tools/modules/powershell/PSWriteColor"] path = tools/modules/powershell/PSWriteColor - url = https://github.com/EvotecIT/PSWriteColor.git -[submodule "vendor/configs/OpenColorIO-Configs"] - path = vendor/configs/OpenColorIO-Configs - url = https://github.com/imageworks/OpenColorIO-Configs + url = https://github.com/EvotecIT/PSWriteColor.git \ No newline at end of file diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 37ed6bc897168e42159fb656f06d413b11c601da Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 11 Aug 2022 18:02:21 +0200 Subject: [PATCH 0323/2550] :recycle: change location of ocio configs --- .../maya/plugins/publish/extract_look.py | 18 +- poetry.lock | 821 +++--------------- pyproject.toml | 4 +- 3 files changed, 131 insertions(+), 712 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 0b26e922d5..b425efba6f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -40,15 +40,15 @@ def get_ocio_config_path(profile_folder): Returns: str: Path to vendorized config file. """ - return os.path.join( - os.environ["OPENPYPE_ROOT"], - "vendor", - "configs", - "OpenColorIO-Configs", - profile_folder, - "config.ocio" - ) - + try: + import OpenColorIOConfigs + return os.path.join( + os.path.dirname(OpenColorIOConfigs.__file__), + profile_folder, + "config.ocio" + ) + except ImportError: + return None def find_paths_by_hash(texture_hash): """Find the texture hash key in the dictionary. diff --git a/poetry.lock b/poetry.lock index 919a352505..df8d8ab14a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -48,7 +48,7 @@ aiohttp = ">=3,<4" [[package]] name = "aiohttp-middlewares" -version = "2.0.0" +version = "2.1.0" description = "Collection of useful middlewares for aiohttp applications." category = "main" optional = false @@ -114,7 +114,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.11.5" +version = "2.11.7" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false @@ -147,7 +147,7 @@ python-versions = ">=3.5" [[package]] name = "atomicwrites" -version = "1.4.0" +version = "1.4.1" description = "Atomic file writes." 
category = "dev" optional = false @@ -155,17 +155,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "autopep8" @@ -181,11 +181,11 @@ toml = "*" [[package]] name = "babel" -version = "2.9.1" +version = "2.10.3" description = "Internationalization utilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] pytz = ">=2015.7" @@ -236,7 +236,7 @@ python-versions = ">=3.6" [[package]] name = "cffi" -version = "1.15.0" +version = "1.15.1" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -279,7 +279,7 @@ test = ["pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)" [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.5" description = "Cross-platform colored terminal text." category = "dev" optional = false @@ -306,7 +306,7 @@ python-versions = "*" [[package]] name = "coverage" -version = "6.4.1" +version = "6.4.3" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -320,7 +320,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "37.0.2" +version = "37.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false @@ -408,7 +408,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "dropbox" -version = "11.31.0" +version = "11.33.0" description = "Official Dropbox API Client" category = "main" optional = false @@ -433,7 +433,7 @@ prefixed = ">=0.3.2" [[package]] name = "evdev" -version = "1.5.0" +version = "1.6.0" description = "Bindings to the Linux input handling subsystem" category = "main" optional = false @@ -455,7 +455,7 @@ pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "frozenlist" -version = "1.3.0" +version = "1.3.1" description = "A list-like structure which implements collections.abc.MutableSequence" category = "main" optional = false @@ -490,7 +490,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "gazu" -version = "0.8.28" +version = "0.8.30" description = "Gazu is a client for Zou, the API to store the data of your CG production." category = "main" optional = false @@ -530,7 +530,7 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\"" [[package]] name = "google-api-core" -version = "2.8.1" +version = "2.8.2" description = "Google API client core library" category = "main" optional = false @@ -539,13 +539,11 @@ python-versions = ">=3.6" [package.dependencies] google-auth = ">=1.25.0,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" -protobuf = ">=3.15.0,<4.0.0dev" +protobuf = ">=3.15.0,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" [package.extras] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] [[package]] name = "google-api-python-client" @@ -565,7 +563,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "2.7.0" +version = "2.10.0" description = "Google Authentication Library" category = "main" optional = false @@ -598,14 +596,14 @@ six = "*" [[package]] name = "googleapis-common-protos" -version = "1.56.2" +version = "1.56.4" description = "Common protobufs used in Google APIs" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -protobuf = ">=3.15.0,<4.0.0dev" +protobuf = ">=3.15.0,<5.0.0dev" [package.extras] grpc = ["grpcio (>=1.0.0,<2.0.0dev)"] @@ -631,7 +629,7 @@ python-versions = ">=3.5" [[package]] name = "imagesize" -version = "1.3.0" +version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" optional = false @@ -639,7 +637,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.11.4" +version = "4.12.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -652,7 +650,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -692,15 +690,15 @@ testing = ["colorama", "docopt", "pytest 
(>=3.1.0)"] [[package]] name = "jeepney" -version = "0.7.1" +version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"] -trio = ["trio", "async-generator"] +trio = ["async-generator", "trio"] +test = ["async-timeout", "trio", "testpath", "pytest-asyncio (>=0.17)", "pytest-trio", "pytest"] [[package]] name = "jinja2" @@ -799,6 +797,21 @@ category = "main" optional = false python-versions = ">=3.7" +[[package]] +name = "opencolorio-configs" +version = "1.0.2" +description = "Curated set of OpenColorIO Configs for use in OpenPype" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.source] +type = "git" +url = "https://github.com/pypeclub/OpenColorIO-Configs.git" +reference = "main" +resolved_reference = "07c5e865bf2b115b589dd2876ae632cd410821b5" + [[package]] name = "opentimelineio" version = "0.14.0.dev1" @@ -875,14 +888,14 @@ six = "*" [[package]] name = "pillow" -version = "9.1.1" +version = "9.2.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] @@ -930,11 +943,11 @@ python-versions = "*" [[package]] name = "protobuf" -version = "3.19.4" -description = "Protocol Buffers" +version = "4.21.5" +description = "" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "py" @@ -1354,7 +1367,7 @@ use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "rsa" -version = "4.8" +version = "4.9" description = "Pure-Python RSA implementation" category = "main" optional = false @@ -1408,7 +1421,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "slack-sdk" -version = "3.17.0" +version = "3.18.1" description = "The Slack API Platform SDK for Python" category = "main" optional = false @@ -1487,9 +1500,9 @@ docutils = "*" sphinx = "*" [package.extras] +test = ["pytest-cov", "pytest (>=3.0.0)"] +lint = ["pylint", "flake8", "black"] dev = ["pre-commit"] -lint = ["black", "flake8", "pylint"] -test = ["pytest (>=3.0.0)", "pytest-cov"] [[package]] name = "sphinx-rtd-theme" @@ -1638,11 +1651,11 @@ python-versions = ">=3.6" [[package]] name = "typing-extensions" -version = "4.0.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "uritemplate" @@ -1654,11 +1667,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.9" +version = "1.26.11" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] @@ -1711,11 +1724,11 @@ ujson = ["ujson"] [[package]] name = "yarl" -version = "1.7.2" +version = "1.8.1" description = "Yet another URL library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] idna = ">=2.0" @@ -1724,20 +1737,20 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" -version = "3.8.0" +version = "3.8.1" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "bd8e0a03668c380c6e76c8cd6c71020692f4ea9f32de7a4f09433564faa9dad0" +content-hash = "89fb7e8ad310b5048bf78561f1146194c8779e286d839cc000f04e88be87f3f3" [metadata.files] acre = [] @@ -1819,10 +1832,7 @@ aiohttp-json-rpc = [ {file = "aiohttp-json-rpc-0.13.3.tar.gz", hash = "sha256:6237a104478c22c6ef96c7227a01d6832597b414e4b79a52d85593356a169e99"}, {file = "aiohttp_json_rpc-0.13.3-py3-none-any.whl", hash = "sha256:4fbd197aced61bd2df7ae3237ead7d3e08833c2ccf48b8581e1828c95ebee680"}, ] -aiohttp-middlewares = [ - {file = "aiohttp-middlewares-2.0.0.tar.gz", hash = "sha256:e08ba04dc0e8fe379aa5e9444a68485c275677ee1e18c55cbb855de0c3629502"}, - {file = "aiohttp_middlewares-2.0.0-py3-none-any.whl", hash = "sha256:29cf1513176b4013844711975ff520e26a8a5d8f9fefbbddb5e91224a86b043e"}, -] +aiohttp-middlewares = [] aiosignal = [ {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, @@ -1840,10 +1850,7 @@ arrow = [ {file = "arrow-0.17.0-py2.py3-none-any.whl", hash = "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5"}, {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] -astroid = [ - {file = "astroid-2.11.5-py3-none-any.whl", hash = "sha256:14ffbb4f6aa2cf474a0834014005487f7ecd8924996083ab411e7fa0b508ce0b"}, - {file = "astroid-2.11.5.tar.gz", hash = "sha256:f4e4ec5294c4b07ac38bab9ca5ddd3914d4bf46f9006eb5c0ae755755061044e"}, -] +astroid = [] async-timeout = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, @@ -1852,99 +1859,21 @@ asynctest = [ {file = "asynctest-0.13.0-py3-none-any.whl", hash = 
"sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, ] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] +atomicwrites = [] +attrs = [] autopep8 = [ {file = "autopep8-1.5.7-py2.py3-none-any.whl", hash = "sha256:aa213493c30dcdac99537249ee65b24af0b2c29f2e83cd8b3f68760441ed0db9"}, {file = "autopep8-1.5.7.tar.gz", hash = "sha256:276ced7e9e3cb22e5d7c14748384a5cf5d9002257c0ed50c0e075b68011bb6d0"}, ] -babel = [ - {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, - {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, -] -bcrypt = [ - {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa"}, - {file = "bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e"}, - {file = "bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129"}, - {file = "bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"}, -] +babel = [] +bcrypt = [] blessed = [ {file = "blessed-1.19.1-py2.py3-none-any.whl", hash = "sha256:63b8554ae2e0e7f43749b6715c734cc8f3883010a809bf16790102563e6cf25b"}, {file = "blessed-1.19.1.tar.gz", hash = "sha256:9a0d099695bf621d4680dd6c73f6ad547f6a3442fbdbe80c4b1daa1edbc492fc"}, ] -cachetools = [ - {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, - {file = "cachetools-5.2.0.tar.gz", hash = 
"sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, -] -certifi = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, -] -cffi = [ - {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, - {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, - {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, - {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, - {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, - {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, - {file = 
"cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, - {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, - {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, - {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, - {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, - {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, - {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, - {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, - {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, - {file = 
"cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, - {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, - {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, - {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, -] +cachetools = [] +certifi = [] +cffi = [] charset-normalizer = [ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, @@ -1957,10 +1886,7 @@ clique = [ {file = "clique-1.6.1-py2.py3-none-any.whl", hash = "sha256:8619774fa035661928dd8c93cd805acf2d42533ccea1b536c09815ed426c9858"}, {file = "clique-1.6.1.tar.gz", hash = "sha256:90165c1cf162d4dd1baef83ceaa1afc886b453e379094fa5b60ea470d1733e66"}, ] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] +colorama = [] commonmark = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, @@ -1969,73 +1895,8 @@ coolname = [ {file = "coolname-1.1.0-py2.py3-none-any.whl", hash = "sha256:e6a83a0ac88640f4f3d2070438dbe112fe80cfebc119c93bd402976ec84c0978"}, {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, ] -coverage = [ - {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, - {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, - {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, - {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash 
= "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, - {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, - {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, - {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, - {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, - {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, - {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, - {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, - {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, - {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, - {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, - {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, - {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, - {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash 
= "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, - {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, - {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, - {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, - {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, - {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, - {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, -] -cryptography = [ - {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:ef15c2df7656763b4ff20a9bc4381d8352e6640cfeb95c2972c38ef508e75181"}, - {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3c81599befb4d4f3d7648ed3217e00d21a9341a9a688ecdd615ff72ffbed7336"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2bd1096476aaac820426239ab534b636c77d71af66c547b9ddcd76eb9c79e004"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:31fe38d14d2e5f787e0aecef831457da6cec68e0bb09a35835b0b44ae8b988fe"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:093cb351031656d3ee2f4fa1be579a8c69c754cf874206be1d4cf3b542042804"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59b281eab51e1b6b6afa525af2bd93c16d49358404f814fe2c2410058623928c"}, - {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:0cc20f655157d4cfc7bada909dc5cc228211b075ba8407c46467f63597c78178"}, - {file = "cryptography-37.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f8ec91983e638a9bcd75b39f1396e5c0dc2330cbd9ce4accefe68717e6779e0a"}, - {file = "cryptography-37.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:46f4c544f6557a2fefa7ac8ac7d1b17bf9b647bd20b16decc8fbcab7117fbc15"}, - {file = "cryptography-37.0.2-cp36-abi3-win32.whl", hash = "sha256:731c8abd27693323b348518ed0e0705713a36d79fdbd969ad968fbef0979a7e0"}, - {file = "cryptography-37.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:471e0d70201c069f74c837983189949aa0d24bb2d751b57e26e3761f2f782b8d"}, - {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a68254dd88021f24a68b613d8c51d5c5e74d735878b9e32cc0adf19d1f10aaf9"}, - {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:a7d5137e556cc0ea418dca6186deabe9129cee318618eb1ffecbd35bee55ddc1"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aeaba7b5e756ea52c8861c133c596afe93dd716cbcacae23b80bc238202dc023"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95e590dd70642eb2079d280420a888190aa040ad20f19ec8c6e097e38aa29e06"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1b9362d34363f2c71b7853f6251219298124aa4cc2075ae2932e64c91a3e2717"}, - {file = "cryptography-37.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e53258e69874a306fcecb88b7534d61820db8a98655662a3dd2ec7f1afd9132f"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:1f3bfbd611db5cb58ca82f3deb35e83af34bb8cf06043fa61500157d50a70982"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:419c57d7b63f5ec38b1199a9521d77d7d1754eb97827bbb773162073ccd8c8d4"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:dc26bb134452081859aa21d4990474ddb7e863aa39e60d1592800a8865a702de"}, - {file = "cryptography-37.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b8398b3d0efc420e777c40c16764d6870bcef2eb383df9c6dbb9ffe12c64452"}, - {file = "cryptography-37.0.2.tar.gz", hash = "sha256:f224ad253cc9cea7568f49077007d2263efa57396a2f2f78114066fd54b5c68e"}, -] +coverage = [] +cryptography = [] cx-freeze = [ {file = "cx_Freeze-6.9-cp310-cp310-win32.whl", hash = "sha256:776d4fb68a4831691acbd3c374362b9b48ce2e568514a73c3d4cb14d5dcf1470"}, {file = "cx_Freeze-6.9-cp310-cp310-win_amd64.whl", hash = "sha256:243f36d35a034a409cd6247d8cb5d1fbfd7374e3e668e813d0811f64d6bd5ed3"}, @@ -2064,14 +1925,8 @@ cx-logging = [ {file = "cx_Logging-3.0-cp39-cp39-win_amd64.whl", hash = "sha256:302e9c4f65a936c288a4fa59a90e7e142d9ef994aa29676731acafdcccdbb3f5"}, {file = "cx_Logging-3.0.tar.gz", hash = "sha256:ba8a7465facf7b98d8f494030fb481a2e8aeee29dc191e10383bb54ed42bdb34"}, ] -deprecated = [ - {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, - {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, -] -dill = [ - {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"}, - {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, -] +deprecated = [] +dill = [] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, @@ -2080,90 +1935,22 @@ docutils = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] -dropbox = [ - {file = "dropbox-11.31.0-py2-none-any.whl", hash = "sha256:393a99dfe30d42fd73c265b9b7d24bb21c9a961739cd097c3541e709eb2a209c"}, - {file = "dropbox-11.31.0-py3-none-any.whl", hash = 
"sha256:5f924102fd6464def81573320c6aa4ea9cd3368e1b1c13d838403dd4c9ffc919"}, - {file = "dropbox-11.31.0.tar.gz", hash = "sha256:f483d65b702775b9abf7b9328f702c68c6397fc01770477c6ddbfb1d858a5bcf"}, -] +dropbox = [] enlighten = [ {file = "enlighten-1.10.2-py2.py3-none-any.whl", hash = "sha256:b237fe562b320bf9f1d4bb76d0c98e0daf914372a76ab87c35cd02f57aa9d8c1"}, {file = "enlighten-1.10.2.tar.gz", hash = "sha256:7a5b83cd0f4d095e59d80c648ebb5f7ffca0cd8bcf7ae6639828ee1ad000632a"}, ] -evdev = [ - {file = "evdev-1.5.0.tar.gz", hash = "sha256:5b33b174f7c84576e7dd6071e438bf5ad227da95efd4356a39fe4c8355412fe6"}, -] +evdev = [] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] -frozenlist = [ - {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"}, - {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"}, - {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"}, - {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"}, - {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"}, - {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"}, - {file = 
"frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"}, - {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"}, - {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"}, - {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"}, - {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, - {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, -] +frozenlist = [] ftrack-python-api = [] future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] -gazu = [ - {file = "gazu-0.8.28-py2.py3-none-any.whl", hash = "sha256:ec4f7c2688a2b37ee8a77737e4e30565ad362428c3ade9046136a998c043e51c"}, -] +gazu = [] gitdb = [ {file = 
"gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, @@ -2172,42 +1959,24 @@ gitpython = [ {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] -google-api-core = [ - {file = "google-api-core-2.8.1.tar.gz", hash = "sha256:958024c6aa3460b08f35741231076a4dd9a4c819a6a39d44da9627febe8b28f0"}, - {file = "google_api_core-2.8.1-py3-none-any.whl", hash = "sha256:ce1daa49644b50398093d2a9ad886501aa845e2602af70c3001b9f402a9d7359"}, -] +google-api-core = [] google-api-python-client = [ {file = "google-api-python-client-1.12.11.tar.gz", hash = "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9"}, {file = "google_api_python_client-1.12.11-py2.py3-none-any.whl", hash = "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b"}, ] -google-auth = [ - {file = "google-auth-2.7.0.tar.gz", hash = "sha256:8a954960f852d5f19e6af14dd8e75c20159609e85d8db37e4013cc8c3824a7e1"}, - {file = "google_auth-2.7.0-py2.py3-none-any.whl", hash = "sha256:df549a1433108801b11bdcc0e312eaf0d5f0500db42f0523e4d65c78722e8475"}, -] +google-auth = [] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] -googleapis-common-protos = [ - {file = "googleapis-common-protos-1.56.2.tar.gz", hash = "sha256:b09b56f5463070c2153753ef123f07d2e49235e89148e9b2459ec8ed2f68d7d3"}, - {file = "googleapis_common_protos-1.56.2-py2.py3-none-any.whl", hash = "sha256:023eaea9d8c1cceccd9587c6af6c20f33eeeb05d4148670f2b0322dc1511700c"}, -] +googleapis-common-protos = [] httplib2 = [ {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, ] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -imagesize = [ - {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, - {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, - {file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, -] +idna = [] +imagesize = [] +importlib-metadata = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -2220,18 +1989,12 @@ jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = 
"sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] -jeepney = [ - {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"}, - {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"}, -] +jeepney = [] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] -jinxed = [ - {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, - {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, -] +jinxed = [] jsonschema = [ {file = "jsonschema-2.6.0-py2.py3-none-any.whl", hash = "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08"}, {file = "jsonschema-2.6.0.tar.gz", hash = "sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02"}, @@ -2283,28 +2046,12 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = 
"sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2313,27 +2060,14 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2343,12 +2077,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2418,15 +2146,13 @@ multidict = [ {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, ] +opencolorio-configs = [] opentimelineio = [] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -paramiko = [ - {file = "paramiko-2.11.0-py2.py3-none-any.whl", hash = "sha256:655f25dc8baf763277b933dfcea101d636581df8d6b9774d1fb653426b72c270"}, - {file = "paramiko-2.11.0.tar.gz", hash = "sha256:003e6bee7c034c21fbb051bf83dc0a9ee4106204dd3c53054c71452cc4ec3938"}, -] +paramiko = [] parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, @@ -2435,50 +2161,8 @@ pathlib2 = [ {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] -pillow = [ - {file = "Pillow-9.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe"}, - {file = "Pillow-9.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e"}, - {file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602"}, - {file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530"}, - {file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2"}, - {file = "Pillow-9.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a"}, - {file = "Pillow-9.1.1-cp310-cp310-win32.whl", hash = "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c"}, - {file = "Pillow-9.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108"}, - {file = 
"Pillow-9.1.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b"}, - {file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d"}, - {file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84"}, - {file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4"}, - {file = "Pillow-9.1.1-cp37-cp37m-win32.whl", hash = "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578"}, - {file = "Pillow-9.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9"}, - {file = "Pillow-9.1.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf"}, - {file = "Pillow-9.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b"}, - {file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546"}, - {file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f"}, - {file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a"}, - {file = "Pillow-9.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1"}, - {file = "Pillow-9.1.1-cp38-cp38-win32.whl", hash = "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54"}, - {file = "Pillow-9.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf"}, - {file = "Pillow-9.1.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92"}, - {file = "Pillow-9.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a"}, - {file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251"}, - {file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d"}, - {file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1"}, - {file = "Pillow-9.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601"}, - {file = "Pillow-9.1.1-cp39-cp39-win32.whl", hash = "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45"}, - {file = "Pillow-9.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c"}, - {file = "Pillow-9.1.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6"}, - {file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098"}, - {file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c"}, - {file = "Pillow-9.1.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd"}, - {file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340"}, - {file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765"}, - {file = "Pillow-9.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8"}, - {file = "Pillow-9.1.1.tar.gz", hash = "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0"}, -] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] +pillow = [] +platformdirs = [] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, @@ -2491,34 +2175,7 @@ prefixed = [ {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] -protobuf = [ - {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, - {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, - {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, - {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, - {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, - {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, - {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, - {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, - {file = 
"protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, - {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, - {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, - {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, - {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, - {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, - {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, - {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, - {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, - {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, - {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, -] +protobuf = [] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -2577,14 +2234,8 @@ pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, -] -pylint = [ - {file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"}, - {file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"}, -] +pygments = [] +pylint = [] pymongo = [ {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, @@ -2711,42 +2362,10 
@@ pynput = [ {file = "pynput-1.7.6-py3.9.egg", hash = "sha256:264429fbe676e98e9050ad26a7017453bdd08768adb25cafb918347cf9f1eb4a"}, {file = "pynput-1.7.6.tar.gz", hash = "sha256:3a5726546da54116b687785d38b1db56997ce1d28e53e8d22fc656d8b92e533c"}, ] -pyobjc-core = [ - {file = "pyobjc-core-8.5.tar.gz", hash = "sha256:704c275439856c0d1287469f0d589a7d808d48b754a93d9ce5415d4eaf06d576"}, - {file = "pyobjc_core-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0c234143b48334443f5adcf26e668945a6d47bc1fa6223e80918c6c735a029d9"}, - {file = "pyobjc_core-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1486ee533f0d76f666804ce89723ada4db56bfde55e56151ba512d3f849857f8"}, - {file = "pyobjc_core-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:412de06dfa728301c04b3e46fd7453320a8ae8b862e85236e547cd797a73b490"}, - {file = "pyobjc_core-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b3e09cccb1be574a82cc9f929ae27fc4283eccc75496cb5d51534caa6bb83a3"}, - {file = "pyobjc_core-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:eeafe21f879666ab7f57efcc6b007c9f5f8733d367b7e380c925203ed83f000d"}, - {file = "pyobjc_core-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c0071686976d7ea8c14690950e504a13cb22b4ebb2bc7b5ec47c1c1c0f6eff41"}, -] -pyobjc-framework-applicationservices = [ - {file = "pyobjc-framework-ApplicationServices-8.5.tar.gz", hash = "sha256:fa3015ef8e3add90af3447d7fdcc7f8dd083cc2a1d58f99a569480a2df10d2b1"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:436b16ebe448a829a8312e10208eec81a2adcae1fff674dbcc3262e1bd76e0ca"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:408958d14aa7fcf46f2163754c211078bc63be1368934d86188202914dce077d"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1d6cd4ce192859a22e208da4d7177a1c3ceb1ef2f64c339fd881102b1210cadd"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0251d092adb1d2d116fd9f147ceef0e53b158a46c21245131c40b9d7b786d0db"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:9742e69fe6d4545d0e02b0ad0a7a2432bc9944569ee07d6e90ffa5ef614df9f7"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16f5677c14ea903c6aaca1dd121521825c39e816cae696d6ae32c0b287252ab2"}, -] -pyobjc-framework-cocoa = [ - {file = "pyobjc-framework-Cocoa-8.5.tar.gz", hash = "sha256:569bd3a020f64b536fb2d1c085b37553e50558c9f907e08b73ffc16ae68e1861"}, - {file = "pyobjc_framework_Cocoa-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7a7c160416696bf6035dfcdf0e603aaa52858d6afcddfcc5ab41733619ac2529"}, - {file = "pyobjc_framework_Cocoa-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6ceba444282030be8596b812260e8d28b671254a51052ad778d32da6e17db847"}, - {file = "pyobjc_framework_Cocoa-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f46b2b161b8dd40c7b9e00bc69636c3e6480b2704a69aee22ee0154befbe163a"}, - {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b31d425aee8698cbf62b187338f5ca59427fa4dca2153a73866f7cb410713119"}, - {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:898359ac1f76eedec8aa156847682378a8950824421c40edb89391286e607dc4"}, - {file = "pyobjc_framework_Cocoa-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:baa2947f76b119a3360973d74d57d6dada87ac527bab9a88f31596af392f123c"}, -] -pyobjc-framework-quartz = [ - {file = "pyobjc-framework-Quartz-8.5.tar.gz", hash = "sha256:d2bc5467a792ddc04814f12a1e9c2fcaf699a1c3ad3d4264cfdce6b9c7b10624"}, - {file = "pyobjc_framework_Quartz-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9f0fb663f7872c9de94169031ac42b91ad01bd4cad49a9f1a0164be8f028426"}, - {file = "pyobjc_framework_Quartz-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:567eec91287cfe9a1b6433717192c585935de8f3daa28d82ce72fdd6c7ac00f6"}, - {file = "pyobjc_framework_Quartz-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f910ab41a712ffc7a8c3e3716a2d6f39ea4419004b26a2fd2d2f740ff5c262c"}, - {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29d07066781628278bf0e5278abcfc96ef6724c66c5629a0b4c214d319a82e55"}, - {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:72abcde1a3d72be11f2c881c9b9872044c8f2de86d2047b67fe771713638b107"}, - {file = "pyobjc_framework_Quartz-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8809b9a2df2f461697bdb45b6d1b5a4f881f88f09450e3990858e64e3e26c530"}, -] +pyobjc-core = [] +pyobjc-framework-applicationservices = [] +pyobjc-framework-cocoa = [] +pyobjc-framework-quartz = [] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, @@ -2770,14 +2389,8 @@ python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-engineio = [ - {file = "python-engineio-3.14.2.tar.gz", hash = "sha256:eab4553f2804c1ce97054c8b22cf0d5a9ab23128075248b97e1a5b2f29553085"}, - {file = "python_engineio-3.14.2-py2.py3-none-any.whl", hash = "sha256:5a9e6086d192463b04a1428ff1f85b6ba631bbb19d453b144ffc04f530542b84"}, -] -python-socketio = [ - {file = "python-socketio-4.6.1.tar.gz", hash = "sha256:cd1f5aa492c1eb2be77838e837a495f117e17f686029ebc03d62c09e33f4fa10"}, - {file = "python_socketio-4.6.1-py2.py3-none-any.whl", hash = "sha256:5a21da53fdbdc6bb6c8071f40e13d100e0b279ad997681c2492478e06f370523"}, -] +python-engineio = [] +python-socketio = [] python-xlib = [ {file = "python-xlib-0.31.tar.gz", hash = "sha256:74d83a081f532bc07f6d7afcd6416ec38403d68f68b9b9dc9e1f28fbf2d799e9"}, {file = "python_xlib-0.31-py2.py3-none-any.whl", hash = "sha256:1ec6ce0de73d9e6592ead666779a5732b384e5b8fb1f1886bd0a81cafa477759"}, @@ -2805,10 +2418,7 @@ pywin32-ctypes = [ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, ] -"qt.py" = [ - {file = "Qt.py-1.3.7-py2.py3-none-any.whl", hash = "sha256:150099d1c6f64c9621a2c9d79d45102ec781c30ee30ee69fc082c6e9be7324fe"}, - {file = "Qt.py-1.3.7.tar.gz", hash = "sha256:803c7bdf4d6230f9a466be19d55934a173eabb61406d21cb91e80c2a3f773b1f"}, -] +"qt.py" = [] qtawesome = [ {file = "QtAwesome-0.7.3-py2.py3-none-any.whl", hash = "sha256:ddf4530b4af71cec13b24b88a4cdb56ec85b1e44c43c42d0698804c7137b09b0"}, {file = "QtAwesome-0.7.3.tar.gz", hash = 
"sha256:b98b9038d19190e83ab26d91c4d8fc3a36591ee2bc7f5016d4438b8240d097bd"}, @@ -2821,18 +2431,9 @@ recommonmark = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, ] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -rsa = [ - {file = "rsa-4.8-py3-none-any.whl", hash = "sha256:95c5d300c4e879ee69708c428ba566c59478fd653cc3a22243eeb8ed846950bb"}, - {file = "rsa-4.8.tar.gz", hash = "sha256:5c6bd9dc7a543b7fe4304a631f8a8a3b674e2bbfc49c2ae96200cdbe55df6b17"}, -] -secretstorage = [ - {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"}, - {file = "SecretStorage-3.3.2.tar.gz", hash = "sha256:0a8eb9645b320881c222e827c26f4cfcf55363e8b374a021981ef886657a912f"}, -] +requests = [] +rsa = [] +secretstorage = [] semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, @@ -2842,10 +2443,7 @@ six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -slack-sdk = [ - {file = "slack_sdk-3.17.0-py2.py3-none-any.whl", hash = "sha256:0816efc43d1d2db8286e8dbcbb2e86fd0f71c206c01c521c2cb054ecb40f9ced"}, - {file = "slack_sdk-3.17.0.tar.gz", hash = "sha256:860cd0e50c454b955f14321c8c5486a47cc1e0e84116acdb009107f836752feb"}, -] +slack-sdk = [] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, @@ -2854,18 +2452,9 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -speedcopy = [ - {file = "speedcopy-2.1.4-py3-none-any.whl", hash = "sha256:e09eb1de67ae0e0b51d5b99a28882009d565a37a3cb3c6bae121e3a5d3cccb17"}, - {file = "speedcopy-2.1.4.tar.gz", hash = "sha256:eff007a97e49ec1934df4fa8074f4bd1cf4a3b14c5499d914988785cff0c199a"}, -] -sphinx = [ - {file = "Sphinx-5.0.1-py3-none-any.whl", hash = "sha256:36aa2a3c2f6d5230be94585bc5d74badd5f9ed8f3388b8eedc1726fe45b1ad30"}, - {file = "Sphinx-5.0.1.tar.gz", hash = "sha256:f4da1187785a5bc7312cc271b0e867a93946c319d106363e102936a3d9857306"}, -] -sphinx-qt-documentation = [ - {file = "sphinx_qt_documentation-0.4-py3-none-any.whl", hash = "sha256:fa131093f75cd1bd48699cd132e18e4d46ba9eaadc070e6026867cea75ecdb7b"}, - {file = "sphinx_qt_documentation-0.4.tar.gz", hash = "sha256:f43ba17baa93e353fb94045027fb67f9d935ed158ce8662de93f08b88eec6774"}, -] +speedcopy = [] +sphinx = [] +sphinx-qt-documentation = [] sphinx-rtd-theme = [ {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = 
"sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, @@ -2914,44 +2503,13 @@ tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -typed-ast = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, -] -typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, -] +typed-ast = [] +typing-extensions = [] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] -urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, -] +urllib3 = [] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, @@ -2960,151 +2518,10 @@ websocket-client = [ {file = "websocket-client-0.59.0.tar.gz", hash = "sha256:d376bd60eace9d437ab6d7ee16f4ab4e821c9dae591e1b783c58ebd8aaf80c5c"}, {file = "websocket_client-0.59.0-py2.py3-none-any.whl", hash = "sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32"}, ] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] +wrapt = [] wsrpc-aiohttp = [ {file = "wsrpc-aiohttp-3.2.0.tar.gz", hash = "sha256:f467abc51bcdc760fc5aeb7041abdeef46eeca3928dc43dd6e7fa7a533563818"}, {file = "wsrpc_aiohttp-3.2.0-py3-none-any.whl", hash = "sha256:fa9b0bf5cb056898cb5c9f64cbc5eacb8a5dd18ab1b7f0cd4a2208b4a7fde282"}, ] -yarl = [ - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, - {file = 
"yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, - {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, - {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, - {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, - {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, - {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, - {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, - {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, - {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, - {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, - {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, - {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, - {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, - {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, -] -zipp = [ - {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, - {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, -] +yarl = [] +zipp = [] diff --git a/pyproject.toml b/pyproject.toml index 994c83d369..1d757deaa0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,6 +70,7 @@ requests = "^2.25.1" pysftp = "^0.2.9" dropbox = "^11.20.0" aiohttp-middlewares = "^2.0.0" +OpenColorIO-Configs = { git = "https://github.com/pypeclub/OpenColorIO-Configs.git", branch = "main" } [tool.poetry.dev-dependencies] @@ -80,13 +81,14 @@ cx_freeze = "~6.9" GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" +markupsafe = "2.0.1" pycodestyle = "^2.5.0" pydocstyle = "^3.0.0" pylint = "^2.4.4" pytest = "^6.1" pytest-cov = "*" pytest-print = "*" -Sphinx = "*" +Sphinx = "5.0.1" sphinx-rtd-theme = "*" sphinxcontrib-websupport = "*" sphinx-qt-documentation = "*" From 74161e931e17a736c3dae6ae24678f9db4d497d0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 18:02:47 +0200 Subject: [PATCH 0324/2550] Show whether a subset is loaded into the current scene --- openpype/tools/loader/model.py | 157 +++++++++++++++++++------------ openpype/tools/loader/widgets.py | 3 +- 2 files changed, 101 insertions(+), 59 deletions(-) 
diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index a5174bd804..3130f879df 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -14,9 +14,11 @@ from openpype.client import ( get_versions, get_hero_versions, get_version_by_name, - get_representations + get_representations, + get_representations_parents ) from openpype.pipeline import ( + registered_host, HeroVersionType, schema, ) @@ -136,7 +138,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration", "handles", "step", - "repre_info" + "repre_info", + "loaded_in_scene" ] column_labels_mapping = { @@ -150,7 +153,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration": "Duration", "handles": "Handles", "step": "Step", - "repre_info": "Availability" + "repre_info": "Availability", + "loaded_in_scene": "In scene" } SortAscendingRole = QtCore.Qt.UserRole + 2 @@ -231,8 +235,14 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): self._doc_fetching_stop = False self._doc_payload = {} - self.doc_fetched.connect(self._on_doc_fetched) + self._host = registered_host() + self._loaded_representation_ids = set() + # Refresh loaded scene containers only every 3 seconds at most + self._host_loaded_refresh_timeout = 3 + self._host_loaded_refresh_time = 0 + + self.doc_fetched.connect(self._on_doc_fetched) self.refresh() def get_item_by_id(self, item_id): @@ -472,6 +482,29 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): last_versions_by_subset_id[subset_id] = hero_version + # Check loaded subsets + subsets_loaded_by_id = set() + ids = self._loaded_representation_ids + if ids: + if self._doc_fetching_stop: + return + + # Get subsets from representations + # todo: optimize with aggregation query to distinct subset id + representations = get_representations(project_name, + representation_ids=ids, + fields=["parent"]) + parents_by_repre_id = get_representations_parents( + project_name, + representations=representations + ) + for repre_id, repre_parents in parents_by_repre_id.items(): + _, repre_subset, _, _ = repre_parents + subsets_loaded_by_id.add(repre_subset["_id"]) + + if self._doc_fetching_stop: + return + repre_info = {} if self.sync_server.enabled: version_ids = set() @@ -494,7 +527,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "subset_docs_by_id": subset_docs_by_id, "subset_families": subset_families, "last_versions_by_subset_id": last_versions_by_subset_id, - "repre_info_by_version_id": repre_info + "repre_info_by_version_id": repre_info, + "subsets_loaded_by_id": subsets_loaded_by_id } self.doc_fetched.emit() @@ -526,6 +560,17 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): self.doc_fetched.emit() return + # Collect scene container representations to compare loaded state + # This runs in the main thread because it involves the host DCC + if self._host: + time_since_refresh = time.time() - self._host_loaded_refresh_time + print(time_since_refresh) + if time_since_refresh > self._host_loaded_refresh_timeout: + repre_ids = {con.get("representation") + for con in self._host.ls()} + self._loaded_representation_ids = repre_ids + self._host_loaded_refresh_time = time.time() + self.fetch_subset_and_version() def _on_doc_fetched(self): @@ -547,6 +592,10 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "repre_info_by_version_id" ) + subsets_loaded_by_id = self._doc_payload.get( + "subsets_loaded_by_id" + ) + if ( asset_docs_by_id is None or subset_docs_by_id is None @@ -561,7 +610,8 @@ class SubsetsModel(TreeModel, 
BaseRepresentationModel): asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id, - repre_info_by_version_id + repre_info_by_version_id, + subsets_loaded_by_id ) self.endResetModel() self.refreshed.emit(True) @@ -589,8 +639,12 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): return merge_group def _fill_subset_items( - self, asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id, - repre_info_by_version_id + self, + asset_docs_by_id, + subset_docs_by_id, + last_versions_by_subset_id, + repre_info_by_version_id, + subsets_loaded_by_id ): _groups_tuple = self.groups_config.split_subsets_for_groups( subset_docs_by_id.values(), self._grouping @@ -614,6 +668,37 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "index": self.index(group_item.row(), 0) } + def _add_subset_item(subset_doc, parent_item, parent_index): + last_version = last_versions_by_subset_id.get( + subset_doc["_id"] + ) + # do not show subset without version + if not last_version: + return + + data = copy.deepcopy(subset_doc) + data["subset"] = subset_doc["name"] + + asset_id = subset_doc["parent"] + data["asset"] = asset_docs_by_id[asset_id]["name"] + + data["last_version"] = last_version + + loaded = subset_doc["_id"] in subsets_loaded_by_id + data["loaded_in_scene"] = "yes" if loaded else "no" + + # Sync server data + data.update( + self._get_last_repre_info(repre_info_by_version_id, + last_version["_id"])) + + item = Item() + item.update(data) + self.add_child(item, parent_item) + + index = self.index(item.row(), 0, parent_index) + self.set_version(index, last_version) + subset_counter = 0 for group_name, subset_docs_by_name in subset_docs_by_group.items(): parent_item = group_item_by_name[group_name]["item"] @@ -636,31 +721,9 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): _parent_index = parent_index for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - - data = copy.deepcopy(subset_doc) - data["subset"] = subset_name - data["asset"] = asset_docs_by_id[asset_id]["name"] - - last_version = last_versions_by_subset_id.get( - subset_doc["_id"] - ) - data["last_version"] = last_version - - # do not show subset without version - if not last_version: - continue - - data.update( - self._get_last_repre_info(repre_info_by_version_id, - last_version["_id"])) - - item = Item() - item.update(data) - self.add_child(item, _parent_item) - - index = self.index(item.row(), 0, _parent_index) - self.set_version(index, last_version) + _add_subset_item(subset_doc, + parent_item=_parent_item, + parent_index=_parent_index) for subset_name in sorted(subset_docs_without_group.keys()): subset_docs = subset_docs_without_group[subset_name] @@ -675,31 +738,9 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): subset_counter += 1 for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - - data = copy.deepcopy(subset_doc) - data["subset"] = subset_name - data["asset"] = asset_docs_by_id[asset_id]["name"] - - last_version = last_versions_by_subset_id.get( - subset_doc["_id"] - ) - data["last_version"] = last_version - - # do not show subset without version - if not last_version: - continue - - data.update( - self._get_last_repre_info(repre_info_by_version_id, - last_version["_id"])) - - item = Item() - item.update(data) - self.add_child(item, parent_item) - - index = self.index(item.row(), 0, parent_index) - self.set_version(index, last_version) + _add_subset_item(subset_doc, + parent_item=parent_item, + parent_index=parent_index) def data(self, index, role): if not 
index.isValid(): diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 48c038418a..3c4a89aa0f 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -168,7 +168,8 @@ class SubsetWidget(QtWidgets.QWidget): ("duration", 60), ("handles", 55), ("step", 10), - ("repre_info", 65) + ("repre_info", 65), + ("loaded_in_scene", 20) ) def __init__( From a07650226a3fe54eb3becfbc16881acf7e8ae6cf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 18:09:19 +0200 Subject: [PATCH 0325/2550] Remove unused variables --- openpype/tools/loader/model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 3130f879df..6cb9ba2c6d 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -498,8 +498,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): project_name, representations=representations ) - for repre_id, repre_parents in parents_by_repre_id.items(): - _, repre_subset, _, _ = repre_parents + for repre_parents in parents_by_repre_id.values(): + repre_subset = repre_parents[1] subsets_loaded_by_id.add(repre_subset["_id"]) if self._doc_fetching_stop: From 7f48af4bdcf524ae91447038b658a60aa256f80e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 00:11:52 +0800 Subject: [PATCH 0326/2550] Collect full_exp_files instead of leaving it empty --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index e6fc8a01e5..26ad0818e0 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -219,6 +219,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): full_paths.append(full_path) publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths + full_exp_files = [aov_dict] frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) From 56dad829047afc22ea61d5281660f18b47e9b585 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 18:13:18 +0200 Subject: [PATCH 0327/2550] admin docs for houdini shelves manager --- website/docs/admin_hosts_houdini.md | 11 +++++++++++ .../assets/houdini-admin_shelvesmanager.png | Bin 0 -> 28464 bytes website/sidebars.js | 5 +++-- 3 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 website/docs/admin_hosts_houdini.md create mode 100644 website/docs/assets/houdini-admin_shelvesmanager.png diff --git a/website/docs/admin_hosts_houdini.md b/website/docs/admin_hosts_houdini.md new file mode 100644 index 0000000000..64c54db591 --- /dev/null +++ b/website/docs/admin_hosts_houdini.md @@ -0,0 +1,11 @@ +--- +id: admin_hosts_houdini +title: Houdini +sidebar_label: Houdini +--- + +## Shelves Manager +You can add your custom shelf set into Houdini by setting your shelf sets, shelves and tools in **Houdini -> Shelves Manager**. +![Custom menu definition](assets/houdini-admin_shelvesmanager.png) + +The Shelf Set Path is used to load a .shelf file to generate your shelf set. If the path is specified, you don't have to set the shelves and tools. 
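As a rough illustration of what loading such a file amounts to (not part of these docs — the path is made up, and the exact hou.shelves calls should be double-checked against the Houdini version's documentation), reading a shelf file from Python inside Houdini could look like this:

# Hedged sketch: read a shelf definition file into the running Houdini
# session and list the shelves/tools it defines. Must run inside Houdini,
# where the `hou` module is available; the path below is hypothetical.
import hou

shelf_path = "/studio/configs/houdini/openpype_tools.shelf"  # example only
hou.shelves.loadFile(shelf_path)

for name, shelf in hou.shelves.shelves().items():
    print(name, [tool.name() for tool in shelf.tools()])
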
\ No newline at end of file diff --git a/website/docs/assets/houdini-admin_shelvesmanager.png b/website/docs/assets/houdini-admin_shelvesmanager.png new file mode 100644 index 0000000000000000000000000000000000000000..ba2f15a6a5eb5af0286a4f1485ddd97b351beab6 GIT binary patch literal 28464 zcmce;byQqmwlzwW;1Dc9gF6Iw2^8*5p+PG^fZ$#P5}@$l!9$_JgL{Qw!QCxTxVy_c z^sl?$?Y@2Qcz=A~V~mWlbM`rB@3r@yYtFe6qNXB?g-(Kwf`Wo2F9*^=?(&^{9d{Enj^zieS?OdPvQtu}F)vhC-j6K%#X^tZiZqpdePF&z-BxIRO1-hOJ zp#tyM@zj9(Lis5EZ54Py@l)e*5Q-L?aDV*@Qf1=&0Qf~^V8^M-?$z$U9465R{DgA* zG5Hbja{Yk!zNvF5i~-=WP*75d!7-0p=oE~>F)tGq^#V1%TTP13PXBcdF(`pHZ-l<* z&>^o}_Yelx%^Koo7cQ&5?<`^vt|8VF*+<}nCyO`^hyOSsu92-K%nUgaSu8keGiK!8 zucQ=ylx8f(h79dtsG;g_3rSSUfC}On`8eu#WGfDg?o8X3DygWv)64}L9h4~_J*My6 za0A=;OiQ0SU%Rn&M#eJ|Kzi$5A=b4lb22o5Ykxd)q~Mi(zb(-;Kls6>0vr|^YQT${ zr&g#9YAY{tXp$|Rh#*awXJH++sE0Mi?}XFASDx6R zuXW~$J#0!}Jtq9U$9Rn6rB7$qLZ4WfOoCuO#+Ys0@4%YLN!Au(Xgs|gM zM!=`OKG`Qvk_1!^dN?i^R1#!lj=5?^88}LRZn)IV*%#}~Tw>gZ#GNqWhOD`~)qU_% z-Sy-oCuapj`|M%sYs}U%R_99yrW7cKwkuo-)SPkH_{&`h#MRY|%_c+i7zdBnTjV~9 zgwl@YrM!&CWWp@K>}+g%3kU~oWo4h)S9z^q1A{#S(U+;I`D6LMV={KiX^f$tV)3;V zv@^cWV+S8STzdixBno=(m0-2Qf@KxBGlINW>rmW&o<4LbI;*p%hb1T%dwjF9IfdH3 zrHRE`_1!x;C#PROvgjUk;RDy6#)wT7%9S4pIj(E|^gyuiCJh_-*2UgFu~0s2PRBjB zU#O|eUCr3>`5qN;DHOl|I1L`z+@H0NI!Xq-{K3f=1YUkp{_8aRk0(lDAbOhJ$I%!3 zXAs|klO*_quKt%c-kIWTcnbb~vYrI4-p`=4^qCc#8Hw37;QjyU+iMNHjjTCsdtF5X z5qxU&>vuMD55I%wcdy~rnC=&+W4|ge%jrzi`JJpr8!BE5jZ|CHRb_7H^1~RNc1-kS zdGWNP{C-ZkSBl-SXr5l#g`oj=sO$p2A}d+XJk7SQU$P6jG5NrUL8Ax)(OyB^WDbS0 zf^nmJKkVic$PnFda{wKJGo*edXl9POc@h+)nbN1PrR8%<71JRHN|s7e=*bL{Nz&G% z7ia$5X&t%8=U}8#{$8DMXV(@767$PX^`#V>ftCYE6L)o`QX3l@l zrelufGp5#PVv>8&GF!0Sr_xG#6FL?TZENUn97WQBeqC}$*}QdZdm`#3i+(ARxgupMEV!#=J3t*l3nDjd7!bQSf0LC8Ha7A3R%Z`>ey`s)#l}lKKo#^|u}NJ!f%x#| zC^<_wrMHAt{7)o_5+-s}RLOaX*i6tw*u==}_YKc-107c*KkVDxvmh8Z}Fi?_ZA@?nl zGrAceZI43F8{GvS86qsf44xz-&rMx}B`qqKkA?09jtdaUNePn%Vd^0vi=b>y`Nrgf zdY_2UOO2gq^7(J`;X(4u0&kuZ>Q83Sz)OsQc3(ik9WuOXE$zAOa- z0bSQOW8Cy5iUbAao8tOU(d@sif!$Q8eb29v<(GK;U{n*f?-8K3dOJ|tZ!WwpF60)` z=?OA?LK#%_r>ry5;YDF3K?7WKZb-@{Fa>4`lk@%%=D0wNbs&W>dLNbXUO8zuQVHF} zd%?j#1_paks;CXNh{|A1rq*U~Vmk4nrVjc}F05-*-g$k|!;4RGX!FT84csCRIyTU$ zu9Yuo&hql*%z%lyo^ZF};*AXYM}tiAS1)j*f2q%P;{Yq~H^r?<(QKW<;*%%Ayp~>e zm+!r=X{p}h(oDW|$9SZ@5(u&g%V6D$BCK;xrZI|s3im&Tw`kqjLuE3i<^NilfYORJmgMp9^`xHV++ zJ#{c1Kc@#hvb1zm8&4?_`yQ`JG;PknvymkjL-#$Rait92oC*slpIX{X%Ys&;WE z8`bgfrbO5A%k*mP_}vkTS+l5t4`-o}IUSRl)eGRxj>(jqHos!L^rQAI*?TL|zv(8I zC(N97QX=M;<+vy7uksk^TJl;W<+~yKnz6L+6)Jw03zI8^48kMAE%kV%Ox;r)MquN0 zv<(K9IE5a;bo{3PDmztkcEOZ)IY!h%e`EIB@oz7D6KfV8L<6?i$1iLmu`|nbKKos& zeX9WOU>zhhL;;{ql=Y7P51Y(?-ksQwMYEeKbk4D7iBs>9h>Lmcz<)q^e?K`m{jgzM zN=iv6Eg4xW!2oPLgYC}(IyamqCWJ*O_5#~BZh3=a*0SJ|s@mhDqthUDOcx=bRiki? 
zYIMJKTna2NmytB>%GTE>3O5FF{rWngd1c-r%{a>ds2>W4!{eCL5g}0L7@t||LwNY{ z;bAhT`w39#)7TtC6l6kgCjn+fF5rE`|0OSU%L{EH&3sGys1yDCbY99AIneBxq^nr% z%|x~}Ymge_&UBWA%b7G(Qd;_m_X7yN-RZxy^z_f9hI@LX(_Iw%-a) zv5MUy+qlf7?j8eoNqdtlF}gvN|lL8zL7F@cjKX7^vZIaF1thZw5;= zwcx*^sbu6+U+~&~@w)ocixW&qMWvjjh#=HDh72@fbMw65$VOojQ5~FaP)h-UoR`=2 z#`hkzCSag2u*TxVdwL1D(wq+Tx~2%;q&P(tlsphAyI*McuX$=Bc?XWKVWRp%kgs>8E0GgHvsfDZ{n^j- zA3uHoD3AG+&!Md&d@;>~%WOD^F1I`0faxAGvZMzb)%Pdc^hF7ghuV_2IBm!~p)}%j zo8mZCVkd_G27KkfY7U|jL~sdYh``g2gsHleCr~YRU*0be29P!0*{=s>JmMDa`l0Z> zzJt>$<)-U!L*()CQzfUa*K2$#VavxSHJ9a$`4T2)S%mB0z7nv>0)M zN&GRPpY`%KF$ly1YD!A)Y;0DSmysb*J0dGPxdt!eG023r-a7lt&&wg3ot=GoeheyR zP)glLCrL<6RZv!^!U+YD9DwnPJA6m!Hn;Ab)FE!J^e6lH_&`6wZ2tPC74wgw?h+i4 zJPPYrY?fESQp-viep>Rb`W}q{=T>g>SZjZpy|%loV4chMeG z(|!u+jO?|!aufx$`21`MR4<8L#`SsgWz5`pclt@`$wODiW4|CQ+54}hq*#-p^bmmvTL8WMj2u^0^EaL)#AkG!|fi~Ysb zW+pc=L*7LKJaLizb5^*7Qgwo<&JW?{=JepOpZM2bN+%y43BYm4yM0as;gT)_InBjsG>-#Y{@lV z05)50i8^Ce*$(f$&LpHtMP=piOtqFjB{Tew0N^ryjx(jIGVvmN;cQoFRN9_+I03HG?P*BV~+2PaVNQ;1DRJDOTvRz%__Uk$g9Jafx9L z-VER8I=)K9bT>7%gR2La0H9TBo2fsERjzc#13%Sqs7XZv*iIzQn-k^#4EhRqb;#>d zUe5481kG|%U8||ula}uJ^1B0((Jei}SyS<@MNK&kfLHh-cR%)0ZWgjNkq6D0eOD`B zbmJtt}#ci-+Wr(?e`kP3SGv9`iabNJspdSXD% zC$B*3=`O9AXs3fDq<{1y$EvRqX0}?)>+qk?9*zgce}P&zVes|^Th2qvmj`nNdTfP#7uBqb*Isz^l7_dft@(tn@; literal 0 HcmV?d00001 diff --git a/website/sidebars.js b/website/sidebars.js index 9d60a5811c..6ccfc42180 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -101,6 +101,7 @@ module.exports = { items: [ "admin_hosts_blender", "admin_hosts_hiero", + "admin_hosts_houdini", "admin_hosts_maya", "admin_hosts_nuke", "admin_hosts_resolve", @@ -146,7 +147,7 @@ module.exports = { ], }, ], - Dev: [ + Dev: [ "dev_introduction", "dev_requirements", "dev_build", @@ -160,5 +161,5 @@ module.exports = { "dev_publishing" ] } - ] + ] }; From 782a393a20ba61538fcacd181d0f1a7f47bc798b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 00:16:41 +0800 Subject: [PATCH 0328/2550] Collect full_exp_files instead of leaving it empty --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 26ad0818e0..e132cffe53 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -199,7 +199,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): ) # append full path - full_exp_files = [] aov_dict = {} default_render_file = context.data.get('project_settings')\ .get('maya')\ From 8ba5d0079952731a2e7a98490701750bffa28a9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 18:59:43 +0200 Subject: [PATCH 0329/2550] move env setup function used in prelaunch hook from api higher --- openpype/hosts/resolve/utils.py | 54 +++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 openpype/hosts/resolve/utils.py diff --git a/openpype/hosts/resolve/utils.py b/openpype/hosts/resolve/utils.py new file mode 100644 index 0000000000..382a7cf344 --- /dev/null +++ b/openpype/hosts/resolve/utils.py @@ -0,0 +1,54 @@ +import os +import shutil +from openpype.lib import Logger + +RESOLVE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def setup(env): + log = Logger.get_logger("ResolveSetup") + scripts = {} + us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR") + us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") + us_paths = [os.path.join( + RESOLVE_ROOT_DIR, + 
"utility_scripts" + )] + + # collect script dirs + if us_env: + log.info(f"Utility Scripts Env: `{us_env}`") + us_paths = us_env.split( + os.pathsep) + us_paths + + # collect scripts from dirs + for path in us_paths: + scripts.update({path: os.listdir(path)}) + + log.info(f"Utility Scripts Dir: `{us_paths}`") + log.info(f"Utility Scripts: `{scripts}`") + + # make sure no script file is in folder + for s in os.listdir(us_dir): + path = os.path.join(us_dir, s) + log.info(f"Removing `{path}`...") + if os.path.isdir(path): + shutil.rmtree(path, onerror=None) + else: + os.remove(path) + + # copy scripts into Resolve's utility scripts dir + for d, sl in scripts.items(): + # directory and scripts list + for s in sl: + # script in script list + src = os.path.join(d, s) + dst = os.path.join(us_dir, s) + log.info(f"Copying `{src}` to `{dst}`...") + if os.path.isdir(src): + shutil.copytree( + src, dst, symlinks=False, + ignore=None, ignore_dangling_symlinks=False + ) + else: + shutil.copy2(src, dst) From 07d89fc23b593890d10f7094af2be04399f8dedd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 19:01:24 +0200 Subject: [PATCH 0330/2550] fixed imports to use in-DCC imports from resolve.api --- openpype/hosts/resolve/__init__.py | 129 ----------------- openpype/hosts/resolve/api/__init__.py | 134 +++++++++++++++++- openpype/hosts/resolve/api/action.py | 2 +- openpype/hosts/resolve/api/lib.py | 6 +- openpype/hosts/resolve/api/menu.py | 4 +- openpype/hosts/resolve/api/pipeline.py | 13 +- openpype/hosts/resolve/api/preload_console.py | 4 +- openpype/hosts/resolve/api/utils.py | 96 ++----------- openpype/hosts/resolve/api/workio.py | 19 +-- .../hosts/resolve/hooks/pre_resolve_setup.py | 20 +-- .../plugins/create/create_shot_clip.py | 15 +- .../hosts/resolve/plugins/load/load_clip.py | 21 +-- .../plugins/publish/extract_workfile.py | 4 +- .../plugins/publish/precollect_instances.py | 26 ++-- .../OpenPype_sync_util_scripts.py | 5 +- .../utility_scripts/__OpenPype__Menu__.py | 6 +- .../utility_scripts/tests/test_otio_as_edl.py | 4 +- .../testing_create_timeline_item_from_path.py | 15 +- .../tests/testing_load_media_pool_item.py | 10 +- 19 files changed, 233 insertions(+), 300 deletions(-) diff --git a/openpype/hosts/resolve/__init__.py b/openpype/hosts/resolve/__init__.py index 3e49ce3b9b..e69de29bb2 100644 --- a/openpype/hosts/resolve/__init__.py +++ b/openpype/hosts/resolve/__init__.py @@ -1,129 +0,0 @@ -from .api.utils import ( - setup, - get_resolve_module -) - -from .api.pipeline import ( - install, - uninstall, - ls, - containerise, - update_container, - publish, - launch_workfiles_app, - maintained_selection, - remove_instance, - list_instances -) - -from .api.lib import ( - maintain_current_timeline, - publish_clip_color, - get_project_manager, - get_current_project, - get_current_timeline, - create_bin, - get_media_pool_item, - create_media_pool_item, - create_timeline_item, - get_timeline_item, - get_video_track_names, - get_current_timeline_items, - get_pype_timeline_item_by_name, - get_timeline_item_pype_tag, - set_timeline_item_pype_tag, - imprint, - set_publish_attribute, - get_publish_attribute, - create_compound_clip, - swap_clips, - get_pype_clip_metadata, - set_project_manager_to_folder_name, - get_otio_clip_instance_data, - get_reformated_path -) - -from .api.menu import launch_pype_menu - -from .api.plugin import ( - ClipLoader, - TimelineItemLoader, - Creator, - PublishClip -) - -from .api.workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - 
file_extensions, - work_root -) - -from .api.testing_utils import TestGUI - - -__all__ = [ - # pipeline - "install", - "uninstall", - "ls", - "containerise", - "update_container", - "reload_pipeline", - "publish", - "launch_workfiles_app", - "maintained_selection", - "remove_instance", - "list_instances", - - # utils - "setup", - "get_resolve_module", - - # lib - "maintain_current_timeline", - "publish_clip_color", - "get_project_manager", - "get_current_project", - "get_current_timeline", - "create_bin", - "get_media_pool_item", - "create_media_pool_item", - "create_timeline_item", - "get_timeline_item", - "get_video_track_names", - "get_current_timeline_items", - "get_pype_timeline_item_by_name", - "get_timeline_item_pype_tag", - "set_timeline_item_pype_tag", - "imprint", - "set_publish_attribute", - "get_publish_attribute", - "create_compound_clip", - "swap_clips", - "get_pype_clip_metadata", - "set_project_manager_to_folder_name", - "get_otio_clip_instance_data", - "get_reformated_path", - - # menu - "launch_pype_menu", - - # plugin - "ClipLoader", - "TimelineItemLoader", - "Creator", - "PublishClip", - - # workio - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - - "TestGUI" -] diff --git a/openpype/hosts/resolve/api/__init__.py b/openpype/hosts/resolve/api/__init__.py index 48bd938e57..cf1edb4c35 100644 --- a/openpype/hosts/resolve/api/__init__.py +++ b/openpype/hosts/resolve/api/__init__.py @@ -1,11 +1,137 @@ """ resolve api """ -import os bmdvr = None bmdvf = None -API_DIR = os.path.dirname(os.path.abspath(__file__)) -HOST_DIR = os.path.dirname(API_DIR) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +from .utils import ( + get_resolve_module +) + +from .pipeline import ( + install, + uninstall, + ls, + containerise, + update_container, + publish, + launch_workfiles_app, + maintained_selection, + remove_instance, + list_instances +) + +from .lib import ( + maintain_current_timeline, + publish_clip_color, + get_project_manager, + get_current_project, + get_current_timeline, + create_bin, + get_media_pool_item, + create_media_pool_item, + create_timeline_item, + get_timeline_item, + get_video_track_names, + get_current_timeline_items, + get_pype_timeline_item_by_name, + get_timeline_item_pype_tag, + set_timeline_item_pype_tag, + imprint, + set_publish_attribute, + get_publish_attribute, + create_compound_clip, + swap_clips, + get_pype_clip_metadata, + set_project_manager_to_folder_name, + get_otio_clip_instance_data, + get_reformated_path +) + +from .menu import launch_pype_menu + +from .plugin import ( + ClipLoader, + TimelineItemLoader, + Creator, + PublishClip +) + +from .workio import ( + open_file, + save_file, + current_file, + has_unsaved_changes, + file_extensions, + work_root +) + +from .testing_utils import TestGUI + + +__all__ = [ + "bmdvr", + "bmdvf", + + # pipeline + "install", + "uninstall", + "ls", + "containerise", + "update_container", + "reload_pipeline", + "publish", + "launch_workfiles_app", + "maintained_selection", + "remove_instance", + "list_instances", + + # utils + "get_resolve_module", + + # lib + "maintain_current_timeline", + "publish_clip_color", + "get_project_manager", + "get_current_project", + "get_current_timeline", + "create_bin", + "get_media_pool_item", + "create_media_pool_item", + "create_timeline_item", + "get_timeline_item", + "get_video_track_names", + "get_current_timeline_items", + "get_pype_timeline_item_by_name", + "get_timeline_item_pype_tag", + "set_timeline_item_pype_tag", + 
"imprint", + "set_publish_attribute", + "get_publish_attribute", + "create_compound_clip", + "swap_clips", + "get_pype_clip_metadata", + "set_project_manager_to_folder_name", + "get_otio_clip_instance_data", + "get_reformated_path", + + # menu + "launch_pype_menu", + + # plugin + "ClipLoader", + "TimelineItemLoader", + "Creator", + "PublishClip", + + # workio + "open_file", + "save_file", + "current_file", + "has_unsaved_changes", + "file_extensions", + "work_root", + + "TestGUI" +] diff --git a/openpype/hosts/resolve/api/action.py b/openpype/hosts/resolve/api/action.py index f8f338a850..d55a24a39a 100644 --- a/openpype/hosts/resolve/api/action.py +++ b/openpype/hosts/resolve/api/action.py @@ -4,7 +4,7 @@ from __future__ import absolute_import import pyblish.api -from ...action import get_errored_instances_from_context +from openpype.action import get_errored_instances_from_context class SelectInvalidAction(pyblish.api.Action): diff --git a/openpype/hosts/resolve/api/lib.py b/openpype/hosts/resolve/api/lib.py index 93ccdaf812..f41eb36caf 100644 --- a/openpype/hosts/resolve/api/lib.py +++ b/openpype/hosts/resolve/api/lib.py @@ -4,13 +4,13 @@ import re import os import contextlib from opentimelineio import opentime + +from openpype.lib import Logger from openpype.pipeline.editorial import is_overlapping_otio_ranges from ..otio import davinci_export as otio_export -from openpype.api import Logger - -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) self = sys.modules[__name__] self.project_manager = None diff --git a/openpype/hosts/resolve/api/menu.py b/openpype/hosts/resolve/api/menu.py index 9e0dd12376..2c7678ee5b 100644 --- a/openpype/hosts/resolve/api/menu.py +++ b/openpype/hosts/resolve/api/menu.py @@ -3,13 +3,13 @@ import sys from Qt import QtWidgets, QtCore +from openpype.tools.utils import host_tools + from .pipeline import ( publish, launch_workfiles_app ) -from openpype.tools.utils import host_tools - def load_stylesheet(): path = os.path.join(os.path.dirname(__file__), "menu_style.qss") diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index 4a7d1c5bea..1c8d9dc01c 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -7,7 +7,7 @@ from collections import OrderedDict from pyblish import api as pyblish -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( schema, register_loader_plugin_path, @@ -16,11 +16,15 @@ from openpype.pipeline import ( deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) -from . import lib -from . import PLUGINS_DIR from openpype.tools.utils import host_tools -log = Logger().get_logger(__name__) +from . import lib +from .utils import get_resolve_module + +log = Logger.get_logger(__name__) + +HOST_DIR = os.path.dirname(os.path.abspath(os.path.dirname(__file__))) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ -39,7 +43,6 @@ def install(): See the Maya equivalent for inspiration on how to implement this. """ - from .. 
import get_resolve_module log.info("openpype.hosts.resolve installed") diff --git a/openpype/hosts/resolve/api/preload_console.py b/openpype/hosts/resolve/api/preload_console.py index 1e3a56b4dd..a822ea2460 100644 --- a/openpype/hosts/resolve/api/preload_console.py +++ b/openpype/hosts/resolve/api/preload_console.py @@ -1,9 +1,9 @@ #!/usr/bin/env python import time from openpype.hosts.resolve.utils import get_resolve_module -from openpype.api import Logger +from openpype.lib import Logger -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) wait_delay = 2.5 wait = 0.00 diff --git a/openpype/hosts/resolve/api/utils.py b/openpype/hosts/resolve/api/utils.py index 9b3762f328..871b3af38d 100644 --- a/openpype/hosts/resolve/api/utils.py +++ b/openpype/hosts/resolve/api/utils.py @@ -4,21 +4,21 @@ Resolve's tools for setting environment """ -import sys import os -import shutil -from . import HOST_DIR -from openpype.api import Logger -log = Logger().get_logger(__name__) +import sys + +from openpype.lib import Logger + +log = Logger.get_logger(__name__) def get_resolve_module(): - from openpype.hosts import resolve + from openpype.hosts.resolve import api # dont run if already loaded - if resolve.api.bmdvr: + if api.bmdvr: log.info(("resolve module is assigned to " - f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}")) - return resolve.api.bmdvr + f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}")) + return api.bmdvr try: """ The PYTHONPATH needs to be set correctly for this import @@ -71,79 +71,9 @@ def get_resolve_module(): # assign global var and return bmdvr = bmd.scriptapp("Resolve") bmdvf = bmd.scriptapp("Fusion") - resolve.api.bmdvr = bmdvr - resolve.api.bmdvf = bmdvf + api.bmdvr = bmdvr + api.bmdvf = bmdvf log.info(("Assigning resolve module to " - f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}")) + f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}")) log.info(("Assigning resolve module to " - f"`pype.hosts.resolve.api.bmdvf`: {resolve.api.bmdvf}")) - - -def _sync_utility_scripts(env=None): - """ Synchronizing basic utlility scripts for resolve. - - To be able to run scripts from inside `Resolve/Workspace/Scripts` menu - all scripts has to be accessible from defined folder. 
- """ - if not env: - env = os.environ - - # initiate inputs - scripts = {} - us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR") - us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") - us_paths = [os.path.join( - HOST_DIR, - "utility_scripts" - )] - - # collect script dirs - if us_env: - log.info(f"Utility Scripts Env: `{us_env}`") - us_paths = us_env.split( - os.pathsep) + us_paths - - # collect scripts from dirs - for path in us_paths: - scripts.update({path: os.listdir(path)}) - - log.info(f"Utility Scripts Dir: `{us_paths}`") - log.info(f"Utility Scripts: `{scripts}`") - - # make sure no script file is in folder - if next((s for s in os.listdir(us_dir)), None): - for s in os.listdir(us_dir): - path = os.path.join(us_dir, s) - log.info(f"Removing `{path}`...") - if os.path.isdir(path): - shutil.rmtree(path, onerror=None) - else: - os.remove(path) - - # copy scripts into Resolve's utility scripts dir - for d, sl in scripts.items(): - # directory and scripts list - for s in sl: - # script in script list - src = os.path.join(d, s) - dst = os.path.join(us_dir, s) - log.info(f"Copying `{src}` to `{dst}`...") - if os.path.isdir(src): - shutil.copytree( - src, dst, symlinks=False, - ignore=None, ignore_dangling_symlinks=False - ) - else: - shutil.copy2(src, dst) - - -def setup(env=None): - """ Wrapper installer started from pype.hooks.resolve.ResolvePrelaunch() - """ - if not env: - env = os.environ - - # synchronize resolve utility scripts - _sync_utility_scripts(env) - - log.info("Resolve OpenPype wrapper has been installed") + f"`pype.hosts.resolve.api.bmdvf`: {api.bmdvf}")) diff --git a/openpype/hosts/resolve/api/workio.py b/openpype/hosts/resolve/api/workio.py index f175769387..5a742ecf7e 100644 --- a/openpype/hosts/resolve/api/workio.py +++ b/openpype/hosts/resolve/api/workio.py @@ -2,14 +2,14 @@ import os from openpype.api import Logger -from .. import ( +from .lib import ( get_project_manager, get_current_project, set_project_manager_to_folder_name ) -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) exported_projet_ext = ".drp" @@ -60,7 +60,7 @@ def open_file(filepath): # load project from input path project = pm.LoadProject(fname) log.info(f"Project {project.GetName()} opened...") - return True + except AttributeError: log.warning((f"Project with name `{fname}` does not exist! 
It will " f"be imported from {filepath} and then loaded...")) @@ -69,9 +69,8 @@ def open_file(filepath): project = pm.LoadProject(fname) log.info(f"Project imported/loaded {project.GetName()}...") return True - else: - return False - + return False + return True def current_file(): pm = get_project_manager() @@ -80,13 +79,9 @@ def current_file(): name = project.GetName() fname = name + exported_projet_ext current_file = os.path.join(current_dir, fname) - normalised = os.path.normpath(current_file) - - # Unsaved current file - if normalised == "": + if not current_file: return None - - return normalised + return os.path.normpath(current_file) def work_root(session): diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index 978e3760fd..1d977e2d8e 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,7 +1,7 @@ import os -import importlib + from openpype.lib import PreLaunchHook -from openpype.hosts.resolve.api import utils +from openpype.hosts.resolve.utils import setup class ResolvePrelaunch(PreLaunchHook): @@ -43,18 +43,6 @@ class ResolvePrelaunch(PreLaunchHook): self.launch_context.env.get("PRE_PYTHON_SCRIPT", "")) self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...") - try: - __import__("openpype.hosts.resolve") - __import__("pyblish") - except ImportError: - self.log.warning( - "pyblish: Could not load Resolve integration.", - exc_info=True - ) - - else: - # Resolve Setup integration - importlib.reload(utils) - self.log.debug(f"-- utils.__file__: `{utils.__file__}`") - utils.setup(self.launch_context.env) + # Resolve Setup integration + setup(self.launch_context.env) diff --git a/openpype/hosts/resolve/plugins/create/create_shot_clip.py b/openpype/hosts/resolve/plugins/create/create_shot_clip.py index dbf10c5163..4b14f2493f 100644 --- a/openpype/hosts/resolve/plugins/create/create_shot_clip.py +++ b/openpype/hosts/resolve/plugins/create/create_shot_clip.py @@ -1,9 +1,12 @@ # from pprint import pformat -from openpype.hosts import resolve -from openpype.hosts.resolve.api import lib +from openpype.hosts.resolve.api import plugin, lib +from openpype.hosts.resolve.api.lib import ( + get_video_track_names, + create_bin, +) -class CreateShotClip(resolve.Creator): +class CreateShotClip(plugin.Creator): """Publishable clip""" label = "Create Publishable Clip" @@ -11,7 +14,7 @@ class CreateShotClip(resolve.Creator): icon = "film" defaults = ["Main"] - gui_tracks = resolve.get_video_track_names() + gui_tracks = get_video_track_names() gui_name = "OpenPype publish attributes creator" gui_info = "Define sequential rename and fill hierarchy data." 
gui_inputs = { @@ -250,7 +253,7 @@ class CreateShotClip(resolve.Creator): sq_markers = self.timeline.GetMarkers() # create media bin for compound clips (trackItems) - mp_folder = resolve.create_bin(self.timeline.GetName()) + mp_folder = create_bin(self.timeline.GetName()) kwargs = { "ui_inputs": widget.result, @@ -264,6 +267,6 @@ class CreateShotClip(resolve.Creator): self.rename_index = i self.log.info(track_item_data) # convert track item to timeline media pool item - track_item = resolve.PublishClip( + track_item = plugin.PublishClip( self, track_item_data, **kwargs).convert() track_item.SetClipColor(lib.publish_clip_color) diff --git a/openpype/hosts/resolve/plugins/load/load_clip.py b/openpype/hosts/resolve/plugins/load/load_clip.py index 190a5a7206..a0c78c182f 100644 --- a/openpype/hosts/resolve/plugins/load/load_clip.py +++ b/openpype/hosts/resolve/plugins/load/load_clip.py @@ -1,21 +1,22 @@ from copy import deepcopy -from importlib import reload from openpype.client import ( get_version_by_id, get_last_version_by_subset_id, ) -from openpype.hosts import resolve +# from openpype.hosts import resolve from openpype.pipeline import ( get_representation_path, legacy_io, ) from openpype.hosts.resolve.api import lib, plugin -reload(plugin) -reload(lib) +from openpype.hosts.resolve.api.pipeline import ( + containerise, + update_container, +) -class LoadClip(resolve.TimelineItemLoader): +class LoadClip(plugin.TimelineItemLoader): """Load a subset to timeline as clip Place clip to timeline on its asset origin timings collected @@ -46,7 +47,7 @@ class LoadClip(resolve.TimelineItemLoader): }) # load clip to timeline and get main variables - timeline_item = resolve.ClipLoader( + timeline_item = plugin.ClipLoader( self, context, **options).load() namespace = namespace or timeline_item.GetName() version = context['version'] @@ -80,7 +81,7 @@ class LoadClip(resolve.TimelineItemLoader): self.log.info("Loader done: `{}`".format(name)) - return resolve.containerise( + return containerise( timeline_item, name, namespace, context, self.__class__.__name__, @@ -98,7 +99,7 @@ class LoadClip(resolve.TimelineItemLoader): context.update({"representation": representation}) name = container['name'] namespace = container['namespace'] - timeline_item_data = resolve.get_pype_timeline_item_by_name(namespace) + timeline_item_data = lib.get_pype_timeline_item_by_name(namespace) timeline_item = timeline_item_data["clip"]["item"] project_name = legacy_io.active_project() version = get_version_by_id(project_name, representation["parent"]) @@ -109,7 +110,7 @@ class LoadClip(resolve.TimelineItemLoader): self.fname = get_representation_path(representation) context["version"] = {"data": version_data} - loader = resolve.ClipLoader(self, context) + loader = plugin.ClipLoader(self, context) timeline_item = loader.update(timeline_item) # add additional metadata from the version to imprint Avalon knob @@ -136,7 +137,7 @@ class LoadClip(resolve.TimelineItemLoader): # update color of clip regarding the version order self.set_item_color(timeline_item, version) - return resolve.update_container(timeline_item, data_imprint) + return update_container(timeline_item, data_imprint) @classmethod def set_item_color(cls, timeline_item, version): diff --git a/openpype/hosts/resolve/plugins/publish/extract_workfile.py b/openpype/hosts/resolve/plugins/publish/extract_workfile.py index e3d60465a2..ea8f19cd8c 100644 --- a/openpype/hosts/resolve/plugins/publish/extract_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/extract_workfile.py 
@@ -1,7 +1,7 @@ import os import pyblish.api import openpype.api -from openpype.hosts import resolve +from openpype.hosts.resolve.api.lib import get_project_manager class ExtractWorkfile(openpype.api.Extractor): @@ -29,7 +29,7 @@ class ExtractWorkfile(openpype.api.Extractor): os.path.join(staging_dir, drp_file_name)) # write out the drp workfile - resolve.get_project_manager().ExportProject( + get_project_manager().ExportProject( project.GetName(), drp_file_path) # create drp workfile representation diff --git a/openpype/hosts/resolve/plugins/publish/precollect_instances.py b/openpype/hosts/resolve/plugins/publish/precollect_instances.py index ee51998c0d..8ec169ad65 100644 --- a/openpype/hosts/resolve/plugins/publish/precollect_instances.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_instances.py @@ -1,9 +1,15 @@ -import pyblish -from openpype.hosts import resolve - -# # developer reload modules from pprint import pformat +import pyblish + +from openpype.hosts.resolve.api.lib import ( + get_current_timeline_items, + get_timeline_item_pype_tag, + publish_clip_color, + get_publish_attribute, + get_otio_clip_instance_data, +) + class PrecollectInstances(pyblish.api.ContextPlugin): """Collect all Track items selection.""" @@ -14,8 +20,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin): def process(self, context): otio_timeline = context.data["otioTimeline"] - selected_timeline_items = resolve.get_current_timeline_items( - filter=True, selecting_color=resolve.publish_clip_color) + selected_timeline_items = get_current_timeline_items( + filter=True, selecting_color=publish_clip_color) self.log.info( "Processing enabled track items: {}".format( @@ -27,7 +33,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): timeline_item = timeline_item_data["clip"]["item"] # get pype tag data - tag_data = resolve.get_timeline_item_pype_tag(timeline_item) + tag_data = get_timeline_item_pype_tag(timeline_item) self.log.debug(f"__ tag_data: {pformat(tag_data)}") if not tag_data: @@ -67,7 +73,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "asset": asset, "item": timeline_item, "families": families, - "publish": resolve.get_publish_attribute(timeline_item), + "publish": get_publish_attribute(timeline_item), "fps": context.data["fps"], "handleStart": handle_start, "handleEnd": handle_end, @@ -75,7 +81,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): }) # otio clip data - otio_data = resolve.get_otio_clip_instance_data( + otio_data = get_otio_clip_instance_data( otio_timeline, timeline_item_data) or {} data.update(otio_data) @@ -134,7 +140,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "asset": asset, "family": family, "families": [], - "publish": resolve.get_publish_attribute(timeline_item) + "publish": get_publish_attribute(timeline_item) }) context.create_instance(**data) diff --git a/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py b/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py index 3a16b9c966..8f3917bece 100644 --- a/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py +++ b/openpype/hosts/resolve/utility_scripts/OpenPype_sync_util_scripts.py @@ -6,10 +6,11 @@ from openpype.pipeline import install_host def main(env): - import openpype.hosts.resolve as bmdvr + from openpype.hosts.resolve.utils import setup + import openpype.hosts.resolve.api as bmdvr # Registers openpype's Global pyblish plugins install_host(bmdvr) - bmdvr.setup(env) + setup(env) if __name__ == "__main__": diff --git 
a/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py b/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py index 89ade9238b..1087a7b7a0 100644 --- a/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py +++ b/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py @@ -2,13 +2,13 @@ import os import sys from openpype.pipeline import install_host -from openpype.api import Logger +from openpype.lib import Logger -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def main(env): - import openpype.hosts.resolve as bmdvr + import openpype.hosts.resolve.api as bmdvr # activate resolve from openpype install_host(bmdvr) diff --git a/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py b/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py index 8433bd9172..92f2e43a72 100644 --- a/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py +++ b/openpype/hosts/resolve/utility_scripts/tests/test_otio_as_edl.py @@ -6,8 +6,8 @@ import opentimelineio as otio from openpype.pipeline import install_host -from openpype.hosts.resolve import TestGUI -import openpype.hosts.resolve as bmdvr +import openpype.hosts.resolve.api as bmdvr +from openpype.hosts.resolve.api.testing_utils import TestGUI from openpype.hosts.resolve.otio import davinci_export as otio_export diff --git a/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py b/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py index 477955d527..91a361ec08 100644 --- a/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py +++ b/openpype/hosts/resolve/utility_scripts/tests/testing_create_timeline_item_from_path.py @@ -2,11 +2,16 @@ import os import sys -from openpype.pipeline import install_host -from openpype.hosts.resolve import TestGUI -import openpype.hosts.resolve as bmdvr import clique +from openpype.pipeline import install_host +from openpype.hosts.resolve.api.testing_utils import TestGUI +import openpype.hosts.resolve.api as bmdvr +from openpype.hosts.resolve.api.lib import ( + create_media_pool_item, + create_timeline_item, +) + class ThisTestGUI(TestGUI): extensions = [".exr", ".jpg", ".mov", ".png", ".mp4", ".ari", ".arx"] @@ -55,10 +60,10 @@ class ThisTestGUI(TestGUI): # skip if unwanted extension if ext not in self.extensions: return - media_pool_item = bmdvr.create_media_pool_item(fpath) + media_pool_item = create_media_pool_item(fpath) print(media_pool_item) - track_item = bmdvr.create_timeline_item(media_pool_item) + track_item = create_timeline_item(media_pool_item) print(track_item) diff --git a/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py b/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py index 872d620162..2e83188bde 100644 --- a/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py +++ b/openpype/hosts/resolve/utility_scripts/tests/testing_load_media_pool_item.py @@ -1,13 +1,17 @@ #! 
python3 from openpype.pipeline import install_host -import openpype.hosts.resolve as bmdvr +from openpype.hosts.resolve import api as bmdvr +from openpype.hosts.resolve.api.lib import ( + create_media_pool_item, + create_timeline_item, +) def file_processing(fpath): - media_pool_item = bmdvr.create_media_pool_item(fpath) + media_pool_item = create_media_pool_item(fpath) print(media_pool_item) - track_item = bmdvr.create_timeline_item(media_pool_item) + track_item = create_timeline_item(media_pool_item) print(track_item) From a777238d83282e24c6238b92143b3a424ccde40d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Aug 2022 19:11:49 +0200 Subject: [PATCH 0331/2550] fix handling of host name in error message --- openpype/host/host.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/host/host.py b/openpype/host/host.py index 48907e7ec7..9cdbb819e1 100644 --- a/openpype/host/host.py +++ b/openpype/host/host.py @@ -19,8 +19,15 @@ class MissingMethodsError(ValueError): joined_missing = ", ".join( ['"{}"'.format(item) for item in missing_methods] ) + if isinstance(host, HostBase): + host_name = host.name + else: + try: + host_name = host.__file__.replace("\\", "/").split("/")[-3] + except Exception: + host_name = str(host) message = ( - "Host \"{}\" miss methods {}".format(host.name, joined_missing) + "Host \"{}\" miss methods {}".format(host_name, joined_missing) ) super(MissingMethodsError, self).__init__(message) From b3517a2da945b72b6b13eff292fa7d6ed63861b0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 09:47:21 +0200 Subject: [PATCH 0332/2550] Remove print statement --- openpype/tools/loader/model.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 6cb9ba2c6d..d9b1c708e0 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -564,7 +564,6 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): # This runs in the main thread because it involves the host DCC if self._host: time_since_refresh = time.time() - self._host_loaded_refresh_time - print(time_since_refresh) if time_since_refresh > self._host_loaded_refresh_timeout: repre_ids = {con.get("representation") for con in self._host.ls()} From ca40a71f5c33a1f557039661bbbdd8db5d22738b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 09:48:52 +0200 Subject: [PATCH 0333/2550] Reduce queries to get loaded subset ids --- openpype/tools/loader/model.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index d9b1c708e0..9d1f1e045c 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -483,24 +483,22 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): last_versions_by_subset_id[subset_id] = hero_version # Check loaded subsets - subsets_loaded_by_id = set() + loaded_subset_ids = set() ids = self._loaded_representation_ids if ids: if self._doc_fetching_stop: return - # Get subsets from representations + # Get subset ids from loaded representations in workfile # todo: optimize with aggregation query to distinct subset id representations = get_representations(project_name, representation_ids=ids, fields=["parent"]) - parents_by_repre_id = get_representations_parents( - project_name, - representations=representations - ) - for repre_parents in parents_by_repre_id.values(): - repre_subset = repre_parents[1] - 
subsets_loaded_by_id.add(repre_subset["_id"]) + version_ids = set(repre["parent"] for repre in representations) + versions = get_versions(project_name, + version_ids=version_ids, + fields=["parent"]) + loaded_subset_ids = set(version["parent"] for version in versions) if self._doc_fetching_stop: return @@ -528,7 +526,7 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "subset_families": subset_families, "last_versions_by_subset_id": last_versions_by_subset_id, "repre_info_by_version_id": repre_info, - "subsets_loaded_by_id": subsets_loaded_by_id + "subsets_loaded_by_id": loaded_subset_ids } self.doc_fetched.emit() From 5f0a8d700e5fc72570b70e56240b488819da3d43 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 11:22:10 +0200 Subject: [PATCH 0334/2550] Maya Redshift: Skip aov file format check for Cryptomatte --- .../plugins/publish/validate_rendersettings.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 1dab3274a0..feb6a16dac 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -180,14 +180,17 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): redshift_AOV_prefix )) invalid = True - # get aov format - aov_ext = cmds.getAttr( - "{}.fileFormat".format(aov), asString=True) - default_ext = cmds.getAttr( - "redshiftOptions.imageFormat", asString=True) + # check aov file format + aov_ext = cmds.getAttr("{}.fileFormat".format(aov)) + default_ext = cmds.getAttr("redshiftOptions.imageFormat") + aov_type = cmds.getAttr("{}.aovType".format(aov)) + if aov_type == "Cryptomatte": + # redshift Cryptomatte AOV always uses "Cryptomatte (EXR)" + # so we ignore validating file format for it. + pass - if default_ext != aov_ext: + elif default_ext != aov_ext: cls.log.error(("AOV file format is not the same " "as the one set globally " "{} != {}").format(default_ext, From d9e3815878b3868b18478b9dea2328d140bf2d92 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 12 Aug 2022 12:23:06 +0200 Subject: [PATCH 0335/2550] Refactored content of help, eg error message --- openpype/plugins/publish/help/validate_containers.xml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/help/validate_containers.xml b/openpype/plugins/publish/help/validate_containers.xml index e540c3c7a9..8424ee919c 100644 --- a/openpype/plugins/publish/help/validate_containers.xml +++ b/openpype/plugins/publish/help/validate_containers.xml @@ -3,9 +3,9 @@ Not up-to-date assets -## Obsolete containers found +## Outdated containers found -Scene contains one or more obsolete loaded containers, eg. items loaded into scene by Loader. +Scene contains one or more outdated loaded containers, eg. versions of items loaded into scene by Loader are not latest. ### How to repair? @@ -17,8 +17,7 @@ Use 'Scene Inventory' and update all highlighted old container to latest OR ### __Detailed Info__ (optional) -This validator protects you from rendering obsolete content, someone modified some referenced asset in this scene, eg. - by skipping this you would ignore changes to that asset. +This validates whether you're working with the latest versions of published content loaded into your scene. This protects you from using outdated versions of an asset. 
\ No newline at end of file From ec157e0a2a3a04aa18caf3135846ff3ad29486aa Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 18:34:12 +0800 Subject: [PATCH 0336/2550] fix the bug of failing to extract look when UDIM format used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 80d82a4f58..bf7f5bc757 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -436,6 +436,16 @@ class ExtractLook(openpype.api.Extractor): # set color space to raw if we linearized it color_space = "Raw" else: + + # if the files are unresolved from `source` + # assume color space from the first file of + # the resource + first_file = next(iter(resource.get("files", [])), None) + if not first_file: + # No files for this resource? Can this happen? Should this error? + continue + + filepath = os.path.normpath(first_file) # if the files are unresolved if files_metadata[filepath]["color_space"] == "Raw": # set color space to raw if we linearized it From 82c4f19979ea7055cb742c3321a0bcd9b2d5a73d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 18:36:05 +0800 Subject: [PATCH 0337/2550] fix the bug of failing to extract look when UDIM format used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index bf7f5bc757..8e09a564d0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -436,15 +436,12 @@ class ExtractLook(openpype.api.Extractor): # set color space to raw if we linearized it color_space = "Raw" else: - # if the files are unresolved from `source` # assume color space from the first file of # the resource first_file = next(iter(resource.get("files", [])), None) if not first_file: - # No files for this resource? Can this happen? Should this error? continue - filepath = os.path.normpath(first_file) # if the files are unresolved if files_metadata[filepath]["color_space"] == "Raw": From 312b6d3243ce66bc2e2749c964fd1b178369f9ec Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 13:00:41 +0200 Subject: [PATCH 0338/2550] :bug: fix finding of last version --- igniter/bootstrap_repos.py | 73 ++++++++++++++++++-------------------- start.py | 3 ++ 2 files changed, 38 insertions(+), 38 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 750b2f1bf7..73ef8283a7 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -514,6 +514,9 @@ class OpenPypeVersion(semver.VersionInfo): ValueError: if invalid path is specified. 
""" + installed_version = OpenPypeVersion.get_installed_version() + if not compatible_with: + compatible_with = installed_version _openpype_versions = [] if not openpype_dir.exists() and not openpype_dir.is_dir(): return _openpype_versions @@ -540,8 +543,7 @@ class OpenPypeVersion(semver.VersionInfo): )[0]: continue - if compatible_with and not detected_version.is_compatible( - compatible_with): + if not detected_version.is_compatible(compatible_with): continue detected_version.path = item @@ -610,6 +612,8 @@ class OpenPypeVersion(semver.VersionInfo): remote = True installed_version = OpenPypeVersion.get_installed_version() + if not compatible_with: + compatible_with = installed_version local_versions = [] remote_versions = [] if local: @@ -630,8 +634,7 @@ class OpenPypeVersion(semver.VersionInfo): all_versions.sort() latest_version: OpenPypeVersion latest_version = all_versions[-1] - if compatible_with and not latest_version.is_compatible( - compatible_with): + if not latest_version.is_compatible(compatible_with): return None return latest_version @@ -1153,10 +1156,12 @@ class BootstrapRepos: versions compatible with specified one. """ + installed_version = OpenPypeVersion.get_installed_version() + if not compatible_with: + compatible_with = installed_version if isinstance(version, str): version = OpenPypeVersion(version=version) - installed_version = OpenPypeVersion.get_installed_version() if installed_version == version: return installed_version @@ -1250,51 +1255,41 @@ class BootstrapRepos: ok install it as normal version. """ + installed_version = OpenPypeVersion.get_installed_version() + if not compatible_with: + compatible_with = installed_version if openpype_path and not isinstance(openpype_path, Path): raise NotImplementedError( ("Finding OpenPype in non-filesystem locations is" " not implemented yet.")) - version_dir = "" - if compatible_with: - version_dir = f"{compatible_with.major}.{compatible_with.minor}" + version_dir = f"{compatible_with.major}.{compatible_with.minor}" # if checks bellow for OPENPYPE_PATH and registry fails, use data_dir # DEPRECATED: lookup in root of this folder is deprecated in favour # of major.minor sub-folders. - dirs_to_search = [ - self.data_dir - ] - if compatible_with: - dirs_to_search.append(self.data_dir / version_dir) + dirs_to_search = [self.data_dir, self.data_dir / version_dir] if openpype_path: - dirs_to_search = [openpype_path] - - if compatible_with: - dirs_to_search.append(openpype_path / version_dir) - else: + dirs_to_search = [openpype_path, openpype_path / version_dir] + elif os.getenv("OPENPYPE_PATH") \ + and Path(os.getenv("OPENPYPE_PATH")).exists(): # first try OPENPYPE_PATH and if that is not available, # try registry. 
- if os.getenv("OPENPYPE_PATH") \ - and Path(os.getenv("OPENPYPE_PATH")).exists(): - dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))] + dirs_to_search = [Path(os.getenv("OPENPYPE_PATH")), + Path(os.getenv("OPENPYPE_PATH")) / version_dir] + else: + try: + registry_dir = Path( + str(self.registry.get_item("openPypePath"))) + if registry_dir.exists(): + dirs_to_search = [ + registry_dir, registry_dir / version_dir + ] - if compatible_with: - dirs_to_search.append( - Path(os.getenv("OPENPYPE_PATH")) / version_dir) - else: - try: - registry_dir = Path( - str(self.registry.get_item("openPypePath"))) - if registry_dir.exists(): - dirs_to_search = [registry_dir] - if compatible_with: - dirs_to_search.append(registry_dir / version_dir) - - except ValueError: - # nothing found in registry, we'll use data dir - pass + except ValueError: + # nothing found in registry, we'll use data dir + pass openpype_versions = [] for dir_to_search in dirs_to_search: @@ -1685,6 +1680,9 @@ class BootstrapRepos: ValueError: if invalid path is specified. """ + installed_version = OpenPypeVersion.get_installed_version() + if not compatible_with: + compatible_with = installed_version if not openpype_dir.exists() and not openpype_dir.is_dir(): raise ValueError(f"specified directory {openpype_dir} is invalid") @@ -1711,8 +1709,7 @@ class BootstrapRepos: ): continue - if compatible_with and \ - not detected_version.is_compatible(compatible_with): + if not detected_version.is_compatible(compatible_with): continue detected_version.path = item diff --git a/start.py b/start.py index 5cdffafb6e..c7bced20bd 100644 --- a/start.py +++ b/start.py @@ -629,6 +629,9 @@ def _determine_mongodb() -> str: def _initialize_environment(openpype_version: OpenPypeVersion) -> None: version_path = openpype_version.path + if not version_path: + _print(f"!!! Version {openpype_version} doesn't have path set.") + raise ValueError("No path set in specified OpenPype version.") os.environ["OPENPYPE_VERSION"] = str(openpype_version) # set OPENPYPE_REPOS_ROOT to point to currently used OpenPype version. os.environ["OPENPYPE_REPOS_ROOT"] = os.path.normpath( From 7526d4cfa5252b646469c79db782b1b4a04373ae Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 12 Aug 2022 13:34:37 +0200 Subject: [PATCH 0339/2550] Update openpype/plugins/publish/help/validate_containers.xml Co-authored-by: Roy Nieterau --- openpype/plugins/publish/help/validate_containers.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/help/validate_containers.xml b/openpype/plugins/publish/help/validate_containers.xml index 8424ee919c..5d18bb4c19 100644 --- a/openpype/plugins/publish/help/validate_containers.xml +++ b/openpype/plugins/publish/help/validate_containers.xml @@ -5,7 +5,7 @@ ## Outdated containers found -Scene contains one or more outdated loaded containers, eg. versions of items loaded into scene by Loader are not latest. +Scene contains one or more outdated loaded containers, eg. versions loaded into scene by Loader are not latest. ### How to repair? 
From 2cf01d8605e2588ce437579b55d409cf2027b452 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 13:44:13 +0200 Subject: [PATCH 0340/2550] Fix Scene Inventory select actions --- openpype/tools/sceneinventory/view.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index 63d181b2d6..e0e43aaba7 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -551,16 +551,16 @@ class SceneInventoryView(QtWidgets.QTreeView): "toggle": selection_model.Toggle, }[options.get("mode", "select")] - for item in iter_model_rows(model, 0): - item = item.data(InventoryModel.ItemRole) + for index in iter_model_rows(model, 0): + item = index.data(InventoryModel.ItemRole) if item.get("isGroupNode"): continue name = item.get("objectName") if name in object_names: - self.scrollTo(item) # Ensure item is visible + self.scrollTo(index) # Ensure item is visible flags = select_mode | selection_model.Rows - selection_model.select(item, flags) + selection_model.select(index, flags) object_names.remove(name) From 089cd3f9fa3587178c9fe73371b4470588b8467b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 13:55:00 +0200 Subject: [PATCH 0341/2550] added missing docstring for 'context_filters' argument --- openpype/client/entities.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index c798c0ad6d..67ddb09ddb 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1220,6 +1220,8 @@ def get_archived_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. + context_filters (Dict[str, List[str, re.Pattern]]): Filter by + representation context fields. names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering using version ids and list of names under the version. fields (Iterable[str]): Fields that should be returned. All fields are From aefb992ce55145f94790bbaa5cdbf17136684e1e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 13:55:35 +0200 Subject: [PATCH 0342/2550] removed unused 'is_context' property --- .../workfile/abstract_template_loader.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 1c8ede25e6..e2f9fdba0f 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -465,23 +465,6 @@ class AbstractPlaceholder: return self.data["loader"] - @property - def is_context(self): - """Check if is placeholder context type. - - context_asset: For loading current asset - linked_asset: For loading linked assets - - Question: - There seems to be more build options and this property is not used, - should be removed? - - Returns: - bool: true if placeholder is a context placeholder - """ - - return self.builder_type == "context_asset" - @property def is_valid(self): """Test validity of placeholder. 
From 32c2440e4a6d5d5ec3d54c2d1a44ffc5f0f81ae5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 13:55:55 +0200 Subject: [PATCH 0343/2550] fix docstring header --- openpype/pipeline/workfile/abstract_template_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index e2f9fdba0f..05a98a1ddc 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -456,7 +456,7 @@ class AbstractPlaceholder: @property def loader_name(self): - """Return placeholder loader type. + """Return placeholder loader name. Returns: str: Loader name that will be used to load placeholder From 551f34a873c89e739dc0b5d28a74eeec3f79dac2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 14:25:29 +0200 Subject: [PATCH 0344/2550] Add subsetGroup column to scene inventory --- openpype/tools/sceneinventory/model.py | 11 +++++++++-- openpype/tools/sceneinventory/window.py | 3 ++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index 63fbe04c5c..97cc11ff23 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -34,7 +34,8 @@ from .lib import ( class InventoryModel(TreeModel): """The model for the inventory""" - Columns = ["Name", "version", "count", "family", "loader", "objectName"] + Columns = ["Name", "version", "count", "family", + "subsetGroup", "loader", "objectName"] OUTDATED_COLOR = QtGui.QColor(235, 30, 30) CHILD_OUTDATED_COLOR = QtGui.QColor(200, 160, 30) @@ -157,8 +158,13 @@ class InventoryModel(TreeModel): # Family icon return item.get("familyIcon", None) + column_name = self.Columns[index.column()] + + if column_name == "subsetGroup" and item.get("subsetGroup"): + return qtawesome.icon("fa.object-group", + color=get_default_entity_icon_color()) + if item.get("isGroupNode"): - column_name = self.Columns[index.column()] if column_name == "active_site": provider = item.get("active_site_provider") return self._site_icons.get(provider) @@ -423,6 +429,7 @@ class InventoryModel(TreeModel): group_node["familyIcon"] = family_icon group_node["count"] = len(group_items) group_node["isGroupNode"] = True + group_node["subsetGroup"] = subset["data"].get("subsetGroup") if self.sync_enabled: progress = get_progress_for_repre( diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py index 054c2a2daa..02addbccfe 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -88,7 +88,8 @@ class SceneInventoryWindow(QtWidgets.QDialog): view.setColumnWidth(1, 55) # version view.setColumnWidth(2, 55) # count view.setColumnWidth(3, 150) # family - view.setColumnWidth(4, 100) # namespace + view.setColumnWidth(4, 120) # subsetGroup + view.setColumnWidth(5, 150) # loader # apply delegates version_delegate = VersionDelegate(legacy_io, self) From dc73bbdb13044d077b5576cd33ebc7b51597a70c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 20:49:34 +0800 Subject: [PATCH 0345/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- .../maya/plugins/publish/extract_look.py | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py 
index 8e09a564d0..991f44c74f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -430,22 +430,25 @@ class ExtractLook(openpype.api.Extractor): color_space = "Raw" else: # get all the resolved files in Maya File Path Editor - src = files_metadata.get(source) - if src: - if files_metadata[source]["color_space"] == "Raw": + metadata = files_metadata.get(source) + if metadata: + metadata = files_metadata[source] + if metadata["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" else: # if the files are unresolved from `source` # assume color space from the first file of # the resource - first_file = next(iter(resource.get("files", [])), None) - if not first_file: - continue - filepath = os.path.normpath(first_file) - # if the files are unresolved - if files_metadata[filepath]["color_space"] == "Raw": - # set color space to raw if we linearized it + metadata = files_metadata.get(source) + if not metadata: + first_file = next(iter(resource.get("files", [])), None) + if not first_file: + continue + first_filepath = os.path.normpath(first_file) + metadata = files_metadata[first_filepath] + if metadata["color_space"] == "Raw": + # set color space to raw if we linearized it color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space From fc65721838a90111c9137b45f062d1f51ad06c08 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 20:52:47 +0800 Subject: [PATCH 0346/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 991f44c74f..02957bb0ad 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -442,7 +442,8 @@ class ExtractLook(openpype.api.Extractor): # the resource metadata = files_metadata.get(source) if not metadata: - first_file = next(iter(resource.get("files", [])), None) + first_file = next(iter(resource.get( + "files", [])), None) if not first_file: continue first_filepath = os.path.normpath(first_file) From f5578cf664321d4c2488c2ac46dbb893f8822cf0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 20:57:18 +0800 Subject: [PATCH 0347/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 02957bb0ad..68d80de5b8 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all the resolved files in Maya File Path Editor + # get all the resolved files metadata = files_metadata.get(source) if metadata: metadata = files_metadata[source] From 0c72b8e278d3e0ac2af5b69ff09b115908a4b632 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 15:31:56 +0200 Subject: [PATCH 0348/2550] :recycle: refactor compatibility check --- igniter/bootstrap_repos.py | 143 +++++++++++++++++++------------------ igniter/install_thread.py | 19 ++++- 
openpype/version.py | 2 +- start.py | 24 +++---- 4 files changed, 104 insertions(+), 84 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 73ef8283a7..3a2dbe81c4 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -411,16 +411,7 @@ class OpenPypeVersion(semver.VersionInfo): # DEPRECATED: backwards compatible way to look for versions in root dir_to_search = Path(user_data_dir("openpype", "pypeclub")) - versions = OpenPypeVersion.get_versions_from_directory( - dir_to_search, compatible_with=compatible_with - ) - if compatible_with: - dir_to_search = Path( - user_data_dir("openpype", "pypeclub")) / f"{compatible_with.major}.{compatible_with.minor}" # noqa - versions += OpenPypeVersion.get_versions_from_directory( - dir_to_search, compatible_with=compatible_with - ) - + versions = OpenPypeVersion.get_versions_from_directory(dir_to_search) filtered_versions = [] for version in versions: @@ -498,14 +489,11 @@ class OpenPypeVersion(semver.VersionInfo): @staticmethod def get_versions_from_directory( - openpype_dir: Path, - compatible_with: OpenPypeVersion = None) -> List: + openpype_dir: Path) -> List: """Get all detected OpenPype versions in directory. Args: openpype_dir (Path): Directory to scan. - compatible_with (OpenPypeVersion): Return only versions compatible - with build version specified as OpenPypeVersion. Returns: list of OpenPypeVersion @@ -515,17 +503,27 @@ class OpenPypeVersion(semver.VersionInfo): """ installed_version = OpenPypeVersion.get_installed_version() - if not compatible_with: - compatible_with = installed_version - _openpype_versions = [] + openpype_versions = [] if not openpype_dir.exists() and not openpype_dir.is_dir(): - return _openpype_versions + return openpype_versions # iterate over directory in first level and find all that might # contain OpenPype. for item in openpype_dir.iterdir(): - - # if file, strip extension, in case of dir not. + # if the item is directory with major.minor version, dive deeper + try: + ver_dir = item.name.split(".")[ + 0] == installed_version.major and \ + item.name.split(".")[ + 1] == installed_version.minor # noqa: E051 + if item.is_dir() and ver_dir: + _versions = OpenPypeVersion.get_versions_from_directory( + item) + if _versions: + openpype_versions.append(_versions) + except IndexError: + pass + # if file exists, strip extension, in case of dir don't. name = item.name if item.is_dir() else item.stem result = OpenPypeVersion.version_in_str(name) @@ -543,13 +541,10 @@ class OpenPypeVersion(semver.VersionInfo): )[0]: continue - if not detected_version.is_compatible(compatible_with): - continue - detected_version.path = item - _openpype_versions.append(detected_version) + openpype_versions.append(detected_version) - return sorted(_openpype_versions) + return sorted(openpype_versions) @staticmethod def get_installed_version_str() -> str: @@ -577,15 +572,14 @@ class OpenPypeVersion(semver.VersionInfo): def get_latest_version( staging: bool = False, local: bool = None, - remote: bool = None, - compatible_with: OpenPypeVersion = None + remote: bool = None ) -> Union[OpenPypeVersion, None]: - """Get latest available version. + """Get the latest available version. The version does not contain information about path and source. - This is utility version to get latest version from all found. Build - version is not listed if staging is enabled. + This is utility version to get the latest version from all found. + Build version is not listed if staging is enabled. 
Arguments 'local' and 'remote' define if local and remote repository versions are used. All versions are used if both are not set (or set @@ -597,8 +591,9 @@ class OpenPypeVersion(semver.VersionInfo): staging (bool, optional): List staging versions if True. local (bool, optional): List local versions if True. remote (bool, optional): List remote versions if True. - compatible_with (OpenPypeVersion, optional) Return only version - compatible with compatible_with. + + Returns: + Latest OpenPypeVersion or None """ if local is None and remote is None: @@ -612,8 +607,6 @@ class OpenPypeVersion(semver.VersionInfo): remote = True installed_version = OpenPypeVersion.get_installed_version() - if not compatible_with: - compatible_with = installed_version local_versions = [] remote_versions = [] if local: @@ -633,10 +626,7 @@ class OpenPypeVersion(semver.VersionInfo): all_versions.sort() latest_version: OpenPypeVersion - latest_version = all_versions[-1] - if not latest_version.is_compatible(compatible_with): - return None - return latest_version + return all_versions[-1] @classmethod def get_expected_studio_version(cls, staging=False, global_settings=None): @@ -1191,13 +1181,27 @@ class BootstrapRepos: @staticmethod def find_latest_openpype_version( - staging, compatible_with: OpenPypeVersion = None): + staging: bool, + compatible_with: OpenPypeVersion = None + ) -> Union[OpenPypeVersion, None]: + """Find the latest available OpenPype version in all location. + + Args: + staging (bool): True to look for staging versions. + compatible_with (OpenPypeVersion, optional): If set, it will + try to find the latest version compatible with the + one specified. + + Returns: + Latest OpenPype version on None if nothing was found. + + """ installed_version = OpenPypeVersion.get_installed_version() local_versions = OpenPypeVersion.get_local_versions( - staging=staging, compatible_with=compatible_with + staging=staging ) remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging, compatible_with=compatible_with + staging=staging ) all_versions = local_versions + remote_versions if not staging: @@ -1206,6 +1210,12 @@ class BootstrapRepos: if not all_versions: return None + if compatible_with: + all_versions = [ + version for version in all_versions + if version.is_compatible(installed_version) + ] + all_versions.sort() latest_version = all_versions[-1] if latest_version == installed_version: @@ -1222,8 +1232,7 @@ class BootstrapRepos: self, openpype_path: Union[Path, str] = None, staging: bool = False, - include_zips: bool = False, - compatible_with: OpenPypeVersion = None + include_zips: bool = False ) -> Union[List[OpenPypeVersion], None]: """Get ordered dict of detected OpenPype version. @@ -1256,36 +1265,29 @@ class BootstrapRepos: """ installed_version = OpenPypeVersion.get_installed_version() - if not compatible_with: - compatible_with = installed_version if openpype_path and not isinstance(openpype_path, Path): raise NotImplementedError( ("Finding OpenPype in non-filesystem locations is" " not implemented yet.")) - version_dir = f"{compatible_with.major}.{compatible_with.minor}" - # if checks bellow for OPENPYPE_PATH and registry fails, use data_dir # DEPRECATED: lookup in root of this folder is deprecated in favour # of major.minor sub-folders. 
- dirs_to_search = [self.data_dir, self.data_dir / version_dir] + dirs_to_search = [self.data_dir] if openpype_path: - dirs_to_search = [openpype_path, openpype_path / version_dir] + dirs_to_search = [openpype_path] elif os.getenv("OPENPYPE_PATH") \ and Path(os.getenv("OPENPYPE_PATH")).exists(): # first try OPENPYPE_PATH and if that is not available, # try registry. - dirs_to_search = [Path(os.getenv("OPENPYPE_PATH")), - Path(os.getenv("OPENPYPE_PATH")) / version_dir] + dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))] else: try: registry_dir = Path( str(self.registry.get_item("openPypePath"))) if registry_dir.exists(): - dirs_to_search = [ - registry_dir, registry_dir / version_dir - ] + dirs_to_search = [registry_dir] except ValueError: # nothing found in registry, we'll use data dir @@ -1295,7 +1297,7 @@ class BootstrapRepos: for dir_to_search in dirs_to_search: try: openpype_versions += self.get_openpype_versions( - dir_to_search, staging, compatible_with=compatible_with) + dir_to_search, staging) except ValueError: # location is invalid, skip it pass @@ -1663,15 +1665,12 @@ class BootstrapRepos: def get_openpype_versions( self, openpype_dir: Path, - staging: bool = False, - compatible_with: OpenPypeVersion = None) -> list: + staging: bool = False) -> list: """Get all detected OpenPype versions in directory. Args: openpype_dir (Path): Directory to scan. staging (bool, optional): Find staging versions if True. - compatible_with (OpenPypeVersion, optional): Get only versions - compatible with the one specified. Returns: list of OpenPypeVersion @@ -1681,17 +1680,24 @@ class BootstrapRepos: """ installed_version = OpenPypeVersion.get_installed_version() - if not compatible_with: - compatible_with = installed_version if not openpype_dir.exists() and not openpype_dir.is_dir(): raise ValueError(f"specified directory {openpype_dir} is invalid") - _openpype_versions = [] + openpype_versions = [] # iterate over directory in first level and find all that might # contain OpenPype. for item in openpype_dir.iterdir(): - - # if file, strip extension, in case of dir not. + # if the item is directory with major.minor version, dive deeper + try: + ver_dir = item.name.split(".")[0] == installed_version.major and item.name.split(".")[1] == installed_version.minor # noqa: E051 + if item.is_dir() and ver_dir: + _versions = self.get_openpype_versions( + item, staging=staging) + if _versions: + openpype_versions.append(_versions) + except IndexError: + pass + # if it is file, strip extension, in case of dir don't. 
name = item.name if item.is_dir() else item.stem result = OpenPypeVersion.version_in_str(name) @@ -1709,17 +1715,14 @@ class BootstrapRepos: ): continue - if not detected_version.is_compatible(compatible_with): - continue - detected_version.path = item if staging and detected_version.is_staging(): - _openpype_versions.append(detected_version) + openpype_versions.append(detected_version) if not staging and not detected_version.is_staging(): - _openpype_versions.append(detected_version) + openpype_versions.append(detected_version) - return sorted(_openpype_versions) + return sorted(openpype_versions) class OpenPypeVersionExists(Exception): diff --git a/igniter/install_thread.py b/igniter/install_thread.py index 8e31f8cb8f..0cccf664e7 100644 --- a/igniter/install_thread.py +++ b/igniter/install_thread.py @@ -62,7 +62,7 @@ class InstallThread(QThread): progress_callback=self.set_progress, message=self.message) local_version = OpenPypeVersion.get_installed_version_str() - # if user did entered nothing, we install OpenPype from local version. + # if user did enter nothing, we install OpenPype from local version. # zip content of `repos`, copy it to user data dir and append # version to it. if not self._path: @@ -93,6 +93,23 @@ class InstallThread(QThread): detected = bs.find_openpype(include_zips=True) if detected: + if not OpenPypeVersion.get_installed_version().is_compatible( + detected[-1]): + self.message.emit(( + f"Latest detected version {detected[-1]} " + "is not compatible with the currently running " + f"{local_version}" + ), True) + self.message.emit(( + "Filtering detected versions to compatible ones..." + ), False) + + detected = [ + version for version in detected + if version.is_compatible( + OpenPypeVersion.get_installed_version()) + ] + if OpenPypeVersion( version=local_version, path=Path()) < detected[-1]: self.message.emit(( diff --git a/openpype/version.py b/openpype/version.py index c41e69d00d..d85f9f60ed 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- -"""Package declaring Pype version.""" +"""Package declaring OpenPype version.""" __version__ = "3.13.1-nightly.1" diff --git a/start.py b/start.py index c7bced20bd..52e98bb6e1 100644 --- a/start.py +++ b/start.py @@ -699,8 +699,7 @@ def _find_frozen_openpype(use_version: str = None, # Version says to use latest version _print(">>> Finding latest version defined by use version") openpype_version = bootstrap.find_latest_openpype_version( - use_staging, compatible_with=installed_version - ) + use_staging) else: _print(f">>> Finding specified version \"{use_version}\"") openpype_version = bootstrap.find_openpype_version( @@ -712,18 +711,11 @@ def _find_frozen_openpype(use_version: str = None, f"Requested version \"{use_version}\" was not found." 
) - if not openpype_version.is_compatible(installed_version): - raise OpenPypeVersionIncompatible(( - f"Requested version \"{use_version}\" is not compatible " - f"with installed version \"{installed_version}\"" - )) - elif studio_version is not None: # Studio has defined a version to use _print(f">>> Finding studio version \"{studio_version}\"") openpype_version = bootstrap.find_openpype_version( - studio_version, use_staging, compatible_with=installed_version - ) + studio_version, use_staging) if openpype_version is None: raise OpenPypeVersionNotFound(( "Requested OpenPype version " @@ -737,8 +729,8 @@ def _find_frozen_openpype(use_version: str = None, ">>> Finding latest version compatible " f"with [ {installed_version} ]")) openpype_version = bootstrap.find_latest_openpype_version( - use_staging, compatible_with=installed_version - ) + use_staging, compatible_with=installed_version) + if openpype_version is None: if use_staging: reason = "Didn't find any staging versions." @@ -756,6 +748,14 @@ def _find_frozen_openpype(use_version: str = None, _initialize_environment(openpype_version) return version_path + if not installed_version.is_compatible(openpype_version): + raise OpenPypeVersionIncompatible( + ( + f"Latest version found {openpype_version} is not " + f"compatible with currently running {installed_version}" + ) + ) + # test if latest detected is installed (in user data dir) is_inside = False try: From ffea3e85fee6058fd3fc38982d228c51f463645c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:34:30 +0800 Subject: [PATCH 0349/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- .../maya/plugins/publish/extract_look.py | 25 +++++++------------ 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 68d80de5b8..5ece5e2e1b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -431,24 +431,17 @@ class ExtractLook(openpype.api.Extractor): else: # get all the resolved files metadata = files_metadata.get(source) - if metadata: - metadata = files_metadata[source] - if metadata["color_space"] == "Raw": - # set color space to raw if we linearized it - color_space = "Raw" - else: - # if the files are unresolved from `source` - # assume color space from the first file of - # the resource - metadata = files_metadata.get(source) - if not metadata: - first_file = next(iter(resource.get( - "files", [])), None) - if not first_file: - continue + # if the files are unresolved from `source` + # assume color space from the first file of + # the resource + if not metadata: + first_file = next(iter(resource.get( + "files", [])), None) + if not first_file: + continue first_filepath = os.path.normpath(first_file) metadata = files_metadata[first_filepath] - if metadata["color_space"] == "Raw": + if metadata["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" # Remap file node filename to destination From 9b01e6e0326b4750c043da207adc2b8495a8ebce Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:36:40 +0800 Subject: [PATCH 0350/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py 
b/openpype/hosts/maya/plugins/publish/extract_look.py index 5ece5e2e1b..63a695cecf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -442,8 +442,8 @@ class ExtractLook(openpype.api.Extractor): first_filepath = os.path.normpath(first_file) metadata = files_metadata[first_filepath] if metadata["color_space"] == "Raw": - # set color space to raw if we linearized it - color_space = "Raw" + # set color space to raw if we linearized it + color_space = "Raw" # Remap file node filename to destination remap[color_space_attr] = color_space attr = resource["attribute"] From a9cee020b5f2044af533c06323c697162821624f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:38:45 +0800 Subject: [PATCH 0351/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 63a695cecf..95f319a924 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -431,7 +431,7 @@ class ExtractLook(openpype.api.Extractor): else: # get all the resolved files metadata = files_metadata.get(source) - # if the files are unresolved from `source` + # if the files are unresolved from `source` # assume color space from the first file of # the resource if not metadata: From c1d3d704106638e1d28ef338a958496790578c40 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 15:40:17 +0200 Subject: [PATCH 0352/2550] :rotating_light: fix hound :dog: --- igniter/bootstrap_repos.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 3a2dbe81c4..6a04198fc9 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1264,7 +1264,6 @@ class BootstrapRepos: ok install it as normal version. 
""" - installed_version = OpenPypeVersion.get_installed_version() if openpype_path and not isinstance(openpype_path, Path): raise NotImplementedError( ("Finding OpenPype in non-filesystem locations is" @@ -1689,7 +1688,7 @@ class BootstrapRepos: for item in openpype_dir.iterdir(): # if the item is directory with major.minor version, dive deeper try: - ver_dir = item.name.split(".")[0] == installed_version.major and item.name.split(".")[1] == installed_version.minor # noqa: E051 + ver_dir = item.name.split(".")[0] == installed_version.major and item.name.split(".")[1] == installed_version.minor # noqa: E501 if item.is_dir() and ver_dir: _versions = self.get_openpype_versions( item, staging=staging) From 85575e3a99f5618304fc41f5e73a117fe66abc0b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:40:40 +0800 Subject: [PATCH 0353/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 95f319a924..c9e41503da 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,7 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all the resolved files + # get all resolved files metadata = files_metadata.get(source) # if the files are unresolved from `source` # assume color space from the first file of From f9f275f6a0555c5e1250b6f2b19aa606ce2fb6e3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:47:08 +0800 Subject: [PATCH 0354/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index c9e41503da..93bfa8c913 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,6 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - # get all resolved files metadata = files_metadata.get(source) # if the files are unresolved from `source` # assume color space from the first file of From cd64ffb8f8a85b30edb4e7c01fb2d90d33bd77ba Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 12 Aug 2022 21:51:45 +0800 Subject: [PATCH 0355/2550] fix the bug of failing to extract look when UDIM formats used in AiImage --- openpype/hosts/maya/plugins/publish/extract_look.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 93bfa8c913..8be0c7aae5 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,9 +429,10 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: + # get the resolved files metadata = files_metadata.get(source) - # if the files are unresolved from `source` - # assume color space from the first file of + # if the files are unresolved from `source` + # assume color space from the first file of # the resource if not metadata: first_file = 
next(iter(resource.get( From ca424baf73db7d1df54d3faabacc032a9362b2c5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 16:39:20 +0200 Subject: [PATCH 0356/2550] Scene Inventory: Maya add actions to select from or to scene --- .../plugins/inventory/select_containers.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 openpype/hosts/maya/plugins/inventory/select_containers.py diff --git a/openpype/hosts/maya/plugins/inventory/select_containers.py b/openpype/hosts/maya/plugins/inventory/select_containers.py new file mode 100644 index 0000000000..4b7c92729f --- /dev/null +++ b/openpype/hosts/maya/plugins/inventory/select_containers.py @@ -0,0 +1,46 @@ +from maya import cmds + +from openpype.pipeline import InventoryAction, registered_host +from openpype.hosts.maya.api.lib import get_container_members + + +class SelectInScene(InventoryAction): + """Select nodes in the scene from selected containers in scene inventory""" + + label = "Select In Scene" + icon = "search" + color = "#888888" + order = 99 + + def process(self, containers): + + all_members = [] + for container in containers: + members = get_container_members(container) + all_members.extend(members) + cmds.select(all_members, replace=True, noExpand=True) + + +class SelectFromScene(InventoryAction): + """Select containers in scene inventory from the current scene selection""" + + label = "Select From Scene" + icon = "search" + color = "#888888" + order = 100 + + def process(self, containers): + + selection = set(cmds.ls(selection=True, long=True, objectsOnly=True)) + host = registered_host() + + to_select = [] + for container in host.ls(): + members = get_container_members(container) + if any(member in selection for member in members): + to_select.append(container["objectName"]) + + return { + "objectNames": to_select, + "options": {"clear": True} + } From 7176723aa5f8710ca422d9fd40577a6b85bc7b81 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 17:26:46 +0200 Subject: [PATCH 0357/2550] :bug: fix arguments and recursive folders --- igniter/bootstrap_repos.py | 44 +++++++++++++------------------------- 1 file changed, 15 insertions(+), 29 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 6a04198fc9..01d7c4bb7e 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -425,7 +425,7 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_remote_versions( cls, production: bool = None, - staging: bool = None, compatible_with: OpenPypeVersion = None + staging: bool = None ) -> List: """Get all versions available in OpenPype Path. @@ -470,13 +470,7 @@ class OpenPypeVersion(semver.VersionInfo): if not dir_to_search: return [] - # DEPRECATED: look for version in root directory - versions = cls.get_versions_from_directory( - dir_to_search, compatible_with=compatible_with) - if compatible_with: - dir_to_search = dir_to_search / f"{compatible_with.major}.{compatible_with.minor}" # noqa - versions += cls.get_versions_from_directory( - dir_to_search, compatible_with=compatible_with) + versions = cls.get_versions_from_directory(dir_to_search) filtered_versions = [] for version in versions: @@ -511,18 +505,13 @@ class OpenPypeVersion(semver.VersionInfo): # contain OpenPype. 
for item in openpype_dir.iterdir(): # if the item is directory with major.minor version, dive deeper - try: - ver_dir = item.name.split(".")[ - 0] == installed_version.major and \ - item.name.split(".")[ - 1] == installed_version.minor # noqa: E051 - if item.is_dir() and ver_dir: - _versions = OpenPypeVersion.get_versions_from_directory( - item) - if _versions: - openpype_versions.append(_versions) - except IndexError: - pass + + if item.is_dir() and re.match(r"^\d+\.\d+$", item.name): + _versions = OpenPypeVersion.get_versions_from_directory( + item) + if _versions: + openpype_versions += _versions + # if file exists, strip extension, in case of dir don't. name = item.name if item.is_dir() else item.stem result = OpenPypeVersion.version_in_str(name) @@ -1687,15 +1676,12 @@ class BootstrapRepos: # contain OpenPype. for item in openpype_dir.iterdir(): # if the item is directory with major.minor version, dive deeper - try: - ver_dir = item.name.split(".")[0] == installed_version.major and item.name.split(".")[1] == installed_version.minor # noqa: E501 - if item.is_dir() and ver_dir: - _versions = self.get_openpype_versions( - item, staging=staging) - if _versions: - openpype_versions.append(_versions) - except IndexError: - pass + if item.is_dir() and re.match(r"^\d+\.\d+$", item.name): + _versions = self.get_openpype_versions( + item, staging=staging) + if _versions: + openpype_versions += _versions + # if it is file, strip extension, in case of dir don't. name = item.name if item.is_dir() else item.stem result = OpenPypeVersion.version_in_str(name) From 7676827644c385991e35687b303032dd491e9361 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 17:27:47 +0200 Subject: [PATCH 0358/2550] initial commit of settings dev --- website/docs/dev_settings.md | 890 +++++++++++++++++++++++++++++++++++ website/sidebars.js | 1 + 2 files changed, 891 insertions(+) create mode 100644 website/docs/dev_settings.md diff --git a/website/docs/dev_settings.md b/website/docs/dev_settings.md new file mode 100644 index 0000000000..483bd18535 --- /dev/null +++ b/website/docs/dev_settings.md @@ -0,0 +1,890 @@ +--- +id: dev_settings +title: Settings +sidebar_label: Settings +--- + +Settings gives ability to change how OpenPype behaves in certain situations. Settings are split into 3 categories **system settings**, **project anatomy** and **project settings**. Project anatomy and project settings are in grouped into single category but there is a technical difference (explained later). Only difference in system and project settings is that system settings can't be technically handled on a project level or their values must be available no matter in which project are values received. Settings have headless entities or settings UI. + +There is one more category **local settings** but they don't have ability to be changed or defined easily. Local settings can change how settings work per machine, can affect both system and project settings but they're hardcoded for predefined values at this moment. + +## Settings schemas +System and project settings are defined by settings schemas. Schema define structure of output value, what value types output will contain, how settings are stored and how it's UI input will look. + +## Settings values +Output of settings is a json serializable value. There are 3 possible types of value **default values**, **studio overrides** and **project overrides**. Default values must be always available for all settings schemas, their values are stored to code. 
Default values is what everyone who just installed OpenPype will use as default values. It is good practice to set example values but they should be relevant. + +Setting overrides is what makes settings powerful tool. Overrides contain only a part of settings with additional metadata which describe which parts of settings values that should be replaced from overrides values. Using overrides gives ability to save only specific values and use default values for rest. It is super useful in project settings which have up to 2 levels of overrides. In project settings are used **default values** as base on which are applied **studio overrides** and then **project overrides**. In practice it is possible to save only studio overrides which affect all projects. Changes in studio overrides are then propagated to all projects without project overrides. But values can be locked on project level so studio overrides are not used. + +## Settings storage +As was mentined default values are stored into repository files. Overrides are stored to Mongo database. The value in mongo contain only overrides with metadata so their content on it's own is useless and must be used with combination of default values. System settings and project settings are stored into special collection. Single document represents one set of overrides with OpenPype version for which is stored. Settings are versioned and are loaded in specific order - current OpenPype version overrides or first lower available. If there are any overrides with same or lower version then first higher version is used. If there are any overrides then no overrides are applied. + +Project anatomy is stored into project document thus is not versioned and it's values are always overriden. Any changes in anatomy schema may have drastic effect on production and OpenPype updates. + +## Settings schema items +As was mentioned schema items define output type of values, how they are stored and how they look in UI. +- schemas are (by default) defined by a json files +- OpenPype core system settings schemas are stored in `~/openpype/settings/entities/schemas/system_schema/` and project settings in `~/openpype/settings/entities/schemas/projects_schema/` + - both contain `schema_main.json` which are entry points +- OpenPype modules/addons can define their settings schemas using `BaseModuleSettingsDef` in that case some functionality may be slightly modified +- single schema item is represented by dictionary (object) in json which has `"type"` key. 
+ - **type** is only common key which is required for all schema items +- each item may have "input modifiers" (other keys in dictionary) and they may be required or optional based on the type +- there are special keys across all items + - `"is_file"` - this key is used when defaults values are stored which define that this key is a filename where it's values are stored + - key is validated must be once in hierarchy else it won't be possible to store default values + - make sense to fill it only if it's value if `true` + - `"is_group"` - define that all values under a key in settings hierarchy will be overridden if any value is modified + - this key is not allowed for all inputs as they may not have technical ability to handle it + - key is validated can be only once in hierarchy and is automatically filled on last possible item if is not defined in schemas + - make sense to fill it only if it's value if `true` +- all entities can have set `"tooltip"` key with description which will be shown in UI on hover + +### Inner schema +Settings schemas are big json files which would became unmanageable if would be in single file. To be able to split them into multiple files to help organize them special types `schema` and `template` were added. Both types are relating to a different file by filename. If json file contains dictionary it is considered as `schema` if contains list it is considered as `template`. + +#### schema +Schema item is replaced by content of entered schema name. It is recommended that schema file is used only once in settings hierarchy. Templates are meant for reusing. +- schema must have `"name"` key which is name of schema that should be used + +```javascript +{ + "type": "schema", + "name": "my_schema_name" +} +``` + +#### template +Templates are almost the same as schema items but can contain one or more items which can be formatted with additional data or some keys can be skipped if needed. Templates are meant for reusing the same schemas with ability to modify content. 
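+
+Conceptually, filling a template with one entry of `template_data` just string-formats the values of the loaded items, while `__default_values__` only provides fallback formatting data. A simplified Python sketch of that idea (the helper name is made up and this is not the actual OpenPype implementation, the exact rules are listed below):
+
+```python
+def fill_template(template_items, template_data):
+    """Rough illustration of how one template instance gets filled."""
+    data = {}
+    items = []
+    for item in template_items:
+        # "__default_values__" is not a real item, it only feeds formatting data
+        if isinstance(item, dict) and "__default_values__" in item:
+            data.update(item["__default_values__"])
+        else:
+            items.append(item)
+    # Explicitly passed data wins over the defaults
+    data.update(template_data)
+
+    def _fill(value):
+        if isinstance(value, str):
+            # NOTE: the real resolver also keeps the original value type when
+            #   the whole string is a single placeholder (e.g. "{multipath_executables}")
+            return value.format(**data)
+        if isinstance(value, dict):
+            return {key: _fill(subvalue) for key, subvalue in value.items()}
+        if isinstance(value, list):
+            return [_fill(subvalue) for subvalue in value]
+        return value
+
+    return [_fill(item) for item in items]
+
+
+print(fill_template(
+    [
+        {"__default_values__": {"host_label": "Unknown host"}},
+        {"type": "text", "key": "{host_name}_executables", "label": "{host_label}"}
+    ],
+    {"host_name": "maya_2019", "host_label": "Maya 2019"}
+))
+# [{'type': 'text', 'key': 'maya_2019_executables', 'label': 'Maya 2019'}]
+```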
+ +- legacy name is `schema_template` (still usable) +- template must have `"name"` key which is name of template file that should be used +- to fill formatting keys use `"template_data"` +- all items in template, except `__default_values__`, will replace `template` item in original schema +- template may contain other templates + +```javascript +// Example template json file content +[ + { + // Define default values for formatting values + // - gives ability to set the value but have default value + "__default_values__": { + "multipath_executables": true + } + }, { + "type": "raw-json", + "label": "{host_label} Environments", + "key": "{host_name}_environments" + }, { + "type": "path", + "key": "{host_name}_executables", + "label": "{host_label} - Full paths to executables", + "multiplatform": "{multipath_executables}", + "multipath": true + } +] +``` +```javascript +// Example usage of the template in schema +{ + "type": "dict", + "key": "template_examples", + "label": "Schema template examples", + "children": [ + { + "type": "template", + "name": "example_template", + "template_data": [ + { + "host_label": "Maya 2019", + "host_name": "maya_2019", + "multipath_executables": false + }, + { + "host_label": "Maya 2020", + "host_name": "maya_2020" + }, + { + "host_label": "Maya 2021", + "host_name": "maya_2021" + } + ] + } + ] +} +``` +```javascript +// The same schema defined without templates +{ + "type": "dict", + "key": "template_examples", + "label": "Schema template examples", + "children": [ + { + "type": "raw-json", + "label": "Maya 2019 Environments", + "key": "maya_2019_environments" + }, { + "type": "path", + "key": "maya_2019_executables", + "label": "Maya 2019 - Full paths to executables", + "multiplatform": false, + "multipath": true + }, { + "type": "raw-json", + "label": "Maya 2020 Environments", + "key": "maya_2020_environments" + }, { + "type": "path", + "key": "maya_2020_executables", + "label": "Maya 2020 - Full paths to executables", + "multiplatform": true, + "multipath": true + }, { + "type": "raw-json", + "label": "Maya 2021 Environments", + "key": "maya_2021_environments" + }, { + "type": "path", + "key": "maya_2021_executables", + "label": "Maya 2021 - Full paths to executables", + "multiplatform": true, + "multipath": true + } + ] +} +``` + +Template data can be used only to fill templates in values but not in keys. It is also possible to define default values for unfilled fields to do so one of items in list must be dictionary with key `"__default_values__"` and value as dictionary with default key: values (as in example above). +```javascript +{ + ... + // Allowed + "key": "{to_fill}" + ... + // Not allowed + "{to_fill}": "value" + ... +} +``` + +Because formatting value can be only string it is possible to use formatting values which are replaced with different type. +```javascript +// Template data +{ + "template_data": { + "executable_multiplatform": { + "type": "schema", + "name": "my_multiplatform_schema" + } + } +} +// Template content +{ + ... + // Allowed - value is replaced with dictionary + "multiplatform": "{executable_multiplatform}" + ... + // Not allowed - there is no way how it could be replaced + "multiplatform": "{executable_multiplatform}_enhanced_string" + ... +} +``` + +#### dynamic_schema +Dynamic schema item marks a place in settings schema where schemas defined by `BaseModuleSettingsDef` can be placed. 
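+
+On the module side such a placeholder is typically backed by a `BaseModuleSettingsDef` subclass, for example the `JsonFilesSettingsDef` helper recommended at the end of this section. A minimal, hypothetical sketch of such a class follows (the import path, exact method signature and folder layout are assumptions, only the class and method names come from this section); the schema side of the placeholder is shown in the json example below:
+
+```python
+import os
+
+# NOTE: assumed import location, check the OpenPype modules package for the
+#   real one before using this
+from openpype.modules import JsonFilesSettingsDef
+
+CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class ExampleAddonSettingsDef(JsonFilesSettingsDef):
+    """Expose this addon's settings schemas and default values.
+
+    Schemas found under the returned root can target dynamic schema names
+    like "project_settings/global" and default values are stored to json
+    files handled by the base class.
+    """
+
+    def get_settings_root_path(self):
+        # Root directory where this addon keeps its settings schemas,
+        # the "settings" subfolder is an arbitrary choice for this sketch
+        return os.path.join(CURRENT_DIR, "settings")
+```
+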
+- example: +``` +{ + "type": "dynamic_schema", + "name": "project_settings/global" +} +``` +- `BaseModuleSettingsDef` with implemented `get_settings_schemas` can return a dictionary where key define a dynamic schema name and value schemas that will be put there +- dynamic schemas work almost the same way as templates + - one item can be replaced by multiple items (or by 0 items) +- goal is to dynamically load settings of OpenPype modules without having their schemas or default values in core repository + - values of these schemas are saved using the `BaseModuleSettingsDef` methods +- we recommend to use `JsonFilesSettingsDef` which has full implementation of storing default values to json files + - requires only to implement method `get_settings_root_path` which should return path to root directory where settings schema can be found and default values will be saved + +### Basic Dictionary inputs +These inputs wraps another inputs into {key: value} relation + +#### dict +- this is dictionary type wrapping more inputs with keys defined in schema +- may be used as dynamic children (e.g. in `list` or `dict-modifiable`) + - in that case the only key modifier is `children` which is list of it's keys + - USAGE: e.g. List of dictionaries where each dictionary have same structure. +- if is not used as dynamic children then must have defined `"key"` under which are it's values stored +- may be with or without `"label"` (only for GUI) + - `"label"` must be set to be able mark item as group with `"is_group"` key set to True +- item with label can visually wrap it's children + - this option is enabled by default to turn off set `"use_label_wrap"` to `False` + - label wrap is by default collapsible + - that can be set with key `"collapsible"` to `True`/`False` + - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) + - it is possible to add lighter background with `"highlight_content"` (Default: `False`) + - lighter background has limits of maximum applies after 3-4 nested highlighted items there is not much difference in the color + - output is dictionary `{the "key": children values}` +``` +# Example +{ + "key": "applications", + "type": "dict", + "label": "Applications", + "collapsible": true, + "highlight_content": true, + "is_group": true, + "is_file": true, + "children": [ + ...ITEMS... + ] +} + +# Without label +{ + "type": "dict", + "key": "global", + "children": [ + ...ITEMS... + ] +} + +# When used as widget +{ + "type": "list", + "key": "profiles", + "label": "Profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, { + "key": "hosts", + "label": "Hosts", + "type": "list", + "object_type": "text" + } + ... 
+ ] + } +} +``` + +#### dict-roots +- entity can be used only in Project settings +- keys of dictionary are based on current project roots +- they are not updated "live" it is required to save root changes and then + modify values on this entity + # TODO do live updates +``` +{ + "type": "dict-roots", + "key": "roots", + "label": "Roots", + "object_type": { + "type": "path", + "multiplatform": true, + "multipath": false + } +} +``` + +#### dict-conditional +- is similar to `dict` but has always available one enum entity + - the enum entity has single selection and it's value define other children entities +- each value of enumerator have defined children that will be used + - there is no way how to have shared entities across multiple enum items +- value from enumerator is also stored next to other values + - to define the key under which will be enum value stored use `enum_key` + - `enum_key` must match key regex and any enum item can't have children with same key + - `enum_label` is label of the entity for UI purposes +- enum items are define with `enum_children` + - it's a list where each item represents single item for the enum + - all items in `enum_children` must have at least `key` key which represents value stored under `enum_key` + - enum items can define `label` for UI purposes + - most important part is that item can define `children` key where are definitions of it's children (`children` value works the same way as in `dict`) +- to set default value for `enum_key` set it with `enum_default` +- entity must have defined `"label"` if is not used as widget +- is set as group if any parent is not group (can't have children as group) +- may be with or without `"label"` (only for GUI) + - `"label"` must be set to be able mark item as group with `"is_group"` key set to True +- item with label can visually wrap it's children + - this option is enabled by default to turn off set `"use_label_wrap"` to `False` + - label wrap is by default collapsible + - that can be set with key `"collapsible"` to `True`/`False` + - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) + - it is possible to add lighter background with `"highlight_content"` (Default: `False`) + - lighter background has limits of maximum applies after 3-4 nested highlighted items there is not much difference in the color +- for UI porposes was added `enum_is_horizontal` which will make combobox appear next to children inputs instead of on top of them (Default: `False`) + - this has extended ability of `enum_on_right` which will move combobox to right side next to children widgets (Default: `False`) +- output is dictionary `{the "key": children values}` +- using this type as template item for list type can be used to create infinite hierarchies + +``` +# Example +{ + "type": "dict-conditional", + "key": "my_key", + "label": "My Key", + "enum_key": "type", + "enum_label": "label", + "enum_children": [ + # Each item must be a dictionary with 'key' + { + "key": "action", + "label": "Action", + "children": [ + { + "type": "text", + "key": "key", + "label": "Key" + }, + { + "type": "text", + "key": "label", + "label": "Label" + }, + { + "type": "text", + "key": "command", + "label": "Comand" + } + ] + }, + { + "key": "menu", + "label": "Menu", + "children": [ + { + "key": "children", + "label": "Children", + "type": "list", + "object_type": "text" + } + ] + }, + { + # Separator does not have children as "separator" value is enough + "key": "separator", + "label": "Separator" + 
} + ] +} +``` + +How output of the schema could look like on save: +``` +{ + "type": "separator" +} + +{ + "type": "action", + "key": "action_1", + "label": "Action 1", + "command": "run command -arg" +} + +{ + "type": "menu", + "children": [ + "child_1", + "child_2" + ] +} +``` + +### Inputs for setting any kind of value (`Pure` inputs) +- all inputs must have defined `"key"` if are not used as dynamic item + - they can also have defined `"label"` + +#### boolean +- simple checkbox, nothing more to set +``` +{ + "type": "boolean", + "key": "my_boolean_key", + "label": "Do you want to use Pype?" +} +``` + +#### number +- number input, can be used for both integer and float + - key `"decimal"` defines how many decimal places will be used, 0 is for integer input (Default: `0`) + - key `"minimum"` as minimum allowed number to enter (Default: `-99999`) + - key `"maxium"` as maximum allowed number to enter (Default: `99999`) +- key `"steps"` will change single step value of UI inputs (using arrows and wheel scroll) +- for UI it is possible to show slider to enable this option set `show_slider` to `true` +``` +{ + "type": "number", + "key": "fps", + "label": "Frame rate (FPS)" + "decimal": 2, + "minimum": 1, + "maximum": 300000 +} +``` + +``` +{ + "type": "number", + "key": "ratio", + "label": "Ratio" + "decimal": 3, + "minimum": 0, + "maximum": 1, + "show_slider": true +} +``` + +#### text +- simple text input + - key `"multiline"` allows to enter multiple lines of text (Default: `False`) + - key `"placeholder"` allows to show text inside input when is empty (Default: `None`) + +``` +{ + "type": "text", + "key": "deadline_pool", + "label": "Deadline pool" +} +``` + +#### path-input +- Do not use this input in schema please (use `path` instead) +- this input is implemented to add additional features to text input +- this is meant to be used in proxy input `path` + +#### raw-json +- a little bit enhanced text input for raw json +- can store dictionary (`{}`) or list (`[]`) but not both + - by default stores dictionary to change it to list set `is_list` to `True` +- has validations of json format +- output can be stored as string + - this is to allow any keys in dictionary + - set key `store_as_string` to `true` + - code using that setting must expected that value is string and use json module to convert it to python types + +``` +{ + "type": "raw-json", + "key": "profiles", + "label": "Extract Review profiles", + "is_list": true +} +``` + +#### enum +- enumeration of values that are predefined in schema +- multiselection can be allowed with setting key `"multiselection"` to `True` (Default: `False`) +- values are defined under value of key `"enum_items"` as list + - each item in list is simple dictionary where value is label and key is value which will be stored + - should be possible to enter single dictionary if order of items doesn't matter +- it is possible to set default selected value/s with `default` attribute + - it is recommended to use this option only in single selection mode + - at the end this option is used only when defying default settings value or in dynamic items + +``` +{ + "key": "tags", + "label": "Tags", + "type": "enum", + "multiselection": true, + "enum_items": [ + {"burnin": "Add burnins"}, + {"ftrackreview": "Add to Ftrack"}, + {"delete": "Delete output"}, + {"slate-frame": "Add slate frame"}, + {"no-handles": "Skip handle frames"} + ] +} +``` + +#### anatomy-templates-enum +- enumeration of all available anatomy template keys +- have only single selection mode +- it is 
possible to define default value `default` + - `"work"` is used if default value is not specified +- enum values are not updated on the fly it is required to save templates and + reset settings to recache values +``` +{ + "key": "host", + "label": "Host name", + "type": "anatomy-templates-enum", + "default": "publish" +} +``` + +#### hosts-enum +- enumeration of available hosts +- multiselection can be allowed with setting key `"multiselection"` to `True` (Default: `False`) +- it is possible to add empty value (represented with empty string) with setting `"use_empty_value"` to `True` (Default: `False`) +- it is possible to set `"custom_labels"` for host names where key `""` is empty value (Default: `{}`) +- to filter host names it is required to define `"hosts_filter"` which is list of host names that will be available + - do not pass empty string if `use_empty_value` is enabled + - ignoring host names would be more dangerous in some cases +``` +{ + "key": "host", + "label": "Host name", + "type": "hosts-enum", + "multiselection": false, + "use_empty_value": true, + "custom_labels": { + "": "N/A", + "nuke": "Nuke" + }, + "hosts_filter": [ + "nuke" + ] +} +``` + +#### apps-enum +- enumeration of available application and their variants from system settings + - applications without host name are excluded +- can be used only in project settings +- has only `multiselection` +- used only in project anatomy +``` +{ + "type": "apps-enum", + "key": "applications", + "label": "Applications" +} +``` + +#### tools-enum +- enumeration of available tools and their variants from system settings +- can be used only in project settings +- has only `multiselection` +- used only in project anatomy +``` +{ + "type": "tools-enum", + "key": "tools_env", + "label": "Tools" +} +``` + +#### task-types-enum +- enumeration of task types from current project +- enum values are not updated on the fly and modifications of task types on project require save and reset to be propagated to this enum +- has set `multiselection` to `True` but can be changed to `False` in schema + +#### deadline_url-enum +- deadline module specific enumerator using deadline system settings to fill it's values +- TODO: move this type to deadline module + +### Inputs for setting value using Pure inputs +- these inputs also have required `"key"` +- attribute `"label"` is required in few conditions + - when item is marked `as_group` or when `use_label_wrap` +- they use Pure inputs "as widgets" + +#### list +- output is list +- items can be added and removed +- items in list must be the same type +- to wrap item in collapsible widget with label on top set `use_label_wrap` to `True` + - when this is used `collapsible` and `collapsed` can be set (same as `dict` item does) +- type of items is defined with key `"object_type"` +- there are 2 possible ways how to set the type: + 1.) dictionary with item modifiers (`number` input has `minimum`, `maximum` and `decimals`) in that case item type must be set as value of `"type"` (example below) + 2.) item type name as string without modifiers (e.g. `text`) + 3.) enhancement of 1.) there is also support of `template` type but be carefull about endless loop of templates + - goal of using `template` is to easily change same item definitions in multiple lists + +1.) with item modifiers +``` +{ + "type": "list", + "key": "exclude_ports", + "label": "Exclude ports", + "object_type": { + "type": "number", # number item type + "minimum": 1, # minimum modifier + "maximum": 65535 # maximum modifier + } +} +``` + +2.) 
without modifiers +``` +{ + "type": "list", + "key": "exclude_ports", + "label": "Exclude ports", + "object_type": "text" +} +``` + +3.) with template definition +``` +# Schema of list item where template is used +{ + "type": "list", + "key": "menu_items", + "label": "Menu Items", + "object_type": { + "type": "template", + "name": "template_object_example" + } +} + +# WARNING: +# In this example the template use itself inside which will work in `list` +# but may cause an issue in other entity types (e.g. `dict`). + +'template_object_example.json' : +[ + { + "type": "dict-conditional", + "use_label_wrap": true, + "collapsible": true, + "key": "menu_items", + "label": "Menu items", + "enum_key": "type", + "enum_label": "Type", + "enum_children": [ + { + "key": "action", + "label": "Action", + "children": [ + { + "type": "text", + "key": "key", + "label": "Key" + } + ] + }, { + "key": "menu", + "label": "Menu", + "children": [ + { + "key": "children", + "label": "Children", + "type": "list", + "object_type": { + "type": "template", + "name": "template_object_example" + } + } + ] + } + ] + } +] +``` + +#### dict-modifiable +- one of dictionary inputs, this is only used as value input +- items in this input can be removed and added same way as in `list` input +- value items in dictionary must be the same type +- type of items is defined with key `"object_type"` +- required keys may be defined under `"required_keys"` + - required keys must be defined as a list (e.g. `["key_1"]`) and are moved to the top + - these keys can't be removed or edited (it is possible to edit label if item is collapsible) +- there are 2 possible ways how to set the type: + 1.) dictionary with item modifiers (`number` input has `minimum`, `maximum` and `decimals`) in that case item type must be set as value of `"type"` (example below) + 2.) item type name as string without modifiers (e.g. `text`) +- this input can be collapsible + - that can be set with key `"collapsible"` as `True`/`False` (Default: `True`) + - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) + +1.) with item modifiers +``` +{ + "type": "dict-modifiable", + "object_type": { + "type": "number", + "minimum": 0, + "maximum": 300 + }, + "is_group": true, + "key": "templates_mapping", + "label": "Muster - Templates mapping", + "is_file": true +} +``` + +2.) 
without modifiers +``` +{ + "type": "dict-modifiable", + "object_type": "text", + "is_group": true, + "key": "templates_mapping", + "label": "Muster - Templates mapping", + "is_file": true +} +``` + +#### path +- input for paths, use `path-input` internally +- has 2 input modifiers `"multiplatform"` and `"multipath"` + - `"multiplatform"` - adds `"windows"`, `"linux"` and `"darwin"` path inputs result is dictionary + - `"multipath"` - it is possible to enter multiple paths + - if both are enabled result is dictionary with lists + +``` +{ + "type": "path", + "key": "ffmpeg_path", + "label": "FFmpeg path", + "multiplatform": true, + "multipath": true +} +``` + +#### list-strict +- input for strict number of items in list +- each child item can be different type with different possible modifiers +- it is possible to display them in horizontal or vertical layout + - key `"horizontal"` as `True`/`False` (Default: `True`) +- each child may have defined `"label"` which is shown next to input + - label does not reflect modifications or overrides (TODO) +- children item are defined under key `"object_types"` which is list of dictionaries + - key `"children"` is not used because is used for hierarchy validations in schema +- USAGE: For colors, transformations, etc. Custom number and different modifiers + give ability to define if color is HUE or RGB, 0-255, 0-1, 0-100 etc. + +``` +{ + "type": "list-strict", + "key": "color", + "label": "Color", + "object_types": [ + { + "label": "Red", + "type": "number", + "minimum": 0, + "maximum": 255, + "decimal": 0 + }, { + "label": "Green", + "type": "number", + "minimum": 0, + "maximum": 255, + "decimal": 0 + }, { + "label": "Blue", + "type": "number", + "minimum": 0, + "maximum": 255, + "decimal": 0 + }, { + "label": "Alpha", + "type": "number", + "minimum": 0, + "maximum": 1, + "decimal": 6 + } + ] +} +``` + +#### color +- preimplemented entity to store and load color values +- entity store and expect list of 4 integers in range 0-255 + - integers represents rgba [Red, Green, Blue, Alpha] + +``` +{ + "type": "color", + "key": "bg_color", + "label": "Background Color" +} +``` + +### Noninteractive items +Items used only for UI purposes. + +#### label +- add label with note or explanations +- it is possible to use html tags inside the label +- set `work_wrap` to `true`/`false` if you want to enable word wrapping in UI (default: `false`) + +``` +{ + "type": "label", + "label": "RED LABEL: Normal label" +} +``` + +#### separator +- legacy name is `splitter` (still usable) +- visual separator of items (more divider than separator) + +``` +{ + "type": "separator" +} +``` + +### Anatomy +Anatomy represents data stored on project document. + +#### anatomy +- entity works similarly to `dict` +- anatomy has always all keys overridden with overrides + - overrides are not applied as all anatomy data must be available from project document + - all children must be groups + +### Proxy wrappers +- should wraps multiple inputs only visually +- these does not have `"key"` key and do not allow to have `"is_file"` or `"is_group"` modifiers enabled +- can't be used as widget (first item in e.g. `list`, `dict-modifiable`, etc.) + +#### form +- wraps inputs into form look layout +- should be used only for Pure inputs + +``` +{ + "type": "dict-form", + "children": [ + { + "type": "text", + "key": "deadline_department", + "label": "Deadline apartment" + }, { + "type": "number", + "key": "deadline_priority", + "label": "Deadline priority" + }, { + ... 
+ } + ] +} +``` + + +#### collapsible-wrap +- wraps inputs into collapsible widget + - looks like `dict` but does not hold `"key"` +- should be used only for Pure inputs + +``` +{ + "type": "collapsible-wrap", + "label": "Collapsible example" + "children": [ + { + "type": "text", + "key": "_example_input_collapsible", + "label": "Example input in collapsible wrapper" + }, { + ... + } + ] +} diff --git a/website/sidebars.js b/website/sidebars.js index 9d60a5811c..b7b44bbada 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -152,6 +152,7 @@ module.exports = { "dev_build", "dev_testing", "dev_contribute", + "dev_settings", { type: "category", label: "Hosts integrations", From 60ea9728f63afa2c0ec2c32bd619fec8e64993ec Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 17:43:02 +0200 Subject: [PATCH 0359/2550] :rotating_light: fix hound :dog: --- igniter/bootstrap_repos.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 01d7c4bb7e..3dab67ebf1 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -496,7 +496,6 @@ class OpenPypeVersion(semver.VersionInfo): ValueError: if invalid path is specified. """ - installed_version = OpenPypeVersion.get_installed_version() openpype_versions = [] if not openpype_dir.exists() and not openpype_dir.is_dir(): return openpype_versions @@ -1667,7 +1666,6 @@ class BootstrapRepos: ValueError: if invalid path is specified. """ - installed_version = OpenPypeVersion.get_installed_version() if not openpype_dir.exists() and not openpype_dir.is_dir(): raise ValueError(f"specified directory {openpype_dir} is invalid") From c3b69e86d4a64d63865dbdb55a06b3a1f7a67c38 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 17:52:33 +0200 Subject: [PATCH 0360/2550] small tweaks --- website/docs/dev_settings.md | 64 ++++++++++++++++++++---------------- 1 file changed, 35 insertions(+), 29 deletions(-) diff --git a/website/docs/dev_settings.md b/website/docs/dev_settings.md index 483bd18535..cb16ae76ca 100644 --- a/website/docs/dev_settings.md +++ b/website/docs/dev_settings.md @@ -214,7 +214,7 @@ These inputs wraps another inputs into {key: value} relation #### dict - this is dictionary type wrapping more inputs with keys defined in schema -- may be used as dynamic children (e.g. in `list` or `dict-modifiable`) +- may be used as dynamic children (e.g. in [list](#list) or [dict-modifiable](#dict-modifiable)) - in that case the only key modifier is `children` which is list of it's keys - USAGE: e.g. List of dictionaries where each dictionary have same structure. - if is not used as dynamic children then must have defined `"key"` under which are it's values stored @@ -600,7 +600,7 @@ How output of the schema could look like on save: - type of items is defined with key `"object_type"` - there are 2 possible ways how to set the type: 1.) dictionary with item modifiers (`number` input has `minimum`, `maximum` and `decimals`) in that case item type must be set as value of `"type"` (example below) - 2.) item type name as string without modifiers (e.g. `text`) + 2.) item type name as string without modifiers (e.g. [text](#text)) 3.) enhancement of 1.) 
there is also support of `template` type but be carefull about endless loop of templates - goal of using `template` is to easily change same item definitions in multiple lists @@ -690,18 +690,31 @@ How output of the schema could look like on save: - one of dictionary inputs, this is only used as value input - items in this input can be removed and added same way as in `list` input - value items in dictionary must be the same type -- type of items is defined with key `"object_type"` - required keys may be defined under `"required_keys"` - required keys must be defined as a list (e.g. `["key_1"]`) and are moved to the top - these keys can't be removed or edited (it is possible to edit label if item is collapsible) -- there are 2 possible ways how to set the type: - 1.) dictionary with item modifiers (`number` input has `minimum`, `maximum` and `decimals`) in that case item type must be set as value of `"type"` (example below) - 2.) item type name as string without modifiers (e.g. `text`) +- type of items is defined with key `"object_type"` + - there are 2 possible ways how to set the object type (Examples below): + 1. just a type name as string without modifiers (e.g. `"text"`) + 2. full types with modifiers as dictionary(`number` input has `minimum`, `maximum` and `decimals`) in that case item type must be set as value of `"type"` - this input can be collapsible + - `"use_label_wrap"` must be set to `True` (Default behavior) - that can be set with key `"collapsible"` as `True`/`False` (Default: `True`) - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) -1.) with item modifiers +1. **Object type** without modifiers +``` +{ + "type": "dict-modifiable", + "object_type": "text", + "is_group": true, + "key": "templates_mapping", + "label": "Muster - Templates mapping", + "is_file": true +} +``` + +2. **Object type** with item modifiers ``` { "type": "dict-modifiable", @@ -717,22 +730,10 @@ How output of the schema could look like on save: } ``` -2.) without modifiers -``` -{ - "type": "dict-modifiable", - "object_type": "text", - "is_group": true, - "key": "templates_mapping", - "label": "Muster - Templates mapping", - "is_file": true -} -``` - #### path - input for paths, use `path-input` internally - has 2 input modifiers `"multiplatform"` and `"multipath"` - - `"multiplatform"` - adds `"windows"`, `"linux"` and `"darwin"` path inputs result is dictionary + - `"multiplatform"` - adds `"windows"`, `"linux"` and `"darwin"` path inputs (result is dictionary) - `"multipath"` - it is possible to enter multiple paths - if both are enabled result is dictionary with lists @@ -797,6 +798,8 @@ How output of the schema could look like on save: - preimplemented entity to store and load color values - entity store and expect list of 4 integers in range 0-255 - integers represents rgba [Red, Green, Blue, Alpha] +- has modifier `"use_alpha"` which can be `True`/`False` + - alpha is always `255` if set to `True` and alpha slider is not visible in UI ``` { @@ -806,6 +809,13 @@ How output of the schema could look like on save: } ``` +### Anatomy +Anatomy represents data stored on project document. Item cares about **Project Anatomy**. + +#### anatomy +- entity is just enhanced [dict](#dict) item +- anatomy has always all keys overridden with overrides + ### Noninteractive items Items used only for UI purposes. @@ -831,15 +841,6 @@ Items used only for UI purposes. } ``` -### Anatomy -Anatomy represents data stored on project document. 
- -#### anatomy -- entity works similarly to `dict` -- anatomy has always all keys overridden with overrides - - overrides are not applied as all anatomy data must be available from project document - - all children must be groups - ### Proxy wrappers - should wraps multiple inputs only visually - these does not have `"key"` key and do not allow to have `"is_file"` or `"is_group"` modifiers enabled @@ -888,3 +889,8 @@ Anatomy represents data stored on project document. } ] } +``` + + +## How to add new settings +Always start with modifying or adding new schema and don't worry about values. When you think schema is ready to use launch OpenPype settings in development mode using `poetry run python ./start.py settings --dev` or prepared script in `~/openpype/tools/run_settings(.sh|.ps1)`. Settings opened in development mode have checkbox `Modify defaults` available in bottom left corner. When checked default values are modified and saved on `Save`. This is recommended approach how default settings should be created instead of direct modification of files. From 8b94d746e5595caa42bafae6184663683fd8e4f4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 18:00:26 +0200 Subject: [PATCH 0361/2550] show outdated build dialog when expected version can't be used with current build --- openpype/tools/tray/pype_tray.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 4e5db06a92..2f3e1bcab3 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -10,19 +10,19 @@ from Qt import QtCore, QtGui, QtWidgets import openpype.version from openpype.api import ( - Logger, resources, get_system_settings ) -from openpype.lib import ( - get_openpype_execute_args, +from openpype.lib import get_openpype_execute_args, Logger +from openpype.lib.openpype_version import ( op_version_control_available, + get_expected_version, + get_installed_version, is_current_version_studio_latest, is_current_version_higher_than_expected, is_running_from_build, is_running_staging, - get_expected_version, - get_openpype_version + get_openpype_version, ) from openpype.modules import TrayModulesManager from openpype import style @@ -329,6 +329,21 @@ class TrayManager: self._version_dialog.close() return + installed_version = get_installed_version() + expected_version = get_expected_version() + + # Request new build if is needed + if not expected_version.is_compatible(installed_version): + if ( + self._version_dialog is not None + and self._version_dialog.isVisible() + ): + self._version_dialog.close() + + dialog = BuildVersionDialog() + dialog.exec_() + return + if self._version_dialog is None: self._version_dialog = VersionUpdateDialog() self._version_dialog.restart_requested.connect( @@ -338,7 +353,6 @@ class TrayManager: self._outdated_version_ignored ) - expected_version = get_expected_version() current_version = get_openpype_version() current_is_higher = is_current_version_higher_than_expected() From ad64c3a66e10c2c34ecd4fe3549f636ce5777959 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 18:02:08 +0200 Subject: [PATCH 0362/2550] added backwards compatibility for 'is_compatible' method --- openpype/tools/tray/pype_tray.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 2f3e1bcab3..85bc00ead6 100644 --- a/openpype/tools/tray/pype_tray.py +++ 
b/openpype/tools/tray/pype_tray.py @@ -333,7 +333,11 @@ class TrayManager: expected_version = get_expected_version() # Request new build if is needed - if not expected_version.is_compatible(installed_version): + if ( + # Backwards compatibility + not hasattr(expected_version, "is_compatible") + or not expected_version.is_compatible(installed_version) + ): if ( self._version_dialog is not None and self._version_dialog.isVisible() From a3ee45edd906aae36e09f1e40cf815d2c4f9605e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 18:14:23 +0200 Subject: [PATCH 0363/2550] Refactor host.ls() to host.get_containers() Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/maya/plugins/inventory/select_containers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/inventory/select_containers.py b/openpype/hosts/maya/plugins/inventory/select_containers.py index 4b7c92729f..13c2322bc0 100644 --- a/openpype/hosts/maya/plugins/inventory/select_containers.py +++ b/openpype/hosts/maya/plugins/inventory/select_containers.py @@ -35,7 +35,7 @@ class SelectFromScene(InventoryAction): host = registered_host() to_select = [] - for container in host.ls(): + for container in host.get_containers(): members = get_container_members(container) if any(member in selection for member in members): to_select.append(container["objectName"]) From 1469c471d15c5556e4f0b2f5d06f1e07dcc74724 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 18:23:12 +0200 Subject: [PATCH 0364/2550] added javascript notation --- website/docs/dev_settings.md | 74 ++++++++++++++++++------------------ 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/website/docs/dev_settings.md b/website/docs/dev_settings.md index cb16ae76ca..e5917c7549 100644 --- a/website/docs/dev_settings.md +++ b/website/docs/dev_settings.md @@ -195,7 +195,7 @@ Because formatting value can be only string it is possible to use formatting val #### dynamic_schema Dynamic schema item marks a place in settings schema where schemas defined by `BaseModuleSettingsDef` can be placed. 
- example: -``` +```javascript { "type": "dynamic_schema", "name": "project_settings/global" @@ -228,8 +228,8 @@ These inputs wraps another inputs into {key: value} relation - it is possible to add lighter background with `"highlight_content"` (Default: `False`) - lighter background has limits of maximum applies after 3-4 nested highlighted items there is not much difference in the color - output is dictionary `{the "key": children values}` -``` -# Example +```javascript +// Example { "key": "applications", "type": "dict", @@ -243,7 +243,7 @@ These inputs wraps another inputs into {key: value} relation ] } -# Without label +// Without label { "type": "dict", "key": "global", @@ -252,7 +252,7 @@ These inputs wraps another inputs into {key: value} relation ] } -# When used as widget +// When used as widget { "type": "list", "key": "profiles", @@ -283,7 +283,7 @@ These inputs wraps another inputs into {key: value} relation - they are not updated "live" it is required to save root changes and then modify values on this entity # TODO do live updates -``` +```javascript { "type": "dict-roots", "key": "roots", @@ -327,8 +327,8 @@ These inputs wraps another inputs into {key: value} relation - output is dictionary `{the "key": children values}` - using this type as template item for list type can be used to create infinite hierarchies -``` -# Example +```javascript +// Example { "type": "dict-conditional", "key": "my_key", @@ -336,7 +336,7 @@ These inputs wraps another inputs into {key: value} relation "enum_key": "type", "enum_label": "label", "enum_children": [ - # Each item must be a dictionary with 'key' + // Each item must be a dictionary with 'key' { "key": "action", "label": "Action", @@ -371,7 +371,7 @@ These inputs wraps another inputs into {key: value} relation ] }, { - # Separator does not have children as "separator" value is enough + // Separator does not have children as "separator" value is enough "key": "separator", "label": "Separator" } @@ -380,7 +380,7 @@ These inputs wraps another inputs into {key: value} relation ``` How output of the schema could look like on save: -``` +```javascript { "type": "separator" } @@ -407,7 +407,7 @@ How output of the schema could look like on save: #### boolean - simple checkbox, nothing more to set -``` +```javascript { "type": "boolean", "key": "my_boolean_key", @@ -422,7 +422,7 @@ How output of the schema could look like on save: - key `"maxium"` as maximum allowed number to enter (Default: `99999`) - key `"steps"` will change single step value of UI inputs (using arrows and wheel scroll) - for UI it is possible to show slider to enable this option set `show_slider` to `true` -``` +```javascript { "type": "number", "key": "fps", @@ -433,7 +433,7 @@ How output of the schema could look like on save: } ``` -``` +```javascript { "type": "number", "key": "ratio", @@ -450,7 +450,7 @@ How output of the schema could look like on save: - key `"multiline"` allows to enter multiple lines of text (Default: `False`) - key `"placeholder"` allows to show text inside input when is empty (Default: `None`) -``` +```javascript { "type": "text", "key": "deadline_pool", @@ -473,7 +473,7 @@ How output of the schema could look like on save: - set key `store_as_string` to `true` - code using that setting must expected that value is string and use json module to convert it to python types -``` +```javascript { "type": "raw-json", "key": "profiles", @@ -492,7 +492,7 @@ How output of the schema could look like on save: - it is recommended to use this option only in single 
selection mode - at the end this option is used only when defying default settings value or in dynamic items -``` +```javascript { "key": "tags", "label": "Tags", @@ -515,7 +515,7 @@ How output of the schema could look like on save: - `"work"` is used if default value is not specified - enum values are not updated on the fly it is required to save templates and reset settings to recache values -``` +```javascript { "key": "host", "label": "Host name", @@ -532,7 +532,7 @@ How output of the schema could look like on save: - to filter host names it is required to define `"hosts_filter"` which is list of host names that will be available - do not pass empty string if `use_empty_value` is enabled - ignoring host names would be more dangerous in some cases -``` +```javascript { "key": "host", "label": "Host name", @@ -555,7 +555,7 @@ How output of the schema could look like on save: - can be used only in project settings - has only `multiselection` - used only in project anatomy -``` +```javascript { "type": "apps-enum", "key": "applications", @@ -568,7 +568,7 @@ How output of the schema could look like on save: - can be used only in project settings - has only `multiselection` - used only in project anatomy -``` +```javascript { "type": "tools-enum", "key": "tools_env", @@ -605,7 +605,7 @@ How output of the schema could look like on save: - goal of using `template` is to easily change same item definitions in multiple lists 1.) with item modifiers -``` +```javascript { "type": "list", "key": "exclude_ports", @@ -619,7 +619,7 @@ How output of the schema could look like on save: ``` 2.) without modifiers -``` +```javascript { "type": "list", "key": "exclude_ports", @@ -629,8 +629,8 @@ How output of the schema could look like on save: ``` 3.) with template definition -``` -# Schema of list item where template is used +```javascript +// Schema of list item where template is used { "type": "list", "key": "menu_items", @@ -641,9 +641,9 @@ How output of the schema could look like on save: } } -# WARNING: -# In this example the template use itself inside which will work in `list` -# but may cause an issue in other entity types (e.g. `dict`). +// WARNING: +// In this example the template use itself inside which will work in `list` +// but may cause an issue in other entity types (e.g. `dict`). 'template_object_example.json' : [ @@ -703,7 +703,7 @@ How output of the schema could look like on save: - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) 1. **Object type** without modifiers -``` +```javascript { "type": "dict-modifiable", "object_type": "text", @@ -715,7 +715,7 @@ How output of the schema could look like on save: ``` 2. **Object type** with item modifiers -``` +```javascript { "type": "dict-modifiable", "object_type": { @@ -737,7 +737,7 @@ How output of the schema could look like on save: - `"multipath"` - it is possible to enter multiple paths - if both are enabled result is dictionary with lists -``` +```javascript { "type": "path", "key": "ffmpeg_path", @@ -759,7 +759,7 @@ How output of the schema could look like on save: - USAGE: For colors, transformations, etc. Custom number and different modifiers give ability to define if color is HUE or RGB, 0-255, 0-1, 0-100 etc. 
-``` +```javascript { "type": "list-strict", "key": "color", @@ -801,7 +801,7 @@ How output of the schema could look like on save: - has modifier `"use_alpha"` which can be `True`/`False` - alpha is always `255` if set to `True` and alpha slider is not visible in UI -``` +```javascript { "type": "color", "key": "bg_color", @@ -824,7 +824,7 @@ Items used only for UI purposes. - it is possible to use html tags inside the label - set `work_wrap` to `true`/`false` if you want to enable word wrapping in UI (default: `false`) -``` +```javascript { "type": "label", "label": "RED LABEL: Normal label" @@ -835,7 +835,7 @@ Items used only for UI purposes. - legacy name is `splitter` (still usable) - visual separator of items (more divider than separator) -``` +```javascript { "type": "separator" } @@ -850,7 +850,7 @@ Items used only for UI purposes. - wraps inputs into form look layout - should be used only for Pure inputs -``` +```javascript { "type": "dict-form", "children": [ @@ -875,7 +875,7 @@ Items used only for UI purposes. - looks like `dict` but does not hold `"key"` - should be used only for Pure inputs -``` +```javascript { "type": "collapsible-wrap", "label": "Collapsible example" From cbff5972a2b1c6aa7ebd9b19d519f122c955fe61 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Aug 2022 18:23:24 +0200 Subject: [PATCH 0365/2550] added screenshot of settings UI in dev mode --- website/docs/assets/settings_dev.png | Bin 0 -> 15237 bytes website/docs/dev_settings.md | 2 ++ 2 files changed, 2 insertions(+) create mode 100644 website/docs/assets/settings_dev.png diff --git a/website/docs/assets/settings_dev.png b/website/docs/assets/settings_dev.png new file mode 100644 index 0000000000000000000000000000000000000000..4d0359461e570163925bda6b3a2f5dd1c87c3e7b GIT binary patch literal 15237 zcmeHucT|&Un|BrgbC8of_j8x)zOLW(y9&`aER02W zAK488fkaKNUAYMY?VJIDe!BPTPrx@Oyc{Fo*N%{z#+N|lJu(#F!_QuZ=7u0pMZzBL z?Onj<--50=gn&R|EyAB2?a;S(K_J~DCRYrt@4HgR5s@w)*(&_xd~>>aQi|)%>+l`c z=MMVsdtIiKBDE|3(}yK%-5!wG`L9{~WDgrA@b$!YFXrz3nq@e3`m46Lv55Guwx3`A zRCAE|ZTFMsyIdZU2|o=2eIq%>QxcjTYNl#Bm(3e-&@mS$X`?6jH*X%X z-@GNjtcQyTxBeVowYuuE-D4HAdv~QIAe!u=`NC8ejxUQ|GR|H|L6XCVsiQ8<2rpiF zwq~7O64@yY1p%1k_ zp6Hl;sRS)2HL4+YfIy+yk1ltL>71IuIW>g(Xm%V>sa;8`-cMXvnK8CLXoTPM#&uVV zDM!%v>KKZQB0he^)M>IbOw8~crvKWrx$KH6i*#R|wHnA> z(|`^3xRov|2{^bVv+N7Tz&7QaLojW?N8|+a{t=bW5RVN_?~8@QL&L*|28P?E@UYw5 z)AE{ANsHOsf;|dxvYOgGOFs4qN$d>vIBAdA?;?{iDU(b`c5(zY53F5~^l3GMa?!0( zv0=Jb=ZsvtCFjkOLcO@7VAKa&n?;-zYIE+h)8r zlHeJAvu#`NLJO1}v4xUD#%;SyZmKAks$3T%SHJ|dJZ62`gyM4cUrgK1P3$Ccg*VL2 zS8c{{Dd;$NrYu#_TE4#{hfg2YV?Ex^P9(Y^=Ib{sbWh6Wa48VCw0ICG_wjlXy!`^a zG&xOfuw4~T&`#}(*@MRUkc;Farbmra14wqMF%*1~0rRxZJMTZ?k!sjp!N-tc=ZqW} zgs_#jsD#!q8LYp_H~@om*QBW@k0jAvT91Y&>aNmCB@39>BiAbn4U$X0j(^5^2Td0Y z>%ngL5J(@_ep6`x!)Z^gW=J&=vs>yUr&{(y>7FEnvP&QpFtTqU>Zh3T9t?X$ReWxF z`CEf!EBi?Wi{)L7%bVEICU?7lgA&Z^a!*Vo@OAIMa|y@$XT=BXa{|}L6tNzUr32&m zH4clN2Zk~0j}r=IFMy*1yD!i}w3>o_0#91>iP+901@j{0@mq8G#VA1@DG8zn7ib9d z%cgK^N;f4ZQvJF*y1GSVB0G_5QP(&Dv_=(ZU5eJN zEO-n7HY{4eY#+KS(uXQz6dH%s7%{_;yO%i!hnPyGD-wf4g9{hn^tVS59d60ldT-uB zbqsE}pMv(MjrBIJxP{!-8Lm&1VX;fPi0shSHymkK_=Jnr{GHV&sjdeKlDF)`?GsT% zOvV=JToHI}X?;~902Oo9Bq1V;lIr%#bUEr`orVZ^kl zK;zDiXn*tYZ)-&r!vz(blZZFnj_&|%8f2E2(4B4!W8WLZQ}BCyS=5pQY}D>I6Gpp= z#HF}{Ns}6lq1NGy9f^lhKOccv^#7u^Hc)DVx7;5k?VMzj<{owSf*0<^f^6utIZUaU zNgH-+<_ygPLj3^q9(FpR#E8pjjLRwF1>5~BIKp#?{4JNV3zTczNjHYBqI{q?p*56( zS{Twrpk1h7!ux7vY`}e&KH10^-%!?}f|b+mbMmbUU%k2;ReN+eCfL$_w>cdA=JR#f zKoH%_eGjLj$Rd^oop{zSwq|6=c&tm?GJKWVPu@5L85@6}C6hhCO<3JfkM1RY<0pnv 
z=7w(Gvqe+OOP%CF4={66h^x9O15Q5hGo>X;<)(Mw>!;7HI-Sq0g9#>#cyI}Vea*&3 zjNJrorfl>w_u%X&&N6zx%PT+WGpDa@>V75Hes7A#o~W5~UcB!3j_{q3AFW5d;Ddg7 zk#IWgzJPb2s+N(p+0^!=h>)nHRogEU8ALN6-k<)k_S)tCBxI&5c}@m>2&wJ|kNBgp zTOC!!h3o}=*mvD(=woG3Lh>1_!Z|O7b;iJKQDubm!2#T>TDl{zA01FUMl7kz&9#%6 z@0Gg&CvOpFm|IhdXf8YDO$NR|0}UIk+c14v(2jG+x(U^%)>usV=^0S+QZ+S;WRh{( zH?foY%vW}Ngo4&LKxN%)oXzP@&h}(ndHA2!WNdjkg!+Kcu^Cgb6`T(X%uEH$ju@ok zBWXI?pXuoPiP0*nrB3YuTuT%&cz{29z=GfMdfqB(uRin|N&Z!K5cB)<)#%ggv4}jc z_;Z{XRf$DW<*t+lvq156E7bSCP`A`6^?;WsOFkCy6X-#UPd%UzpruQ<)yn}N67lam z*Bu?e2^*VRQ73OKljzW?T#jH)aM^oY26sQ=BIH-Lz8dA!k+ zCq~HUoP$@66;5wiwdE*fffFJb8`~DY~1c=AA!NE+tXn6kCF|;NK)Q}o@ z2yp9D_m39t1pOKGHE;{=1^2XHp^M-LQU|TPUP32dtIBu5UULX>}Vl5vH(cn`qbFNPDy*-#G*o+?t4x zV-LKe7zV6y3v}dK-sj9h*_FLE^X`# zc|%7ocOqQ7Z2R|fOaz-ES_1WS=dGF?y$CH@6r)pRFX6T;7AZSzlDc^9jwz?t6NQRmx9=i0| z99oogo8IU-Lr*%3Nc=MUD}Va-9(l7ES^|Mb@Z*Qxfm`RP@fn@(paN~!hsQBd=q;>) z`X3JZE9-uq{&00x@zxs83$wk5X3rO%$ZaQXh7JRKQdcIhC^ZM=Ga3s{)0{<@V>Y_0 zk@JJQVusC-=ziAvM;64XYOCYKU2w!kY_qdEHM2~!Bo^#r7D|)s4@`Li=6o9PuO~Lm z=4qiRwR86<-#=f7SgkRZK09N%-8gY^wZ=%`klf?y_`S=#2tOMenwY*-PSjL!eIe(@ z%$)R>W7A-9u~nQ_D@~L@aUsGM7rEjMV?RfUu+-7s6Ax=8)wZa_q z`%MFXduvT*v&X&W*Igd1zqRDW**Mez$m2f$yDD$a3es`4Li?35wEX(z?E`zzstfs$ zo-}sM6Ce8483nC{bvwIqV1)WmyDP%8FXM7}n@pHZrlzR0j$UT|>qz2LuC8&?6d_^_ ztkKFlw`Lb`U-yXSAHzIAkK22W-C>WE1iMfiimo%jnb)VlydIW?4s3KLM{7(?X9KM+ zv$?vMhwU-HzKWgTjC-|5u0Fhf(kD5Bhp88JQLS&M%71l@C@tks>6(}e`T<%HrxCYp za6h ztr{L=2ENpNhrRCrfJ{pV1Dc*x8?JL=QJWvKG<>{FS+Jla0Ifsu@f!Hhbc@nLFa7gz z-AdVp|F8${XGWeO0P?w3)m1`&pw0dpsQn{^`QZqPGO57YApHq;aZRz%9sC3GkN*<~ zE$neGh6V@4l%(~%zddlp!#zwb)r0;N_zyJ5>cTvLC~4`fK4OzLfroM8T{xT6-js$< zz-&9(7;ip3IslPb8yc-?3?E$WCrq7}r@e$I=euK`ib?n3pv}j7mnybSti5}RIsb=2KN z{z1xl(B1{WTN};1cXe>ksP+%igr0wY_nP5tg~i3Kzt()JOfWa@us=NHg;R?=P5~ne z!s(?_=X0T&J~y15v*L7Y=rabS(~#4u__ua1U4eM*rwXWmz%@6&nccq<{QgbKS;lLj z%$If;Y)6Ap!F-mqyjme%#YNuf-W?t9yc^+gDR4P=J{6s5o>4{I11RkTrKou&E_tQW zr01Ds)br8HUDpNMak*CF{k8ET{Bhm#ZZQdB(d5>* zrii$NmR4Ft2(9iOT=N&ugV|0AG-Dcu-*`32r`42{^ZeaA^yIh;IBenUrjD5ge!W8F zYr2|N#8?$Ak!%q1Ovb>I@zy^yG;-ykh!f(jF@SD%OOx115NfMv!|nk}|gfR?~On;d@cjYn=cT$GG**ubs|_hKWk7c@Gvp&B8Bj7g%nm zr67hB<@o53kSF zthN{dBhHD#T1i!^G0a2|r7yt!SF?1F&=m3Iueaw%Oo;WdVxT`C95Uc^(IZ*lN|W*c z-t#U48F4XhCva)4CSLuy8au#CrT%0Wjiub3l5j=}IQ#i*9>#y@KyvN6X>H(_2<8g7Z=IR*1k4`A` zlf=FF2^8edSQ*Aaa%R>okC))+qYYt*TKLy8tfTY|GYH_7>I4r^_~*|nBlb^nyz+*q zBlU{X068=s99S)TN2&^FFXwUMJICJy9K0i>-?LewQa~!{%D*M#{~m$&a0(OT?J#0P zwt*HR0GsWrEh;=EI|3*?=+cRQBK6<%Q~%FN|30|g&rC&k<9W$~D{?#CgCqlBSwA6- zuOg08lnxuK+cOh6#z<2=<^-&Lfm&WDZ+SE4{Ooc`q3zuyYm;}600OOUcfM3Fru(>Y ztcA#Idvej^n&eEPVczFh{-Pb~tZcP03y>oLAm23V@!U#&Y4Uu3J|?+i(fm(RS* z2<4IY47bicub2&noGec5h73uT#UjN7SRhpGz{t7uJVS&zB<<)?FJ=OomE9l&MTTBKTEk6@)289_sBsUE36i&VD( zljM%d3MbLBgT@KW8f|UQ`P<4ua(v$wDkK;$f|D5;I8VQfkt$#KdYa(N9uq3}&jX%c zOTAB*gBzMu7k~-Aj7ky{O5+LZ!}ZRt7br8;<0rAcgkX>UghU%+QhR}ydQDUHtuE=3 zL@SR$#+)iLl5J*M3G;7wc@PK=6is}UJEyY7~IE-=MvQ`LExlHG~^&phc zHxsX<=;}CRf5{EMCDjcOj8G9O`NckfkX+ZfefcVIYfFD)BYz7^|D6^5U19&Nb_-)z z35xgJvp!%2Aog7;87lsd?Luknk`Br_LdV{z{vu#LAY(EnBO`R`5Rkb3_2u94yZ?5D z|LS%^yV6`WBQ^)^y79LzX$G{_?J3VdFm2StLI837RfJUKxyf;DHJ6|DpGj@6-~O8*$yCR>)fG!|jL(=iU{4j`jOW@$kafyc7lS&)QO8c)7pt!^6i!Lmx_R=zC;4 zE%e(`zo}Od+JK$c-k-d=WQQ7kcbw&4LL+-!`M4=*8yJ_`X@Q`pA7@$Hu_$C1WRQ+( zR*q-^mUX)~UGLSjSh}TJ>mAmR_A^^?!8s@}utI?G{_?_Lu*41SZrTspSliPp*PtY& z)l?fw-cy3RQZU=A<8!S#z~(}HT;o)+cur1}9x7j?x)umeU2?+({LuIAi<@Gbh4O~x zyl$J4ytgy?&ieRJ?#M?=W@brZE9QB~W?RzrN|S@4m{6b5y;eH&+-T9R$BJhw6`yOy z%x(=d74y>4Q*)$p!akLH#+YlMfV;dY8%sf>=8GyV5y`Lo9coT>wE}i+`exZCLSuF8 z`oXe@g@;iD-iONx=g5bm)+=VOi`%15iLt2{rt?q4+o_;WL~L_C2G`oPeIl(CBWcwM 
zJp$%)e<0oBQgU1sKP#nQGt-@qF$i}i)g@hj6HsD%wZLK0{9tm*37<%2rq8X3CJnIx z%Z?YvL`t#O9-(5M?)5oczo=+xXL`ytWvpb>y3DQOs3a**Rc)4O+I!mE4u}Iv0Ea8@ za}`qRoIb}Of?urMk8lsE%^10v-MUbE@>It8a5JtKbEr(Id{O$siPVa&>qmB?3F^o2&yfu}W=*^5#Stw$7Z^WqN zM_)XH=*>>AF7|h-2{x^cU>1L_gwUEyBZ^sw>d=dS@Qtbp}RHSy!=Rqrhw4 zbuj4D92~HKpS}S$d^~w!!MnTj@#sgpb6=qPleMdT?I|Y=QC*2esf*WVDl~e}ef5Jo zRJ^Bb4aS5}S4Et*?~W!;++r3dlipe~*Naj{mv(_396h*KvG+Rh?{1*`?6}ZlRS7*- zDb|r}Gzf6w+_3+yef>yT{lgfo&X+d*AsZ*7sv|UH7jNXCqj2v3t4#A_nW867*V+>l zAtHW?T17hn$npN+D8?=URjXtBf0T3nN88B~=Dzat9_|z=dfeY{NV&y{AEz7zfVP{L zx6ymJfm)zdt1c}cUD;6uMop6r>>h5q>5!zf1N5QdW+NJ{Gg1+P;1p8?>r=Q&%l2f8 z`be4(KiKPPU6sk@YPUZFpVD5K1LdfL)Ij*f{(OV!Sd)|Wh8?Y}(3^j{R!qHyg1>)D zQmphLn;+IaB|Y!&%C?WhM5W3(n9BjVBm0l<*Pn-~2e&STp8r?uQwRK<(60Io5$0(c zFweGHk3Ewzy6$&6mfI#>-(SG#(H;RFzSNa#rXJ==o3&QS%!%nLuu^Wmf3upKZllnh z^=hTV=e`L|uW$K@iB7M)mO*l7kJS&~Y=#wk<1KW~vEt{1o}7^tefOSeV8L`*W_Hoq zT)mwUX%Wh|j%f!RkTmsOha+|v@Y+*K`Lbp25~L+XNXuH2_7!lOu!A;P1Em<(mw*TF z$d6I+jx4`>C#BC_!U~(_oDt`gn3L@8gj@KD~!^139kE2p!CPUsn2Lr77YNIV-75trI}SLNi2J8QtLt?l%sWhGt3pyz z+8#UWUT0Qro49cM=Hu$wH!^aH{Krh9A|~w}d|ox~)KJ71Wdp$U?HB+|vb{@jw_(f~ z=M9tf`=@-Qr1Zj_-RC2OtR@5I=nWxvTGj7oj+J!HwFx|4# z8ozeL)Nk6|^OUjNi()&z&Y3tZj$VnKY-K_LBewKb4(A1s2p|>#q&a*U;C3SUIRAyH zafy;({DQk;3bSTd`Q2}3jt!3t1af@GB_$Z>>$+OMWT(LxQ zv0U_~Q%X}>@XTYH3ex$_@ToVTEGIVu35c7WfkJBsv8KJTGr&$8{75bWK<7e|9 zf>G9?z+1noHkP>O5+%;f)9Im+KK^dEVi1{X7x?BMOO z5^RQk!MRMpD0}|~uCf0bdc^VPKSFui$vK^;sOqmHuNCa>&ele|vjOj3 ztAt=woD@L;(bm%E&4GdZ$H1}3F6IeIa7}>W#K!{+hiDk(w`@sDw{zC1$cJwq2S-*+ zug)sh`TAbYMha2G|1=g?H&L2_aeEC#-v%J;r{F7t4C}?gfsLyB{$IbAc$!`NrL1f* z{rvIfn8G`&b^gGuj5~|C!`n+JB56T46RS&7!RsF`Pokz?d)c-psm{*mO^m`#ZB=3u z(g7Zrd0Y~jU6A+-U<@f&1^zp7l=5whK zEMa+urHFwJ0}Yc58@6Fl{q;&K|JF+ujdscAUOan z!3UY^m89LfVYe4p4kGm5nSBQUK72@IJuy1fCE2=F;iLvbL(#69fWb0i$ErfG7}Znh zwsu;OHNdc3!rp7)H~byeTeeO9=vAjJ^^d~+00}qTTR9BKcVZML44s2sg?C%alY75n>ywG5y~;M)@*nzAtm?Cu}6qW|>G|B(;<-tnH|{Cp1BO`F3q?%-%~*+X|P>*XWdq6 z5Kfzbdvv+o-b1~^UJFDE617wW!7lfj?>FJ9BfHWiH;YLV*!4k%1K^+aRkOXvMZi^l z3?I@7Tl;F{Cj@J6$x<^7XM})x!~lHfvT%|viu~Al<}B$1M23jN@I~-2e_G>hkJ_Js znC6c{`Qc4bIsU@DTRGh(t2?uVKIlb$XT?h1E6*KOsRUSXe6)5UnKHj+b8)3%)L>_& zi6YzIIVldv3O#LQ%nmG6c|vu=d^LUAJ|vq)@tOr+fC9{qg}ou9v`&ZNyGgbO2ZrO| z%M<=U;jH3hp?tznZ$mlZEev?)%dW}Qs>cUu%1(Ai-33bqL}rsP360UvxNZ1 zDDZ6USm_nS`1e2mq~B<4P&N*<(kMn#+EY8loipPcwU#M}7@Vz#P|T0gP%;smCdjHD zB^DT;IB8(junMmF4Up^=GqCIG5#%VD2u?S5?DHEQvfM)xE)puM%DZm~oDry%y@ZUD z{T3I1UHv({kC?E>hTa^^dJ6+~^@_kH9&3)!0s0ia6|^!iP~$cS=^kDZu*w8^Lhc=~ zsiTMQzs^xTV}q52y%o6xz@^%ds{o1%ar$Sp(#U1 z5BGtN-nau7x88oZvMH)_EVKH~R>-96_XShoral0h>K=-gY)t?b;QP5r5tej?-+u?b z?<5$uoCOEDX!?x5-Nh0X2Opg%WSw3C2L%K)8v=(rj^f{H?ws@ju0r@kCbPGxj-5^K zJymwNj{W2SYK<P9q0zjdO2of*LPxQd!S4f<2-Xd&zOpRc4I=;;7vsh+XBoT`@yMA^S)rcF=>v(6KOhi-?%&Wts;)06>v`3(5B zEGKNsOn}{*PgI7_{q$oZ_d9UdO}7CA(kUF~5ac$^*Z;92 zAWpS7>*Z2a?#UtKQ z)pC`Zzk1I-;#2{7-tqLb3H{QXFe>3=OO=VD#a3IhxHIn(P}j-TJwR-HSVlKM18DN~ zRN0cu7?TnT6g4lKiMz~~`mtE@iRO-~k$o^W7I{)Q zmKRMiBwwIh9;9ag=BGNa@zTj;9*8ud-u^eL}7N+@A{Kt&3cS6C_W$Ivm`Dz7N>`)5eM3HuqDtBB zlh_>HngGYoBEqjIcIGs}zI87CSn5lBHra;QXdjUC@CnDae~n90x>eE_`0YVavX^ zF-2~=`CV16!?J2|KNfnKHqo{WXO$?P?Kx%cIANl%Ij03l1t3c(VDXcMuy>C>0WhWf z7TZ-AMFbG;B?PG^yq1t?ic2vekpAK&Y3oMi40~!4O!l)2)A@t?lMlZq}zixtTYs2yOaRQxRbQ-F@t;Mn>X^+d^xFiH!hz(Jp%R~A$(LgfhdtY zxH~S83&YriGMzO5Thh_(3V=CG~n%lKbJ z52uM~r-bKlCqz?`72)4+qY5nZK4oM-(IEfQv~IS>9_BPLGbf~NJ%s(LR{oRqd1nd} zgh|Pub5tc+{Bj>+5I(0tG>K_E<`iaWj9BPE!)4a&u4g5neN3E)KGZ(j*S6#3{lNf`{ ziL-zvE&%ZOZf3?kwa*G4!vTuL8Lm3*ZIZm{Zmse2`S~X@ae;neOe${Jobdm@flnXLI7JdrK0%cxIi{r1`ek6?8I z4yFC2k5&(2>GNezg0^byv8Izd*1s)MRDU#b9F7`)eBs{y8u`vUtbK=d7qKeQa?Kt+ 
zV;4jCKo`x>nX?9bt<)s?+dtMJ`fd8V2P$ zYEA|jVpitZ(%8bROo1$LM2{rJ*}-@RhWPgDruA*N^3JE(CBYpkwq7k_#fPmhaXM|^ z4?G=b3&sKTdjJ&FMMRIJmHh;gIcc(#k*OaT(Guv*Adr`=qF)XZ9UAHjOTGKaSE_kM434=)W+&Hef+@dAu zc(w~R`JI$m)xJ~3vscDEDw?u_aogX`&nu*_jIhk3<=iTj>3)MFxA%kBl|wViKhwA) zY5q$3+Jsya}Qe4wa>ciDqj*M021#*m(BE^48( z#?p1!bjD#nxjaY4oS^KWchpx-=g)FB-)nZyV~srB5{6?9Ki4m5T^vQIAfPrxk{b1r zlEK-D2GXX5XwLMQLRzrR#&Pmg5S{|%1)I#zg&ZLiy7y-9*B5kYkj`mRKa-UmU=a8q z^GSObwria>lECX;U0QPrUJ)F2(d0aU&M(uE;WVWL8`}j(D?Wb(?5N}089^GZnQ5vr zRzl8kwrw_>Hq+__hmmuMPm|gv6IQyan1~^lU?*<2DM8-tepBTI)=+&VFGG+iciyW9 zZ>r!nTx(IdtH@48%AeDqiH*EL8}|giD(v3F8sGxt)JFWX3hjRW8Ygd_2>GNK(3#xv?*UrGWY74^;*DR|k! zeLqcdYjazuuJK&u%-HdZNzHklZmc<{L}{7#ge5!EyhEsr1I@R>TC~ogQr~i0xy2FE zWv*4qI$LfdAK@$-ubHqpkDcU$JqciPTfy76OB-m|bt%G739zN~K}eY*fPGec~u?)#P3fd6e~P>_ zdNE4H9gCuEcdm!d<7X!Mb{D6bY~a!@GoY9 zJZ}73y*R;xv!68y#_M$E4Nd|`Ja}y^qhbD*VMqr>VBhD?P+iBuk4_Q4ywsKZxfrytns=GRJE+n(wuD9f103 zUzWzXs2sM%@N!~h`X-)D))$Uz(~qwNUNrkvY^bZ*l}pUg51U;iq*=6QX|uCfgvIC_ z<&pXjyN3RyfAXhFve^z$yt{E4DB=188P(-h|IocQHiYE3V1OTso-Ay)gANgkJe(Q?BcpF3_>e5cVtC|M>sezpEvh3)>GthR@yz a Date: Fri, 12 Aug 2022 18:28:14 +0200 Subject: [PATCH 0366/2550] OP-3682 - added more fields to metadata Additional fields could be useful in the future for some addon Store or pulling information into customer's internal CRM. --- distribution/addon_distribution.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index e29e9bbf9b..465950f6e8 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -3,6 +3,7 @@ from enum import Enum from abc import abstractmethod import attr import logging +import requests from distribution.file_handler import RemoteFileHandler @@ -21,6 +22,9 @@ class AddonInfo(object): addon_url = attr.ib(default=None) type = attr.ib(default=None) hash = attr.ib(default=None) + description = attr.ib(default=None) + license = attr.ib(default=None) + authors = attr.ib(default=None) class AddonDownloader: From 79a3777d8663ca58d855469c60299486374a3539 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 18:41:55 +0200 Subject: [PATCH 0367/2550] :recycle: distribute ocio as zips --- .../maya/plugins/publish/extract_look.py | 6 ++++- poetry.lock | 25 +++---------------- pyproject.toml | 6 +++-- tools/fetch_thirdparty_libs.py | 23 +++++++++++++---- 4 files changed, 30 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index b425efba6f..b416669b87 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -43,7 +43,11 @@ def get_ocio_config_path(profile_folder): try: import OpenColorIOConfigs return os.path.join( - os.path.dirname(OpenColorIOConfigs.__file__), + os.environ["OPENPYPE_ROOT"], + "vendor", + "bin", + "ocioconfig" + "OpenColorIOConfigs", profile_folder, "config.ocio" ) diff --git a/poetry.lock b/poetry.lock index df8d8ab14a..21b6bda880 100644 --- a/poetry.lock +++ b/poetry.lock @@ -797,21 +797,6 @@ category = "main" optional = false python-versions = ">=3.7" -[[package]] -name = "opencolorio-configs" -version = "1.0.2" -description = "Curated set of OpenColorIO Configs for use in OpenPype" -category = "main" -optional = false -python-versions = "*" -develop = false - -[package.source] -type = "git" -url = "https://github.com/pypeclub/OpenColorIO-Configs.git" -reference = "main" -resolved_reference = 
"07c5e865bf2b115b589dd2876ae632cd410821b5" - [[package]] name = "opentimelineio" version = "0.14.0.dev1" @@ -1284,7 +1269,7 @@ python-versions = "*" [[package]] name = "pytz" -version = "2022.1" +version = "2022.2" description = "World timezone definitions, modern and historical" category = "dev" optional = false @@ -1750,7 +1735,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "89fb7e8ad310b5048bf78561f1146194c8779e286d839cc000f04e88be87f3f3" +content-hash = "de7422afb6aed02f75e1696afdda9ad6c7bf32da76b5022ee3e8f71a1ac4bae2" [metadata.files] acre = [] @@ -2146,7 +2131,6 @@ multidict = [ {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, ] -opencolorio-configs = [] opentimelineio = [] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -2398,10 +2382,7 @@ python-xlib = [ python3-xlib = [ {file = "python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, ] -pytz = [ - {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, - {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, -] +pytz = [] pywin32 = [ {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"}, {file = "pywin32-301-cp35-cp35m-win_amd64.whl", hash = "sha256:9635df6998a70282bd36e7ac2a5cef9ead1627b0a63b17c731312c7a0daebb72"}, diff --git a/pyproject.toml b/pyproject.toml index 1d757deaa0..b7b3fb967f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,8 +70,6 @@ requests = "^2.25.1" pysftp = "^0.2.9" dropbox = "^11.20.0" aiohttp-middlewares = "^2.0.0" -OpenColorIO-Configs = { git = "https://github.com/pypeclub/OpenColorIO-Configs.git", branch = "main" } - [tool.poetry.dev-dependencies] flake8 = "^3.7" @@ -144,6 +142,10 @@ hash = "3894dec7e4e521463891a869586850e8605f5fd604858b674c87323bf33e273d" url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz" hash = "sha256:..." 
+[openpype.thirdparty.ocioconfig] +url = "https://distribute.openpype.io/thirdparty/OpenColorIO-Configs-1.0.2.zip" +hash = "4ac17c1f7de83465e6f51dd352d7117e07e765b66d00443257916c828e35b6ce" + [tool.pyright] include = [ "igniter", diff --git a/tools/fetch_thirdparty_libs.py b/tools/fetch_thirdparty_libs.py index b616beab27..421cc32dbd 100644 --- a/tools/fetch_thirdparty_libs.py +++ b/tools/fetch_thirdparty_libs.py @@ -109,13 +109,20 @@ except AttributeError: for k, v in thirdparty.items(): _print(f"processing {k}") - destination_path = openpype_root / "vendor" / "bin" / k / platform_name - url = v.get(platform_name).get("url") + destination_path = openpype_root / "vendor" / "bin" / k + if not v.get(platform_name): _print(("missing definition for current " - f"platform [ {platform_name} ]"), 1) - sys.exit(1) + f"platform [ {platform_name} ]"), 2) + _print("trying to get universal url for all platforms") + url = v.get("url") + if not url: + _print("cannot get url", 1) + sys.exit(1) + else: + url = v.get(platform_name).get("url") + destination_path = destination_path / platform_name parsed_url = urlparse(url) @@ -147,7 +154,13 @@ for k, v in thirdparty.items(): # get file with checksum _print("Calculating sha256 ...", 2) calc_checksum = sha256_sum(temp_file) - if v.get(platform_name).get("hash") != calc_checksum: + + if v.get(platform_name): + item_hash = v.get(platform_name).get("hash") + else: + item_hash = v.get("hash") + + if item_hash != calc_checksum: _print("Downloaded files checksum invalid.") sys.exit(1) From 5a2e8f6d8f814bd0d7f6707580edd08e0098fec6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 12 Aug 2022 18:44:16 +0200 Subject: [PATCH 0368/2550] :bug: remove import --- .../maya/plugins/publish/extract_look.py | 24 +++++++++---------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index b416669b87..cece8ee22b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -40,19 +40,17 @@ def get_ocio_config_path(profile_folder): Returns: str: Path to vendorized config file. """ - try: - import OpenColorIOConfigs - return os.path.join( - os.environ["OPENPYPE_ROOT"], - "vendor", - "bin", - "ocioconfig" - "OpenColorIOConfigs", - profile_folder, - "config.ocio" - ) - except ImportError: - return None + + return os.path.join( + os.environ["OPENPYPE_ROOT"], + "vendor", + "bin", + "ocioconfig" + "OpenColorIOConfigs", + profile_folder, + "config.ocio" + ) + def find_paths_by_hash(texture_hash): """Find the texture hash key in the dictionary. 
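Note on `get_ocio_config_path()` as patched above: Python joins the adjacent string literals `"ocioconfig"` and `"OpenColorIOConfigs"` into a single path segment, `"ocioconfigOpenColorIOConfigs"`, so the resolved path only exists if the vendored configs sit under a folder of exactly that name. Below is a minimal sketch for sanity-checking the resolved path; the `profile_folder` value is hypothetical, and the comma-separated variant mentioned in the comment is only an assumption about the intended vendor layout, not what the patch does.

```python
import os

# Hypothetical profile folder name, used only for illustration.
profile_folder = "aces_1.2"

config_path = os.path.join(
    os.environ.get("OPENPYPE_ROOT", "."),
    "vendor",
    "bin",
    # As written in the patch, the two adjacent literals below collapse into
    # one segment: "ocioconfigOpenColorIOConfigs". If the archive actually
    # unpacks to "vendor/bin/ocioconfig/OpenColorIOConfigs", a comma between
    # them would be needed instead (an assumption about the intended layout).
    "ocioconfig"
    "OpenColorIOConfigs",
    profile_folder,
    "config.ocio",
)
print(config_path, os.path.isfile(config_path))
```

For context, the `fetch_thirdparty_libs.py` change earlier in this series downloads the `ocioconfig` archive into `vendor/bin/ocioconfig`; which join is correct therefore depends on the folder name the zip unpacks to.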
From 6187cf18f50c43ca60f234bce439a4dc466263cf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 12 Aug 2022 19:08:30 +0200 Subject: [PATCH 0369/2550] OP-3682 - implemented basic GET Used publish Postman mock server for testing --- distribution/addon_distribution.py | 59 ++++++++++++++++++++---------- 1 file changed, 40 insertions(+), 19 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 465950f6e8..86b6de3a74 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -34,7 +34,7 @@ class AddonDownloader: self._downloaders = {} def register_format(self, downloader_type, downloader): - self._downloaders[downloader_type] = downloader + self._downloaders[downloader_type.value] = downloader def get_downloader(self, downloader_type): downloader = self._downloaders.get(downloader_type) @@ -115,24 +115,31 @@ class HTTPAddonDownloader(AddonDownloader): return os.path.join(destination, file_name) -def get_addons_info(): +def get_addons_info(server_endpoint): """Returns list of addon information from Server""" # TODO temp - addon_info = AddonInfo( - **{"name": "openpype_slack", - "version": "1.0.0", - "addon_url": "c:/projects/openpype_slack_1.0.0.zip", - "type": UrlType.OS, - "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa + # addon_info = AddonInfo( + # **{"name": "openpype_slack", + # "version": "1.0.0", + # "addon_url": "c:/projects/openpype_slack_1.0.0.zip", + # "type": UrlType.OS, + # "hash": "4f6b8568eb9dd6f510fd7c4dcb676788"}) # noqa + # + # http_addon = AddonInfo( + # **{"name": "openpype_slack", + # "version": "1.0.0", + # "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa + # "type": UrlType.HTTP, + # "hash": "4f6b8568eb9dd6f510fd7c4dcb676788"}) # noqa - http_addon = AddonInfo( - **{"name": "openpype_slack", - "version": "1.0.0", - "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa - "type": UrlType.HTTP, - "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa + response = requests.get(server_endpoint) + if not response.ok: + raise Exception(response.text) - return [http_addon] + addons_info = [] + for addon in response.json(): + addons_info.append(AddonInfo(**addon)) + return addons_info def update_addon_state(addon_infos, destination_folder, factory, @@ -167,15 +174,29 @@ def update_addon_state(addon_infos, destination_folder, factory, downloader.unzip(zip_file_path, addon_dest) except Exception: log.warning(f"Error happened during updating {addon.name}", - stack_info=True) + exc_info=True) + + +def check_addons(server_endpoint, addon_folder, downloaders): + """Main entry point to compare existing addons with those on server.""" + addons_info = get_addons_info(server_endpoint) + update_addon_state(addons_info, + addon_folder, + downloaders) def cli(args): - addon_folder = "c:/Users/petrk/AppData/Local/pypeclub/openpype/addons" + addon_folder = "c:/projects/testing_addons/pypeclub/openpype/addons" downloader_factory = AddonDownloader() downloader_factory.register_format(UrlType.OS, OSAddonDownloader) downloader_factory.register_format(UrlType.HTTP, HTTPAddonDownloader) - print(update_addon_state(get_addons_info(), addon_folder, - downloader_factory)) + test_endpoint = "https://34e99f0f-f987-4715-95e6-d2d88caa7586.mock.pstmn.io/get_addons_info" # noqa + if os.environ.get("OPENPYPE_SERVER"): # TODO or from keychain + 
server_endpoint = os.environ.get("OPENPYPE_SERVER") + "get_addons_info" + else: + server_endpoint = test_endpoint + + check_addons(server_endpoint, addon_folder, downloader_factory) + From 385b6b97f02c2a384e3432fd8f204ee4f6810e18 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 12 Aug 2022 19:09:06 +0200 Subject: [PATCH 0370/2550] OP-3682 - Hound --- distribution/addon_distribution.py | 1 - 1 file changed, 1 deletion(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 86b6de3a74..a0c48923df 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -199,4 +199,3 @@ def cli(args): server_endpoint = test_endpoint check_addons(server_endpoint, addon_folder, downloader_factory) - From 1ad9728962b92e55fa4d16601a7a48add381a456 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 19:32:13 +0200 Subject: [PATCH 0371/2550] :recycle: remove forgotten args, fix typos --- igniter/bootstrap_repos.py | 35 +++++++++++++++-------------------- start.py | 2 +- 2 files changed, 16 insertions(+), 21 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 3dab67ebf1..56ec2749ca 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -381,7 +381,7 @@ class OpenPypeVersion(semver.VersionInfo): @classmethod def get_local_versions( cls, production: bool = None, - staging: bool = None, compatible_with: OpenPypeVersion = None + staging: bool = None ) -> List: """Get all versions available on this machine. @@ -391,8 +391,10 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. - compatible_with (OpenPypeVersion): Return only those compatible - with specified version. + + Returns: + list: of compatible versions available on the machine. + """ # Return all local versions if arguments are set to None if production is None and staging is None: @@ -435,8 +437,7 @@ class OpenPypeVersion(semver.VersionInfo): Args: production (bool): Return production versions. staging (bool): Return staging versions. - compatible_with (OpenPypeVersion): Return only those compatible - with specified version. + """ # Return all local versions if arguments are set to None if production is None and staging is None: @@ -745,9 +746,9 @@ class BootstrapRepos: self, repo_dir: Path = None) -> Union[OpenPypeVersion, None]: """Copy zip created from OpenPype repositories to user data dir. - This detect OpenPype version either in local "live" OpenPype + This detects OpenPype version either in local "live" OpenPype repository or in user provided path. Then it will zip it in temporary - directory and finally it will move it to destination which is user + directory, and finally it will move it to destination which is user data directory. Existing files will be replaced. Args: @@ -758,7 +759,7 @@ class BootstrapRepos: """ # if repo dir is not set, we detect local "live" OpenPype repository - # version and use it as a source. Otherwise repo_dir is user + # version and use it as a source. Otherwise, repo_dir is user # entered location. if repo_dir: version = self.get_version(repo_dir) @@ -1122,21 +1123,19 @@ class BootstrapRepos: @staticmethod def find_openpype_version( version: Union[str, OpenPypeVersion], - staging: bool, - compatible_with: OpenPypeVersion = None + staging: bool ) -> Union[OpenPypeVersion, None]: """Find location of specified OpenPype version. Args: version (Union[str, OpenPypeVersion): Version to find. 
staging (bool): Filter staging versions. - compatible_with (OpenPypeVersion, optional): Find only - versions compatible with specified one. + + Returns: + requested OpenPypeVersion. """ installed_version = OpenPypeVersion.get_installed_version() - if not compatible_with: - compatible_with = installed_version if isinstance(version, str): version = OpenPypeVersion(version=version) @@ -1144,8 +1143,7 @@ class BootstrapRepos: return installed_version local_versions = OpenPypeVersion.get_local_versions( - staging=staging, production=not staging, - compatible_with=compatible_with + staging=staging, production=not staging ) zip_version = None for local_version in local_versions: @@ -1159,8 +1157,7 @@ class BootstrapRepos: return zip_version remote_versions = OpenPypeVersion.get_remote_versions( - staging=staging, production=not staging, - compatible_with=compatible_with + staging=staging, production=not staging ) for remote_version in remote_versions: if remote_version == version: @@ -1237,8 +1234,6 @@ class BootstrapRepos: otherwise. include_zips (bool, optional): If set True it will try to find OpenPype in zip files in given directory. - compatible_with (OpenPypeVersion, optional): Find only those - versions compatible with the one specified. Returns: dict of Path: Dictionary of detected OpenPype version. diff --git a/start.py b/start.py index 52e98bb6e1..bfbcc77bc9 100644 --- a/start.py +++ b/start.py @@ -689,7 +689,7 @@ def _find_frozen_openpype(use_version: str = None, # Collect OpenPype versions installed_version = OpenPypeVersion.get_installed_version() # Expected version that should be used by studio settings - # - this option is used only if version is not explictly set and if + # - this option is used only if version is not explicitly set and if # studio has set explicit version in settings studio_version = OpenPypeVersion.get_expected_studio_version(use_staging) From b61e47a15d4ea7f843aa5a17963f8f4d0d73c77f Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 19:45:26 +0200 Subject: [PATCH 0372/2550] :recycle: don't look for compatible version automatically --- igniter/bootstrap_repos.py | 12 +----------- start.py | 2 +- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 56ec2749ca..dfcca2cf33 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1166,16 +1166,12 @@ class BootstrapRepos: @staticmethod def find_latest_openpype_version( - staging: bool, - compatible_with: OpenPypeVersion = None + staging: bool ) -> Union[OpenPypeVersion, None]: """Find the latest available OpenPype version in all location. Args: staging (bool): True to look for staging versions. - compatible_with (OpenPypeVersion, optional): If set, it will - try to find the latest version compatible with the - one specified. Returns: Latest OpenPype version on None if nothing was found. 
@@ -1195,12 +1191,6 @@ class BootstrapRepos: if not all_versions: return None - if compatible_with: - all_versions = [ - version for version in all_versions - if version.is_compatible(installed_version) - ] - all_versions.sort() latest_version = all_versions[-1] if latest_version == installed_version: diff --git a/start.py b/start.py index bfbcc77bc9..9837252a1f 100644 --- a/start.py +++ b/start.py @@ -729,7 +729,7 @@ def _find_frozen_openpype(use_version: str = None, ">>> Finding latest version compatible " f"with [ {installed_version} ]")) openpype_version = bootstrap.find_latest_openpype_version( - use_staging, compatible_with=installed_version) + use_staging) if openpype_version is None: if use_staging: From aa0fe93a504a3a513239c541e698a99600de9736 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 12 Aug 2022 19:50:07 +0200 Subject: [PATCH 0373/2550] :bug: fix version list --- start.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/start.py b/start.py index 9837252a1f..084eb7451a 100644 --- a/start.py +++ b/start.py @@ -726,7 +726,7 @@ def _find_frozen_openpype(use_version: str = None, else: # Default behavior to use latest version _print(( - ">>> Finding latest version compatible " + ">>> Finding latest version " f"with [ {installed_version} ]")) openpype_version = bootstrap.find_latest_openpype_version( use_staging) @@ -947,7 +947,12 @@ def _boot_print_versions(use_staging, local_version, openpype_root): openpype_versions = bootstrap.find_openpype( include_zips=True, staging=use_staging, - compatible_with=compatible_with) + ) + openpype_versions = [ + version for version in openpype_versions + if version.is_compatible( + OpenPypeVersion.get_installed_version()) + ] list_versions(openpype_versions, local_version) From fd56f09c8423ea6438d6606c69dfa6c45ba9e8eb Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 13 Aug 2022 03:48:01 +0000 Subject: [PATCH 0374/2550] [Automated] Bump version --- CHANGELOG.md | 25 +++++++++++++++++++------ openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 21 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b7ef795f0a..2adb4ac154 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [3.13.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...HEAD) + +**🐛 Bug fixes** + +- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) + +**🔀 Refactored code** + +- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) +- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) + +**Merged pull requests:** + +- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) + ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) @@ -37,6 +55,7 @@ - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- 
Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** @@ -68,13 +87,9 @@ - Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) - General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) -- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) -- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) -- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) **🐛 Bug fixes** -- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) @@ -89,8 +104,6 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) -- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) -- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) **🔀 Refactored code** diff --git a/openpype/version.py b/openpype/version.py index c41e69d00d..6ff5dfb7b5 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.1-nightly.1" +__version__ = "3.13.1-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 994c83d369..9cbdc295ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.1-nightly.1" # OpenPype +version = "3.13.1-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From ae491af33b234f6ef7130c7f52f8a9e67cd032a4 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 15 Aug 2022 01:58:36 +0300 Subject: [PATCH 0375/2550] Adjust schema to include all lights flag. 
--- openpype/settings/defaults/project_settings/maya.json | 5 +++-- .../projects_schema/schemas/schema_maya_render_settings.json | 5 +++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ac0f161cf2..c95d47d576 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -33,7 +33,8 @@ }, "RenderSettings": { "apply_render_settings": true, - "default_render_image_folder": "", + "default_render_image_folder": "renders", + "enable_all_lights": false, "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { @@ -976,4 +977,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index af197604f8..6ee02ca78f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -14,6 +14,11 @@ "key": "default_render_image_folder", "label": "Default render image folder" }, + { + "type": "boolean", + "key": "enable_all_lights", + "label": "Include all lights in Render Setup Layers by default" + }, { "key": "aov_separator", "label": "AOV Separator character", From bbe7bc2fdb533375d9acc48a8c6b2f5c1538ecc1 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 15 Aug 2022 01:59:20 +0300 Subject: [PATCH 0376/2550] Include `RenderSetupIncludeLights` flag in plugin info, grab value from render instance. --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index f253ceb21a..7966861358 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -62,6 +62,7 @@ payload_skeleton_template = { "RenderLayer": None, # Render only this layer "Renderer": None, "ProjectPath": None, # Resolve relative references + "RenderSetupIncludeLights": None, # Include all lights flag. }, "AuxFiles": [] # Mandatory for Deadline, may be empty } @@ -413,8 +414,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # Gather needed data ------------------------------------------------ default_render_file = instance.context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ + .get('RenderSettings')\ .get('default_render_image_folder') filename = os.path.basename(filepath) comment = context.data.get("comment", "") @@ -505,6 +505,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.payload_skeleton["JobInfo"]["Comment"] = comment self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer + self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa # Adding file dependencies. 
dependencies = instance.context.data["fileDependencies"] dependencies.append(filepath) From d7aba60460ce19af1c9a4c2bb629c967f8d06750 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 15 Aug 2022 01:59:36 +0300 Subject: [PATCH 0377/2550] Validate lights flag --- .../hosts/maya/plugins/publish/validate_rendersettings.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 1dab3274a0..93ef7d7af7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -242,6 +242,14 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): instance.context.data["project_settings"]["maya"]["publish"]["ValidateRenderSettings"].get( # noqa: E501 "{}_render_attributes".format(renderer)) or [] ) + settings_lights_flag = instance.context.data["project_settings"].get( + "maya", {}).get( + "RenderSettings", {}).get( + "enable_all_lights", {}) + + instance_lights_flag = instance.data.get("renderSetupIncludeLights") + if settings_lights_flag != instance_lights_flag: + cls.log.warning('Instance flag for "Render Setup Include Lights" is set to {0} and Settings flag is set to {1}'.format(instance_lights_flag, settings_lights_flag)) # noqa # go through definitions and test if such node.attribute exists. # if so, compare its value from the one required. From 5322527226344498aec2b830847b42a60e91eca8 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 15 Aug 2022 01:59:56 +0300 Subject: [PATCH 0378/2550] add flag attribute to render creator --- openpype/hosts/maya/plugins/create/create_render.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index fbe670b1ea..2f09aaee87 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -71,7 +71,7 @@ class CreateRender(plugin.Creator): label = "Render" family = "rendering" icon = "eye" - + enable_all_lights = True _token = None _user = None _password = None @@ -220,6 +220,12 @@ class CreateRender(plugin.Creator): self.data["tilesY"] = 2 self.data["convertToScanline"] = False self.data["useReferencedAovs"] = False + self.data["renderSetupIncludeLights"] = ( + self._project_settings.get( + "maya", {}).get( + "RenderSettings", {}).get( + "enable_all_lights", {}) + ) # Disable for now as this feature is not working yet # self.data["assScene"] = False From 5146c5a7e7f83032ba5d512a6861fb8f9b1b47f1 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 15 Aug 2022 02:00:15 +0300 Subject: [PATCH 0379/2550] Add flag to collector, fix settings path bug --- openpype/hosts/maya/api/lib_rendersettings.py | 3 +- .../maya/plugins/publish/collect_render.py | 8 +++-- .../publish/validate_render_image_rule.py | 31 +++++++++++-------- 3 files changed, 24 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 9aea55a03b..7cd2193086 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -60,8 +60,7 @@ class RenderSettings(object): try: aov_separator = self._aov_chars[( self._project_settings["maya"] - ["create"] - ["CreateRender"] + ["RenderSettings"] ["aov_separator"] )] except KeyError: diff --git 
a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index e132cffe53..7035da2ec7 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -202,8 +202,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): aov_dict = {} default_render_file = context.data.get('project_settings')\ .get('maya')\ - .get('create')\ - .get('CreateRender')\ + .get('RenderSettings')\ .get('default_render_image_folder') or "" # replace relative paths with absolute. Render products are # returned as list of dictionaries. @@ -318,7 +317,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "useReferencedAovs": render_instance.data.get( "useReferencedAovs") or render_instance.data.get( "vrayUseReferencedAovs") or False, - "aovSeparator": layer_render_products.layer_data.aov_separator # noqa: E501 + "aovSeparator": layer_render_products.layer_data.aov_separator, # noqa: E501 + "renderSetupIncludeLights": render_instance.data.get( + "renderSetupIncludeLights" + ) } # Collect Deadline url if Deadline module is enabled diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 642ca9e25d..353d0ad63a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -1,6 +1,6 @@ import maya.mel as mel -import pymel.core as pm +from maya import cmds import pyblish.api import openpype.api @@ -11,8 +11,10 @@ def get_file_rule(rule): class ValidateRenderImageRule(pyblish.api.InstancePlugin): - """Validates "images" file rule is set to "renders/" - + """Validates Maya Workpace "images" file rule matches project settings. + This validates against the configured default render image folder: + Studio Settings > Project > Maya > + Render Settings > Default render image folder. """ order = openpype.api.ValidateContentsOrder @@ -22,25 +24,28 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): actions = [openpype.api.RepairAction] def process(self, instance): + required_images_rule = self.get_default_render_image_folder(instance) + current_images_rule = cmds.workspace(fileRuleEntry="images") - default_render_file = self.get_default_render_image_folder(instance) - - assert get_file_rule("images") == default_render_file, ( - "Workspace's `images` file rule must be set to: {}".format( - default_render_file + assert current_images_rule == required_images_rule, ( + "Invalid workspace `images` file rule value: '{}'. 
" + "Must be set to: '{}'".format( + current_images_rule, required_images_rule ) ) @classmethod def repair(cls, instance): - default = cls.get_default_render_image_folder(instance) - pm.workspace.fileRules["images"] = default - pm.system.Workspace.save() + required_images_rule = cls.get_default_render_image_folder(instance) + current_images_rule = cmds.workspace(fileRuleEntry="images") + + if current_images_rule != required_images_rule: + cmds.workspace(fileRule=("images", required_images_rule)) + cmds.workspace(saveWorkspace=True) @staticmethod def get_default_render_image_folder(instance): return instance.context.data.get('project_settings')\ .get('maya') \ - .get('create') \ - .get('CreateRender') \ + .get('RenderSettings') \ .get('default_render_image_folder') From 8fcf5ffa28ae615219d2d3a31b0419bec3a746b6 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 15 Aug 2022 02:19:02 +0300 Subject: [PATCH 0380/2550] Revert "Add flag to collector, fix settings path bug" This reverts part of commit 5146c5a7e7f83032ba5d512a6861fb8f9b1b47f1. --- .../publish/validate_render_image_rule.py | 31 ++++++++----------- 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 353d0ad63a..642ca9e25d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -1,6 +1,6 @@ import maya.mel as mel +import pymel.core as pm -from maya import cmds import pyblish.api import openpype.api @@ -11,10 +11,8 @@ def get_file_rule(rule): class ValidateRenderImageRule(pyblish.api.InstancePlugin): - """Validates Maya Workpace "images" file rule matches project settings. - This validates against the configured default render image folder: - Studio Settings > Project > Maya > - Render Settings > Default render image folder. + """Validates "images" file rule is set to "renders/" + """ order = openpype.api.ValidateContentsOrder @@ -24,28 +22,25 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): actions = [openpype.api.RepairAction] def process(self, instance): - required_images_rule = self.get_default_render_image_folder(instance) - current_images_rule = cmds.workspace(fileRuleEntry="images") - assert current_images_rule == required_images_rule, ( - "Invalid workspace `images` file rule value: '{}'. 
" - "Must be set to: '{}'".format( - current_images_rule, required_images_rule + default_render_file = self.get_default_render_image_folder(instance) + + assert get_file_rule("images") == default_render_file, ( + "Workspace's `images` file rule must be set to: {}".format( + default_render_file ) ) @classmethod def repair(cls, instance): - required_images_rule = cls.get_default_render_image_folder(instance) - current_images_rule = cmds.workspace(fileRuleEntry="images") - - if current_images_rule != required_images_rule: - cmds.workspace(fileRule=("images", required_images_rule)) - cmds.workspace(saveWorkspace=True) + default = cls.get_default_render_image_folder(instance) + pm.workspace.fileRules["images"] = default + pm.system.Workspace.save() @staticmethod def get_default_render_image_folder(instance): return instance.context.data.get('project_settings')\ .get('maya') \ - .get('RenderSettings') \ + .get('create') \ + .get('CreateRender') \ .get('default_render_image_folder') From d2f9c100c35edbe9cafd59db6e732c9ba058e309 Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Mon, 15 Aug 2022 02:19:42 +0300 Subject: [PATCH 0381/2550] Fix correct path bug --- .../hosts/maya/plugins/publish/validate_render_image_rule.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 642ca9e25d..0abcf2f12a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -41,6 +41,5 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): def get_default_render_image_folder(instance): return instance.context.data.get('project_settings')\ .get('maya') \ - .get('create') \ - .get('CreateRender') \ + .get('RenderSettings') \ .get('default_render_image_folder') From b1f29676227726f4367c4f6aa4de9defd305d41e Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Mon, 15 Aug 2022 11:00:10 +0200 Subject: [PATCH 0382/2550] validate mesh has UV safe code --- .../hosts/blender/plugins/publish/validate_mesh_has_uv.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py index 4995eedad4..d87b4ff1ef 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py @@ -36,9 +36,10 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): invalid = [] for obj in set(instance): try: - if obj.type == 'MESH': - # Make sure we are in object mode. - bpy.ops.object.mode_set(mode='OBJECT') + if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': + if obj.mode != 'OBJECT': + # Make sure we are in object mode. 
+ bpy.ops.object.mode_set(mode='OBJECT') if not cls.has_uvs(obj): invalid.append(obj) except RuntimeError: From dd2becdb7964bf43d71368c89f7a9fdae48ce4a0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Aug 2022 11:55:16 +0200 Subject: [PATCH 0383/2550] nuke: collect workfile adding KnownPublishErrorl when untitled --- openpype/hosts/nuke/plugins/publish/precollect_workfile.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py index 7349a8f424..822f405a6f 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py @@ -8,6 +8,7 @@ from openpype.hosts.nuke.api.lib import ( add_publish_knob, get_avalon_knob_data ) +from openpype.pipeline import KnownPublishError class CollectWorkfile(pyblish.api.ContextPlugin): @@ -22,6 +23,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin): current_file = os.path.normpath(nuke.root().name()) + if current_file.lower() == "root": + raise KnownPublishError( + "Workfile is not correct file name. \n" + "Use workfile tool to manage the name correctly." + ) + knob_data = get_avalon_knob_data(root) add_publish_knob(root) From 4bd375409e11822978d352cae20fd1081615aa55 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Aug 2022 11:55:37 +0200 Subject: [PATCH 0384/2550] nuke: fixing validate rendered frames --- openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index f8e128cd26..237ff423e5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -127,7 +127,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): f_start_h += 1 if ( - collected_frames_len >= frame_length + collected_frames_len != frame_length and coll_start <= f_start_h and coll_end >= f_end_h ): From e6584a9b940782bb6927e807b6a19412a1fd2fe4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 12:08:20 +0200 Subject: [PATCH 0385/2550] removed pype 2 compatibility --- .../custom/plugins/GlobalJobPreLoad.py | 48 ------------------- 1 file changed, 48 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 172649c951..cd36e45921 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -260,52 +260,6 @@ def pype_command_line(executable, arguments, workingDirectory): return executable, arguments, workingDirectory -def pype(deadlinePlugin): - """Remaps `PYPE_METADATA_FILE` and `PYPE_PYTHON_EXE` environment vars. - - `PYPE_METADATA_FILE` is used on farm to point to rendered data. This path - originates on platform from which this job was published. To be able to - publish on different platform, this path needs to be remapped. - - `PYPE_PYTHON_EXE` can be used to specify custom location of python - interpreter to use for Pype. This is remappeda also if present even - though it probably doesn't make much sense. 
- - Arguments: - deadlinePlugin: Deadline job plugin passed by Deadline - - """ - print(">>> Getting job ...") - job = deadlinePlugin.GetJob() - # PYPE should be here, not OPENPYPE - backward compatibility!! - pype_metadata = job.GetJobEnvironmentKeyValue("PYPE_METADATA_FILE") - pype_python = job.GetJobEnvironmentKeyValue("PYPE_PYTHON_EXE") - print(">>> Having backward compatible env vars {}/{}".format(pype_metadata, - pype_python)) - # test if it is pype publish job. - if pype_metadata: - pype_metadata = RepositoryUtils.CheckPathMapping(pype_metadata) - if platform.system().lower() == "linux": - pype_metadata = pype_metadata.replace("\\", "/") - - print("- remapping PYPE_METADATA_FILE: {}".format(pype_metadata)) - job.SetJobEnvironmentKeyValue("PYPE_METADATA_FILE", pype_metadata) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_METADATA_FILE", pype_metadata) - - if pype_python: - pype_python = RepositoryUtils.CheckPathMapping(pype_python) - if platform.system().lower() == "linux": - pype_python = pype_python.replace("\\", "/") - - print("- remapping PYPE_PYTHON_EXE: {}".format(pype_python)) - job.SetJobEnvironmentKeyValue("PYPE_PYTHON_EXE", pype_python) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_PYTHON_EXE", pype_python) - - deadlinePlugin.ModifyCommandLineCallback += pype_command_line - - def __main__(deadlinePlugin): print("*** GlobalJobPreload start ...") print(">>> Getting job ...") @@ -329,5 +283,3 @@ def __main__(deadlinePlugin): inject_render_job_id(deadlinePlugin) elif openpype_render_job == '1' or openpype_remote_job == '1': inject_openpype_environment(deadlinePlugin) - else: - pype(deadlinePlugin) # backward compatibility with Pype2 From 919a6146c6c16d5f98caff3eb79792e876b2de49 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 12:08:32 +0200 Subject: [PATCH 0386/2550] removed unused function --- .../custom/plugins/GlobalJobPreLoad.py | 26 ------------------- 1 file changed, 26 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index cd36e45921..98c727f618 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -234,32 +234,6 @@ def inject_render_job_id(deadlinePlugin): print(">>> Injection end.") -def pype_command_line(executable, arguments, workingDirectory): - """Remap paths in comand line argument string. - - Using Deadline rempper it will remap all path found in command-line. 
- - Args: - executable (str): path to executable - arguments (str): arguments passed to executable - workingDirectory (str): working directory path - - Returns: - Tuple(executable, arguments, workingDirectory) - - """ - print("-" * 40) - print("executable: {}".format(executable)) - print("arguments: {}".format(arguments)) - print("workingDirectory: {}".format(workingDirectory)) - print("-" * 40) - print("Remapping arguments ...") - arguments = RepositoryUtils.CheckPathMapping(arguments) - print("* {}".format(arguments)) - print("-" * 40) - return executable, arguments, workingDirectory - - def __main__(deadlinePlugin): print("*** GlobalJobPreload start ...") print(">>> Getting job ...") From 963b66eb5808249dd47ae5e6bd62a53972352655 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 12:15:35 +0200 Subject: [PATCH 0387/2550] fixed python 2 compatibility --- .../custom/plugins/GlobalJobPreLoad.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 98c727f618..61b95cf06d 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -34,7 +34,7 @@ def get_openpype_version_from_path(path, build=True): # if only builds are requested if build and not os.path.isfile(exe): # noqa: E501 - print(f" ! path is not a build: {path}") + print(" ! path is not a build: {}".format(path)) return None version = {} @@ -70,11 +70,12 @@ def inject_openpype_environment(deadlinePlugin): # lets go over all available and find compatible build. requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print((">>> Scanning for compatible requested " - f"version {requested_version}")) + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) if install_dir: - print(f"--- Looking for OpenPype at: {install_dir}") + print("--- Looking for OpenPype at: {}".format(install_dir)) sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -83,18 +84,20 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path(subdir) if not version: continue - print(f" - found: {version} - {subdir}") + print(" - found: {} - {}".format(version, subdir)) openpype_versions.append((version, subdir)) exe = FileUtils.SearchFileList(exe_list) if openpype_versions: # if looking for requested compatible version, # add the implicitly specified to the list too. 
- print(f"Looking for OpenPype at: {os.path.dirname(exe)}") + print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) version = get_openpype_version_from_path( os.path.dirname(exe)) if version: - print(f" - found: {version} - {os.path.dirname(exe)}") + print(" - found: {} - {}".format( + version, os.path.dirname(exe) + )) openpype_versions.append((version, os.path.dirname(exe))) if requested_version: @@ -106,8 +109,9 @@ def inject_openpype_environment(deadlinePlugin): int(t) if t.isdigit() else t.lower() for t in re.split(r"(\d+)", ver[0]) ]) - print(("*** Latest available version found is " - f"{openpype_versions[-1][0]}")) + print(( + "*** Latest available version found is {}" + ).format(openpype_versions[-1][0])) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -127,8 +131,9 @@ def inject_openpype_environment(deadlinePlugin): int(t) if t.isdigit() else t.lower() for t in re.split(r"(\d+)", ver[0]) ]) - print(("*** Latest compatible version found is " - f"{compatible_versions[-1][0]}")) + print(( + "*** Latest compatible version found is {}" + ).format(compatible_versions[-1][0])) # create list of executables for different platform and let # Deadline decide. exe_list = [ From 553fcdff538178019d76d73ccb0b83119a816ef4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 13:22:25 +0200 Subject: [PATCH 0388/2550] added python 2 compatible attrs to vendor --- .../vendor/python/python_2/attr/__init__.py | 80 + .../vendor/python/python_2/attr/__init__.pyi | 484 +++ openpype/vendor/python/python_2/attr/_cmp.py | 154 + openpype/vendor/python/python_2/attr/_cmp.pyi | 13 + .../vendor/python/python_2/attr/_compat.py | 261 ++ .../vendor/python/python_2/attr/_config.py | 33 + .../vendor/python/python_2/attr/_funcs.py | 422 +++ openpype/vendor/python/python_2/attr/_make.py | 3173 +++++++++++++++++ .../vendor/python/python_2/attr/_next_gen.py | 216 ++ .../python/python_2/attr/_version_info.py | 87 + .../python/python_2/attr/_version_info.pyi | 9 + .../vendor/python/python_2/attr/converters.py | 155 + .../python/python_2/attr/converters.pyi | 13 + .../vendor/python/python_2/attr/exceptions.py | 94 + .../python/python_2/attr/exceptions.pyi | 17 + .../vendor/python/python_2/attr/filters.py | 54 + .../vendor/python/python_2/attr/filters.pyi | 6 + openpype/vendor/python/python_2/attr/py.typed | 0 .../vendor/python/python_2/attr/setters.py | 79 + .../vendor/python/python_2/attr/setters.pyi | 19 + .../vendor/python/python_2/attr/validators.py | 561 +++ .../python/python_2/attr/validators.pyi | 78 + .../vendor/python/python_2/attrs/__init__.py | 70 + .../vendor/python/python_2/attrs/__init__.pyi | 63 + .../python/python_2/attrs/converters.py | 3 + .../python/python_2/attrs/exceptions.py | 3 + .../vendor/python/python_2/attrs/filters.py | 3 + .../vendor/python/python_2/attrs/py.typed | 0 .../vendor/python/python_2/attrs/setters.py | 3 + .../python/python_2/attrs/validators.py | 3 + 30 files changed, 6156 insertions(+) create mode 100644 openpype/vendor/python/python_2/attr/__init__.py create mode 100644 openpype/vendor/python/python_2/attr/__init__.pyi create mode 100644 openpype/vendor/python/python_2/attr/_cmp.py create mode 100644 openpype/vendor/python/python_2/attr/_cmp.pyi create mode 100644 openpype/vendor/python/python_2/attr/_compat.py create mode 100644 openpype/vendor/python/python_2/attr/_config.py create mode 100644 openpype/vendor/python/python_2/attr/_funcs.py create mode 100644 
openpype/vendor/python/python_2/attr/_make.py create mode 100644 openpype/vendor/python/python_2/attr/_next_gen.py create mode 100644 openpype/vendor/python/python_2/attr/_version_info.py create mode 100644 openpype/vendor/python/python_2/attr/_version_info.pyi create mode 100644 openpype/vendor/python/python_2/attr/converters.py create mode 100644 openpype/vendor/python/python_2/attr/converters.pyi create mode 100644 openpype/vendor/python/python_2/attr/exceptions.py create mode 100644 openpype/vendor/python/python_2/attr/exceptions.pyi create mode 100644 openpype/vendor/python/python_2/attr/filters.py create mode 100644 openpype/vendor/python/python_2/attr/filters.pyi create mode 100644 openpype/vendor/python/python_2/attr/py.typed create mode 100644 openpype/vendor/python/python_2/attr/setters.py create mode 100644 openpype/vendor/python/python_2/attr/setters.pyi create mode 100644 openpype/vendor/python/python_2/attr/validators.py create mode 100644 openpype/vendor/python/python_2/attr/validators.pyi create mode 100644 openpype/vendor/python/python_2/attrs/__init__.py create mode 100644 openpype/vendor/python/python_2/attrs/__init__.pyi create mode 100644 openpype/vendor/python/python_2/attrs/converters.py create mode 100644 openpype/vendor/python/python_2/attrs/exceptions.py create mode 100644 openpype/vendor/python/python_2/attrs/filters.py create mode 100644 openpype/vendor/python/python_2/attrs/py.typed create mode 100644 openpype/vendor/python/python_2/attrs/setters.py create mode 100644 openpype/vendor/python/python_2/attrs/validators.py diff --git a/openpype/vendor/python/python_2/attr/__init__.py b/openpype/vendor/python/python_2/attr/__init__.py new file mode 100644 index 0000000000..f95c96dd57 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/__init__.py @@ -0,0 +1,80 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import sys + +from functools import partial + +from . 
import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._version_info import VersionInfo + + +__version__ = "21.4.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + +__all__ = [ + "Attribute", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "evolve", + "exceptions", + "fields", + "fields_dict", + "filters", + "get_run_validators", + "has", + "ib", + "make_class", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + +if sys.version_info[:2] >= (3, 6): + from ._next_gen import define, field, frozen, mutable # noqa: F401 + + __all__.extend(("define", "field", "frozen", "mutable")) diff --git a/openpype/vendor/python/python_2/attr/__init__.pyi b/openpype/vendor/python/python_2/attr/__init__.pyi new file mode 100644 index 0000000000..c0a2126503 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/__init__.pyi @@ -0,0 +1,484 @@ +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . import validators as validators +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List[Attribute[Any]]], List[Attribute[Any]] +] +_CompareWithType = Callable[[Any, Any], bool] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. 
+if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. 
+@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. 
forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... 
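As a quick orientation to the stubs above, a minimal usage sketch of the classic `attr.s`/`attr.ib` forms they describe, assuming the vendored package is importable as `attr`; the `Point` class and its fields are illustrative and not part of this patch:

import attr

@attr.s
class Point(object):
    # default= and validator= correspond to the attrib() overloads stubbed above
    x = attr.ib(default=0)
    y = attr.ib(default=0, validator=attr.validators.instance_of(int))

p = Point(y=3)
print(p)  # Point(x=0, y=3)

Passing a non-int `y` raises a `TypeError` from the `instance_of` validator during `__init__`, which is the behavior the `validator` parameter in the stubs refers to.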
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +# TODO: add support for returning NamedTuple from the mypy plugin +class _Fields(Tuple[Attribute[Any], ...]): + def __getattr__(self, name: str) -> Attribute[Any]: ... + +def fields(cls: type) -> _Fields: ... +def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... +def validate(inst: Any) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... 
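In the same spirit, a minimal sketch of the next-generation `define`/`field` API and the helper functions stubbed above (`asdict`, `evolve`, `has`, `fields`, and the validator switches); this assumes Python 3.6+, where the vendored `attr/__init__.py` re-exports `define` and `field`, and the `Job` class is illustrative:

import attr

@attr.define
class Job:
    name: str
    priority: int = 50

j = attr.evolve(Job("render"), priority=90)
print(attr.asdict(j))                       # {'name': 'render', 'priority': 90}
print(attr.has(Job))                        # True
print([a.name for a in attr.fields(Job)])   # ['name', 'priority']

attr.set_run_validators(False)              # globally disable validators
print(attr.get_run_validators())            # False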
+ +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/openpype/vendor/python/python_2/attr/_cmp.py b/openpype/vendor/python/python_2/attr/_cmp.py new file mode 100644 index 0000000000..6cffa4dbab --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_cmp.py @@ -0,0 +1,154 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import functools + +from ._compat import new_class +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. + :param Optional[callable] lt: `callable` used to evaluate whether + one object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether + one object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether + one object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether + one object is greater than or equal to another object. + + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body)) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. + raise ValueError( + "eq must be define is order to complete ordering from " + "lt, le, gt, ge." + ) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. 
+ """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = "__%s__" % (name,) + method.__doc__ = "Return a %s b. Computed by attrs." % ( + _operation_names[name], + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + for func in self._requirements: + if not func(self, other): + return False + return True + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/openpype/vendor/python/python_2/attr/_cmp.pyi b/openpype/vendor/python/python_2/attr/_cmp.pyi new file mode 100644 index 0000000000..e71aaff7a1 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Type + +from . import _CompareWithType + +def cmp_using( + eq: Optional[_CompareWithType], + lt: Optional[_CompareWithType], + le: Optional[_CompareWithType], + gt: Optional[_CompareWithType], + ge: Optional[_CompareWithType], + require_same_type: bool, + class_name: str, +) -> Type: ... diff --git a/openpype/vendor/python/python_2/attr/_compat.py b/openpype/vendor/python/python_2/attr/_compat.py new file mode 100644 index 0000000000..dc0cb02b64 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_compat.py @@ -0,0 +1,261 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import platform +import sys +import threading +import types +import warnings + + +PY2 = sys.version_info[0] == 2 +PYPY = platform.python_implementation() == "PyPy" +PY36 = sys.version_info[:2] >= (3, 6) +HAS_F_STRINGS = PY36 +PY310 = sys.version_info[:2] >= (3, 10) + + +if PYPY or PY36: + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +if PY2: + from collections import Mapping, Sequence + + from UserDict import IterableUserDict + + # We 'bundle' isclass instead of using inspect as importing inspect is + # fairly expensive (order of 10-15 ms for a modern machine in 2016) + def isclass(klass): + return isinstance(klass, (type, types.ClassType)) + + def new_class(name, bases, kwds, exec_body): + """ + A minimal stub of types.new_class that we need for make_class. + """ + ns = {} + exec_body(ns) + + return type(name, bases, ns) + + # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. + TYPE = "type" + + def iteritems(d): + return d.iteritems() + + # Python 2 is bereft of a read-only dict proxy, so we make one! + class ReadOnlyDict(IterableUserDict): + """ + Best-effort read-only dict wrapper. + """ + + def __setitem__(self, key, val): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item assignment" + ) + + def update(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'update'" + ) + + def __delitem__(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item deletion" + ) + + def clear(self): + # We gently pretend we're a Python 3 mappingproxy. 
+ raise AttributeError( + "'mappingproxy' object has no attribute 'clear'" + ) + + def pop(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'pop'" + ) + + def popitem(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'popitem'" + ) + + def setdefault(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'setdefault'" + ) + + def __repr__(self): + # Override to be identical to the Python 3 version. + return "mappingproxy(" + repr(self.data) + ")" + + def metadata_proxy(d): + res = ReadOnlyDict() + res.data.update(d) # We blocked update, so we have to do it like this. + return res + + def just_warn(*args, **kw): # pragma: no cover + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + +else: # Python 3 and later. + from collections.abc import Mapping, Sequence # noqa + + def just_warn(*args, **kw): + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + def isclass(klass): + return isinstance(klass, type) + + TYPE = "class" + + def iteritems(d): + return d.items() + + new_class = types.new_class + + def metadata_proxy(d): + return types.MappingProxyType(dict(d)) + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + if PY2: + co = set_first_cellvar_to.func_code + else: + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + # CPython 3.8+ has an incompatible CodeType signature + # (added a posonlyargcount argument) but also added + # CodeType.replace() to do this without counting parameters. 
+ set_first_freevar_code = co.replace( + co_cellvars=co.co_freevars, co_freevars=co.co_cellvars + ) + else: + args = [co.co_argcount] + if not PY2: + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + if PY2: + cell = make_func_with_cell().func_closure[0] + else: + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() diff --git a/openpype/vendor/python/python_2/attr/_config.py b/openpype/vendor/python/python_2/attr/_config.py new file mode 100644 index 0000000000..fc9be29d00 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_config.py @@ -0,0 +1,33 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. 
+ """ + return _run_validators diff --git a/openpype/vendor/python/python_2/attr/_funcs.py b/openpype/vendor/python/python_2/attr/_funcs.py new file mode 100644 index 0000000000..4c90085a40 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_funcs.py @@ -0,0 +1,422 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy + +from ._compat import iteritems +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(v) + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(val) + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+ :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in iteritems(v) + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. + This function will not be removed du to the slightly different approach + compared to `attrs.evolve`. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in iteritems(changes): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. 
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/openpype/vendor/python/python_2/attr/_make.py b/openpype/vendor/python/python_2/attr/_make.py new file mode 100644 index 0000000000..d46f8a3e7a --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_make.py @@ -0,0 +1,3173 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy +import inspect +import linecache +import sys +import warnings + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + HAS_F_STRINGS, + PY2, + PY310, + PYPY, + isclass, + iteritems, + metadata_proxy, + new_class, + ordered_dict, + set_closure_cell, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + PythonTooOldError, + UnannotatedAttributeError, +) + + +if not PY2: + import typing + + +# This is used at least twice, so cache it here. 
+_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_{}" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = metadata_proxy({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(object): + """ + Sentinel class to indicate the lack of a value when ``None`` is ambiguous. + + ``_Nothing`` is a singleton. There is only ever one of it. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + """ + + _singleton = None + + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super(_Nothing, cls).__new__(cls) + return _Nothing._singleton + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + def __len__(self): + return 0 # __bool__ for Python 2 + + +NOTHING = _Nothing() +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + if PY2: + # For some reason `type(None)` isn't callable in Python 2, but we don't + # actually need a constructor for None objects, we just need any + # available function that returns None. + def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): + return _none_constructor, _args + + else: + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. 
+ + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a `bool` or a `callable` to use a custom function. + + :param eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. To override how the attribute value is compared, + pass a ``callable`` that takes a single value and returns the value + to be compared. + :type eq: a `bool` or a `callable`. + + :param order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. + To override how the attribute value is ordered, + pass a ``callable`` that takes a single value and returns the value + to be ordered. + :type order: a `bool` or a `callable`. + + :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the + same value. Must not be mixed with *eq* or *order*. + :type cmp: a `bool` or a `callable`. + + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to convert attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending_metadata`. + :param type: The type of the attribute. In Python 3.6 or greater, the + preferred method to specify the type is using a variable annotation + (see `PEP 526 `_). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. + + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. 
+ Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs=None): + """ + Create the method with the script given and return the method object. + """ + locs = {} + if globs is None: + globs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = "{}-{}>".format(base_filename[:-1], count) + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = "{}Attributes".format(cls_name) + attr_class_template = [ + "class {}(tuple):".format(attr_class_name), + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + + Requires Python 3. + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _counter_getter(e): + """ + Key function for sorting to avoid re-creating a lambda for every class. + """ + return e[1].counter + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in iteritems(these)] + + if not isinstance(these, ordered_dict): + ca_list.sort(key=_counter_getter) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + "default value or factory. Attribute in question: %r" % (a,) + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. 
+ """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder(object): + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = set(a.name for a in base_attrs) + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + else: + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). 
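+
+    # Illustrative sketch (not from the attrs sources): how the
+    # _frozen_setattrs/_frozen_delattrs helpers above surface through the
+    # public API.  Assumes the `attr` package is importable; the class
+    # name Point is made up for this example.
+    import attr
+
+    @attr.s(frozen=True)
+    class Point(object):
+        x = attr.ib()
+        y = attr.ib()
+
+    p = Point(1, 2)
+    try:
+        p.x = 3                   # routed to the frozen __setattr__
+    except attr.exceptions.FrozenInstanceError:
+        pass                      # mutation is rejected
+    try:
+        del p.y                   # routed to the frozen __delattr__
+    except attr.exceptions.FrozenInstanceError:
+        pass
+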
+ if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = object.__setattr__ + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in iteritems(self._cls_dict) + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = object.__setattr__ + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overriden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in iteritems(existing_slots) + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + qualname = getattr(self._cls, "__qualname__", None) + if qualname is not None: + cd["__qualname__"] = qualname + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . 
On Python 3, + # if a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
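+
+    # Illustrative sketch (not from the attrs sources): the generated
+    # __getstate__/__setstate__ pair above is what lets a slotted attrs
+    # class round-trip through pickle.  Assumes the `attr` package is
+    # importable; the class name Slotted is made up for this example.
+    import pickle
+
+    import attr
+
+    @attr.s(slots=True)
+    class Slotted(object):
+        a = attr.ib()
+        b = attr.ib(default=0)
+
+    original = Slotted(a=1, b=2)
+    restored = pickle.loads(pickle.dumps(original))
+    assert restored == original   # state travels via __getstate__/__setstate__
+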
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = "Method generated by attrs for class %s." % ( + self._cls.__qualname__, + ) + except AttributeError: + pass + + return method + + +_CMP_DEPRECATION = ( + "The usage of `cmp` is deprecated and will be removed on or after " + "2021-06-01. Please use `eq` and `order` instead." +) + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. 
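+
+    # Illustrative sketch (not from the attrs sources): the methods
+    # attached by add_eq()/add_order()/add_hash() compare and hash
+    # instances field by field, as if they were tuples.  Assumes the
+    # `attr` package is importable; the class name Version is made up.
+    import attr
+
+    @attr.s(eq=True, order=True, frozen=True)
+    class Version(object):
+        major = attr.ib()
+        minor = attr.ib()
+
+    assert Version(1, 2) == Version(1, 2)
+    assert Version(1, 2) < Version(1, 10)                 # tuple-style ordering
+    assert hash(Version(1, 2)) == hash(Version(1, 2))     # frozen + eq => __hash__
+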
+ if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + + auto_detect must be False on Python 2. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + A class decorator that adds `dunder + `_\ -methods according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + If *these* is an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the attributes inside *these*. Otherwise the order + of the definition of the attributes is used. 
+ + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). + + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). + + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises + an `attrs.exceptions.PythonTooOldError`. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* + and *order* to the same value. Must not be mixed with *eq* or *order*. + :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method + is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the argument + name. If a ``__attrs_pre_init__`` method exists on the class, it will + be called before the class is initialized. 
If a ``__attrs_post_init__`` + method exists on the class, it will be called after the class is fully + initialized. + + If ``init`` is ``False``, an ``__attrs_init__`` method will be + injected instead. This allows you to define a custom ``__init__`` + method that can do pre-init work such as ``super().__init__()``, + and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. + :param bool slots: Create a `slotted class ` that's more + memory-efficient. Slotted classes are generally superior to the default + dict classes, but have some gotchas you should know about, so we + encourage you to read the `glossary entry `. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated + attributes (Python 3.6 and later only) from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also + works as expected in most cases (see warning below). + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. warning:: + For features that use the attribute name to create decorators (e.g. + `validators `), you still *must* assign `attr.ib` to + them. Otherwise Python will either not find the name or try to use + the default value to call e.g. ``validator`` on it. + + These errors can be quite confusing and probably the most common bug + report on our bug tracker. + + .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. If such changes occur, + the behavior of the object's hash code is undefined. 
+ :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. ``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatibility. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attrs.setters.pipe`. + + :param Optional[callable] field_transformer: + A function that is called with the original class object and all + fields right before ``attrs`` finalizes the class. You can use + this, e.g., to automatically add converters or validators to + fields based on their types. See `transform-fields` for more details. + + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + `PEP 634 `_ (Structural + Pattern Matching). It is a tuple of all positional-only ``__init__`` + parameter names on Python 3.10 and later. Ignored on older Python + versions. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. 
versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + """ + if auto_detect and PY2: + raise PythonTooOldError( + "auto_detect only works on Python 3 and later." + ) + + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + hash_ = hash # work around the lack of nonlocal + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + + if getattr(cls, "__class__", None) is None: + raise TypeError("attrs only works with new-style classes.") + + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + if ( + hash_ is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + else: + hash = hash_ + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." 
+ ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +if PY2: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return ( + getattr(cls.__setattr__, "__module__", None) + == _frozen_setattrs.__module__ + and cls.__setattr__.__name__ == _frozen_setattrs.__name__ + ) + +else: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ == _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. + """ + unique_filename = "".format( + func_name, + cls.__module__, + getattr(cls, "__qualname__", cls.__name__), + ) + return unique_filename + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + if not PY2: + hash_def += ", *" + + hash_def += ( + ", _cache_wrapper=" + + "__import__('attr._make')._make._CacheHashWrapper):" + ) + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + " %d," % (type_hash,), + ] + ) + + for a in attrs: + method_lines.append(indent + " self.%s," % a.name) + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) + if frozen: + append_hash_computation_lines( + "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + "self.%s = " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab + "return self.%s" % _hash_cache_field) + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. 
+ """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + if a.eq_key: + cmp_name = "_%s_key" % (a.name,) + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + " %s(self.%s)," + % ( + cmp_name, + a.name, + ) + ) + others.append( + " %s(other.%s)," + % ( + cmp_name, + a.name, + ) + ) + else: + lines.append(" self.%s," % (a.name,)) + others.append(" other.%s," % (a.name,)) + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +if HAS_F_STRINGS: + + def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r + for name, r, _ in attr_names_with_reprs + if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." 
+ name + if i + else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = ( + '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + ) + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + " return f'%s(%s)'" % (cls_name_fragment, repr_fragment), + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + +else: + + def _make_repr(attrs, ns, _): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the + full name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. + """ + try: + already_repring = _compat.repr_context.already_repring + except AttributeError: + already_repring = set() + _compat.repr_context.already_repring = already_repring + + if id(self) in already_repring: + return "..." + real_cls = self.__class__ + if ns is None: + qualname = getattr(real_cls, "__qualname__", None) + if qualname is not None: # pragma: no cover + # This case only happens on Python 3.5 and 3.6. We exclude + # it from coverage, because we don't want to slow down our + # test suite by running them under coverage too for this + # one line. + class_name = qualname.rsplit(">.", 1)[-1] + else: + class_name = real_cls.__name__ + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) + # for the duration of this call, it's safe to depend on id(...) + # stability, and not need to track the instance and therefore + # worry about properties like weakref- or hash-ability. + already_repring.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + already_repring.remove(id(self)) + + return __repr__ + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. 
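+
+    An illustrative example (``C`` is a stand-in class for this sketch,
+    not part of attrs itself)::
+
+        >>> import attr
+        >>> @attr.s
+        ... class C(object):
+        ...     x = attr.ib()
+        ...     y = attr.ib(default=0)
+        >>> attr.fields(C)[0].name
+        'x'
+        >>> attr.fields(C).y is attr.fields(C)[1]
+        True
+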
+ """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: an ordered dict where keys are attribute names and values are + `attrs.Attribute`\\ s. This will be a `dict` if it's + naturally ordered like on Python 3.6+ or an + :class:`~collections.OrderedDict` otherwise. + + .. versionadded:: 18.1.0 + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return ordered_dict(((a.name, a) for a in attrs)) + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr"] = _obj_setattr + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. 
+ """ + return "_setattr('%s', %s)" % (attr_name, value_var) + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return "self.%s = %s" % (attr_name, value) + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +if PY2: + + def _unpack_kw_only_py2(attr_name, default=None): + """ + Unpack *attr_name* from _kw_only dict. + """ + if default is not None: + arg_default = ", %s" % default + else: + arg_default = "" + return "%s = _kw_only.pop('%s'%s)" % ( + attr_name, + attr_name, + arg_default, + ) + + def _unpack_kw_only_lines_py2(kw_only_args): + """ + Unpack all *kw_only_args* from _kw_only dict and handle errors. + + Given a list of strings "{attr_name}" and "{attr_name}={default}" + generates list of lines of code that pop attrs from _kw_only dict and + raise TypeError similar to builtin if required attr is missing or + extra key is passed. + + >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) + try: + a = _kw_only.pop('a') + b = _kw_only.pop('b', 42) + except KeyError as _key_error: + raise TypeError( + ... + if _kw_only: + raise TypeError( + ... + """ + lines = ["try:"] + lines.extend( + " " + _unpack_kw_only_py2(*arg.split("=")) + for arg in kw_only_args + ) + lines += """\ +except KeyError as _key_error: + raise TypeError( + '__init__() missing required keyword-only argument: %s' % _key_error + ) +if _kw_only: + raise TypeError( + '__init__() got an unexpected keyword argument %r' + % next(iter(_kw_only)) + ) +""".split( + "\n" + ) + return lines + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. 
+ # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return "_inst_dict['%s'] = %s" % (attr_name, value_var) + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + arg_name = a.name.lstrip("_") + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = "%s=NOTHING" % (arg_name,) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append("if %s is not NOTHING:" % (arg_name,)) + + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self 
+ ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None and not PY2: + # Try to get the type from the converter. + sig = None + try: + sig = inspect.signature(a.converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + sig_params = list(sig.parameters.values()) + if ( + sig_params + and sig_params[0].annotation + is not inspect.Parameter.empty + ): + annotations[arg_name] = sig_params[0].annotation + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append( + " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) + ) + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join("self." + a.name for a in attrs if a.init) + + lines.append("BaseException.__init__(self, %s)" % (vals,)) + + args = ", ".join(args) + if kw_only_args: + if PY2: + lines = _unpack_kw_only_lines_py2(kw_only_args) + lines + + args += "%s**_kw_only" % (", " if args else "",) # leading comma + else: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + return ( + """\ +def {init_name}(self, {args}): + {lines} +""".format( + init_name=("__attrs_init__" if attrs_init else "__init__"), + args=args, + lines="\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +class Attribute(object): + """ + *Read-only* representation of an attribute. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. 
See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self, Attribute) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + metadata_proxy(metadata) + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict + ) + + @property + def cmp(self): + """ + Simulate the presence of a cmp attribute and warn. + """ + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) + + return self.eq and self.order + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. 
+ """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + metadata_proxy(value) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr(object): + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + ) + ) + ( + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory(object): + """ + Stores a factory callable. 
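+
+    A minimal sketch of the usual pattern (the ``Config`` class and ``tags``
+    attribute are illustrative only)::
+
+        import attr
+
+        @attr.s
+        class Config(object):
+            # Each instance gets its own fresh list instead of a shared one.
+            tags = attr.ib(default=attr.Factory(list))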
+ + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the names or attributes inside *attrs*. Otherwise the + order of the definition of the attributes is used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = dict((a, attrib()) for a in attrs) + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. 
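+    # "cmp" is the legacy alias for "eq"/"order"; it is translated here,
+    # before the arguments reach _attrs() below, so any warning points at the
+    # make_class() caller rather than at attrs internals.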
+ cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, hash=True) +class _AndValidator(object): + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if + they have any. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + if not PY2: + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type. + sig = None + try: + sig = inspect.signature(converters[0]) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if ( + params + and params[0].annotation is not inspect.Parameter.empty + ): + pipe_converter.__annotations__["val"] = params[ + 0 + ].annotation + # Get return type. + sig = None + try: + sig = inspect.signature(converters[-1]) + except (ValueError, TypeError): # inspect failed + pass + if sig and sig.return_annotation is not inspect.Signature().empty: + pipe_converter.__annotations__[ + "return" + ] = sig.return_annotation + + return pipe_converter diff --git a/openpype/vendor/python/python_2/attr/_next_gen.py b/openpype/vendor/python/python_2/attr/_next_gen.py new file mode 100644 index 0000000000..068253688c --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_next_gen.py @@ -0,0 +1,216 @@ +# SPDX-License-Identifier: MIT + +""" +These are Python 3.6+-only and keyword-only APIs that call `attr.s` and +`attr.ib` with different default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + Define an ``attrs`` class. 
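+
+    A minimal sketch (the ``Point`` class and its fields are illustrative
+    only)::
+
+        import attr
+
+        @attr.define
+        class Point:
+            x: int
+            y: int = 0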
+ + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` + (c.f. *auto_attribs* parameter). + - If *frozen* is `False`, run converters and validators when setting an + attribute by default. + - *slots=True* (see :term:`slotted classes` for potentially surprising + behaviors) + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - *match_args=True* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed. + + Please note that these are all defaults and you can change them as you + wish. + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. 
versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/openpype/vendor/python/python_2/attr/_version_info.py b/openpype/vendor/python/python_2/attr/_version_info.py new file mode 100644 index 0000000000..cdaeec37a1 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_version_info.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo(object): + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/openpype/vendor/python/python_2/attr/_version_info.pyi b/openpype/vendor/python/python_2/attr/_version_info.pyi new file mode 100644 index 0000000000..45ced08633 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... 
+ @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/openpype/vendor/python/python_2/attr/converters.py b/openpype/vendor/python/python_2/attr/converters.py new file mode 100644 index 0000000000..1fb6c05d7b --- /dev/null +++ b/openpype/vendor/python/python_2/attr/converters.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import PY2 +from ._make import NOTHING, Factory, pipe + + +if not PY2: + import inspect + import typing + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + if not PY2: + sig = None + try: + sig = inspect.signature(converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + optional_converter.__annotations__["val"] = typing.Optional[ + params[0].annotation + ] + if sig.return_annotation is not inspect.Signature.empty: + optional_converter.__annotations__["return"] = typing.Optional[ + sig.return_annotation + ] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." + ) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + raise ValueError( + "`takes_self` is not supported by default_if_none." + ) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. + + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + .. 
versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + raise ValueError("Cannot convert value to bool: {}".format(val)) diff --git a/openpype/vendor/python/python_2/attr/converters.pyi b/openpype/vendor/python/python_2/attr/converters.pyi new file mode 100644 index 0000000000..0f58088a37 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, Optional, TypeVar, overload + +from . import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... diff --git a/openpype/vendor/python/python_2/attr/exceptions.py b/openpype/vendor/python/python_2/attr/exceptions.py new file mode 100644 index 0000000000..b2f1edc32a --- /dev/null +++ b/openpype/vendor/python/python_2/attr/exceptions.py @@ -0,0 +1,94 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an ``attrs`` feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. 
versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/openpype/vendor/python/python_2/attr/exceptions.pyi b/openpype/vendor/python/python_2/attr/exceptions.pyi new file mode 100644 index 0000000000..f2680118b4 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/openpype/vendor/python/python_2/attr/filters.py b/openpype/vendor/python/python_2/attr/filters.py new file mode 100644 index 0000000000..a1978a8775 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/filters.py @@ -0,0 +1,54 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import isclass +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isclass(cls)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. + :type what: `list` of `type` or `attrs.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/openpype/vendor/python/python_2/attr/filters.pyi b/openpype/vendor/python/python_2/attr/filters.pyi new file mode 100644 index 0000000000..993866865e --- /dev/null +++ b/openpype/vendor/python/python_2/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/openpype/vendor/python/python_2/attr/py.typed b/openpype/vendor/python/python_2/attr/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/vendor/python/python_2/attr/setters.py b/openpype/vendor/python/python_2/attr/setters.py new file mode 100644 index 0000000000..b1cbb5d83e --- /dev/null +++ b/openpype/vendor/python/python_2/attr/setters.py @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + +from __future__ import absolute_import, division, print_function + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. 
versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +NO_OP = object() +""" +Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. + +Does not work in `pipe` or within lists. + +.. versionadded:: 20.1.0 +""" diff --git a/openpype/vendor/python/python_2/attr/setters.pyi b/openpype/vendor/python/python_2/attr/setters.pyi new file mode 100644 index 0000000000..3f5603c2b0 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar, cast + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/openpype/vendor/python/python_2/attr/validators.py b/openpype/vendor/python/python_2/attr/validators.py new file mode 100644 index 0000000000..0b0c8342f2 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/validators.py @@ -0,0 +1,561 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + +from __future__ import absolute_import, division, print_function + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. 
versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator(object): + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator(object): + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {pattern!r}" + " ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ), + attr, + self.pattern, + value, + ) + + def __repr__(self): + return "".format( + pattern=self.pattern + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call (options + are `re.fullmatch`, `re.search`, `re.match`, default + is ``None`` which means either `re.fullmatch` or an emulation of + it on Python 2). For performance reasons, they won't be used directly + but on a pre-`re.compile`\ ed pattern. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. 
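+
+    A minimal usage sketch (the ``User`` class and ``email`` attribute are
+    illustrative only)::
+
+        import attr
+
+        @attr.s
+        class User(object):
+            email = attr.ib(
+                validator=attr.validators.matches_re(r"[^@]+@[^@]+")
+            )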
+ """ + fullmatch = getattr(re, "fullmatch", None) + valid_funcs = (fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of {}.".format( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ) + ) + ) + + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + elif fullmatch: + match_func = pattern.fullmatch + else: # Python 2 fullmatch emulation (https://bugs.python.org/issue16203) + pattern = re.compile( + r"(?:{})\Z".format(pattern.pattern), pattern.flags + ) + match_func = pattern.match + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator(object): + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. + """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator(object): + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator(object): + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ) + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. 
The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... + + :raises ValueError: With a human readable error message, the attribute (of + type `attrs.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator(object): + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attr.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises `attr.exceptions.NotCallableError`: With a human readable error + message containing the attribute (`attrs.Attribute`) name, + and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable(object): + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else " {iterable!r}".format(iterable=self.iterable_validator) + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping(object): + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. 
versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator(object): + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator(object): + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + raise ValueError( + "Length of '{name}' must be <= {max}: {len}".format( + name=attr.name, max=self.max_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(max=self.max_length) + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + :param int length: Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) diff --git a/openpype/vendor/python/python_2/attr/validators.pyi b/openpype/vendor/python/python_2/attr/validators.pyi new file mode 100644 index 0000000000..5e00b85433 --- /dev/null +++ b/openpype/vendor/python/python_2/attr/validators.pyi @@ -0,0 +1,78 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... 
+@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... diff --git a/openpype/vendor/python/python_2/attrs/__init__.py b/openpype/vendor/python/python_2/attrs/__init__.py new file mode 100644 index 0000000000..a704b8b56b --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/__init__.py @@ -0,0 +1,70 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/openpype/vendor/python/python_2/attrs/__init__.pyi b/openpype/vendor/python/python_2/attrs/__init__.pyi new file mode 100644 index 0000000000..7426fa5ddb --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/__init__.pyi @@ -0,0 +1,63 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
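+# The "from attr import X as X" spelling marks each name as an explicit
+# re-export, which strict type checkers require before treating the name as
+# part of this stub's public interface.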
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import define as define +from attr import evolve as evolve +from attr import Factory as Factory +from attr import exceptions as exceptions +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... diff --git a/openpype/vendor/python/python_2/attrs/converters.py b/openpype/vendor/python/python_2/attrs/converters.py new file mode 100644 index 0000000000..edfa8d3c16 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/exceptions.py b/openpype/vendor/python/python_2/attrs/exceptions.py new file mode 100644 index 0000000000..bd9efed202 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/filters.py b/openpype/vendor/python/python_2/attrs/filters.py new file mode 100644 index 0000000000..52959005b0 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/py.typed b/openpype/vendor/python/python_2/attrs/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/vendor/python/python_2/attrs/setters.py b/openpype/vendor/python/python_2/attrs/setters.py new file mode 100644 index 0000000000..9b50770804 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/openpype/vendor/python/python_2/attrs/validators.py b/openpype/vendor/python/python_2/attrs/validators.py new file mode 100644 index 0000000000..ab2c9b3024 --- /dev/null +++ b/openpype/vendor/python/python_2/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa From 
c6383837e0c094a4172c6895db768a3d3ccebc34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Mon, 15 Aug 2022 14:00:46 +0200 Subject: [PATCH 0389/2550] :recycle: remove unnecessary type hint Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- igniter/bootstrap_repos.py | 1 - 1 file changed, 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index dfcca2cf33..c5003b062e 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -614,7 +614,6 @@ class OpenPypeVersion(semver.VersionInfo): return None all_versions.sort() - latest_version: OpenPypeVersion return all_versions[-1] @classmethod From 9cfa2e12e388be7f6910d97a54c12be5aa452e07 Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Mon, 15 Aug 2022 14:20:11 +0200 Subject: [PATCH 0390/2550] reviews fix and clean - bugfix with staticmethod --- .../publish/validate_camera_zero_keyframe.py | 12 ++++------ .../plugins/publish/validate_mesh_has_uv.py | 24 +++++++------------ .../validate_mesh_no_negative_scale.py | 10 ++++---- .../publish/validate_no_colons_in_name.py | 16 ++++++------- .../plugins/publish/validate_object_mode.py | 17 +++++++------ .../publish/validate_transform_zero.py | 11 ++++----- 6 files changed, 40 insertions(+), 50 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py index ea45318219..5ba4808875 100644 --- a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py +++ b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py @@ -1,11 +1,10 @@ from typing import List -import mathutils import bpy import pyblish.api -from openpype.api import ValidateContentsOrder -from openpype.hosts.blender.api.action import SelectInvalidAction +import openpype.api +import openpype.hosts.blender.api.action class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): @@ -16,18 +15,17 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): in Unreal and Blender. 
""" - order = ValidateContentsOrder + order = openpype.api.ValidateContentsOrder hosts = ["blender"] families = ["camera"] - category = "geometry" version = (0, 1, 0) label = "Zero Keyframe" - actions = [SelectInvalidAction] + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] @staticmethod def get_invalid(instance) -> List: invalid = [] - for obj in set(instance): + for obj in instance: if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA": if obj.animation_data and obj.animation_data.action: action = obj.animation_data.action diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py index d87b4ff1ef..1a52b3f851 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py @@ -3,19 +3,19 @@ from typing import List import bpy import pyblish.api -from openpype.api import ValidateContentsOrder -from openpype.hosts.blender.api.action import SelectInvalidAction +import openpype.api +import openpype.hosts.blender.api.action class ValidateMeshHasUvs(pyblish.api.InstancePlugin): """Validate that the current mesh has UV's.""" - order = ValidateContentsOrder + order = openpype.api.ValidateContentsOrder hosts = ["blender"] families = ["model"] - category = "uv" + category = "geometry" label = "Mesh Has UV's" - actions = [SelectInvalidAction] + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] optional = True @staticmethod @@ -34,16 +34,10 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance) -> List: invalid = [] - for obj in set(instance): - try: - if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': - if obj.mode != 'OBJECT': - # Make sure we are in object mode. 
- bpy.ops.object.mode_set(mode='OBJECT') - if not cls.has_uvs(obj): - invalid.append(obj) - except RuntimeError: - continue + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': + if not cls.has_uvs(obj): + invalid.append(obj) return invalid def process(self, instance): diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 449e711663..3c5c7c11eb 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -3,24 +3,24 @@ from typing import List import bpy import pyblish.api -from openpype.api import ValidateContentsOrder -from openpype.hosts.blender.api.action import SelectInvalidAction +import openpype.api +import openpype.hosts.blender.api.action class ValidateMeshNoNegativeScale(pyblish.api.Validator): """Ensure that meshes don't have a negative scale.""" - order = ValidateContentsOrder + order = openpype.api.ValidateContentsOrder hosts = ["blender"] families = ["model"] category = "geometry" label = "Mesh No Negative Scale" - actions = [SelectInvalidAction] + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] @staticmethod def get_invalid(instance) -> List: invalid = [] - for obj in set(instance): + for obj in instance: if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': if any(v < 0 for v in obj.scale): invalid.append(obj) diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py index f1889e5837..daf35c61ac 100644 --- a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py @@ -3,8 +3,8 @@ from typing import List import bpy import pyblish.api -from openpype.api import ValidateContentsOrder -from openpype.hosts.blender.api.action import SelectInvalidAction +import openpype.api +import openpype.hosts.blender.api.action class ValidateNoColonsInName(pyblish.api.InstancePlugin): @@ -15,18 +15,17 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + order = openpype.api.ValidateContentsOrder hosts = ["blender"] families = ["model", "rig"] - category = "cleanup" version = (0, 1, 0) label = "No Colons in names" - actions = [SelectInvalidAction] + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] @staticmethod - def get_invalid(cls, instance) -> List: + def get_invalid(instance) -> List: invalid = [] - for obj in set(instance): + for obj in instance: if ':' in obj.name: invalid.append(obj) if isinstance(obj, bpy.types.Object) and obj.type == 'ARMATURE': @@ -40,4 +39,5 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Objects found with colon in name: {invalid}") + f"Objects found with colon in name: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_object_mode.py b/openpype/hosts/blender/plugins/publish/validate_object_mode.py index 65b0bf7655..ac60e00f89 100644 --- a/openpype/hosts/blender/plugins/publish/validate_object_mode.py +++ b/openpype/hosts/blender/plugins/publish/validate_object_mode.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -from openpype.hosts.blender.api.action import SelectInvalidAction +import 
openpype.hosts.blender.api.action class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin): @@ -12,22 +12,21 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder - 0.01 hosts = ["blender"] families = ["model", "rig", "layout"] - category = "cleanup" label = "Validate Object Mode" - actions = [SelectInvalidAction] + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] optional = False @staticmethod - def get_invalid(cls, instance) -> List: + def get_invalid(instance) -> List: invalid = [] - for obj in set(instance): - if isinstance(obj, bpy.types.Object): - if not obj.mode == 'OBJECT': - invalid.append(obj) + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.mode != "OBJECT": + invalid.append(obj) return invalid def process(self, instance): invalid = self.get_invalid(instance) if invalid: raise RuntimeError( - f"Object found in instance is not in Object Mode: {invalid}") + f"Object found in instance is not in Object Mode: {invalid}" + ) diff --git a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py index 7443e3c64e..6e03094794 100644 --- a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -4,8 +4,8 @@ import mathutils import bpy import pyblish.api -from openpype.api import ValidateContentsOrder -from openpype.hosts.blender.api.action import SelectInvalidAction +import openpype.api +import openpype.hosts.blender.api.action class ValidateTransformZero(pyblish.api.InstancePlugin): @@ -17,20 +17,19 @@ class ValidateTransformZero(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + order = openpype.api.ValidateContentsOrder hosts = ["blender"] families = ["model"] - category = "cleanup" version = (0, 1, 0) label = "Transform Zero" - actions = [SelectInvalidAction] + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] _identity = mathutils.Matrix() @classmethod def get_invalid(cls, instance) -> List: invalid = [] - for obj in set(instance): + for obj in instance: if ( isinstance(obj, bpy.types.Object) and obj.matrix_basis != cls._identity From 403f5ddfc9cc754a13a3419b06260a34a8f682c6 Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Mon, 15 Aug 2022 15:11:25 +0200 Subject: [PATCH 0391/2550] fix mesh uv validator with editmode --- .../blender/plugins/publish/validate_mesh_has_uv.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py index 1a52b3f851..83146c641e 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py @@ -26,7 +26,10 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): for uv_layer in obj.data.uv_layers: for polygon in obj.data.polygons: for loop_index in polygon.loop_indices: - if not uv_layer.data[loop_index].uv: + if ( + loop_index >= len(uv_layer.data) + or not uv_layer.data[loop_index].uv + ): return False return True @@ -36,6 +39,11 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): invalid = [] for obj in instance: if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': + if obj.mode != "OBJECT": + cls.log.warning( + f"Mesh object {obj.name} should be in 'OBJECT' mode" + " to be properly checked." 
+ ) if not cls.has_uvs(obj): invalid.append(obj) return invalid From 5cb99f5209d7aff8b81982dcb5ba0b604f6557b1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 15 Aug 2022 15:55:13 +0200 Subject: [PATCH 0392/2550] Move set render settings menu entry --- openpype/hosts/maya/api/menu.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index b7ab529a55..b4511571fb 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -99,13 +99,6 @@ def install(): cmds.menuItem(divider=True) - cmds.menuItem( - "Set Render Settings", - command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa - ) - - cmds.menuItem(divider=True) - cmds.menuItem( "Work Files...", command=lambda *args: host_tools.show_workfiles( @@ -127,6 +120,12 @@ def install(): "Set Colorspace", command=lambda *args: lib.set_colorspace(), ) + + cmds.menuItem( + "Set Render Settings", + command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings() # noqa + ) + cmds.menuItem(divider=True, parent=MENU_NAME) cmds.menuItem( "Build First Workfile", From 4504078481da5cf0ebf7128cf7fdc3062d3c926e Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Mon, 15 Aug 2022 16:28:33 +0200 Subject: [PATCH 0393/2550] publisher collect current workfile as publishable representation --- .../plugins/publish/collect_current_file.py | 71 +++++++++++++++++-- 1 file changed, 67 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/collect_current_file.py b/openpype/hosts/blender/plugins/publish/collect_current_file.py index 72976c490b..1ca28f67f6 100644 --- a/openpype/hosts/blender/plugins/publish/collect_current_file.py +++ b/openpype/hosts/blender/plugins/publish/collect_current_file.py @@ -1,6 +1,23 @@ +import os import bpy import pyblish.api +from openpype.pipeline import legacy_io +from openpype.hosts.blender.api import workio + + +class SaveWorkfiledAction(pyblish.api.Action): + """Save Workfile.""" + label = "Save Workfile" + on = "failed" + icon = "save" + + def process(self, context, plugin): + current_file = workio.current_file() + if current_file: + workio.save_file(current_file) + else: + bpy.ops.wm.avalon_workfiles() class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): @@ -8,12 +25,58 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.5 label = "Blender Current File" - hosts = ['blender'] + hosts = ["blender"] + actions = [SaveWorkfiledAction] def process(self, context): """Inject the current working file""" - current_file = bpy.data.filepath - context.data['currentFile'] = current_file + current_file = workio.current_file() + has_unsaved_changes = workio.has_unsaved_changes() - assert current_file != '', "Current file is empty. " \ + context.data["currentFile"] = current_file + + assert current_file, ( + "Current file is empty. Save the file before continuing." + ) + + assert not has_unsaved_changes, ( + "Current file has unsaved changes. " "Save the file before continuing." 
+ ) + + folder, file = os.path.split(current_file) + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = "workfile" + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": "workfile", + "families": ["workfile"], + "setMembers": [current_file], + "frameStart": bpy.context.scene.frame_start, + "frameEnd": bpy.context.scene.frame_end, + }) + + data["representations"] = [{ + "name": ext.lstrip("."), + "ext": ext.lstrip("."), + "files": file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info("Collected instance: {}".format(file)) + self.log.info("Scene path: {}".format(current_file)) + self.log.info("staging Dir: {}".format(folder)) + self.log.info("subset: {}".format(subset)) From e5b1cc59bdccc2175364ae24cdddb7eb40a7c2f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Mon, 15 Aug 2022 17:37:40 +0200 Subject: [PATCH 0394/2550] :bug: missing comma Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index cece8ee22b..67b5f2496b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -45,7 +45,7 @@ def get_ocio_config_path(profile_folder): os.environ["OPENPYPE_ROOT"], "vendor", "bin", - "ocioconfig" + "ocioconfig", "OpenColorIOConfigs", profile_folder, "config.ocio" From 4193b54700c42405ffa22e5353985202ce858ee2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 18:36:33 +0200 Subject: [PATCH 0395/2550] added more information when auto sync is turned on/off --- .../event_sync_to_avalon.py | 41 ++++++++++++++++--- 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index a4e791aaf0..738181dc9a 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -697,13 +697,22 @@ class SyncToAvalonEvent(BaseEvent): continue auto_sync = changes[CUST_ATTR_AUTO_SYNC]["new"] - if auto_sync == "1": + turned_on = auto_sync == "1" + ft_project = self.cur_project + username = self._get_username(session, event) + message = ( + "Auto sync was turned {} for project \"{}\" by \"{}\"." + ).format( + "on" if turned_on else "off", + ft_project["full_name"], + username + ) + if turned_on: + message += " Triggering syncToAvalon action." + self.log.debug(message) + + if turned_on: # Trigger sync to avalon action if auto sync was turned on - ft_project = self.cur_project - self.log.debug(( - "Auto sync was turned on for project <{}>." - " Triggering syncToAvalon action." 
- ).format(ft_project["full_name"])) selection = [{ "entityId": ft_project["id"], "entityType": "show" @@ -851,6 +860,26 @@ class SyncToAvalonEvent(BaseEvent): self.report() return True + def _get_username(self, session, event): + username = "Unknown" + event_source = event.get("source") + if not event_source: + return username + user_info = event_source.get("user") + if not user_info: + return username + user_id = user_info.get("id") + if not user_id: + return username + + user_entity = session.query( + "User where id is {}".format(user_id) + ).first() + if user_entity: + username = user_entity["username"] or username + return username + + def process_removed(self): """ Handles removed entities (not removed tasks - handle separately). From 61e8d7e9f1fbffd91e268ef3ff721cc136395f27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 19:14:01 +0200 Subject: [PATCH 0396/2550] use 'get_projects' instead of 'projects' method on AvalonMongoDB --- .../modules/kitsu/utils/update_zou_with_op.py | 8 +++- .../modules/sync_server/sync_server_module.py | 9 ++-- openpype/tools/launcher/models.py | 3 +- openpype/tools/libraryloader/app.py | 4 +- .../project_manager/project_manager/model.py | 7 +--- openpype/tools/sceneinventory/window.py | 6 +-- openpype/tools/utils/models.py | 41 ++++++++++--------- 7 files changed, 39 insertions(+), 39 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index 57d7094e95..da924aa5ee 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -6,7 +6,11 @@ from typing import List import gazu from pymongo import UpdateOne -from openpype.client import get_project, get_assets +from openpype.client import ( + get_projects, + get_project, + get_assets, +) from openpype.pipeline import AvalonMongoDB from openpype.api import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials @@ -37,7 +41,7 @@ def sync_zou(login: str, password: str): dbcon = AvalonMongoDB() dbcon.install() - op_projects = [p for p in dbcon.projects()] + op_projects = list(get_projects()) for project_doc in op_projects: sync_zou_from_op_project(project_doc["name"], dbcon, project_doc) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 8fdfab9c2e..c7f9484e55 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -6,7 +6,7 @@ import platform import copy from collections import deque, defaultdict - +from openpype.client import get_projects from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule from openpype.settings import ( @@ -913,7 +913,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): enabled_projects = [] if self.enabled: - for project in self.connection.projects(projection={"name": 1}): + for project in get_projects(fields=["name"]): project_name = project["name"] if self.is_project_enabled(project_name): enabled_projects.append(project_name) @@ -1242,10 +1242,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def _prepare_sync_project_settings(self, exclude_locals): sync_project_settings = {} system_sites = self.get_all_site_configs() - project_docs = self.connection.projects( - projection={"name": 1}, - only_active=True - ) + project_docs = get_projects(fields=["name"]) for project_doc in project_docs: project_name = 
project_doc["name"] sites = copy.deepcopy(system_sites) # get all configured sites diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 3f899cc05e..6d40d21f96 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -10,6 +10,7 @@ from Qt import QtCore, QtGui import qtawesome from openpype.client import ( + get_projects, get_project, get_assets, ) @@ -527,7 +528,7 @@ class LauncherModel(QtCore.QObject): current_project = self.project_name project_names = set() project_docs_by_name = {} - for project_doc in self._dbcon.projects(only_active=True): + for project_doc in get_projects(): project_name = project_doc["name"] project_names.add(project_name) project_docs_by_name[project_name] = project_doc diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py index 5f4d10d796..d2af1b7151 100644 --- a/openpype/tools/libraryloader/app.py +++ b/openpype/tools/libraryloader/app.py @@ -3,7 +3,7 @@ import sys from Qt import QtWidgets, QtCore, QtGui from openpype import style -from openpype.client import get_project +from openpype.client import get_projects, get_project from openpype.pipeline import AvalonMongoDB from openpype.tools.utils import lib as tools_lib from openpype.tools.loader.widgets import ( @@ -239,7 +239,7 @@ class LibraryLoaderWindow(QtWidgets.QDialog): def get_filtered_projects(self): projects = list() - for project in self.dbcon.projects(): + for project in get_projects(fields=["name", "data.library_project"]): is_library = project.get("data", {}).get("library_project", False) if ( (is_library and self.show_libraries) or diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index c5bde5aaec..3aaee75698 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -8,6 +8,7 @@ from pymongo import UpdateOne, DeleteOne from Qt import QtCore, QtGui from openpype.client import ( + get_projects, get_project, get_assets, get_asset_ids_with_subsets, @@ -54,12 +55,8 @@ class ProjectModel(QtGui.QStandardItemModel): self._items_by_name[None] = none_project new_project_items.append(none_project) - project_docs = self.dbcon.projects( - projection={"name": 1}, - only_active=True - ) project_names = set() - for project_doc in project_docs: + for project_doc in get_projects(fields=["name"]): project_name = project_doc.get("name") if not project_name: continue diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py index 054c2a2daa..463280b71c 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -4,8 +4,9 @@ import sys from Qt import QtWidgets, QtCore import qtawesome -from openpype.pipeline import legacy_io from openpype import style +from openpype.client import get_projects +from openpype.pipeline import legacy_io from openpype.tools.utils.delegates import VersionDelegate from openpype.tools.utils.lib import ( qt_app_context, @@ -195,8 +196,7 @@ def show(root=None, debug=False, parent=None, items=None): if not os.environ.get("AVALON_PROJECT"): any_project = next( - project for project in legacy_io.projects() - if project.get("active", True) is not False + project for project in get_projects() ) project_name = any_project["name"] diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 8991614fe1..1faccef4dd 100644 --- 
a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -3,6 +3,7 @@ import logging import Qt from Qt import QtCore, QtGui +from openpype.client import get_projects from .constants import ( PROJECT_IS_ACTIVE_ROLE, PROJECT_NAME_ROLE, @@ -296,29 +297,29 @@ class ProjectModel(QtGui.QStandardItemModel): self._default_item = item project_names = set() - if self.dbcon is not None: - for project_doc in self.dbcon.projects( - projection={"name": 1, "data.active": 1}, - only_active=self._only_active - ): - project_name = project_doc["name"] - project_names.add(project_name) - if project_name in self._items_by_name: - item = self._items_by_name[project_name] - else: - item = QtGui.QStandardItem(project_name) + project_docs = get_projects( + inactive=not self._only_active, + fields=["name", "data.active"] + ) + for project_doc in project_docs: + project_name = project_doc["name"] + project_names.add(project_name) + if project_name in self._items_by_name: + item = self._items_by_name[project_name] + else: + item = QtGui.QStandardItem(project_name) - self._items_by_name[project_name] = item - new_items.append(item) + self._items_by_name[project_name] = item + new_items.append(item) - is_active = project_doc.get("data", {}).get("active", True) - item.setData(project_name, PROJECT_NAME_ROLE) - item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) + is_active = project_doc.get("data", {}).get("active", True) + item.setData(project_name, PROJECT_NAME_ROLE) + item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) - if not is_active: - font = item.font() - font.setItalic(True) - item.setFont(font) + if not is_active: + font = item.font() + font.setItalic(True) + item.setFont(font) root_item = self.invisibleRootItem() for project_name in tuple(self._items_by_name.keys()): From 260ef9999516d437ad399a0b746ff73632106314 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 19:15:42 +0200 Subject: [PATCH 0397/2550] removed unused code --- openpype/tools/utils/lib.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 2169cf8ef1..99d8c75ab4 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -443,10 +443,6 @@ class FamilyConfigCache: if profiles: # Make sure connection is installed # - accessing attribute which does not have auto-install - self.dbcon.install() - database = getattr(self.dbcon, "database", None) - if database is None: - database = self.dbcon._database asset_doc = get_asset_by_name( project_name, asset_name, fields=["data.tasks"] ) or {} From e86ea84da897f0ecd6608bdfff460c742a10042e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 19:18:37 +0200 Subject: [PATCH 0398/2550] use 'get_projects' in standalone publisher --- openpype/tools/standalonepublish/widgets/widget_asset.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_asset.py b/openpype/tools/standalonepublish/widgets/widget_asset.py index 73114f7960..77d756a606 100644 --- a/openpype/tools/standalonepublish/widgets/widget_asset.py +++ b/openpype/tools/standalonepublish/widgets/widget_asset.py @@ -3,6 +3,7 @@ from Qt import QtWidgets, QtCore import qtawesome from openpype.client import ( + get_projects, get_project, get_asset_by_id, ) @@ -291,9 +292,7 @@ class AssetWidget(QtWidgets.QWidget): def _set_projects(self): project_names = list() - for doc in self.dbcon.projects(projection={"name": 1}, - only_active=True): - + for doc in 
get_projects(fields=["name"]): project_name = doc.get("name") if project_name: project_names.append(project_name) @@ -320,8 +319,7 @@ class AssetWidget(QtWidgets.QWidget): def on_project_change(self): projects = list() - for project in self.dbcon.projects(projection={"name": 1}, - only_active=True): + for project in get_projects(fields=["name"]): projects.append(project['name']) project_name = self.combo_projects.currentText() if project_name in projects: From 68ca5898920d79c93bc51e699f319e5987019063 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Aug 2022 19:18:50 +0200 Subject: [PATCH 0399/2550] use 'get_projects' in settings --- openpype/tools/settings/settings/widgets.py | 60 ++++++++------------- 1 file changed, 22 insertions(+), 38 deletions(-) diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 88d923c16a..1a4a6877b0 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -3,6 +3,7 @@ import uuid from Qt import QtWidgets, QtCore, QtGui import qtawesome +from openpype.client import get_projects from openpype.pipeline import AvalonMongoDB from openpype.style import get_objected_colors from openpype.tools.utils.widgets import ImageButton @@ -783,8 +784,6 @@ class ProjectModel(QtGui.QStandardItemModel): self.setColumnCount(2) - self.dbcon = None - self._only_active = only_active self._default_item = None self._items_by_name = {} @@ -828,9 +827,6 @@ class ProjectModel(QtGui.QStandardItemModel): index = self.index(index.row(), 0, index.parent()) return super(ProjectModel, self).flags(index) - def set_dbcon(self, dbcon): - self.dbcon = dbcon - def refresh(self): # Change id of versions refresh self._version_refresh_id = uuid.uuid4() @@ -846,31 +842,30 @@ class ProjectModel(QtGui.QStandardItemModel): self._default_item.setData("", PROJECT_VERSION_ROLE) project_names = set() - if self.dbcon is not None: - for project_doc in self.dbcon.projects( - projection={"name": 1, "data.active": 1}, - only_active=self._only_active - ): - project_name = project_doc["name"] - project_names.add(project_name) - if project_name in self._items_by_name: - item = self._items_by_name[project_name] - else: - item = QtGui.QStandardItem(project_name) + for project_doc in get_projects( + inactive=not self._only_active, + fields=["name", "data.active"] + ): + project_name = project_doc["name"] + project_names.add(project_name) + if project_name in self._items_by_name: + item = self._items_by_name[project_name] + else: + item = QtGui.QStandardItem(project_name) - self._items_by_name[project_name] = item - new_items.append(item) + self._items_by_name[project_name] = item + new_items.append(item) - is_active = project_doc.get("data", {}).get("active", True) - item.setData(project_name, PROJECT_NAME_ROLE) - item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) - item.setData("", PROJECT_VERSION_ROLE) - item.setData(False, PROJECT_IS_SELECTED_ROLE) + is_active = project_doc.get("data", {}).get("active", True) + item.setData(project_name, PROJECT_NAME_ROLE) + item.setData(is_active, PROJECT_IS_ACTIVE_ROLE) + item.setData("", PROJECT_VERSION_ROLE) + item.setData(False, PROJECT_IS_SELECTED_ROLE) - if not is_active: - font = item.font() - font.setItalic(True) - item.setFont(font) + if not is_active: + font = item.font() + font.setItalic(True) + item.setFont(font) root_item = self.invisibleRootItem() for project_name in tuple(self._items_by_name.keys()): @@ -1067,8 +1062,6 @@ class 
ProjectListWidget(QtWidgets.QWidget): self.project_model = project_model self.inactive_chk = inactive_chk - self.dbcon = None - def set_entity(self, entity): self._entity = entity @@ -1211,15 +1204,6 @@ class ProjectListWidget(QtWidgets.QWidget): selected_project = index.data(PROJECT_NAME_ROLE) break - if not self.dbcon: - try: - self.dbcon = AvalonMongoDB() - self.dbcon.install() - except Exception: - self.dbcon = None - self.current_project = None - - self.project_model.set_dbcon(self.dbcon) self.project_model.refresh() self.project_proxy.sort(0) From bcb15c2fc5b28ff38ea3d5a9beca41186dec1615 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Aug 2022 21:01:30 +0200 Subject: [PATCH 0400/2550] nuke: validation type mishmash wip --- .../nuke/plugins/publish/validate_write_nodes.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 48dce623a9..9c9b8babaa 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,3 +1,4 @@ +import six import pyblish.api from openpype.api import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( @@ -72,11 +73,21 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): for knob_data in check_knobs: key = knob_data["name"] value = knob_data["value"] + node_value = write_node[key].value() + + # fix type differences + if type(node_value) in (int, float): + value = float(value) + node_value = float(node_value) + else: + value = str(value) + node_value = str(node_value) + self.log.debug("__ key: {} | value: {}".format( key, value )) if ( - str(write_node[key].value()) != str(value) + node_value != value and key != "file" and key != "tile_color" ): From 2cebaf718cc3c7df21a3543e90bac91ae56e68d8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Aug 2022 21:04:51 +0200 Subject: [PATCH 0401/2550] global: moving collect audio to global --- .../plugins/publish/collect_audio.py | 113 ------------------ openpype/plugins/publish/collect_audio.py | 51 ++++++++ 2 files changed, 51 insertions(+), 113 deletions(-) delete mode 100644 openpype/hosts/celaction/plugins/publish/collect_audio.py create mode 100644 openpype/plugins/publish/collect_audio.py diff --git a/openpype/hosts/celaction/plugins/publish/collect_audio.py b/openpype/hosts/celaction/plugins/publish/collect_audio.py deleted file mode 100644 index c6e3bf2c03..0000000000 --- a/openpype/hosts/celaction/plugins/publish/collect_audio.py +++ /dev/null @@ -1,113 +0,0 @@ -import os -import collections -from pprint import pformat - -import pyblish.api - -from openpype.client import ( - get_subsets, - get_last_versions, - get_representations -) -from openpype.pipeline import legacy_io - - -class AppendCelactionAudio(pyblish.api.ContextPlugin): - - label = "Colect Audio for publishing" - order = pyblish.api.CollectorOrder + 0.1 - - def process(self, context): - self.log.info('Collecting Audio Data') - asset_doc = context.data["assetEntity"] - - # get all available representations - subsets = self.get_subsets( - asset_doc, - representations=["audio", "wav"] - ) - self.log.info(f"subsets is: {pformat(subsets)}") - - if not subsets.get("audioMain"): - raise AttributeError("`audioMain` subset does not exist") - - reprs = subsets.get("audioMain", {}).get("representations", []) - self.log.info(f"reprs is: {pformat(reprs)}") - - repr = next((r for r in 
reprs), None) - if not repr: - raise "Missing `audioMain` representation" - self.log.info(f"representation is: {repr}") - - audio_file = repr.get('data', {}).get('path', "") - - if os.path.exists(audio_file): - context.data["audioFile"] = audio_file - self.log.info( - 'audio_file: {}, has been added to context'.format(audio_file)) - else: - self.log.warning("Couldn't find any audio file on Ftrack.") - - def get_subsets(self, asset_doc, representations): - """ - Query subsets with filter on name. - - The method will return all found subsets and its defined version - and subsets. Version could be specified with number. Representation - can be filtered. - - Arguments: - asset_doct (dict): Asset (shot) mongo document - representations (list): list for all representations - - Returns: - dict: subsets with version and representations in keys - """ - - # Query all subsets for asset - project_name = legacy_io.active_project() - subset_docs = get_subsets( - project_name, asset_ids=[asset_doc["_id"]], fields=["_id"] - ) - # Collect all subset ids - subset_ids = [ - subset_doc["_id"] - for subset_doc in subset_docs - ] - - # Check if we found anything - assert subset_ids, ( - "No subsets found. Check correct filter. " - "Try this for start `r'.*'`: asset: `{}`" - ).format(asset_doc["name"]) - - last_versions_by_subset_id = get_last_versions( - project_name, subset_ids, fields=["_id", "parent"] - ) - - version_docs_by_id = {} - for version_doc in last_versions_by_subset_id.values(): - version_docs_by_id[version_doc["_id"]] = version_doc - - repre_docs = get_representations( - project_name, - version_ids=version_docs_by_id.keys(), - representation_names=representations - ) - repre_docs_by_version_id = collections.defaultdict(list) - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - repre_docs_by_version_id[version_id].append(repre_doc) - - output_dict = {} - for version_id, repre_docs in repre_docs_by_version_id.items(): - version_doc = version_docs_by_id[version_id] - subset_id = version_doc["parent"] - subset_doc = last_versions_by_subset_id[subset_id] - # Store queried docs by subset name - output_dict[subset_doc["name"]] = { - "representations": repre_docs, - "version": version_doc - } - - return output_dict diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py new file mode 100644 index 0000000000..022334e0f3 --- /dev/null +++ b/openpype/plugins/publish/collect_audio.py @@ -0,0 +1,51 @@ +import pyblish.api +from pprint import pformat + +from openpype.client import ( + get_last_version_by_subset_name, + get_representations, +) +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) + + +class CollectAudio(pyblish.api.InstancePlugin): + + label = "Colect Audio" + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["standalonepublisher"] + + def process(self, instance): + self.log.info('Collecting Audio Data') + + project_name = legacy_io.active_project() + asset_name = instance.data["asset"] + # * Add audio to instance if exists. 
+ # Find latest versions document + last_version_doc = get_last_version_by_subset_name( + project_name, "audioMain", asset_name=asset_name, fields=["_id"] + ) + + repre_doc = None + if last_version_doc: + # Try to find it's representation (Expected there is only one) + repre_docs = list(get_representations( + project_name, version_ids=[last_version_doc["_id"]] + )) + if not repre_docs: + self.log.warning( + "Version document does not contain any representations" + ) + else: + repre_doc = repre_docs[0] + + # Add audio to instance if representation was found + if repre_doc: + instance.data["audio"] = [{ + "offset": 0, + "filename": get_representation_path(repre_doc) + }] + + self.log.debug("instance.data: {}".format(pformat(instance.data))) From 7c06a1fe8cbbdd612fb9a6a8f6b8df092f8e810a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Aug 2022 21:06:40 +0200 Subject: [PATCH 0402/2550] global: improving hosts and families in collect audio --- openpype/plugins/publish/collect_audio.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 022334e0f3..f4cad86f94 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -15,7 +15,24 @@ class CollectAudio(pyblish.api.InstancePlugin): label = "Colect Audio" order = pyblish.api.CollectorOrder + 0.1 - hosts = ["standalonepublisher"] + families = ["review"] + hosts = [ + "nuke", + "maya", + "shell", + "hiero", + "premiere", + "harmony", + "traypublisher", + "standalonepublisher", + "fusion", + "tvpaint", + "resolve", + "webpublisher", + "aftereffects", + "flame", + "unreal" + ] def process(self, instance): self.log.info('Collecting Audio Data') From 495a65707cfa7384dcd1498863a34942cb85a5ac Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Aug 2022 21:13:09 +0200 Subject: [PATCH 0403/2550] Global: improving docstring and comments --- openpype/plugins/publish/collect_audio.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index f4cad86f94..4ba47f739d 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -12,7 +12,9 @@ from openpype.pipeline import ( class CollectAudio(pyblish.api.InstancePlugin): + """ Collecting available audio subset to instance + """ label = "Colect Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] @@ -35,11 +37,12 @@ class CollectAudio(pyblish.api.InstancePlugin): ] def process(self, instance): + # * Add audio to instance if exists. self.log.info('Collecting Audio Data') project_name = legacy_io.active_project() asset_name = instance.data["asset"] - # * Add audio to instance if exists. 
+ # Find latest versions document last_version_doc = get_last_version_by_subset_name( project_name, "audioMain", asset_name=asset_name, fields=["_id"] From 874d95270f35f305650a05649a6344379ccbe4e1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 16 Aug 2022 10:11:45 +0200 Subject: [PATCH 0404/2550] Fix logic --- openpype/tools/loader/widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 48c038418a..bb943303bc 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -584,9 +584,9 @@ class SubsetWidget(QtWidgets.QWidget): for repre_doc in repre_docs: repre_ids.append(repre_doc["_id"]) + # keep only version ids without representation with that name version_id = repre_doc["parent"] - if version_id not in version_ids: - version_ids.remove(version_id) + version_ids.remove(version_id) for version_id in version_ids: joined_subset_names = ", ".join([ From 48706101137e544dc0ad66eb3f36e153a77ed699 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 16 Aug 2022 10:17:11 +0200 Subject: [PATCH 0405/2550] Report subsets without representation in one go --- openpype/tools/loader/widgets.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index bb943303bc..48a23e053a 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -567,12 +567,12 @@ class SubsetWidget(QtWidgets.QWidget): # Trigger project_name = self.dbcon.active_project() - subset_names_by_version_id = collections.defaultdict(set) + subset_name_by_version_id = dict() for item in items: version_id = item["version_document"]["_id"] - subset_names_by_version_id[version_id].add(item["subset"]) + subset_name_by_version_id[version_id] = item["subset"] - version_ids = set(subset_names_by_version_id.keys()) + version_ids = set(subset_name_by_version_id.keys()) repre_docs = get_representations( project_name, representation_names=[representation_name], @@ -588,10 +588,11 @@ class SubsetWidget(QtWidgets.QWidget): version_id = repre_doc["parent"] version_ids.remove(version_id) - for version_id in version_ids: + if version_ids: + # report versions that didn't have valid representation joined_subset_names = ", ".join([ - '"{}"'.format(subset) - for subset in subset_names_by_version_id[version_id] + '"{}"'.format(subset_name_by_version_id[version_id]) + for version_id in version_ids ]) self.echo("Subsets {} don't have representation '{}'".format( joined_subset_names, representation_name From ba3e20a712c8d91e3d1c68dd1d2547bcffad08eb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 16 Aug 2022 10:48:51 +0200 Subject: [PATCH 0406/2550] Don't skip empty value - all keys are considered required attributes --- openpype/hosts/maya/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f565f6a308..34138e64bd 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -349,8 +349,6 @@ def containerise(name, ] for key, value in data: - if not value: - continue if isinstance(value, (int, float)): cmds.addAttr(container, longName=key, attributeType="short") From b7d4a2a747ff572c091ca7f6d50b01911d570439 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 16 Aug 2022 10:49:50 +0200 Subject: [PATCH 0407/2550] Simplify logic since all values should be strings --- 
openpype/hosts/maya/api/pipeline.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 34138e64bd..d4067ea659 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -349,14 +349,8 @@ def containerise(name, ] for key, value in data: - - if isinstance(value, (int, float)): - cmds.addAttr(container, longName=key, attributeType="short") - cmds.setAttr(container + "." + key, value) - - else: - cmds.addAttr(container, longName=key, dataType="string") - cmds.setAttr(container + "." + key, value, type="string") + cmds.addAttr(container, longName=key, dataType="string") + cmds.setAttr(container + "." + key, value, type="string") main_container = cmds.ls(AVALON_CONTAINERS, type="objectSet") if not main_container: From e48eb3ba47f9afc41ce3c4c06b6e7ffb36746f89 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 11:15:01 +0200 Subject: [PATCH 0408/2550] remove create_shelf function since it is no longer needed --- openpype/hosts/houdini/api/lib.py | 32 ------------------------------- 1 file changed, 32 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 55832abeb3..c8a7f92bb9 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -460,35 +460,3 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) - - -def create_shelf(): - hou.shelves.beginChangeBlock() - - custom_shelf = hou.shelves.newShelf( - file_path='', - name="custom_shelf", - label="Custom Shelf" - ) - - new_tool = hou.shelves.newTool( - file_path='', - name='new_tool', - label='New Tool', - script='', - language=hou.scriptLanguage.Python, - icon='', - help='This is a new tool' - ) - - if new_tool not in custom_shelf.tools(): - custom_shelf.setTools(list(custom_shelf.tools()) + [new_tool]) - - shelf_set = [ - shelf for shelf in hou.shelves.shelfSets().values() - if shelf.label() == "Create and Refine" - ][0] - - shelf_set.setShelves(shelf_set.shelves() + (custom_shelf,)) - - hou.shelves.endChangeBlock() From c9f60bb848b81f9b4c095281cfae3c3d27e8d652 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 11:19:50 +0200 Subject: [PATCH 0409/2550] remove invalid default values --- .../defaults/project_settings/houdini.json | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 78e0d595cf..43d2ad132a 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -5,21 +5,8 @@ "shelf_set_source_path": { "windows": "", "darwin": "", - "linux": "/path/to/your/shelf_set_file" - }, - "shelf_definition": [ - { - "shelf_name": "OpenPype Shelf", - "tools_list": [ - { - "label": "OpenPype Tool", - "script": "/path/to/your/tool_script", - "icon": "/path/to/your/icon", - "help": "Help message for your tool" - } - ] - } - ] + "linux": "" + } } ], "create": { From 345a476159ed6d0f684f89316a488bba52347d93 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 11:55:09 +0200 Subject: [PATCH 0410/2550] prepared settings to be able change task status on creation --- .../defaults/project_settings/ftrack.json | 3 ++ .../schema_project_ftrack.json | 38 +++++++++++++++++++ 2 files 
changed, 41 insertions(+) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 3e86581a03..9847e58cfa 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -434,6 +434,9 @@ "enabled": false, "custom_attribute_keys": [] }, + "IntegrateHierarchyToFtrack": { + "create_task_status_profiles": [] + }, "IntegrateFtrackNote": { "enabled": true, "note_template": "{intent}: {comment}", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index c06bec0f58..3f472c6c6a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -841,6 +841,44 @@ } ] }, + { + "type": "dict", + "key": "IntegrateHierarchyToFtrack", + "label": "Integrate Hierarchy to ftrack", + "is_group": true, + "collapsible": true, + "children": [ + { + "type": "label", + "label": "Set task status on new task creation. Ftrack's default status is used otherwise." + }, + { + "type": "list", + "key": "create_task_status_profiles", + "object_type": { + "type": "dict", + "children": [ + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "text", + "key": "status_name", + "label": "Status name" + } + ] + } + } + ] + }, { "type": "dict", "collapsible": true, From 63e6088391b59f575c5406d4183e041d4f38c724 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 16 Aug 2022 11:55:23 +0200 Subject: [PATCH 0411/2550] Refactor `.remove` to `.discard` to fix bug if version wasn't in version_ids --- openpype/tools/loader/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 48a23e053a..2d8b4b048d 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -586,7 +586,7 @@ class SubsetWidget(QtWidgets.QWidget): # keep only version ids without representation with that name version_id = repre_doc["parent"] - version_ids.remove(version_id) + version_ids.discard(version_id) if version_ids: # report versions that didn't have valid representation From 088a2d2003e111769084821ac1e2c2ece3cf2e35 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 11:55:23 +0200 Subject: [PATCH 0412/2550] use task status profiles to change task status id on creation --- .../publish/integrate_hierarchy_ftrack.py | 62 ++++++++++++++----- 1 file changed, 45 insertions(+), 17 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index b8855ee2bd..8d39baa8d7 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -1,9 +1,12 @@ import sys import collections import six -import pyblish.api from copy import deepcopy + +import pyblish.api + from openpype.client import get_asset_by_id +from openpype.lib import filter_profiles # Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` @@ -73,6 +76,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): "traypublisher" ] optional = 
False + create_task_status_profiles = [] def process(self, context): self.context = context @@ -82,14 +86,16 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - self.session = self.context.data["ftrackSession"] + session = self.context.data["ftrackSession"] project_name = self.context.data["projectEntity"]["name"] query = 'Project where full_name is "{}"'.format(project_name) - project = self.session.query(query).one() - auto_sync_state = project[ - "custom_attributes"][CUST_ATTR_AUTO_SYNC] + project = session.query(query).one() + auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC] - self.ft_project = None + self.session = session + self.ft_project = project + self.task_types = self.get_all_task_types(project) + self.task_statuses = self.get_task_statuses(project) # disable termporarily ftrack project's autosyncing if auto_sync_state: @@ -121,10 +127,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.log.debug(entity_type) if entity_type.lower() == 'project': - query = 'Project where full_name is "{}"'.format(entity_name) - entity = self.session.query(query).one() - self.ft_project = entity - self.task_types = self.get_all_task_types(entity) + entity = self.ft_project elif self.ft_project is None or parent is None: raise AssertionError( @@ -217,13 +220,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): task_type=task_type, parent=entity ) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) # Incoming links. self.create_links(project_name, entity_data, entity) @@ -303,7 +299,37 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return tasks + def get_task_statuses(self, project_entity): + project_schema = project_entity["project_schema"] + task_workflow_statuses = project_schema["_task_workflow"]["statuses"] + return { + status["id"]: status + for status in task_workflow_statuses + } + def create_task(self, name, task_type, parent): + filter_data = { + "task_names": name, + "task_types": task_type + } + profile = filter_profiles( + self.create_task_status_profiles, + filter_data + ) + status_id = None + if profile: + status_name = profile["status_name"] + status_name_low = status_name.lower() + for _status_id, status in self.task_statuses.items(): + if status["name"].lower() == status_name_low: + status_id = _status_id + break + + if status_id is None: + self.log.warning( + "Task status \"{}\" was not found".format(status_name) + ) + task = self.session.create('Task', { 'name': name, 'parent': parent @@ -312,6 +338,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.log.info(task_type) self.log.info(self.task_types) task['type'] = self.task_types[task_type] + if status_id is not None: + task["status_id"] = status_id try: self.session.commit() From c9f3340f3c750c0b64790607a254d43e76dfb134 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 16 Aug 2022 12:18:53 +0200 Subject: [PATCH 0413/2550] OP-3723 - resize saved images in PS for ffmpeg Ffmpeg cannot handle pictures higher than 16384x16384. Uses PIL to resize to max size(with aspect ratio). In the future this plugin should be refactored (to use general ExtractThumbnail and ExtractReview). 
--- .../plugins/publish/extract_review.py | 55 +++++++++++++++++-- 1 file changed, 49 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index d076610ead..7f78a46527 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -1,5 +1,6 @@ import os import shutil +from PIL import Image import openpype.api import openpype.lib @@ -8,10 +9,17 @@ from openpype.hosts.photoshop import api as photoshop class ExtractReview(openpype.api.Extractor): """ - Produce a flattened or sequence image file from all 'image' instances. + Produce a flattened or sequence image files from all 'image' instances. If no 'image' instance is created, it produces flattened image from all visible layers. + + It creates review, thumbnail and mov representations. + + 'review' family could be used in other steps as a reference, as it + contains flattened image by default. (Eg. artist could load this + review as a single item and see full image. In most cases 'image' + family is separated by layers to better usage in animation or comp.) """ label = "Extract Review" @@ -49,7 +57,7 @@ class ExtractReview(openpype.api.Extractor): "stagingDir": staging_dir, "tags": self.jpg_options['tags'], }) - + processed_img_names = img_list else: self.log.info("Extract layers to flatten image.") img_list = self._saves_flattened_layers(staging_dir, layers) @@ -57,26 +65,33 @@ class ExtractReview(openpype.api.Extractor): instance.data["representations"].append({ "name": "jpg", "ext": "jpg", - "files": img_list, + "files": img_list, # cannot be [] for single frame "stagingDir": staging_dir, "tags": self.jpg_options['tags'] }) + processed_img_names = [img_list] ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") instance.data["stagingDir"] = staging_dir - # Generate thumbnail. + source_files_pattern = os.path.join(staging_dir, + self.output_seq_filename) + source_files_pattern = self._check_and_resize(processed_img_names, + source_files_pattern, + staging_dir) + # Generate thumbnail thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg") self.log.info(f"Generate thumbnail {thumbnail_path}") args = [ ffmpeg_path, "-y", - "-i", os.path.join(staging_dir, self.output_seq_filename), + "-i", source_files_pattern, "-vf", "scale=300:-1", "-vframes", "1", thumbnail_path ] + self.log.debug("thumbnail args:: {}".format(args)) output = openpype.lib.run_subprocess(args) instance.data["representations"].append({ @@ -94,11 +109,12 @@ class ExtractReview(openpype.api.Extractor): args = [ ffmpeg_path, "-y", - "-i", os.path.join(staging_dir, self.output_seq_filename), + "-i", source_files_pattern, "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2", "-vframes", str(img_number), mov_path ] + self.log.debug("mov args:: {}".format(args)) output = openpype.lib.run_subprocess(args) self.log.debug(output) instance.data["representations"].append({ @@ -120,6 +136,33 @@ class ExtractReview(openpype.api.Extractor): self.log.info(f"Extracted {instance} to {staging_dir}") + def _check_and_resize(self, processed_img_names, source_files_pattern, + staging_dir): + """Check if saved image could be used in ffmpeg. + + Ffmpeg has max size 16384x16384. Saved image(s) must be resized to be + used as a source for thumbnail or review mov. 
+ """ + max_ffmpeg_size = 16384 + first_url = os.path.join(staging_dir, processed_img_names[0]) + with Image.open(first_url) as im: + width, height = im.size + + if width > max_ffmpeg_size or height > max_ffmpeg_size: + resized_dir = os.path.join(staging_dir, "resized") + os.mkdir(resized_dir) + source_files_pattern = os.path.join(resized_dir, + self.output_seq_filename) + for file_name in processed_img_names: + source_url = os.path.join(staging_dir, file_name) + with Image.open(source_url) as res_img: + # 'thumbnail' automatically keeps aspect ratio + res_img.thumbnail((max_ffmpeg_size, max_ffmpeg_size), + Image.ANTIALIAS) + res_img.save(os.path.join(resized_dir, file_name)) + + return source_files_pattern + def _get_image_path_from_instances(self, instance): img_list = [] From 9c7bcb84aa42a2f2c083c856ae421a9d264f32dc Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 12:25:05 +0200 Subject: [PATCH 0414/2550] fix typo and tool creation --- openpype/hosts/houdini/api/shelves.py | 33 +++++++++++++++++---------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index bb92aa828e..d9a3a34da6 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -10,7 +10,7 @@ log = logging.getLogger("openpype.hosts.houdini") def generate_shelves(): - """This function generates complete shelves from shef set to tools + """This function generates complete shelves from shelf set to tools in Houdini from openpype project settings houdini shelf definition. Raises: @@ -23,8 +23,8 @@ def generate_shelves(): shelves_set_config = project_settings["houdini"]["shelves"] if not shelves_set_config: - log.warning( - "SHELF WARNGING: No custom shelves found in project settings." + log.info( + "SHELF INFO: No custom shelves found in project settings." ) return @@ -45,7 +45,7 @@ def generate_shelves(): shelf_set_name = shelf_set_config.get('shelf_set_name') if not shelf_set_name: log.warning( - "SHELF WARNGING: No name found in shelf set definition." + "SHELF WARNING: No name found in shelf set definition." ) return @@ -54,8 +54,8 @@ def generate_shelves(): shelves_definition = shelf_set_config.get('shelf_definition') if not shelves_definition: - log.warning( - "SHELF WARNING: \ + log.info( + "SHELF INFO: \ No shelf definition found for shelf set named '{}'".format(shelf_set_name) ) return @@ -64,26 +64,34 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf_name = shelf_definition.get('shelf_name') if not shelf_name: log.warning( - "SHELF WARNGING: No name found in shelf definition." + "SHELF WARNING: No name found in shelf definition." 
) return shelf = get_or_create_shelf(shelf_name) + if not shelf_definition.get('tools_list'): + log.warning("TOOLS INFO: No tool definition found for \ +shelf named {}".format(shelf_name)) + return + + mandatory_attributes = ['name', 'script'] for tool_definition in shelf_definition.get('tools_list'): # We verify that the name and script attibutes of the tool # are set - mandatory_attributes = ['name', 'script'] if not all( [v for k, v in tool_definition.items() if k in mandatory_attributes] ): log.warning("TOOLS ERROR: You need to specify at least \ the name and the script path of the tool.") - return + continue tool = get_or_create_tool(tool_definition, shelf) + if not tool: + return + # Add the tool to the shelf if not already in it if tool not in shelf.tools(): shelf.setTools(list(shelf.tools()) + [tool]) @@ -105,12 +113,12 @@ def get_or_create_shelf_set(shelf_set_label): """ all_shelves_sets = hou.shelves.shelfSets().values() - shelf_set = [ + shelf_sets = [ shelf for shelf in all_shelves_sets if shelf.label() == shelf_set_label ] - if shelf_set: - return shelf_set[0] + if shelf_sets: + return shelf_sets[0] shelf_set_name = shelf_set_label.replace(' ', '_').lower() new_shelf_set = hou.shelves.newShelfSet( @@ -170,6 +178,7 @@ def get_or_create_tool(tool_definition, shelf): return existing_tool[0] tool_name = tool_label.replace(' ', '_').lower() + log.warning(tool_definition) if not os.path.exists(tool_definition['script']): log.warning( From a556f2393276048e4eaeeeeb11b82d5df9fbab59 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 16 Aug 2022 12:33:39 +0200 Subject: [PATCH 0415/2550] Force `str` type for the values Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/maya/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index d4067ea659..84963c55a4 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -350,7 +350,7 @@ def containerise(name, for key, value in data: cmds.addAttr(container, longName=key, dataType="string") - cmds.setAttr(container + "." + key, value, type="string") + cmds.setAttr(container + "." 
+ key, str(value), type="string") main_container = cmds.ls(AVALON_CONTAINERS, type="objectSet") if not main_container: From 64ce18b0e49b4fdefafa93ddc2508a3ca424e9b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 16 Aug 2022 12:34:29 +0200 Subject: [PATCH 0416/2550] Remove redundant `str` conversion since that's now done after --- openpype/hosts/maya/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 84963c55a4..2ee8fb8e5d 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -344,7 +344,7 @@ def containerise(name, ("id", AVALON_CONTAINER_ID), ("name", name), ("namespace", namespace), - ("loader", str(loader)), + ("loader", loader), ("representation", context["representation"]["_id"]), ] From 538513304e9b3ebcd433765881a620a0e00bc48c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 16 Aug 2022 13:20:25 +0200 Subject: [PATCH 0417/2550] OP-3682 - refactored OS to FILESYSTEM --- distribution/addon_distribution.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index a0c48923df..3cc2374b93 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -11,7 +11,7 @@ from distribution.file_handler import RemoteFileHandler class UrlType(Enum): HTTP = "http" GIT = "git" - OS = "os" + FILESYSTEM = "filesystem" @attr.s @@ -122,7 +122,7 @@ def get_addons_info(server_endpoint): # **{"name": "openpype_slack", # "version": "1.0.0", # "addon_url": "c:/projects/openpype_slack_1.0.0.zip", - # "type": UrlType.OS, + # "type": UrlType.FILESYSTEM, # "hash": "4f6b8568eb9dd6f510fd7c4dcb676788"}) # noqa # # http_addon = AddonInfo( @@ -189,7 +189,7 @@ def cli(args): addon_folder = "c:/projects/testing_addons/pypeclub/openpype/addons" downloader_factory = AddonDownloader() - downloader_factory.register_format(UrlType.OS, OSAddonDownloader) + downloader_factory.register_format(UrlType.FILESYSTEM, OSAddonDownloader) downloader_factory.register_format(UrlType.HTTP, HTTPAddonDownloader) test_endpoint = "https://34e99f0f-f987-4715-95e6-d2d88caa7586.mock.pstmn.io/get_addons_info" # noqa From 5e9d4f7603f887e3eff156e38943c4e46cd4acb6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 13:55:41 +0200 Subject: [PATCH 0418/2550] change label of 'IntegrateFtrackInstance' in settings --- .../entities/schemas/projects_schema/schema_project_ftrack.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index c06bec0f58..6aa8ea9c7d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -948,7 +948,7 @@ { "type": "dict", "key": "IntegrateFtrackInstance", - "label": "IntegrateFtrackInstance", + "label": "Integrate Ftrack Instance", "is_group": true, "children": [ { From a8bc744185e12d3ff65f5760ab9ff98b01069482 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 13:56:40 +0200 Subject: [PATCH 0419/2550] store ftrack task to instance on creation --- .../plugins/publish/integrate_ftrack_api.py | 3 +- .../publish/integrate_hierarchy_ftrack.py | 31 ++++++++++++++----- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git 
a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 20a69e060c..159e60024d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -13,6 +13,7 @@ Provides: import os import sys import collections + import six import pyblish.api import clique @@ -21,7 +22,7 @@ import clique class IntegrateFtrackApi(pyblish.api.InstancePlugin): """ Commit components to server. """ - order = pyblish.api.IntegratorOrder+0.499 + order = pyblish.api.IntegratorOrder + 0.499 label = "Integrate Ftrack Api" families = ["ftrack"] diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index b8855ee2bd..c520c6f2cf 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -153,8 +153,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # CUSTOM ATTRIBUTES custom_attributes = entity_data.get('custom_attributes', []) instances = [ - i for i in self.context if i.data['asset'] in entity['name'] + instance + for instance in self.context + if instance.data.get("asset") == entity["name"] ] + + for instance in instances: + instance.data["ftrackEntity"] = entity + for key in custom_attributes: hier_attr = hier_attr_by_key.get(key) # Use simple method if key is not hierarchical @@ -184,9 +190,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): ) ) - for instance in instances: - instance.data['ftrackEntity'] = entity - try: self.session.commit() except Exception: @@ -196,13 +199,22 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): six.reraise(tp, value, tb) # TASKS + instances_by_task_name = collections.defaultdict(list) + for instance in instances: + task_name = instance.data.get("task") + if task_name: + instances_by_task_name[task_name].append(instance) + tasks = entity_data.get('tasks', []) existing_tasks = [] tasks_to_create = [] for child in entity['children']: - if child.entity_type.lower() == 'task': - existing_tasks.append(child['name'].lower()) - # existing_tasks.append(child['type']['name']) + if child.entity_type.lower() == "task": + task_name_low = child["name"].lower() + existing_tasks.append(task_name_low) + + for instance in instances_by_task_name[task_name_low]: + instance["ftrackTask"] = child for task_name in tasks: task_type = tasks[task_name]["type"] @@ -212,7 +224,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): tasks_to_create.append((task_name, task_type)) for task_name, task_type in tasks_to_create: - self.create_task( + task_entity = self.create_task( name=task_name, task_type=task_type, parent=entity @@ -225,6 +237,9 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.session._configure_locations() six.reraise(tp, value, tb) + for instance in instances_by_task_name[task_name.lower()]: + instance.data["ftrackTask"] = task_entity + # Incoming links. 
self.create_links(project_name, entity_data, entity) try: From f5d7634e007d4e9a27f76b7abb693daa7b9ba055 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 14:15:08 +0200 Subject: [PATCH 0420/2550] change tools mandatory attributes to set type and iterate only on those attributes --- openpype/hosts/houdini/api/shelves.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index d9a3a34da6..498fffc7cd 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -75,13 +75,12 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf named {}".format(shelf_name)) return - mandatory_attributes = ['name', 'script'] + mandatory_attributes = {'name', 'script'} for tool_definition in shelf_definition.get('tools_list'): # We verify that the name and script attibutes of the tool # are set if not all( - [v for k, v in tool_definition.items() if - k in mandatory_attributes] + tool_definition[key] for key in mandatory_attributes ): log.warning("TOOLS ERROR: You need to specify at least \ the name and the script path of the tool.") From 46726e5afedc88ef22b31f515e70b08308643acd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 16 Aug 2022 14:31:18 +0200 Subject: [PATCH 0421/2550] OP-3713 - changed type to tri-state Customer wants to have more granularity, they want to create flatten 'image', but not separate 'image' per layer. --- .../settings/defaults/project_settings/photoshop.json | 2 +- .../projects_schema/schema_project_photoshop.json | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index d9b7a8083f..b08e73f1ee 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -8,7 +8,7 @@ }, "publish": { "CollectColorCodedInstances": { - "create_flatten_image": false, + "create_flatten_image": "no", "flatten_subset_template": "", "color_code_mapping": [] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index badf94229b..6935ec8e5e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -45,9 +45,15 @@ "label": "Set color for publishable layers, set its resulting family and template for subset name. \nCan create flatten image from published instances.(Applicable only for remote publishing!)" }, { - "type": "boolean", "key": "create_flatten_image", - "label": "Create flatten image" + "label": "Create flatten image", + "type": "enum", + "multiselection": false, + "enum_items": [ + { "yes": "Yes" }, + { "no": "No" }, + { "only": "Only flatten" } + ] }, { "type": "text", From 9bfc1447b7945cd11d11414df3ffad42f6014292 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 16 Aug 2022 14:32:47 +0200 Subject: [PATCH 0422/2550] OP-3713 - implement tri-state logic for create_flatten_image Customer wants to have more granularity, they want to create flatten 'image', but not separate 'image' per layer. 
--- .../publish/collect_color_coded_instances.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index 71bd2cd854..9adc16d0fd 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -32,7 +32,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): # TODO check if could be set globally, probably doesn't make sense when # flattened template cannot subset_template_name = "" - create_flatten_image = False + create_flatten_image = "no" # probably not possible to configure this globally flatten_subset_template = "" @@ -98,13 +98,16 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): "Subset {} already created, skipping.".format(subset)) continue - instance = self._create_instance(context, layer, resolved_family, - asset_name, subset, task_name) + if self.create_flatten_image != "only": + instance = self._create_instance(context, layer, + resolved_family, + asset_name, subset, task_name) + created_instances.append(instance) + existing_subset_names.append(subset) publishable_layers.append(layer) - created_instances.append(instance) - if self.create_flatten_image and publishable_layers: + if self.create_flatten_image != "no" and publishable_layers: self.log.debug("create_flatten_image") if not self.flatten_subset_template: self.log.warning("No template for flatten image") @@ -116,7 +119,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): first_layer = publishable_layers[0] # dummy layer first_layer.name = subset - family = created_instances[0].data["family"] # inherit family + family = resolved_family # inherit family instance = self._create_instance(context, first_layer, family, asset_name, subset, task_name) From b679a46b9c3b9cbabbf056bf530d26ae6bdb1309 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 14:32:55 +0200 Subject: [PATCH 0423/2550] changed default value of 'asset_versions_status_profiles' to match settings --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index a1e5922730..7caf17c18d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -42,7 +42,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "reference": "reference" } keep_first_subset_name_for_review = True - asset_versions_status_profiles = {} + asset_versions_status_profiles = [] def process(self, instance): self.log.debug("instance {}".format(instance)) From fce758d3fa8bb307ca3d9501ec772617a7a0987e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 14:33:43 +0200 Subject: [PATCH 0424/2550] removed unused settings 'first_version_status' in ftrack --- .../defaults/project_settings/ftrack.json | 4 ---- .../projects_schema/schema_project_ftrack.json | 18 ------------------ 2 files changed, 22 deletions(-) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 3e86581a03..98d1587a35 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ 
b/openpype/settings/defaults/project_settings/ftrack.json @@ -96,10 +96,6 @@ "mapping": {}, "asset_types_to_skip": [] }, - "first_version_status": { - "enabled": true, - "status": "" - }, "next_task_update": { "enabled": true, "mapping": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 6aa8ea9c7d..b8a1f011a3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -299,24 +299,6 @@ } ] }, - { - "type": "dict", - "key": "first_version_status", - "label": "Set status on first created version", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "text", - "key": "status", - "label": "Status" - } - ] - }, { "type": "dict", "key": "next_task_update", From e0abb7245c231d2cabec782b1172b9257fd096da Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 14:34:15 +0200 Subject: [PATCH 0425/2550] fix type and docstring style to match OpenPype's --- openpype/hosts/houdini/api/pipeline.py | 1 - openpype/hosts/houdini/api/shelves.py | 5 ++--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f809f0ce56..d7a8135d86 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -310,7 +310,6 @@ def _set_context_settings(): fps resolution renderer - shelves Returns: None diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 498fffc7cd..725d162980 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -105,7 +105,7 @@ def get_or_create_shelf_set(shelf_set_label): creates a new shelf set. Arguments: - shelf_set_label {str} -- The label of the shelf set + shelf_set_label (str) -- The label of the shelf set Returns: hou.ShelfSet -- The shelf set existing or the new one @@ -153,7 +153,7 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsist and update it. If not, creates + """This function verifies if the tool exsists and updates it. If not, creates a new one. Arguments: @@ -177,7 +177,6 @@ def get_or_create_tool(tool_definition, shelf): return existing_tool[0] tool_name = tool_label.replace(' ', '_').lower() - log.warning(tool_definition) if not os.path.exists(tool_definition['script']): log.warning( From cfb14d32b50920d06fbfc6d1f74da2798910b3da Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 15:42:25 +0200 Subject: [PATCH 0426/2550] Show dialog if installed version is not compatible in UI mode --- start.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/start.py b/start.py index 084eb7451a..d1198a85e4 100644 --- a/start.py +++ b/start.py @@ -748,12 +748,21 @@ def _find_frozen_openpype(use_version: str = None, _initialize_environment(openpype_version) return version_path + in_headless_mode = os.getenv("OPENPYPE_HEADLESS_MODE") == "1" if not installed_version.is_compatible(openpype_version): - raise OpenPypeVersionIncompatible( - ( - f"Latest version found {openpype_version} is not " - f"compatible with currently running {installed_version}" + message = "Version {} is not compatible with installed version {}." 
+ # Show UI to user + if not in_headless_mode: + igniter.show_message_dialog( + "Incompatible OpenPype installation", + message.format( + "{}".format(openpype_version), + "{}".format(installed_version) + ) ) + # Raise incompatible error + raise OpenPypeVersionIncompatible( + message.format(openpype_version, installed_version) ) # test if latest detected is installed (in user data dir) @@ -768,7 +777,7 @@ def _find_frozen_openpype(use_version: str = None, if not is_inside: # install latest version to user data dir - if os.getenv("OPENPYPE_HEADLESS_MODE") == "1": + if in_headless_mode: version_path = bootstrap.install_version( openpype_version, force=True ) From 7d3be59f59757c37988163b5332621d743e13c67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 16 Aug 2022 16:02:05 +0200 Subject: [PATCH 0427/2550] :sparkles: collect workfile --- .../plugins/publish/collect_current_file.py | 50 +++++++++++++++++-- 1 file changed, 45 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index c0b987ebbc..1383c274a2 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -1,27 +1,28 @@ import os import hou +from openpype.pipeline import legacy_io import pyblish.api class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): """Inject the current working file into context""" - order = pyblish.api.CollectorOrder - 0.5 + order = pyblish.api.CollectorOrder - 0.01 label = "Houdini Current File" hosts = ["houdini"] def process(self, context): """Inject the current working file""" - filepath = hou.hipFile.path() - if not os.path.exists(filepath): + current_file = hou.hipFile.path() + if not os.path.exists(current_file): # By default Houdini will even point a new scene to a path. # However if the file is not saved at all and does not exist, # we assume the user never set it. filepath = "" - elif os.path.basename(filepath) == "untitled.hip": + elif os.path.basename(current_file) == "untitled.hip": # Due to even a new file being called 'untitled.hip' we are unable # to confirm the current scene was ever saved because the file # could have existed already. We will allow it if the file exists, @@ -33,4 +34,43 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): "saved correctly." 
) - context.data["currentFile"] = filepath + context.data["currentFile"] = current_file + + folder, file = os.path.split(current_file) + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = 'workfile' + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": 'workfile', + "families": ['workfile'], + "setMembers": [current_file], + "frameStart": context.data['frameStart'], + "frameEnd": context.data['frameEnd'], + "handleStart": context.data['handleStart'], + "handleEnd": context.data['handleEnd'] + }) + + data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info('Collected instance: {}'.format(file)) + self.log.info('Scene path: {}'.format(current_file)) + self.log.info('staging Dir: {}'.format(folder)) + self.log.info('subset: {}'.format(subset)) From 129a38ebc0204fc9d6777a1b876d1f882fc929f4 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 14:34:15 +0200 Subject: [PATCH 0428/2550] fix type and docstring style to match OpenPype's --- openpype/hosts/houdini/api/pipeline.py | 1 - openpype/hosts/houdini/api/shelves.py | 17 ++++++++--------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f809f0ce56..d7a8135d86 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -310,7 +310,6 @@ def _set_context_settings(): fps resolution renderer - shelves Returns: None diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 498fffc7cd..ba3fcc2af9 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -105,10 +105,10 @@ def get_or_create_shelf_set(shelf_set_label): creates a new shelf set. Arguments: - shelf_set_label {str} -- The label of the shelf set + shelf_set_label (str): The label of the shelf set Returns: - hou.ShelfSet -- The shelf set existing or the new one + hou.ShelfSet: The shelf set existing or the new one """ all_shelves_sets = hou.shelves.shelfSets().values() @@ -132,10 +132,10 @@ def get_or_create_shelf(shelf_label): a new shelf. Arguments: - shelf_label {str} -- The label of the shelf + shelf_label (str): The label of the shelf Returns: - hou.Shelf -- The shelf existing or the new one + hou.Shelf: The shelf existing or the new one """ all_shelves = hou.shelves.shelves().values() @@ -153,15 +153,15 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsist and update it. If not, creates + """This function verifies if the tool exsists and updates it. If not, creates a new one. 
Arguments: - tool_definition {dict} -- Dict with label, script, icon and help - shelf {hou.Shelf} -- The parent shelf of the tool + tool_definition (dict): Dict with label, script, icon and help + shelf (hou.Shelf): The parent shelf of the tool Returns: - hou.Tool -- The tool updated or the new one + hou.Tool: The tool updated or the new one """ existing_tools = shelf.tools() tool_label = tool_definition.get('label') @@ -177,7 +177,6 @@ def get_or_create_tool(tool_definition, shelf): return existing_tool[0] tool_name = tool_label.replace(' ', '_').lower() - log.warning(tool_definition) if not os.path.exists(tool_definition['script']): log.warning( From 141b275fc614aa1456c97bbe16706497524cb0f0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 16 Aug 2022 17:02:37 +0200 Subject: [PATCH 0429/2550] OP-3713 - fix missing family Resulted in failure in integrate --- .../plugins/publish/collect_color_coded_instances.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index 9adc16d0fd..7d78140c5b 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -62,6 +62,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): publishable_layers = [] created_instances = [] + family_from_settings = None for layer in layers: self.log.debug("Layer:: {}".format(layer)) if layer.parents: @@ -80,6 +81,9 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): self.log.debug("!!! Not found family or template, skip") continue + if not family_from_settings: + family_from_settings = resolved_family + fill_pairs = { "variant": variant, "family": resolved_family, @@ -119,7 +123,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): first_layer = publishable_layers[0] # dummy layer first_layer.name = subset - family = resolved_family # inherit family + family = family_from_settings # inherit family instance = self._create_instance(context, first_layer, family, asset_name, subset, task_name) From 45368a7ba83f0bdb59e9b4e591e6a1fee0736fff Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 16 Aug 2022 17:15:09 +0200 Subject: [PATCH 0430/2550] OP-3713 - added more documentation --- .../publish/collect_color_coded_instances.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index 7d78140c5b..f93ba51574 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -9,14 +9,22 @@ from openpype.settings import get_project_settings class CollectColorCodedInstances(pyblish.api.ContextPlugin): - """Creates instances for configured color code of a layer. + """Creates instances for layers marked by configurable color. Used in remote publishing when artists marks publishable layers by color- - coding. + coding. Top level layers (group) must be marked by specific color to be + published as an instance of 'image' family. Can add group for all publishable layers to allow creation of flattened image. (Cannot contain special background layer as it cannot be grouped!) 
+ Based on value `create_flatten_image` from Settings: + - "yes": create flattened 'image' subset of all publishable layers + create + 'image' subset per publishable layer + - "only": create ONLY flattened 'image' subset of all publishable layers + - "no": do not create flattened 'image' subset at all, + only separate subsets per marked layer. + Identifier: id (str): "pyblish.avalon.instance" """ @@ -33,7 +41,6 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): # flattened template cannot subset_template_name = "" create_flatten_image = "no" - # probably not possible to configure this globally flatten_subset_template = "" def process(self, context): From 74934a51b9ce7d581473426ef9206fad1ab4b486 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Aug 2022 17:21:37 +0200 Subject: [PATCH 0431/2550] Nuke: removing audio inclusion from precollect write --- .../nuke/plugins/publish/precollect_writes.py | 28 ------------------- 1 file changed, 28 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index e37cc8a80a..17c4bc30cf 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -201,34 +201,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): if not instance.data["review"]: instance.data["useSequenceForReview"] = False - project_name = legacy_io.active_project() - asset_name = instance.data["asset"] - # * Add audio to instance if exists. - # Find latest versions document - last_version_doc = get_last_version_by_subset_name( - project_name, "audioMain", asset_name=asset_name, fields=["_id"] - ) - - repre_doc = None - if last_version_doc: - # Try to find it's representation (Expected there is only one) - repre_docs = list(get_representations( - project_name, version_ids=[last_version_doc["_id"]] - )) - if not repre_docs: - self.log.warning( - "Version document does not contain any representations" - ) - else: - repre_doc = repre_docs[0] - - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.debug("instance.data: {}".format(pformat(instance.data))) def is_prerender(self, families): From 737edadfd5166eb20197d818eefe2a1a22041b0d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Aug 2022 17:23:42 +0200 Subject: [PATCH 0432/2550] global: preparation for settings attribute --- openpype/plugins/publish/collect_audio.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 4ba47f739d..7e3b42f375 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -36,6 +36,8 @@ class CollectAudio(pyblish.api.InstancePlugin): "unreal" ] + audio_subset_name = "audioMain" + def process(self, instance): # * Add audio to instance if exists. 
self.log.info('Collecting Audio Data') @@ -45,7 +47,10 @@ class CollectAudio(pyblish.api.InstancePlugin): # Find latest versions document last_version_doc = get_last_version_by_subset_name( - project_name, "audioMain", asset_name=asset_name, fields=["_id"] + project_name, + self.audio_subset_name, + asset_name=asset_name, + fields=["_id"] ) repre_doc = None From 266975d6942156e71565a3847e4e62a9490dee71 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Aug 2022 17:36:39 +0200 Subject: [PATCH 0433/2550] settings: adding collect audio plugin --- .../defaults/project_settings/global.json | 4 ++++ .../schemas/schema_global_publish.json | 21 +++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 0ff9363ba7..9258343440 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -3,6 +3,10 @@ "CollectAnatomyInstanceData": { "follow_workfile_version": false }, + "CollectAudio": { + "enabled": true, + "audio_subset_name": "audioMain" + }, "CollectSceneVersion": { "hosts": [ "aftereffects", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index e1aa230b49..2efee92832 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -18,6 +18,27 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CollectAudio", + "label": "Collect Audio", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "audio_subset_name", + "label": "Name of audio variant", + "type": "text", + "placeholder": "audioMain" + } + ] + }, { "type": "dict", "collapsible": true, From e0c7ba861733e0cf4ec9087fdadcfe6b0d729aea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 17:53:16 +0200 Subject: [PATCH 0434/2550] added new plugin which change task status for instances if they are rendered on farm --- .../publish/integrate_ftrack_farm_status.py | 129 ++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py new file mode 100644 index 0000000000..ecf258a870 --- /dev/null +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -0,0 +1,129 @@ +import pyblish.api +from openpype.lib import profiles_filtering + + +class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): + """Change task status when should be published on farm. + + Instance which has set "farm" key in data to 'True' is considered as will + be rendered on farm thus it's status should be changed. + """ + + order = pyblish.api.IntegratorOrder + 0.48 + label = "Integrate Ftrack Component" + families = ["ftrack"] + + farm_status_profiles = [] + + def process(self, context): + # Quick end + if not self.farm_status_profiles: + project_name = context.data["projectName"] + self.log.info(( + "Status profiles are not filled for project \"{}\". 
Skipping" + ).format(project_name)) + return + + filtered_instances = self.filter_instances(context) + instances_with_status_names = self.get_instances_with_statuse_names( + context, filtered_instances + ) + if instances_with_status_names: + self.fill_statuses(context, instances_with_status_names) + + def filter_instances(self, context): + filtered_instances = [] + for instance in context: + subset_name = instance.data["subset"] + msg_start = "SKipping instance {}.".format(subset_name) + if not instance.data.get("farm"): + self.log.debug( + "{} Won't be rendered on farm.".format(msg_start) + ) + continue + + task_entity = instance.data.get("ftrackTask") + if not task_entity: + self.log.debug( + "{} Does not have filled task".format(msg_start) + ) + continue + + filtered_instances.append(instance) + return filtered_instances + + def get_instances_with_statuse_names(self, context, instances): + instances_with_status_names = [] + for instance in instances: + family = instance.data["family"] + subset_name = instance.data["subset"] + task_entity = instance.data["ftrackTask"] + host_name = context.data["hostName"] + task_name = task_entity["name"] + task_type = task_entity["type"]["name"] + status_profile = profiles_filtering( + self.farm_status_profiles, + { + "hosts": host_name, + "task_types": task_type, + "task_names": task_name, + "families": family, + "subsets": subset_name, + }, + logger=self.log + ) + if not status_profile: + # There already is log in 'profiles_filtering' + continue + + status_name = status_profile["status_name"] + if status_name: + instances_with_status_names.append((instance, status_name)) + return instances_with_status_names + + def fill_statuses(self, context, instances_with_status_names): + # Prepare available task statuses on the project + project_name = context.data["projectName"] + session = context.data["ftrackSession"] + project_entity = session.query(( + "select project_schema from Project where full_name is \"{}\"" + ).format(project_name)).one() + project_schema = project_entity["project_schema"] + task_workflow_statuses = project_schema["_task_workflow"]["statuses"] + + # Keep track if anything has changed + status_changed = False + found_status_id_by_status_name = {} + for item in instances_with_status_names: + instance, status_name = item + + status_name_low = status_name.lower() + status_id = found_status_id_by_status_name.get(status_name_low) + + if status_id is None: + # Skip if status name was already tried to be found + if status_name_low in found_status_id_by_status_name: + continue + + for status in task_workflow_statuses: + if status["name"].lower() == status_name_low: + status_id = status["id"] + break + + # Store the result to be reused in following instances + found_status_id_by_status_name[status_name_low] = status_id + + if status_id is None: + self.log.warning(( + "Status \"{}\" is not available on project \"{}\"" + ).format(status_name, project_name)) + continue + + # Change task status id + task_entity = instance.data["ftrackTask"] + if status_id != task_entity["status_id"]: + task_entity["status_id"] = status_id + status_changed = True + + if status_changed: + session.commit() From 41dd9e84f574663aef840596fa4e4c8a37a6a49b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 16 Aug 2022 17:53:27 +0200 Subject: [PATCH 0435/2550] added settings schema for new plugin --- .../defaults/project_settings/ftrack.json | 3 + .../schema_project_ftrack.json | 60 +++++++++++++++++++ 2 files changed, 63 insertions(+) diff --git 
a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 3e86581a03..610c85d232 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -489,6 +489,9 @@ }, "keep_first_subset_name_for_review": true, "asset_versions_status_profiles": [] + }, + "IntegrateFtrackFarmStatus": { + "farm_status_profiles": [] } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index c06bec0f58..a821b1de76 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -1003,6 +1003,66 @@ } } ] + }, + { + "type": "dict", + "key": "IntegrateFtrackFarmStatus", + "label": "Integrate Ftrack Farm Status", + "children": [ + { + "type": "label", + "label": "Change status of task when it's subset is rendered on farm" + }, + { + "type": "list", + "collapsible": true, + "key": "farm_status_profiles", + "label": "Farm status profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "hosts", + "label": "Host names", + "type": "hosts-enum", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "key": "subsets", + "label": "Subset names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "key": "status_name", + "label": "Status name", + "type": "text" + } + ] + } + } + ] } ] } From 87546e5a3c480be02d4ab5d316175ce11e28829a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 16 Aug 2022 18:49:00 +0200 Subject: [PATCH 0436/2550] :wrench: add repair action --- .../publish/validate_workfile_paths.py | 73 +++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py new file mode 100644 index 0000000000..604d4af392 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +import os +import openpype.api +import pyblish.api +import hou + + +class ValidateWorkfilePaths(pyblish.api.InstancePlugin): + """Validate workfile paths so they are absolute.""" + + order = pyblish.api.ValidatorOrder + families = ["workfile"] + hosts = ["houdini"] + label = "Validate Workfile Paths" + actions = [openpype.api.RepairAction] + optional = True + + node_types = ["file", "alembic"] + prohibited_vars = ["$HIP", "$JOB"] + + def process(self, instance): + invalid = self.get_invalid() + self.log.info( + "node types to check: {}".format(", ".join(self.node_types))) + self.log.info( + "prohibited vars: {}".format(", ".join(self.prohibited_vars)) + ) + if invalid: + for param in invalid: + self.log.error("{}: {}".format( + param.path(), + param.unexpandedString())) + + raise RuntimeError("Invalid paths found") + + @classmethod + def get_invalid(cls): + invalid = [] + for param, _ in hou.fileReferences(): + # skip 
nodes we are not interested in + if param.node().type().name() not in cls.node_types: + continue + + if any( + v for v in cls.prohibited_vars + if v in param.unexpandedString()): + invalid.append(param) + + return invalid + + @classmethod + def repair(cls, instance): + """Replace $HIP and $JOB vars for published path.""" + # determine path of published scene + anatomy = instance.context.data['anatomy'] + template_data = instance.data.get("anatomyData") + rep = instance.data.get("representations")[0].get("name") + template_data["representation"] = rep + template_data["ext"] = rep + template_data["comment"] = None + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + filepath = os.path.normpath(template_filled) + hip_dir = os.path.dirname(filepath) + invalid = cls.get_invalid() + for param in invalid: + cls.log.info("processing: {}".format(param.path())) + # replace $HIP + invalid_path = param.unexpandedString() + param.set(invalid_path.replace("$HIP", hip_dir)) + # replace $JOB + param.set(invalid_path.replace("$JOB", hip_dir)) + cls.log.info("Replacing {} for {}".format(invalid_path, hip_dir)) From b36b8ebee0ae424ab1189bc3d9629ef672097bb4 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 17 Aug 2022 04:12:09 +0000 Subject: [PATCH 0437/2550] [Automated] Bump version --- CHANGELOG.md | 19 ++++++++----------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 10 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2adb4ac154..80673e9f8a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,15 @@ # Changelog -## [3.13.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.13.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...HEAD) **🐛 Bug fixes** +- General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) +- Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) - General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) - Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) **🔀 Refactored code** @@ -16,7 +19,10 @@ **Merged pull requests:** +- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) +- Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645) - Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) +- fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) @@ -55,7 +61,6 @@ - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) -- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Nuke: publish existing frames with slate with correct range 
[\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** @@ -85,11 +90,10 @@ - General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) - Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) -- General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) -- Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) **🐛 Bug fixes** +- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) @@ -98,12 +102,6 @@ - Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) - Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) - General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538) -- Additional fixes for powershell scripts [\#3525](https://github.com/pypeclub/OpenPype/pull/3525) -- Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) -- Nuke: double slate [\#3521](https://github.com/pypeclub/OpenPype/pull/3521) -- General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) -- Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) -- TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) **🔀 Refactored code** @@ -112,7 +110,6 @@ - Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) - General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) - General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) -- General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) **Merged pull requests:** diff --git a/openpype/version.py b/openpype/version.py index 6ff5dfb7b5..9ae52e8370 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.1-nightly.2" +__version__ = "3.13.1-nightly.3" diff --git a/pyproject.toml b/pyproject.toml index 9cbdc295ff..83ccf233d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.1-nightly.2" # OpenPype +version = "3.13.1-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 4d61eec9952b61331c32b89d66fae35de26d13b4 Mon Sep 17 00:00:00 2001 From: Thomas Fricard <51854004+friquette@users.noreply.github.com> Date: Wed, 17 Aug 2022 10:12:43 +0200 Subject: [PATCH 0438/2550] fix typo Co-authored-by: Roy Nieterau --- openpype/hosts/houdini/api/shelves.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index ba3fcc2af9..a802d70457 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -153,7 +153,7 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsists and updates it. If not, creates + """This function verifies if the tool exists and updates it. If not, creates a new one. Arguments: From ccaef43535dd0d80c3184a325f51bfaea8409d75 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 10:32:11 +0200 Subject: [PATCH 0439/2550] changed description label --- .../entities/schemas/projects_schema/schema_project_ftrack.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index a821b1de76..1967a1150f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -1011,7 +1011,7 @@ "children": [ { "type": "label", - "label": "Change status of task when it's subset is rendered on farm" + "label": "Change status of task when it's subset is submitted to farm" }, { "type": "list", From 8d65c65fc9ebcccc8d58fe3b55e7cb81b4706106 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Wed, 17 Aug 2022 11:34:00 +0300 Subject: [PATCH 0440/2550] Remove unused attribute. 
Co-authored-by: Roy Nieterau --- openpype/hosts/maya/plugins/create/create_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 2f09aaee87..668cb57292 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -71,7 +71,6 @@ class CreateRender(plugin.Creator): label = "Render" family = "rendering" icon = "eye" - enable_all_lights = True _token = None _user = None _password = None From e364c025e6b3b29e6c0dfecf8a029e42bb83d0fb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 17 Aug 2022 10:34:58 +0200 Subject: [PATCH 0441/2550] Tweak labels for clearer distinction between the two actions --- openpype/hosts/maya/plugins/inventory/select_containers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/inventory/select_containers.py b/openpype/hosts/maya/plugins/inventory/select_containers.py index 4b7c92729f..d4a7ff401d 100644 --- a/openpype/hosts/maya/plugins/inventory/select_containers.py +++ b/openpype/hosts/maya/plugins/inventory/select_containers.py @@ -7,7 +7,7 @@ from openpype.hosts.maya.api.lib import get_container_members class SelectInScene(InventoryAction): """Select nodes in the scene from selected containers in scene inventory""" - label = "Select In Scene" + label = "Select in scene" icon = "search" color = "#888888" order = 99 @@ -21,10 +21,10 @@ class SelectInScene(InventoryAction): cmds.select(all_members, replace=True, noExpand=True) -class SelectFromScene(InventoryAction): +class HighlightBySceneSelection(InventoryAction): """Select containers in scene inventory from the current scene selection""" - label = "Select From Scene" + label = "Highlight by scene selection" icon = "search" color = "#888888" order = 100 From 41ac0d65c4c7fe145cf4347e1faf5af6b5b7dfa6 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Wed, 17 Aug 2022 11:35:10 +0300 Subject: [PATCH 0442/2550] Fix bug in default. Co-authored-by: Roy Nieterau --- openpype/hosts/maya/plugins/create/create_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 668cb57292..5418ec1f2f 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -223,7 +223,7 @@ class CreateRender(plugin.Creator): self._project_settings.get( "maya", {}).get( "RenderSettings", {}).get( - "enable_all_lights", {}) + "enable_all_lights", False) ) # Disable for now as this feature is not working yet # self.data["assScene"] = False From 7deb3079247f56ba606b008c462099f18a73ae74 Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Wed, 17 Aug 2022 11:35:26 +0300 Subject: [PATCH 0443/2550] Fix bug in default. 
Co-authored-by: Roy Nieterau --- openpype/hosts/maya/plugins/publish/validate_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 93ef7d7af7..f19c0bff36 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -245,7 +245,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): settings_lights_flag = instance.context.data["project_settings"].get( "maya", {}).get( "RenderSettings", {}).get( - "enable_all_lights", {}) + "enable_all_lights", False) instance_lights_flag = instance.data.get("renderSetupIncludeLights") if settings_lights_flag != instance_lights_flag: From 2f3e6a73e3f8d130fc639cd1c5c1429e4957ea2a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 10:48:50 +0200 Subject: [PATCH 0444/2550] Change label of plugin --- .../ftrack/plugins/publish/integrate_ftrack_farm_status.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index ecf258a870..f725de3144 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -10,7 +10,7 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): """ order = pyblish.api.IntegratorOrder + 0.48 - label = "Integrate Ftrack Component" + label = "Integrate Ftrack Farm Status" families = ["ftrack"] farm_status_profiles = [] @@ -35,7 +35,7 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): filtered_instances = [] for instance in context: subset_name = instance.data["subset"] - msg_start = "SKipping instance {}.".format(subset_name) + msg_start = "Skipping instance {}.".format(subset_name) if not instance.data.get("farm"): self.log.debug( "{} Won't be rendered on farm.".format(msg_start) From 7095bff502f13498bb1dd7a7a173693bf43e72dd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 11:04:58 +0200 Subject: [PATCH 0445/2550] set "farm" to true in maya render colletor --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index c3e6c98020..0d45ad4f9e 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -354,6 +354,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): instance = context.create_instance(expected_layer_name) instance.data["label"] = label + instance.data["farm"] = True instance.data.update(data) self.log.debug("data: {}".format(json.dumps(data, indent=4))) From 58f19f15f4a2c3d4ad6d0dd71089c0357904dcd9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 11:05:04 +0200 Subject: [PATCH 0446/2550] skip disabled instances --- .../ftrack/plugins/publish/integrate_ftrack_farm_status.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index f725de3144..fcbe71e0ac 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ 
b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -34,6 +34,9 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): def filter_instances(self, context): filtered_instances = [] for instance in context: + # Skip disabled instances + if instance.data.get("publish") is False: + continue subset_name = instance.data["subset"] msg_start = "Skipping instance {}.".format(subset_name) if not instance.data.get("farm"): From 346e3b8300e01ac8b3ab4e2c52a7d0c25a169d33 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 11:11:39 +0200 Subject: [PATCH 0447/2550] removed families filter --- .../ftrack/plugins/publish/integrate_ftrack_farm_status.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index fcbe71e0ac..24f784f83d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -11,7 +11,6 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder + 0.48 label = "Integrate Ftrack Farm Status" - families = ["ftrack"] farm_status_profiles = [] From 4dbca722bac4a918fa992a0860361460338ef970 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 17 Aug 2022 11:14:29 +0200 Subject: [PATCH 0448/2550] OP-3713 - refactored keys and labels --- .../plugins/publish/collect_color_coded_instances.py | 2 +- .../schemas/projects_schema/schema_project_photoshop.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index f93ba51574..c157c932fd 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -109,7 +109,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): "Subset {} already created, skipping.".format(subset)) continue - if self.create_flatten_image != "only": + if self.create_flatten_image != "flatten_only": instance = self._create_instance(context, layer, resolved_family, asset_name, subset, task_name) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 6935ec8e5e..db06147a51 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -50,9 +50,9 @@ "type": "enum", "multiselection": false, "enum_items": [ - { "yes": "Yes" }, - { "no": "No" }, - { "only": "Only flatten" } + { "flatten_with_images": "Flatten with images" }, + { "flatten_only": "Flatten only" }, + { "no": "No" } ] }, { From 95c19cc412ecbfd0caf42e06ada91640e8da5885 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 11:22:56 +0200 Subject: [PATCH 0449/2550] fill context entities in all instances --- .../plugins/publish/collect_ftrack_api.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py index 14da188150..99a555014e 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py +++ 
b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py @@ -105,11 +105,17 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): context.data["ftrackEntity"] = asset_entity context.data["ftrackTask"] = task_entity - self.per_instance_process(context, asset_name, task_name) + self.per_instance_process(context, asset_entity, task_entity) def per_instance_process( - self, context, context_asset_name, context_task_name + self, context, context_asset_entity, context_task_entity ): + context_task_name = None + context_asset_name = None + if context_asset_entity: + context_asset_name = context_asset_entity["name"] + if context_task_entity: + context_task_name = context_task_entity["name"] instance_by_asset_and_task = {} for instance in context: self.log.debug( @@ -120,6 +126,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): if not instance_asset_name and not instance_task_name: self.log.debug("Instance does not have set context keys.") + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = context_task_entity continue elif instance_asset_name and instance_task_name: @@ -131,6 +139,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): "Instance's context is same as in publish context." " Asset: {} | Task: {}" ).format(context_asset_name, context_task_name)) + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = context_task_entity continue asset_name = instance_asset_name task_name = instance_task_name @@ -141,6 +151,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): "Instance's context task is same as in publish" " context. Task: {}" ).format(context_task_name)) + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = context_task_entity continue asset_name = context_asset_name @@ -152,6 +164,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): "Instance's context asset is same as in publish" " context. Asset: {}" ).format(context_asset_name)) + instance.data["ftrackEntity"] = context_asset_entity + instance.data["ftrackTask"] = None continue # Do not use context's task name From ee4ad799902f313a98ac1e4ab1403617e2d7d4bf Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 17 Aug 2022 11:48:29 +0200 Subject: [PATCH 0450/2550] change logs messages --- openpype/hosts/houdini/api/shelves.py | 35 +++++++++++++++------------ 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index ba3fcc2af9..805ce4c397 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -6,7 +6,7 @@ from openpype.settings import get_project_settings import hou -log = logging.getLogger("openpype.hosts.houdini") +log = logging.getLogger("openpype.hosts.houdini.shelves") def generate_shelves(): @@ -23,8 +23,8 @@ def generate_shelves(): shelves_set_config = project_settings["houdini"]["shelves"] if not shelves_set_config: - log.info( - "SHELF INFO: No custom shelves found in project settings." + log.debug( + "No custom shelves found in project settings." 
) return @@ -34,7 +34,7 @@ def generate_shelves(): if shelf_set_filepath[current_os]: if not os.path.isfile(shelf_set_filepath[current_os]): raise FileNotFoundError( - "SHELF ERROR: This path doesn't exist - {}".format( + "This path doesn't exist - {}".format( shelf_set_filepath[current_os] ) ) @@ -45,7 +45,7 @@ def generate_shelves(): shelf_set_name = shelf_set_config.get('shelf_set_name') if not shelf_set_name: log.warning( - "SHELF WARNING: No name found in shelf set definition." + "No name found in shelf set definition." ) return @@ -54,9 +54,10 @@ def generate_shelves(): shelves_definition = shelf_set_config.get('shelf_definition') if not shelves_definition: - log.info( - "SHELF INFO: \ -No shelf definition found for shelf set named '{}'".format(shelf_set_name) + log.debug( + "No shelf definition found for shelf set named '{}'".format( + shelf_set_name + ) ) return @@ -64,15 +65,18 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf_name = shelf_definition.get('shelf_name') if not shelf_name: log.warning( - "SHELF WARNING: No name found in shelf definition." + "No name found in shelf definition." ) return shelf = get_or_create_shelf(shelf_name) if not shelf_definition.get('tools_list'): - log.warning("TOOLS INFO: No tool definition found for \ -shelf named {}".format(shelf_name)) + log.debug( + "No tool definition found for shelf named {}".format( + shelf_name + ) + ) return mandatory_attributes = {'name', 'script'} @@ -82,8 +86,9 @@ shelf named {}".format(shelf_name)) if not all( tool_definition[key] for key in mandatory_attributes ): - log.warning("TOOLS ERROR: You need to specify at least \ -the name and the script path of the tool.") + log.warning( + "You need to specify at least the name and \ +the script path of the tool.") continue tool = get_or_create_tool(tool_definition, shelf) @@ -153,7 +158,7 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsists and updates it. If not, creates + """This function verifies if the tool exists and updates it. If not, creates a new one. Arguments: @@ -180,7 +185,7 @@ def get_or_create_tool(tool_definition, shelf): if not os.path.exists(tool_definition['script']): log.warning( - "TOOL ERROR: This path doesn't exist - {}".format( + "This path doesn't exist - {}".format( tool_definition['script'] ) ) From b178bb538496123fe748d54dc3271ebabc019cfe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 17 Aug 2022 11:49:50 +0200 Subject: [PATCH 0451/2550] OP-3722 - added check for empty context --- openpype/pype_commands.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index a447aa916b..b266479bb1 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -232,6 +232,11 @@ class PypeCommands: fail_batch(_id, dbcon, msg) print("Another batch running, probably stuck, ask admin for help") + if not task_data["context"]: + msg = "Batch manifest must contain context data" + msg += "Create new batch and set context properly." 
+ fail_batch(_id, dbcon, msg) + asset_name, task_name, task_type = get_batch_asset_task_info( task_data["context"]) From 98ac7f538e6482007115aff917cdf4ccd39fbc83 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 17 Aug 2022 11:59:15 +0200 Subject: [PATCH 0452/2550] condition for case where audio is already collected --- openpype/plugins/publish/collect_audio.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 7e3b42f375..6aed3f82fe 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -68,9 +68,10 @@ class CollectAudio(pyblish.api.InstancePlugin): # Add audio to instance if representation was found if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] + if not instance.data.get("audio"): + instance.data["audio"] = [{ + "offset": 0, + "filename": get_representation_path(repre_doc) + }] self.log.debug("instance.data: {}".format(pformat(instance.data))) From 51c27f28c0791633819f935e230a179ea20ff00b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 12:20:06 +0200 Subject: [PATCH 0453/2550] added ability to add additional metadata to components --- .../publish/integrate_ftrack_instances.py | 134 +++++++++++------- 1 file changed, 85 insertions(+), 49 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index a1e5922730..3f0cc176a2 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -3,6 +3,7 @@ import json import copy import pyblish.api +from openpype.lib.openpype_version import get_openpype_version from openpype.lib.transcoding import ( get_ffprobe_streams, convert_ffprobe_fps_to_float, @@ -20,6 +21,17 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): label = "Integrate Ftrack Component" families = ["ftrack"] + metadata_keys_to_label = { + "openpype_version": "OpenPype version", + "frame_start": "Frame start", + "frame_end": "Frame end", + "duration": "Duration", + "width": "Resolution width", + "height": "Resolution height", + "fps": "FPS", + "code": "Codec" + } + family_mapping = { "camera": "cam", "look": "look", @@ -43,6 +55,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): } keep_first_subset_name_for_review = True asset_versions_status_profiles = {} + additional_metadata_keys = [] def process(self, instance): self.log.debug("instance {}".format(instance)) @@ -105,7 +118,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "component_data": None, "component_path": None, "component_location": None, - "component_location_name": None + "component_location_name": None, + "additional_data": {} } # Filter types of representations @@ -152,6 +166,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "name": "thumbnail" } thumbnail_item["thumbnail"] = True + # Create copy of item before setting location src_components_to_add.append(copy.deepcopy(thumbnail_item)) # Create copy of first thumbnail @@ -248,19 +263,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): first_thumbnail_component[ "asset_data"]["name"] = extended_asset_name - component_meta = self._prepare_component_metadata( - instance, repre, repre_path, True - ) - # Change location review_item["component_path"] = 
repre_path # Change component data review_item["component_data"] = { # Default component name is "main". "name": "ftrackreview-mp4", - "metadata": { - "ftr_meta": json.dumps(component_meta) - } + "metadata": self._prepare_component_metadata( + instance, repre, repre_path, True + ) } if is_first_review_repre: @@ -302,13 +313,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): component_data = copy_src_item["component_data"] component_name = component_data["name"] component_data["name"] = component_name + "_src" - component_meta = self._prepare_component_metadata( + component_data["metadata"] = self._prepare_component_metadata( instance, repre, copy_src_item["component_path"], False ) - if component_meta: - component_data["metadata"] = { - "ftr_meta": json.dumps(component_meta) - } component_list.append(copy_src_item) # Add others representations as component @@ -326,16 +333,12 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ): other_item["asset_data"]["name"] = extended_asset_name - component_meta = self._prepare_component_metadata( - instance, repre, published_path, False - ) component_data = { - "name": repre["name"] + "name": repre["name"], + "metadata": self._prepare_component_metadata( + instance, repre, published_path, False + ) } - if component_meta: - component_data["metadata"] = { - "ftr_meta": json.dumps(component_meta) - } other_item["component_data"] = component_data other_item["component_location_name"] = unmanaged_location_name other_item["component_path"] = published_path @@ -354,6 +357,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): )) instance.data["ftrackComponentsList"] = component_list + def _collect_additional_metadata(self, streams): + pass + def _get_repre_path(self, instance, repre, only_published): """Get representation path that can be used for integration. 
@@ -423,6 +429,11 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): def _prepare_component_metadata( self, instance, repre, component_path, is_review ): + metadata = {} + if "openpype_version" in self.additional_metadata_keys: + label = self.metadata_keys_to_label["openpype_version"] + metadata[label] = get_openpype_version() + extension = os.path.splitext(component_path)[-1] streams = [] try: @@ -442,13 +453,23 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # - exr is special case which can have issues with reading through # ffmpegh but we want to set fps for it if not video_streams and extension not in [".exr"]: - return {} + return metadata stream_width = None stream_height = None stream_fps = None frame_out = None + codec_label = None for video_stream in video_streams: + codec_label = video_stream.get("codec_long_name") + if not codec_label: + codec_label = video_stream.get("codec") + + if codec_label: + pix_fmt = video_stream.get("pix_fmt") + if pix_fmt: + codec_label += " ({})".format(pix_fmt) + tmp_width = video_stream.get("width") tmp_height = video_stream.get("height") if tmp_width and tmp_height: @@ -456,8 +477,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): stream_height = tmp_height input_framerate = video_stream.get("r_frame_rate") - duration = video_stream.get("duration") - if input_framerate is None or duration is None: + stream_duration = video_stream.get("duration") + if input_framerate is None or stream_duration is None: continue try: stream_fps = convert_ffprobe_fps_to_float( @@ -473,9 +494,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): stream_height = tmp_height self.log.debug("FPS from stream is {} and duration is {}".format( - input_framerate, duration + input_framerate, stream_duration )) - frame_out = float(duration) * stream_fps + frame_out = float(stream_duration) * stream_fps break # Prepare FPS @@ -483,43 +504,58 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): if instance_fps is None: instance_fps = instance.context.data["fps"] - if not is_review: - output = {} - fps = stream_fps or instance_fps - if fps: - output["frameRate"] = fps - - if stream_width and stream_height: - output["width"] = int(stream_width) - output["height"] = int(stream_height) - return output - - frame_start = repre.get("frameStartFtrack") - frame_end = repre.get("frameEndFtrack") - if frame_start is None or frame_end is None: - frame_start = instance.data["frameStart"] - frame_end = instance.data["frameEnd"] - - fps = None repre_fps = repre.get("fps") if repre_fps is not None: repre_fps = float(repre_fps) fps = stream_fps or repre_fps or instance_fps + # Prepare frame ranges + frame_start = repre.get("frameStartFtrack") + frame_end = repre.get("frameEndFtrack") + if frame_start is None or frame_end is None: + frame_start = instance.data["frameStart"] + frame_end = instance.data["frameEnd"] + duration = (frame_end - frame_start) + 1 + + for key, value in [ + ("fps", fps), + ("frame_start", frame_start), + ("frame_end", frame_end), + ("duration", duration), + ("width", stream_width), + ("height", stream_height), + ("fps", fps), + ("code", codec_label) + ]: + if not value or key not in self.additional_metadata_keys: + continue + label = self.metadata_keys_to_label[key] + metadata[label] = value + + if not is_review: + ftr_meta = {} + if fps: + ftr_meta["frameRate"] = fps + + if stream_width and stream_height: + ftr_meta["width"] = int(stream_width) + ftr_meta["height"] = int(stream_height) + metadata["ftr_meta"] = 
json.dumps(ftr_meta) + return metadata + # Frame end of uploaded video file should be duration in frames # - frame start is always 0 # - frame end is duration in frames if not frame_out: - frame_out = frame_end - frame_start + 1 + frame_out = duration # Ftrack documentation says that it is required to have # 'width' and 'height' in review component. But with those values # review video does not play. - component_meta = { + metadata["ftr_meta"] = json.dumps({ "frameIn": 0, "frameOut": frame_out, "frameRate": float(fps) - } - - return component_meta + }) + return metadata From b66c8088c3c9fcde06cdcf6cb837c1deb2c5cc1b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 12:24:01 +0200 Subject: [PATCH 0454/2550] added settings for 'additional_metadata_keys' --- .../defaults/project_settings/ftrack.json | 3 ++- .../projects_schema/schema_project_ftrack.json | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 9847e58cfa..952657251c 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -491,7 +491,8 @@ "usd": "usd" }, "keep_first_subset_name_for_review": true, - "asset_versions_status_profiles": [] + "asset_versions_status_profiles": [], + "additional_metadata_keys": [] } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 3f472c6c6a..1a63e589b2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -1039,6 +1039,22 @@ } ] } + }, + { + "key": "additional_metadata_keys", + "label": "Additional metadata keys on components", + "type": "enum", + "multiselection": true, + "enum_items": [ + {"openpype_version": "OpenPype version"}, + {"frame_start": "Frame start"}, + {"frame_end": "Frame end"}, + {"duration": "Duration"}, + {"width": "Resolution width"}, + {"height": "Resolution height"}, + {"fps": "FPS"}, + {"code": "Codec"} + ] } ] } From 3226eb5e8f239d63817fe907278e2961eefee6f8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 17 Aug 2022 18:46:59 +0800 Subject: [PATCH 0455/2550] fix the break of file sequence collection in review when the subset name with the version string --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 54ef09e060..a1048398c3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -139,7 +139,8 @@ class ExtractPlayblast(openpype.api.Extractor): collected_files = os.listdir(stagingdir) collections, remainder = clique.assemble(collected_files, - minimum_items=1) + minimum_items=1, + patterns=[r'\.(?P(?P0*)\d+)\.\D+\d?$']) self.log.debug("filename {}".format(filename)) frame_collection = None From 7576f3824e4aec860c68ee88dea1dcd33de3a4ae Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 17 Aug 2022 18:53:48 +0800 Subject: [PATCH 0456/2550] fix the break of file sequence collection in review when the subset name with the version string --- 
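
For reference, a minimal sketch of what the explicit pattern buys us. clique's
default pattern matches any digit run, so a subset name such as
"reviewMain_v001" could be picked up as the frame counter; restricting assembly
to the trailing ".<frame>.<ext>" avoids that. The named groups "index" and
"padding" are clique's standard group names; their angle brackets appear
stripped in the hunks above, so the regex below is an assumption about the
intended pattern rather than a verbatim copy, and the file names are made up
for illustration:

    import clique

    files = ["reviewMain_v001.0001.png", "reviewMain_v001.0002.png"]
    pattern_frame = [r"\.(?P<index>(?P<padding>0*)\d+)\.\D+\d?$"]
    collections, remainder = clique.assemble(
        files, minimum_items=1, patterns=pattern_frame
    )
    # One collection: "reviewMain_v001.%04d.png [1-2]"; the "001" inside
    # the subset name is no longer treated as a frame index.
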
openpype/hosts/maya/plugins/publish/extract_playblast.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index a1048398c3..6626eb6a7a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -138,9 +138,10 @@ class ExtractPlayblast(openpype.api.Extractor): self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) + pattern_frame = [r'\.(?P(?P0*)\d+)\.\D+\d?$'] collections, remainder = clique.assemble(collected_files, minimum_items=1, - patterns=[r'\.(?P(?P0*)\d+)\.\D+\d?$']) + patterns=pattern_frame) self.log.debug("filename {}".format(filename)) frame_collection = None From 05f1b732b6edd1732139350528ad614095da5b70 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 13:22:34 +0200 Subject: [PATCH 0457/2550] fill context task entity in collect ftrack api --- openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py index 99a555014e..e13b7e65cd 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py @@ -165,7 +165,7 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): " context. Asset: {}" ).format(context_asset_name)) instance.data["ftrackEntity"] = context_asset_entity - instance.data["ftrackTask"] = None + instance.data["ftrackTask"] = context_task_entity continue # Do not use context's task name From 4dba68c5bdade98048dd1ca15d7f03ac004dcf28 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 13:30:14 +0200 Subject: [PATCH 0458/2550] fix function import and call --- .../ftrack/plugins/publish/integrate_ftrack_farm_status.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index 24f784f83d..0a7ad0b532 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.lib import profiles_filtering +from openpype.lib import filter_profiles class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): @@ -63,7 +63,7 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): host_name = context.data["hostName"] task_name = task_entity["name"] task_type = task_entity["type"]["name"] - status_profile = profiles_filtering( + status_profile = filter_profiles( self.farm_status_profiles, { "hosts": host_name, @@ -75,7 +75,7 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): logger=self.log ) if not status_profile: - # There already is log in 'profiles_filtering' + # There already is log in 'filter_profiles' continue status_name = status_profile["status_name"] From cb34f4619e54ae887bc1ea38a0e1ec106d228167 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 13:30:20 +0200 Subject: [PATCH 0459/2550] log availabl status names --- .../plugins/publish/integrate_ftrack_farm_status.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git 
a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index 0a7ad0b532..8bebfd8485 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -93,6 +93,10 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): project_schema = project_entity["project_schema"] task_workflow_statuses = project_schema["_task_workflow"]["statuses"] + joined_status_names = ", ".join({ + '"{}"'.format(status["name"]) + for status in task_workflow_statuses + }) # Keep track if anything has changed status_changed = False found_status_id_by_status_name = {} @@ -117,8 +121,9 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): if status_id is None: self.log.warning(( - "Status \"{}\" is not available on project \"{}\"" - ).format(status_name, project_name)) + "Status \"{}\" is not available on project \"{}\"." + " Available statuses are {}" + ).format(status_name, project_name, joined_status_names)) continue # Change task status id From bc3aa4b1609e067e7b4a31a9874e8a415bcfcc71 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 13:54:06 +0200 Subject: [PATCH 0460/2550] fix getting of task statuses --- .../plugins/publish/integrate_ftrack_farm_status.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index 8bebfd8485..658df70895 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -90,12 +90,17 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): project_entity = session.query(( "select project_schema from Project where full_name is \"{}\"" ).format(project_name)).one() + task_type = session.query( + "select id from ObjectType where name is \"Task\"" + ).first() project_schema = project_entity["project_schema"] - task_workflow_statuses = project_schema["_task_workflow"]["statuses"] + task_statuses = project_schema.get_statuses( + "Task", task_type["id"] + ) joined_status_names = ", ".join({ '"{}"'.format(status["name"]) - for status in task_workflow_statuses + for status in task_statuses }) # Keep track if anything has changed status_changed = False @@ -111,7 +116,7 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): if status_name_low in found_status_id_by_status_name: continue - for status in task_workflow_statuses: + for status in task_statuses: if status["name"].lower() == status_name_low: status_id = status["id"] break From 671cf183fd73629e7a140784e518bc7718fa5431 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 14:18:13 +0200 Subject: [PATCH 0461/2550] fix statuses lookup by task type --- .../publish/integrate_ftrack_farm_status.py | 62 +++++++++---------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index 658df70895..c5fc3dd68f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -90,49 +90,49 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): project_entity = session.query(( 
"select project_schema from Project where full_name is \"{}\"" ).format(project_name)).one() - task_type = session.query( - "select id from ObjectType where name is \"Task\"" - ).first() project_schema = project_entity["project_schema"] - task_statuses = project_schema.get_statuses( - "Task", task_type["id"] - ) - joined_status_names = ", ".join({ - '"{}"'.format(status["name"]) - for status in task_statuses - }) + task_type_ids = set() + for item in instances_with_status_names: + instance, _ = item + task_entity = instance.data["ftrackTask"] + task_type_ids.add(task_entity["type"]["id"]) + + task_statuses_by_type_id = { + task_type_id: project_schema.get_statuses("Task", task_type_id) + for task_type_id in task_type_ids + } + # Keep track if anything has changed + skipped_status_names = set() status_changed = False - found_status_id_by_status_name = {} for item in instances_with_status_names: instance, status_name = item - + task_entity = instance.data["ftrackTask"] + task_statuses = task_statuses_by_type_id[task_entity["type"]["id"]] status_name_low = status_name.lower() - status_id = found_status_id_by_status_name.get(status_name_low) + + status_id = None + # Skip if status name was already tried to be found + for status in task_statuses: + if status["name"].lower() == status_name_low: + status_id = status["id"] + break if status_id is None: - # Skip if status name was already tried to be found - if status_name_low in found_status_id_by_status_name: - continue - - for status in task_statuses: - if status["name"].lower() == status_name_low: - status_id = status["id"] - break - - # Store the result to be reused in following instances - found_status_id_by_status_name[status_name_low] = status_id - - if status_id is None: - self.log.warning(( - "Status \"{}\" is not available on project \"{}\"." - " Available statuses are {}" - ).format(status_name, project_name, joined_status_names)) + if status_name_low not in skipped_status_names: + skipped_status_names.add(status_name_low) + joined_status_names = ", ".join({ + '"{}"'.format(status["name"]) + for status in task_statuses + }) + self.log.warning(( + "Status \"{}\" is not available on project \"{}\"." 
+ " Available statuses are {}" + ).format(status_name, project_name, joined_status_names)) continue # Change task status id - task_entity = instance.data["ftrackTask"] if status_id != task_entity["status_id"]: task_entity["status_id"] = status_id status_changed = True From 6d4a80cd30b8adf926a44b46c2c8f70ee04217f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 14:28:27 +0200 Subject: [PATCH 0462/2550] added some logs related to status changes --- .../plugins/publish/integrate_ftrack_farm_status.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py index c5fc3dd68f..ab5738c33f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -113,10 +113,12 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): status_name_low = status_name.lower() status_id = None + status_name = None # Skip if status name was already tried to be found for status in task_statuses: if status["name"].lower() == status_name_low: status_id = status["id"] + status_name = status["name"] break if status_id is None: @@ -136,6 +138,13 @@ class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin): if status_id != task_entity["status_id"]: task_entity["status_id"] = status_id status_changed = True + path = "/".join([ + item["name"] + for item in task_entity["link"] + ]) + self.log.debug("Set status \"{}\" to \"{}\"".format( + status_name, path + )) if status_changed: session.commit() From 5a0b15c63b90a97417d43d9a3cfff7ba927dd4e1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 15:35:46 +0200 Subject: [PATCH 0463/2550] fix typo in codec --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 3f0cc176a2..1bf4caac77 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -29,7 +29,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "width": "Resolution width", "height": "Resolution height", "fps": "FPS", - "code": "Codec" + "codec": "Codec" } family_mapping = { @@ -526,7 +526,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ("width", stream_width), ("height", stream_height), ("fps", fps), - ("code", codec_label) + ("codec", codec_label) ]: if not value or key not in self.additional_metadata_keys: continue From db6f46895b9c2a3659bfb5803705388b7d2f7dfd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 17 Aug 2022 16:01:31 +0200 Subject: [PATCH 0464/2550] OP-3723 - remove PIL limit High resolution could trigger " could be decompression bomb DOS attack". --- openpype/hosts/photoshop/plugins/publish/extract_review.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 7f78a46527..151440b914 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -144,6 +144,7 @@ class ExtractReview(openpype.api.Extractor): used as a source for thumbnail or review mov. 
""" max_ffmpeg_size = 16384 + Image.MAX_IMAGE_PIXELS = None first_url = os.path.join(staging_dir, processed_img_names[0]) with Image.open(first_url) as im: width, height = im.size From 5484c083230cbc3090db1b2dba6d582d21a1f849 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 16:58:41 +0200 Subject: [PATCH 0465/2550] context label collector does not require 'currentFile' to be filled --- .../plugins/publish/collect_context_label.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/collect_context_label.py b/openpype/plugins/publish/collect_context_label.py index 8cf71882aa..0ca19b28c1 100644 --- a/openpype/plugins/publish/collect_context_label.py +++ b/openpype/plugins/publish/collect_context_label.py @@ -1,5 +1,6 @@ """ -Requires: +Optional: + context -> hostName (str) context -> currentFile (str) Provides: context -> label (str) @@ -16,16 +17,16 @@ class CollectContextLabel(pyblish.api.ContextPlugin): label = "Context Label" def process(self, context): + host_name = context.data.get("hostName") + if not host_name: + host_name = pyblish.api.registered_hosts()[-1] + # Use host name as base for label + label = host_name.title() - # Get last registered host - host = pyblish.api.registered_hosts()[-1] - - # Get scene name from "currentFile" - path = context.data.get("currentFile") or "" - base = os.path.basename(path) + # Get scene name from "currentFile" and use basename as ending of label + path = context.data.get("currentFile") + if path: + label += " - {}".format(os.path.basename(path)) # Set label - label = "{host} - {scene}".format(host=host.title(), scene=base) - if host == "standalonepublisher": - label = host.title() context.data["label"] = label From e3c43d22159b78428c7ee7f75d89f793dc86dba7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 16:59:12 +0200 Subject: [PATCH 0466/2550] it is possible to have set custom context label and in that case the plugin is skipped --- openpype/plugins/publish/collect_context_label.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/plugins/publish/collect_context_label.py b/openpype/plugins/publish/collect_context_label.py index 0ca19b28c1..1dec0b380b 100644 --- a/openpype/plugins/publish/collect_context_label.py +++ b/openpype/plugins/publish/collect_context_label.py @@ -17,6 +17,12 @@ class CollectContextLabel(pyblish.api.ContextPlugin): label = "Context Label" def process(self, context): + # Add ability to use custom context label + context_label = context.data.get("contextLabel") + if context_label: + context.data["label"] = context_label + return + host_name = context.data.get("hostName") if not host_name: host_name = pyblish.api.registered_hosts()[-1] From 09af23e2d789dcb02450cbd6eed9be53c062c416 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 17 Aug 2022 17:26:10 +0200 Subject: [PATCH 0467/2550] resolve: fixing import in collector --- .../hosts/resolve/plugins/publish/precollect_workfile.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py index 53e67aee0e..0f94216556 100644 --- a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py @@ -1,11 +1,9 @@ import pyblish.api from pprint import pformat -from importlib import reload -from openpype.hosts import resolve +from openpype.hosts.resolve import api as 
rapi from openpype.pipeline import legacy_io from openpype.hosts.resolve.otio import davinci_export -reload(davinci_export) class PrecollectWorkfile(pyblish.api.ContextPlugin): @@ -18,9 +16,9 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" - project = resolve.get_current_project() + project = rapi.get_current_project() fps = project.GetSetting("timelineFrameRate") - video_tracks = resolve.get_video_track_names() + video_tracks = rapi.get_video_track_names() # adding otio timeline to context otio_timeline = davinci_export.create_otio_timeline(project) From ebdf8a348a3eb4e34162564c719a872b5e30b71e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 17 Aug 2022 17:41:23 +0200 Subject: [PATCH 0468/2550] OP-3723 - changed max limit Official 16384x16384 actually didn't work because int overflow. 16000 tested and worked. --- openpype/hosts/photoshop/plugins/publish/extract_review.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 151440b914..64decbb957 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -143,7 +143,8 @@ class ExtractReview(openpype.api.Extractor): Ffmpeg has max size 16384x16384. Saved image(s) must be resized to be used as a source for thumbnail or review mov. """ - max_ffmpeg_size = 16384 + # 16384x16384 actually didn't work because int overflow + max_ffmpeg_size = 16000 Image.MAX_IMAGE_PIXELS = None first_url = os.path.join(staging_dir, processed_img_names[0]) with Image.open(first_url) as im: From fe278d7135998a368db562eabeb5a636ce56e0ca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 18:05:42 +0200 Subject: [PATCH 0469/2550] Don't force to have label on all instances and in context. 
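
In practice a collector no longer has to fill in a label at all: the UI now
takes instance.data["label"] if present, then a "label" attribute on the
instance object, and otherwise falls back to the instance name ("Context" for
the context item). A minimal sketch of a collector relying on that fallback;
the plugin and subset names here are made up for illustration:

    import pyblish.api

    class CollectExampleInstance(pyblish.api.ContextPlugin):
        order = pyblish.api.CollectorOrder

        def process(self, context):
            # No "label" key is set on purpose: the publish UI will show
            # the instance name "renderMain" instead of requiring a label.
            context.create_instance("renderMain", family="render")
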
--- openpype/tools/pyblish_pype/control.py | 1 - openpype/tools/pyblish_pype/model.py | 19 ++++++++++++------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/openpype/tools/pyblish_pype/control.py b/openpype/tools/pyblish_pype/control.py index f657936b79..05e53a989a 100644 --- a/openpype/tools/pyblish_pype/control.py +++ b/openpype/tools/pyblish_pype/control.py @@ -244,7 +244,6 @@ class Controller(QtCore.QObject): self.context.optional = False self.context.data["publish"] = True - self.context.data["label"] = "Context" self.context.data["name"] = "context" self.context.data["host"] = reversed(pyblish.api.registered_hosts()) diff --git a/openpype/tools/pyblish_pype/model.py b/openpype/tools/pyblish_pype/model.py index 31aa63677e..309126a884 100644 --- a/openpype/tools/pyblish_pype/model.py +++ b/openpype/tools/pyblish_pype/model.py @@ -596,11 +596,6 @@ class InstanceItem(QtGui.QStandardItem): instance._logs = [] instance.optional = getattr(instance, "optional", True) instance.data["publish"] = instance.data.get("publish", True) - instance.data["label"] = ( - instance.data.get("label") - or getattr(instance, "label", None) - or instance.data["name"] - ) family = self.data(Roles.FamiliesRole)[0] self.setData( @@ -616,9 +611,19 @@ class InstanceItem(QtGui.QStandardItem): def data(self, role=QtCore.Qt.DisplayRole): if role == QtCore.Qt.DisplayRole: + label = None if settings.UseLabel: - return self.instance.data["label"] - return self.instance.data["name"] + label = ( + self.instance.data.get("label") + or getattr(self.instance, "label", None) + ) + + if not label: + if self.is_context: + label = "Context" + else: + label = self.instance.data["name"] + return label if role == QtCore.Qt.DecorationRole: icon_name = self.instance.data.get("icon") or "file" From 55bf1bea91bda6c01a7d47f7c2437b80a4ccfc58 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 18:06:05 +0200 Subject: [PATCH 0470/2550] change label access in report --- openpype/tools/publisher/publish_report_viewer/report_items.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/publish_report_viewer/report_items.py b/openpype/tools/publisher/publish_report_viewer/report_items.py index 8a01569723..206f999bac 100644 --- a/openpype/tools/publisher/publish_report_viewer/report_items.py +++ b/openpype/tools/publisher/publish_report_viewer/report_items.py @@ -79,7 +79,7 @@ class PublishReport: context_data = data["context"] context_data["name"] = "context" - context_data["label"] = context_data["label"] or "Context" + context_data["label"] = context_data.get("label") or "Context" logs = [] plugins_items_by_id = {} From 6761aa7d68016ad0e319ddae56c956d656c8bd44 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 17 Aug 2022 18:07:30 +0200 Subject: [PATCH 0471/2550] Change the check of "label" key --- openpype/plugins/publish/collect_context_label.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/collect_context_label.py b/openpype/plugins/publish/collect_context_label.py index 1dec0b380b..6cdeba8418 100644 --- a/openpype/plugins/publish/collect_context_label.py +++ b/openpype/plugins/publish/collect_context_label.py @@ -18,9 +18,11 @@ class CollectContextLabel(pyblish.api.ContextPlugin): def process(self, context): # Add ability to use custom context label - context_label = context.data.get("contextLabel") - if context_label: - context.data["label"] = context_label + label = context.data.get("label") + if label: + 
self.log.debug("Context label is already set to \"{}\"".format( + label + )) return host_name = context.data.get("hostName") @@ -36,3 +38,6 @@ class CollectContextLabel(pyblish.api.ContextPlugin): # Set label context.data["label"] = label + self.log.debug("Context label is changed to \"{}\"".format( + label + )) From da3268c9a75e04a8464589fc1c1153e264fec60a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 09:51:45 +0200 Subject: [PATCH 0472/2550] resave default settings --- openpype/settings/defaults/project_settings/ftrack.json | 2 +- openpype/settings/defaults/project_settings/shotgrid.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 58b6a55958..2d5f889aa5 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -498,4 +498,4 @@ "farm_status_profiles": [] } } -} +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/shotgrid.json b/openpype/settings/defaults/project_settings/shotgrid.json index 83b6f69074..774bce714b 100644 --- a/openpype/settings/defaults/project_settings/shotgrid.json +++ b/openpype/settings/defaults/project_settings/shotgrid.json @@ -19,4 +19,4 @@ "step": "step" } } -} +} \ No newline at end of file From ed2aedd0feec05d7d53f2e62789f77838a6f2f47 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 18 Aug 2022 10:17:02 +0200 Subject: [PATCH 0473/2550] processing PR comments --- openpype/plugins/publish/collect_audio.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 6aed3f82fe..cf074392ee 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -15,7 +15,7 @@ class CollectAudio(pyblish.api.InstancePlugin): """ Collecting available audio subset to instance """ - label = "Colect Audio" + label = "Collect Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] hosts = [ @@ -39,8 +39,14 @@ class CollectAudio(pyblish.api.InstancePlugin): audio_subset_name = "audioMain" def process(self, instance): - # * Add audio to instance if exists. - self.log.info('Collecting Audio Data') + if instance.data.get("audio"): + self.log.info( + "Skipping Audio collecion. It is already collected" + ) + return + + # Add audio to instance if exists. 
+ self.log.info('Collecting Audio Data ...') project_name = legacy_io.active_project() asset_name = instance.data["asset"] @@ -68,10 +74,10 @@ class CollectAudio(pyblish.api.InstancePlugin): # Add audio to instance if representation was found if repre_doc: - if not instance.data.get("audio"): - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] + instance.data["audio"] = [{ + "offset": 0, + "filename": get_representation_path(repre_doc) + }] + self.log.info("Audio Data added to instance ...") self.log.debug("instance.data: {}".format(pformat(instance.data))) From 694a07579287b12b5fcce35fc4a405700ca3f64c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 18 Aug 2022 11:45:01 +0200 Subject: [PATCH 0474/2550] Refactor `subsetGroup` column name to `group` Co-authored-by: Milan Kolar --- openpype/tools/sceneinventory/model.py | 6 +++--- openpype/tools/sceneinventory/window.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index 97cc11ff23..1a3b7c7055 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -35,7 +35,7 @@ class InventoryModel(TreeModel): """The model for the inventory""" Columns = ["Name", "version", "count", "family", - "subsetGroup", "loader", "objectName"] + "group", "loader", "objectName"] OUTDATED_COLOR = QtGui.QColor(235, 30, 30) CHILD_OUTDATED_COLOR = QtGui.QColor(200, 160, 30) @@ -160,7 +160,7 @@ class InventoryModel(TreeModel): column_name = self.Columns[index.column()] - if column_name == "subsetGroup" and item.get("subsetGroup"): + if column_name == "group" and item.get("group"): return qtawesome.icon("fa.object-group", color=get_default_entity_icon_color()) @@ -429,7 +429,7 @@ class InventoryModel(TreeModel): group_node["familyIcon"] = family_icon group_node["count"] = len(group_items) group_node["isGroupNode"] = True - group_node["subsetGroup"] = subset["data"].get("subsetGroup") + group_node["group"] = subset["data"].get("subsetGroup") if self.sync_enabled: progress = get_progress_for_repre( diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py index 02addbccfe..1f4585b650 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -88,7 +88,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view.setColumnWidth(1, 55) # version view.setColumnWidth(2, 55) # count view.setColumnWidth(3, 150) # family - view.setColumnWidth(4, 120) # subsetGroup + view.setColumnWidth(4, 120) # group view.setColumnWidth(5, 150) # loader # apply delegates From ec405eb9130c6fe9c8b13dde34983fb43341507d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 11:52:46 +0200 Subject: [PATCH 0475/2550] prepared some classes to handle settings locks --- openpype/settings/handlers.py | 162 ++++++++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 15ae2351fd..8d13875d0b 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -22,6 +22,168 @@ from .constants import ( ) +class SettingsStateInfo: + """Helper state information for Settings state. + + Is used to hold information about last save and last opened UI. Keep + information about the time when that happened and on which machine under + which user. + + To create currrent machine and time information use 'create_new' method. 
+ """ + + timestamp_format = "%Y-%m-%d %H:%M:%S.%f" + + def __init__( + self, timestamp, hostname, hostip, username, system_name, local_id + ): + self.timestamp = timestamp + self._timestamp_obj = datetime.datetime.strptime( + timestamp, self.timestamp_format + ) + self.hostname = hostname + self.hostip = hostip + self.username = username + self.system_name = system_name + self.local_id = local_id + + def copy(self): + return self.from_data(self.to_data()) + + @property + def timestamp_obj(self): + return self._timestamp_obj + + @classmethod + def create_new(cls): + """Create information about this machine for current time.""" + + from openpype.lib.pype_info import get_workstation_info + + now = datetime.datetime.now() + workstation_info = get_workstation_info() + + return cls( + now.strftime(cls.timestamp_format), + workstation_info["hostname"], + workstation_info["hostip"], + workstation_info["username"], + workstation_info["system_name"], + workstation_info["local_id"] + ) + + @classmethod + def from_data(cls, data): + """Create object from data.""" + + return cls( + data["timestamp"], + data["hostname"], + data["hostip"], + data["username"], + data["system_name"], + data["local_id"] + ) + + def to_data(self): + return { + "timestamp": self.timestamp, + "hostname": self.hostname, + "hostip": self.hostip, + "username": self.username, + "system_name": self.system_name, + "local_id": self.local_id, + } + + def __eq__(self, other): + if not isinstance(other, SettingsStateInfo): + return False + + if other.timestamp_obj != self.timestamp_obj: + return False + + return ( + self.hostname == other.hostname + and self.hostip == other.hostip + and self.username == other.username + and self.system_name == other.system_name + and self.local_id == other.local_id + ) + + +class SettingsState: + """State of settings with last saved and last opened. + + Args: + openpype_version (str): OpenPype version string. + settings_type (str): Type of settings. System or project settings. + last_saved_info (Union[None, SettingsStateInfo]): Information about + machine and time when were settings saved last time. + last_opened_info (Union[None, SettingsStateInfo]): This is settings UI + specific information similar to last saved describes who had opened + settings as last. + project_name (Union[None, str]): Identifier for project settings. 
+ """ + + def __init__( + self, + openpype_version, + settings_type, + last_saved_info, + last_opened_info, + project_name=None + ): + self.openpype_version = openpype_version + self.settings_type = settings_type + self.last_saved_info = last_saved_info + self.last_opened_info = last_opened_info + self.project_name = project_name + + def __eq__(self, other): + if not isinstance(other, SettingsState): + return False + + return ( + self.openpype_version == other.openpype_version + and self.settings_type == other.settings_type + and self.last_saved_info == other.last_saved_info + and self.last_opened_info == other.last_opened_info + and self.project_name == other.project_name + ) + + def copy(self): + return self.__class__( + self.openpype_version, + self.settings_type, + self.last_saved_info.copy(), + self.last_opened_info.copy(), + self.project_name + ) + + def on_save(self, openpype_version): + self.openpype_version = openpype_version + self.last_saved_info = SettingsStateInfo.create_new() + + @classmethod + def from_document(cls, openpype_version, settings_type, document): + document = document or {} + last_saved_info = document.get("last_saved_info") + if last_saved_info: + last_saved_info = SettingsStateInfo.from_data(last_saved_info) + + last_opened_info = document.get("last_opened_info") + if last_opened_info: + last_opened_info = SettingsStateInfo.from_data(last_opened_info) + + return cls( + openpype_version, + settings_type, + last_saved_info, + last_opened_info, + document.get("project_name") + ) + + @six.add_metaclass(ABCMeta) class SettingsHandler: @abstractmethod From 17957a760c4c75ded01e944605f164ce058b252c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 11:53:24 +0200 Subject: [PATCH 0476/2550] 'update_data' and 'update_from_document' always require version --- openpype/settings/handlers.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 8d13875d0b..5a0a30e4a6 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -453,13 +453,12 @@ class CacheValues: return {} return copy.deepcopy(self.data) - def update_data(self, data, version=None): + def update_data(self, data, version): self.data = data self.creation_time = datetime.datetime.now() - if version is not None: - self.version = version + self.version = version - def update_from_document(self, document, version=None): + def update_from_document(self, document, version): data = {} if document: if "data" in document: @@ -468,9 +467,9 @@ class CacheValues: value = document["value"] if value: data = json.loads(value) + self.data = data - if version is not None: - self.version = version + self.version = version def to_json_string(self): return json.dumps(self.data or {}) @@ -1567,7 +1566,7 @@ class MongoLocalSettingsHandler(LocalSettingsHandler): """ data = data or {} - self.local_settings_cache.update_data(data) + self.local_settings_cache.update_data(data, None) self.collection.replace_one( { @@ -1590,6 +1589,6 @@ class MongoLocalSettingsHandler(LocalSettingsHandler): "site_id": self.local_site_id }) - self.local_settings_cache.update_from_document(document) + self.local_settings_cache.update_from_document(document, None) return self.local_settings_cache.data_copy() From 0dc4f1a78622edf932d8623b8ecafdae8c18afc7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 11:53:49 +0200 Subject: [PATCH 0477/2550] cache also can have settings state --- openpype/settings/handlers.py 
| 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 5a0a30e4a6..0ee1f74692 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -447,6 +447,7 @@ class CacheValues: self.data = None self.creation_time = None self.version = None + self.settings_state = None def data_copy(self): if not self.data: @@ -458,6 +459,9 @@ class CacheValues: self.creation_time = datetime.datetime.now() self.version = version + def update_settings_state(self, settings_state): + self.settings_state = settings_state + def update_from_document(self, document, version): data = {} if document: From 14b5ac4c251f5cb3b9a263f7f0b03bc0567ca45f Mon Sep 17 00:00:00 2001 From: Allan Ihsan Date: Thu, 18 Aug 2022 14:01:54 +0300 Subject: [PATCH 0478/2550] Add `extract_obj.py` and `obj.py` --- openpype/hosts/maya/api/obj.py | 0 .../hosts/maya/plugins/publish/extract_obj.py | 62 +++++++++++++++++++ 2 files changed, 62 insertions(+) create mode 100644 openpype/hosts/maya/api/obj.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_obj.py diff --git a/openpype/hosts/maya/api/obj.py b/openpype/hosts/maya/api/obj.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/maya/plugins/publish/extract_obj.py b/openpype/hosts/maya/plugins/publish/extract_obj.py new file mode 100644 index 0000000000..7c915a80d8 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_obj.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +import os + +from maya import cmds +import maya.mel as mel +import pyblish.api +import openpype.api +from openpype.hosts.maya.api.lib import maintained_selection + +from openpype.hosts.maya.api import obj + + +class ExtractObj(openpype.api.Extractor): + """Extract OBJ from Maya. + + This extracts reproducible OBJ exports ignoring any of the settings + set on the local machine in the OBJ export options window. 
+ + """ + order = pyblish.api.ExtractorOrder + label = "Extract OBJ" + families = ["obj"] + + def process(self, instance): + obj_exporter = obj.OBJExtractor(log=self.log) + + # Define output path + + staging_dir = self.staging_dir(instance) + filename = "{0}.fbx".format(instance.name) + path = os.path.join(staging_dir, filename) + + # The export requires forward slashes because we need to + # format it into a string in a mel expression + path = path.replace('\\', '/') + + self.log.info("Extracting OBJ to: {0}".format(path)) + + members = instance.data["setMembners"] + self.log.info("Members: {0}".format(members)) + self.log.info("Instance: {0}".format(instance[:])) + + obj_exporter.set_options_from_instance(instance) + + # Export + with maintained_selection(): + obj_exporter.export(members, path) + cmds.select(members, r=1, noExpand=True) + mel.eval('file -force -options "{0};{1};{2};{3};{4}" -typ "OBJexport" -pr -es "{5}";'.format(grp_flag, ptgrp_flag, mats_flag, smooth_flag, normals_flag, path)) # noqa + + if "representation" not in instance.data: + instance.data["representation"] = [] + + representation = { + 'name':'obj', + 'ext':'obx', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract OBJ successful to: {0}".format(path)) From 851b573a81798a85b61ac6e1bfb20b83e91e8d02 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 18 Aug 2022 21:21:56 +0800 Subject: [PATCH 0479/2550] add write_color_sets in create_rig and enable options of swtiching on/off for write_color_sets in create_model/rig --- .../hosts/maya/plugins/create/create_model.py | 4 +- .../hosts/maya/plugins/create/create_rig.py | 7 ++- .../defaults/project_settings/maya.json | 4 +- .../schemas/schema_maya_create.json | 60 ++++++++++++++++--- 4 files changed, 61 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_model.py b/openpype/hosts/maya/plugins/create/create_model.py index 37faad23a0..041d3a77e2 100644 --- a/openpype/hosts/maya/plugins/create/create_model.py +++ b/openpype/hosts/maya/plugins/create/create_model.py @@ -9,12 +9,12 @@ class CreateModel(plugin.Creator): family = "model" icon = "cube" defaults = ["Main", "Proxy", "_MD", "_HD", "_LD"] - + write_color_sets = False def __init__(self, *args, **kwargs): super(CreateModel, self).__init__(*args, **kwargs) # Vertex colors with the geometry - self.data["writeColorSets"] = False + self.data["writeColorSets"] = self.write_color_sets self.data["writeFaceSets"] = False # Include attributes by attribute name or prefix diff --git a/openpype/hosts/maya/plugins/create/create_rig.py b/openpype/hosts/maya/plugins/create/create_rig.py index 8032e5fbbd..37fadbe3e1 100644 --- a/openpype/hosts/maya/plugins/create/create_rig.py +++ b/openpype/hosts/maya/plugins/create/create_rig.py @@ -13,13 +13,16 @@ class CreateRig(plugin.Creator): label = "Rig" family = "rig" icon = "wheelchair" + write_color_sets = False + def __init__(self, *args, **kwargs): + super(CreateRig, self).__init__(*args, **kwargs) + self.data["writeColorSets"] = self.write_color_sets def process(self): with lib.undo_chunk(): instance = super(CreateRig, self).process() - self.log.info("Creating Rig instance set up ...") controls = cmds.sets(name="controls_SET", empty=True) pointcache = cmds.sets(name="out_SET", empty=True) - cmds.sets([controls, pointcache], forceElement=instance) + cmds.sets([controls, pointcache], forceElement=instance) \ No newline at end of file diff --git 
a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ac0f161cf2..4e950aa8b5 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -33,7 +33,7 @@ }, "RenderSettings": { "apply_render_settings": true, - "default_render_image_folder": "", + "default_render_image_folder": "renders", "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { @@ -163,6 +163,7 @@ }, "CreateModel": { "enabled": true, + "write_color_sets": false, "defaults": [ "Main", "Proxy", @@ -183,6 +184,7 @@ }, "CreateRig": { "enabled": true, + "write_color_sets": false, "defaults": [ "Main", "Sim", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 431add28df..b9ef6cb80c 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -135,6 +135,56 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreateModel", + "label": "Create Model", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CreateRig", + "label": "Create Rig", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, @@ -160,7 +210,7 @@ } ] }, - + { "type": "schema_template", "name": "template_create_plugin", @@ -197,10 +247,6 @@ "key": "CreateMayaScene", "label": "Create Maya Scene" }, - { - "key": "CreateModel", - "label": "Create Model" - }, { "key": "CreateRenderSetup", "label": "Create Render Setup" @@ -209,10 +255,6 @@ "key": "CreateReview", "label": "Create Review" }, - { - "key": "CreateRig", - "label": "Create Rig" - }, { "key": "CreateSetDress", "label": "Create Set Dress" From 2b62f28e903e524e66d9520a86ea21ae3df81283 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 15:27:13 +0200 Subject: [PATCH 0480/2550] fix 'get_representations_parents' function to be able handle hero versions --- openpype/client/entities.py | 88 ++++++++++++++++++++----------------- 1 file changed, 47 insertions(+), 41 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 67ddb09ddb..f1f1d30214 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1259,58 +1259,64 @@ def get_representations_parents(project_name, representations): dict[ObjectId, tuple]: Parents by representation id. 
""" - repres_by_version_id = collections.defaultdict(list) - versions_by_version_id = {} - versions_by_subset_id = collections.defaultdict(list) - subsets_by_subset_id = {} - subsets_by_asset_id = collections.defaultdict(list) + repre_docs_by_version_id = collections.defaultdict(list) + version_docs_by_version_id = {} + version_docs_by_subset_id = collections.defaultdict(list) + subset_docs_by_subset_id = {} + subset_docs_by_asset_id = collections.defaultdict(list) output = {} - for representation in representations: - repre_id = representation["_id"] + for repre_doc in representations: + repre_id = repre_doc["_id"] + version_id = repre_doc["parent"] output[repre_id] = (None, None, None, None) - version_id = representation["parent"] - repres_by_version_id[version_id].append(representation) + repre_docs_by_version_id[version_id].append(repre_doc) - versions = get_versions( - project_name, version_ids=repres_by_version_id.keys() + version_docs = get_versions( + project_name, + version_ids=repre_docs_by_version_id.keys(), + hero=True ) - for version in versions: - version_id = version["_id"] - subset_id = version["parent"] - versions_by_version_id[version_id] = version - versions_by_subset_id[subset_id].append(version) + for version_doc in version_docs: + version_id = version_doc["_id"] + subset_id = version_doc["parent"] + version_docs_by_version_id[version_id] = version_doc + version_docs_by_subset_id[subset_id].append(version_doc) - subsets = get_subsets( - project_name, subset_ids=versions_by_subset_id.keys() + subset_docs = get_subsets( + project_name, subset_ids=version_docs_by_subset_id.keys() ) - for subset in subsets: - subset_id = subset["_id"] - asset_id = subset["parent"] - subsets_by_subset_id[subset_id] = subset - subsets_by_asset_id[asset_id].append(subset) + for subset_doc in subset_docs: + subset_id = subset_doc["_id"] + asset_id = subset_doc["parent"] + subset_docs_by_subset_id[subset_id] = subset_doc + subset_docs_by_asset_id[asset_id].append(subset_doc) - assets = get_assets(project_name, asset_ids=subsets_by_asset_id.keys()) - assets_by_id = { - asset["_id"]: asset - for asset in assets + asset_docs = get_assets( + project_name, asset_ids=subset_docs_by_asset_id.keys() + ) + asset_docs_by_id = { + asset_doc["_id"]: asset_doc + for asset_doc in asset_docs } - project = get_project(project_name) + project_doc = get_project(project_name) - for version_id, representations in repres_by_version_id.items(): - asset = None - subset = None - version = versions_by_version_id.get(version_id) - if version: - subset_id = version["parent"] - subset = subsets_by_subset_id.get(subset_id) - if subset: - asset_id = subset["parent"] - asset = assets_by_id.get(asset_id) + for version_id, repre_docs in repre_docs_by_version_id.items(): + asset_doc = None + subset_doc = None + version_doc = version_docs_by_version_id.get(version_id) + if version_doc: + subset_id = version_doc["parent"] + subset_doc = subset_docs_by_subset_id.get(subset_id) + if subset_doc: + asset_id = subset_doc["parent"] + asset_doc = asset_docs_by_id.get(asset_id) - for representation in representations: - repre_id = representation["_id"] - output[repre_id] = (version, subset, asset, project) + for repre_doc in repre_docs: + repre_id = repre_doc["_id"] + output[repre_id] = ( + version_doc, subset_doc, asset_doc, project_doc + ) return output From ce746737154e5c7b12f9a0da5ef47b0edd911f64 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 15:28:10 +0200 Subject: [PATCH 0481/2550] Be explicit in error message 
what is missing --- openpype/pipeline/load/utils.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 9945e1fce4..99d6876d4b 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -222,13 +222,20 @@ def get_representation_context(representation): project_name, representation ) + if not representation: + raise AssertionError("Representation was not found in database") + version, subset, asset, project = get_representation_parents( project_name, representation ) - - assert all([representation, version, subset, asset, project]), ( - "This is a bug" - ) + if not version: + raise AssertionError("Version was not found in database") + if not subset: + raise AssertionError("Subset was not found in database") + if not asset: + raise AssertionError("Asset was not found in database") + if not project: + raise AssertionError("Project was not found in database") context = { "project": { From 9d54333e93afe14b3686cc429009632cf1f24f00 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 15:28:54 +0200 Subject: [PATCH 0482/2550] load error can handle invalid hero version --- openpype/tools/loader/widgets.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 2d8b4b048d..597c35e89b 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -1547,6 +1547,11 @@ def _load_representations_by_loader(loader, repre_contexts, return for repre_context in repre_contexts.values(): + version_doc = repre_context["version"] + if version_doc["type"] == "hero_version": + version_name = "Hero" + else: + version_name = version_doc.get("name") try: if data_by_repre_id: _id = repre_context["representation"]["_id"] @@ -1564,7 +1569,7 @@ def _load_representations_by_loader(loader, repre_contexts, None, repre_context["representation"]["name"], repre_context["subset"]["name"], - repre_context["version"]["name"] + version_name )) except Exception as exc: @@ -1577,7 +1582,7 @@ def _load_representations_by_loader(loader, repre_contexts, formatted_traceback, repre_context["representation"]["name"], repre_context["subset"]["name"], - repre_context["version"]["name"] + version_name )) return error_info From d90fa8b8fba57992894272827576b9bfd354fd4b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 18 Aug 2022 21:32:56 +0800 Subject: [PATCH 0483/2550] add write_color_sets in create_rig and enable options of swtiching on/off for write_color_sets in create_model/rig --- openpype/hosts/maya/plugins/create/create_rig.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_rig.py b/openpype/hosts/maya/plugins/create/create_rig.py index 37fadbe3e1..9484605076 100644 --- a/openpype/hosts/maya/plugins/create/create_rig.py +++ b/openpype/hosts/maya/plugins/create/create_rig.py @@ -14,6 +14,7 @@ class CreateRig(plugin.Creator): family = "rig" icon = "wheelchair" write_color_sets = False + def __init__(self, *args, **kwargs): super(CreateRig, self).__init__(*args, **kwargs) self.data["writeColorSets"] = self.write_color_sets @@ -25,4 +26,4 @@ class CreateRig(plugin.Creator): self.log.info("Creating Rig instance set up ...") controls = cmds.sets(name="controls_SET", empty=True) pointcache = cmds.sets(name="out_SET", empty=True) - cmds.sets([controls, pointcache], forceElement=instance) \ No newline at end of file + 
cmds.sets([controls, pointcache], forceElement=instance) From ac6de74b76dd741152c11d71c2262f605847acd7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 15:33:09 +0200 Subject: [PATCH 0484/2550] handle hero version type in load clip --- openpype/hosts/nuke/plugins/load/load_clip.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index b2dc4a52d7..346773b5af 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -162,7 +162,15 @@ class LoadClip(plugin.NukeLoader): data_imprint = {} for k in add_keys: if k == 'version': - data_imprint[k] = context["version"]['name'] + version_doc = context["version"] + if version_doc["type"] == "hero_version": + version = "hero" + else: + version = version_doc.get("name") + + if version: + data_imprint[k] = version + elif k == 'colorspace': colorspace = repre["data"].get(k) colorspace = colorspace or version_data.get(k) From 0e6ff4a21d224ed188cdf076a4fd00a1a8f696ea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 15:59:51 +0200 Subject: [PATCH 0485/2550] cache can be set to outdated --- openpype/settings/handlers.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 0ee1f74692..f6e81a7d0a 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -485,6 +485,9 @@ class CacheValues: delta = (datetime.datetime.now() - self.creation_time).seconds return delta > self.cache_lifetime + def set_outdated(self): + self.create_time = None + class MongoSettingsHandler(SettingsHandler): """Settings handler that use mongo for storing and loading of settings.""" From 4a322f1ceda11843c13627ee5c1f1d9070c82f12 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 16:00:23 +0200 Subject: [PATCH 0486/2550] removed 'SettingsState' and kept only 'SettingsStateInfo' --- openpype/settings/handlers.py | 152 +++++++++++++++------------------- 1 file changed, 66 insertions(+), 86 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index f6e81a7d0a..e4b4bc3dc4 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -23,11 +23,11 @@ from .constants import ( class SettingsStateInfo: - """Helper state information for Settings state. + """Helper state information about some settings state. - Is used to hold information about last save and last opened UI. Keep + Is used to hold information about last saved and last opened UI. Keep information about the time when that happened and on which machine under - which user. + which user and on which openpype version. To create currrent machine and time information use 'create_new' method. 
""" @@ -35,12 +35,28 @@ class SettingsStateInfo: timestamp_format = "%Y-%m-%d %H:%M:%S.%f" def __init__( - self, timestamp, hostname, hostip, username, system_name, local_id + self, + openpype_version, + settings_type, + project_name, + timestamp, + hostname, + hostip, + username, + system_name, + local_id ): + self.openpype_version = openpype_version + self.settings_type = settings_type + self.project_name = project_name + + timestamp_obj = None + if timestamp: + timestamp_obj = datetime.datetime.strptime( + timestamp, self.timestamp_format + ) self.timestamp = timestamp - self._timestamp_obj = datetime.datetime.strptime( - timestamp, self.timestamp_format - ) + self.timestamp_obj = timestamp_obj self.hostname = hostname self.hostip = hostip self.username = username @@ -50,12 +66,8 @@ class SettingsStateInfo: def copy(self): return self.from_data(self.to_data()) - @property - def timestamp_obj(self): - return self._timestamp_obj - @classmethod - def create_new(cls): + def create_new(cls, openpype_version, settings_type, project_name): """Create information about this machine for current time.""" from openpype.lib.pype_info import get_workstation_info @@ -64,6 +76,9 @@ class SettingsStateInfo: workstation_info = get_workstation_info() return cls( + openpype_version, + settings_type, + project_name, now.strftime(cls.timestamp_format), workstation_info["hostname"], workstation_info["hostip"], @@ -77,6 +92,9 @@ class SettingsStateInfo: """Create object from data.""" return cls( + data["openpype_version"], + data["settings_type"], + data["project_name"], data["timestamp"], data["hostname"], data["hostip"], @@ -86,6 +104,40 @@ class SettingsStateInfo: ) def to_data(self): + data = self.to_document_data() + data.update({ + "openpype_version": self.openpype_version, + "settings_type": self.settings_type, + "project_name": self.project_name + }) + return data + + @classmethod + def from_document(cls, openpype_version, settings_type, document): + document = document or {} + project_name = document.get("project_name") + last_saved_info = document.get("last_saved_info") + if last_saved_info: + copy_last_saved_info = copy.deepcopy(last_saved_info) + copy_last_saved_info.update({ + "openpype_version": openpype_version, + "settings_type": settings_type, + "project_name": project_name, + }) + return cls.from_data(copy_last_saved_info) + return cls( + openpype_version, + settings_type, + project_name, + None, + None, + None, + None, + None, + None + ) + + def to_document_data(self): return { "timestamp": self.timestamp, "hostname": self.hostname, @@ -103,7 +155,8 @@ class SettingsStateInfo: return False return ( - self.hostname == other.hostname + self.openpype_version == other.openpype_version + and self.hostname == other.hostname and self.hostip == other.hostip and self.username == other.username and self.system_name == other.system_name @@ -111,79 +164,6 @@ class SettingsStateInfo: ) -class SettingsState: - """State of settings with last saved and last opened. - - Args: - openpype_version (str): OpenPype version string. - settings_type (str): Type of settings. System or project settings. - last_saved_info (Union[None, SettingsStateInfo]): Information about - machine and time when were settings saved last time. - last_opened_info (Union[None, SettingsStateInfo]): This is settings UI - specific information similar to last saved describes who had opened - settings as last. - project_name (Union[None, str]): Identifier for project settings. 
- """ - - def __init__( - self, - openpype_version, - settings_type, - last_saved_info, - last_opened_info, - project_name=None - ): - self.openpype_version = openpype_version - self.settings_type = settings_type - self.last_saved_info = last_saved_info - self.last_opened_info = last_opened_info - self.project_name = project_name - - def __eq__(self, other): - if not isinstance(other, SettingsState): - return False - - return ( - self.openpype_version == other.openpype_version - and self.settings_type == other.settings_type - and self.last_saved_info == other.last_saved_info - and self.last_opened_info == other.last_opened_info - and self.project_name == other.project_name - ) - - def copy(self): - return self.__class__( - self.openpype_version, - self.settings_type, - self.last_saved_info.copy(), - self.last_opened_info.copy(), - self.project_name - ) - - def on_save(self, openpype_version): - self.openpype_version = openpype_version - self.last_saved_info = SettingsStateInfo.create_new() - - @classmethod - def from_document(cls, openpype_version, settings_type, document): - document = document or {} - last_saved_info = document.get("last_saved_info") - if last_saved_info: - last_saved_info = SettingsStateInfo.from_data(last_saved_info) - - last_opened_info = document.get("last_opened_info") - if last_opened_info: - last_opened_info = SettingsStateInfo.from_data(last_opened_info) - - return cls( - openpype_version, - settings_type, - last_saved_info, - last_opened_info, - document.get("project_name") - ) - - @six.add_metaclass(ABCMeta) class SettingsHandler: @abstractmethod From 20509b4610250c6d6725c50659b0ce3a065b0e92 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 16:02:46 +0200 Subject: [PATCH 0487/2550] changed 'settings_state' to 'last_saved_info' in cache --- openpype/settings/handlers.py | 44 ++++++++++++++++++++++++++++++----- 1 file changed, 38 insertions(+), 6 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index e4b4bc3dc4..e34a4c3540 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -368,7 +368,7 @@ class SettingsHandler: """OpenPype versions that have any studio project anatomy overrides. Returns: - list: OpenPype versions strings. + List[str]: OpenPype versions strings. """ pass @@ -379,7 +379,7 @@ class SettingsHandler: """OpenPype versions that have any studio project settings overrides. Returns: - list: OpenPype versions strings. + List[str]: OpenPype versions strings. """ pass @@ -393,8 +393,39 @@ class SettingsHandler: project_name(str): Name of project. Returns: - list: OpenPype versions strings. + List[str]: OpenPype versions strings. """ + + pass + + @abstractmethod + def get_system_last_saved_info(self): + """State of last system settings overrides at the moment when called. + + This method must provide most recent data so using cached data is not + the way. + + Returns: + SettingsStateInfo: Information about system settings overrides. + """ + + pass + + @abstractmethod + def get_project_last_saved_info(self, project_name): + """State of last project settings overrides at the moment when called. + + This method must provide most recent data so using cached data is not + the way. + + Args: + project_name (Union[None, str]): Project name for which state + should be returned. + + Returns: + SettingsStateInfo: Information about project settings overrides. 
+ """ + pass @@ -427,7 +458,7 @@ class CacheValues: self.data = None self.creation_time = None self.version = None - self.settings_state = None + self.last_saved_info = None def data_copy(self): if not self.data: @@ -439,8 +470,8 @@ class CacheValues: self.creation_time = datetime.datetime.now() self.version = version - def update_settings_state(self, settings_state): - self.settings_state = settings_state + def update_last_saved_info(self, last_saved_info): + self.last_saved_info = last_saved_info def update_from_document(self, document, version): data = {} @@ -1288,6 +1319,7 @@ class MongoSettingsHandler(SettingsHandler): self.project_anatomy_cache[project_name].update_from_document( document, version ) + else: project_doc = get_project(project_name) self.project_anatomy_cache[project_name].update_data( From ba434d5f713de2eb478bd1cea533163d482d4033 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 16:03:49 +0200 Subject: [PATCH 0488/2550] changed how update of settings happens --- openpype/settings/handlers.py | 49 +++++++++++++++++++++++------------ 1 file changed, 32 insertions(+), 17 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index e34a4c3540..43b1d37c34 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -696,20 +696,28 @@ class MongoSettingsHandler(SettingsHandler): system_settings_data ) - # Store system settings - self.collection.replace_one( + system_settings_doc = self.collection.find_one( { "type": self._system_settings_key, "version": self._current_version }, - { - "type": self._system_settings_key, - "data": system_settings_data, - "version": self._current_version - }, - upsert=True + {"_id": True} ) + # Store system settings + new_system_settings_doc = { + "type": self._system_settings_key, + "version": self._current_version, + "data": system_settings_data, + } + if not system_settings_doc: + self.collections.insert_one(new_system_settings_doc) + else: + self.collections.update_one( + {"_id": system_settings_doc["_id"]}, + {"$set": new_system_settings_doc} + ) + # Store global settings self.collection.replace_one( { @@ -844,26 +852,33 @@ class MongoSettingsHandler(SettingsHandler): def _save_project_data(self, project_name, doc_type, data_cache): is_default = bool(project_name is None) - replace_filter = { + query_filter = { "type": doc_type, "is_default": is_default, "version": self._current_version } - replace_data = { + last_saved_info = data_cache.last_saved_info + new_project_settings_doc = { "type": doc_type, "data": data_cache.data, "is_default": is_default, - "version": self._current_version + "version": self._current_version, } if not is_default: - replace_filter["project_name"] = project_name - replace_data["project_name"] = project_name + query_filter["project_name"] = project_name + new_project_settings_doc["project_name"] = project_name - self.collection.replace_one( - replace_filter, - replace_data, - upsert=True + project_settings_doc = self.collection.find_one( + query_filter, + {"_id": True} ) + if project_settings_doc: + self.collection.update_one( + {"_id": project_settings_doc["_id"]}, + new_project_settings_doc + ) + else: + self.collection.insert_one(new_project_settings_doc) def _get_versions_order_doc(self, projection=None): # TODO cache From 6c9d6b3865cfdce385aba868d61b9cb09985ab88 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 16:05:15 +0200 Subject: [PATCH 0489/2550] added helper methods for query of override documents --- 
openpype/settings/handlers.py | 57 +++++++++++++++++++++-------------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 43b1d37c34..6080b5e77f 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -1205,18 +1205,7 @@ class MongoSettingsHandler(SettingsHandler): globals_document = self.collection.find_one({ "type": GLOBAL_SETTINGS_KEY }) - document = ( - self._get_studio_system_settings_overrides_for_version() - ) - if document is None: - document = self._find_closest_system_settings() - - version = None - if document: - if document["type"] == self._system_settings_key: - version = document["version"] - else: - version = LEGACY_SETTINGS_VERSION + document, version = self._get_system_settings_overrides_doc() merged_document = self._apply_global_settings( document, globals_document @@ -1232,21 +1221,27 @@ class MongoSettingsHandler(SettingsHandler): return data, cache.version return data + def _get_system_settings_overrides_doc(self): + document = ( + self._get_studio_system_settings_overrides_for_version() + ) + if document is None: + document = self._find_closest_system_settings() + + version = None + if document: + if document["type"] == self._system_settings_key: + version = document["version"] + else: + version = LEGACY_SETTINGS_VERSION + + return document, version + def _get_project_settings_overrides(self, project_name, return_version): if self.project_settings_cache[project_name].is_outdated: - document = self._get_project_settings_overrides_for_version( + document, version = self._get_project_settings_overrides_doc( project_name ) - if document is None: - document = self._find_closest_project_settings(project_name) - - version = None - if document: - if document["type"] == self._project_settings_key: - version = document["version"] - else: - version = LEGACY_SETTINGS_VERSION - self.project_settings_cache[project_name].update_from_document( document, version ) @@ -1257,6 +1252,22 @@ class MongoSettingsHandler(SettingsHandler): return data, cache.version return data + def _get_project_settings_overrides_doc(self, project_name): + document = self._get_project_settings_overrides_for_version( + project_name + ) + if document is None: + document = self._find_closest_project_settings(project_name) + + version = None + if document: + if document["type"] == self._project_settings_key: + version = document["version"] + else: + version = LEGACY_SETTINGS_VERSION + + return document, version + def get_studio_project_settings_overrides(self, return_version): """Studio overrides of default project settings.""" return self._get_project_settings_overrides(None, return_version) From 8f121275bdbb08eb686ce482f529b97c96dfe1cb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 16:06:11 +0200 Subject: [PATCH 0490/2550] implemented methods to get last saved information --- openpype/settings/handlers.py | 45 +++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 6080b5e77f..af2bf104de 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -688,6 +688,15 @@ class MongoSettingsHandler(SettingsHandler): # Update cache self.system_settings_cache.update_data(data, self._current_version) + last_saved_info = SettingsStateInfo.create_new( + self._current_version, + SYSTEM_SETTINGS_KEY, + None + ) + self.system_settings_cache.update_last_saved_info( + last_saved_info + ) + # 
Get copy of just updated cache system_settings_data = self.system_settings_cache.data_copy() @@ -709,6 +718,7 @@ class MongoSettingsHandler(SettingsHandler): "type": self._system_settings_key, "version": self._current_version, "data": system_settings_data, + "last_saved_info": last_saved_info.to_document_data() } if not system_settings_doc: self.collections.insert_one(new_system_settings_doc) @@ -749,6 +759,14 @@ class MongoSettingsHandler(SettingsHandler): data_cache = self.project_settings_cache[project_name] data_cache.update_data(overrides, self._current_version) + last_saved_info = SettingsStateInfo.create_new( + self._current_version, + PROJECT_SETTINGS_KEY, + project_name + ) + + data_cache.update_last_saved_info(last_saved_info) + self._save_project_data( project_name, self._project_settings_key, data_cache ) @@ -863,6 +881,7 @@ class MongoSettingsHandler(SettingsHandler): "data": data_cache.data, "is_default": is_default, "version": self._current_version, + "last_saved_info": last_saved_info.to_data() } if not is_default: query_filter["project_name"] = project_name @@ -1207,6 +1226,9 @@ class MongoSettingsHandler(SettingsHandler): }) document, version = self._get_system_settings_overrides_doc() + last_saved_info = SettingsStateInfo.from_document( + version, SYSTEM_SETTINGS_KEY, document + ) merged_document = self._apply_global_settings( document, globals_document ) @@ -1214,6 +1236,9 @@ class MongoSettingsHandler(SettingsHandler): self.system_settings_cache.update_from_document( merged_document, version ) + self.system_settings_cache.update_last_saved_info( + last_saved_info + ) cache = self.system_settings_cache data = cache.data_copy() @@ -1237,6 +1262,13 @@ class MongoSettingsHandler(SettingsHandler): return document, version + def get_system_last_saved_info(self): + # Make sure settings are recaches + self.system_settings_cache.set_outdated() + self.get_studio_system_settings_overrides(False) + + return self.system_settings_cache.last_saved_info.copy() + def _get_project_settings_overrides(self, project_name, return_version): if self.project_settings_cache[project_name].is_outdated: document, version = self._get_project_settings_overrides_doc( @@ -1245,6 +1277,12 @@ class MongoSettingsHandler(SettingsHandler): self.project_settings_cache[project_name].update_from_document( document, version ) + last_saved_info = SettingsStateInfo.from_document( + version, PROJECT_SETTINGS_KEY, document + ) + self.project_settings_cache[project_name].update_last_saved_info( + last_saved_info + ) cache = self.project_settings_cache[project_name] data = cache.data_copy() @@ -1268,6 +1306,13 @@ class MongoSettingsHandler(SettingsHandler): return document, version + def get_project_last_saved_info(self, project_name): + # Make sure settings are recaches + self.project_settings_cache[project_name].set_outdated() + self._get_project_settings_overrides(project_name, False) + + return self.project_settings_cache[project_name].last_saved_info.copy() + def get_studio_project_settings_overrides(self, return_version): """Studio overrides of default project settings.""" return self._get_project_settings_overrides(None, return_version) From 745386decba851f07fcc72b8e9cac7d758ca4ef9 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 18 Aug 2022 22:12:05 +0800 Subject: [PATCH 0491/2550] add write_color_sets in create_rig and enable options of swtiching on/off for write_color_sets in create_model/rig --- openpype/hosts/maya/plugins/create/create_rig.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/openpype/hosts/maya/plugins/create/create_rig.py b/openpype/hosts/maya/plugins/create/create_rig.py index 9484605076..8eb1fab5e0 100644 --- a/openpype/hosts/maya/plugins/create/create_rig.py +++ b/openpype/hosts/maya/plugins/create/create_rig.py @@ -18,6 +18,7 @@ class CreateRig(plugin.Creator): def __init__(self, *args, **kwargs): super(CreateRig, self).__init__(*args, **kwargs) self.data["writeColorSets"] = self.write_color_sets + self.data["writeFaceSets"] = False def process(self): From 4a86093a824450a74e17fb8aa77d25ed23ede8e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 18 Aug 2022 16:15:19 +0200 Subject: [PATCH 0492/2550] :recycle: resolve current path to absolute --- .../publish/validate_workfile_paths.py | 23 ++++--------------- 1 file changed, 4 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 604d4af392..9e087fe51c 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -import os import openpype.api import pyblish.api import hou @@ -50,24 +49,10 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): @classmethod def repair(cls, instance): - """Replace $HIP and $JOB vars for published path.""" - # determine path of published scene - anatomy = instance.context.data['anatomy'] - template_data = instance.data.get("anatomyData") - rep = instance.data.get("representations")[0].get("name") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - filepath = os.path.normpath(template_filled) - hip_dir = os.path.dirname(filepath) invalid = cls.get_invalid() for param in invalid: cls.log.info("processing: {}".format(param.path())) - # replace $HIP - invalid_path = param.unexpandedString() - param.set(invalid_path.replace("$HIP", hip_dir)) - # replace $JOB - param.set(invalid_path.replace("$JOB", hip_dir)) - cls.log.info("Replacing {} for {}".format(invalid_path, hip_dir)) + cls.log.info("Replacing {} for {}".format( + param.unexpandedString(), + hou.text.expandString(param.unexpandedString()))) + param.set(hou.text.expandString(param.unexpandedString())) From 9ea46a2a3690df28c2781cdd0919ed44e0d275fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 16:45:37 +0200 Subject: [PATCH 0493/2550] make available api functions in settings to have access to lock information and last saved information --- openpype/settings/handlers.py | 127 +++++++++++++++++++++++++++++++++- openpype/settings/lib.py | 25 +++++++ 2 files changed, 149 insertions(+), 3 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index af2bf104de..3dc33503ea 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -67,7 +67,9 @@ class SettingsStateInfo: return self.from_data(self.to_data()) @classmethod - def create_new(cls, openpype_version, settings_type, project_name): + def create_new( + cls, openpype_version, settings_type=None, project_name=None + ): """Create information about this machine for current time.""" from openpype.lib.pype_info import get_workstation_info @@ -112,6 +114,20 @@ class SettingsStateInfo: }) return data + @classmethod + def create_new_empty(cls, openpype_version, 
settings_type=None): + return cls( + openpype_version, + settings_type, + None, + None, + None, + None, + None, + None, + None + ) + @classmethod def from_document(cls, openpype_version, settings_type, document): document = document or {} @@ -428,6 +444,54 @@ class SettingsHandler: pass + # UI related calls + @abstractmethod + def get_last_opened_info(self): + """Get information about last opened UI. + + Last opened UI is empty if there is noone who would have opened UI at + the moment when called. + + Returns: + Union[None, SettingsStateInfo]: Information about machine who had + opened Settings UI. + """ + + pass + + @abstractmethod + def opened_ui(self): + """Callback called when settings UI is opened. + + Information about this machine must be available when + 'get_last_opened_info' is called from anywhere until 'closed_ui' is + called again. + + Returns: + SettingsStateInfo: Object representing information about this + machine. Must be passed to 'closed_ui' when finished. + """ + + pass + + @abstractmethod + def closed_ui(self, info_obj): + """Callback called when settings UI is closed. + + From the moment this method is called the information about this + machine is removed and no more available when 'get_last_opened_info' + is called. + + Callback should validate if this machine is still stored as opened ui + before changing any value. + + Args: + info_obj (SettingsStateInfo): Object created when 'opened_ui' was + called. + """ + + pass + @six.add_metaclass(ABCMeta) class LocalSettingsHandler: @@ -690,8 +754,7 @@ class MongoSettingsHandler(SettingsHandler): last_saved_info = SettingsStateInfo.create_new( self._current_version, - SYSTEM_SETTINGS_KEY, - None + SYSTEM_SETTINGS_KEY ) self.system_settings_cache.update_last_saved_info( last_saved_info @@ -1610,6 +1673,64 @@ class MongoSettingsHandler(SettingsHandler): return output return self._sort_versions(output) + def get_last_opened_info(self): + doc = self.collection.find_one({ + "type": "last_opened_settings_ui", + "version": self._current_version + }) or {} + info_data = doc.get("info") + if not info_data: + return SettingsStateInfo.create_new_empty(self._current_version) + + # Fill not available information + info_data["openpype_version"] = self._current_version + info_data["settings_type"] = None + info_data["project_name"] = None + return SettingsStateInfo.from_data(info_data) + + def opened_ui(self): + doc_filter = { + "type": "last_opened_settings_ui", + "version": self._current_version + } + + opened_info = SettingsStateInfo.create_new(self._current_version) + new_doc_data = copy.deepcopy(doc_filter) + new_doc_data["info"] = opened_info.to_document_data() + + doc = self.collection.find_one( + doc_filter, + {"_id": True} + ) + if doc: + self.collection.update_one( + {"_id": doc["_id"]}, + {"$set": new_doc_data} + ) + else: + self.collection.insert_one(new_doc_data) + return opened_info + + def closed_ui(self, info_obj): + doc_filter = { + "type": "last_opened_settings_ui", + "version": self._current_version + } + doc = self.collection.find_one(doc_filter) or {} + info_data = doc.get("info") + if not info_data: + return + + info_data["openpype_version"] = self._current_version + info_data["settings_type"] = None + info_data["project_name"] = None + current_info = SettingsStateInfo.from_data(info_data) + if current_info == info_obj: + self.collection.update_one( + {"_id": doc["_id"]}, + {"$set": {"info": None}} + ) + class MongoLocalSettingsHandler(LocalSettingsHandler): """Settings handler that use mongo for store and load 
local settings. diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 6df41112c8..58cfd3862c 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -91,6 +91,31 @@ def calculate_changes(old_value, new_value): return changes +@require_handler +def get_system_last_saved_info(): + return _SETTINGS_HANDLER.get_system_last_saved_info() + + +@require_handler +def get_project_last_saved_info(project_name): + return _SETTINGS_HANDLER.get_project_last_saved_info(project_name) + + +@require_handler +def get_last_opened_info(): + return _SETTINGS_HANDLER.get_last_opened_info() + + +@require_handler +def opened_ui(): + return _SETTINGS_HANDLER.opened_ui() + + +@require_handler +def closed_ui(info_obj): + return _SETTINGS_HANDLER.closed_ui(info_obj) + + @require_handler def save_studio_settings(data): """Save studio overrides of system settings. From 2b6d705c441fb147945a53d435895e919dc0111b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 18 Aug 2022 17:16:51 +0200 Subject: [PATCH 0494/2550] Added better logging when DL fails In some specific cases DL sends broken json payload even if response.ok. Handle parsing of broken json better. --- openpype/modules/deadline/abstract_submit_deadline.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 3f54273a56..5cf8222b1c 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -4,6 +4,7 @@ It provides Deadline JobInfo data class. """ +import json.decoder import os from abc import abstractmethod import platform @@ -627,7 +628,12 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self.log.debug(payload) raise RuntimeError(response.text) - result = response.json() + try: + result = response.json() + except json.decoder.JSONDecodeError: + self.log.warning("Broken response {}".format(response)) + raise RuntimeError("Broken response from DL") + # for submit publish job self._instance.data["deadlineSubmissionJob"] = result From beedfd2ecee833854db505e0566de377a8243649 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 18 Aug 2022 17:22:28 +0200 Subject: [PATCH 0495/2550] Added better logging when DL fails In some specific cases DL sends broken json payload even if response.ok. Handle parsing of broken json better. --- openpype/modules/deadline/abstract_submit_deadline.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 5cf8222b1c..c38f16149e 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -631,7 +631,9 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): try: result = response.json() except json.decoder.JSONDecodeError: - self.log.warning("Broken response {}".format(response)) + msg = "Broken response {}. 
".format(response) + msg += "Try restarting DL webservice" + self.log.warning() raise RuntimeError("Broken response from DL") # for submit publish job From 9251a0fd4294725a5fb1dfbef68788c409554230 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 18:09:55 +0200 Subject: [PATCH 0496/2550] changed function names --- openpype/settings/handlers.py | 20 ++++++++++---------- openpype/settings/lib.py | 8 ++++---- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 3dc33503ea..1b59531943 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -460,22 +460,22 @@ class SettingsHandler: pass @abstractmethod - def opened_ui(self): + def opened_settings_ui(self): """Callback called when settings UI is opened. Information about this machine must be available when - 'get_last_opened_info' is called from anywhere until 'closed_ui' is - called again. + 'get_last_opened_info' is called from anywhere until + 'closed_settings_ui' is called again. Returns: SettingsStateInfo: Object representing information about this - machine. Must be passed to 'closed_ui' when finished. + machine. Must be passed to 'closed_settings_ui' when finished. """ pass @abstractmethod - def closed_ui(self, info_obj): + def closed_settings_ui(self, info_obj): """Callback called when settings UI is closed. From the moment this method is called the information about this @@ -486,8 +486,8 @@ class SettingsHandler: before changing any value. Args: - info_obj (SettingsStateInfo): Object created when 'opened_ui' was - called. + info_obj (SettingsStateInfo): Object created when + 'opened_settings_ui' was called. """ pass @@ -1680,7 +1680,7 @@ class MongoSettingsHandler(SettingsHandler): }) or {} info_data = doc.get("info") if not info_data: - return SettingsStateInfo.create_new_empty(self._current_version) + return None # Fill not available information info_data["openpype_version"] = self._current_version @@ -1688,7 +1688,7 @@ class MongoSettingsHandler(SettingsHandler): info_data["project_name"] = None return SettingsStateInfo.from_data(info_data) - def opened_ui(self): + def opened_settings_ui(self): doc_filter = { "type": "last_opened_settings_ui", "version": self._current_version @@ -1711,7 +1711,7 @@ class MongoSettingsHandler(SettingsHandler): self.collection.insert_one(new_doc_data) return opened_info - def closed_ui(self, info_obj): + def closed_settings_ui(self, info_obj): doc_filter = { "type": "last_opened_settings_ui", "version": self._current_version diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 58cfd3862c..5eaddf6e6e 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -107,13 +107,13 @@ def get_last_opened_info(): @require_handler -def opened_ui(): - return _SETTINGS_HANDLER.opened_ui() +def opened_settings_ui(): + return _SETTINGS_HANDLER.opened_settings_ui() @require_handler -def closed_ui(info_obj): - return _SETTINGS_HANDLER.closed_ui(info_obj) +def closed_settings_ui(info_obj): + return _SETTINGS_HANDLER.closed_settings_ui(info_obj) @require_handler From baa2505fbfa2055aded2e2c439805d44a3a82347 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 18:10:32 +0200 Subject: [PATCH 0497/2550] show a dialog if someone else has opened settings UI --- openpype/tools/settings/settings/window.py | 136 ++++++++++++++++++++- 1 file changed, 131 insertions(+), 5 deletions(-) diff --git a/openpype/tools/settings/settings/window.py 
b/openpype/tools/settings/settings/window.py index 22778e4a5b..96f11f3932 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -1,4 +1,16 @@ from Qt import QtWidgets, QtGui, QtCore + +from openpype import style + +from openpype.lib import is_admin_password_required +from openpype.widgets import PasswordDialog + +from openpype.settings.lib import ( + get_last_opened_info, + opened_settings_ui, + closed_settings_ui, +) + from .categories import ( CategoryState, SystemWidget, @@ -10,10 +22,6 @@ from .widgets import ( SettingsTabWidget ) from .search_dialog import SearchEntitiesDialog -from openpype import style - -from openpype.lib import is_admin_password_required -from openpype.widgets import PasswordDialog class MainWidget(QtWidgets.QWidget): @@ -25,6 +33,10 @@ class MainWidget(QtWidgets.QWidget): def __init__(self, user_role, parent=None, reset_on_show=True): super(MainWidget, self).__init__(parent) + # Object referencing to this machine and time when UI was opened + # - is used on close event + self._last_opened_info = None + self._user_passed = False self._reset_on_show = reset_on_show @@ -74,7 +86,7 @@ class MainWidget(QtWidgets.QWidget): self._on_restart_required ) tab_widget.reset_started.connect(self._on_reset_started) - tab_widget.reset_started.connect(self._on_reset_finished) + tab_widget.reset_finished.connect(self._on_reset_finished) tab_widget.full_path_requested.connect(self._on_full_path_request) header_tab_widget.context_menu_requested.connect( @@ -131,11 +143,38 @@ class MainWidget(QtWidgets.QWidget): def showEvent(self, event): super(MainWidget, self).showEvent(event) + if self._reset_on_show: self._reset_on_show = False # Trigger reset with 100ms delay QtCore.QTimer.singleShot(100, self.reset) + elif not self._last_opened_info: + self._check_on_ui_open() + + def _check_on_ui_open(self): + last_opened_info = get_last_opened_info() + if last_opened_info is not None: + if self._last_opened_info != last_opened_info: + self._last_opened_info = None + else: + self._last_opened_info = opened_settings_ui() + + if self._last_opened_info is not None: + return + + dialog = SettingsUIOpenedElsewhere(last_opened_info, self) + dialog.exec_() + if dialog.result() == 1: + self._last_opened_info = opened_settings_ui() + return + + def closeEvent(self, event): + if self._last_opened_info: + closed_settings_ui(self._last_opened_info) + self._last_opened_info = None + super(MainWidget, self).closeEvent(event) + def _show_password_dialog(self): if self._password_dialog: self._password_dialog.open() @@ -221,6 +260,8 @@ class MainWidget(QtWidgets.QWidget): if current_widget is widget: self._update_search_dialog() + self._check_on_ui_open() + def keyPressEvent(self, event): if event.matches(QtGui.QKeySequence.Find): # todo: search in all widgets (or in active)? @@ -231,3 +272,88 @@ class MainWidget(QtWidgets.QWidget): return return super(MainWidget, self).keyPressEvent(event) + + +class SettingsUIOpenedElsewhere(QtWidgets.QDialog): + def __init__(self, info_obj, parent=None): + super(SettingsUIOpenedElsewhere, self).__init__(parent) + + self._result = 0 + + self.setWindowTitle("Someone else has opened Settings UI") + + message_label = QtWidgets.QLabel(( + "Someone else has opened Settings UI. That may cause data loss." + " Please contact the person on the other side." + "
<br/><br/>You can open the UI in view-only mode or take" + " the control which will cause the other settings won't be able" + " to save changes.<br/>
    " + ), self) + message_label.setWordWrap(True) + + separator_widget_1 = QtWidgets.QFrame(self) + separator_widget_2 = QtWidgets.QFrame(self) + for separator_widget in ( + separator_widget_1, + separator_widget_2 + ): + separator_widget.setObjectName("Separator") + separator_widget.setMinimumHeight(1) + separator_widget.setMaximumHeight(1) + + other_information = QtWidgets.QWidget(self) + other_information_layout = QtWidgets.QFormLayout(other_information) + other_information_layout.setContentsMargins(0, 0, 0, 0) + for label, value in ( + ("Username", info_obj.username), + ("Host name", info_obj.hostname), + ("Host IP", info_obj.hostip), + ("System name", info_obj.system_name), + ("Local ID", info_obj.local_id), + ("Time Stamp", info_obj.timestamp), + ): + other_information_layout.addRow( + label, + QtWidgets.QLabel(value, other_information) + ) + + footer_widget = QtWidgets.QWidget(self) + buttons_widget = QtWidgets.QWidget(footer_widget) + + take_control_btn = QtWidgets.QPushButton( + "Take control", buttons_widget + ) + view_mode_btn = QtWidgets.QPushButton( + "View only", buttons_widget + ) + + buttons_layout = QtWidgets.QHBoxLayout(buttons_widget) + buttons_layout.setContentsMargins(0, 0, 0, 0) + buttons_layout.addWidget(take_control_btn, 1) + buttons_layout.addWidget(view_mode_btn, 1) + + footer_layout = QtWidgets.QHBoxLayout(footer_widget) + footer_layout.setContentsMargins(0, 0, 0, 0) + footer_layout.addStretch(1) + footer_layout.addWidget(buttons_widget, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(message_label, 0) + layout.addWidget(separator_widget_1, 0) + layout.addWidget(other_information, 1, QtCore.Qt.AlignHCenter) + layout.addWidget(separator_widget_2, 0) + layout.addWidget(footer_widget, 0) + + take_control_btn.clicked.connect(self._on_take_control) + view_mode_btn.clicked.connect(self._on_view_mode) + + def result(self): + return self._result + + def _on_take_control(self): + self._result = 1 + self.close() + + def _on_view_mode(self): + self._result = 0 + self.close() From 936363a6608aed3bf4bc4b2a2b6977b9f6d03142 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 18 Aug 2022 18:45:54 +0200 Subject: [PATCH 0498/2550] settings can take go to view mode or take control --- .../tools/settings/settings/categories.py | 19 ++++++++++ openpype/tools/settings/settings/window.py | 38 +++++++++++++++---- 2 files changed, 49 insertions(+), 8 deletions(-) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index f42027d9e2..0410fa1810 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -121,6 +121,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): self.user_role = user_role self.entity = None + self._edit_mode = None self._state = CategoryState.Idle @@ -191,6 +192,21 @@ class SettingsCategoryWidget(QtWidgets.QWidget): ) raise TypeError("Unknown type: {}".format(label)) + def set_edit_mode(self, enabled): + if enabled is self._edit_mode: + return + + self.save_btn.setEnabled(enabled) + if enabled: + tooltip = ( + "Someone else has opened settings UI." + "\nTry hit refresh to check if settings are already available." 
+ ) + else: + tooltip = "Save settings" + + self.save_btn.setToolTip(tooltip) + @property def state(self): return self._state @@ -434,6 +450,9 @@ class SettingsCategoryWidget(QtWidgets.QWidget): self.set_state(CategoryState.Idle) def save(self): + if not self._edit_mode: + return + if not self.items_are_valid(): return diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index 96f11f3932..013a273e98 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -36,6 +36,8 @@ class MainWidget(QtWidgets.QWidget): # Object referencing to this machine and time when UI was opened # - is used on close event self._last_opened_info = None + self._edit_mode = None + self._main_reset = False self._user_passed = False self._reset_on_show = reset_on_show @@ -152,6 +154,12 @@ class MainWidget(QtWidgets.QWidget): elif not self._last_opened_info: self._check_on_ui_open() + def closeEvent(self, event): + if self._last_opened_info: + closed_settings_ui(self._last_opened_info) + self._last_opened_info = None + super(MainWidget, self).closeEvent(event) + def _check_on_ui_open(self): last_opened_info = get_last_opened_info() if last_opened_info is not None: @@ -161,19 +169,27 @@ class MainWidget(QtWidgets.QWidget): self._last_opened_info = opened_settings_ui() if self._last_opened_info is not None: + if self._edit_mode is not True: + self._set_edit_mode(True) + return + + if self._edit_mode is False: return dialog = SettingsUIOpenedElsewhere(last_opened_info, self) dialog.exec_() - if dialog.result() == 1: + edit_enabled = dialog.result() == 1 + if edit_enabled: self._last_opened_info = opened_settings_ui() + self._set_edit_mode(edit_enabled) + + def _set_edit_mode(self, mode): + if self._edit_mode is mode: return - def closeEvent(self, event): - if self._last_opened_info: - closed_settings_ui(self._last_opened_info) - self._last_opened_info = None - super(MainWidget, self).closeEvent(event) + self._edit_mode = mode + for tab_widget in self.tab_widgets: + tab_widget.set_edit_mode(mode) def _show_password_dialog(self): if self._password_dialog: @@ -215,8 +231,11 @@ class MainWidget(QtWidgets.QWidget): if self._reset_on_show: self._reset_on_show = False + self._main_reset = True for tab_widget in self.tab_widgets: tab_widget.reset() + self._main_reset = False + self._check_on_ui_open() def _update_search_dialog(self, clear=False): if self._search_dialog.isVisible(): @@ -260,7 +279,8 @@ class MainWidget(QtWidgets.QWidget): if current_widget is widget: self._update_search_dialog() - self._check_on_ui_open() + if not self._main_reset: + self._check_on_ui_open() def keyPressEvent(self, event): if event.matches(QtGui.QKeySequence.Find): @@ -340,7 +360,9 @@ class SettingsUIOpenedElsewhere(QtWidgets.QDialog): layout = QtWidgets.QVBoxLayout(self) layout.addWidget(message_label, 0) layout.addWidget(separator_widget_1, 0) - layout.addWidget(other_information, 1, QtCore.Qt.AlignHCenter) + layout.addStretch(1) + layout.addWidget(other_information, 0, QtCore.Qt.AlignHCenter) + layout.addStretch(1) layout.addWidget(separator_widget_2, 0) layout.addWidget(footer_widget, 0) From 70cfa733f3e7e985580ec8fff8520c31ec5184c8 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 18 Aug 2022 18:34:34 +0000 Subject: [PATCH 0499/2550] [Automated] Bump version --- CHANGELOG.md | 27 +++++++++++++++++++-------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md 
index 80673e9f8a..b192d26250 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,25 @@ # Changelog -## [3.13.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...HEAD) +**🆕 New features** + +- Maya: Build workfile by template [\#3578](https://github.com/pypeclub/OpenPype/pull/3578) + +**🚀 Enhancements** + +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) +- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) +- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) +- Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661) +- General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) + **🐛 Bug fixes** +- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) +- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) - General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) - Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) - General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) @@ -14,8 +28,12 @@ **🔀 Refactored code** +- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) +- Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653) +- Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) - TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) - General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) +- General: Workfiles builder using query functions [\#3598](https://github.com/pypeclub/OpenPype/pull/3598) **Merged pull requests:** @@ -89,7 +107,6 @@ **🚀 Enhancements** - General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) -- Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) **🐛 Bug fixes** @@ -100,16 +117,10 @@ - General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) - Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) - Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) -- Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) -- General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538) **🔀 Refactored code** - General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) -- General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) -- Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) -- General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) -- General: Move load related functions into 
pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) **Merged pull requests:** diff --git a/openpype/version.py b/openpype/version.py index 9ae52e8370..38723ed123 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.1-nightly.3" +__version__ = "3.14.0-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 287a3c78f0..4d4aff01a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.1-nightly.3" # OpenPype +version = "3.14.0-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 9f879bb22a2a01fe17adc1b7e9e61df8603e6537 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 18 Aug 2022 18:47:09 +0000 Subject: [PATCH 0500/2550] [Automated] Release --- CHANGELOG.md | 6 +++--- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b192d26250..e19993ad75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.14.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...3.14.0) **🆕 New features** @@ -25,6 +25,7 @@ - General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) - Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) - Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) **🔀 Refactored code** @@ -69,7 +70,6 @@ - Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) - General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) - Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) -- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) - AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) - Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) - TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) diff --git a/openpype/version.py b/openpype/version.py index 38723ed123..c28b480940 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.0-nightly.1" +__version__ = "3.14.0" diff --git a/pyproject.toml b/pyproject.toml index 4d4aff01a2..e670d0a2ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.0-nightly.1" # OpenPype +version = "3.14.0" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 407b6b518e0da63e8efd021952c01b6311cf0640 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 19 Aug 2022 12:05:38 +0200 Subject: [PATCH 0501/2550] use controlled to handle last opened info --- .../tools/settings/settings/categories.py | 23 ++- openpype/tools/settings/settings/window.py | 136 ++++++++++++------ 2 files changed, 114 insertions(+), 45 deletions(-) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index 0410fa1810..2e3c6d9dda 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -115,13 +115,19 @@ class SettingsCategoryWidget(QtWidgets.QWidget): "settings to update them to you current running OpenPype version." ) - def __init__(self, user_role, parent=None): + def __init__(self, controller, parent=None): super(SettingsCategoryWidget, self).__init__(parent) - self.user_role = user_role + self._controller = controller + controller.event_system.add_callback( + "edit.mode.changed", + self._edit_mode_changed + ) self.entity = None self._edit_mode = None + self._last_saved_info = None + self._reset_crashed = False self._state = CategoryState.Idle @@ -192,11 +198,16 @@ class SettingsCategoryWidget(QtWidgets.QWidget): ) raise TypeError("Unknown type: {}".format(label)) + def _edit_mode_changed(self, event): + self.set_edit_mode(event["edit_mode"]) + def set_edit_mode(self, enabled): if enabled is self._edit_mode: return - self.save_btn.setEnabled(enabled) + self._edit_mode = enabled + + self.save_btn.setEnabled(enabled and not self._reset_crashed) if enabled: tooltip = ( "Someone else has opened settings UI." @@ -302,7 +313,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): footer_layout = QtWidgets.QHBoxLayout(footer_widget) footer_layout.setContentsMargins(5, 5, 5, 5) - if self.user_role == "developer": + if self._controller.user_role == "developer": self._add_developer_ui(footer_layout, footer_widget) footer_layout.addWidget(empty_label, 1) @@ -683,14 +694,16 @@ class SettingsCategoryWidget(QtWidgets.QWidget): ) def _on_reset_crash(self): + self._reset_crashed = True self.save_btn.setEnabled(False) if self.breadcrumbs_model is not None: self.breadcrumbs_model.set_entity(None) def _on_reset_success(self): + self._reset_crashed = True if not self.save_btn.isEnabled(): - self.save_btn.setEnabled(True) + self.save_btn.setEnabled(self._edit_mode) if self.breadcrumbs_model is not None: path = self.breadcrumbs_bar.path() diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index 013a273e98..612975e30a 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -3,6 +3,7 @@ from Qt import QtWidgets, QtGui, QtCore from openpype import style from openpype.lib import is_admin_password_required +from openpype.lib.events import EventSystem from openpype.widgets import PasswordDialog from openpype.settings.lib import ( @@ -24,6 +25,81 @@ from .widgets import ( from .search_dialog import SearchEntitiesDialog +class SettingsController: + """Controller for settings tools. + + Added when tool was finished for checks of last opened in settings + categories and being able communicated with main widget logic. 
+ """ + + def __init__(self, user_role): + self._user_role = user_role + self._event_system = EventSystem() + + self._opened_info = None + self._last_opened_info = None + self._edit_mode = None + + @property + def user_role(self): + return self._user_role + + @property + def event_system(self): + return self._event_system + + @property + def opened_info(self): + return self._opened_info + + @property + def last_opened_info(self): + return self._last_opened_info + + @property + def edit_mode(self): + return self._edit_mode + + def ui_closed(self): + if self._opened_info is not None: + closed_settings_ui(self._opened_info) + + self._opened_info = None + self._edit_mode = None + + def set_edit_mode(self, enabled): + if self._edit_mode is enabled: + return + + opened_info = None + if enabled: + opened_info = opened_settings_ui() + self._last_opened_info = opened_info + + self._opened_info = opened_info + self._edit_mode = enabled + + self.event_system.emit( + "edit.mode.changed", + {"edit_mode": enabled}, + "controller" + ) + + def update_last_opened_info(self): + print("update_last_opened_info") + last_opened_info = get_last_opened_info() + enabled = False + if ( + last_opened_info is None + or self._opened_info == last_opened_info + ): + enabled = True + + self._last_opened_info = last_opened_info + + self.set_edit_mode(enabled) + + class MainWidget(QtWidgets.QWidget): trigger_restart = QtCore.Signal() @@ -33,11 +109,12 @@ class MainWidget(QtWidgets.QWidget): def __init__(self, user_role, parent=None, reset_on_show=True): super(MainWidget, self).__init__(parent) + controller = SettingsController(user_role) + # Object referencing to this machine and time when UI was opened # - is used on close event - self._last_opened_info = None - self._edit_mode = None self._main_reset = False + self._controller = controller self._user_passed = False self._reset_on_show = reset_on_show @@ -55,8 +132,8 @@ class MainWidget(QtWidgets.QWidget): header_tab_widget = SettingsTabWidget(parent=self) - studio_widget = SystemWidget(user_role, header_tab_widget) - project_widget = ProjectWidget(user_role, header_tab_widget) + studio_widget = SystemWidget(controller, header_tab_widget) + project_widget = ProjectWidget(controller, header_tab_widget) tab_widgets = [ studio_widget, @@ -151,45 +228,24 @@ class MainWidget(QtWidgets.QWidget): # Trigger reset with 100ms delay QtCore.QTimer.singleShot(100, self.reset) - elif not self._last_opened_info: - self._check_on_ui_open() - def closeEvent(self, event): - if self._last_opened_info: - closed_settings_ui(self._last_opened_info) - self._last_opened_info = None + self._controller.ui_closed() + super(MainWidget, self).closeEvent(event) - def _check_on_ui_open(self): - last_opened_info = get_last_opened_info() - if last_opened_info is not None: - if self._last_opened_info != last_opened_info: - self._last_opened_info = None - else: - self._last_opened_info = opened_settings_ui() - - if self._last_opened_info is not None: - if self._edit_mode is not True: - self._set_edit_mode(True) + def _check_on_reset(self): + self._controller.update_last_opened_info() + if self._controller.edit_mode: return - if self._edit_mode is False: - return + # if self._edit_mode is False: + # return - dialog = SettingsUIOpenedElsewhere(last_opened_info, self) + dialog = SettingsUIOpenedElsewhere( + self._controller.last_opened_info, self + ) dialog.exec_() - edit_enabled = dialog.result() == 1 - if edit_enabled: - self._last_opened_info = opened_settings_ui() - self._set_edit_mode(edit_enabled) - - 
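The controller added in this patch routes edit-mode changes through `openpype.lib.events.EventSystem` instead of the widgets toggling each other directly. A minimal sketch of that wiring, based only on the calls visible in these hunks (the callback body is invented):

```python
from openpype.lib.events import EventSystem

event_system = EventSystem()


# Widget side (compare SettingsCategoryWidget above): subscribe to the topic
# and read the flag from the event payload.
def _edit_mode_changed(event):
    print("edit mode ->", event["edit_mode"])


event_system.add_callback("edit.mode.changed", _edit_mode_changed)

# Controller side (compare SettingsController.set_edit_mode): emit the topic
# with a payload dict and a source name.
event_system.emit("edit.mode.changed", {"edit_mode": False}, "controller")
```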
def _set_edit_mode(self, mode): - if self._edit_mode is mode: - return - - self._edit_mode = mode - for tab_widget in self.tab_widgets: - tab_widget.set_edit_mode(mode) + self._controller.set_edit_mode(dialog.result() == 1) def _show_password_dialog(self): if self._password_dialog: @@ -235,7 +291,7 @@ class MainWidget(QtWidgets.QWidget): for tab_widget in self.tab_widgets: tab_widget.reset() self._main_reset = False - self._check_on_ui_open() + self._check_on_reset() def _update_search_dialog(self, clear=False): if self._search_dialog.isVisible(): @@ -280,7 +336,7 @@ class MainWidget(QtWidgets.QWidget): self._update_search_dialog() if not self._main_reset: - self._check_on_ui_open() + self._check_on_reset() def keyPressEvent(self, event): if event.matches(QtGui.QKeySequence.Find): @@ -306,8 +362,8 @@ class SettingsUIOpenedElsewhere(QtWidgets.QDialog): "Someone else has opened Settings UI. That may cause data loss." " Please contact the person on the other side." "

    You can open the UI in view-only mode or take" - " the control which will cause the other settings won't be able" - " to save changes.
    " + " the control which will cause settings on the other side" + " won't be able to save changes.
    " ), self) message_label.setWordWrap(True) From 496728e9abc54dbce957127e8e8134f629ed3ed5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 19 Aug 2022 12:09:52 +0200 Subject: [PATCH 0502/2550] :recycle: handle host name that is not set --- openpype/plugins/publish/extract_review.py | 2 ++ openpype/plugins/publish/integrate_subset_group.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index e16f324e0a..27117510b2 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1459,6 +1459,8 @@ class ExtractReview(pyblish.api.InstancePlugin): output = -1 regexes = self.compile_list_of_regexes(in_list) for regex in regexes: + if not value: + continue if re.match(regex, value): output = 1 break diff --git a/openpype/plugins/publish/integrate_subset_group.py b/openpype/plugins/publish/integrate_subset_group.py index 910cb060a6..79dd10fb8f 100644 --- a/openpype/plugins/publish/integrate_subset_group.py +++ b/openpype/plugins/publish/integrate_subset_group.py @@ -93,6 +93,6 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin): return { "families": anatomy_data["family"], "tasks": task.get("name"), - "hosts": anatomy_data["app"], + "hosts": anatomy_data.get("app"), "task_types": task.get("type") } From 6d056c774d7011b8810a66bd63073df02596b50c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 19 Aug 2022 12:24:18 +0200 Subject: [PATCH 0503/2550] Small grammar fixes Just ran through google spellcheck. --- website/docs/dev_settings.md | 77 ++++++++++++++++++------------------ 1 file changed, 39 insertions(+), 38 deletions(-) diff --git a/website/docs/dev_settings.md b/website/docs/dev_settings.md index 492d25930d..94590345e8 100644 --- a/website/docs/dev_settings.md +++ b/website/docs/dev_settings.md @@ -4,26 +4,26 @@ title: Settings sidebar_label: Settings --- -Settings gives ability to change how OpenPype behaves in certain situations. Settings are split into 3 categories **system settings**, **project anatomy** and **project settings**. Project anatomy and project settings are in grouped into single category but there is a technical difference (explained later). Only difference in system and project settings is that system settings can't be technically handled on a project level or their values must be available no matter in which project are values received. Settings have headless entities or settings UI. +Settings give the ability to change how OpenPype behaves in certain situations. Settings are split into 3 categories **system settings**, **project anatomy** and **project settings**. Project anatomy and project settings are grouped into a single category but there is a technical difference (explained later). Only difference in system and project settings is that system settings can't be technically handled on a project level or their values must be available no matter in which project the values are received. Settings have headless entities or settings UI. There is one more category **local settings** but they don't have ability to be changed or defined easily. Local settings can change how settings work per machine, can affect both system and project settings but they're hardcoded for predefined values at this moment. ## Settings schemas -System and project settings are defined by settings schemas. 
Schema define structure of output value, what value types output will contain, how settings are stored and how it's UI input will look. +System and project settings are defined by settings schemas. Schema defines the structure of output value, what value types output will contain, how settings are stored and how its UI input will look. ## Settings values -Output of settings is a json serializable value. There are 3 possible types of value **default values**, **studio overrides** and **project overrides**. Default values must be always available for all settings schemas, their values are stored to code. Default values is what everyone who just installed OpenPype will use as default values. It is good practice to set example values but they should be relevant. +Output of settings is a json serializable value. There are 3 possible types of value **default values**, **studio overrides** and **project overrides**. Default values must be always available for all settings schemas, their values are stored to code. Default values are what everyone who just installed OpenPype will use as default values. It is good practice to set example values but they should be actually relevant. -Setting overrides is what makes settings powerful tool. Overrides contain only a part of settings with additional metadata which describe which parts of settings values that should be replaced from overrides values. Using overrides gives ability to save only specific values and use default values for rest. It is super useful in project settings which have up to 2 levels of overrides. In project settings are used **default values** as base on which are applied **studio overrides** and then **project overrides**. In practice it is possible to save only studio overrides which affect all projects. Changes in studio overrides are then propagated to all projects without project overrides. But values can be locked on project level so studio overrides are not used. +Setting overrides is what makes settings a powerful tool. Overrides contain only a part of settings with additional metadata that describe which parts of settings values should be replaced from overrides values. Using overrides gives the ability to save only specific values and use default values for rest. It is super useful in project settings which have up to 2 levels of overrides. In project settings are used **default values** as base on which are applied **studio overrides** and then **project overrides**. In practice it is possible to save only studio overrides which affect all projects. Changes in studio overrides are then propagated to all projects without project overrides. But values can be locked on project level so studio overrides are not used. ## Settings storage -As was mentined default values are stored into repository files. Overrides are stored to Mongo database. The value in mongo contain only overrides with metadata so their content on it's own is useless and must be used with combination of default values. System settings and project settings are stored into special collection. Single document represents one set of overrides with OpenPype version for which is stored. Settings are versioned and are loaded in specific order - current OpenPype version overrides or first lower available. If there are any overrides with same or lower version then first higher version is used. If there are any overrides then no overrides are applied. +As was mentioned default values are stored into repository files. Overrides are stored in the Mongo database. 
The value in mongo contain only overrides with metadata so their content on it's own is useless and must be used with combination of default values. System settings and project settings are stored into special collection. Single document represents one set of overrides with OpenPype version for which is stored. Settings are versioned and are loaded in specific order - current OpenPype version overrides or first lower available. If there are any overrides with the same or lower version then the first higher version is used. If there are any overrides then no overrides are applied. -Project anatomy is stored into project document thus is not versioned and it's values are always overriden. Any changes in anatomy schema may have drastic effect on production and OpenPype updates. +Project anatomy is stored into a project document thus is not versioned and its values are always overridden. Any changes in anatomy schema may have a drastic effect on production and OpenPype updates. ## Settings schema items As was mentioned schema items define output type of values, how they are stored and how they look in UI. -- schemas are (by default) defined by a json files +- schemas are (by default) defined by json files - OpenPype core system settings schemas are stored in `~/openpype/settings/entities/schemas/system_schema/` and project settings in `~/openpype/settings/entities/schemas/projects_schema/` - both contain `schema_main.json` which are entry points - OpenPype modules/addons can define their settings schemas using `BaseModuleSettingsDef` in that case some functionality may be slightly modified @@ -31,20 +31,21 @@ As was mentioned schema items define output type of values, how they are stored - **type** is only common key which is required for all schema items - each item may have "input modifiers" (other keys in dictionary) and they may be required or optional based on the type - there are special keys across all items - - `"is_file"` - this key is used when defaults values are stored which define that this key is a filename where it's values are stored - - key is validated must be once in hierarchy else it won't be possible to store default values - - make sense to fill it only if it's value if `true` - - `"is_group"` - define that all values under a key in settings hierarchy will be overridden if any value is modified - - this key is not allowed for all inputs as they may not have technical ability to handle it - - key is validated can be only once in hierarchy and is automatically filled on last possible item if is not defined in schemas - - make sense to fill it only if it's value if `true` + - `"is_file"` - this key is used when defaults values are stored in the file. Its value matches the filename where values are stored + - key is validated, must be unique in hierarchy otherwise it won't be possible to store default values + - make sense to fill it only if it's value if `true` + + - `"is_group"` - define that all values under a key in settings hierarchy will be overridden if any value is modified + - this key is not allowed for all inputs as they may not have technical ability to handle it + - key is validated, must be unique in hierarchy and is automatically filled on last possible item if is not defined in schemas + - make sense to fill it only if it's value if `true` - all entities can have set `"tooltip"` key with description which will be shown in UI on hover ### Inner schema -Settings schemas are big json files which would became unmanageable if would be in single file. 
To be able to split them into multiple files to help organize them special types `schema` and `template` were added. Both types are relating to a different file by filename. If json file contains dictionary it is considered as `schema` if contains list it is considered as `template`. +Settings schemas are big json files which would become unmanageable if they were in a single file. To be able to split them into multiple files to help organize them special types `schema` and `template` were added. Both types are related to a different file by filename. If a json file contains a dictionary it is considered as `schema` if it contains a list it is considered as a `template`. #### schema -Schema item is replaced by content of entered schema name. It is recommended that schema file is used only once in settings hierarchy. Templates are meant for reusing. +Schema item is replaced by content of entered schema name. It is recommended that the schema file is used only once in settings hierarchy. Templates are meant for reusing. - schema must have `"name"` key which is name of schema that should be used ```javascript @@ -156,7 +157,7 @@ Templates are almost the same as schema items but can contain one or more items } ``` -Template data can be used only to fill templates in values but not in keys. It is also possible to define default values for unfilled fields to do so one of items in list must be dictionary with key `"__default_values__"` and value as dictionary with default key: values (as in example above). +Template data can be used only to fill templates in values but not in keys. It is also possible to define default values for unfilled fields to do so one of the items in the list must be a dictionary with key "__default_values__"` and value as dictionary with default key: values (as in example above). ```javascript { ... @@ -169,7 +170,7 @@ Template data can be used only to fill templates in values but not in keys. It i } ``` -Because formatting value can be only string it is possible to use formatting values which are replaced with different type. +Because formatting values can be only string it is possible to use formatting values which are replaced with different types. ```javascript // Template data { @@ -201,7 +202,7 @@ Dynamic schema item marks a place in settings schema where schemas defined by `B "name": "project_settings/global" } ``` -- `BaseModuleSettingsDef` with implemented `get_settings_schemas` can return a dictionary where key define a dynamic schema name and value schemas that will be put there +- `BaseModuleSettingsDef` with implemented `get_settings_schemas` can return a dictionary where key defines a dynamic schema name and value schemas that will be put there - dynamic schemas work almost the same way as templates - one item can be replaced by multiple items (or by 0 items) - goal is to dynamically load settings of OpenPype modules without having their schemas or default values in core repository @@ -215,12 +216,12 @@ These inputs wraps another inputs into {key: value} relation #### dict - this is dictionary type wrapping more inputs with keys defined in schema - may be used as dynamic children (e.g. in [list](#list) or [dict-modifiable](#dict-modifiable)) - - in that case the only key modifier is `children` which is list of it's keys - - USAGE: e.g. List of dictionaries where each dictionary have same structure. + - in that case the only key modifier is `children` which is a list of its keys + - USAGE: e.g. 
List of dictionaries where each dictionary has the same structure. - if is not used as dynamic children then must have defined `"key"` under which are it's values stored - may be with or without `"label"` (only for GUI) - - `"label"` must be set to be able mark item as group with `"is_group"` key set to True -- item with label can visually wrap it's children + - `"label"` must be set to be able to mark item as group with `"is_group"` key set to True +- item with label can visually wrap its children - this option is enabled by default to turn off set `"use_label_wrap"` to `False` - label wrap is by default collapsible - that can be set with key `"collapsible"` to `True`/`False` @@ -314,16 +315,16 @@ These inputs wraps another inputs into {key: value} relation - entity must have defined `"label"` if is not used as widget - is set as group if any parent is not group (can't have children as group) - may be with or without `"label"` (only for GUI) - - `"label"` must be set to be able mark item as group with `"is_group"` key set to True -- item with label can visually wrap it's children - - this option is enabled by default to turn off set `"use_label_wrap"` to `False` - - label wrap is by default collapsible - - that can be set with key `"collapsible"` to `True`/`False` - - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) - - it is possible to add lighter background with `"highlight_content"` (Default: `False`) - - lighter background has limits of maximum applies after 3-4 nested highlighted items there is not much difference in the color -- for UI porposes was added `enum_is_horizontal` which will make combobox appear next to children inputs instead of on top of them (Default: `False`) - - this has extended ability of `enum_on_right` which will move combobox to right side next to children widgets (Default: `False`) + - `"label"` must be set to be able to mark item as group with `"is_group"` key set to True +- item with label can visually wrap its children + - this option is enabled by default to turn off set `"use_label_wrap"` to `False` + - label wrap is by default collapsible + - that can be set with key `"collapsible"` to `True`/`False` + - with key `"collapsed"` as `True`/`False` can be set that is collapsed when GUI is opened (Default: `False`) + - it is possible to add lighter background with `"highlight_content"` (Default: `False`) + - lighter background has limits of maximum applies after 3-4 nested highlighted items there is not much difference in the color +- for UI purposes was added `enum_is_horizontal` which will make combobox appear next to children inputs instead of on top of them (Default: `False`) + - this has extended ability of `enum_on_right` which will move combobox to right side next to children widgets (Default: `False`) - output is dictionary `{the "key": children values}` - using this type as template item for list type can be used to create infinite hierarchies @@ -795,7 +796,7 @@ How output of the schema could look like on save: ``` #### color -- preimplemented entity to store and load color values +- pre implemented entity to store and load color values - entity store and expect list of 4 integers in range 0-255 - integers represents rgba [Red, Green, Blue, Alpha] - has modifier `"use_alpha"` which can be `True`/`False` @@ -842,9 +843,9 @@ Items used only for UI purposes. 
``` ### Proxy wrappers -- should wraps multiple inputs only visually -- these does not have `"key"` key and do not allow to have `"is_file"` or `"is_group"` modifiers enabled -- can't be used as widget (first item in e.g. `list`, `dict-modifiable`, etc.) +- should wrap multiple inputs only visually +- these do not have `"key"` key and do not allow to have `"is_file"` or `"is_group"` modifiers enabled +- can't be used as a widget (first item in e.g. `list`, `dict-modifiable`, etc.) #### form - wraps inputs into form look layout @@ -893,6 +894,6 @@ Items used only for UI purposes. ## How to add new settings -Always start with modifying or adding new schema and don't worry about values. When you think schema is ready to use launch OpenPype settings in development mode using `poetry run python ./start.py settings --dev` or prepared script in `~/openpype/tools/run_settings(.sh|.ps1)`. Settings opened in development mode have checkbox `Modify defaults` available in bottom left corner. When checked default values are modified and saved on `Save`. This is recommended approach how default settings should be created instead of direct modification of files. +Always start with modifying or adding a new schema and don't worry about values. When you think schema is ready to use launch OpenPype settings in development mode using `poetry run python ./start.py settings --dev` or prepared script in `~/openpype/tools/run_settings(.sh|.ps1)`. Settings opened in development mode have the checkbox `Modify defaults` available in the bottom left corner. When checked default values are modified and saved on `Save`. This is a recommended approach on how default settings should be created instead of direct modification of files. ![Modify default settings](assets/settings_dev.png) From 4f03f2dd09f1eed5a0f23d2d9c9a428ae7656560 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 19 Aug 2022 12:33:32 +0200 Subject: [PATCH 0504/2550] modified dialog --- openpype/tools/settings/settings/window.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index 612975e30a..fcbcd129d0 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -359,11 +359,12 @@ class SettingsUIOpenedElsewhere(QtWidgets.QDialog): self.setWindowTitle("Someone else has opened Settings UI") message_label = QtWidgets.QLabel(( - "Someone else has opened Settings UI. That may cause data loss." + "Someone else has opened Settings UI which could cause data loss." " Please contact the person on the other side." - "

    You can open the UI in view-only mode or take" - " the control which will cause settings on the other side" - " won't be able to save changes.
    " + "

    You can open the UI in view-only mode." + " All changes in view mode will be lost." + "

    You can take the control which will cause that" + " all changes of settings on the other side will be lost.
    " ), self) message_label.setWordWrap(True) @@ -435,3 +436,7 @@ class SettingsUIOpenedElsewhere(QtWidgets.QDialog): def _on_view_mode(self): self._result = 0 self.close() + + def showEvent(self, event): + super(SettingsUIOpenedElsewhere, self).showEvent(event) + self.resize(600, 400) From 4b58ce2b3ac96e337392c8c24b4203129cf51cdb Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 19 Aug 2022 18:45:57 +0800 Subject: [PATCH 0505/2550] fix the bug of breakng the sequences with version string in subset name when extracting playblast --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 6626eb6a7a..cc1939c584 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -138,10 +138,10 @@ class ExtractPlayblast(openpype.api.Extractor): self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) - pattern_frame = [r'\.(?P(?P0*)\d+)\.\D+\d?$'] + patterns = [clique.PATTERNS["frames"]] collections, remainder = clique.assemble(collected_files, minimum_items=1, - patterns=pattern_frame) + patterns=patterns) self.log.debug("filename {}".format(filename)) frame_collection = None From fc4db8802d260d62a6cd93ea914bb5558151ba0c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 19 Aug 2022 13:01:46 +0200 Subject: [PATCH 0506/2550] Fixed issues after code review Warning should print exception. JSONDecoder is not in Pype2 --- .../deadline/abstract_submit_deadline.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index c38f16149e..9d952586d2 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -16,7 +16,12 @@ import attr import requests import pyblish.api -from openpype.pipeline.publish import AbstractMetaInstancePlugin +from openpype.pipeline.publish import ( + AbstractMetaInstancePlugin, + KnownPublishError +) + +JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError) def requests_post(*args, **kwargs): @@ -616,7 +621,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): str: resulting Deadline job id. Throws: - RuntimeError: if submission fails. + KnownPublishError: if submission fails. """ url = "{}/api/jobs".format(self._deadline_url) @@ -626,15 +631,15 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self.log.error(response.status_code) self.log.error(response.content) self.log.debug(payload) - raise RuntimeError(response.text) + raise KnownPublishError(response.text) try: result = response.json() except json.decoder.JSONDecodeError: msg = "Broken response {}. ".format(response) - msg += "Try restarting DL webservice" - self.log.warning() - raise RuntimeError("Broken response from DL") + msg += "Try restarting the Deadline Webservice." 
+ self.log.warning(msg, exc_info=True) + raise KnownPublishError("Broken response from DL") # for submit publish job self._instance.data["deadlineSubmissionJob"] = result From 8cd15708b65213092924263b0386f8bec28dc7c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 19 Aug 2022 13:07:52 +0200 Subject: [PATCH 0507/2550] :bug: use the right key Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/plugins/publish/integrate_subset_group.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_subset_group.py b/openpype/plugins/publish/integrate_subset_group.py index 79dd10fb8f..a24ebba3a5 100644 --- a/openpype/plugins/publish/integrate_subset_group.py +++ b/openpype/plugins/publish/integrate_subset_group.py @@ -93,6 +93,6 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin): return { "families": anatomy_data["family"], "tasks": task.get("name"), - "hosts": anatomy_data.get("app"), + "hosts": instance.context.data["hostName"], "task_types": task.get("type") } From 04397ccd2f8791a54917d505d1453a7a7e7e74cf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 19 Aug 2022 13:10:31 +0200 Subject: [PATCH 0508/2550] OP-3723 - changed source files to 8K 16K was causing memory issues on some machines. --- openpype/hosts/photoshop/plugins/publish/extract_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 64decbb957..60ae575b0a 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -144,7 +144,7 @@ class ExtractReview(openpype.api.Extractor): used as a source for thumbnail or review mov. """ # 16384x16384 actually didn't work because int overflow - max_ffmpeg_size = 16000 + max_ffmpeg_size = 8192 Image.MAX_IMAGE_PIXELS = None first_url = os.path.join(staging_dir, processed_img_names[0]) with Image.open(first_url) as im: From d4bfbe3b9e1510d358f162628170cd29c145a198 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 19 Aug 2022 14:03:06 +0200 Subject: [PATCH 0509/2550] Updated missed occurence Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 9d952586d2..0bad981fdf 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -635,7 +635,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): try: result = response.json() - except json.decoder.JSONDecodeError: + except JSONDecodeError: msg = "Broken response {}. ".format(response) msg += "Try restarting the Deadline Webservice." 
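Both abstract_submit_deadline.py hunks (PATCH 0506 and PATCH 0509) lean on the same compatibility detail: `json.decoder.JSONDecodeError` only exists on Python 3, so the module aliases it to `ValueError` on Python 2 and logs the broken Deadline reply before raising. A standalone sketch of that pattern with a made-up `parse_deadline_response` helper (not the real plugin method):

```python
import json
import logging

log = logging.getLogger(__name__)

# Python 2 json has no JSONDecodeError and raises ValueError instead, so fall
# back to ValueError when the attribute is missing - the same getattr trick
# used in the hunk above.
JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)


def parse_deadline_response(text):
    """Hypothetical helper: decode a Deadline Webservice reply defensively."""
    try:
        return json.loads(text)
    except JSONDecodeError:
        # An HTML error page or a truncated body from the webservice lands
        # here on both Python 2 and Python 3.
        log.warning("Broken response: %s", text, exc_info=True)
        raise RuntimeError(
            "Broken response from Deadline, try restarting the Webservice."
        )


print(parse_deadline_response('{"_id": "job123"}'))
```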
self.log.warning(msg, exc_info=True) From 31cc50534439315117e7bd68626a5fe807df3f2a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 19 Aug 2022 20:23:32 +0800 Subject: [PATCH 0510/2550] add write_color_sets in create_rig and enable options of swtiching on/off for write_color_sets in create_model/rig --- .../maya/plugins/create/create_animation.py | 3 ++- .../hosts/maya/plugins/create/create_model.py | 3 ++- .../maya/plugins/create/create_pointcache.py | 4 +++- .../hosts/maya/plugins/create/create_rig.py | 3 ++- .../defaults/project_settings/maya.json | 4 ++++ .../schemas/schema_maya_create.json | 20 +++++++++++++++++++ 6 files changed, 33 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index e47d4e5b5a..5ef5f61ab1 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -12,6 +12,7 @@ class CreateAnimation(plugin.Creator): family = "animation" icon = "male" write_color_sets = False + write_face_sets = False def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) @@ -24,7 +25,7 @@ class CreateAnimation(plugin.Creator): # Write vertex colors with the geometry. self.data["writeColorSets"] = self.write_color_sets - self.data["writeFaceSets"] = False + self.data["writeFaceSets"] = self.write_face_sets # Include only renderable visible shapes. # Skips locators and empty transforms diff --git a/openpype/hosts/maya/plugins/create/create_model.py b/openpype/hosts/maya/plugins/create/create_model.py index 041d3a77e2..520e962f74 100644 --- a/openpype/hosts/maya/plugins/create/create_model.py +++ b/openpype/hosts/maya/plugins/create/create_model.py @@ -10,12 +10,13 @@ class CreateModel(plugin.Creator): icon = "cube" defaults = ["Main", "Proxy", "_MD", "_HD", "_LD"] write_color_sets = False + write_face_sets = False def __init__(self, *args, **kwargs): super(CreateModel, self).__init__(*args, **kwargs) # Vertex colors with the geometry self.data["writeColorSets"] = self.write_color_sets - self.data["writeFaceSets"] = False + self.data["writeFaceSets"] = self.write_face_sets # Include attributes by attribute name or prefix self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index 5516445de8..ab8fe12079 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -12,6 +12,7 @@ class CreatePointCache(plugin.Creator): family = "pointcache" icon = "gears" write_color_sets = False + write_face_sets = False def __init__(self, *args, **kwargs): super(CreatePointCache, self).__init__(*args, **kwargs) @@ -21,7 +22,8 @@ class CreatePointCache(plugin.Creator): # Vertex colors with the geometry. self.data["writeColorSets"] = self.write_color_sets - self.data["writeFaceSets"] = False # Vertex colors with the geometry. + # Vertex colors with the geometry. 
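The extract_playblast.py change in PATCH 0505 above swaps a hand-written frame regex for clique's built-in `frames` pattern, so only the trailing `.<frame>.<ext>` digits are treated as a sequence index and a version token such as `v001` in the subset name can no longer split the collected sequence. A small sketch of that behaviour — the file names are invented, only the `clique` calls mirror the hunk:

```python
import clique

# Hypothetical playblast frames whose subset name carries a version-like
# "v001" token; only ".0001"/".0002" should drive the sequence detection.
files = [
    "reviewMain_v001.0001.png",
    "reviewMain_v001.0002.png",
]

# Restricting assembly to clique's canonical frame pattern keeps the digits
# of "v001" from being collected as a competing (single item) sequence.
collections, remainder = clique.assemble(
    files,
    minimum_items=1,
    patterns=[clique.PATTERNS["frames"]],
)

for collection in collections:
    print(collection)  # reviewMain_v001.%04d.png [1-2]
```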
+ self.data["writeFaceSets"] = self.write_face_sets self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups diff --git a/openpype/hosts/maya/plugins/create/create_rig.py b/openpype/hosts/maya/plugins/create/create_rig.py index 8eb1fab5e0..3b0ee1e22a 100644 --- a/openpype/hosts/maya/plugins/create/create_rig.py +++ b/openpype/hosts/maya/plugins/create/create_rig.py @@ -14,11 +14,12 @@ class CreateRig(plugin.Creator): family = "rig" icon = "wheelchair" write_color_sets = False + write_face_sets = False def __init__(self, *args, **kwargs): super(CreateRig, self).__init__(*args, **kwargs) self.data["writeColorSets"] = self.write_color_sets - self.data["writeFaceSets"] = False + self.data["writeFaceSets"] = self.write_face_sets def process(self): diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 4e950aa8b5..b4164c63f0 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -102,6 +102,7 @@ "CreateAnimation": { "enabled": true, "write_color_sets": false, + "write_face_sets": false, "defaults": [ "Main" ] @@ -109,6 +110,7 @@ "CreatePointCache": { "enabled": true, "write_color_sets": false, + "write_face_sets": false, "defaults": [ "Main" ] @@ -164,6 +166,7 @@ "CreateModel": { "enabled": true, "write_color_sets": false, + "write_face_sets": false, "defaults": [ "Main", "Proxy", @@ -185,6 +188,7 @@ "CreateRig": { "enabled": true, "write_color_sets": false, + "write_face_sets": false, "defaults": [ "Main", "Sim", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index b9ef6cb80c..7e12897336 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -127,6 +127,11 @@ "key": "write_color_sets", "label": "Write Color Sets" }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, { "type": "list", "key": "defaults", @@ -152,6 +157,11 @@ "key": "write_color_sets", "label": "Write Color Sets" }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, { "type": "list", "key": "defaults", @@ -177,6 +187,11 @@ "key": "write_color_sets", "label": "Write Color Sets" }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, { "type": "list", "key": "defaults", @@ -202,6 +217,11 @@ "key": "write_color_sets", "label": "Write Color Sets" }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, { "type": "list", "key": "defaults", From ba45c7b1694a27005c7f78a47f2e90179bdd11b5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 19 Aug 2022 16:14:41 +0200 Subject: [PATCH 0511/2550] improving code readability --- openpype/plugins/publish/collect_otio_subset_resources.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index 9c19f8a78e..3387cd1176 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -121,10 +121,8 @@ class 
CollectOtioSubsetResources(pyblish.api.InstancePlugin): otio.schema.ImageSequenceReference ): is_sequence = True - else: - # for OpenTimelineIO 0.12 and older - if metadata.get("padding"): - is_sequence = True + elif metadata.get("padding"): + is_sequence = True self.log.info( "frame_start-frame_end: {}-{}".format(frame_start, frame_end)) From 102965ea69f3ae738dd92af2c39ec9bc8ae577d4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 19 Aug 2022 16:15:16 +0200 Subject: [PATCH 0512/2550] editorial fixing handles to int and adding speed attribute --- openpype/pipeline/editorial.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/editorial.py b/openpype/pipeline/editorial.py index f62a1842e0..564d78ea6f 100644 --- a/openpype/pipeline/editorial.py +++ b/openpype/pipeline/editorial.py @@ -263,16 +263,17 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end): "retime": True, "speed": time_scalar, "timewarps": time_warp_nodes, - "handleStart": round(handle_start), - "handleEnd": round(handle_end) + "handleStart": int(round(handle_start)), + "handleEnd": int(round(handle_end)) } } returning_dict = { "mediaIn": media_in_trimmed, "mediaOut": media_out_trimmed, - "handleStart": round(handle_start), - "handleEnd": round(handle_end) + "handleStart": int(round(handle_start)), + "handleEnd": int(round(handle_end)), + "speed": time_scalar } # add version data only if retime From 869c9255ff266e90ec2f95abae67c234263beefb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 19 Aug 2022 16:15:43 +0200 Subject: [PATCH 0513/2550] flame: improving extractor of subsets --- .../publish/extract_subset_resources.py | 124 ++++++++++++++++-- 1 file changed, 113 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index d34f5d5854..432bc3b500 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -8,6 +8,9 @@ import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi from openpype.hosts.flame.api import MediaInfoFile +from openpype.pipeline.editorial import ( + get_media_range_with_retimes +) import flame @@ -65,20 +68,50 @@ class ExtractSubsetResources(openpype.api.Extractor): # get configured workfile frame start/end (handles excluded) frame_start = instance.data["frameStart"] # get media source first frame - source_first_frame = instance.data["sourceFirstFrame"] + source_first_frame = instance.data["sourceFirstFrame"] # 1001 # get timeline in/out of segment clip_in = instance.data["clipIn"] clip_out = instance.data["clipOut"] + # get retimed attributres + retimed_data = self._get_retimed_attributes(instance) + self.log.debug("_ retimed_data: {}".format( + pformat(retimed_data) + )) + # get individual keys + r_handle_start = retimed_data["handle_start"] + r_handle_end = retimed_data["handle_end"] + r_source_dur = retimed_data["source_duration"] + r_speed = retimed_data["speed"] + r_handles = max(r_handle_start, r_handle_end) + # get handles value - take only the max from both handle_start = instance.data["handleStart"] - handle_end = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) + include_handles = instance.data.get("includeHandles") + self.log.debug("_ include_handles: {}".format(include_handles)) # get media source range with handles 
source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] + # retime if needed + if r_speed != 1.0: + source_start_handles = ( + instance.data["sourceStart"] - r_handle_start) + source_end_handles = ( + source_start_handles + # TODO: duration exclude 1 - might be problem + + (r_source_dur - 1) + + r_handle_start + + r_handle_end + ) + + self.log.debug("_ source_start_handles: {}".format( + source_start_handles)) + self.log.debug("_ source_end_handles: {}".format( + source_end_handles)) # create staging dir path staging_dir = self.staging_dir(instance) @@ -93,6 +126,19 @@ class ExtractSubsetResources(openpype.api.Extractor): } export_presets.update(self.export_presets_mapping) + # set versiondata if any retime + version_data = retimed_data.get("version_data") + + if version_data: + instance.data["versionData"].update(version_data) + + if instance.data.get("versionData"): + if r_speed != 1.0: + instance.data["versionData"].update({ + "frameStart": source_start_handles + r_handle_start, + "frameEnd": source_end_handles - r_handle_end, + }) + # loop all preset names and for unique_name, preset_config in export_presets.items(): modify_xml_data = {} @@ -117,14 +163,22 @@ class ExtractSubsetResources(openpype.api.Extractor): # get frame range with handles for representation range frame_start_handle = frame_start - handle_start + if include_handles: + if r_speed == 1.0: + frame_start_handle = frame_start + else: + frame_start_handle = ( + frame_start - handle_start) + r_handle_start + + self.log.debug("_ frame_start_handle: {}".format( + frame_start_handle)) # calculate duration with handles source_duration_handles = ( - source_end_handles - source_start_handles) + source_end_handles - source_start_handles) + 1 - # define in/out marks - in_mark = (source_start_handles - source_first_frame) + 1 - out_mark = in_mark + source_duration_handles + self.log.debug("_ source_duration_handles: {}".format( + source_duration_handles)) exporting_clip = None name_patern_xml = "_{}.".format( @@ -142,19 +196,28 @@ class ExtractSubsetResources(openpype.api.Extractor): "__{}.").format( unique_name) - # change in/out marks to timeline in/out + # only for h264 with baked retime in_mark = clip_in - out_mark = clip_out + out_mark = clip_out + 1 + + modify_xml_data["nbHandles"] = handles else: + in_mark = (source_start_handles - source_first_frame) + 1 + out_mark = in_mark + source_duration_handles exporting_clip = self.import_clip(clip_path) exporting_clip.name.set_value("{}_{}".format( asset_name, segment_name)) + modify_xml_data["nbHandles"] = ( + handles if r_speed == 1.0 else r_handles) # add xml tags modifications modify_xml_data.update({ + # TODO: handles only to Sequence preset + # TODO: enable Start frame attribute "exportHandles": True, - "nbHandles": handles, - "startFrame": frame_start, + "startFrame": frame_start_handle, + # enum position low start from 0 + "frameIndex": 0, "namePattern": name_patern_xml }) @@ -162,6 +225,12 @@ class ExtractSubsetResources(openpype.api.Extractor): # add any xml overrides collected form segment.comment modify_xml_data.update(instance.data["xml_overrides"]) + self.log.debug(pformat(modify_xml_data)) + self.log.debug("_ sequence publish {}".format( + export_type == "Sequence Publish")) + self.log.debug("_ in_mark: {}".format(in_mark)) + self.log.debug("_ out_mark: {}".format(out_mark)) + export_kwargs = {} # validate xml preset file is filled if preset_file == "": @@ -283,7 +352,7 @@ class 
ExtractSubsetResources(openpype.api.Extractor): representation_data.update({ "frameStart": frame_start_handle, "frameEnd": ( - frame_start_handle + source_duration_handles), + frame_start_handle + source_duration_handles) - 1, "fps": instance.data["fps"] }) @@ -303,6 +372,39 @@ class ExtractSubsetResources(openpype.api.Extractor): self.log.debug("All representations: {}".format( pformat(instance.data["representations"]))) + def _get_retimed_attributes(self, instance): + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + include_handles = instance.data.get("includeHandles") + self.log.debug("_ include_handles: {}".format(include_handles)) + + # get basic variables + otio_clip = instance.data["otioClip"] + otio_avalable_range = otio_clip.available_range() + available_duration = otio_avalable_range.duration.value + self.log.debug( + ">> available_duration: {}".format(available_duration)) + + # get available range trimmed with processed retimes + retimed_attributes = get_media_range_with_retimes( + otio_clip, handle_start, handle_end) + self.log.debug( + ">> retimed_attributes: {}".format(retimed_attributes)) + + r_media_in = int(retimed_attributes["mediaIn"]) + r_media_out = int(retimed_attributes["mediaOut"]) + version_data = retimed_attributes.get("versionData") + + return { + "version_data": version_data, + "handle_start": int(retimed_attributes["handleStart"]), + "handle_end": int(retimed_attributes["handleEnd"]), + "source_duration": ( + (r_media_out - r_media_in) + 1 + ), + "speed": float(retimed_attributes["speed"]) + } + def _should_skip(self, preset_config, clip_path, unique_name): # get activating attributes activated_preset = preset_config["active"] From faec36f1d65292121af9be129b2857d7d100a60f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 19 Aug 2022 16:34:37 +0200 Subject: [PATCH 0514/2550] code cleanup --- .../plugins/publish/extract_subset_resources.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 432bc3b500..2f4f90fe55 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -84,7 +84,6 @@ class ExtractSubsetResources(openpype.api.Extractor): r_handle_end = retimed_data["handle_end"] r_source_dur = retimed_data["source_duration"] r_speed = retimed_data["speed"] - r_handles = max(r_handle_start, r_handle_end) # get handles value - take only the max from both handle_start = instance.data["handleStart"] @@ -183,6 +182,7 @@ class ExtractSubsetResources(openpype.api.Extractor): exporting_clip = None name_patern_xml = "_{}.".format( unique_name) + if export_type == "Sequence Publish": # change export clip to sequence exporting_clip = flame.duplicate(sequence_clip) @@ -199,25 +199,22 @@ class ExtractSubsetResources(openpype.api.Extractor): # only for h264 with baked retime in_mark = clip_in out_mark = clip_out + 1 - - modify_xml_data["nbHandles"] = handles + modify_xml_data.update({ + "exportHandles": True, + "nbHandles": handles + }) else: in_mark = (source_start_handles - source_first_frame) + 1 out_mark = in_mark + source_duration_handles exporting_clip = self.import_clip(clip_path) exporting_clip.name.set_value("{}_{}".format( asset_name, segment_name)) - modify_xml_data["nbHandles"] = ( - handles if r_speed == 1.0 else r_handles) # add xml tags modifications 
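To see what the `r_speed != 1.0` branch above does to the exported range, the same arithmetic can be replayed with toy numbers (none of these values come from a real instance; they only exercise the formulas from the hunk):

```python
# Invented example values standing in for instance.data and the retimed
# attributes returned by get_media_range_with_retimes().
source_start = 1001      # instance.data["sourceStart"]
r_handle_start = 20      # retimed handle start
r_handle_end = 20        # retimed handle end
r_source_dur = 50        # retimed source duration in frames

# Same computation as the retime branch in extract_subset_resources.py above.
source_start_handles = source_start - r_handle_start
source_end_handles = (
    source_start_handles
    + (r_source_dur - 1)
    + r_handle_start
    + r_handle_end
)
source_duration_handles = (source_end_handles - source_start_handles) + 1

print(source_start_handles)     # 981
print(source_end_handles)       # 1070
print(source_duration_handles)  # 90 -> retimed duration plus both handles
```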
modify_xml_data.update({ - # TODO: handles only to Sequence preset - # TODO: enable Start frame attribute - "exportHandles": True, - "startFrame": frame_start_handle, # enum position low start from 0 "frameIndex": 0, + "startFrame": frame_start_handle, "namePattern": name_patern_xml }) From 8d08d5966a5eb213d4a8de57bf497cda83ccb631 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 19 Aug 2022 17:39:59 +0200 Subject: [PATCH 0515/2550] cleaning code --- .../publish/extract_subset_resources.py | 48 ++++--------------- 1 file changed, 10 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 2f4f90fe55..ddf126c445 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -50,7 +50,6 @@ class ExtractSubsetResources(openpype.api.Extractor): export_presets_mapping = {} def process(self, instance): - if not self.keep_original_representation: # remove previeous representation if not needed instance.data["representations"] = [] @@ -68,7 +67,7 @@ class ExtractSubsetResources(openpype.api.Extractor): # get configured workfile frame start/end (handles excluded) frame_start = instance.data["frameStart"] # get media source first frame - source_first_frame = instance.data["sourceFirstFrame"] # 1001 + source_first_frame = instance.data["sourceFirstFrame"] # get timeline in/out of segment clip_in = instance.data["clipIn"] @@ -76,9 +75,7 @@ class ExtractSubsetResources(openpype.api.Extractor): # get retimed attributres retimed_data = self._get_retimed_attributes(instance) - self.log.debug("_ retimed_data: {}".format( - pformat(retimed_data) - )) + # get individual keys r_handle_start = retimed_data["handle_start"] r_handle_end = retimed_data["handle_end"] @@ -90,7 +87,6 @@ class ExtractSubsetResources(openpype.api.Extractor): handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) include_handles = instance.data.get("includeHandles") - self.log.debug("_ include_handles: {}".format(include_handles)) # get media source range with handles source_start_handles = instance.data["sourceStartH"] @@ -101,17 +97,11 @@ class ExtractSubsetResources(openpype.api.Extractor): instance.data["sourceStart"] - r_handle_start) source_end_handles = ( source_start_handles - # TODO: duration exclude 1 - might be problem + (r_source_dur - 1) + r_handle_start + r_handle_end ) - self.log.debug("_ source_start_handles: {}".format( - source_start_handles)) - self.log.debug("_ source_end_handles: {}".format( - source_end_handles)) - # create staging dir path staging_dir = self.staging_dir(instance) @@ -125,18 +115,20 @@ class ExtractSubsetResources(openpype.api.Extractor): } export_presets.update(self.export_presets_mapping) + if not instance.data.get("versionData"): + instance.data["versionData"] = {} + # set versiondata if any retime version_data = retimed_data.get("version_data") if version_data: instance.data["versionData"].update(version_data) - if instance.data.get("versionData"): - if r_speed != 1.0: - instance.data["versionData"].update({ - "frameStart": source_start_handles + r_handle_start, - "frameEnd": source_end_handles - r_handle_end, - }) + if r_speed != 1.0: + instance.data["versionData"].update({ + "frameStart": source_start_handles + r_handle_start, + "frameEnd": source_end_handles - r_handle_end, + }) # loop all preset names and for unique_name, preset_config in 
export_presets.items(): @@ -176,9 +168,6 @@ class ExtractSubsetResources(openpype.api.Extractor): source_duration_handles = ( source_end_handles - source_start_handles) + 1 - self.log.debug("_ source_duration_handles: {}".format( - source_duration_handles)) - exporting_clip = None name_patern_xml = "_{}.".format( unique_name) @@ -222,9 +211,6 @@ class ExtractSubsetResources(openpype.api.Extractor): # add any xml overrides collected form segment.comment modify_xml_data.update(instance.data["xml_overrides"]) - self.log.debug(pformat(modify_xml_data)) - self.log.debug("_ sequence publish {}".format( - export_type == "Sequence Publish")) self.log.debug("_ in_mark: {}".format(in_mark)) self.log.debug("_ out_mark: {}".format(out_mark)) @@ -264,7 +250,6 @@ class ExtractSubsetResources(openpype.api.Extractor): thumb_frame_number = int(in_mark + ( source_duration_handles / 2)) - self.log.debug("__ in_mark: {}".format(in_mark)) self.log.debug("__ thumb_frame_number: {}".format( thumb_frame_number )) @@ -276,9 +261,6 @@ class ExtractSubsetResources(openpype.api.Extractor): "out_mark": out_mark }) - self.log.debug("__ modify_xml_data: {}".format( - pformat(modify_xml_data) - )) preset_path = opfapi.modify_preset_file( preset_orig_xml_path, staging_dir, modify_xml_data) @@ -366,21 +348,13 @@ class ExtractSubsetResources(openpype.api.Extractor): # at the end remove the duplicated clip flame.delete(exporting_clip) - self.log.debug("All representations: {}".format( - pformat(instance.data["representations"]))) def _get_retimed_attributes(self, instance): handle_start = instance.data["handleStart"] handle_end = instance.data["handleEnd"] - include_handles = instance.data.get("includeHandles") - self.log.debug("_ include_handles: {}".format(include_handles)) # get basic variables otio_clip = instance.data["otioClip"] - otio_avalable_range = otio_clip.available_range() - available_duration = otio_avalable_range.duration.value - self.log.debug( - ">> available_duration: {}".format(available_duration)) # get available range trimmed with processed retimes retimed_attributes = get_media_range_with_retimes( @@ -412,8 +386,6 @@ class ExtractSubsetResources(openpype.api.Extractor): unique_name, activated_preset, filter_path_regex ) ) - self.log.debug( - "__ clip_path: `{}`".format(clip_path)) # skip if not activated presete if not activated_preset: From b15471501a44a09e5e205460aea6b6fc0c365e91 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 19 Aug 2022 17:41:11 +0200 Subject: [PATCH 0516/2550] hound suggestions --- openpype/hosts/flame/api/lib.py | 1 - openpype/hosts/flame/plugins/publish/extract_subset_resources.py | 1 - 2 files changed, 2 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index a5ae3c4468..94c46fe937 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -1,7 +1,6 @@ import sys import os import re -import sys import json import pickle import clique diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index ddf126c445..8a03ba119c 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -348,7 +348,6 @@ class ExtractSubsetResources(openpype.api.Extractor): # at the end remove the duplicated clip flame.delete(exporting_clip) - def _get_retimed_attributes(self, instance): handle_start = instance.data["handleStart"] handle_end = 
instance.data["handleEnd"] From afd13c31698eac6b0d2d4347547361ca9be7e002 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 19 Aug 2022 18:59:37 +0200 Subject: [PATCH 0517/2550] :wrench: add settings --- .../defaults/project_settings/houdini.json | 12 +++++ .../schema_project_houdini.json | 18 +------ .../schemas/schema_houdini_publish.json | 50 +++++++++++++++++++ 3 files changed, 64 insertions(+), 16 deletions(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 911bf82d9b..b7d2104ba1 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -47,6 +47,18 @@ } }, "publish": { + "ValidateWorkfilePaths": { + "enabled": true, + "optional": true, + "node_types": [ + "file", + "alembic" + ], + "prohibited_vars": [ + "$HIP", + "$JOB" + ] + }, "ValidateContainers": { "enabled": true, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json index cad99dde22..d8728c0f4b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json @@ -10,22 +10,8 @@ "name": "schema_houdini_create" }, { - "type": "dict", - "collapsible": true, - "key": "publish", - "label": "Publish plugins", - "children": [ - { - "type": "schema_template", - "name": "template_publish_plugin", - "template_data": [ - { - "key": "ValidateContainers", - "label": "ValidateContainers" - } - ] - } - ] + "type": "schema", + "name": "schema_houdini_publish" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json new file mode 100644 index 0000000000..aa6eaf5164 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json @@ -0,0 +1,50 @@ +{ + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "ValidateWorkfilePaths", + "label": "Validate Workfile Paths", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "key": "node_types", + "label": "Node types", + "type": "list", + "object_type": "text" + }, + { + "key": "prohibited_vars", + "label": "Prohibited variables", + "type": "list", + "object_type": "text" + } + ] + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateContainers", + "label": "ValidateContainers" + } + ] + } + ] +} \ No newline at end of file From 5d14fdd1cef6eb2d40925efeadfbcab3af219ca5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 19 Aug 2022 19:02:10 +0200 Subject: [PATCH 0518/2550] fix _reset_crashed --- openpype/tools/settings/settings/categories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index 2e3c6d9dda..fd95b4ca71 100644 --- 
a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -701,7 +701,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): self.breadcrumbs_model.set_entity(None) def _on_reset_success(self): - self._reset_crashed = True + self._reset_crashed = False if not self.save_btn.isEnabled(): self.save_btn.setEnabled(self._edit_mode) From 24d733ecf36f6694f4861b112f1c0ecb1b27072a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 19 Aug 2022 19:09:42 +0200 Subject: [PATCH 0519/2550] :dog: fix hound --- .../hosts/houdini/plugins/publish/validate_workfile_paths.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 9e087fe51c..79b3e894e5 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -26,9 +26,8 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): ) if invalid: for param in invalid: - self.log.error("{}: {}".format( - param.path(), - param.unexpandedString())) + self.log.error( + "{}: {}".format(param.path(), param.unexpandedString())) raise RuntimeError("Invalid paths found") From 890d1becaa8a4fcc597977d6b0cbe25e21bf34d3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 19 Aug 2022 19:11:00 +0200 Subject: [PATCH 0520/2550] moved dialog to separated file --- openpype/tools/settings/settings/dialogs.py | 115 ++++++++++++++++++++ openpype/tools/settings/settings/window.py | 93 +--------------- 2 files changed, 116 insertions(+), 92 deletions(-) create mode 100644 openpype/tools/settings/settings/dialogs.py diff --git a/openpype/tools/settings/settings/dialogs.py b/openpype/tools/settings/settings/dialogs.py new file mode 100644 index 0000000000..dea056b89d --- /dev/null +++ b/openpype/tools/settings/settings/dialogs.py @@ -0,0 +1,115 @@ +from Qt import QtWidgets, QtCore + + +class BaseInfoDialog(QtWidgets.QDialog): + width = 600 + height = 400 + + def __init__(self, message, title, info_obj, parent=None): + super(BaseInfoDialog, self).__init__(parent) + self._result = 0 + self._info_obj = info_obj + + self.setWindowTitle(title) + + message_label = QtWidgets.QLabel(message, self) + message_label.setWordWrap(True) + + separator_widget_1 = QtWidgets.QFrame(self) + separator_widget_2 = QtWidgets.QFrame(self) + for separator_widget in ( + separator_widget_1, + separator_widget_2 + ): + separator_widget.setObjectName("Separator") + separator_widget.setMinimumHeight(1) + separator_widget.setMaximumHeight(1) + + other_information = QtWidgets.QWidget(self) + other_information_layout = QtWidgets.QFormLayout(other_information) + other_information_layout.setContentsMargins(0, 0, 0, 0) + for label, value in ( + ("Username", info_obj.username), + ("Host name", info_obj.hostname), + ("Host IP", info_obj.hostip), + ("System name", info_obj.system_name), + ("Local ID", info_obj.local_id), + ("Time Stamp", info_obj.timestamp), + ): + other_information_layout.addRow( + label, + QtWidgets.QLabel(value, other_information) + ) + + footer_widget = QtWidgets.QWidget(self) + buttons_widget = QtWidgets.QWidget(footer_widget) + + buttons_layout = QtWidgets.QHBoxLayout(buttons_widget) + buttons_layout.setContentsMargins(0, 0, 0, 0) + buttons = self.get_buttons(buttons_widget) + for button in buttons: + buttons_layout.addWidget(button, 1) + + footer_layout = 
QtWidgets.QHBoxLayout(footer_widget) + footer_layout.setContentsMargins(0, 0, 0, 0) + footer_layout.addStretch(1) + footer_layout.addWidget(buttons_widget, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(message_label, 0) + layout.addWidget(separator_widget_1, 0) + layout.addStretch(1) + layout.addWidget(other_information, 0, QtCore.Qt.AlignHCenter) + layout.addStretch(1) + layout.addWidget(separator_widget_2, 0) + layout.addWidget(footer_widget, 0) + + def showEvent(self, event): + super(BaseInfoDialog, self).showEvent(event) + self.resize(self.width, self.height) + + def result(self): + return self._result + + def get_buttons(self, parent): + return [] + + +class SettingsUIOpenedElsewhere(BaseInfoDialog): + def __init__(self, info_obj, parent=None): + title = "Someone else has opened Settings UI" + message = ( + "Someone else has opened Settings UI which could cause data loss." + " Please contact the person on the other side." + "
\n\nYou can continue in view-only mode." + " All changes in view mode will be lost." + "\n\nYou can take control which will cause that" + " all changes of settings on the other side will be lost.\n
    " + ) + super(SettingsUIOpenedElsewhere, self).__init__( + message, title, info_obj, parent + ) + + def _on_take_control(self): + self._result = 1 + self.close() + + def _on_view_mode(self): + self._result = 0 + self.close() + + def get_buttons(self, parent): + take_control_btn = QtWidgets.QPushButton( + "Take control", parent + ) + view_mode_btn = QtWidgets.QPushButton( + "View only", parent + ) + + take_control_btn.clicked.connect(self._on_take_control) + view_mode_btn.clicked.connect(self._on_view_mode) + + return [ + take_control_btn, + view_mode_btn + ] diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index fcbcd129d0..2750785535 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -12,6 +12,7 @@ from openpype.settings.lib import ( closed_settings_ui, ) +from .dialogs import SettingsUIOpenedElsewhere from .categories import ( CategoryState, SystemWidget, @@ -348,95 +349,3 @@ class MainWidget(QtWidgets.QWidget): return return super(MainWidget, self).keyPressEvent(event) - - -class SettingsUIOpenedElsewhere(QtWidgets.QDialog): - def __init__(self, info_obj, parent=None): - super(SettingsUIOpenedElsewhere, self).__init__(parent) - - self._result = 0 - - self.setWindowTitle("Someone else has opened Settings UI") - - message_label = QtWidgets.QLabel(( - "Someone else has opened Settings UI which could cause data loss." - " Please contact the person on the other side." - "
\n\nYou can open the UI in view-only mode." - " All changes in view mode will be lost." - "\n\nYou can take the control which will cause that" - " all changes of settings on the other side will be lost.\n
    " - ), self) - message_label.setWordWrap(True) - - separator_widget_1 = QtWidgets.QFrame(self) - separator_widget_2 = QtWidgets.QFrame(self) - for separator_widget in ( - separator_widget_1, - separator_widget_2 - ): - separator_widget.setObjectName("Separator") - separator_widget.setMinimumHeight(1) - separator_widget.setMaximumHeight(1) - - other_information = QtWidgets.QWidget(self) - other_information_layout = QtWidgets.QFormLayout(other_information) - other_information_layout.setContentsMargins(0, 0, 0, 0) - for label, value in ( - ("Username", info_obj.username), - ("Host name", info_obj.hostname), - ("Host IP", info_obj.hostip), - ("System name", info_obj.system_name), - ("Local ID", info_obj.local_id), - ("Time Stamp", info_obj.timestamp), - ): - other_information_layout.addRow( - label, - QtWidgets.QLabel(value, other_information) - ) - - footer_widget = QtWidgets.QWidget(self) - buttons_widget = QtWidgets.QWidget(footer_widget) - - take_control_btn = QtWidgets.QPushButton( - "Take control", buttons_widget - ) - view_mode_btn = QtWidgets.QPushButton( - "View only", buttons_widget - ) - - buttons_layout = QtWidgets.QHBoxLayout(buttons_widget) - buttons_layout.setContentsMargins(0, 0, 0, 0) - buttons_layout.addWidget(take_control_btn, 1) - buttons_layout.addWidget(view_mode_btn, 1) - - footer_layout = QtWidgets.QHBoxLayout(footer_widget) - footer_layout.setContentsMargins(0, 0, 0, 0) - footer_layout.addStretch(1) - footer_layout.addWidget(buttons_widget, 0) - - layout = QtWidgets.QVBoxLayout(self) - layout.addWidget(message_label, 0) - layout.addWidget(separator_widget_1, 0) - layout.addStretch(1) - layout.addWidget(other_information, 0, QtCore.Qt.AlignHCenter) - layout.addStretch(1) - layout.addWidget(separator_widget_2, 0) - layout.addWidget(footer_widget, 0) - - take_control_btn.clicked.connect(self._on_take_control) - view_mode_btn.clicked.connect(self._on_view_mode) - - def result(self): - return self._result - - def _on_take_control(self): - self._result = 1 - self.close() - - def _on_view_mode(self): - self._result = 0 - self.close() - - def showEvent(self, event): - super(SettingsUIOpenedElsewhere, self).showEvent(event) - self.resize(600, 400) From 2ea277992ea217ff9d4b65cd3b36dd5ad2218f8e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 19 Aug 2022 21:06:05 +0200 Subject: [PATCH 0521/2550] update ftrack cli commands --- website/docs/module_ftrack.md | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/website/docs/module_ftrack.md b/website/docs/module_ftrack.md index 667782754f..ad9cf75e8f 100644 --- a/website/docs/module_ftrack.md +++ b/website/docs/module_ftrack.md @@ -13,7 +13,7 @@ Ftrack is currently the main project management option for OpenPype. This docume ## Prepare Ftrack for OpenPype ### Server URL -If you want to connect Ftrack to OpenPype you might need to make few changes in Ftrack settings. These changes would take a long time to do manually, so we prepared a few Ftrack actions to help you out. First, you'll need to launch OpenPype settings, enable [Ftrack module](admin_settings_system.md#Ftrack), and enter the address to your Ftrack server. +If you want to connect Ftrack to OpenPype you might need to make few changes in Ftrack settings. These changes would take a long time to do manually, so we prepared a few Ftrack actions to help you out. First, you'll need to launch OpenPype settings, enable [Ftrack module](admin_settings_system.md#Ftrack), and enter the address to your Ftrack server. 
### Login Once your server is configured, restart OpenPype and you should be prompted to enter your [Ftrack credentials](artist_ftrack.md#How-to-use-Ftrack-in-OpenPype) to be able to run our Ftrack actions. If you are already logged in to Ftrack in your browser, it is enough to press `Ftrack login` and it will connect automatically. @@ -26,7 +26,7 @@ You can only use our Ftrack Actions and publish to Ftrack if each artist is logg ### Custom Attributes After successfully connecting OpenPype with you Ftrack, you can right click on any project in Ftrack and you should see a bunch of actions available. The most important one is called `OpenPype Admin` and contains multiple options inside. -To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Create/Update Custom Attributes](manager_ftrack_actions.md#create-update-avalon-attributes), which creates and sets the Custom Attributes necessary for OpenPype to function. +To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Create/Update Custom Attributes](manager_ftrack_actions.md#create-update-avalon-attributes), which creates and sets the Custom Attributes necessary for OpenPype to function. @@ -34,7 +34,7 @@ To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Ftrack Event Server is the key to automation of many tasks like _status change_, _thumbnail update_, _automatic synchronization to Avalon database_ and many more. Event server should run at all times to perform the required processing as it is not possible to catch some of them retrospectively with enough certainty. ### Running event server -There are specific launch arguments for event server. With `openpype_console eventserver` you can launch event server but without prior preparation it will terminate immediately. The reason is that event server requires 3 pieces of information: _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. Ftrack server URL and Event path are set from OpenPype's environments by default, but the credentials must be done separatelly for security reasons. +There are specific launch arguments for event server. With `openpype_console module ftrack eventserver` you can launch event server but without prior preparation it will terminate immediately. The reason is that event server requires 3 pieces of information: _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. Ftrack server URL and Event path are set from OpenPype's environments by default, but the credentials must be done separatelly for security reasons. @@ -53,7 +53,7 @@ There are specific launch arguments for event server. With `openpype_console eve - **`--ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee"`** : User's API key - `--ftrack-url "https://yourdomain.ftrackapp.com/"` : Ftrack server URL _(it is not needed to enter if you have set `FTRACK_SERVER` in OpenPype' environments)_ -So if you want to use OpenPype's environments then you can launch event server for first time with these arguments `openpype_console.exe eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. Since that time, if everything was entered correctly, you can launch event server with `openpype_console.exe eventserver`. 
+So if you want to use OpenPype's environments then you can launch event server for first time with these arguments `openpype_console.exe module ftrack eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. Since that time, if everything was entered correctly, you can launch event server with `openpype_console.exe module ftrack eventserver`. @@ -72,7 +72,7 @@ We do not recommend setting your Ftrack user and api key environments in a persi ### Where to run event server -We recommend you to run event server on stable server machine with ability to connect to Avalon database and Ftrack web server. Best practice we recommend is to run event server as service. It can be Windows or Linux. +We recommend you to run event server on stable server machine with ability to connect to Avalon database and Ftrack web server. Best practice we recommend is to run event server as service. It can be Windows or Linux. :::important Event server should **not** run more than once! It may cause major issues. @@ -99,11 +99,10 @@ Event server should **not** run more than once! It may cause major issues. - add content to the file: ```sh #!/usr/bin/env bash -export OPENPYPE_DEBUG=1 export OPENPYPE_MONGO= pushd /mnt/path/to/openpype -./openpype_console eventserver --ftrack-user --ftrack-api-key +./openpype_console module ftrack eventserver --ftrack-user --ftrack-api-key --debug ``` - change file permission: `sudo chmod 0755 /opt/openpype/run_event_server.sh` @@ -140,14 +139,13 @@ WantedBy=multi-user.target - create service file: `openpype-ftrack-eventserver.bat` -- add content to the service file: +- add content to the service file: ```sh @echo off -set OPENPYPE_DEBUG=1 set OPENPYPE_MONGO= pushd \\path\to\openpype -openpype_console.exe eventserver --ftrack-user --ftrack-api-key +openpype_console.exe module ftrack eventserver --ftrack-user --ftrack-api-key --debug ``` - download and install `nssm.cc` - create Windows service according to nssm.cc manual @@ -174,7 +172,7 @@ This event updates entities on their changes Ftrack. When new entity is created Deleting an entity by Ftrack's default is not processed for security reasons _(to delete entity use [Delete Asset/Subset action](manager_ftrack_actions.md#delete-asset-subset))_. ::: -### Synchronize Hierarchical and Entity Attributes +### Synchronize Hierarchical and Entity Attributes Auto-synchronization of hierarchical attributes from Ftrack entities. @@ -190,7 +188,7 @@ Change status of next task from `Not started` to `Ready` when previous task is a Multiple detailed rules for next task update can be configured in the settings. -### Delete Avalon ID from new entity +### Delete Avalon ID from new entity Is used to remove value from `Avalon/Mongo Id` Custom Attribute when entity is created. @@ -215,7 +213,7 @@ This event handler allows setting of different status to a first created Asset V This is useful for example if first version publish doesn't contain any actual reviewable work, but is only used for roundtrip conform check, in which case this version could receive status `pending conform` instead of standard `pending review` ### Update status on next task -Change status on next task by task types order when task status state changed to "Done". All tasks with the same Task mapping of next task status changes From → To. Some status can be ignored. +Change status on next task by task types order when task status state changed to "Done". 
All tasks with the same Task mapping of next task status changes From → To. Some status can be ignored. ## Publish plugins @@ -238,7 +236,7 @@ Add Ftrack Family: enabled #### Advanced adding if additional families present -In special cases adding 'ftrack' based on main family ('Families' set higher) is not enough. +In special cases adding 'ftrack' based on main family ('Families' set higher) is not enough. (For example upload to Ftrack for 'plate' main family should only happen if 'review' is contained in instance 'families', not added in other cases. ) -![Collect Ftrack Family](assets/ftrack/ftrack-collect-advanced.png) \ No newline at end of file +![Collect Ftrack Family](assets/ftrack/ftrack-collect-advanced.png) From 752f1cde1c307398fa56ada929f8299ee97face5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 19 Aug 2022 21:10:28 +0200 Subject: [PATCH 0522/2550] removed outdated repositories information --- website/docs/system_introduction.md | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/website/docs/system_introduction.md b/website/docs/system_introduction.md index 71c5d64aa8..db297b527a 100644 --- a/website/docs/system_introduction.md +++ b/website/docs/system_introduction.md @@ -48,24 +48,3 @@ to the table - Some DCCs do not support using Environment variables in file paths. This will make it very hard to maintain full multiplatform compatibility as well variable storage roots. - Relying on VPN connection and using it to work directly of network storage will be painfully slow. - - -## Repositories - -### [OpenPype](https://github.com/pypeclub/pype) - -This is where vast majority of the code that works with your data lives. It acts -as Avalon-Config, if we're speaking in avalon terms. - -Avalon gives us the ability to work with a certain host, say Maya, in a standardized manner, but OpenPype defines **how** we work with all the data, allows most of the behavior to be configured on a very granular level and provides a comprehensive build and installation tools for it. - -Thanks to that, we are able to maintain one codebase for vast majority of the features across all our clients deployments while keeping the option to tailor the pipeline to each individual studio. - -### [Avalon-core](https://github.com/pypeclub/avalon-core) - -Avalon-core is the heart of OpenPype. It provides the base functionality including key GUIs (albeit expanded and modified by us), database connection, standards for data structures, working with entities and some universal tools. - -Avalon is being actively developed and maintained by a community of studios and TDs from around the world, with Pype Club team being an active contributor as well. - -Due to the extensive work we've done on OpenPype and the need to react quickly to production needs, we -maintain our own fork of avalon-core, which is kept up to date with upstream changes as much as possible. From 40efddb9b40a3d74d136dd9a06647fd2b73ded91 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 19 Aug 2022 21:21:28 +0200 Subject: [PATCH 0523/2550] removed eventserver from global cli commands --- website/docs/admin_openpype_commands.md | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 53fc12410f..cf45138fa9 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -40,7 +40,6 @@ For more information [see here](admin_use.md#run-openpype). 
| module | Run command line arguments for modules. | | | repack-version | Tool to re-create version zip. | [📑](#repack-version-arguments) | | tray | Launch OpenPype Tray. | [📑](#tray-arguments) -| eventserver | This should be ideally used by system service (such as systemd or upstart on linux and window service). | [📑](#eventserver-arguments) | | launch | Launch application in Pype environment. | [📑](#launch-arguments) | | publish | Pype takes JSON from provided path and use it to publish data in it. | [📑](#publish-arguments) | | extractenvironments | Extract environment variables for entered context to a json file. | [📑](#extractenvironments-arguments) | @@ -57,25 +56,7 @@ For more information [see here](admin_use.md#run-openpype). openpype_console tray ``` --- -### `launch` arguments {#eventserver-arguments} -You have to set either proper environment variables to provide URL and credentials or use -option to specify them. -| Argument | Description | -| --- | --- | -| `--ftrack-url` | URL to ftrack server (can be set with `FTRACK_SERVER`) | -| `--ftrack-user` |user name to log in to ftrack (can be set with `FTRACK_API_USER`) | -| `--ftrack-api-key` | ftrack api key (can be set with `FTRACK_API_KEY`) | -| `--legacy` | run event server without mongo storing | -| `--clockify-api-key` | Clockify API key (can be set with `CLOCKIFY_API_KEY`) | -| `--clockify-workspace` | Clockify workspace (can be set with `CLOCKIFY_WORKSPACE`) | - -To run ftrack event server: -```shell -openpype_console eventserver --ftrack-url= --ftrack-user= --ftrack-api-key= -``` - ---- ### `launch` arguments {#launch-arguments} | Argument | Description | From ce4c00d20a677c08bcd046ec24a0a3ba6c9e4792 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 19 Aug 2022 21:46:21 +0200 Subject: [PATCH 0524/2550] add information about naming limitations to key concepts --- website/docs/artist_concepts.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/website/docs/artist_concepts.md b/website/docs/artist_concepts.md index 9005cffe87..f67ab89b9c 100644 --- a/website/docs/artist_concepts.md +++ b/website/docs/artist_concepts.md @@ -10,6 +10,8 @@ sidebar_label: Key Concepts In our pipeline all the main entities the project is made from are internally considered *'Assets'*. Episode, sequence, shot, character, prop, etc. All of these behave identically in the pipeline. Asset names need to be absolutely unique within the project because they are their key identifier. +OpenPype has limitation regarging duplicated names. Name of assets must be unique across whole project. + ### Subset Usually, an asset needs to be created in multiple *'flavours'*. A character might have multiple different looks, model needs to be published in different resolutions, a standard animation rig might not be usable in a crowd system and so on. 'Subsets' are here to accommodate all this variety that might be needed within a single asset. A model might have subset: *'main'*, *'proxy'*, *'sculpt'*, while data of *'look'* family could have subsets *'main'*, *'dirty'*, *'damaged'*. Subsets have some recommendations for their names, but ultimately it's up to the artist to use them for separation of publishes when needed. @@ -24,6 +26,11 @@ A numbered iteration of a given subset. Each version contains at least one [repr Each published variant can come out of the software in multiple representations. All of them hold exactly the same data, but in different formats. 
A model, for example, might be saved as `.OBJ`, Alembic, Maya geometry or as all of them, to be ready for pickup in any other applications supporting these formats. + +#### Naming convention + +At this moment names of assets, tasks, subsets or representations can contain only letters, numbers and underscore. + ### Family Each published [subset][3b89d8e0] can have exactly one family assigned to it. Family determines the type of data that the subset holds. Family doesn't dictate the file type, but can enforce certain technical specifications. For example OpenPype default configuration expects `model` family to only contain geometry without any shaders or joints when it is published. From 2591c1fad5355b824198389dae7c918061e67125 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 19 Aug 2022 21:46:36 +0200 Subject: [PATCH 0525/2550] add link to key concepts into system introduction --- website/docs/system_introduction.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/system_introduction.md b/website/docs/system_introduction.md index db297b527a..b8a2cea487 100644 --- a/website/docs/system_introduction.md +++ b/website/docs/system_introduction.md @@ -17,7 +17,7 @@ various usage scenarios. You can find detailed breakdown of technical requirements [here](dev_requirements), but in general OpenPype should be able to operate in most studios fairly quickly. The main obstacles are usually related to workflows and habits, that -might now be fully compatible with what OpenPype is expecting or enforcing. +might now be fully compatible with what OpenPype is expecting or enforcing. It is recommended to go through artists [key concepts](artist_concepts) to get idea about basics. Keep in mind that if you run into any workflows that are not supported, it's usually just because we haven't hit that particular case and it can most likely be added upon request. 
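The naming limitation added to the key-concepts page above (letters, numbers and underscore only) can be illustrated with a small check. This is only a minimal sketch; the regex and helper name are assumptions and are not part of the OpenPype codebase:

```python
import re

# Assumed pattern for the documented rule: letters, numbers and underscore only.
ENTITY_NAME_PATTERN = re.compile(r"^[a-zA-Z0-9_]+$")


def is_valid_entity_name(name):
    """Return True if an asset/task/subset/representation name is allowed."""
    return bool(ENTITY_NAME_PATTERN.match(name))


print(is_valid_entity_name("sh010_main"))   # True
print(is_valid_entity_name("sh010-main"))   # False: dash is not allowed
```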
From 2b4e3ef9fad40d9573b3dd37b73d68dba0ad1d3e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 20 Aug 2022 03:56:03 +0000 Subject: [PATCH 0526/2550] [Automated] Bump version --- CHANGELOG.md | 16 ++++++++++++++-- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e19993ad75..65a3cb27e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,20 @@ # Changelog +## [3.14.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD) + +**🚀 Enhancements** + +- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) + +**🐛 Bug fixes** + +- RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) + ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...3.14.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) **🆕 New features** @@ -25,7 +37,6 @@ - General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) - Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) - Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) -- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) **🔀 Refactored code** @@ -70,6 +81,7 @@ - Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) - General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) - Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) - AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) - Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) - TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) diff --git a/openpype/version.py b/openpype/version.py index c28b480940..174aca1e6c 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.0" +__version__ = "3.14.1-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index e670d0a2ff..e01cc71201 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.0" # OpenPype +version = "3.14.1-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 3beca31c61317c068e9d667c8b0817265c3e26f3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 22 Aug 2022 10:57:54 +0200 Subject: [PATCH 0527/2550] OP-3723 - introduced max_downscale_size value to Settings Studios might want to set maximum size to resize for ffmpeg to work based on OS or resources. 
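The patch below wires the new value into the Photoshop ExtractReview plugin and its settings schema. A minimal standalone sketch of the kind of downscale guard being made configurable, assuming placeholder file paths and a hard-coded value where the plugin would read its settings:

```python
from PIL import Image

# Assumed to mirror the configurable 'max_downscale_size' plugin setting.
max_downscale_size = 8192

with Image.open("source_image.png") as img:  # placeholder input path
    width, height = img.size
    if width > max_downscale_size or height > max_downscale_size:
        # 'thumbnail' resizes in place and keeps the aspect ratio
        img.thumbnail(
            (max_downscale_size, max_downscale_size), Image.ANTIALIAS
        )
        img.save("source_image_resized.png")  # placeholder output path
```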
--- .../hosts/photoshop/plugins/publish/extract_review.py | 8 ++++---- .../settings/defaults/project_settings/photoshop.json | 1 + .../projects_schema/schema_project_photoshop.json | 9 +++++++++ 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 60ae575b0a..5d37c86ed8 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -30,6 +30,7 @@ class ExtractReview(openpype.api.Extractor): jpg_options = None mov_options = None make_image_sequence = None + max_downscale_size = 8192 def process(self, instance): staging_dir = self.staging_dir(instance) @@ -143,14 +144,12 @@ class ExtractReview(openpype.api.Extractor): Ffmpeg has max size 16384x16384. Saved image(s) must be resized to be used as a source for thumbnail or review mov. """ - # 16384x16384 actually didn't work because int overflow - max_ffmpeg_size = 8192 Image.MAX_IMAGE_PIXELS = None first_url = os.path.join(staging_dir, processed_img_names[0]) with Image.open(first_url) as im: width, height = im.size - if width > max_ffmpeg_size or height > max_ffmpeg_size: + if width > self.max_downscale_size or height > self.max_downscale_size: resized_dir = os.path.join(staging_dir, "resized") os.mkdir(resized_dir) source_files_pattern = os.path.join(resized_dir, @@ -159,7 +158,8 @@ class ExtractReview(openpype.api.Extractor): source_url = os.path.join(staging_dir, file_name) with Image.open(source_url) as res_img: # 'thumbnail' automatically keeps aspect ratio - res_img.thumbnail((max_ffmpeg_size, max_ffmpeg_size), + res_img.thumbnail((self.max_downscale_size, + self.max_downscale_size), Image.ANTIALIAS) res_img.save(os.path.join(resized_dir, file_name)) diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index d9b7a8083f..758ac64a35 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -32,6 +32,7 @@ }, "ExtractReview": { "make_image_sequence": false, + "max_downscale_size": 8192, "jpg_options": { "tags": [] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index badf94229b..49860301b6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -186,6 +186,15 @@ "key": "make_image_sequence", "label": "Makes an image sequence instead of a flatten image" }, + { + "type": "number", + "key": "max_downscale_size", + "label": "Maximum size of sources for review", + "tooltip": "FFMpeg can only handle limited resolution for creation of review and/or thumbnail", + "minimum": 300, + "maximum": 16384, + "decimal": 0 + }, { "type": "dict", "collapsible": false, From f4106714aeabd13eec6d8085cb3f3be39feb4f00 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 11:27:19 +0200 Subject: [PATCH 0528/2550] added dialogs for 2 other cases --- openpype/tools/settings/settings/dialogs.py | 64 +++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/openpype/tools/settings/settings/dialogs.py b/openpype/tools/settings/settings/dialogs.py index dea056b89d..a3eed68ae3 100644 --- 
a/openpype/tools/settings/settings/dialogs.py +++ b/openpype/tools/settings/settings/dialogs.py @@ -113,3 +113,67 @@ class SettingsUIOpenedElsewhere(BaseInfoDialog): take_control_btn, view_mode_btn ] + + +class SettingsLastSavedChanged(BaseInfoDialog): + width = 500 + height = 300 + + def __init__(self, info_obj, parent=None): + title = "Settings has changed" + message = ( + "Settings has changed while you had opened this settings session." + "
\n\nIt is recommended to refresh settings" + " and re-apply changes in the new session." + ) + super(SettingsLastSavedChanged, self).__init__( + message, title, info_obj, parent + ) + + def _on_save(self): + self._result = 1 + self.close() + + def _on_close(self): + self._result = 0 + self.close() + + def get_buttons(self, parent): + close_btn = QtWidgets.QPushButton( + "Close", parent + ) + save_btn = QtWidgets.QPushButton( + "Save anyway", parent + ) + + close_btn.clicked.connect(self._on_close) + save_btn.clicked.connect(self._on_save) + + return [ + close_btn, + save_btn + ] + + +class SettingsControlTaken(BaseInfoDialog): + width = 500 + height = 300 + + def __init__(self, info_obj, parent=None): + title = "Settings control taken" + message = ( + "Someone took control over your settings." + "\n\n
    It is not possible to save changes of currently" + " opened session. Copy changes you want to keep and hit refresh." + ) + super(SettingsControlTaken, self).__init__( + message, title, info_obj, parent + ) + + def _on_confirm(self): + self.close() + + def get_buttons(self, parent): + confirm_btn = QtWidgets.QPushButton("Understand", parent) + confirm_btn.clicked.connect(self._on_confirm) + return [confirm_btn] From 537bfaa8d683213f03a78b962d1767698947c390 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 11:27:45 +0200 Subject: [PATCH 0529/2550] removed print --- openpype/tools/settings/settings/window.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index 2750785535..a907a034d1 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -87,7 +87,6 @@ class SettingsController: ) def update_last_opened_info(self): - print("update_last_opened_info") last_opened_info = get_last_opened_info() enabled = False if ( From 80b5c0c064c47d4bab6e870759a2fc4caf54cfd9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 11:28:00 +0200 Subject: [PATCH 0530/2550] categories have checks related to last saved settings --- .../tools/settings/settings/categories.py | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index fd95b4ca71..f4b2c13a12 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -36,6 +36,11 @@ from openpype.settings.entities.op_version_entity import ( ) from openpype.settings import SaveWarningExc +from openpype.settings.lib import ( + get_system_last_saved_info, + get_project_last_saved_info, +) +from .dialogs import SettingsLastSavedChanged, SettingsControlTaken from .widgets import ( ProjectListWidget, VersionAction @@ -205,6 +210,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): if enabled is self._edit_mode: return + was_false = self._edit_mode is False self._edit_mode = enabled self.save_btn.setEnabled(enabled and not self._reset_crashed) @@ -218,6 +224,10 @@ class SettingsCategoryWidget(QtWidgets.QWidget): self.save_btn.setToolTip(tooltip) + # Reset when last saved information has changed + if was_false and not self._check_last_saved_info(): + self.reset() + @property def state(self): return self._state @@ -748,7 +758,24 @@ class SettingsCategoryWidget(QtWidgets.QWidget): """Callback on any tab widget save.""" return + def _check_last_saved_info(self): + raise NotImplementedError(( + "{} does not have implemented '_check_last_saved_info'" + ).format(self.__class__.__name__)) + def _save(self): + self._controller.update_last_opened_info() + if not self._controller.opened_info: + dialog = SettingsControlTaken(self._last_saved_info, self) + dialog.exec_() + return + + if not self._check_last_saved_info(): + dialog = SettingsLastSavedChanged(self._last_saved_info, self) + dialog.exec_() + if dialog.result() == 0: + return + # Don't trigger restart if defaults are modified if self.is_modifying_defaults: require_restart = False @@ -807,6 +834,13 @@ class SystemWidget(SettingsCategoryWidget): self._actions = [] super(SystemWidget, self).__init__(*args, **kwargs) + def _check_last_saved_info(self): + if self.is_modifying_defaults: + return True + + last_saved_info = get_system_last_saved_info() + return self._last_saved_info == last_saved_info 
+ def contain_category_key(self, category): if category == "system_settings": return True @@ -821,6 +855,10 @@ class SystemWidget(SettingsCategoryWidget): ) entity.on_change_callbacks.append(self._on_entity_change) self.entity = entity + last_saved_info = None + if not self.is_modifying_defaults: + last_saved_info = get_system_last_saved_info() + self._last_saved_info = last_saved_info try: if self.is_modifying_defaults: entity.set_defaults_state() @@ -854,6 +892,13 @@ class ProjectWidget(SettingsCategoryWidget): def __init__(self, *args, **kwargs): super(ProjectWidget, self).__init__(*args, **kwargs) + def _check_last_saved_info(self): + if self.is_modifying_defaults: + return True + + last_saved_info = get_project_last_saved_info(self.project_name) + return self._last_saved_info == last_saved_info + def contain_category_key(self, category): if category in ("project_settings", "project_anatomy"): return True @@ -933,6 +978,11 @@ class ProjectWidget(SettingsCategoryWidget): entity.on_change_callbacks.append(self._on_entity_change) self.project_list_widget.set_entity(entity) self.entity = entity + + last_saved_info = None + if not self.is_modifying_defaults: + last_saved_info = get_project_last_saved_info(self.project_name) + self._last_saved_info = last_saved_info try: if self.is_modifying_defaults: self.entity.set_defaults_state() From 7cd6a423ead42bae21a2fa2e491bf32870402fe8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 11:55:04 +0200 Subject: [PATCH 0531/2550] fix attribute name --- openpype/settings/handlers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 1b59531943..09f36aa16e 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -784,9 +784,9 @@ class MongoSettingsHandler(SettingsHandler): "last_saved_info": last_saved_info.to_document_data() } if not system_settings_doc: - self.collections.insert_one(new_system_settings_doc) + self.collection.insert_one(new_system_settings_doc) else: - self.collections.update_one( + self.collection.update_one( {"_id": system_settings_doc["_id"]}, {"$set": new_system_settings_doc} ) From c3fe68c58a40fcca6f998b1d17e67aa690404349 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 12:02:21 +0200 Subject: [PATCH 0532/2550] converted unreal to openpype module --- openpype/hosts/unreal/__init__.py | 26 ++++----------------- openpype/hosts/unreal/module.py | 39 +++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 22 deletions(-) create mode 100644 openpype/hosts/unreal/module.py diff --git a/openpype/hosts/unreal/__init__.py b/openpype/hosts/unreal/__init__.py index 10e9c5100e..41222f4f94 100644 --- a/openpype/hosts/unreal/__init__.py +++ b/openpype/hosts/unreal/__init__.py @@ -1,24 +1,6 @@ -import os -import openpype.hosts -from openpype.lib.applications import Application +from .module import UnrealModule -def add_implementation_envs(env: dict, _app: Application) -> None: - """Modify environments to contain all required for implementation.""" - # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation - - ue_plugin = "UE_5.0" if _app.name[:1] == "5" else "UE_4.7" - unreal_plugin_path = os.path.join( - os.path.dirname(os.path.abspath(openpype.hosts.__file__)), - "unreal", "integration", ue_plugin - ) - if not env.get("OPENPYPE_UNREAL_PLUGIN"): - env["OPENPYPE_UNREAL_PLUGIN"] = unreal_plugin_path - - # Set default environments if are not set via settings - defaults = { - 
"OPENPYPE_LOG_NO_COLORS": "True" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "UnrealModule", +) diff --git a/openpype/hosts/unreal/module.py b/openpype/hosts/unreal/module.py new file mode 100644 index 0000000000..a30c9e9e36 --- /dev/null +++ b/openpype/hosts/unreal/module.py @@ -0,0 +1,39 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class UnrealModule(OpenPypeModule, IHostModule): + name = "unreal" + host_name = "unreal" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, app) -> None: + """Modify environments to contain all required for implementation.""" + # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation + + ue_plugin = "UE_5.0" if app.name[:1] == "5" else "UE_4.7" + unreal_plugin_path = os.path.join( + UNREAL_ROOT_DIR, "integration", ue_plugin + ) + if not env.get("OPENPYPE_UNREAL_PLUGIN"): + env["OPENPYPE_UNREAL_PLUGIN"] = unreal_plugin_path + + # Set default environments if are not set via settings + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(UNREAL_ROOT_DIR, "hooks") + ] From 44b9146e4fa43188a3065090797a05b319d58e59 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 12:16:50 +0200 Subject: [PATCH 0533/2550] use Unreal host as class instead of module --- openpype/hosts/unreal/api/__init__.py | 4 ++- openpype/hosts/unreal/api/pipeline.py | 27 +++++++++++++++++++ .../UE_4.7/Content/Python/init_unreal.py | 4 ++- .../UE_5.0/Content/Python/init_unreal.py | 4 ++- 4 files changed, 36 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/unreal/api/__init__.py b/openpype/hosts/unreal/api/__init__.py index ede71aa218..870982f5f9 100644 --- a/openpype/hosts/unreal/api/__init__.py +++ b/openpype/hosts/unreal/api/__init__.py @@ -19,6 +19,7 @@ from .pipeline import ( show_tools_dialog, show_tools_popup, instantiate, + UnrealHost, ) __all__ = [ @@ -36,5 +37,6 @@ __all__ = [ "show_experimental_tools", "show_tools_dialog", "show_tools_popup", - "instantiate" + "instantiate", + "UnrealHost", ] diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index bbca7916d3..ee4282e357 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -14,6 +14,7 @@ from openpype.pipeline import ( ) from openpype.tools.utils import host_tools import openpype.hosts.unreal +from openpypr.host import HostBase, ILoadHost import unreal # noqa @@ -29,6 +30,32 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +class UnrealHost(HostBase, ILoadHost): + """Unreal host implementation. + + For some time this class will re-use functions from module based + implementation for backwards compatibility of older unreal projects. 
+ """ + + name = "unreal" + + def install(self): + install() + + def get_containers(self): + return ls() + + def show_tools_popup(self): + """Show tools popup with actions leading to show other tools.""" + + show_tools_popup() + + def show_tools_dialog(self): + """Show tools dialog with actions leading to show other tools.""" + + show_tools_dialog() + + def install(): """Install Unreal configuration for OpenPype.""" print("-=" * 40) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py b/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py index 4bb03b07ed..b85f970699 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py +++ b/openpype/hosts/unreal/integration/UE_4.7/Content/Python/init_unreal.py @@ -3,7 +3,9 @@ import unreal openpype_detected = True try: from openpype.pipeline import install_host - from openpype.hosts.unreal import api as openpype_host + from openpype.hosts.unreal.api import UnrealHost + + openpype_host = UnrealHost() except ImportError as exc: openpype_host = None openpype_detected = False diff --git a/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py b/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py index 4bb03b07ed..b85f970699 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py +++ b/openpype/hosts/unreal/integration/UE_5.0/Content/Python/init_unreal.py @@ -3,7 +3,9 @@ import unreal openpype_detected = True try: from openpype.pipeline import install_host - from openpype.hosts.unreal import api as openpype_host + from openpype.hosts.unreal.api import UnrealHost + + openpype_host = UnrealHost() except ImportError as exc: openpype_host = None openpype_detected = False From d98bc3509057f2c59a278656ba6360a10825b57d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 13:22:38 +0200 Subject: [PATCH 0534/2550] implemented 'get_workfile_extensions' in unreal module --- openpype/hosts/unreal/module.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/unreal/module.py b/openpype/hosts/unreal/module.py index a30c9e9e36..aa08c8c130 100644 --- a/openpype/hosts/unreal/module.py +++ b/openpype/hosts/unreal/module.py @@ -37,3 +37,6 @@ class UnrealModule(OpenPypeModule, IHostModule): return [ os.path.join(UNREAL_ROOT_DIR, "hooks") ] + + def get_workfile_extensions(self): + return [".uproject"] From 969241426ad6d53e916b2c8d140742b9bb80f635 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 14:19:17 +0200 Subject: [PATCH 0535/2550] moved and modified 'compute_session_changes' into context tools --- openpype/pipeline/context_tools.py | 57 ++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 5f763cd249..66bf33e821 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -445,3 +445,60 @@ def get_custom_workfile_template_from_session( session["AVALON_APP"], project_settings=project_settings ) + + +def compute_session_changes( + session, asset_doc, task_name, template_key=None +): + """Compute the changes for a session object on task under asset. + + Function does not change the session object, only returns changes. + + Args: + session (Dict[str, str]): The initial session to compute changes to. + This is required for computing the full Work Directory, as that + also depends on the values that haven't changed. 
+ asset_doc (Dict[str, Any]): Asset document to switch to. + task_name (str): Name of task to switch to. + template_key (Union[str, None]): Prepare workfile template key in + anatomy templates. + + Returns: + Dict[str, str]: Changes in the Session dictionary. + """ + + changes = {} + + # Get asset document and asset + if not asset_doc: + task_name = None + asset_name = None + else: + asset_name = asset_doc["name"] + + # Detect any changes compared session + mapping = { + "AVALON_ASSET": asset_name, + "AVALON_TASK": task_name, + } + changes = { + key: value + for key, value in mapping.items() + if value != session.get(key) + } + if not changes: + return changes + + # Compute work directory (with the temporary changed session so far) + changed_session = session.copy() + changed_session.update(changes) + + workdir = None + if asset_doc: + workdir = get_workdir_from_session( + changed_session, template_key + ) + + changes["AVALON_WORKDIR"] = workdir + + return changes From 097546e429fa7c7afdb4abfb19ef8458979173c6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 14:19:53 +0200 Subject: [PATCH 0536/2550] moved 'update_current_task' to context tools and renamed to 'change_current_context' --- openpype/pipeline/context_tools.py | 44 ++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 66bf33e821..00fe353208 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -16,6 +16,7 @@ from openpype.client import ( get_asset_by_name, version_is_latest, ) +from openpype.lib.events import emit_event from openpype.modules import load_modules, ModulesManager from openpype.settings import get_project_settings @@ -502,3 +503,46 @@ def compute_session_changes( changes["AVALON_WORKDIR"] = workdir return changes + + +def change_current_context(asset_doc, task_name, template_key=None): + """Update active Session to a new task work area. + + This updates the live Session to a different task under asset. + + Args: + asset_doc (Dict[str, Any]): The asset document to set. + task_name (str): The task to set under asset. + template_key (Union[str, None]): Prepared template key to be used for + workfile template in Anatomy. + + Returns: + Dict[str, str]: The changed key, values in the current Session. + """ + + changes = compute_session_changes( + legacy_io.Session, + asset_doc, + task_name, + template_key=template_key + ) + + # Update the Session and environments. Pop from environments all keys with + # value set to None. 
+ for key, value in changes.items(): + legacy_io.Session[key] = value + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value + + data = changes.copy() + # Convert env keys to human readable keys + data["project_name"] = legacy_io.Session["AVALON_PROJECT"] + data["asset_name"] = legacy_io.Session["AVALON_ASSET"] + data["task_name"] = legacy_io.Session["AVALON_TASK"] + + # Emit session change + emit_event("taskChanged", data) + + return changes From 6257fcb7774857e23abcabea6261193c8caf35af Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 14:34:43 +0200 Subject: [PATCH 0537/2550] marked 'compute_session_changes' and 'update_current_task' as deprecated in openpype.lib --- openpype/lib/avalon_context.py | 98 ++++++++++------------------------ 1 file changed, 27 insertions(+), 71 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index eed17fce9d..31fdf4c596 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -7,6 +7,8 @@ import logging import functools import warnings +import six + from openpype.client import ( get_project, get_assets, @@ -526,7 +528,7 @@ def template_data_from_session(session=None): return get_template_data_from_session(session) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.compute_session_changes") def compute_session_changes( session, task=None, asset=None, app=None, template_key=None ): @@ -547,54 +549,24 @@ def compute_session_changes( Returns: dict: The required changes in the Session dictionary. + + Deprecated: + Function will be removed after release version 3.16.* """ - from openpype.pipeline.context_tools import get_workdir_from_session + from openpype.pipeline import legacy_io + from openpype.pipeline.context_tools import compute_session_changes - changes = dict() + if isinstance(asset, six.string_types): + project_name = legacy_io.active_project() + asset = get_asset_by_name(project_name, asset) - # If no changes, return directly - if not any([task, asset, app]): - return changes - - # Get asset document and asset - asset_document = None - asset_tasks = None - if isinstance(asset, dict): - # Assume asset database document - asset_document = asset - asset_tasks = asset_document.get("data", {}).get("tasks") - asset = asset["name"] - - if not asset_document or not asset_tasks: - # Assume asset name - project_name = session["AVALON_PROJECT"] - asset_document = get_asset_by_name( - project_name, asset, fields=["data.tasks"] - ) - assert asset_document, "Asset must exist" - - # Detect any changes compared session - mapping = { - "AVALON_ASSET": asset, - "AVALON_TASK": task, - "AVALON_APP": app, - } - changes = { - key: value - for key, value in mapping.items() - if value and value != session.get(key) - } - if not changes: - return changes - - # Compute work directory (with the temporary changed session so far) - _session = session.copy() - _session.update(changes) - - changes["AVALON_WORKDIR"] = get_workdir_from_session(_session) - - return changes + return compute_session_changes( + session, + asset, + task, + template_key + ) @deprecated("openpype.pipeline.context_tools.get_workdir_from_session") @@ -604,7 +576,7 @@ def get_workdir_from_session(session=None, template_key=None): return get_workdir_from_session(session, template_key) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.change_current_context") def update_current_task(task=None, asset=None, app=None, template_key=None): """Update active Session to a new task work 
area. @@ -617,35 +589,19 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): Returns: dict: The changed key, values in the current Session. + + Deprecated: + Function will be removed after release version 3.16.* """ - changes = compute_session_changes( - legacy_io.Session, - task=task, - asset=asset, - app=app, - template_key=template_key - ) + from openpype.pipeline import legacy_io + from openpype.pipeline.context_tools import change_current_context - # Update the Session and environments. Pop from environments all keys with - # value set to None. - for key, value in changes.items(): - legacy_io.Session[key] = value - if value is None: - os.environ.pop(key, None) - else: - os.environ[key] = value + project_name = legacy_io.acitve_project() + if isinstance(asset, six.string_types): + asset = get_asset_by_name(project_name, asset) - data = changes.copy() - # Convert env keys to human readable keys - data["project_name"] = legacy_io.Session["AVALON_PROJECT"] - data["asset_name"] = legacy_io.Session["AVALON_ASSET"] - data["task_name"] = legacy_io.Session["AVALON_TASK"] - - # Emit session change - emit_event("taskChanged", data) - - return changes + return change_current_context(asset, task, template_key) @deprecated("openpype.client.get_workfile_info") From 38d2233b3f03ba8f151ecd74449f352ef41b8fea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 14:37:14 +0200 Subject: [PATCH 0538/2550] added or modified removal version in Deprecated category of deprecated functions --- openpype/lib/avalon_context.py | 69 ++++++++++++++++++++++++++++------ 1 file changed, 58 insertions(+), 11 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 31fdf4c596..ca8a04b9d0 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -180,7 +180,7 @@ def is_latest(representation): bool: Whether the representation is of latest version. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.context_tools import is_representation_from_latest @@ -193,7 +193,7 @@ def any_outdated(): """Return whether the current scene has any outdated content. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.load import any_outdated_containers @@ -214,7 +214,7 @@ def get_asset(asset_name=None): (MongoDB document) Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.context_tools import get_current_project_asset @@ -226,7 +226,7 @@ def get_system_general_anatomy_data(system_settings=None): """ Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.template_data import get_general_template_data @@ -298,7 +298,7 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): dict: Last version document for entered. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ if not project_name: @@ -346,6 +346,9 @@ def get_workfile_template_key_from_context( Raises: ValueError: When both 'dbcon' and 'project_name' were not passed.
+ + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import ( @@ -389,6 +392,9 @@ def get_workfile_template_key( Raises: ValueError: When both 'project_name' and 'project_settings' were not passed. + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import get_workfile_template_key @@ -413,7 +419,7 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): dict: Data prepared for filling workdir template. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.template_data import get_template_data @@ -449,6 +455,9 @@ def get_workdir_with_workdir_data( Raises: ValueError: When both `anatomy` and `project_name` are set to None. + + Deprecated: + Function will be removed after release version 3.15.* """ if not anatomy and not project_name: @@ -494,6 +503,9 @@ def get_workdir( Returns: TemplateResult: Workdir path. + + Deprecated: + Function will be removed after release version 3.15.* """ from openpype.pipeline.workfile import get_workdir @@ -520,7 +532,7 @@ def template_data_from_session(session=None): dict: All available data from session. Deprecated: - Function will be removed after release version 3.14.* + Function will be removed after release version 3.15.* """ from openpype.pipeline.context_tools import get_template_data_from_session @@ -571,6 +583,21 @@ def compute_session_changes( @deprecated("openpype.pipeline.context_tools.get_workdir_from_session") def get_workdir_from_session(session=None, template_key=None): + """Calculate workdir path based on session data. + + Args: + session (Union[None, Dict[str, str]]): Session to use. If not passed + current context session is used (from legacy_io). + template_key (Union[str, None]): Precalculate template key to define + workfile template name in Anatomy. + + Returns: + str: Workdir path. + + Deprecated: + Function will be removed after release version 3.16.* + """ + from openpype.pipeline.context_tools import get_workdir_from_session return get_workdir_from_session(session, template_key) @@ -620,6 +647,9 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): Returns: dict: Workfile document or None. + + Deprecated: + Function will be removed after release version 3.15.* """ # Use legacy_io if dbcon is not entered @@ -730,6 +760,11 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): @deprecated("openpype.pipeline.workfile.BuildWorkfile") def BuildWorkfile(): + """Build workfile class was moved to workfile pipeline. + + Deprecated: + Function will be removed after release version 3.16.* + """ from openpype.pipeline.workfile import BuildWorkfile return BuildWorkfile() @@ -772,10 +807,7 @@ def change_timer_to_current_context(): Deprecated: This method is specific for TimersManager module so please use the functionality from there. Function will be removed after release - version 3.14.* - - TODO: - - use TimersManager's static method instead of reimplementing it here + version 3.15.* """ from openpype.pipeline import legacy_io @@ -890,6 +922,9 @@ def get_custom_workfile_template_by_context( Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) 
+ + Deprecated: + Function will be removed after release version 3.16.* """ if anatomy is None: @@ -948,6 +983,9 @@ def get_custom_workfile_template_by_string_context( Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) + + Deprecated: + Function will be removed after release version 3.16.* """ project_name = None @@ -982,6 +1020,9 @@ def get_custom_workfile_template(template_profiles): Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline import legacy_io @@ -1010,6 +1051,9 @@ def get_last_workfile_with_version( Returns: tuple: Last workfile with version if there is any otherwise returns (None, None). + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import get_last_workfile_with_version @@ -1036,6 +1080,9 @@ def get_last_workfile( Returns: str: Last or first workfile as filename of full path to filename. + + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.pipeline.workfile import get_last_workfile From d74eb5961ee67a8b304e4fc616195b24021f9b96 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 14:42:45 +0200 Subject: [PATCH 0539/2550] fix typo --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index ca8a04b9d0..0f4f04e4d3 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -624,7 +624,7 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): from openpype.pipeline import legacy_io from openpype.pipeline.context_tools import change_current_context - project_name = legacy_io.acitve_project() + project_name = legacy_io.active_project() if isinstance(asset, six.string_types): asset = get_asset_by_name(project_name, asset) From 867f91d9f4fff0e57ec185e6dfafeb311de9fe08 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 14:42:52 +0200 Subject: [PATCH 0540/2550] removed unused import --- openpype/lib/avalon_context.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 0f4f04e4d3..f08adb5470 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -17,7 +17,6 @@ from openpype.client import ( get_workfile_info, ) from .profiles_filtering import filter_profiles -from .events import emit_event from .path_templates import StringTemplate legacy_io = None From 2fea675167c6f1a2441d014171918fd00c56288f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 14:58:52 +0200 Subject: [PATCH 0541/2550] use new functions in workfiles tool --- openpype/tools/workfiles/files_widget.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index a4109c511e..a5d5b14bb6 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -14,15 +14,15 @@ from openpype.lib import ( emit_event, create_workdir_extra_folders, ) -from openpype.lib.avalon_context import ( - update_current_task, - compute_session_changes -) from openpype.pipeline import ( registered_host, legacy_io, Anatomy, ) +from 
openpype.pipeline.context_tools import ( + compute_session_changes, + change_current_context +) from openpype.pipeline.workfile import get_workfile_template_key from .model import ( @@ -408,8 +408,8 @@ class FilesWidget(QtWidgets.QWidget): ) changes = compute_session_changes( session, - asset=self._get_asset_doc(), - task=self._task_name, + self._get_asset_doc(), + self._task_name, template_key=self.template_key ) session.update(changes) @@ -422,8 +422,8 @@ class FilesWidget(QtWidgets.QWidget): session = legacy_io.Session.copy() changes = compute_session_changes( session, - asset=self._get_asset_doc(), - task=self._task_name, + self._get_asset_doc(), + self._task_name, template_key=self.template_key ) if not changes: @@ -431,9 +431,9 @@ class FilesWidget(QtWidgets.QWidget): # to avoid any unwanted Task Changed callbacks to be triggered. return - update_current_task( - asset=self._get_asset_doc(), - task=self._task_name, + change_current_context( + self._get_asset_doc(), + self._task_name, template_key=self.template_key ) From 9b60b9faa89c8551c88977c42ad9f404869c7680 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 15:19:26 +0200 Subject: [PATCH 0542/2550] change title if in view mode --- openpype/tools/settings/settings/window.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index a907a034d1..77a2f64dac 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -105,6 +105,7 @@ class MainWidget(QtWidgets.QWidget): widget_width = 1000 widget_height = 600 + window_title = "OpenPype Settings" def __init__(self, user_role, parent=None, reset_on_show=True): super(MainWidget, self).__init__(parent) @@ -122,7 +123,7 @@ class MainWidget(QtWidgets.QWidget): self._password_dialog = None self.setObjectName("SettingsMainWidget") - self.setWindowTitle("OpenPype Settings") + self.setWindowTitle(self.window_title) self.resize(self.widget_width, self.widget_height) @@ -155,6 +156,11 @@ class MainWidget(QtWidgets.QWidget): self._shadow_widget = ShadowWidget("Working...", self) self._shadow_widget.setVisible(False) + controller.event_system.add_callback( + "edit.mode.changed", + self._edit_mode_changed + ) + header_tab_widget.currentChanged.connect(self._on_tab_changed) search_dialog.path_clicked.connect(self._on_search_path_clicked) @@ -301,6 +307,12 @@ class MainWidget(QtWidgets.QWidget): entity = widget.entity self._search_dialog.set_root_entity(entity) + def _edit_mode_changed(self, event): + title = self.window_title + if not event["edit_mode"]: + title += " [View only]" + self.setWindowTitle(title) + def _on_tab_changed(self): self._update_search_dialog() From 1b64160644a1af0a8fd4349814a1c5e3fe496b85 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 15:21:48 +0200 Subject: [PATCH 0543/2550] use pretty time instead of timestamp --- openpype/tools/settings/settings/dialogs.py | 25 ++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/dialogs.py b/openpype/tools/settings/settings/dialogs.py index a3eed68ae3..f25374a48c 100644 --- a/openpype/tools/settings/settings/dialogs.py +++ b/openpype/tools/settings/settings/dialogs.py @@ -1,5 +1,7 @@ from Qt import QtWidgets, QtCore +from openpype.tools.utils.delegates import pretty_date + class BaseInfoDialog(QtWidgets.QDialog): width = 600 @@ -34,13 +36,17 @@ class BaseInfoDialog(QtWidgets.QDialog): 
("Host IP", info_obj.hostip), ("System name", info_obj.system_name), ("Local ID", info_obj.local_id), - ("Time Stamp", info_obj.timestamp), ): other_information_layout.addRow( label, QtWidgets.QLabel(value, other_information) ) + timestamp_label = QtWidgets.QLabel( + pretty_date(info_obj.timestamp_obj), other_information + ) + other_information_layout.addRow("Time", timestamp_label) + footer_widget = QtWidgets.QWidget(self) buttons_widget = QtWidgets.QWidget(footer_widget) @@ -64,10 +70,27 @@ class BaseInfoDialog(QtWidgets.QDialog): layout.addWidget(separator_widget_2, 0) layout.addWidget(footer_widget, 0) + timestamp_timer = QtCore.QTimer() + timestamp_timer.setInterval(1000) + timestamp_timer.timeout.connect(self._on_timestamp_timer) + + self._timestamp_label = timestamp_label + self._timestamp_timer = timestamp_timer + def showEvent(self, event): super(BaseInfoDialog, self).showEvent(event) + self._timestamp_timer.start() self.resize(self.width, self.height) + def closeEvent(self, event): + self._timestamp_timer.stop() + super(BaseInfoDialog, self).closeEvent(event) + + def _on_timestamp_timer(self): + self._timestamp_label.setText( + pretty_date(self._info_obj.timestamp_obj) + ) + def result(self): return self._result From 5df9c1b41f99fd2d587355c54b5c15eef53ce588 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 22 Aug 2022 16:48:32 +0200 Subject: [PATCH 0544/2550] Added default variant to workfile collectors for PS|AE Will only propagate in workfile subset (and final published name of workfile) if {variant} is used in subset name template. (By default it isn't.) --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 4 +++- openpype/hosts/photoshop/plugins/publish/collect_workfile.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 9cb6900b0a..fef5448a4c 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): label = "Collect After Effects Workfile Instance" order = pyblish.api.CollectorOrder + 0.1 + default_variant = "Main" + def process(self, context): existing_instance = None for instance in context: @@ -71,7 +73,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): family = "workfile" subset = get_subset_name_with_asset_doc( family, - "", + self.default_variant, context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index e4f0a07b34..6599f5c96e 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): label = "Collect Workfile" hosts = ["photoshop"] + default_variant = "Main" + def process(self, context): existing_instance = None for instance in context: @@ -22,7 +24,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): family = "workfile" subset = get_subset_name_with_asset_doc( family, - "", + self.default_variant, context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], From 9434cb43c1d480ea523093907ad0fbe2a4a24744 Mon Sep 17 00:00:00 2001 
From: Jakub Trllo Date: Mon, 22 Aug 2022 17:07:46 +0200 Subject: [PATCH 0545/2550] fix published workfile filtering --- openpype/tools/workfiles/model.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index d5b7cef339..9a7fd659a9 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -299,7 +299,6 @@ class PublishFilesModel(QtGui.QStandardItemModel): self.project_name, asset_ids=[self._asset_id], fields=["_id", "name"] - ) subset_ids = [subset_doc["_id"] for subset_doc in subset_docs] @@ -329,7 +328,9 @@ class PublishFilesModel(QtGui.QStandardItemModel): # extension extensions = [ext.replace(".", "") for ext in self._file_extensions] repre_docs = get_representations( - self.project_name, version_ids, extensions + self.project_name, + version_ids=version_ids, + context_filters={"ext": extensions} ) # Filter queried representations by task name if task is set From 45da6cf5d0fbdb31a91eef2115e04299860f5f48 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 22 Aug 2022 17:50:06 +0200 Subject: [PATCH 0546/2550] Added possibility to propagate collected variant context.data["variant"] might be filled only by collect_batch_data, which should take precedence --- openpype/hosts/photoshop/plugins/publish/collect_workfile.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 6599f5c96e..9cf6d5227e 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -22,9 +22,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin): break family = "workfile" + # context.data["variant"] might come only from collect_batch_data + variant = context.data.get("variant") or self.default_variant subset = get_subset_name_with_asset_doc( family, - self.default_variant, + variant, context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], From 20d08049672cd328f4a4ac093b01864ce61d270f Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 22 Aug 2022 17:50:59 +0200 Subject: [PATCH 0547/2550] Fix typo Co-authored-by: Simone Barbieri --- openpype/hosts/unreal/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index ee4282e357..d396b64072 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.tools.utils import host_tools import openpype.hosts.unreal -from openpypr.host import HostBase, ILoadHost +from openpype.host import HostBase, ILoadHost import unreal # noqa From 4a4712b02047ff61ea23a90b5348768458b4aa34 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 22 Aug 2022 17:55:57 +0200 Subject: [PATCH 0548/2550] Added default variant to new creator --- .../photoshop/plugins/create/workfile_creator.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index 43302329f1..ce0245d5c6 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ 
b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -11,6 +11,8 @@ class PSWorkfileCreator(AutoCreator): identifier = "workfile" family = "workfile" + default_variant = "Main" + def get_instance_attr_defs(self): return [] @@ -35,7 +37,6 @@ class PSWorkfileCreator(AutoCreator): existing_instance = instance break - variant = '' project_name = legacy_io.Session["AVALON_PROJECT"] asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] @@ -43,15 +44,17 @@ class PSWorkfileCreator(AutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name ) data = { "asset": asset_name, "task": task_name, - "variant": variant + "variant": self.default_variant } data.update(self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name )) new_instance = CreatedInstance( @@ -67,7 +70,8 @@ class PSWorkfileCreator(AutoCreator): ): asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name ) existing_instance["asset"] = asset_name existing_instance["task"] = task_name From c0457a88ea7e67f55818fd4b19db9eec7f887ec4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 22 Aug 2022 17:56:25 +0200 Subject: [PATCH 0549/2550] Added overwrite old subset name for different context --- openpype/hosts/photoshop/plugins/create/workfile_creator.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index ce0245d5c6..e79d16d154 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -75,3 +75,4 @@ class PSWorkfileCreator(AutoCreator): ) existing_instance["asset"] = asset_name existing_instance["task"] = task_name + existing_instance["subset"] = subset_name From be568a0e4140312712ddfe3c8b2b4df573ffd279 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 22 Aug 2022 18:03:17 +0200 Subject: [PATCH 0550/2550] Added default variant for workfile creator for AE --- .../plugins/create/workfile_creator.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py index badb3675fd..3b6dee3b83 100644 --- a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -11,6 +11,8 @@ class AEWorkfileCreator(AutoCreator): identifier = "workfile" family = "workfile" + default_variant = "Main" + def get_instance_attr_defs(self): return [] @@ -35,7 +37,6 @@ class AEWorkfileCreator(AutoCreator): existing_instance = instance break - variant = '' project_name = legacy_io.Session["AVALON_PROJECT"] asset_name = legacy_io.Session["AVALON_ASSET"] task_name = legacy_io.Session["AVALON_TASK"] @@ -44,15 +45,17 @@ class AEWorkfileCreator(AutoCreator): if existing_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + 
self.default_variant, task_name, asset_doc, + project_name, host_name ) data = { "asset": asset_name, "task": task_name, - "variant": variant + "variant": self.default_variant } data.update(self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name )) new_instance = CreatedInstance( @@ -69,7 +72,8 @@ class AEWorkfileCreator(AutoCreator): ): asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name + self.default_variant, task_name, asset_doc, + project_name, host_name ) existing_instance["asset"] = asset_name existing_instance["task"] = task_name From 0e7c183c1d74380c5cc3b0ac843b5e5d82bf60d8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 22 Aug 2022 18:03:57 +0200 Subject: [PATCH 0551/2550] Added overwrite subset for different context in AE --- openpype/hosts/aftereffects/plugins/create/workfile_creator.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py index 3b6dee3b83..f82d15b3c9 100644 --- a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -77,3 +77,4 @@ class AEWorkfileCreator(AutoCreator): ) existing_instance["asset"] = asset_name existing_instance["task"] = task_name + existing_instance["subset"] = subset_name From d91274bb98e470d501837bcd8f3feacc823a25ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:07:38 +0200 Subject: [PATCH 0552/2550] moved traypublish action into traypublisher host --- openpype/hosts/traypublisher/__init__.py | 6 +++++ .../traypublisher/module.py} | 25 ++++++++++--------- 2 files changed, 19 insertions(+), 12 deletions(-) create mode 100644 openpype/hosts/traypublisher/__init__.py rename openpype/{modules/traypublish_action.py => hosts/traypublisher/module.py} (70%) diff --git a/openpype/hosts/traypublisher/__init__.py b/openpype/hosts/traypublisher/__init__.py new file mode 100644 index 0000000000..4eb7bf3eef --- /dev/null +++ b/openpype/hosts/traypublisher/__init__.py @@ -0,0 +1,6 @@ +from .module import TrayPublishModule + + +__all__ = ( + "TrayPublishModule", +) diff --git a/openpype/modules/traypublish_action.py b/openpype/hosts/traypublisher/module.py similarity index 70% rename from openpype/modules/traypublish_action.py rename to openpype/hosts/traypublisher/module.py index 39163b8eb8..25012900bc 100644 --- a/openpype/modules/traypublish_action.py +++ b/openpype/hosts/traypublisher/module.py @@ -1,25 +1,24 @@ import os + +import click + from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules.interfaces import ITrayAction, IHostModule + +TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class TrayPublishAction(OpenPypeModule, ITrayAction): +class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction): label = "New Publish (beta)" name = "traypublish_tool" + host_name = "traypublish" def initialize(self, modules_settings): - import openpype self.enabled = True self.publish_paths = [ - os.path.join( - openpype.PACKAGE_DIR, - "hosts", - "traypublisher", - "plugins", - "publish" - ) + os.path.join(TRAYPUBLISH_ROOT_DIR, "plugins", "publish") ] 
self._experimental_tools = None @@ -29,7 +28,7 @@ class TrayPublishAction(OpenPypeModule, ITrayAction): self._experimental_tools = ExperimentalTools() def tray_menu(self, *args, **kwargs): - super(TrayPublishAction, self).tray_menu(*args, **kwargs) + super(TrayPublishModule, self).tray_menu(*args, **kwargs) traypublisher = self._experimental_tools.get("traypublisher") visible = False if traypublisher and traypublisher.enabled: @@ -45,5 +44,7 @@ class TrayPublishAction(OpenPypeModule, ITrayAction): self.publish_paths.extend(publish_paths) def run_traypublisher(self): - args = get_openpype_execute_args("traypublisher") + args = get_openpype_execute_args( + "module", "traypublish_tool", "launch" + ) run_detached_process(args) From 2a54de9b538ba4a29faaf3a82da043e276535877 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:07:52 +0200 Subject: [PATCH 0553/2550] added cli commands to traypublisher module --- openpype/hosts/traypublisher/module.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/openpype/hosts/traypublisher/module.py b/openpype/hosts/traypublisher/module.py index 25012900bc..6a088af635 100644 --- a/openpype/hosts/traypublisher/module.py +++ b/openpype/hosts/traypublisher/module.py @@ -48,3 +48,20 @@ class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction): "module", "traypublish_tool", "launch" ) run_detached_process(args) + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group(TrayPublishModule.name, help="TrayPublisher related commands.") +def cli_main(): + pass + + +@cli_main.command() +def launch(): + """Launch TrayPublish tool UI.""" + + from openpype.tools import traypublisher + + traypublisher.main() From e7000f0108d3a49934b1e7e3e5c20d2d63ffa7f2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:08:03 +0200 Subject: [PATCH 0554/2550] removed global traypublisher cli command --- openpype/cli.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index ffe288040e..4b653ac43c 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -46,12 +46,6 @@ def standalonepublisher(): PypeCommands().launch_standalone_publisher() -@main.command() -def traypublisher(): - """Show new OpenPype Standalone publisher UI.""" - PypeCommands().launch_traypublisher() - - @main.command() def tray(): """Launch pype tray. 
From f9c49fcaefd04dbb5661f22193224456cf97da88 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:12:06 +0200 Subject: [PATCH 0555/2550] better fill of module name --- openpype/hosts/traypublisher/module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/module.py b/openpype/hosts/traypublisher/module.py index 6a088af635..92a2312fec 100644 --- a/openpype/hosts/traypublisher/module.py +++ b/openpype/hosts/traypublisher/module.py @@ -45,7 +45,7 @@ class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction): def run_traypublisher(self): args = get_openpype_execute_args( - "module", "traypublish_tool", "launch" + "module", self.name, "launch" ) run_detached_process(args) From ffaa0b7adf6816e12500506f5d77ff4ab1ade3f8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:22:24 +0200 Subject: [PATCH 0556/2550] moved standalonepublish action into standalone publish host --- openpype/hosts/standalonepublisher/__init__.py | 6 ++++++ .../standalonepublisher/standalonepublish_module.py} | 0 2 files changed, 6 insertions(+) rename openpype/{modules/standalonepublish_action.py => hosts/standalonepublisher/standalonepublish_module.py} (100%) diff --git a/openpype/hosts/standalonepublisher/__init__.py b/openpype/hosts/standalonepublisher/__init__.py index e69de29bb2..64c6d995f7 100644 --- a/openpype/hosts/standalonepublisher/__init__.py +++ b/openpype/hosts/standalonepublisher/__init__.py @@ -0,0 +1,6 @@ +from standalonepublish_module import StandAlonePublishModule + + +__all__ = ( + "StandAlonePublishModule", +) diff --git a/openpype/modules/standalonepublish_action.py b/openpype/hosts/standalonepublisher/standalonepublish_module.py similarity index 100% rename from openpype/modules/standalonepublish_action.py rename to openpype/hosts/standalonepublisher/standalonepublish_module.py From 8c849670872d1d0be4ad5611e7c21c4d77fff8e1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:23:20 +0200 Subject: [PATCH 0557/2550] modified standalone publish action to work also as host module --- .../standalonepublish_module.py | 35 +++++++------------ 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/standalonepublisher/standalonepublish_module.py b/openpype/hosts/standalonepublisher/standalonepublish_module.py index ba53ce9b9e..2cd46ce342 100644 --- a/openpype/hosts/standalonepublisher/standalonepublish_module.py +++ b/openpype/hosts/standalonepublisher/standalonepublish_module.py @@ -1,26 +1,26 @@ import os import platform import subprocess + +import click + from openpype.lib import get_openpype_execute_args +from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules.interfaces import ITrayAction, IHostModule + +STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class StandAlonePublishAction(OpenPypeModule, ITrayAction): +class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostModule): label = "Publish" name = "standalonepublish_tool" + host_name = "standalonepublisher" def initialize(self, modules_settings): - import openpype self.enabled = modules_settings[self.name]["enabled"] self.publish_paths = [ - os.path.join( - openpype.PACKAGE_DIR, - "hosts", - "standalonepublisher", - "plugins", - "publish" - ) + os.path.join(STANDALONEPUBLISH_ROOT_DIR, "plugins", "publish") ] def tray_init(self): @@ -31,19 +31,10 @@ class 
StandAlonePublishAction(OpenPypeModule, ITrayAction): def connect_with_modules(self, enabled_modules): """Collect publish paths from other modules.""" + publish_paths = self.manager.collect_plugin_paths()["publish"] self.publish_paths.extend(publish_paths) def run_standalone_publisher(self): - args = get_openpype_execute_args("standalonepublisher") - kwargs = {} - if platform.system().lower() == "darwin": - new_args = ["open", "-na", args.pop(0), "--args"] - new_args.extend(args) - args = new_args - - detached_process = getattr(subprocess, "DETACHED_PROCESS", None) - if detached_process is not None: - kwargs["creationflags"] = detached_process - - subprocess.Popen(args, **kwargs) + args = get_openpype_execute_args("module", self.name, "launch") + run_detached_process(args) From 9db2eb3cc0d85d706e5637ae19bfc4ae6f49f028 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:23:32 +0200 Subject: [PATCH 0558/2550] added cli commands for standalone publisher --- .../standalonepublish_module.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/openpype/hosts/standalonepublisher/standalonepublish_module.py b/openpype/hosts/standalonepublisher/standalonepublish_module.py index 2cd46ce342..2d0114dee1 100644 --- a/openpype/hosts/standalonepublisher/standalonepublish_module.py +++ b/openpype/hosts/standalonepublisher/standalonepublish_module.py @@ -38,3 +38,22 @@ class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostModule): def run_standalone_publisher(self): args = get_openpype_execute_args("module", self.name, "launch") run_detached_process(args) + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group( + StandAlonePublishModule.name, + help="StandalonePublisher related commands.") +def cli_main(): + pass + + +@cli_main.command() +def launch(): + """Launch StandalonePublisher tool UI.""" + + from openpype.tools import standalonepublish + + standalonepublish.main() From 6a271aae101d86e33eaffe6571b736d8b0ab8c88 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:25:10 +0200 Subject: [PATCH 0559/2550] removed standalonepublisher from global cli commands --- openpype/cli.py | 6 ------ openpype/pype_commands.py | 5 ----- website/docs/admin_openpype_commands.md | 7 ------- 3 files changed, 18 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 4b653ac43c..398d1a94c0 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -40,12 +40,6 @@ def settings(dev): PypeCommands().launch_settings_gui(dev) -@main.command() -def standalonepublisher(): - """Show Pype Standalone publisher UI.""" - PypeCommands().launch_standalone_publisher() - - @main.command() def tray(): """Launch pype tray. diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index a447aa916b..66bf5e9bb4 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -76,11 +76,6 @@ class PypeCommands: import (run_webserver) return run_webserver(*args, **kwargs) - @staticmethod - def launch_standalone_publisher(): - from openpype.tools import standalonepublish - standalonepublish.main() - @staticmethod def launch_traypublisher(): from openpype.tools import traypublisher diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 53fc12410f..8345398e1d 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -48,7 +48,6 @@ For more information [see here](admin_use.md#run-openpype). 
| interactive | Start python like interactive console session. | | | projectmanager | Launch Project Manager UI | [📑](#projectmanager-arguments) | | settings | Open Settings UI | [📑](#settings-arguments) | -| standalonepublisher | Open Standalone Publisher UI | [📑](#standalonepublisher-arguments) | --- ### `tray` arguments {#tray-arguments} @@ -159,12 +158,6 @@ openpypeconsole settings ``` --- -### `standalonepublisher` arguments {#standalonepublisher-arguments} -`standalonepublisher` has no command-line arguments. -```shell -openpype_console standalonepublisher -``` - ### `repack-version` arguments {#repack-version-arguments} Takes path to unzipped and possibly modified OpenPype version. Files will be zipped, checksums recalculated and version will be determined by folder name From 986e4325c2379001bf33ca3729e8e9608ce9fe87 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:26:01 +0200 Subject: [PATCH 0560/2550] fix import --- openpype/hosts/standalonepublisher/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/standalonepublisher/__init__.py b/openpype/hosts/standalonepublisher/__init__.py index 64c6d995f7..394d5be397 100644 --- a/openpype/hosts/standalonepublisher/__init__.py +++ b/openpype/hosts/standalonepublisher/__init__.py @@ -1,4 +1,4 @@ -from standalonepublish_module import StandAlonePublishModule +from .standalonepublish_module import StandAlonePublishModule __all__ = ( From ab1e6c4e3dddacb0726c4eddea078af8fca42ebd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:30:27 +0200 Subject: [PATCH 0561/2550] removed unused imports --- openpype/hosts/standalonepublisher/standalonepublish_module.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/standalonepublisher/standalonepublish_module.py b/openpype/hosts/standalonepublisher/standalonepublish_module.py index 2d0114dee1..bf8e1d2c23 100644 --- a/openpype/hosts/standalonepublisher/standalonepublish_module.py +++ b/openpype/hosts/standalonepublisher/standalonepublish_module.py @@ -1,6 +1,4 @@ import os -import platform -import subprocess import click From 227a21c057412ee764facce5ba0ebec6bf19630c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:33:52 +0200 Subject: [PATCH 0562/2550] removed uninstall function --- openpype/hosts/tvpaint/api/__init__.py | 2 -- openpype/hosts/tvpaint/api/pipeline.py | 13 ------------- 2 files changed, 15 deletions(-) diff --git a/openpype/hosts/tvpaint/api/__init__.py b/openpype/hosts/tvpaint/api/__init__.py index c461b33f4b..43d411d8f9 100644 --- a/openpype/hosts/tvpaint/api/__init__.py +++ b/openpype/hosts/tvpaint/api/__init__.py @@ -6,7 +6,6 @@ from . import pipeline from . import plugin from .pipeline import ( install, - uninstall, maintained_selection, remove_instance, list_instances, @@ -33,7 +32,6 @@ __all__ = ( "plugin", "install", - "uninstall", "maintained_selection", "remove_instance", "list_instances", diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 0118c0104b..73e2c2335c 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -91,19 +91,6 @@ def install(): register_event_callback("application.exit", application_exit) -def uninstall(): - """Uninstall TVPaint-specific functionality. - - This function is called automatically on calling `uninstall_host()`. 
- """ - - log.info("OpenPype - Uninstalling TVPaint integration") - pyblish.api.deregister_host("tvpaint") - pyblish.api.deregister_plugin_path(PUBLISH_PATH) - deregister_loader_plugin_path(LOAD_PATH) - deregister_creator_plugin_path(CREATE_PATH) - - def containerise( name, namespace, members, context, loader, current_containers=None ): From b9c175d9691bc07f3bc5db9f31604363edf7f969 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:39:01 +0200 Subject: [PATCH 0563/2550] converted tvpaint into module --- openpype/hosts/tvpaint/__init__.py | 26 +++++---------- openpype/hosts/tvpaint/tvpaint_module.py | 42 ++++++++++++++++++++++++ 2 files changed, 50 insertions(+), 18 deletions(-) create mode 100644 openpype/hosts/tvpaint/tvpaint_module.py diff --git a/openpype/hosts/tvpaint/__init__.py b/openpype/hosts/tvpaint/__init__.py index 09b7c52cd1..068631a010 100644 --- a/openpype/hosts/tvpaint/__init__.py +++ b/openpype/hosts/tvpaint/__init__.py @@ -1,20 +1,10 @@ -import os +from .tvpaint_module import ( + get_launch_script_path, + TVPaintModule, +) -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value - - -def get_launch_script_path(): - current_dir = os.path.dirname(os.path.abspath(__file__)) - return os.path.join( - current_dir, - "api", - "launch_script.py" - ) +__all__ = ( + "get_launch_script_path", + "TVPaintModule", +) diff --git a/openpype/hosts/tvpaint/tvpaint_module.py b/openpype/hosts/tvpaint/tvpaint_module.py new file mode 100644 index 0000000000..a2471553a6 --- /dev/null +++ b/openpype/hosts/tvpaint/tvpaint_module.py @@ -0,0 +1,42 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def get_launch_script_path(): + return os.path.join( + TVPAINT_ROOT_DIR, + "api", + "launch_script.py" + ) + + + +class TVPaintModule(OpenPypeModule, IHostModule): + name = "tvpaint" + host_name = "tvpaint" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(env, _app): + """Modify environments to contain all required for implementation.""" + + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(TVPAINT_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".tvpp"] From c6f8b4559d249655f2b003982b8cf07f71fc70cb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 22 Aug 2022 18:45:05 +0200 Subject: [PATCH 0564/2550] import TVPAINT_ROOT_DIR in init --- openpype/hosts/tvpaint/__init__.py | 2 ++ openpype/hosts/tvpaint/tvpaint_module.py | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/__init__.py b/openpype/hosts/tvpaint/__init__.py index 068631a010..0a84b575dc 100644 --- a/openpype/hosts/tvpaint/__init__.py +++ b/openpype/hosts/tvpaint/__init__.py @@ -1,10 +1,12 @@ from .tvpaint_module import ( get_launch_script_path, TVPaintModule, + TVPAINT_ROOT_DIR, ) __all__ = ( "get_launch_script_path", "TVPaintModule", + "TVPAINT_ROOT_DIR", ) diff --git a/openpype/hosts/tvpaint/tvpaint_module.py b/openpype/hosts/tvpaint/tvpaint_module.py index a2471553a6..c29602babc 100644 --- 
a/openpype/hosts/tvpaint/tvpaint_module.py +++ b/openpype/hosts/tvpaint/tvpaint_module.py @@ -13,7 +13,6 @@ def get_launch_script_path(): ) - class TVPaintModule(OpenPypeModule, IHostModule): name = "tvpaint" host_name = "tvpaint" From d65607eedbc2a80121820cc8b6efc66cbc759006 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:27:26 +0200 Subject: [PATCH 0565/2550] removed unused imports --- openpype/hosts/tvpaint/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 73e2c2335c..427c927264 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -16,8 +16,6 @@ from openpype.pipeline import ( legacy_io, register_loader_plugin_path, register_creator_plugin_path, - deregister_loader_plugin_path, - deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) From 5bf34ebed7c4c848681c484c50ec70bd4ebb728d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:29:27 +0200 Subject: [PATCH 0566/2550] fix project overrides save to mongo --- openpype/settings/handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 09f36aa16e..79ec6248ac 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -957,7 +957,7 @@ class MongoSettingsHandler(SettingsHandler): if project_settings_doc: self.collection.update_one( {"_id": project_settings_doc["_id"]}, - new_project_settings_doc + {"$set": new_project_settings_doc} ) else: self.collection.insert_one(new_project_settings_doc) From 3176a0130d6b3ab110c0fd3c3616c6c56172df5d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:40:27 +0200 Subject: [PATCH 0567/2550] replaced changelog and upgrade notes with releases information --- website/docs/admin_releases.md | 9 + website/docs/changelog.md | 1138 -------------------------------- website/docs/upgrade_notes.md | 165 ----- website/sidebars.js | 6 +- 4 files changed, 10 insertions(+), 1308 deletions(-) create mode 100644 website/docs/admin_releases.md delete mode 100644 website/docs/changelog.md delete mode 100644 website/docs/upgrade_notes.md diff --git a/website/docs/admin_releases.md b/website/docs/admin_releases.md new file mode 100644 index 0000000000..bba5a22110 --- /dev/null +++ b/website/docs/admin_releases.md @@ -0,0 +1,9 @@ +--- +id: admin_releases +title: Releases +sidebar_label: Releases +--- + +Information about releases can be found on GitHub [Releases page](https://github.com/pypeclub/OpenPype/releases). + +You can find features and bugfixes in the codebase or full changelog for advanced users. diff --git a/website/docs/changelog.md b/website/docs/changelog.md deleted file mode 100644 index 448592b930..0000000000 --- a/website/docs/changelog.md +++ /dev/null @@ -1,1138 +0,0 @@ ---- -id: changelog -title: Changelog -sidebar_label: Changelog ---- - -## [2.18.0](https://github.com/pypeclub/openpype/tree/2.18.0) -_**release date:** (2021-05-18)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/2.17.3...2.18.0) - -**Enhancements:** - -- Use SubsetLoader and multiple contexts for delete_old_versions [\#1484](ttps://github.com/pypeclub/OpenPype/pull/1484)) -- TVPaint: Increment workfile version on successful publish. 
[\#1489](https://github.com/pypeclub/OpenPype/pull/1489) -- Maya: Use of multiple deadline servers [\#1483](https://github.com/pypeclub/OpenPype/pull/1483) - -**Fixed bugs:** - -- Use instance frame start instead of timeline. [\#1486](https://github.com/pypeclub/OpenPype/pull/1486) -- Maya: Redshift - set proper start frame on proxy [\#1480](https://github.com/pypeclub/OpenPype/pull/1480) -- Maya: wrong collection of playblasted frames [\#1517](https://github.com/pypeclub/OpenPype/pull/1517) -- Existing subsets hints in creator [\#1502](https://github.com/pypeclub/OpenPype/pull/1502) - - -### [2.17.3](https://github.com/pypeclub/openpype/tree/2.17.3) -_**release date:** (2021-05-06)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/CI/3.0.0-rc.3...2.17.3) - -**Fixed bugs:** - -- Nuke: workfile version synced to db version always [\#1479](https://github.com/pypeclub/OpenPype/pull/1479) - -### [2.17.2](https://github.com/pypeclub/openpype/tree/2.17.2) -_**release date:** (2021-05-04)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/CI/3.0.0-rc.1...2.17.2) - -**Enhancements:** - -- Forward/Backward compatible apps and tools with OpenPype 3 [\#1463](https://github.com/pypeclub/OpenPype/pull/1463) - -### [2.17.1](https://github.com/pypeclub/openpype/tree/2.17.1) -_**release date:** (2021-04-30)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/2.17.0...2.17.1) - -**Enhancements:** - -- Faster settings UI loading [\#1442](https://github.com/pypeclub/OpenPype/pull/1442) -- Nuke: deadline submission with gpu [\#1414](https://github.com/pypeclub/OpenPype/pull/1414) -- TVPaint frame range definition [\#1424](https://github.com/pypeclub/OpenPype/pull/1424) -- PS - group all published instances [\#1415](https://github.com/pypeclub/OpenPype/pull/1415) -- Add task name to context pop up. [\#1383](https://github.com/pypeclub/OpenPype/pull/1383) -- Enhance review letterbox feature. [\#1371](https://github.com/pypeclub/OpenPype/pull/1371) -- AE add duration validation [\#1363](https://github.com/pypeclub/OpenPype/pull/1363) - -**Fixed bugs:** - -- Houdini menu filename [\#1417](https://github.com/pypeclub/OpenPype/pull/1417) -- Nuke: fixing undo for loaded mov and sequence [\#1433](https://github.com/pypeclub/OpenPype/pull/1433) -- AE - validation for duration was 1 frame shorter [\#1426](https://github.com/pypeclub/OpenPype/pull/1426) - -**Merged pull requests:** - -- Maya: Vray - problem getting all file nodes for look publishing [\#1399](https://github.com/pypeclub/OpenPype/pull/1399) -- Maya: Support for Redshift proxies [\#1360](https://github.com/pypeclub/OpenPype/pull/1360) - -## [2.17.0](https://github.com/pypeclub/openpype/tree/2.17.0) -_**release date:** (2021-04-20)_ - -[Full Changelog](https://github.com/pypeclub/openpype/compare/CI/3.0.0-beta.2...2.17.0) - -**Enhancements:** - -- Forward compatible ftrack group [\#1243](https://github.com/pypeclub/OpenPype/pull/1243) -- Maya: Make tx option configurable with presets [\#1328](https://github.com/pypeclub/OpenPype/pull/1328) -- TVPaint asset name validation [\#1302](https://github.com/pypeclub/OpenPype/pull/1302) -- TV Paint: Set initial project settings. [\#1299](https://github.com/pypeclub/OpenPype/pull/1299) -- TV Paint: Validate mark in and out. 
[\#1298](https://github.com/pypeclub/OpenPype/pull/1298) -- Validate project settings [\#1297](https://github.com/pypeclub/OpenPype/pull/1297) -- After Effects: added SubsetManager [\#1234](https://github.com/pypeclub/OpenPype/pull/1234) -- Show error message in pyblish UI [\#1206](https://github.com/pypeclub/OpenPype/pull/1206) - -**Fixed bugs:** - -- Hiero: fixing source frame from correct object [\#1362](https://github.com/pypeclub/OpenPype/pull/1362) -- Nuke: fix colourspace, prerenders and nuke panes opening [\#1308](https://github.com/pypeclub/OpenPype/pull/1308) -- AE remove orphaned instance from workfile - fix self.stub [\#1282](https://github.com/pypeclub/OpenPype/pull/1282) -- Nuke: deadline submission with search replaced env values from preset [\#1194](https://github.com/pypeclub/OpenPype/pull/1194) -- Ftrack custom attributes in bulks [\#1312](https://github.com/pypeclub/OpenPype/pull/1312) -- Ftrack optional pypclub role [\#1303](https://github.com/pypeclub/OpenPype/pull/1303) -- After Effects: remove orphaned instances [\#1275](https://github.com/pypeclub/OpenPype/pull/1275) -- Avalon schema names [\#1242](https://github.com/pypeclub/OpenPype/pull/1242) -- Handle duplication of Task name [\#1226](https://github.com/pypeclub/OpenPype/pull/1226) -- Modified path of plugin loads for Harmony and TVPaint [\#1217](https://github.com/pypeclub/OpenPype/pull/1217) -- Regex checks in profiles filtering [\#1214](https://github.com/pypeclub/OpenPype/pull/1214) -- Update custom ftrack session attributes [\#1202](https://github.com/pypeclub/OpenPype/pull/1202) -- Nuke: write node colorspace ignore `default\(\)` label [\#1199](https://github.com/pypeclub/OpenPype/pull/1199) - -## [2.16.0](https://github.com/pypeclub/pype/tree/2.16.0) - - _**release date:** 2021-03-22_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.3...2.16.0) - -**Enhancements:** - -- Nuke: deadline submit limit group filter [\#1167](https://github.com/pypeclub/pype/pull/1167) -- Maya: support for Deadline Group and Limit Groups - backport 2.x [\#1156](https://github.com/pypeclub/pype/pull/1156) -- Maya: fixes for Redshift support [\#1152](https://github.com/pypeclub/pype/pull/1152) -- Nuke: adding preset for a Read node name to all img and mov Loaders [\#1146](https://github.com/pypeclub/pype/pull/1146) -- nuke deadline submit with environ var from presets overrides [\#1142](https://github.com/pypeclub/pype/pull/1142) -- Change timers after task change [\#1138](https://github.com/pypeclub/pype/pull/1138) -- Nuke: shortcuts for Pype menu [\#1127](https://github.com/pypeclub/pype/pull/1127) -- Nuke: workfile template [\#1124](https://github.com/pypeclub/pype/pull/1124) -- Sites local settings by site name [\#1117](https://github.com/pypeclub/pype/pull/1117) -- Reset loader's asset selection on context change [\#1106](https://github.com/pypeclub/pype/pull/1106) -- Bulk mov render publishing [\#1101](https://github.com/pypeclub/pype/pull/1101) -- Photoshop: mark publishable instances [\#1093](https://github.com/pypeclub/pype/pull/1093) -- Added ability to define BG color for extract review [\#1088](https://github.com/pypeclub/pype/pull/1088) -- TVPaint extractor enhancement [\#1080](https://github.com/pypeclub/pype/pull/1080) -- Photoshop: added support for .psb in workfiles [\#1078](https://github.com/pypeclub/pype/pull/1078) -- Optionally add task to subset name [\#1072](https://github.com/pypeclub/pype/pull/1072) -- Only extend clip range when collecting. 
[\#1008](https://github.com/pypeclub/pype/pull/1008) -- Collect audio for farm reviews. [\#1073](https://github.com/pypeclub/pype/pull/1073) - - -**Fixed bugs:** - -- Fix path spaces in jpeg extractor [\#1174](https://github.com/pypeclub/pype/pull/1174) -- Maya: Bugfix: superclass for CreateCameraRig [\#1166](https://github.com/pypeclub/pype/pull/1166) -- Maya: Submit to Deadline - fix typo in condition [\#1163](https://github.com/pypeclub/pype/pull/1163) -- Avoid dot in repre extension [\#1125](https://github.com/pypeclub/pype/pull/1125) -- Fix versions variable usage in standalone publisher [\#1090](https://github.com/pypeclub/pype/pull/1090) -- Collect instance data fix subset query [\#1082](https://github.com/pypeclub/pype/pull/1082) -- Fix getting the camera name. [\#1067](https://github.com/pypeclub/pype/pull/1067) -- Nuke: Ensure "NUKE\_TEMP\_DIR" is not part of the Deadline job environment. [\#1064](https://github.com/pypeclub/pype/pull/1064) - -### [2.15.3](https://github.com/pypeclub/pype/tree/2.15.3) - - _**release date:** 2021-02-26_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.2...2.15.3) - -**Enhancements:** - -- Maya: speedup renderable camera collection [\#1053](https://github.com/pypeclub/pype/pull/1053) -- Harmony - add regex search to filter allowed task names for collectin… [\#1047](https://github.com/pypeclub/pype/pull/1047) - -**Fixed bugs:** - -- Ftrack integrate hierarchy fix [\#1085](https://github.com/pypeclub/pype/pull/1085) -- Explicit subset filter in anatomy instance data [\#1059](https://github.com/pypeclub/pype/pull/1059) -- TVPaint frame offset [\#1057](https://github.com/pypeclub/pype/pull/1057) -- Auto fix unicode strings [\#1046](https://github.com/pypeclub/pype/pull/1046) - -### [2.15.2](https://github.com/pypeclub/pype/tree/2.15.2) - - _**release date:** 2021-02-19_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.1...2.15.2) - -**Enhancements:** - -- Maya: Vray scene publishing [\#1013](https://github.com/pypeclub/pype/pull/1013) - -**Fixed bugs:** - -- Fix entity move under project [\#1040](https://github.com/pypeclub/pype/pull/1040) -- smaller nuke fixes from production [\#1036](https://github.com/pypeclub/pype/pull/1036) -- TVPaint thumbnail extract fix [\#1031](https://github.com/pypeclub/pype/pull/1031) - -### [2.15.1](https://github.com/pypeclub/pype/tree/2.15.1) - - _**release date:** 2021-02-12_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.0...2.15.1) - -**Enhancements:** - -- Delete version as loader action [\#1011](https://github.com/pypeclub/pype/pull/1011) -- Delete old versions [\#445](https://github.com/pypeclub/pype/pull/445) - -**Fixed bugs:** - -- PS - remove obsolete functions from pywin32 [\#1006](https://github.com/pypeclub/pype/pull/1006) -- Clone description of review session objects. [\#922](https://github.com/pypeclub/pype/pull/922) - -## [2.15.0](https://github.com/pypeclub/pype/tree/2.15.0) - - _**release date:** 2021-02-09_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.6...2.15.0) - -**Enhancements:** - -- Resolve - loading and updating clips [\#932](https://github.com/pypeclub/pype/pull/932) -- Release/2.15.0 [\#926](https://github.com/pypeclub/pype/pull/926) -- Photoshop: add option for template.psd and prelaunch hook [\#894](https://github.com/pypeclub/pype/pull/894) -- Nuke: deadline presets [\#993](https://github.com/pypeclub/pype/pull/993) -- Maya: Alembic only set attributes that exists. 
[\#986](https://github.com/pypeclub/pype/pull/986) -- Harmony: render local and handle fixes [\#981](https://github.com/pypeclub/pype/pull/981) -- PSD Bulk export of ANIM group [\#965](https://github.com/pypeclub/pype/pull/965) -- AE - added prelaunch hook for opening last or workfile from template [\#944](https://github.com/pypeclub/pype/pull/944) -- PS - safer handling of loading of workfile [\#941](https://github.com/pypeclub/pype/pull/941) -- Maya: Handling Arnold referenced AOVs [\#938](https://github.com/pypeclub/pype/pull/938) -- TVPaint: switch layer IDs for layer names during identification [\#903](https://github.com/pypeclub/pype/pull/903) -- TVPaint audio/sound loader [\#893](https://github.com/pypeclub/pype/pull/893) -- Clone review session with children. [\#891](https://github.com/pypeclub/pype/pull/891) -- Simple compositing data packager for freelancers [\#884](https://github.com/pypeclub/pype/pull/884) -- Harmony deadline submission [\#881](https://github.com/pypeclub/pype/pull/881) -- Maya: Optionally hide image planes from reviews. [\#840](https://github.com/pypeclub/pype/pull/840) -- Maya: handle referenced AOVs for Vray [\#824](https://github.com/pypeclub/pype/pull/824) -- DWAA/DWAB support on windows [\#795](https://github.com/pypeclub/pype/pull/795) -- Unreal: animation, layout and setdress updates [\#695](https://github.com/pypeclub/pype/pull/695) - -**Fixed bugs:** - -- Maya: Looks - disable hardlinks [\#995](https://github.com/pypeclub/pype/pull/995) -- Fix Ftrack custom attribute update [\#982](https://github.com/pypeclub/pype/pull/982) -- Prores ks in burnin script [\#960](https://github.com/pypeclub/pype/pull/960) -- terminal.py crash on import [\#839](https://github.com/pypeclub/pype/pull/839) -- Extract review handle bizarre pixel aspect ratio [\#990](https://github.com/pypeclub/pype/pull/990) -- Nuke: add nuke related env var to sumbission [\#988](https://github.com/pypeclub/pype/pull/988) -- Nuke: missing preset's variable [\#984](https://github.com/pypeclub/pype/pull/984) -- Get creator by name fix [\#979](https://github.com/pypeclub/pype/pull/979) -- Fix update of project's tasks on Ftrack sync [\#972](https://github.com/pypeclub/pype/pull/972) -- nuke: wrong frame offset in mov loader [\#971](https://github.com/pypeclub/pype/pull/971) -- Create project structure action fix multiroot [\#967](https://github.com/pypeclub/pype/pull/967) -- PS: remove pywin installation from hook [\#964](https://github.com/pypeclub/pype/pull/964) -- Prores ks in burnin script [\#959](https://github.com/pypeclub/pype/pull/959) -- Subset family is now stored in subset document [\#956](https://github.com/pypeclub/pype/pull/956) -- DJV new version arguments [\#954](https://github.com/pypeclub/pype/pull/954) -- TV Paint: Fix single frame Sequence [\#953](https://github.com/pypeclub/pype/pull/953) -- nuke: missing `file` knob update [\#933](https://github.com/pypeclub/pype/pull/933) -- Photoshop: Create from single layer was failing [\#920](https://github.com/pypeclub/pype/pull/920) -- Nuke: baking mov with correct colorspace inherited from write [\#909](https://github.com/pypeclub/pype/pull/909) -- Launcher fix actions discover [\#896](https://github.com/pypeclub/pype/pull/896) -- Get the correct file path for the updated mov. 
[\#889](https://github.com/pypeclub/pype/pull/889) -- Maya: Deadline submitter - shared data access violation [\#831](https://github.com/pypeclub/pype/pull/831) -- Maya: Take into account vray master AOV switch [\#822](https://github.com/pypeclub/pype/pull/822) - -**Merged pull requests:** - -- Refactor blender to 3.0 format [\#934](https://github.com/pypeclub/pype/pull/934) - -### [2.14.6](https://github.com/pypeclub/pype/tree/2.14.6) - - _**release date:** 2021-01-15_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.5...2.14.6) - -**Fixed bugs:** - -- Nuke: improving of hashing path [\#885](https://github.com/pypeclub/pype/pull/885) - -**Merged pull requests:** - -- Hiero: cut videos with correct secons [\#892](https://github.com/pypeclub/pype/pull/892) -- Faster sync to avalon preparation [\#869](https://github.com/pypeclub/pype/pull/869) - -### [2.14.5](https://github.com/pypeclub/pype/tree/2.14.5) - - _**release date:** 2021-01-06_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.4...2.14.5) - -**Merged pull requests:** - -- Pype logger refactor [\#866](https://github.com/pypeclub/pype/pull/866) - -### [2.14.4](https://github.com/pypeclub/pype/tree/2.14.4) - - _**release date:** 2020-12-18_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.3...2.14.4) - -**Merged pull requests:** - -- Fix - AE - added explicit cast to int [\#837](https://github.com/pypeclub/pype/pull/837) - -### [2.14.3](https://github.com/pypeclub/pype/tree/2.14.3) - - _**release date:** 2020-12-16_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.2...2.14.3) - -**Fixed bugs:** - -- TVPaint repair invalid metadata [\#809](https://github.com/pypeclub/pype/pull/809) -- Feature/push hier value to nonhier action [\#807](https://github.com/pypeclub/pype/pull/807) -- Harmony: fix palette and image sequence loader [\#806](https://github.com/pypeclub/pype/pull/806) - -**Merged pull requests:** - -- respecting space in path [\#823](https://github.com/pypeclub/pype/pull/823) - -### [2.14.2](https://github.com/pypeclub/pype/tree/2.14.2) - - _**release date:** 2020-12-04_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.1...2.14.2) - -**Enhancements:** - -- Collapsible wrapper in settings [\#767](https://github.com/pypeclub/pype/pull/767) - -**Fixed bugs:** - -- Harmony: template extraction and palettes thumbnails on mac [\#768](https://github.com/pypeclub/pype/pull/768) -- TVPaint store context to workfile metadata \(764\) [\#766](https://github.com/pypeclub/pype/pull/766) -- Extract review audio cut fix [\#763](https://github.com/pypeclub/pype/pull/763) - -**Merged pull requests:** - -- AE: fix publish after background load [\#781](https://github.com/pypeclub/pype/pull/781) -- TVPaint store members key [\#769](https://github.com/pypeclub/pype/pull/769) - -### [2.14.1](https://github.com/pypeclub/pype/tree/2.14.1) - - _**release date:** 2020-11-27_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.0...2.14.1) - -**Enhancements:** - -- Settings required keys in modifiable dict [\#770](https://github.com/pypeclub/pype/pull/770) -- Extract review may not add audio to output [\#761](https://github.com/pypeclub/pype/pull/761) - -**Fixed bugs:** - -- After Effects: frame range, file format and render source scene fixes [\#760](https://github.com/pypeclub/pype/pull/760) -- Hiero: trimming review with clip event number [\#754](https://github.com/pypeclub/pype/pull/754) -- TVPaint: fix updating of loaded subsets 
[\#752](https://github.com/pypeclub/pype/pull/752) -- Maya: Vray handling of default AOV [\#748](https://github.com/pypeclub/pype/pull/748) -- Maya: multiple renderable cameras in layer didn't work [\#744](https://github.com/pypeclub/pype/pull/744) -- Ftrack integrate custom attributes fix [\#742](https://github.com/pypeclub/pype/pull/742) - -
    - -## [2.14.0](https://github.com/pypeclub/pype/tree/2.14.0) - - _**release date:** 2020-11-24_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.7...2.14.0) - -**Enhancements:** - -- Ftrack: Event for syncing shot or asset status with tasks.[\#736](https://github.com/pypeclub/pype/pull/736) -- Maya: add camera rig publishing option [\#721](https://github.com/pypeclub/pype/pull/721) -- Maya: Ask user to select non-default camera from scene or create a new. [\#678](https://github.com/pypeclub/pype/pull/678) -- Maya: Camera name can be added to burnins. [\#674](https://github.com/pypeclub/pype/pull/674) -- Sort instances by label in pyblish gui [\#719](https://github.com/pypeclub/pype/pull/719) -- Synchronize ftrack hierarchical and shot attributes [\#716](https://github.com/pypeclub/pype/pull/716) -- Standalone Publisher: Publish editorial from separate image sequences [\#699](https://github.com/pypeclub/pype/pull/699) -- Render publish plugins abstraction [\#687](https://github.com/pypeclub/pype/pull/687) -- TV Paint: image loader with options [\#675](https://github.com/pypeclub/pype/pull/675) -- **TV Paint (Beta):** initial implementation of creators and local rendering [\#693](https://github.com/pypeclub/pype/pull/693) -- **After Effects (Beta):** base integration with loaders [\#667](https://github.com/pypeclub/pype/pull/667) -- Harmony: Javascript refactoring and overall stability improvements [\#666](https://github.com/pypeclub/pype/pull/666) - -**Fixed bugs:** - -- TVPaint: extract review fix [\#740](https://github.com/pypeclub/pype/pull/740) -- After Effects: Review were not being sent to ftrack [\#738](https://github.com/pypeclub/pype/pull/738) -- Maya: vray proxy was not loading [\#722](https://github.com/pypeclub/pype/pull/722) -- Maya: Vray expected file fixes [\#682](https://github.com/pypeclub/pype/pull/682) - -**Deprecated:** - -- Removed artist view from pyblish gui [\#717](https://github.com/pypeclub/pype/pull/717) -- Maya: disable legacy override check for cameras [\#715](https://github.com/pypeclub/pype/pull/715) - - - - -### [2.13.7](https://github.com/pypeclub/pype/tree/2.13.7) - - _**release date:** 2020-11-19_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.6...2.13.7) - -**Merged pull requests:** - -- fix\(SP\): getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) - - - - -### [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) - - _**release date:** 2020-11-15_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.5...2.13.6) - -**Fixed bugs:** - -- Maya workfile version wasn't syncing with renders properly [\#711](https://github.com/pypeclub/pype/pull/711) -- Maya: Fix for publishing multiple cameras with review from the same scene [\#710](https://github.com/pypeclub/pype/pull/710) - - - - -### [2.13.5](https://github.com/pypeclub/pype/tree/2.13.5) - - _**release date:** 2020-11-12_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.4...2.13.5) - - -**Fixed bugs:** - -- Wrong thumbnail file was picked when publishing sequence in standalone publisher [\#703](https://github.com/pypeclub/pype/pull/703) -- Fix: Burnin data pass and FFmpeg tool check [\#701](https://github.com/pypeclub/pype/pull/701) - - - - -### [2.13.4](https://github.com/pypeclub/pype/tree/2.13.4) - - _**release date:** 2020-11-09_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.3...2.13.4) - - -**Fixed bugs:** - -- Photoshop unhiding hidden layers 
[\#688](https://github.com/pypeclub/pype/issues/688) -- Nuke: Favorite directories "shot dir" "project dir" - not working \#684 [\#685](https://github.com/pypeclub/pype/pull/685) - - - - - -### [2.13.3](https://github.com/pypeclub/pype/tree/2.13.3) - - _**release date:** _2020-11-03_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.2...2.13.3) - -**Fixed bugs:** - -- Fix ffmpeg executable path with spaces [\#680](https://github.com/pypeclub/pype/pull/680) -- Hotfix: Added default version number [\#679](https://github.com/pypeclub/pype/pull/679) - - - - -### [2.13.2](https://github.com/pypeclub/pype/tree/2.13.2) - - _**release date:** 2020-10-28_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.1...2.13.2) - -**Fixed bugs:** - -- Nuke: wrong conditions when fixing legacy write nodes [\#665](https://github.com/pypeclub/pype/pull/665) - - - - -### [2.13.1](https://github.com/pypeclub/pype/tree/2.13.1) - - _**release date:** 2020-10-23_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.0...2.13.1) - -**Fixed bugs:** - -- Photoshop: Layer name is not propagating to metadata [\#654](https://github.com/pypeclub/pype/issues/654) -- Photoshop: Loader in fails with "can't set attribute" [\#650](https://github.com/pypeclub/pype/issues/650) -- Hiero: Review video file adding one frame to the end [\#659](https://github.com/pypeclub/pype/issues/659) - - - -## [2.13.0](https://github.com/pypeclub/pype/tree/2.13.0) - - _**release date:** 2020-10-16_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.5...2.13.0) - -**Enhancements:** - -- Deadline Output Folder [\#636](https://github.com/pypeclub/pype/issues/636) -- Nuke Camera Loader [\#565](https://github.com/pypeclub/pype/issues/565) -- Deadline publish job shows publishing output folder [\#649](https://github.com/pypeclub/pype/pull/649) -- Get latest version in lib [\#642](https://github.com/pypeclub/pype/pull/642) -- Improved publishing of multiple representation from SP [\#638](https://github.com/pypeclub/pype/pull/638) -- TvPaint: launch shot work file from within Ftrack [\#631](https://github.com/pypeclub/pype/pull/631) -- Add mp4 support for RV action. 
[\#628](https://github.com/pypeclub/pype/pull/628) -- Maya: allow renders to have version synced with workfile [\#618](https://github.com/pypeclub/pype/pull/618) -- Renaming nukestudio host folder to hiero [\#617](https://github.com/pypeclub/pype/pull/617) -- Harmony: More efficient publishing [\#615](https://github.com/pypeclub/pype/pull/615) -- Ftrack server action improvement [\#608](https://github.com/pypeclub/pype/pull/608) -- Deadline user defaults to pype username if present [\#607](https://github.com/pypeclub/pype/pull/607) -- Standalone publisher now has icon [\#606](https://github.com/pypeclub/pype/pull/606) -- Nuke render write targeting knob improvement [\#603](https://github.com/pypeclub/pype/pull/603) -- Animated pyblish gui [\#602](https://github.com/pypeclub/pype/pull/602) -- Maya: Deadline - make use of asset dependencies optional [\#591](https://github.com/pypeclub/pype/pull/591) -- Nuke: Publishing, loading and updating alembic cameras [\#575](https://github.com/pypeclub/pype/pull/575) -- Maya: add look assigner to pype menu even if scriptsmenu is not available [\#573](https://github.com/pypeclub/pype/pull/573) -- Store task types in the database [\#572](https://github.com/pypeclub/pype/pull/572) -- Maya: Tiled EXRs to scanline EXRs render option [\#512](https://github.com/pypeclub/pype/pull/512) -- Fusion: basic integration refresh [\#452](https://github.com/pypeclub/pype/pull/452) - -**Fixed bugs:** - -- Burnin script did not propagate ffmpeg output [\#640](https://github.com/pypeclub/pype/issues/640) -- Pyblish-pype spacer in terminal wasn't transparent [\#646](https://github.com/pypeclub/pype/pull/646) -- Lib subprocess without logger [\#645](https://github.com/pypeclub/pype/pull/645) -- Nuke: prevent crash if we only have single frame in sequence [\#644](https://github.com/pypeclub/pype/pull/644) -- Burnin script logs better output [\#641](https://github.com/pypeclub/pype/pull/641) -- Missing audio on farm submission. [\#639](https://github.com/pypeclub/pype/pull/639) -- review from imagesequence error [\#633](https://github.com/pypeclub/pype/pull/633) -- Hiero: wrong order of fps clip instance data collecting [\#627](https://github.com/pypeclub/pype/pull/627) -- Add source for review instances. 
[\#625](https://github.com/pypeclub/pype/pull/625) -- Task processing in event sync [\#623](https://github.com/pypeclub/pype/pull/623) -- sync to avalon doesn t remove renamed task [\#619](https://github.com/pypeclub/pype/pull/619) -- Intent publish setting wasn't working with default value [\#562](https://github.com/pypeclub/pype/pull/562) -- Maya: Updating a look where the shader name changed, leaves the geo without a shader [\#514](https://github.com/pypeclub/pype/pull/514) - - -### [2.12.5](https://github.com/pypeclub/pype/tree/2.12.5) - -_**release date:** 2020-10-14_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.4...2.12.5) - -**Fixed Bugs:** - -- Harmony: Disable application launch logic [\#637](https://github.com/pypeclub/pype/pull/637) - -### [2.12.4](https://github.com/pypeclub/pype/tree/2.12.4) - -_**release date:** 2020-10-08_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.3...2.12.4) - -**Fixed bugs:** - -- Sync to avalon doesn't remove renamed task [\#605](https://github.com/pypeclub/pype/issues/605) - - -**Merged pull requests:** - -- NukeStudio: small fixes [\#622](https://github.com/pypeclub/pype/pull/622) -- NukeStudio: broken order of plugins [\#620](https://github.com/pypeclub/pype/pull/620) - -### [2.12.3](https://github.com/pypeclub/pype/tree/2.12.3) - -_**release date:** 2020-10-06_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.2...2.12.3) - -**Fixed bugs:** - -- Harmony: empty scene contamination [\#583](https://github.com/pypeclub/pype/issues/583) -- Edit publishing in SP doesn't respect shot selection for publishing [\#542](https://github.com/pypeclub/pype/issues/542) -- Pathlib breaks compatibility with python2 hosts [\#281](https://github.com/pypeclub/pype/issues/281) -- Maya: fix maya scene type preset exception [\#569](https://github.com/pypeclub/pype/pull/569) -- Standalone publisher editorial plugins interfering [\#580](https://github.com/pypeclub/pype/pull/580) - -### [2.12.2](https://github.com/pypeclub/pype/tree/2.12.2) - -_**release date:** 2020-09-25_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.1...2.12.2) - -**Fixed bugs:** - -- Harmony: Saving heavy scenes will crash [\#507](https://github.com/pypeclub/pype/issues/507) -- Extract review a representation name with `\*\_burnin` [\#388](https://github.com/pypeclub/pype/issues/388) -- Hierarchy data was not considering active instances [\#551](https://github.com/pypeclub/pype/pull/551) - -### [2.12.1](https://github.com/pypeclub/pype/tree/2.12.1) - -_**release date:** 2020-09-15_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.0...2.12.1) - -**Fixed bugs:** - -- dependency security alert ! [\#484](https://github.com/pypeclub/pype/issues/484) -- Maya: RenderSetup is missing update [\#106](https://github.com/pypeclub/pype/issues/106) -- \ extract effects creates new instance [\#78](https://github.com/pypeclub/pype/issues/78) - - - - -## [2.12.0](https://github.com/pypeclub/pype/tree/2.12.0) ## - -_**release date:** 09 Sept 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.8...2.12.0) - -**Enhancements:** - -- Pype now uses less mongo connections [\#509](https://github.com/pypeclub/pype/pull/509) -- Nuke: adding image loader [\#499](https://github.com/pypeclub/pype/pull/499) -- Completely new application launcher [\#443](https://github.com/pypeclub/pype/pull/443) -- Maya: Optional skip review on renders. 
[\#441](https://github.com/pypeclub/pype/pull/441) -- Ftrack: Option to push status from task to latest version [\#440](https://github.com/pypeclub/pype/pull/440) -- Maya: Properly containerize image plane loads. [\#434](https://github.com/pypeclub/pype/pull/434) -- Option to keep the review files. [\#426](https://github.com/pypeclub/pype/pull/426) -- Maya: Isolate models during preview publishing [\#425](https://github.com/pypeclub/pype/pull/425) -- Ftrack attribute group is backwards compatible [\#418](https://github.com/pypeclub/pype/pull/418) -- Maya: Publishing of tile renderings on Deadline [\#398](https://github.com/pypeclub/pype/pull/398) -- Slightly better logging gui [\#383](https://github.com/pypeclub/pype/pull/383) -- Standalonepublisher: editorial family features expansion [\#411](https://github.com/pypeclub/pype/pull/411) - -**Fixed bugs:** - -- Maya: Fix tile order for Draft Tile Assembler [\#511](https://github.com/pypeclub/pype/pull/511) -- Remove extra dash [\#501](https://github.com/pypeclub/pype/pull/501) -- Fix: strip dot from repre names in single frame renders [\#498](https://github.com/pypeclub/pype/pull/498) -- Better handling of destination during integrating [\#485](https://github.com/pypeclub/pype/pull/485) -- Fix: allow thumbnail creation for single frame renders [\#460](https://github.com/pypeclub/pype/pull/460) -- added missing argument to launch\_application in ftrack app handler [\#453](https://github.com/pypeclub/pype/pull/453) -- Burnins: Copy bit rate of input video to match quality. [\#448](https://github.com/pypeclub/pype/pull/448) -- Standalone publisher is now independent from tray [\#442](https://github.com/pypeclub/pype/pull/442) -- Bugfix/empty enumerator attributes [\#436](https://github.com/pypeclub/pype/pull/436) -- Fixed wrong order of "other" category collapssing in publisher [\#435](https://github.com/pypeclub/pype/pull/435) -- Multiple reviews where being overwritten to one. 
[\#424](https://github.com/pypeclub/pype/pull/424) -- Cleanup plugin fail on instances without staging dir [\#420](https://github.com/pypeclub/pype/pull/420) -- deprecated -intra parameter in ffmpeg to new `-g` [\#417](https://github.com/pypeclub/pype/pull/417) -- Delivery action can now work with entered path [\#397](https://github.com/pypeclub/pype/pull/397) - - - - - -### [2.11.8](https://github.com/pypeclub/pype/tree/2.11.8) ## - -_**release date:** 27 Aug 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.7...2.11.8) - -**Fixed bugs:** - -- pyblish pype - other group is collapsed before plugins are done [\#431](https://github.com/pypeclub/pype/issues/431) -- Alpha white edges in harmony on PNGs [\#412](https://github.com/pypeclub/pype/issues/412) -- harmony image loader picks wrong representations [\#404](https://github.com/pypeclub/pype/issues/404) -- Clockify crash when response contain symbol not allowed by UTF-8 [\#81](https://github.com/pypeclub/pype/issues/81) - - - - -### [2.11.7](https://github.com/pypeclub/pype/tree/2.11.7) ## - -_**release date:** 21 Aug 2020_ - - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.6...2.11.7) - -**Fixed bugs:** - -- Clean Up Baked Movie [\#369](https://github.com/pypeclub/pype/issues/369) -- celaction last workfile wasn't picked up correctly [\#459](https://github.com/pypeclub/pype/pull/459) - - - -### [2.11.5](https://github.com/pypeclub/pype/tree/2.11.5) ## - -_**release date:** 13 Aug 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.4...2.11.5) - -**Enhancements:** - -- Standalone publisher now only groups sequence if the extension is known [\#439](https://github.com/pypeclub/pype/pull/439) - -**Fixed bugs:** - -- Logs have been disable for editorial by default to speed up publishing [\#433](https://github.com/pypeclub/pype/pull/433) -- Various fixes for celaction [\#430](https://github.com/pypeclub/pype/pull/430) -- Harmony: invalid variable scope in validate scene settings [\#428](https://github.com/pypeclub/pype/pull/428) -- Harmomny: new representation name for audio was not accepted [\#427](https://github.com/pypeclub/pype/pull/427) - - - - -### [2.11.3](https://github.com/pypeclub/pype/tree/2.11.3) ## - -_**release date:** 4 Aug 2020_ - -[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.2...2.11.3) - -**Fixed bugs:** - -- Harmony: publishing performance issues [\#408](https://github.com/pypeclub/pype/pull/408) - - - - -## 2.11.0 ## - -_**release date:** 27 July 2020_ - -**new:** -- _(blender)_ namespace support [\#341](https://github.com/pypeclub/pype/pull/341) -- _(blender)_ start end frames [\#330](https://github.com/pypeclub/pype/pull/330) -- _(blender)_ camera asset [\#322](https://github.com/pypeclub/pype/pull/322) -- _(pype)_ toggle instances per family in pyblish GUI [\#320](https://github.com/pypeclub/pype/pull/320) -- _(pype)_ current release version is now shown in the tray menu [#379](https://github.com/pypeclub/pype/pull/379) - - -**improved:** -- _(resolve)_ tagging for publish [\#239](https://github.com/pypeclub/pype/issues/239) -- _(pype)_ Support publishing a subset of shots with standalone editorial [\#336](https://github.com/pypeclub/pype/pull/336) -- _(harmony)_ Basic support for palettes [\#324](https://github.com/pypeclub/pype/pull/324) -- _(photoshop)_ Flag outdated containers on startup and publish. 
[\#309](https://github.com/pypeclub/pype/pull/309) -- _(harmony)_ Flag Outdated containers [\#302](https://github.com/pypeclub/pype/pull/302) -- _(photoshop)_ Publish review [\#298](https://github.com/pypeclub/pype/pull/298) -- _(pype)_ Optional Last workfile launch [\#365](https://github.com/pypeclub/pype/pull/365) - - -**fixed:** -- _(premiere)_ workflow fixes [\#346](https://github.com/pypeclub/pype/pull/346) -- _(pype)_ pype-setup does not work with space in path [\#327](https://github.com/pypeclub/pype/issues/327) -- _(ftrack)_ Ftrack delete action cause circular error [\#206](https://github.com/pypeclub/pype/issues/206) -- _(nuke)_ Priority was forced to 50 [\#345](https://github.com/pypeclub/pype/pull/345) -- _(nuke)_ Fix ValidateNukeWriteKnobs [\#340](https://github.com/pypeclub/pype/pull/340) -- _(maya)_ If camera attributes are connected, we can ignore them. [\#339](https://github.com/pypeclub/pype/pull/339) -- _(pype)_ stop appending of tools environment to existing env [\#337](https://github.com/pypeclub/pype/pull/337) -- _(ftrack)_ Ftrack timeout needs to look at AVALON\_TIMEOUT [\#325](https://github.com/pypeclub/pype/pull/325) -- _(harmony)_ Only zip files are supported. [\#310](https://github.com/pypeclub/pype/pull/310) -- _(pype)_ hotfix/Fix event server mongo uri [\#305](https://github.com/pypeclub/pype/pull/305) -- _(photoshop)_ Subset was not named or validated correctly. [\#304](https://github.com/pypeclub/pype/pull/304) - - - - - -## 2.10.0 ## - -_**release date:** 17 June 2020_ - -**new:** -- _(harmony)_ **Toon Boom Harmony** has been greatly extended to support rigging, scene build, animation and rendering workflows. [#270](https://github.com/pypeclub/pype/issues/270) [#271](https://github.com/pypeclub/pype/issues/271) [#190](https://github.com/pypeclub/pype/issues/190) [#191](https://github.com/pypeclub/pype/issues/191) [#172](https://github.com/pypeclub/pype/issues/172) [#168](https://github.com/pypeclub/pype/issues/168) -- _(pype)_ Added support for rudimentary **edl publishing** into individual shots. [#265](https://github.com/pypeclub/pype/issues/265) -- _(celaction)_ Simple **Celaction** integration has been added with support for workfiles and rendering. [#255](https://github.com/pypeclub/pype/issues/255) -- _(maya)_ Support for multiple job types when submitting to the farm. We can now render Maya or Standalone render jobs for Vray and Arnold (limited support for arnold) [#204](https://github.com/pypeclub/pype/issues/204) -- _(photoshop)_ Added initial support for Photoshop [#232](https://github.com/pypeclub/pype/issues/232) - -**improved:** -- _(blender)_ Updated support for rigs and added support Layout family [#233](https://github.com/pypeclub/pype/issues/233) [#226](https://github.com/pypeclub/pype/issues/226) -- _(premiere)_ It is now possible to choose different storage root for workfiles of different task types. [#255](https://github.com/pypeclub/pype/issues/255) -- _(maya)_ Support for unmerged AOVs in Redshift multipart EXRs [#197](https://github.com/pypeclub/pype/issues/197) -- _(pype)_ Pype repository has been refactored in preparation for 3.0 release [#169](https://github.com/pypeclub/pype/issues/169) -- _(deadline)_ All file dependencies are now passed to deadline from maya to prevent premature start of rendering if caches or textures haven't been coppied over yet. [#195](https://github.com/pypeclub/pype/issues/195) -- _(nuke)_ Script validation can now be made optional. 
[#194](https://github.com/pypeclub/pype/issues/194) -- _(pype)_ Publishing can now be stopped at any time. [#194](https://github.com/pypeclub/pype/issues/194) - -**fix:** -- _(pype)_ Pyblish-lite has been integrated into pype repository, plus various publishing GUI fixes. [#274](https://github.com/pypeclub/pype/issues/274) [#275](https://github.com/pypeclub/pype/issues/275) [#268](https://github.com/pypeclub/pype/issues/268) [#227](https://github.com/pypeclub/pype/issues/227) [#238](https://github.com/pypeclub/pype/issues/238) -- _(maya)_ Alembic extractor was getting wrong frame range type in certain scenarios [#254](https://github.com/pypeclub/pype/issues/254) -- _(maya)_ Attaching a render to subset in maya was not passing validation in certain scenarios [#256](https://github.com/pypeclub/pype/issues/256) -- _(ftrack)_ Various small fixes to ftrack sync [#263](https://github.com/pypeclub/pype/issues/263) [#259](https://github.com/pypeclub/pype/issues/259) -- _(maya)_ Look extraction is now able to skp invalid connections in shaders [#207](https://github.com/pypeclub/pype/issues/207) - - - - - -## 2.9.0 ## - -_**release date:** 25 May 2020_ - -**new:** -- _(pype)_ Support for **Multiroot projects**. You can now store project data on multiple physical or virtual storages and target individual publishes to these locations. For instance render can be stored on a faster storage than the rest of the project. [#145](https://github.com/pypeclub/pype/issues/145), [#38](https://github.com/pypeclub/pype/issues/38) -- _(harmony)_ Basic implementation of **Toon Boom Harmony** has been added. [#142](https://github.com/pypeclub/pype/issues/142) -- _(pype)_ OSX support is in public beta now. There are issues to be expected, but the main implementation should be functional. [#141](https://github.com/pypeclub/pype/issues/141) - - -**improved:** - -- _(pype)_ **Review extractor** has been completely rebuilt. It now supports granular filtering so you can create **multiple outputs** for different tasks, families or hosts. [#103](https://github.com/pypeclub/pype/issues/103), [#166](https://github.com/pypeclub/pype/issues/166), [#165](https://github.com/pypeclub/pype/issues/165) -- _(pype)_ **Burnin** generation had been extended to **support same multi-output filtering** as review extractor [#103](https://github.com/pypeclub/pype/issues/103) -- _(pype)_ Publishing file templates can now be specified in config for each individual family [#114](https://github.com/pypeclub/pype/issues/114) -- _(pype)_ Studio specific plugins can now be appended to pype standard publishing plugins. [#112](https://github.com/pypeclub/pype/issues/112) -- _(nukestudio)_ Reviewable clips no longer need to be previously cut, exported and re-imported to timeline. **Pype can now dynamically cut reviewable quicktimes** from continuous offline footage during publishing. [#23](https://github.com/pypeclub/pype/issues/23) -- _(deadline)_ Deadline can now correctly differentiate between staging and production pype. [#154](https://github.com/pypeclub/pype/issues/154) -- _(deadline)_ `PYPE_PYTHON_EXE` env variable can now be used to direct publishing to explicit python installation. [#120](https://github.com/pypeclub/pype/issues/120) -- _(nuke)_ Nuke now check for new version of loaded data on file open. [#140](https://github.com/pypeclub/pype/issues/140) -- _(nuke)_ frame range and limit checkboxes are now exposed on write node. 
[#119](https://github.com/pypeclub/pype/issues/119) - - - -**fix:** - -- _(nukestudio)_ Project Location was using backslashes which was breaking nukestudio native exporting in certains configurations [#82](https://github.com/pypeclub/pype/issues/82) -- _(nukestudio)_ Duplicity in hierarchy tags was prone to throwing publishing error [#130](https://github.com/pypeclub/pype/issues/130), [#144](https://github.com/pypeclub/pype/issues/144) -- _(ftrack)_ multiple stability improvements [#157](https://github.com/pypeclub/pype/issues/157), [#159](https://github.com/pypeclub/pype/issues/159), [#128](https://github.com/pypeclub/pype/issues/128), [#118](https://github.com/pypeclub/pype/issues/118), [#127](https://github.com/pypeclub/pype/issues/127) -- _(deadline)_ multipart EXRs were stopping review publishing on the farm. They are still not supported for automatic review generation, but the publish will go through correctly without the quicktime. [#155](https://github.com/pypeclub/pype/issues/155) -- _(deadline)_ If deadline is non-responsive it will no longer freeze host when publishing [#149](https://github.com/pypeclub/pype/issues/149) -- _(deadline)_ Sometimes deadline was trying to launch render before all the source data was coppied over. [#137](https://github.com/pypeclub/pype/issues/137) _(harmony)_ Basic implementation of **Toon Boom Harmony** has been added. [#142](https://github.com/pypeclub/pype/issues/142) -- _(nuke)_ Filepath knob wasn't updated properly. [#131](https://github.com/pypeclub/pype/issues/131) -- _(maya)_ When extracting animation, the "Write Color Set" options on the instance were not respected. [#108](https://github.com/pypeclub/pype/issues/108) -- _(maya)_ Attribute overrides for AOV only worked for the legacy render layers. Now it works for new render setup as well [#132](https://github.com/pypeclub/pype/issues/132) -- _(maya)_ Stability and usability improvements in yeti workflow [#104](https://github.com/pypeclub/pype/issues/104) - - - - - -## 2.8.0 ## - -_**release date:** 20 April 2020_ - -**new:** - -- _(pype)_ Option to generate slates from json templates. [PYPE-628] [#26](https://github.com/pypeclub/pype/issues/26) -- _(pype)_ It is now possible to automate loading of published subsets into any scene. Documentation will follow :). [PYPE-611] [#24](https://github.com/pypeclub/pype/issues/24) - -**fix:** - -- _(maya)_ Some Redshift render tokens could break publishing. [PYPE-778] [#33](https://github.com/pypeclub/pype/issues/33) -- _(maya)_ Publish was not preserving maya file extension. [#39](https://github.com/pypeclub/pype/issues/39) -- _(maya)_ Rig output validator was failing on nodes without shapes. [#40](https://github.com/pypeclub/pype/issues/40) -- _(maya)_ Yeti caches can now be properly versioned up in the scene inventory. [#40](https://github.com/pypeclub/pype/issues/40) -- _(nuke)_ Build first workfiles was not accepting jpeg sequences. [#34](https://github.com/pypeclub/pype/issues/34) -- _(deadline)_ Trying to generate ffmpeg review from multipart EXRs no longer crashes publishing. [PYPE-781] -- _(deadline)_ Render publishing is more stable in multiplatform environments. [PYPE-775] - - - - - -## 2.7.0 ## - -_**release date:** 30 March 2020_ - -**new:** - -- _(maya)_ Artist can now choose to load multiple references of the same subset at once [PYPE-646, PYPS-81] -- _(nuke)_ Option to use named OCIO colorspaces for review colour baking. [PYPS-82] -- _(pype)_ Pype can now work with `master` versions for publishing and loading. 
These are non-versioned publishes that are overwritten with the latest version during publish. These are now supported in all the GUIs, but their publishing is deactivated by default. [PYPE-653] -- _(blender)_ Added support for basic blender workflow. We currently support `rig`, `model` and `animation` families. [PYPE-768] -- _(pype)_ Source timecode can now be used in burn-ins. [PYPE-777] -- _(pype)_ Review outputs profiles can now specify a delivery resolution different from the project setting [PYPE-759] -- _(nuke)_ Bookmark to current context is now added automatically to all nuke browser windows. [PYPE-712] - -**change:** - -- _(maya)_ It is now possible to publish camera without baking. Keep in mind that unbaked cameras can't be guaranteed to work in other hosts. [PYPE-595] -- _(maya)_ All the renders from maya are now grouped in the loader by their Layer name. [PYPE-482] -- _(nuke/hiero)_ Any publishes from nuke and hiero can now be versioned independently of the workfile. [PYPE-728] - - -**fix:** - -- _(nuke)_ Mixed slashes caused issues in ocio config path. -- _(pype)_ Intent field in pyblish GUI was passing label instead of value to ftrack. [PYPE-733] -- _(nuke)_ Publishing of pre-renders was inconsistent. [PYPE-766] -- _(maya)_ Handles and frame ranges were inconsistent in various places during publishing. -- _(nuke)_ Nuke was crashing if it ran into certain missing knobs. For example DPX output missing `autocrop` [PYPE-774] -- _(deadline)_ Project overrides were not working properly with farm render publishing. -- _(hiero)_ Problems with single frame plates publishing. -- _(maya)_ Redshift RenderPass tokens were breaking render publishing. [PYPE-778] -- _(nuke)_ Build first workfile was not accepting jpeg sequences. -- _(maya)_ Multipart (Multilayer) EXRs were breaking review publishing due to FFMPEG incompatibility [PYPE-781] - - - - -## 2.6.0 ## - -_**release date:** 9 March 2020_ - -**change:** -- _(maya)_ render publishing has been simplified and made more robust. Render setup layers are now automatically added to publishing subsets and `render globals` family has been replaced with simple `render` [PYPE-570] -- _(avalon)_ change context and workfiles apps have been merged into one, which allows both actions to be performed at the same time. [PYPE-747] -- _(pype)_ thumbnails are now automatically propagated to the asset from the last published subset in the loader -- _(ftrack)_ publishing comment and intent are now being published to ftrack note as well as description. [PYPE-727] -- _(pype)_ when overriding an existing version, the old representations are now overridden instead of the new ones just being appended. (to allow this behaviour, the version validator needs to be disabled. [PYPE-690]) -- _(pype)_ burnin preset has been significantly simplified. It no longer requires passing a function to each field, but only needs the actual text template. To use this, all the current burnin PRESETS MUST BE UPDATED for all the projects. -- _(ftrack)_ credentials are now stored on a per-server basis, so it's possible to switch between ftrack servers without having to log in and out. [PYPE-723] - - -**new:** -- _(pype)_ production and development deployments now have different colour of the tray icon: Orange for Dev and Green for production [PYPE-718] -- _(maya)_ renders can now be attached to a publishable subset rather than creating their own subset. 
For example it is possible to create a reviewable `look` or `model` render and have it correctly attached as a representation of the subsets [PYPE-451] -- _(maya)_ after saving the current scene into a new context (as a new shot for instance), all the scene publishing subsets data gets re-generated automatically to match the new context [PYPE-532] -- _(pype)_ we now support project specific publish, load and create plugins [PYPE-740] -- _(ftrack)_ new action that allows archiving/deleting old published versions. The user can choose how many of the latest versions to keep when the action is run. [PYPE-748, PYPE-715] -- _(ftrack)_ it is now possible to monitor and restart ftrack event server using ftrack action. [PYPE-658] -- _(pype)_ validator that prevents accidental overwrites of previously published versions. [PYPE-680] -- _(avalon)_ avalon core updated to version 5.6.0 -- _(maya)_ added validator to make sure that relative paths are used when publishing arnold standins. -- _(nukestudio)_ it is now possible to extract and publish audio family from clip in nuke studio [PYPE-682] - -**fix**: -- _(maya)_ maya set framerange button was ignoring handles [PYPE-719] -- _(ftrack)_ sync to avalon was sometimes crashing when run on an empty project -- _(nukestudio)_ publishing same shots after they've been previously archived/deleted would result in a crash. [PYPE-737] -- _(nuke)_ slate workflow was breaking in certain scenarios. [PYPE-730] -- _(pype)_ rendering publish workflow has been significantly improved to prevent errors resulting from implicit render collection. [PYPE-665, PYPE-746] -- _(pype)_ launching an application on a non-synced project resulted in an obscure error [PYPE-528] -- _(pype)_ missing keys in burnins no longer result in an error. [PYPE-706] -- _(ftrack)_ create folder structure action was sometimes failing for project managers due to wrong permissions. -- _(Nukestudio)_ using `source` in the start frame tag could result in wrong frame range calculation -- _(ftrack)_ sync to avalon action and event have been improved by catching more edge cases and processing them properly. - - - - -## 2.5.0 ## - -_**release date:** 11 Feb 2020_ - -**change:** -- _(pype)_ added many logs for easier debugging -- _(pype)_ review presets can now be separated between 2d and 3d renders [PYPE-693] -- _(pype)_ anatomy module has been greatly improved to allow for more dynamic publishing and faster debugging [PYPE-685] -- _(pype)_ avalon schemas have been moved from `pype-config` to `pype` repository, for simplification. [PYPE-670] -- _(ftrack)_ updated to latest ftrack API -- _(ftrack)_ publishing comments now appear in ftrack also as a note on version with customisable category [PYPE-645] -- _(ftrack)_ delete asset/subset action has been improved. It is now able to remove multiple entities and descendants of the selected entities [PYPE-361, PYPS-72] -- _(workfiles)_ added date field to workfiles app [PYPE-603] -- _(maya)_ old deprecated loaders have been removed in favour of a single unified reference loader (old scenes will upgrade automatically to the new loader upon opening) [PYPE-633, PYPE-697] -- _(avalon)_ core updated to 5.5.15 [PYPE-671] -- _(nuke)_ library loader is now available in nuke [PYPE-698] - - -**new:** -- _(pype)_ added pype render wrapper to allow rendering on mixed platform farms. [PYPE-634] -- _(pype)_ added `pype launch` command. It lets admins run applications with a dynamically built environment based on the given context. 
[PYPE-634] -- _(pype)_ added support for extracting review sequences with burnins [PYPE-657] -- _(publish)_ users can now set intent next to a comment when publishing. This will then be reflected on an attribute in ftrack. [PYPE-632] -- _(burnin)_ timecode can now be added to burnin -- _(burnin)_ datetime keys can now be added to burnin and anatomy [PYPE-651] -- _(burnin)_ anatomy templates can now be used in burnins. [PYPE=626] -- _(nuke)_ new validator for render resolution -- _(nuke)_ support for attach slate to nuke renders [PYPE-630] -- _(nuke)_ png sequences were added to loaders -- _(maya)_ added maya 2020 compatibility [PYPE-677] -- _(maya)_ ability to publish and load .ASS standin sequences [PYPS-54] -- _(pype)_ thumbnails can now be published and are visible in the loader. `AVALON_THUMBNAIL_ROOT` environment variable needs to be set for this to work [PYPE-573, PYPE-132] -- _(blender)_ base implementation of blender was added with publishing and loading of .blend files [PYPE-612] -- _(ftrack)_ new action for preparing deliveries [PYPE-639] - - -**fix**: -- _(burnin)_ more robust way of finding ffmpeg for burnins. -- _(pype)_ improved UNC paths remapping when sending to farm. -- _(pype)_ float frames sometimes made their way to representation context in database, breaking loaders [PYPE-668] -- _(pype)_ `pype install --force` was failing sometimes [PYPE-600] -- _(pype)_ padding in published files got calculated wrongly sometimes. It is now instead being always read from project anatomy. [PYPE-667] -- _(publish)_ comment publishing was failing in certain situations -- _(ftrack)_ multiple edge case scenario fixes in auto sync and sync-to-avalon action -- _(ftrack)_ sync to avalon now works on empty projects -- _(ftrack)_ thumbnail update event was failing when deleting entities [PYPE-561] -- _(nuke)_ loader applies proper colorspaces from Presets -- _(nuke)_ publishing handles didn't always work correctly [PYPE-686] -- _(maya)_ assembly publishing and loading wasn't working correctly - - - - - - -## 2.4.0 ## - -_**release date:** 9 Dec 2019_ - -**change:** -- _(ftrack)_ version to status ftrack event can now be configured from Presets - - based on preset `presets/ftracc/ftrack_config.json["status_version_to_task"]` -- _(ftrack)_ sync to avalon event has been completely re-written. It now supports most of the project management situations on ftrack including moving, renaming and deleting entities, updating attributes and working with tasks. -- _(ftrack)_ sync to avalon action has been also re-writen. It is now much faster (up to 100 times depending on a project structure), has much better logging and reporting on encountered problems, and is able to handle much more complex situations. -- _(ftrack)_ sync to avalon trigger by checking `auto-sync` toggle on ftrack [PYPE-504] -- _(pype)_ various new features in the REST api -- _(pype)_ new visual identity used across pype -- _(pype)_ started moving all requirements to pip installation rather than vendorising them in pype repository. Due to a few yet unreleased packages, this means that pype can temporarily be only installed in the offline mode. - -**new:** -- _(nuke)_ support for publishing gizmos and loading them as viewer processes -- _(nuke)_ support for publishing nuke nodes from backdrops and loading them back -- _(pype)_ burnins can now work with start and end frames as keys - - use keys `{frame_start}`, `{frame_end}` and `{current_frame}` in burnin preset to use them. 
[PYPS-44,PYPS-73, PYPE-602] -- _(pype)_ option to filter logs by user and level in loggin GUI -- _(pype)_ image family added to standalone publisher [PYPE-574] -- _(pype)_ matchmove family added to standalone publisher [PYPE-574] -- _(nuke)_ validator for comparing arbitrary knobs with values from presets -- _(maya)_ option to force maya to copy textures in the new look publish rather than hardlinking them -- _(pype)_ comments from pyblish GUI are now being added to ftrack version -- _(maya)_ validator for checking outdated containers in the scene -- _(maya)_ option to publish and load arnold standin sequence [PYPE-579, PYPS-54] - -**fix**: -- _(pype)_ burnins were not respecting codec of the input video -- _(nuke)_ lot's of various nuke and nuke studio fixes across the board [PYPS-45] -- _(pype)_ workfiles app is not launching with the start of the app by default [PYPE-569] -- _(ftrack)_ ftrack integration during publishing was failing under certain situations [PYPS-66] -- _(pype)_ minor fixes in REST api -- _(ftrack)_ status change event was crashing when the target status was missing [PYPS-68] -- _(ftrack)_ actions will try to reconnect if they fail for some reason -- _(maya)_ problems with fps mapping when using float FPS values -- _(deadline)_ overall improvements to deadline publishing -- _(setup)_ environment variables are now remapped on the fly based on the platform pype is running on. This fixes many issues in mixed platform environments. - - - - -## 2.3.6 # - -_**release date:** 27 Nov 2019_ - -**hotfix**: -- _(ftrack)_ was hiding important debug logo -- _(nuke)_ crashes during workfile publishing -- _(ftrack)_ event server crashes because of signal problems -- _(muster)_ problems with muster render submissions -- _(ftrack)_ thumbnail update event syntax errors - - - - -## 2.3.0 ## - -_release date: 6 Oct 2019_ - -**new**: -- _(maya)_ support for yeti rigs and yeti caches -- _(maya)_ validator for comparing arbitrary attributes against ftrack -- _(pype)_ burnins can now show current date and time -- _(muster)_ pools can now be set in render globals in maya -- _(pype)_ Rest API has been implemented in beta stage -- _(nuke)_ LUT loader has been added -- _(pype)_ rudimentary user module has been added as preparation for user management -- _(pype)_ a simple logging GUI has been added to pype tray -- _(nuke)_ nuke can now bake input process into mov -- _(maya)_ imported models now have selection handle displayed by defaulting -- _(avalon)_ it's is now possible to load multiple assets at once using loader -- _(maya)_ added ability to automatically connect yeti rig to a mesh upon loading - -**changed**: -- _(ftrack)_ event server now runs two parallel processes and is able to keep queue of events to process. 
-- _(nuke)_ task name is now added to all rendered subsets -- _(pype)_ adding more families to standalone publisher -- _(pype)_ standalone publisher now uses pyblish-lite -- _(pype)_ standalone publisher can now create review quicktimes -- _(ftrack)_ queries to ftrack were sped up -- _(ftrack)_ multiple ftrack action have been deprecated -- _(avalon)_ avalon upstream has been updated to 5.5.0 -- _(nukestudio)_ published transforms can now be animated -- - -**fix**: -- _(maya)_ fps popup button didn't work in some cases -- _(maya)_ geometry instances and references in maya were losing shader assignments -- _(muster)_ muster rendering templates were not working correctly -- _(maya)_ arnold tx texture conversion wasn't respecting colorspace set by the artist -- _(pype)_ problems with avalon db sync -- _(maya)_ ftrack was rounding FPS making it inconsistent -- _(pype)_ wrong icon names in Creator -- _(maya)_ scene inventory wasn't showing anything if representation was removed from database after it's been loaded to the scene -- _(nukestudio)_ multiple bugs squashed -- _(loader)_ loader was taking long time to show all the loading action when first launcher in maya - -## 2.2.0 ## -_**release date:** 8 Sept 2019_ - -**new**: -- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts -- _(nuke)_ option to choose deadline chunk size on write nodes -- _(nukestudio)_ added option to publish soft effects (subTrackItems) from NukeStudio as subsets including LUT files. these can then be loaded in nuke or NukeStudio -- _(nuke)_ option to build nuke script from previously published latest versions of plate and render subsets. -- _(nuke)_ nuke writes now have deadline tab. -- _(ftrack)_ Prepare Project action can now be used for creating the base folder structure on disk and in ftrack, setting up all the initial project attributes and it automatically prepares `pype_project_config` folder for the given project. -- _(clockify)_ Added support for time tracking in clockify. This currently in addition to ftrack time logs, but does not completely replace them. -- _(pype)_ any attributes in Creator and Loader plugins can now be customised using pype preset system - -**changed**: -- nukestudio now uses workio API for workfiles -- _(maya)_ "FIX FPS" prompt in maya now appears in the middle of the screen -- _(muster)_ can now be configured with custom templates -- _(pype)_ global publishing plugins can now be configured using presets as well as host specific ones - - -**fix**: -- wrong version retrieval from path in certain scenarios -- nuke reset resolution wasn't working in certain scenarios - -## 2.1.0 ## -_release date: 6 Aug 2019_ - -A large cleanup release. Most of the change are under the hood. - -**new**: -- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts -- _(pype)_ Added configurable option to add burnins to any generated quicktimes -- _(ftrack)_ Action that identifies what machines pype is running on. -- _(system)_ unify subprocess calls -- _(maya)_ add audio to review quicktimes -- _(nuke)_ add crop before write node to prevent overscan problems in ffmpeg -- **Nuke Studio** publishing and workfiles support -- **Muster** render manager support -- _(nuke)_ Framerange, FPS and Resolution are set automatically at startup -- _(maya)_ Ability to load published sequences as image planes -- _(system)_ Ftrack event that sets asset folder permissions based on task assignees in ftrack. 
- _(maya)_ Pyblish plugin that allows validation of maya attributes -- _(system)_ added better startup logging to tray debug, including basic connection information -- _(avalon)_ option to group published subsets to groups in the loader -- _(avalon)_ loader family filters are working now - -**changed**: -- change multiple key attributes to unify their behaviour across the pipeline - - `frameRate` to `fps` - - `startFrame` to `frameStart` - - `endFrame` to `frameEnd` - - `fstart` to `frameStart` - - `fend` to `frameEnd` - - `handle_start` to `handleStart` - - `handle_end` to `handleEnd` - - `resolution_width` to `resolutionWidth` - - `resolution_height` to `resolutionHeight` - - `pixel_aspect` to `pixelAspect` - -- _(nuke)_ write nodes are now created inside a group with only some attributes editable by the artist -- rendered frames are now deleted from the temporary location after their publishing is finished. -- _(ftrack)_ RV action can now be launched from any entity -- after publishing, only the refresh button is now available in pyblish UI -- added context instance to pyblish-lite so that the artist knows if a context plugin fails -- _(avalon)_ allow opening selected files using the enter key -- _(avalon)_ core updated to v5.2.9 with our forked changes on top - -**fix**: -- faster hierarchy retrieval from db -- _(nuke)_ A lot of stability enhancements -- _(nuke studio)_ A lot of stability enhancements -- _(nuke)_ now only renders a single write node on farm -- _(ftrack)_ pype would crash when launching a project level task -- work directory was sometimes not being created correctly -- major pype.lib cleanup. Removal of unused functions, merging of those that were doing the same thing and general house cleaning. -- _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner diff --git a/website/docs/upgrade_notes.md b/website/docs/upgrade_notes.md deleted file mode 100644 index 8231cf997d..0000000000 --- a/website/docs/upgrade_notes.md +++ /dev/null @@ -1,165 +0,0 @@ ---- -id: update_notes -title: Update Notes -sidebar_label: Update Notes ---- - - - -## **Updating to 2.13.0** ## - -### MongoDB - -**Must** - -Due to changes in how tasks are stored in the database (we added task types and the possibility of more arbitrary data), we must take a few precautions when updating. -1. Make sure that ftrack event server with sync to avalon is NOT running during the update. -2. Any project that is to be worked on with 2.13 must be synced from ftrack to avalon with the updated sync to avalon action, or using an updated event server sync to avalon event. - -If a 2.12 event server runs when trying to update the project sync with 2.13, it will override any changes. - -### Nuke Studio / hiero - -Make sure to re-generate pype tags and replace any `task` tags on your shots with the new ones. This will allow you to make multiple tasks of the same type, but with different task names at the same time. - -### Nuke - -Due to a minor update to the nuke write node, artists will be prompted to update their write nodes before being able to publish any old shots. There is a "repair" action for this in the publisher, so it doesn't have to be done manually. 
- - - -## **Updating to 2.12.0** ## - -### Apps and tools - -**Must** - -run the Create/Update Custom attributes action (to update the custom attributes group) -check if the studio has set custom intent values and move the values to ~/config/presets/global/intent.json - -**Optional** - -Set true/false on applications and tools by studio usage (reduces the app list in Ftrack and the time for registering Ftrack actions) - - - - -## **Updating to 2.11.0** ## - -### Maya in deadline - -We added our own maya deadline plugin to make render management easier. It operates the same as standard mayaBatch in deadline, but allows us to separate Pype submitted jobs from the standard submitter. You'll need to follow this guide to install it: [install pype deadline](https://pype.club/docs/admin_hosts#pype-dealine-supplement-code) - - - - -## **Updating to 2.9.0** ## - -### Review and Burnin PRESETS - -This release introduces a major update to working with review and burnin presets. They can now be much more granular and can target extremely specific use cases. The change is backwards compatible with the previous format of review and burnin presets; however, we highly recommend updating all the presets to the new format. Documentation on what this looks like can be found on the main pype [documentation page](https://pype.club/docs/admin_presets_plugins#publishjson). - -### Multiroot and storages - -With the support of multiroot projects, we removed the old `storage.json` from the configuration and replaced it with the simpler `config/anatomy/roots.json`. This is a required change, but only needs to be done once per studio during the update to 2.9.0. [Read More](https://pype.club/docs/next/admin_config#roots) - - - - -## **Updating to 2.7.0** ## - -### Master Versions -To activate the `master` version workflow you need to enable the `IntegrateMasterVersion` plugin in `config/presets/plugins/global/publish.json`: - -``` -"IntegrateMasterVersion": {"enabled": true}, -``` - -### Ftrack - -Make sure that the `intent` attribute in ftrack is set correctly. It should follow this setup unless you have your own custom values: -``` -{ - "label": "Intent", - "key": "intent", - "type": "enumerator", - "entity_type": "assetversion", - "group": "avalon", - "config": { - "multiselect": false, - "data": [ - {"test": "Test"}, - {"wip": "WIP"}, - {"final": "Final"} - ] - } -} -``` - - - - -## **Updating to 2.6.0** ## - -### Dev vs Prod - -If you want to differentiate between dev and prod deployments of pype, you need to add a `config.ini` file to the `pype-setup/pypeapp` folder with the following content: - -``` -[Default] -dev=true -``` - -### Ftrack - -You will have to log in to ftrack in pype after the update. You should be automatically prompted with the ftrack login window when you launch the 2.6 release for the first time. - -The event server has to be restarted after the update to enable the ability to control it via an action. - -### Presets - -There is a major change in the way burnin presets are stored. We simplified the preset format; however, that means the currently running production configs need to be tweaked to match the new format. 
- -:::note Example of converting burnin preset from 2.5 to 2.6 - -2.5 burnin preset - -``` -"burnins":{ - "TOP_LEFT": { - "function": "text", - "text": "{dd}/{mm}/{yyyy}" - }, - "TOP_CENTERED": { - "function": "text", - "text": "" - }, - "TOP_RIGHT": { - "function": "text", - "text": "v{version:0>3}" - }, - "BOTTOM_LEFT": { - "function": "text", - "text": "{frame_start}-{current_frame}-{frame_end}" - }, - "BOTTOM_CENTERED": { - "function": "text", - "text": "{asset}" - }, - "BOTTOM_RIGHT": { - "function": "frame_numbers", - "text": "{username}" - } -``` - -2.6 burnin preset -``` -"burnins":{ - "TOP_LEFT": "{dd}/{mm}/{yyyy}", - "TOP_CENTER": "", - "TOP_RIGHT": "v{version:0>3}" - "BOTTOM_LEFT": "{frame_start}-{current_frame}-{frame_end}", - "BOTTOM_CENTERED": "{asset}", - "BOTTOM_RIGHT": "{username}" -} -``` diff --git a/website/sidebars.js b/website/sidebars.js index 9d60a5811c..c4d07e728f 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -109,11 +109,7 @@ module.exports = { "admin_hosts_tvpaint" ], }, - { - type: "category", - label: "Releases", - items: ["changelog", "update_notes"], - }, + "admin_releases", { type: "category", collapsed: false, From 7466063001d7efce8ef63302e22ac50607be20bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 23 Aug 2022 10:41:02 +0200 Subject: [PATCH 0568/2550] Fix typo Co-authored-by: Milan Kolar --- website/docs/system_introduction.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/system_introduction.md b/website/docs/system_introduction.md index b8a2cea487..05627b5359 100644 --- a/website/docs/system_introduction.md +++ b/website/docs/system_introduction.md @@ -17,7 +17,7 @@ various usage scenarios. You can find detailed breakdown of technical requirements [here](dev_requirements), but in general OpenPype should be able to operate in most studios fairly quickly. The main obstacles are usually related to workflows and habits, that -might now be fully compatible with what OpenPype is expecting or enforcing. It is recommended to go through artists [key concepts](artist_concepts) to get idea about basics. +might not be fully compatible with what OpenPype is expecting or enforcing. It is recommended to go through artists [key concepts](artist_concepts) to get idea about basics. Keep in mind that if you run into any workflows that are not supported, it's usually just because we haven't hit that particular case and it can most likely be added upon request. From c69736a597cf5b20d6090a5c20f0d05494679852 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:52:07 +0200 Subject: [PATCH 0569/2550] tvpaint is installed as object ingeriting HostBase --- openpype/hosts/tvpaint/api/__init__.py | 12 +- openpype/hosts/tvpaint/api/launch_script.py | 7 +- openpype/hosts/tvpaint/api/pipeline.py | 214 ++++++++++++-------- 3 files changed, 131 insertions(+), 102 deletions(-) diff --git a/openpype/hosts/tvpaint/api/__init__.py b/openpype/hosts/tvpaint/api/__init__.py index 43d411d8f9..b07658c583 100644 --- a/openpype/hosts/tvpaint/api/__init__.py +++ b/openpype/hosts/tvpaint/api/__init__.py @@ -5,11 +5,7 @@ from . import workio from . import pipeline from . 
import plugin from .pipeline import ( - install, - maintained_selection, - remove_instance, - list_instances, - ls + TVPaintHost, ) from .workio import ( @@ -31,11 +27,7 @@ __all__ = ( "pipeline", "plugin", - "install", - "maintained_selection", - "remove_instance", - "list_instances", - "ls", + "TVPaintHost", # Workfiles API "open_file", diff --git a/openpype/hosts/tvpaint/api/launch_script.py b/openpype/hosts/tvpaint/api/launch_script.py index 0b25027fc6..c474a10529 100644 --- a/openpype/hosts/tvpaint/api/launch_script.py +++ b/openpype/hosts/tvpaint/api/launch_script.py @@ -10,10 +10,10 @@ from Qt import QtWidgets, QtCore, QtGui from openpype import style from openpype.pipeline import install_host -from openpype.hosts.tvpaint.api.communication_server import ( - CommunicationWrapper +from openpype.hosts.tvpaint.api import ( + TVPaintHost, + CommunicationWrapper, ) -from openpype.hosts.tvpaint import api as tvpaint_host log = logging.getLogger(__name__) @@ -30,6 +30,7 @@ def main(launch_args): # - QApplicaiton is also main thread/event loop of the server qt_app = QtWidgets.QApplication([]) + tvpaint_host = TVPaintHost() # Execute pipeline installation install_host(tvpaint_host) diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 427c927264..6c90de2aa9 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -1,6 +1,5 @@ import os import json -import contextlib import tempfile import logging @@ -9,7 +8,8 @@ import requests import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.hosts import tvpaint +from openpype.host import HostBase, IWorkfileHost, ILoadHost +from openpype.hosts.tvpaint import TVPAINT_ROOT_DIR from openpype.api import get_current_project_settings from openpype.lib import register_event_callback from openpype.pipeline import ( @@ -26,11 +26,6 @@ from .lib import ( log = logging.getLogger(__name__) -HOST_DIR = os.path.dirname(os.path.abspath(tvpaint.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") METADATA_SECTION = "avalon" SECTION_NAME_CONTEXT = "context" @@ -63,30 +58,132 @@ instances=2 """ -def install(): - """Install TVPaint-specific functionality.""" +class TVPaintHost(HostBase, IWorkfileHost, ILoadHost): + name = "tvpaint" - log.info("OpenPype - Installing TVPaint integration") - legacy_io.install() + def install(self): + """Install TVPaint-specific functionality.""" - # Create workdir folder if does not exist yet - workdir = legacy_io.Session["AVALON_WORKDIR"] - if not os.path.exists(workdir): - os.makedirs(workdir) + log.info("OpenPype - Installing TVPaint integration") + legacy_io.install() - pyblish.api.register_host("tvpaint") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) + # Create workdir folder if does not exist yet + workdir = legacy_io.Session["AVALON_WORKDIR"] + if not os.path.exists(workdir): + os.makedirs(workdir) - registered_callbacks = ( - pyblish.api.registered_callbacks().get("instanceToggled") or [] - ) - if on_instance_toggle not in registered_callbacks: - pyblish.api.register_callback("instanceToggled", on_instance_toggle) + plugins_dir = os.path.join(TVPAINT_ROOT_DIR, "plugins") + publish_dir = os.path.join(plugins_dir, "publish") + load_dir = os.path.join(plugins_dir, 
"load") + create_dir = os.path.join(plugins_dir, "create") - register_event_callback("application.launched", initial_launch) - register_event_callback("application.exit", application_exit) + pyblish.api.register_host("tvpaint") + pyblish.api.register_plugin_path(publish_dir) + register_loader_plugin_path(load_dir) + register_creator_plugin_path(create_dir) + + registered_callbacks = ( + pyblish.api.registered_callbacks().get("instanceToggled") or [] + ) + if self.on_instance_toggle not in registered_callbacks: + pyblish.api.register_callback( + "instanceToggled", self.on_instance_toggle + ) + + register_event_callback("application.launched", self.initial_launch) + register_event_callback("application.exit", self.application_exit) + + def open_workfile(self, filepath): + george_script = "tv_LoadProject '\"'\"{}\"'\"'".format( + filepath.replace("\\", "/") + ) + return execute_george_through_file(george_script) + + def save_workfile(self, filepath=None): + if not filepath: + filepath = self.get_current_workfile() + context = { + "project": legacy_io.Session["AVALON_PROJECT"], + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] + } + save_current_workfile_context(context) + + # Execute george script to save workfile. + george_script = "tv_SaveProject {}".format(filepath.replace("\\", "/")) + return execute_george(george_script) + + def work_root(self, session): + return session["AVALON_WORKDIR"] + + def get_current_workfile(self): + return execute_george("tv_GetProjectName") + + def workfile_has_unsaved_changes(self): + return None + + def get_workfile_extensions(self): + return [".tvpp"] + + def get_containers(self): + return get_containers() + + def initial_launch(self): + # Setup project settings if its the template that's launched. + # TODO also check for template creation when it's possible to define + # templates + last_workfile = os.environ.get("AVALON_LAST_WORKFILE") + if not last_workfile or os.path.exists(last_workfile): + return + + log.info("Setting up project...") + set_context_settings() + + def application_exit(self): + """Logic related to TimerManager. + + Todo: + This should be handled out of TVPaint integration logic. + """ + + data = get_current_project_settings() + stop_timer = data["tvpaint"]["stop_timer_on_application_exit"] + + if not stop_timer: + return + + # Stop application timer. 
+ webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) + requests.post(rest_api_url) + + def on_instance_toggle(self, instance, old_value, new_value): + """Update instance data in workfile on publish toggle.""" + # Review may not have real instance in wokrfile metadata + if not instance.data.get("uuid"): + return + + instance_id = instance.data["uuid"] + found_idx = None + current_instances = list_instances() + for idx, workfile_instance in enumerate(current_instances): + if workfile_instance["uuid"] == instance_id: + found_idx = idx + break + + if found_idx is None: + return + + if "active" in current_instances[found_idx]: + current_instances[found_idx]["active"] = new_value + self.write_instances(current_instances) + + def list_instances(self): + """List all created instances from current workfile.""" + return list_instances() + + def write_instances(self, data): + return write_instances(data) def containerise( @@ -116,7 +213,7 @@ def containerise( "representation": str(context["representation"]["_id"]) } if current_containers is None: - current_containers = ls() + current_containers = get_containers() # Add container to containers list current_containers.append(container_data) @@ -127,15 +224,6 @@ def containerise( return container_data -@contextlib.contextmanager -def maintained_selection(): - # TODO implement logic - try: - yield - finally: - pass - - def split_metadata_string(text, chunk_length=None): """Split string by length. @@ -359,12 +447,7 @@ def write_instances(data): return write_workfile_metadata(SECTION_NAME_INSTANCES, data) -# Backwards compatibility -def _write_instances(*args, **kwargs): - return write_instances(*args, **kwargs) - - -def ls(): +def get_containers(): output = get_workfile_metadata(SECTION_NAME_CONTAINERS) if output: for item in output: @@ -376,53 +459,6 @@ def ls(): return output -def on_instance_toggle(instance, old_value, new_value): - """Update instance data in workfile on publish toggle.""" - # Review may not have real instance in wokrfile metadata - if not instance.data.get("uuid"): - return - - instance_id = instance.data["uuid"] - found_idx = None - current_instances = list_instances() - for idx, workfile_instance in enumerate(current_instances): - if workfile_instance["uuid"] == instance_id: - found_idx = idx - break - - if found_idx is None: - return - - if "active" in current_instances[found_idx]: - current_instances[found_idx]["active"] = new_value - write_instances(current_instances) - - -def initial_launch(): - # Setup project settings if its the template that's launched. - # TODO also check for template creation when it's possible to define - # templates - last_workfile = os.environ.get("AVALON_LAST_WORKFILE") - if not last_workfile or os.path.exists(last_workfile): - return - - log.info("Setting up project...") - set_context_settings() - - -def application_exit(): - data = get_current_project_settings() - stop_timer = data["tvpaint"]["stop_timer_on_application_exit"] - - if not stop_timer: - return - - # Stop application timer. - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) - requests.post(rest_api_url) - - def set_context_settings(asset_doc=None): """Set workfile settings by asset document data. 
From 0b473de76bbe3548a2d43cba7c083a415b5d7f78 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:53:27 +0200 Subject: [PATCH 0570/2550] changed imports in plugin logic --- openpype/hosts/tvpaint/api/plugin.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/tvpaint/api/plugin.py b/openpype/hosts/tvpaint/api/plugin.py index 15ad8905e0..da456e7067 100644 --- a/openpype/hosts/tvpaint/api/plugin.py +++ b/openpype/hosts/tvpaint/api/plugin.py @@ -4,11 +4,11 @@ import uuid from openpype.pipeline import ( LegacyCreator, LoaderPlugin, + registered_host, ) -from openpype.hosts.tvpaint.api import ( - pipeline, - lib -) + +from .lib import get_layers_data +from .pipeline import get_current_workfile_context class Creator(LegacyCreator): @@ -22,7 +22,7 @@ class Creator(LegacyCreator): dynamic_data = super(Creator, cls).get_dynamic_data(*args, **kwargs) # Change asset and name by current workfile context - workfile_context = pipeline.get_current_workfile_context() + workfile_context = get_current_workfile_context() asset_name = workfile_context.get("asset") task_name = workfile_context.get("task") if "asset" not in dynamic_data and asset_name: @@ -67,10 +67,12 @@ class Creator(LegacyCreator): self.log.debug( "Storing instance data to workfile. {}".format(str(data)) ) - return pipeline.write_instances(data) + host = registered_host() + return host.write_instances(data) def process(self): - data = pipeline.list_instances() + host = registered_host() + data = host.list_instances() data.append(self.data) self.write_instances(data) @@ -108,7 +110,7 @@ class Loader(LoaderPlugin): counter_regex = re.compile(r"_(\d{3})$") higher_counter = 0 - for layer in lib.get_layers_data(): + for layer in get_layers_data(): layer_name = layer["name"] if not layer_name.startswith(layer_name_base): continue From bcad1ab7fd120938593d59d35fc39cae2677cbc8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:53:43 +0200 Subject: [PATCH 0571/2550] changed import of CommunicationsWrapper --- openpype/hosts/tvpaint/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/api/lib.py b/openpype/hosts/tvpaint/api/lib.py index a341f48859..5e64773b8e 100644 --- a/openpype/hosts/tvpaint/api/lib.py +++ b/openpype/hosts/tvpaint/api/lib.py @@ -2,7 +2,7 @@ import os import logging import tempfile -from . 
import CommunicationWrapper +from .communication_server import CommunicationWrapper log = logging.getLogger(__name__) From 88b900bda06e7b13bee7e62a2185cb4eefab3c65 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:54:58 +0200 Subject: [PATCH 0572/2550] use explicit imports --- .../plugins/create/create_render_layer.py | 22 +++++---- .../plugins/create/create_render_pass.py | 10 ++-- .../hosts/tvpaint/plugins/load/load_image.py | 5 +- .../plugins/load/load_reference_image.py | 41 +++++++++++----- .../hosts/tvpaint/plugins/load/load_sound.py | 7 ++- .../tvpaint/plugins/load/load_workfile.py | 21 +++++--- .../plugins/publish/collect_workfile_data.py | 49 ++++++++++++------- .../plugins/publish/extract_sequence.py | 20 +++++--- .../publish/increment_workfile_version.py | 7 +-- .../plugins/publish/validate_asset_name.py | 9 ++-- .../tvpaint/plugins/publish/validate_marks.py | 6 +-- .../plugins/publish/validate_start_frame.py | 6 +-- .../publish/validate_workfile_metadata.py | 6 +-- 13 files changed, 132 insertions(+), 77 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py index 3b5bd47189..a085830e96 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py @@ -1,11 +1,15 @@ -from openpype.pipeline import CreatorError from openpype.lib import prepare_template_data +from openpype.pipeline import CreatorError from openpype.hosts.tvpaint.api import ( plugin, - pipeline, - lib, CommunicationWrapper ) +from openpype.hosts.tvpaint.api.lib import ( + get_layers_data, + get_groups_data, + execute_george_through_file, +) +from openpype.hosts.tvpaint.api.pipeline import list_instances class CreateRenderlayer(plugin.Creator): @@ -63,7 +67,7 @@ class CreateRenderlayer(plugin.Creator): # Validate that communication is initialized if CommunicationWrapper.communicator: # Get currently selected layers - layers_data = lib.get_layers_data() + layers_data = get_layers_data() selected_layers = [ layer @@ -81,8 +85,8 @@ class CreateRenderlayer(plugin.Creator): def process(self): self.log.debug("Query data from workfile.") - instances = pipeline.list_instances() - layers_data = lib.get_layers_data() + instances = list_instances() + layers_data = get_layers_data() self.log.debug("Checking for selection groups.") # Collect group ids from selection @@ -109,7 +113,7 @@ class CreateRenderlayer(plugin.Creator): self.log.debug(f"Selected group id is \"{group_id}\".") self.data["group_id"] = group_id - group_data = lib.get_groups_data() + group_data = get_groups_data() group_name = None for group in group_data: if group["group_id"] == group_id: @@ -176,7 +180,7 @@ class CreateRenderlayer(plugin.Creator): return self.log.debug("Querying groups data from workfile.") - groups_data = lib.get_groups_data() + groups_data = get_groups_data() self.log.debug("Changing name of the group.") selected_group = None @@ -195,7 +199,7 @@ class CreateRenderlayer(plugin.Creator): b=selected_group["blue"], name=new_group_name ) - lib.execute_george_through_file(rename_script) + execute_george_through_file(rename_script) self.log.info( f"Name of group with index {group_id}" diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py index 26fa8ac51a..a44cb29f20 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py +++ 
b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py @@ -2,10 +2,10 @@ from openpype.pipeline import CreatorError from openpype.lib import prepare_template_data from openpype.hosts.tvpaint.api import ( plugin, - pipeline, - lib, CommunicationWrapper ) +from openpype.hosts.tvpaint.api.lib import get_layers_data +from openpype.hosts.tvpaint.api.pipeline import list_instances class CreateRenderPass(plugin.Creator): @@ -54,7 +54,7 @@ class CreateRenderPass(plugin.Creator): # Validate that communication is initialized if CommunicationWrapper.communicator: # Get currently selected layers - layers_data = lib.layers_data() + layers_data = get_layers_data() selected_layers = [ layer @@ -72,8 +72,8 @@ class CreateRenderPass(plugin.Creator): def process(self): self.log.debug("Query data from workfile.") - instances = pipeline.list_instances() - layers_data = lib.layers_data() + instances = list_instances() + layers_data = get_layers_data() self.log.debug("Checking selection.") # Get all selected layers and their group ids diff --git a/openpype/hosts/tvpaint/plugins/load/load_image.py b/openpype/hosts/tvpaint/plugins/load/load_image.py index f861d0119e..151db94135 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_image.py +++ b/openpype/hosts/tvpaint/plugins/load/load_image.py @@ -1,5 +1,6 @@ import qargparse -from openpype.hosts.tvpaint.api import lib, plugin +from openpype.hosts.tvpaint.api import plugin +from openpype.hosts.tvpaint.api.lib import execute_george_through_file class ImportImage(plugin.Loader): @@ -79,4 +80,4 @@ class ImportImage(plugin.Loader): layer_name, load_options_str ) - return lib.execute_george_through_file(george_script) + return execute_george_through_file(george_script) diff --git a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py index af1a4a9b6b..393236fba6 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py +++ b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py @@ -1,7 +1,21 @@ import collections + import qargparse -from openpype.pipeline import get_representation_context -from openpype.hosts.tvpaint.api import lib, pipeline, plugin + +from openpype.pipeline import ( + get_representation_context, + register_host, +) +from openpype.hosts.tvpaint.api import plugin +from openpype.hosts.tvpaint.api.lib import ( + get_layers_data, + execute_george_through_file, +) +from openpype.hosts.tvpaint.api.pipeline import ( + write_workfile_metadata, + SECTION_NAME_CONTAINERS, + containerise, +) class LoadImage(plugin.Loader): @@ -79,10 +93,10 @@ class LoadImage(plugin.Loader): load_options_str ) - lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) loaded_layer = None - layers = lib.layers_data() + layers = get_layers_data() for layer in layers: if layer["name"] == layer_name: loaded_layer = layer @@ -95,7 +109,7 @@ class LoadImage(plugin.Loader): layer_names = [loaded_layer["name"]] namespace = namespace or layer_name - return pipeline.containerise( + return containerise( name=name, namespace=namespace, members=layer_names, @@ -109,7 +123,7 @@ class LoadImage(plugin.Loader): return if layers is None: - layers = lib.layers_data() + layers = get_layers_data() available_ids = set(layer["layer_id"] for layer in layers) @@ -152,14 +166,15 @@ class LoadImage(plugin.Loader): line = "tv_layerkill {}".format(layer_id) george_script_lines.append(line) george_script = "\n".join(george_script_lines) - 
lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) def _remove_container(self, container, members=None): if not container: return representation = container["representation"] members = self.get_members_from_container(container) - current_containers = pipeline.ls() + host = register_host() + current_containers = host.get_containers() pop_idx = None for idx, cur_con in enumerate(current_containers): cur_members = self.get_members_from_container(cur_con) @@ -179,8 +194,8 @@ class LoadImage(plugin.Loader): return current_containers.pop(pop_idx) - pipeline.write_workfile_metadata( - pipeline.SECTION_NAME_CONTAINERS, current_containers + write_workfile_metadata( + SECTION_NAME_CONTAINERS, current_containers ) def remove(self, container): @@ -214,7 +229,7 @@ class LoadImage(plugin.Loader): break old_layers = [] - layers = lib.layers_data() + layers = get_layers_data() previous_layer_ids = set(layer["layer_id"] for layer in layers) if old_layers_are_ids: for layer in layers: @@ -263,7 +278,7 @@ class LoadImage(plugin.Loader): new_container = self.load(context, name, namespace, {}) new_layer_names = self.get_members_from_container(new_container) - layers = lib.layers_data() + layers = get_layers_data() new_layers = [] for layer in layers: @@ -304,4 +319,4 @@ class LoadImage(plugin.Loader): # Execute george scripts if there are any if george_script_lines: george_script = "\n".join(george_script_lines) - lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) diff --git a/openpype/hosts/tvpaint/plugins/load/load_sound.py b/openpype/hosts/tvpaint/plugins/load/load_sound.py index 3f42370f5c..f312db262a 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_sound.py +++ b/openpype/hosts/tvpaint/plugins/load/load_sound.py @@ -1,6 +1,9 @@ import os import tempfile -from openpype.hosts.tvpaint.api import lib, plugin +from openpype.hosts.tvpaint.api import plugin +from openpype.hosts.tvpaint.api.lib import ( + execute_george_through_file, +) class ImportSound(plugin.Loader): @@ -64,7 +67,7 @@ class ImportSound(plugin.Loader): ) self.log.info("*** George script:\n{}\n***".format(george_script)) # Execute geoge script - lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) # Read output file lines = [] diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index a99b300730..fc7588f56e 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -11,7 +11,13 @@ from openpype.pipeline.workfile import ( get_last_workfile_with_version, ) from openpype.pipeline.template_data import get_template_data_with_names -from openpype.hosts.tvpaint.api import lib, pipeline, plugin +from openpype.hosts.tvpaint.api import plugin +from openpype.hosts.tvpaint.api.lib import ( + execute_george_through_file, +) +from openpype.hosts.tvpaint.api.pipeline import ( + get_current_workfile_context, +) class LoadWorkfile(plugin.Loader): @@ -26,9 +32,9 @@ class LoadWorkfile(plugin.Loader): # Load context of current workfile as first thing # - which context and extension has host = registered_host() - current_file = host.current_file() + current_file = host.get_current_workfile() - context = pipeline.get_current_workfile_context() + context = get_current_workfile_context() filepath = self.fname.replace("\\", "/") @@ -40,7 +46,7 @@ class LoadWorkfile(plugin.Loader): george_script = "tv_LoadProject 
'\"'\"{}\"'\"'".format( filepath ) - lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) # Save workfile. host_name = "tvpaint" @@ -69,12 +75,13 @@ class LoadWorkfile(plugin.Loader): file_template = anatomy.templates[template_key]["file"] # Define saving file extension + extensions = host.get_workfile_extensions() if current_file: # Match the extension of current file _, extension = os.path.splitext(current_file) else: # Fall back to the first extension supported for this host. - extension = host.file_extensions()[0] + extension = extensions[0] data["ext"] = extension @@ -83,7 +90,7 @@ class LoadWorkfile(plugin.Loader): folder_template, data ) version = get_last_workfile_with_version( - work_root, file_template, data, host.file_extensions() + work_root, file_template, data, extensions )[1] if version is None: @@ -97,4 +104,4 @@ class LoadWorkfile(plugin.Loader): file_template, data ) path = os.path.join(work_root, filename) - host.save_file(path) + host.save_workfile(path) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index c59ef82f85..8fe71a4a46 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -5,7 +5,22 @@ import tempfile import pyblish.api from openpype.pipeline import legacy_io -from openpype.hosts.tvpaint.api import pipeline, lib +from openpype.hosts.tvpaint.api.lib import ( + execute_george, + execute_george_through_file, + get_layers_data, + get_groups_data, +) +from openpype.hosts.tvpaint.api.pipeline import ( + SECTION_NAME_CONTEXT, + SECTION_NAME_INSTANCES, + SECTION_NAME_CONTAINERS, + + get_workfile_metadata_string, + write_workfile_metadata, + get_current_workfile_context, + list_instances, +) class ResetTVPaintWorkfileMetadata(pyblish.api.Action): @@ -15,12 +30,12 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action): def process(self, context, plugin): metadata_keys = { - pipeline.SECTION_NAME_CONTEXT: {}, - pipeline.SECTION_NAME_INSTANCES: [], - pipeline.SECTION_NAME_CONTAINERS: [] + SECTION_NAME_CONTEXT: {}, + SECTION_NAME_INSTANCES: [], + SECTION_NAME_CONTAINERS: [] } for metadata_key, default in metadata_keys.items(): - json_string = pipeline.get_workfile_metadata_string(metadata_key) + json_string = get_workfile_metadata_string(metadata_key) if not json_string: continue @@ -35,7 +50,7 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action): ).format(metadata_key, default, json_string), exc_info=True ) - pipeline.write_workfile_metadata(metadata_key, default) + write_workfile_metadata(metadata_key, default) class CollectWorkfileData(pyblish.api.ContextPlugin): @@ -45,8 +60,8 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): actions = [ResetTVPaintWorkfileMetadata] def process(self, context): - current_project_id = lib.execute_george("tv_projectcurrentid") - lib.execute_george("tv_projectselect {}".format(current_project_id)) + current_project_id = execute_george("tv_projectcurrentid") + execute_george("tv_projectselect {}".format(current_project_id)) # Collect and store current context to have reference current_context = { @@ -60,7 +75,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect context from workfile metadata self.log.info("Collecting workfile context") - workfile_context = pipeline.get_current_workfile_context() + workfile_context = get_current_workfile_context() # Store workfile context to pyblish 
context context.data["workfile_context"] = workfile_context if workfile_context: @@ -96,7 +111,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect instances self.log.info("Collecting instance data from workfile") - instance_data = pipeline.list_instances() + instance_data = list_instances() context.data["workfileInstances"] = instance_data self.log.debug( "Instance data:\"{}".format(json.dumps(instance_data, indent=4)) @@ -104,7 +119,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect information about layers self.log.info("Collecting layers data from workfile") - layers_data = lib.layers_data() + layers_data = get_layers_data() layers_by_name = {} for layer in layers_data: layer_name = layer["name"] @@ -120,14 +135,14 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect information about groups self.log.info("Collecting groups data from workfile") - group_data = lib.groups_data() + group_data = get_groups_data() context.data["groupsData"] = group_data self.log.debug( "Group data:\"{}".format(json.dumps(group_data, indent=4)) ) self.log.info("Collecting scene data from workfile") - workfile_info_parts = lib.execute_george("tv_projectinfo").split(" ") + workfile_info_parts = execute_george("tv_projectinfo").split(" ") # Project frame start - not used workfile_info_parts.pop(-1) @@ -139,10 +154,10 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): workfile_path = " ".join(workfile_info_parts).replace("\"", "") # Marks return as "{frame - 1} {state} ", example "0 set". - result = lib.execute_george("tv_markin") + result = execute_george("tv_markin") mark_in_frame, mark_in_state, _ = result.split(" ") - result = lib.execute_george("tv_markout") + result = execute_george("tv_markout") mark_out_frame, mark_out_state, _ = result.split(" ") scene_data = { @@ -156,7 +171,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): "sceneMarkInState": mark_in_state == "set", "sceneMarkOut": int(mark_out_frame), "sceneMarkOutState": mark_out_state == "set", - "sceneStartFrame": int(lib.execute_george("tv_startframe")), + "sceneStartFrame": int(execute_george("tv_startframe")), "sceneBgColor": self._get_bg_color() } self.log.debug( @@ -188,7 +203,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): ] george_script = "\n".join(george_script_lines) - lib.execute_george_through_file(george_script) + execute_george_through_file(george_script) with open(output_filepath, "r") as stream: data = stream.read() diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 77712347bd..1ebaf1da64 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -5,7 +5,13 @@ import tempfile from PIL import Image import pyblish.api -from openpype.hosts.tvpaint.api import lib + +from openpype.hosts.tvpaint.api.lib import ( + execute_george, + execute_george_through_file, + get_layers_pre_post_behavior, + get_layers_exposure_frames, +) from openpype.hosts.tvpaint.lib import ( calculate_layers_extraction_data, get_frame_filename_template, @@ -61,7 +67,7 @@ class ExtractSequence(pyblish.api.Extractor): # different way when Start Frame is not `0` # NOTE It will be set back after rendering scene_start_frame = instance.context.data["sceneStartFrame"] - lib.execute_george("tv_startframe 0") + execute_george("tv_startframe 0") # Frame start/end may be stored as float frame_start = int(instance.data["frameStart"]) @@ 
-113,7 +119,7 @@ class ExtractSequence(pyblish.api.Extractor): output_filepaths_by_frame_idx, thumbnail_fullpath = result # Change scene frame Start back to previous value - lib.execute_george("tv_startframe {}".format(scene_start_frame)) + execute_george("tv_startframe {}".format(scene_start_frame)) # Sequence of one frame if not output_filepaths_by_frame_idx: @@ -241,7 +247,7 @@ class ExtractSequence(pyblish.api.Extractor): george_script_lines.append(" ".join(orig_color_command)) - lib.execute_george_through_file("\n".join(george_script_lines)) + execute_george_through_file("\n".join(george_script_lines)) first_frame_filepath = None output_filepaths_by_frame_idx = {} @@ -304,8 +310,8 @@ class ExtractSequence(pyblish.api.Extractor): return [], None self.log.debug("Collecting pre/post behavior of individual layers.") - behavior_by_layer_id = lib.get_layers_pre_post_behavior(layer_ids) - exposure_frames_by_layer_id = lib.get_layers_exposure_frames( + behavior_by_layer_id = get_layers_pre_post_behavior(layer_ids) + exposure_frames_by_layer_id = get_layers_exposure_frames( layer_ids, layers ) extraction_data_by_layer_id = calculate_layers_extraction_data( @@ -410,7 +416,7 @@ class ExtractSequence(pyblish.api.Extractor): ",".join(frames_to_render), layer_id, layer["name"] )) # Let TVPaint render layer's image - lib.execute_george_through_file("\n".join(george_script_lines)) + execute_george_through_file("\n".join(george_script_lines)) # Fill frames between `frame_start_index` and `frame_end_index` self.log.debug("Filling frames not rendered frames.") diff --git a/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py b/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py index 24d6558168..a85caf2557 100644 --- a/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py +++ b/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py @@ -1,7 +1,7 @@ import pyblish.api -from openpype.api import version_up -from openpype.hosts.tvpaint.api import workio +from openpype.lib import version_up +from openpype.pipeline import registered_host class IncrementWorkfileVersion(pyblish.api.ContextPlugin): @@ -17,6 +17,7 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin): assert all(result["success"] for result in context.data["results"]), ( "Publishing not successful so version is not increased.") + host = registered_host() path = context.data["currentFile"] - workio.save_file(version_up(path)) + host.save_workfile(version_up(path)) self.log.info('Incrementing workfile version') diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py b/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py index 70816f9f18..7e35726030 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py @@ -1,6 +1,9 @@ import pyblish.api from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.tvpaint.api import pipeline +from openpype.hosts.tvpaint.api.pipeline import ( + list_instances, + write_instances, +) class FixAssetNames(pyblish.api.Action): @@ -15,7 +18,7 @@ class FixAssetNames(pyblish.api.Action): def process(self, context, plugin): context_asset_name = context.data["asset"] - old_instance_items = pipeline.list_instances() + old_instance_items = list_instances() new_instance_items = [] for instance_item in old_instance_items: instance_asset_name = instance_item.get("asset") @@ -25,7 +28,7 @@ class FixAssetNames(pyblish.api.Action): ): 
instance_item["asset"] = context_asset_name new_instance_items.append(instance_item) - pipeline._write_instances(new_instance_items) + write_instances(new_instance_items) class ValidateAssetNames(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py index d1f299e006..12d50e17ff 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py @@ -2,7 +2,7 @@ import json import pyblish.api from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.tvpaint.api import lib +from openpype.hosts.tvpaint.api.lib import execute_george class ValidateMarksRepair(pyblish.api.Action): @@ -15,10 +15,10 @@ class ValidateMarksRepair(pyblish.api.Action): def process(self, context, plugin): expected_data = ValidateMarks.get_expected_data(context) - lib.execute_george( + execute_george( "tv_markin {} set".format(expected_data["markIn"]) ) - lib.execute_george( + execute_george( "tv_markout {} set".format(expected_data["markOut"]) ) diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py b/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py index ddc738c6ed..066e54c670 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py @@ -1,6 +1,6 @@ import pyblish.api from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.tvpaint.api import lib +from openpype.hosts.tvpaint.api.lib import execute_george class RepairStartFrame(pyblish.api.Action): @@ -11,7 +11,7 @@ class RepairStartFrame(pyblish.api.Action): on = "failed" def process(self, context, plugin): - lib.execute_george("tv_startframe 0") + execute_george("tv_startframe 0") class ValidateStartFrame(pyblish.api.ContextPlugin): @@ -24,7 +24,7 @@ class ValidateStartFrame(pyblish.api.ContextPlugin): optional = True def process(self, context): - start_frame = lib.execute_george("tv_startframe") + start_frame = execute_george("tv_startframe") if start_frame == 0: return diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py b/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py index eac345f395..d66ae50c60 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_workfile_metadata.py @@ -1,6 +1,5 @@ import pyblish.api -from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.tvpaint.api import save_file +from openpype.pipeline import PublishXmlValidationError, registered_host class ValidateWorkfileMetadataRepair(pyblish.api.Action): @@ -13,8 +12,9 @@ class ValidateWorkfileMetadataRepair(pyblish.api.Action): def process(self, context, _plugin): """Save current workfile which should trigger storing of metadata.""" current_file = context.data["currentFile"] + host = registered_host() # Save file should trigger - save_file(current_file) + host.save_workfile(current_file) class ValidateWorkfileMetadata(pyblish.api.ContextPlugin): From 326508888727c4626bf60e7469c7efab5ce6483a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:55:13 +0200 Subject: [PATCH 0573/2550] removed unused workio --- openpype/hosts/tvpaint/api/__init__.py | 17 -------- openpype/hosts/tvpaint/api/workio.py | 58 -------------------------- 2 files changed, 75 deletions(-) delete mode 100644 
openpype/hosts/tvpaint/api/workio.py diff --git a/openpype/hosts/tvpaint/api/__init__.py b/openpype/hosts/tvpaint/api/__init__.py index b07658c583..5d42a8cc02 100644 --- a/openpype/hosts/tvpaint/api/__init__.py +++ b/openpype/hosts/tvpaint/api/__init__.py @@ -8,15 +8,6 @@ from .pipeline import ( TVPaintHost, ) -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root, -) - __all__ = ( "CommunicationWrapper", @@ -28,12 +19,4 @@ __all__ = ( "plugin", "TVPaintHost", - - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root" ) diff --git a/openpype/hosts/tvpaint/api/workio.py b/openpype/hosts/tvpaint/api/workio.py deleted file mode 100644 index 1a5ad00ca8..0000000000 --- a/openpype/hosts/tvpaint/api/workio.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Host API required for Work Files. -# TODO @iLLiCiT implement functions: - has_unsaved_changes -""" - -from openpype.pipeline import ( - HOST_WORKFILE_EXTENSIONS, - legacy_io, -) -from .lib import ( - execute_george, - execute_george_through_file -) -from .pipeline import save_current_workfile_context - - -def open_file(filepath): - """Open the scene file in Blender.""" - george_script = "tv_LoadProject '\"'\"{}\"'\"'".format( - filepath.replace("\\", "/") - ) - return execute_george_through_file(george_script) - - -def save_file(filepath): - """Save the open scene file.""" - # Store context to workfile before save - context = { - "project": legacy_io.Session["AVALON_PROJECT"], - "asset": legacy_io.Session["AVALON_ASSET"], - "task": legacy_io.Session["AVALON_TASK"] - } - save_current_workfile_context(context) - - # Execute george script to save workfile. - george_script = "tv_SaveProject {}".format(filepath.replace("\\", "/")) - return execute_george(george_script) - - -def current_file(): - """Return the path of the open scene file.""" - george_script = "tv_GetProjectName" - return execute_george(george_script) - - -def has_unsaved_changes(): - """Does the open scene file have unsaved changes?""" - return False - - -def file_extensions(): - """Return the supported file extensions for Blender scene files.""" - return HOST_WORKFILE_EXTENSIONS["tvpaint"] - - -def work_root(session): - """Return the default root to browse for work files.""" - return session["AVALON_WORKDIR"] From dbd983f8c8c35c7d747c247e9593d451b42f6d5d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 10:55:23 +0200 Subject: [PATCH 0574/2550] cleanup imports in api init file --- openpype/hosts/tvpaint/api/__init__.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/openpype/hosts/tvpaint/api/__init__.py b/openpype/hosts/tvpaint/api/__init__.py index 5d42a8cc02..7b53aad9a4 100644 --- a/openpype/hosts/tvpaint/api/__init__.py +++ b/openpype/hosts/tvpaint/api/__init__.py @@ -1,9 +1,4 @@ from .communication_server import CommunicationWrapper -from . import lib -from . import launch_script -from . import workio -from . import pipeline -from . 
import plugin from .pipeline import ( TVPaintHost, ) @@ -12,11 +7,5 @@ from .pipeline import ( __all__ = ( "CommunicationWrapper", - "lib", - "launch_script", - "workio", - "pipeline", - "plugin", - "TVPaintHost", ) From bf5584d77f9766dc5da23890c76dba1841c6bfdb Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 23 Aug 2022 11:46:47 +0200 Subject: [PATCH 0575/2550] Change avalon to openpype Co-authored-by: Roy Nieterau --- website/docs/module_ftrack.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/module_ftrack.md b/website/docs/module_ftrack.md index ad9cf75e8f..6d5529b512 100644 --- a/website/docs/module_ftrack.md +++ b/website/docs/module_ftrack.md @@ -72,7 +72,7 @@ We do not recommend setting your Ftrack user and api key environments in a persi ### Where to run event server -We recommend you to run event server on stable server machine with ability to connect to Avalon database and Ftrack web server. Best practice we recommend is to run event server as service. It can be Windows or Linux. +We recommend you to run event server on stable server machine with ability to connect to OpenPype database and Ftrack web server. Best practice we recommend is to run event server as service. It can be Windows or Linux. :::important Event server should **not** run more than once! It may cause major issues. From 4943b7889eecb207304683b01dca778d376dc9ee Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 23 Aug 2022 11:47:07 +0200 Subject: [PATCH 0576/2550] grammar fix Co-authored-by: Roy Nieterau --- website/docs/artist_concepts.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/artist_concepts.md b/website/docs/artist_concepts.md index f67ab89b9c..7582540811 100644 --- a/website/docs/artist_concepts.md +++ b/website/docs/artist_concepts.md @@ -10,7 +10,7 @@ sidebar_label: Key Concepts In our pipeline all the main entities the project is made from are internally considered *'Assets'*. Episode, sequence, shot, character, prop, etc. All of these behave identically in the pipeline. Asset names need to be absolutely unique within the project because they are their key identifier. -OpenPype has limitation regarging duplicated names. Name of assets must be unique across whole project. +OpenPype has a limitation regarding duplicated names. Name of assets must be unique across whole project. 
### Subset From 90910cc3eeb929c70542f13b1b9fd3c4b5179025 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 15:02:01 +0200 Subject: [PATCH 0577/2550] use project name in prepare root value instead of project code --- openpype/hosts/maya/plugins/load/_load_animation.py | 2 +- openpype/hosts/maya/plugins/load/load_ass.py | 7 ++++--- openpype/hosts/maya/plugins/load/load_look.py | 2 +- openpype/hosts/maya/plugins/load/load_reference.py | 2 +- openpype/hosts/maya/plugins/load/load_yeti_rig.py | 2 +- 5 files changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/_load_animation.py b/openpype/hosts/maya/plugins/load/_load_animation.py index 0010efb829..b419a730b5 100644 --- a/openpype/hosts/maya/plugins/load/_load_animation.py +++ b/openpype/hosts/maya/plugins/load/_load_animation.py @@ -36,7 +36,7 @@ class AbcLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # hero_001 (abc) # asset_counter{optional} file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, sharedReferenceFile=False, diff --git a/openpype/hosts/maya/plugins/load/load_ass.py b/openpype/hosts/maya/plugins/load/load_ass.py index 1f0eb88995..d1b12ceaba 100644 --- a/openpype/hosts/maya/plugins/load/load_ass.py +++ b/openpype/hosts/maya/plugins/load/load_ass.py @@ -65,8 +65,9 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): proxyPath = proxyPath_base + ".ma" + project_name = context["project"]["name"] file_url = self.prepare_root_value(proxyPath, - context["project"]["code"]) + project_name) nodes = cmds.file(file_url, namespace=namespace, @@ -85,7 +86,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): proxyShape.dso.set(path) proxyShape.aiOverrideShaders.set(0) - settings = get_project_settings(os.environ['AVALON_PROJECT']) + settings = get_project_settings(project_name) colors = settings['maya']['load']['colors'] c = colors.get(family) @@ -128,7 +129,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): file_url = self.prepare_root_value(proxyPath, representation["context"] ["project"] - ["code"]) + ["name"]) content = cmds.file(file_url, loadReference=reference_node, type="mayaAscii", diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py index 7392adc4dd..3ef19ad96f 100644 --- a/openpype/hosts/maya/plugins/load/load_look.py +++ b/openpype/hosts/maya/plugins/load/load_look.py @@ -33,7 +33,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with lib.maintained_selection(): file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, reference=True, diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index e4355ed3d4..fa8cbfbe64 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -52,7 +52,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with maintained_selection(): cmds.loadPlugin("AbcImport.mll", quiet=True) file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, sharedReferenceFile=False, diff --git a/openpype/hosts/maya/plugins/load/load_yeti_rig.py 
b/openpype/hosts/maya/plugins/load/load_yeti_rig.py index 241c28467a..4b730ad2c1 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_rig.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_rig.py @@ -54,7 +54,7 @@ class YetiRigLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # load rig with lib.maintained_selection(): file_url = self.prepare_root_value(self.fname, - context["project"]["code"]) + context["project"]["name"]) nodes = cmds.file(file_url, namespace=namespace, reference=True, From 6a42f07d8e5977193236f9b3665a5a655188ae1c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 15:05:55 +0200 Subject: [PATCH 0578/2550] fix missing argument --- openpype/hosts/tvpaint/tvpaint_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/tvpaint_module.py b/openpype/hosts/tvpaint/tvpaint_module.py index c29602babc..a004359231 100644 --- a/openpype/hosts/tvpaint/tvpaint_module.py +++ b/openpype/hosts/tvpaint/tvpaint_module.py @@ -20,7 +20,7 @@ class TVPaintModule(OpenPypeModule, IHostModule): def initialize(self, module_settings): self.enabled = True - def add_implementation_envs(env, _app): + def add_implementation_envs(self, env, _app): """Modify environments to contain all required for implementation.""" defaults = { From 9c2c1118ac40d6f8ec3b691a947c2a985439da83 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 15:07:47 +0200 Subject: [PATCH 0579/2550] added notes into client directory --- openpype/client/notes.md | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 openpype/client/notes.md diff --git a/openpype/client/notes.md b/openpype/client/notes.md new file mode 100644 index 0000000000..a261b86eca --- /dev/null +++ b/openpype/client/notes.md @@ -0,0 +1,39 @@ +# Client functionality +## Reason +Preparation for OpenPype v4 server. Goal is to remove direct mongo calls in code to prepare a little bit for different source of data for code before. To start think about database calls less as mongo calls but more universally. To do so was implemented simple wrapper around database calls to not use pymongo specific code. + +Current goal is not to make universal database model which can be easily replaced with any different source of data but to make it close as possible. Current implementation of OpenPype is too tighly connected to pymongo and it's abilities so we're trying to get closer with long term changes that can be used even in current state. + +## Queries +Query functions don't use full potential of mongo queries like very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they'll probably won't be available in future. If it's really necessary a new function can be added but only if it's reasonable for overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments with available filters and possible reduce of returned keys for each entity. + +## Changes +Changes are a little bit complicated. Mongo has many options how update can happen which had to be reduced also it would be at this stage complicated to validate values which are created or updated thus automation is at this point almost none. Changes can be made using operations available in `~/client/operations.py`. Each operation require project name and entity type, but may require operation specific data. 
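+
+A minimal sketch of the update flow with `OperationsSession` — the project name and subset id here are placeholder values, and `REMOVED_VALUE` marks a key that should be unset instead of set:
+
+```python
+from openpype.client.operations import OperationsSession, REMOVED_VALUE
+
+project_name = "my_project"  # placeholder
+subset_id = "..."            # placeholder entity id
+
+session = OperationsSession()
+
+# Queue a change of a nested value - nested keys are joined with a dot
+session.update_entity(
+    project_name, "subset", subset_id, {"data.subsetGroup": "Grouped"}
+)
+# Passing REMOVED_VALUE instead of a value would unset the key
+
+# Queued operations are sent to the database on commit
+session.commit()
+```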
+
+### Create
+Create operations expect already prepared document data. There are helper functions that create skeletal document structures (they do not fill all required data); except for `_id`, all data should be right. Existence of the entity is not validated, so if the same creation operation is sent n times it will create the entity n times, which can cause issues.
+
+### Update
+An update operation requires the entity id and the keys that should be changed; the update dictionary must have the form {"key": value}. If a value should be set in a nested dictionary, the key must contain all subkeys joined with a dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify building update dictionaries there are prepared functions which do that for you; their names follow the template `prepare_<entity_type>_update_data` and they work by comparing the previous document with the new document. If a function is missing for a requested entity type, it is because we didn't need it yet and it requires implementation.
+
+### Delete
+A delete operation needs the entity id. The entity will be deleted from mongo.
+
+
+## What (probably) won't be replaced
+Some parts of the code still use direct mongo calls. In most cases these are very specific calls that are module specific or whose usage will completely change in the future.
+- Mongo calls that are not project specific (outside the `avalon` collection) will be removed or will have to use a different mechanism for storing the data. At this moment this relates to OpenPype settings and logs, ftrack server events and some other data.
+- Sync server queries. They're complex and very specific to the sync server module. Their replacement will require specific calls to the OpenPype server in v4, thus abstracting them with a wrapper is irrelevant and would complicate production in v3.
+- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers create, update or remove assets in v3, but in v4 they will create folders with a different structure. Wrapping creation of assets would not help to prepare for v4 because of the new data structures. The same can be said about the editorial Extract Hierarchy Avalon plugin which creates the project structure.
+- Code parts that are marked as deprecated in v3 or will be deprecated in v4.
+  - integrate asset legacy publish plugin - already legacy, kept for safety
+  - integrate thumbnail - thumbnails will be stored in a different way in v4
+  - input links - links will be stored in a different way and will have a different linking mechanism. In v3, links are limited to the same entity type: "asset <-> asset" or "representation <-> representation".
+ +## Known missing replacements +- change subset group in loader tool +- integrate subset group +- query input links in openpype lib +- create project in openpype lib +- save/create workfile doc in openpype lib +- integrate hero version From ffa3b0829f800f3ee43d7b8e0cf80801dffda591 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 23 Aug 2022 15:12:31 +0200 Subject: [PATCH 0580/2550] flame: fixing frame ranges after client tests --- .../publish/extract_subset_resources.py | 57 ++++++++++++------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 8a03ba119c..3e1e8db986 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -69,6 +69,9 @@ class ExtractSubsetResources(openpype.api.Extractor): # get media source first frame source_first_frame = instance.data["sourceFirstFrame"] + self.log.debug("_ frame_start: {}".format(frame_start)) + self.log.debug("_ source_first_frame: {}".format(source_first_frame)) + # get timeline in/out of segment clip_in = instance.data["clipIn"] clip_out = instance.data["clipOut"] @@ -102,6 +105,25 @@ class ExtractSubsetResources(openpype.api.Extractor): + r_handle_end ) + # get frame range with handles for representation range + frame_start_handle = frame_start - handle_start + repre_frame_start = frame_start_handle + if include_handles: + if r_speed == 1.0: + frame_start_handle = frame_start + else: + frame_start_handle = ( + frame_start - handle_start) + r_handle_start + + self.log.debug("_ frame_start_handle: {}".format( + frame_start_handle)) + self.log.debug("_ repre_frame_start: {}".format( + repre_frame_start)) + + # calculate duration with handles + source_duration_handles = ( + source_end_handles - source_start_handles) + 1 + # create staging dir path staging_dir = self.staging_dir(instance) @@ -120,15 +142,22 @@ class ExtractSubsetResources(openpype.api.Extractor): # set versiondata if any retime version_data = retimed_data.get("version_data") + self.log.debug("_ version_data: {}".format(version_data)) if version_data: instance.data["versionData"].update(version_data) if r_speed != 1.0: instance.data["versionData"].update({ - "frameStart": source_start_handles + r_handle_start, - "frameEnd": source_end_handles - r_handle_end, + "frameStart": frame_start_handle, + "frameEnd": ( + (frame_start_handle + source_duration_handles - 1) + - (r_handle_start + r_handle_end) + ) }) + self.log.debug("_ i_version_data: {}".format( + instance.data["versionData"] + )) # loop all preset names and for unique_name, preset_config in export_presets.items(): @@ -152,22 +181,6 @@ class ExtractSubsetResources(openpype.api.Extractor): ) ) - # get frame range with handles for representation range - frame_start_handle = frame_start - handle_start - if include_handles: - if r_speed == 1.0: - frame_start_handle = frame_start - else: - frame_start_handle = ( - frame_start - handle_start) + r_handle_start - - self.log.debug("_ frame_start_handle: {}".format( - frame_start_handle)) - - # calculate duration with handles - source_duration_handles = ( - source_end_handles - source_start_handles) + 1 - exporting_clip = None name_patern_xml = "_{}.".format( unique_name) @@ -203,7 +216,7 @@ class ExtractSubsetResources(openpype.api.Extractor): modify_xml_data.update({ # enum position low start from 0 "frameIndex": 0, - "startFrame": 
frame_start_handle, + "startFrame": repre_frame_start, "namePattern": name_patern_xml }) @@ -248,7 +261,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "namePattern": "__thumbnail" }) thumb_frame_number = int(in_mark + ( - source_duration_handles / 2)) + (out_mark - in_mark + 1) / 2)) self.log.debug("__ thumb_frame_number: {}".format( thumb_frame_number @@ -329,9 +342,9 @@ class ExtractSubsetResources(openpype.api.Extractor): # add frame range if preset_config["representation_add_range"]: representation_data.update({ - "frameStart": frame_start_handle, + "frameStart": repre_frame_start, "frameEnd": ( - frame_start_handle + source_duration_handles) - 1, + repre_frame_start + source_duration_handles) - 1, "fps": instance.data["fps"] }) From b96cff6ea9f85f80d5ee801fc8bd17020e484580 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 23 Aug 2022 15:24:58 +0200 Subject: [PATCH 0581/2550] Removed submodule vendor/configs/OpenColorIO-Configs --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 0d8ab1c17ea388b3f639d1bedab67359463af44c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 15:35:35 +0200 Subject: [PATCH 0582/2550] fix unsetting of value --- openpype/client/operations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index c4b95bf696..618cdf9d1e 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -444,7 +444,7 @@ class UpdateOperation(AbstractOperation): set_data = {} for key, value in self._update_data.items(): if value is REMOVED_VALUE: - unset_data[key] = value + unset_data[key] = None else: set_data[key] = value From 0f114331ec45dacc571ab2dc3b04dc869d833d12 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 15:35:57 +0200 Subject: [PATCH 0583/2550] use client options to change subset group --- openpype/tools/loader/widgets.py | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 597c35e89b..cbf5720803 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -17,6 +17,7 @@ from openpype.client import ( get_thumbnail_id_from_source, get_thumbnail, ) +from openpype.client.operations import OperationsSession, REMOVED_VALUE from openpype.pipeline import HeroVersionType, Anatomy from openpype.pipeline.thumbnail import get_thumbnail_binary from openpype.pipeline.load import ( @@ -614,26 +615,30 @@ class SubsetWidget(QtWidgets.QWidget): box.show() def group_subsets(self, name, asset_ids, items): - field = "data.subsetGroup" + subset_ids = { + item["_id"] + for item in items + if item.get("_id") + } + if not subset_ids: + return if name: - update = {"$set": {field: name}} self.echo("Group subsets to '%s'.." 
% name) else: - update = {"$unset": {field: ""}} self.echo("Ungroup subsets..") - subsets = list() - for item in items: - subsets.append(item["subset"]) + project_name = self.dbcon.active_project() + op_session = OperationsSession() + for subset_id in subset_ids: + op_session.update_entity( + project_name, + "subset", + subset_id, + {"data.subsetGroup": name or REMOVED_VALUE} + ) - for asset_id in asset_ids: - filtr = { - "type": "subset", - "parent": asset_id, - "name": {"$in": subsets}, - } - self.dbcon.update_many(filtr, update) + op_session.commit() def echo(self, message): print(message) From 149b65b8b05750966ab5d93a5751e7085cf81652 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 23 Aug 2022 16:14:38 +0200 Subject: [PATCH 0584/2550] global: audio PR comments --- openpype/plugins/publish/collect_audio.py | 45 +++++++++++++++++------ 1 file changed, 33 insertions(+), 12 deletions(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index cf074392ee..3a765d345d 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -12,10 +12,12 @@ from openpype.pipeline import ( class CollectAudio(pyblish.api.InstancePlugin): - """ Collecting available audio subset to instance + """Collect asset's last published audio. + The audio subset name searched for is defined in: + project settings > Collect Audio """ - label = "Collect Audio" + label = "Collect Asset Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] hosts = [ @@ -46,10 +48,33 @@ class CollectAudio(pyblish.api.InstancePlugin): return # Add audio to instance if exists. - self.log.info('Collecting Audio Data ...') + self.log.info(( + "Searching for audio subset '{subset}'" + " in asset '{asset}'" + ).format( + subset=self.audio_subset_name, + asset=instance.data["asset"] + )) + + repre_doc = self._get_repre_doc(instance) + + # Add audio to instance if representation was found + if repre_doc: + instance.data["audio"] = [{ + "offset": 0, + "filename": get_representation_path(repre_doc) + }] + self.log.info("Audio Data added to instance ...") + + def _get_repre_doc(self, instance): + cache = instance.context.data.get("__cache_asset_audio", {}) + asset_name = instance.data["asset"] + + # first try to get it from cache + if asset_name in cache: + return cache[asset_name] project_name = legacy_io.active_project() - asset_name = instance.data["asset"] # Find latest versions document last_version_doc = get_last_version_by_subset_name( @@ -72,12 +97,8 @@ class CollectAudio(pyblish.api.InstancePlugin): else: repre_doc = repre_docs[0] - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.info("Audio Data added to instance ...") + # update cache + cache[asset_name] = repre_doc + instance.context.data["__cache_asset_audio"].update(cache) - self.log.debug("instance.data: {}".format(pformat(instance.data))) + return repre_doc From 7f234e1d814a92cbe1e446aeef7c71a2a2165163 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 16:20:54 +0200 Subject: [PATCH 0585/2550] fix iterator index acces --- openpype/tools/loader/model.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 3ce44ea6c8..4f1f37b217 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -272,11 +272,13 @@ class 
SubsetsModel(TreeModel, BaseRepresentationModel): # update availability on active site when version changes if self.sync_server.enabled and version_doc: - repre_info = self.sync_server.get_repre_info_for_versions( - project_name, - [version_doc["_id"]], - self.active_site, - self.remote_site + repre_info = list( + self.sync_server.get_repre_info_for_versions( + project_name, + [version_doc["_id"]], + self.active_site, + self.remote_site + ) ) if repre_info: version_doc["data"].update( From 4062bf56f32a6d5ec3e6bf6ec62c61910b3365bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 16:36:24 +0200 Subject: [PATCH 0586/2550] print traceback on crashed dynamic thread --- openpype/tools/utils/lib.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 99d8c75ab4..fb2348518a 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -2,6 +2,7 @@ import os import sys import contextlib import collections +import traceback from Qt import QtWidgets, QtCore, QtGui import qtawesome @@ -643,7 +644,11 @@ class DynamicQThread(QtCore.QThread): def create_qthread(func, *args, **kwargs): class Thread(QtCore.QThread): def run(self): - func(*args, **kwargs) + try: + func(*args, **kwargs) + except: + traceback.print_exception(*sys.exc_info()) + raise return Thread() From fce4e6e3d8f5c3d7f2b929a0328324ce1d951e9b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 16:38:45 +0200 Subject: [PATCH 0587/2550] fix version specific repre info in loader --- openpype/tools/loader/model.py | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 4f1f37b217..929e497890 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -272,7 +272,7 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): # update availability on active site when version changes if self.sync_server.enabled and version_doc: - repre_info = list( + repres_info = list( self.sync_server.get_repre_info_for_versions( project_name, [version_doc["_id"]], @@ -280,9 +280,9 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): self.remote_site ) ) - if repre_info: + if repres_info: version_doc["data"].update( - self._get_repre_dict(repre_info[0])) + self._get_repre_dict(repres_info[0])) self.set_version(index, version_doc) @@ -474,29 +474,34 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): last_versions_by_subset_id[subset_id] = hero_version - repre_info = {} + repre_info_by_version_id = {} if self.sync_server.enabled: - version_ids = set() + versions_by_id = {} for _subset_id, doc in last_versions_by_subset_id.items(): - version_ids.add(doc["_id"]) + versions_by_id[doc["_id"]] = doc - repres = self.sync_server.get_repre_info_for_versions( + repres_info = self.sync_server.get_repre_info_for_versions( project_name, - list(version_ids), self.active_site, self.remote_site + list(versions_by_id.keys()), + self.active_site, + self.remote_site ) - for repre in repres: + for repre_info in repres_info: if self._doc_fetching_stop: return + + version_id = repre_info["_id"] + doc = versions_by_id[version_id] doc["active_provider"] = self.active_provider doc["remote_provider"] = self.remote_provider - repre_info[repre["_id"]] = repre + repre_info_by_version_id[version_id] = repre_info self._doc_payload = { "asset_docs_by_id": asset_docs_by_id, "subset_docs_by_id": subset_docs_by_id, 
"subset_families": subset_families, "last_versions_by_subset_id": last_versions_by_subset_id, - "repre_info_by_version_id": repre_info + "repre_info_by_version_id": repre_info_by_version_id } self.doc_fetched.emit() From c393105e2502ed4b9dba95aa2a49cefbd849c2a0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 16:39:31 +0200 Subject: [PATCH 0588/2550] use BaseException --- openpype/tools/utils/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index fb2348518a..97b680b77e 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -646,7 +646,7 @@ def create_qthread(func, *args, **kwargs): def run(self): try: func(*args, **kwargs) - except: + except BaseException: traceback.print_exception(*sys.exc_info()) raise return Thread() From 265d67f1fc2aa42072b0c32c688c9ef6e3a467f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 17:19:48 +0200 Subject: [PATCH 0589/2550] added helper getters to modules manager --- openpype/modules/base.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index e26075283d..1316d7f734 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -562,6 +562,40 @@ class ModulesManager: self.initialize_modules() self.connect_modules() + def __getitem__(self, module_name): + return self.modules_by_name[module_name] + + def get(self, module_name, default=None): + """Access module by name. + + Args: + module_name (str): Name of module which should be returned. + default (Any): Default output if module is not available. + + Returns: + Union[OpenPypeModule, None]: Module found by name or None. + """ + return self.modules_by_name.get(module_name, default) + + def get_enabled_module(self, module_name, default=None): + """Fast access to enabled module. + + If module is available but is not enabled default value is returned. + + Args: + module_name (str): Name of module which should be returned. + default (Any): Default output if module is not available or is + not enabled. + + Returns: + Union[OpenPypeModule, None]: Enabled module found by name or None. + """ + + module = self.get(module_name) + if module is not None and module.enabled: + return module + return default + def initialize_modules(self): """Import and initialize modules.""" # Make sure modules are loaded From 309a272a1833ad73badeab5235ece8707c904c33 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 23 Aug 2022 17:27:47 +0200 Subject: [PATCH 0590/2550] nuke: fixing validate knobs --- openpype/hosts/nuke/plugins/publish/validate_knobs.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index 573c25f3fe..e2b11892e5 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -65,13 +65,22 @@ class ValidateKnobs(pyblish.api.ContextPlugin): # Filter families. families = [instance.data["family"]] families += instance.data.get("families", []) - families = list(set(families) & set(cls.knobs.keys())) + if not families: continue # Get all knobs to validate. knobs = {} for family in families: + # check if dot in family + if "." 
in family: + family = family.split(".")[0] + + # avoid families not in settings + if family not in cls.knobs: + continue + + # get presets of knobs for preset in cls.knobs[family]: knobs[preset] = cls.knobs[family][preset] From eb897ac579e0993103cc2d12c82d574181e55754 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 23 Aug 2022 17:28:03 +0200 Subject: [PATCH 0591/2550] remove unused import --- openpype/hosts/nuke/plugins/publish/validate_write_nodes.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 9c9b8babaa..362ff31174 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,4 +1,3 @@ -import six import pyblish.api from openpype.api import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( From d2d90ed2e098587cd466243f5d666093ed1db55f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 23 Aug 2022 17:30:43 +0200 Subject: [PATCH 0592/2550] hound catch --- openpype/plugins/publish/collect_audio.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 3a765d345d..e2fb766ec4 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -1,5 +1,4 @@ import pyblish.api -from pprint import pformat from openpype.client import ( get_last_version_by_subset_name, From bc7aa718add1ffd053f942d7a8913cce05c24ddd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 17:45:49 +0200 Subject: [PATCH 0593/2550] use current schemas from client --- openpype/lib/avalon_context.py | 12 +++++++++--- .../event_sync_to_avalon.py | 4 ++-- openpype/modules/ftrack/lib/avalon_sync.py | 19 ++++++++----------- .../project_manager/project_manager/model.py | 10 ++++------ 4 files changed, 23 insertions(+), 22 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index eed17fce9d..b9d66291be 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -14,6 +14,11 @@ from openpype.client import ( get_last_version_by_subset_name, get_workfile_info, ) +from openpype.client.operations import ( + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_PROJECT_SCHEMA, + CURRENT_PROJECT_CONFIG_SCHEMA, +) from .profiles_filtering import filter_profiles from .events import emit_event from .path_templates import StringTemplate @@ -23,10 +28,11 @@ legacy_io = None log = logging.getLogger("AvalonContext") +# Backwards compatibility - should not be used anymore CURRENT_DOC_SCHEMAS = { - "project": "openpype:project-3.0", - "asset": "openpype:asset-3.0", - "config": "openpype:config-2.0" + "project": CURRENT_PROJECT_SCHEMA, + "asset": CURRENT_ASSET_DOC_SCHEMA, + "config": CURRENT_PROJECT_CONFIG_SCHEMA } PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" PROJECT_NAME_REGEX = re.compile( diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 738181dc9a..e549de7ed0 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -18,6 +18,7 @@ from openpype.client import ( get_archived_assets, get_asset_ids_with_subsets ) +from openpype.client.operations import CURRENT_ASSET_DOC_SCHEMA from openpype.pipeline import 
AvalonMongoDB, schema from openpype_modules.ftrack.lib import ( @@ -35,7 +36,6 @@ from openpype_modules.ftrack.lib.avalon_sync import ( convert_to_fps, InvalidFpsValue ) -from openpype.lib import CURRENT_DOC_SCHEMAS class SyncToAvalonEvent(BaseEvent): @@ -1236,7 +1236,7 @@ class SyncToAvalonEvent(BaseEvent): "_id": mongo_id, "name": name, "type": "asset", - "schema": CURRENT_DOC_SCHEMAS["asset"], + "schema": CURRENT_ASSET_DOC_SCHEMA, "parent": proj["_id"], "data": { "ftrackId": ftrack_ent["id"], diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index f8883cefbd..72be6a8e9a 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -14,6 +14,11 @@ from openpype.client import ( get_versions, get_representations ) +from openpype.client.operations import ( + CURRENT_ASSET_DOC_SCHEMA, + CURRENT_PROJECT_SCHEMA, + CURRENT_PROJECT_CONFIG_SCHEMA, +) from openpype.api import ( Logger, get_anatomy_settings @@ -32,14 +37,6 @@ import ftrack_api log = Logger.get_logger(__name__) -# Current schemas for avalon types -CURRENT_DOC_SCHEMAS = { - "project": "openpype:project-3.0", - "asset": "openpype:asset-3.0", - "config": "openpype:config-2.0" -} - - class InvalidFpsValue(Exception): pass @@ -2063,7 +2060,7 @@ class SyncEntitiesFactory: item["_id"] = new_id item["parent"] = self.avalon_project_id - item["schema"] = CURRENT_DOC_SCHEMAS["asset"] + item["schema"] = CURRENT_ASSET_DOC_SCHEMA item["data"]["visualParent"] = avalon_parent new_id_str = str(new_id) @@ -2198,8 +2195,8 @@ class SyncEntitiesFactory: project_item["_id"] = new_id project_item["parent"] = None - project_item["schema"] = CURRENT_DOC_SCHEMAS["project"] - project_item["config"]["schema"] = CURRENT_DOC_SCHEMAS["config"] + project_item["schema"] = CURRENT_PROJECT_SCHEMA + project_item["config"]["schema"] = CURRENT_PROJECT_CONFIG_SCHEMA self.ftrack_avalon_mapper[self.ft_project_id] = new_id self.avalon_ftrack_mapper[new_id] = self.ft_project_id diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 3aaee75698..6f40140e5e 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -13,10 +13,8 @@ from openpype.client import ( get_assets, get_asset_ids_with_subsets, ) -from openpype.lib import ( - CURRENT_DOC_SCHEMAS, - PypeLogger, -) +from openpype.client.operations import CURRENT_ASSET_DOC_SCHEMA +from openpype.lib import Logger from .constants import ( IDENTIFIER_ROLE, @@ -203,7 +201,7 @@ class HierarchyModel(QtCore.QAbstractItemModel): @property def log(self): if self._log is None: - self._log = PypeLogger.get_logger("ProjectManagerModel") + self._log = Logger.get_logger("ProjectManagerModel") return self._log @property @@ -1961,7 +1959,7 @@ class AssetItem(BaseItem): } schema_name = ( self._origin_asset_doc.get("schema") - or CURRENT_DOC_SCHEMAS["asset"] + or CURRENT_ASSET_DOC_SCHEMA ) doc = { From 2ded3136c7903ce1dcf651c932fec17c05e22422 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 17:47:04 +0200 Subject: [PATCH 0594/2550] moved project name regex to client operations and use it from there --- openpype/client/operations.py | 6 ++++++ openpype/lib/avalon_context.py | 6 ++---- openpype/tools/project_manager/project_manager/widgets.py | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 
618cdf9d1e..c0716ee109 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -1,3 +1,4 @@ +import re import uuid import copy import collections @@ -11,6 +12,11 @@ from .mongo import get_project_connection REMOVED_VALUE = object() +PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" +PROJECT_NAME_REGEX = re.compile( + "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) +) + CURRENT_PROJECT_SCHEMA = "openpype:project-3.0" CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index b9d66291be..2abd634832 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -18,6 +18,8 @@ from openpype.client.operations import ( CURRENT_ASSET_DOC_SCHEMA, CURRENT_PROJECT_SCHEMA, CURRENT_PROJECT_CONFIG_SCHEMA, + PROJECT_NAME_ALLOWED_SYMBOLS, + PROJECT_NAME_REGEX, ) from .profiles_filtering import filter_profiles from .events import emit_event @@ -34,10 +36,6 @@ CURRENT_DOC_SCHEMAS = { "asset": CURRENT_ASSET_DOC_SCHEMA, "config": CURRENT_PROJECT_CONFIG_SCHEMA } -PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_" -PROJECT_NAME_REGEX = re.compile( - "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS) -) class AvalonContextDeprecatedWarning(DeprecationWarning): diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 371d1ba2ef..d0715f204d 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -5,8 +5,8 @@ from .constants import ( NAME_ALLOWED_SYMBOLS, NAME_REGEX ) -from openpype.lib import ( - create_project, +from openpype.lib import create_project +from openpype.client.operations import ( PROJECT_NAME_ALLOWED_SYMBOLS, PROJECT_NAME_REGEX ) From 38e907d4ea5bfe663cdf929fa0befb0cfa18c283 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 17:48:43 +0200 Subject: [PATCH 0595/2550] removed unused import and added deprecation comment --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 2abd634832..780a830f21 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1,6 +1,5 @@ """Should be used only inside of hosts.""" import os -import re import copy import platform import logging @@ -31,6 +30,7 @@ log = logging.getLogger("AvalonContext") # Backwards compatibility - should not be used anymore +# - Will be removed in OP 3.16.* CURRENT_DOC_SCHEMAS = { "project": CURRENT_PROJECT_SCHEMA, "asset": CURRENT_ASSET_DOC_SCHEMA, From f9182cb0f9979179f4b0647e915d630e635b81e7 Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Tue, 23 Aug 2022 17:58:19 +0200 Subject: [PATCH 0596/2550] remove unsaved changes check --- .../blender/plugins/publish/collect_current_file.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/collect_current_file.py b/openpype/hosts/blender/plugins/publish/collect_current_file.py index 1ca28f67f6..c3097a0694 100644 --- a/openpype/hosts/blender/plugins/publish/collect_current_file.py +++ b/openpype/hosts/blender/plugins/publish/collect_current_file.py @@ -13,11 +13,7 @@ class SaveWorkfiledAction(pyblish.api.Action): icon = "save" def process(self, context, plugin): - current_file = workio.current_file() - if current_file: - workio.save_file(current_file) - else: - 
bpy.ops.wm.avalon_workfiles() + bpy.ops.wm.avalon_workfiles() class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): @@ -31,7 +27,6 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): def process(self, context): """Inject the current working file""" current_file = workio.current_file() - has_unsaved_changes = workio.has_unsaved_changes() context.data["currentFile"] = current_file @@ -39,11 +34,6 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): "Current file is empty. Save the file before continuing." ) - assert not has_unsaved_changes, ( - "Current file has unsaved changes. " - "Save the file before continuing." - ) - folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) From da80b2506ec6b5ba0ccddd1d32b4177401d8c0b8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 18:21:47 +0200 Subject: [PATCH 0597/2550] moved get creator by name to pipeline.create --- openpype/lib/avalon_context.py | 22 +++++----------- openpype/pipeline/create/__init__.py | 8 ++++-- openpype/pipeline/create/creator_plugins.py | 28 +++++++++++++++++++++ 3 files changed, 40 insertions(+), 18 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index f08adb5470..b7d0774cf8 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -769,7 +769,7 @@ def BuildWorkfile(): return BuildWorkfile() -@with_pipeline_io +@deprecated("openpype.pipeline.create.get_legacy_creator_by_name") def get_creator_by_name(creator_name, case_sensitive=False): """Find creator plugin by name. @@ -780,23 +780,13 @@ def get_creator_by_name(creator_name, case_sensitive=False): Returns: Creator: Return first matching plugin or `None`. + + Deprecated: + Function will be removed after release version 3.16.* """ - from openpype.pipeline import discover_legacy_creator_plugins + from openpype.pipeline.create import get_legacy_creator_by_name - # Lower input creator name if is not case sensitive - if not case_sensitive: - creator_name = creator_name.lower() - - for creator_plugin in discover_legacy_creator_plugins(): - _creator_name = creator_plugin.__name__ - - # Lower creator plugin name if is not case sensitive - if not case_sensitive: - _creator_name = _creator_name.lower() - - if _creator_name == creator_name: - return creator_plugin - return None + return get_legacy_creator_by_name(creator_name, case_sensitive) @deprecated diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index bd196ccfd1..733e7766b2 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -9,8 +9,10 @@ from .creator_plugins import ( AutoCreator, HiddenCreator, - discover_creator_plugins, discover_legacy_creator_plugins, + get_legacy_creator_by_name, + + discover_creator_plugins, register_creator_plugin, deregister_creator_plugin, register_creator_plugin_path, @@ -38,8 +40,10 @@ __all__ = ( "AutoCreator", "HiddenCreator", - "discover_creator_plugins", "discover_legacy_creator_plugins", + "get_legacy_creator_by_name", + + "discover_creator_plugins", "register_creator_plugin", "deregister_creator_plugin", "register_creator_plugin_path", diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 9a5d559774..9e1530a6a7 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -458,6 +458,34 @@ def discover_legacy_creator_plugins(): return plugins +def 
get_legacy_creator_by_name(creator_name, case_sensitive=False): + """Find creator plugin by name. + + Args: + creator_name (str): Name of creator class that should be returned. + case_sensitive (bool): Match of creator plugin name is case sensitive. + Set to `False` by default. + + Returns: + Creator: Return first matching plugin or `None`. + """ + + # Lower input creator name if is not case sensitive + if not case_sensitive: + creator_name = creator_name.lower() + + for creator_plugin in discover_legacy_creator_plugins(): + _creator_name = creator_plugin.__name__ + + # Lower creator plugin name if is not case sensitive + if not case_sensitive: + _creator_name = _creator_name.lower() + + if _creator_name == creator_name: + return creator_plugin + return None + + def register_creator_plugin(plugin): if issubclass(plugin, BaseCreator): register_plugin(BaseCreator, plugin) From fe75b25c9b83762553957e1d4c763c6b27785ddb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 18:23:11 +0200 Subject: [PATCH 0598/2550] use 'get_legacy_creator_by_name' instead of 'get_creator_by_name' --- .../hosts/blender/plugins/load/load_layout_blend.py | 4 ++-- .../hosts/blender/plugins/load/load_layout_json.py | 2 +- openpype/hosts/blender/plugins/load/load_rig.py | 4 ++-- openpype/hosts/maya/plugins/load/load_reference.py | 10 ++++++---- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index cf8e89ed1f..e0124053bf 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -6,12 +6,12 @@ from typing import Dict, List, Optional import bpy -from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.create import get_legacy_creator_by_name from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, @@ -157,7 +157,7 @@ class BlendLayoutLoader(plugin.AssetLoader): t.id = local_obj elif local_obj.type == 'EMPTY': - creator_plugin = lib.get_creator_by_name("CreateAnimation") + creator_plugin = get_legacy_creator_by_name("CreateAnimation") if not creator_plugin: raise ValueError("Creator plugin \"CreateAnimation\" was " "not found.") diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py index a0580af4a0..eca098627e 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_json.py +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -118,7 +118,7 @@ class JsonLayoutLoader(plugin.AssetLoader): # Camera creation when loading a layout is not necessary for now, # but the code is worth keeping in case we need it in the future. 
# # Create the camera asset and the camera instance - # creator_plugin = lib.get_creator_by_name("CreateCamera") + # creator_plugin = get_legacy_creator_by_name("CreateCamera") # if not creator_plugin: # raise ValueError("Creator plugin \"CreateCamera\" was " # "not found.") diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index 4dfa96167f..1d23a70061 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -6,12 +6,12 @@ from typing import Dict, List, Optional import bpy -from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.create import get_legacy_creator_by_name from openpype.hosts.blender.api import ( plugin, get_selection, @@ -244,7 +244,7 @@ class BlendRigLoader(plugin.AssetLoader): objects = self._process(libpath, asset_group, group_name, action) if create_animation: - creator_plugin = lib.get_creator_by_name("CreateAnimation") + creator_plugin = get_legacy_creator_by_name("CreateAnimation") if not creator_plugin: raise ValueError("Creator plugin \"CreateAnimation\" was " "not found.") diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index e4355ed3d4..15fd3575d5 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -2,10 +2,10 @@ import os from maya import cmds from openpype.api import get_project_settings -from openpype.lib import get_creator_by_name -from openpype.pipeline import ( - legacy_io, +from openpype.pipeline import legacy_io +from openpype.pipeline.create import ( legacy_create, + get_legacy_creator_by_name, ) import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api.lib import maintained_selection @@ -153,7 +153,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): self.log.info("Creating subset: {}".format(namespace)) # Create the animation instance - creator_plugin = get_creator_by_name(self.animation_creator_name) + creator_plugin = get_legacy_creator_by_name( + self.animation_creator_name + ) with maintained_selection(): cmds.select([output, controls] + roots, noExpand=True) legacy_create( From 88a11e86f4a710444acb5d025f672834b9aa2404 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 18:56:01 +0200 Subject: [PATCH 0599/2550] copied code to openpype/pipeline/create content --- openpype/pipeline/create/constants.py | 2 + openpype/pipeline/create/subset_name.py | 143 ++++++++++++++++++++++++ 2 files changed, 145 insertions(+) create mode 100644 openpype/pipeline/create/subset_name.py diff --git a/openpype/pipeline/create/constants.py b/openpype/pipeline/create/constants.py index bfbbccfd12..3af9651947 100644 --- a/openpype/pipeline/create/constants.py +++ b/openpype/pipeline/create/constants.py @@ -1,6 +1,8 @@ SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_." 
+DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", + "DEFAULT_SUBSET_TEMPLATE", ) diff --git a/openpype/pipeline/create/subset_name.py b/openpype/pipeline/create/subset_name.py new file mode 100644 index 0000000000..d5dcf44c04 --- /dev/null +++ b/openpype/pipeline/create/subset_name.py @@ -0,0 +1,143 @@ +import os + +from openpype.client import get_asset_by_id +from openpype.settings import get_project_settings +from openpype.lib import filter_profiles, prepare_template_data +from openpype.pipeline import legacy_io + +from .constants import DEFAULT_SUBSET_TEMPLATE + + +class TaskNotSetError(KeyError): + def __init__(self, msg=None): + if not msg: + msg = "Creator's subset name template requires task name." + super(TaskNotSetError, self).__init__(msg) + + +def get_subset_name_with_asset_doc( + family, + variant, + task_name, + asset_doc, + project_name=None, + host_name=None, + default_template=None, + dynamic_data=None +): + """Calculate subset name based on passed context and OpenPype settings. + + Subst name templates are defined in `project_settings/global/tools/creator + /subset_name_profiles` where are profiles with host name, family, task name + and task type filters. If context does not match any profile then + `DEFAULT_SUBSET_TEMPLATE` is used as default template. + + That's main reason why so many arguments are required to calculate subset + name. + + Args: + family (str): Instance family. + variant (str): In most of cases it is user input during creation. + task_name (str): Task name on which context is instance created. + asset_doc (dict): Queried asset document with it's tasks in data. + Used to get task type. + project_name (str): Name of project on which is instance created. + Important for project settings that are loaded. + host_name (str): One of filtering criteria for template profile + filters. + default_template (str): Default template if any profile does not match + passed context. Constant 'DEFAULT_SUBSET_TEMPLATE' is used if + is not passed. + dynamic_data (dict): Dynamic data specific for a creator which creates + instance. + dbcon (AvalonMongoDB): Mongo connection to be able query asset document + if 'asset_doc' is not passed. 
+ """ + + if not family: + return "" + + if not host_name: + host_name = os.environ["AVALON_APP"] + + # Use only last part of class family value split by dot (`.`) + family = family.rsplit(".", 1)[-1] + + if project_name is None: + project_name = legacy_io.Session["AVALON_PROJECT"] + + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + # Get settings + tools_settings = get_project_settings(project_name)["global"]["tools"] + profiles = tools_settings["creator"]["subset_name_profiles"] + filtering_criteria = { + "families": family, + "hosts": host_name, + "tasks": task_name, + "task_types": task_type + } + + matching_profile = filter_profiles(profiles, filtering_criteria) + template = None + if matching_profile: + template = matching_profile["template"] + + # Make sure template is set (matching may have empty string) + if not template: + template = default_template or DEFAULT_SUBSET_TEMPLATE + + # Simple check of task name existence for template with {task} in + # - missing task should be possible only in Standalone publisher + if not task_name and "{task" in template.lower(): + raise TaskNotSetError() + + fill_pairs = { + "variant": variant, + "family": family, + "task": task_name + } + if dynamic_data: + # Dynamic data may override default values + for key, value in dynamic_data.items(): + fill_pairs[key] = value + + return template.format(**prepare_template_data(fill_pairs)) + + +def get_subset_name( + family, + variant, + task_name, + asset_id, + project_name=None, + host_name=None, + default_template=None, + dynamic_data=None, + dbcon=None +): + """Calculate subset name using OpenPype settings. + + This variant of function expects asset id as argument. + + This is legacy function should be replaced with + `get_subset_name_with_asset_doc` where asset document is expected. + """ + + if project_name is None: + project_name = dbcon.project_name + + asset_doc = get_asset_by_id(project_name, asset_id, fields=["data.tasks"]) + + return get_subset_name_with_asset_doc( + family, + variant, + task_name, + asset_doc or {}, + project_name, + host_name, + default_template, + dynamic_data + ) From 65b3a9a5a399bcd5fc633b96250623cf0f287292 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 18:57:07 +0200 Subject: [PATCH 0600/2550] added ability to pass project settings --- openpype/pipeline/create/subset_name.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/create/subset_name.py b/openpype/pipeline/create/subset_name.py index d5dcf44c04..b6028d6427 100644 --- a/openpype/pipeline/create/subset_name.py +++ b/openpype/pipeline/create/subset_name.py @@ -23,7 +23,8 @@ def get_subset_name_with_asset_doc( project_name=None, host_name=None, default_template=None, - dynamic_data=None + dynamic_data=None, + project_settings=None ): """Calculate subset name based on passed context and OpenPype settings. 
@@ -71,7 +72,9 @@ def get_subset_name_with_asset_doc( task_type = task_info.get("type") # Get settings - tools_settings = get_project_settings(project_name)["global"]["tools"] + if not project_settings: + project_settings = get_project_settings(project_name) + tools_settings = project_settings["global"]["tools"] profiles = tools_settings["creator"]["subset_name_profiles"] filtering_criteria = { "families": family, @@ -116,7 +119,7 @@ def get_subset_name( host_name=None, default_template=None, dynamic_data=None, - dbcon=None + project_settings=None ): """Calculate subset name using OpenPype settings. @@ -127,7 +130,7 @@ def get_subset_name( """ if project_name is None: - project_name = dbcon.project_name + project_name = legacy_io.Session["AVALON_PROJECT"] asset_doc = get_asset_by_id(project_name, asset_id, fields=["data.tasks"]) @@ -139,5 +142,6 @@ def get_subset_name( project_name, host_name, default_template, - dynamic_data + dynamic_data, + project_settings ) From daea5fd45e52770dd59057c9d836bf8dd23643b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 18:58:59 +0200 Subject: [PATCH 0601/2550] import content to create level --- openpype/pipeline/create/__init__.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index bd196ccfd1..4f3d2c03e5 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -1,6 +1,14 @@ from .constants import ( - SUBSET_NAME_ALLOWED_SYMBOLS + SUBSET_NAME_ALLOWED_SYMBOLS, + DEFAULT_SUBSET_TEMPLATE, ) + +from .subset_name import ( + TaskNotSetError, + get_subset_name, + get_subset_name_with_asset_doc, +) + from .creator_plugins import ( CreatorError, @@ -30,6 +38,11 @@ from .legacy_create import ( __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", + "DEFAULT_SUBSET_TEMPLATE", + + "TaskNotSetError", + "get_subset_name", + "get_subset_name_with_asset_doc", "CreatorError", From 476153e81c31e5b755159618368eccbfb1d68b1d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 19:01:56 +0200 Subject: [PATCH 0602/2550] changed imports of task not set error --- .../traypublisher/plugins/create/create_movie_batch.py | 6 ++++-- openpype/tools/publisher/widgets/create_dialog.py | 4 ++-- openpype/tools/publisher/widgets/widgets.py | 6 ++++-- openpype/tools/standalonepublish/widgets/widget_family.py | 8 +++++--- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py index c5f0d6b75e..5d0fe4b177 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -6,13 +6,15 @@ from openpype.client import get_assets, get_asset_by_name from openpype.lib import ( FileDef, BoolDef, - get_subset_name_with_asset_doc, - TaskNotSetError, ) from openpype.pipeline import ( CreatedInstance, CreatorError ) +from openpype.pipeline.create import ( + get_subset_name_with_asset_doc, + TaskNotSetError, +) from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index d4740b2493..173df7d5c8 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -11,10 +11,10 @@ except Exception: from Qt import QtWidgets, QtCore, QtGui 
from openpype.client import get_asset_by_name, get_subsets -from openpype.lib import TaskNotSetError from openpype.pipeline.create import ( CreatorError, - SUBSET_NAME_ALLOWED_SYMBOLS + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, ) from openpype.tools.utils import ( ErrorMessageBox, diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 5a5f8c4c37..aa7e3be687 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -6,7 +6,6 @@ import collections from Qt import QtWidgets, QtCore, QtGui import qtawesome -from openpype.lib import TaskNotSetError from openpype.widgets.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm @@ -17,7 +16,10 @@ from openpype.tools.utils import ( BaseClickableFrame, set_style_property, ) -from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from openpype.pipeline.create import ( + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, +) from .assets_widget import AssetsDialog from .tasks_widget import TasksModel from .icons import ( diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index 1736be84ab..eab66d75b3 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -8,10 +8,12 @@ from openpype.client import ( get_subsets, get_last_version_by_subset_id, ) -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import LegacyCreator -from openpype.lib import TaskNotSetError -from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from openpype.pipeline.create import ( + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, +) from . import HelpRole, FamilyRole, ExistsRole, PluginRole, PluginKeyRole from . 
import FamilyDescriptionWidget From 7e59a577a66f857ecd28920ed457915e14c1f0b3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 19:12:28 +0200 Subject: [PATCH 0603/2550] use new import of 'get_subset_name_with_asset_doc' --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 2 +- .../hosts/flame/plugins/publish/collect_timeline_otio.py | 6 +++--- openpype/hosts/harmony/plugins/publish/collect_workfile.py | 4 ++-- openpype/hosts/photoshop/plugins/publish/collect_review.py | 2 +- .../hosts/photoshop/plugins/publish/collect_workfile.py | 2 +- .../plugins/publish/collect_bulk_mov_instances.py | 2 +- openpype/hosts/tvpaint/plugins/publish/collect_instances.py | 2 +- .../hosts/tvpaint/plugins/publish/collect_scene_render.py | 2 +- openpype/hosts/tvpaint/plugins/publish/collect_workfile.py | 2 +- .../webpublisher/plugins/publish/collect_published_files.py | 6 ++---- .../plugins/publish/collect_tvpaint_instances.py | 2 +- 11 files changed, 15 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index fef5448a4c..b1f40113a4 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -1,8 +1,8 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectWorkfile(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index 0a9b0db334..c0c7eee7f2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -1,9 +1,9 @@ import pyblish.api -import openpype.lib as oplib -from openpype.pipeline import legacy_io import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export +from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollecTimelineOTIO(pyblish.api.ContextPlugin): @@ -24,7 +24,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # create subset name - subset_name = oplib.get_subset_name_with_asset_doc( + subset_name = get_subset_name_with_asset_doc( family, variant, task_name, diff --git a/openpype/hosts/harmony/plugins/publish/collect_workfile.py b/openpype/hosts/harmony/plugins/publish/collect_workfile.py index c0493315a4..924661d310 100644 --- a/openpype/hosts/harmony/plugins/publish/collect_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/collect_workfile.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- """Collect current workfile from Harmony.""" -import pyblish.api import os +import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectWorkfile(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 2ea5503f3f..ce475524a7 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,7 +10,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc 
+from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectReview(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 9cf6d5227e..5e673bebb1 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectWorkfile(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py index 052a97af7d..7a66026e1c 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py @@ -2,8 +2,8 @@ import copy import json import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc from openpype.client import get_asset_by_name +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectBulkMovInstances(pyblish.api.InstancePlugin): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 9b6d5c4879..68bfa8ef6a 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -3,8 +3,8 @@ import copy import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectInstances(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index 20c5bb586a..a7bc2f3c76 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -3,7 +3,7 @@ import copy import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectRenderScene(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index 88c5f4dbc7..f88b32b980 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -3,8 +3,8 @@ import json import pyblish.api from openpype.client import get_asset_by_name -from openpype.lib import get_subset_name_with_asset_doc from openpype.pipeline import legacy_io +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectWorkfile(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 20e277d794..5b0a4a6910 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -23,10 +23,8 @@ from openpype.lib import ( get_ffprobe_streams, 
convert_ffprobe_fps_value, ) -from openpype.lib.plugin_tools import ( - parse_json, - get_subset_name_with_asset_doc -) +from openpype.lib.plugin_tools import parse_json +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectPublishedFiles(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 92f581be5f..3a9f8eb8f2 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -10,7 +10,7 @@ import re import copy import pyblish.api -from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name_with_asset_doc class CollectTVPaintInstances(pyblish.api.ContextPlugin): From ce31b9a47706f0c71f56fc9625d560e9cc5185a0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 19:13:23 +0200 Subject: [PATCH 0604/2550] provide more data as arguments during publishing --- .../aftereffects/plugins/publish/collect_workfile.py | 3 ++- .../flame/plugins/publish/collect_timeline_otio.py | 3 +++ .../harmony/plugins/publish/collect_workfile.py | 3 ++- .../photoshop/plugins/publish/collect_review.py | 3 ++- .../photoshop/plugins/publish/collect_workfile.py | 3 ++- .../plugins/publish/collect_bulk_mov_instances.py | 4 +++- .../tvpaint/plugins/publish/collect_instances.py | 3 ++- .../tvpaint/plugins/publish/collect_scene_render.py | 3 ++- .../tvpaint/plugins/publish/collect_workfile.py | 3 ++- .../plugins/publish/collect_published_files.py | 9 +++++++-- .../plugins/publish/collect_tvpaint_instances.py | 12 ++++++++---- 11 files changed, 35 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index b1f40113a4..bd52f569a3 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -77,7 +77,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) # Create instance instance = context.create_instance(subset) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index c0c7eee7f2..e57ef270b8 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -29,6 +29,9 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): variant, task_name, asset_doc, + context.data["projectName"], + context.data["hostName"], + project_settings=context.data["project_settings"] ) # adding otio timeline to context diff --git a/openpype/hosts/harmony/plugins/publish/collect_workfile.py b/openpype/hosts/harmony/plugins/publish/collect_workfile.py index 924661d310..3d1d2f03c2 100644 --- a/openpype/hosts/harmony/plugins/publish/collect_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/collect_workfile.py @@ -23,7 +23,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - 
host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) # Create instance diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index ce475524a7..eb2ad644e5 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -33,7 +33,8 @@ class CollectReview(pyblish.api.ContextPlugin): context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) instance = context.create_instance(subset) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 5e673bebb1..21ec914910 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -30,7 +30,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"] + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) file_path = context.data["currentFile"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py index 7a66026e1c..fa99a8c7a7 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py @@ -49,7 +49,9 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): self.subset_name_variant, task_name, asset_doc, - project_name + project_name, + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] ) instance_name = f"{asset_name}_{subset_name}" diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 68bfa8ef6a..cd7eccc067 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -113,7 +113,8 @@ class CollectInstances(pyblish.api.ContextPlugin): task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) instance_data["subset"] = new_subset_name diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index a7bc2f3c76..d909317274 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -82,7 +82,8 @@ class CollectRenderScene(pyblish.api.ContextPlugin): asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=context.data["project_settings"] ) instance_data = { diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index f88b32b980..ef67ae8003 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -45,7 +45,8 @@ 
class CollectWorkfile(pyblish.api.ContextPlugin): task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) # Create Workfile instance diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 5b0a4a6910..4a497a9514 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -79,8 +79,13 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): extension.replace(".", '')) subset_name = get_subset_name_with_asset_doc( - family, variant, task_name, asset_doc, - project_name=project_name, host_name="webpublisher" + family, + variant, + task_name, + asset_doc, + project_name=project_name, + host_name="webpublisher", + project_settings=context.data["project_settings"] ) version = self._get_next_version( project_name, asset_doc, subset_name diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 3a9f8eb8f2..bdacdbdc26 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -53,7 +53,8 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) workfile_instance = self._create_workfile_instance( context, workfile_subset_name @@ -67,7 +68,8 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): task_name, asset_doc, project_name, - host_name + host_name, + project_settings=context.data["project_settings"] ) review_instance = self._create_review_instance( context, review_subset_name @@ -121,7 +123,8 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=context.data["project_settings"] ) instance = self._create_render_pass_instance( @@ -144,7 +147,8 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=context.data["project_settings"] ) instance = self._create_render_layer_instance( context, layers, subset_name From df0565222c0f0061ca34472a02f5aa1747faf32e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 19:15:54 +0200 Subject: [PATCH 0605/2550] marked functions in openpype.lib as deprecated --- openpype/lib/plugin_tools.py | 94 +++++++++++------------------------- 1 file changed, 28 insertions(+), 66 deletions(-) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 060db94ae0..6534e7355f 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -8,16 +8,10 @@ import json import warnings import functools -from openpype.client import get_asset_by_id from openpype.settings import get_project_settings -from .profiles_filtering import filter_profiles - log = logging.getLogger(__name__) -# Subset name template used when plugin does not have defined any -DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" - class PluginToolsDeprecatedWarning(DeprecationWarning): pass @@ -64,13 +58,14 @@ def deprecated(new_destination): return _decorator(func) -class TaskNotSetError(KeyError): - def __init__(self, msg=None): - if 
not msg: - msg = "Creator's subset name template requires task name." - super(TaskNotSetError, self).__init__(msg) +@deprecated("openpype.pipeline.create.TaskNotSetError") +def TaskNotSetError(*args, **kwargs): + from openpype.pipeline.create import TaskNotSetError + + return TaskNotSetError(*args, **kwargs) +@deprecated("openpype.pipeline.create.get_subset_name_with_asset_doc") def get_subset_name_with_asset_doc( family, variant, @@ -109,61 +104,22 @@ def get_subset_name_with_asset_doc( dbcon (AvalonMongoDB): Mongo connection to be able query asset document if 'asset_doc' is not passed. """ - if not family: - return "" - if not host_name: - host_name = os.environ["AVALON_APP"] + from openpype.pipeline.create import get_subset_name_with_asset_doc - # Use only last part of class family value split by dot (`.`) - family = family.rsplit(".", 1)[-1] - - if project_name is None: - from openpype.pipeline import legacy_io - - project_name = legacy_io.Session["AVALON_PROJECT"] - - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - # Get settings - tools_settings = get_project_settings(project_name)["global"]["tools"] - profiles = tools_settings["creator"]["subset_name_profiles"] - filtering_criteria = { - "families": family, - "hosts": host_name, - "tasks": task_name, - "task_types": task_type - } - - matching_profile = filter_profiles(profiles, filtering_criteria) - template = None - if matching_profile: - template = matching_profile["template"] - - # Make sure template is set (matching may have empty string) - if not template: - template = default_template or DEFAULT_SUBSET_TEMPLATE - - # Simple check of task name existence for template with {task} in - # - missing task should be possible only in Standalone publisher - if not task_name and "{task" in template.lower(): - raise TaskNotSetError() - - fill_pairs = { - "variant": variant, - "family": family, - "task": task_name - } - if dynamic_data: - # Dynamic data may override default values - for key, value in dynamic_data.items(): - fill_pairs[key] = value - - return template.format(**prepare_template_data(fill_pairs)) + return get_subset_name_with_asset_doc( + family, + variant, + task_name, + asset_doc, + project_name, + host_name, + default_template, + dynamic_data + ) +@deprecated("openpype.pipeline.create.get_subset_name") def get_subset_name( family, variant, @@ -183,16 +139,16 @@ def get_subset_name( `get_subset_name_with_asset_doc` where asset document is expected. """ + from openpype.pipeline.create import get_subset_name + if project_name is None: project_name = dbcon.project_name - asset_doc = get_asset_by_id(project_name, asset_id, fields=["data.tasks"]) - - return get_subset_name_with_asset_doc( + return get_subset_name( family, variant, task_name, - asset_doc or {}, + asset_id, project_name, host_name, default_template, @@ -254,6 +210,9 @@ def filter_pyblish_plugins(plugins): Args: plugins (dict): Dictionary of plugins produced by :mod:`pyblish-base` `discover()` method. + + Deprecated: + Function will be removed after release version 3.15.* """ from openpype.pipeline.publish.lib import filter_pyblish_plugins @@ -277,6 +236,9 @@ def set_plugin_attributes_from_settings( Value from environment `AVALON_APP` is used if not entered. project_name (str): Name of project for which settings will be loaded. Value from environment `AVALON_PROJECT` is used if not entered. 
+ + Deprecated: + Function will be removed after release version 3.15.* """ # Function is not used anymore From 7a4cd9c1faca8c4ca3d7f2fea871c241c38b1320 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 23 Aug 2022 19:20:04 +0200 Subject: [PATCH 0606/2550] removed 'get_subset_name' and renamed 'get_subset_name_with_asset_doc' to 'get_subset_name' --- .../plugins/publish/collect_workfile.py | 4 +- .../plugins/publish/collect_timeline_otio.py | 4 +- .../plugins/publish/collect_workfile.py | 4 +- .../plugins/publish/collect_review.py | 4 +- .../plugins/publish/collect_workfile.py | 4 +- .../publish/collect_bulk_mov_instances.py | 4 +- .../plugins/create/create_movie_batch.py | 6 +-- .../plugins/publish/collect_instances.py | 4 +- .../plugins/publish/collect_scene_render.py | 4 +- .../plugins/publish/collect_workfile.py | 4 +- .../publish/collect_published_files.py | 4 +- .../publish/collect_tvpaint_instances.py | 10 ++--- openpype/lib/plugin_tools.py | 13 +++--- openpype/pipeline/create/__init__.py | 2 - openpype/pipeline/create/subset_name.py | 40 +------------------ 15 files changed, 37 insertions(+), 74 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index bd52f569a3..3c5013b3bd 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,7 +2,7 @@ import os import pyblish.api from openpype.pipeline import legacy_io -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -71,7 +71,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, self.default_variant, context.data["anatomyData"]["task"]["name"], diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index e57ef270b8..917041e053 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -3,7 +3,7 @@ import pyblish.api import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export from openpype.pipeline import legacy_io -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollecTimelineOTIO(pyblish.api.ContextPlugin): @@ -24,7 +24,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # create subset name - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( family, variant, task_name, diff --git a/openpype/hosts/harmony/plugins/publish/collect_workfile.py b/openpype/hosts/harmony/plugins/publish/collect_workfile.py index 3d1d2f03c2..3624147435 100644 --- a/openpype/hosts/harmony/plugins/publish/collect_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/collect_workfile.py @@ -3,7 +3,7 @@ import os import pyblish.api -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -17,7 +17,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): """Plugin entry point.""" family = "workfile" basename = 
os.path.basename(context.data["currentFile"]) - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, "", context.data["anatomyData"]["task"]["name"], diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index eb2ad644e5..7f395b46d7 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,7 +10,7 @@ import os import pyblish.api -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectReview(pyblish.api.ContextPlugin): @@ -27,7 +27,7 @@ class CollectReview(pyblish.api.ContextPlugin): def process(self, context): family = "review" - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, context.data.get("variant", ''), context.data["anatomyData"]["task"]["name"], diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 21ec914910..9a5aad5569 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -24,7 +24,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): family = "workfile" # context.data["variant"] might come only from collect_batch_data variant = context.data.get("variant") or self.default_variant - subset = get_subset_name_with_asset_doc( + subset = get_subset_name( family, variant, context.data["anatomyData"]["task"]["name"], diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py index fa99a8c7a7..7925b0ecf3 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py @@ -3,7 +3,7 @@ import json import pyblish.api from openpype.client import get_asset_by_name -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectBulkMovInstances(pyblish.api.InstancePlugin): @@ -44,7 +44,7 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): task_name = available_task_names[_task_name_low] break - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.new_instance_family, self.subset_name_variant, task_name, diff --git a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py index 5d0fe4b177..abe29d7473 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -12,7 +12,7 @@ from openpype.pipeline import ( CreatorError ) from openpype.pipeline.create import ( - get_subset_name_with_asset_doc, + get_subset_name, TaskNotSetError, ) @@ -132,7 +132,7 @@ class BatchMovieCreator(TrayPublishCreator): task_name = self._get_task_name(asset_doc) try: - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.family, variant, task_name, @@ -145,7 +145,7 @@ class 
BatchMovieCreator(TrayPublishCreator): # but user have ability to change it # NOTE: This expect that there is not task 'Undefined' on asset task_name = "Undefined" - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.family, variant, task_name, diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index cd7eccc067..ae1326a5bd 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -4,7 +4,7 @@ import pyblish.api from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectInstances(pyblish.api.ContextPlugin): @@ -107,7 +107,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Use empty variant value variant = "" task_name = legacy_io.Session["AVALON_TASK"] - new_subset_name = get_subset_name_with_asset_doc( + new_subset_name = get_subset_name( family, variant, task_name, diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index d909317274..92a2815ba0 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -3,7 +3,7 @@ import copy import pyblish.api from openpype.client import get_asset_by_name -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectRenderScene(pyblish.api.ContextPlugin): @@ -75,7 +75,7 @@ class CollectRenderScene(pyblish.api.ContextPlugin): dynamic_data["render_pass"] = dynamic_data["renderpass"] task_name = workfile_context["task"] - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( "render", variant, task_name, diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index ef67ae8003..8c7c8c3899 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -4,7 +4,7 @@ import pyblish.api from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): @@ -39,7 +39,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # Use empty variant value variant = "" task_name = legacy_io.Session["AVALON_TASK"] - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( family, variant, task_name, diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 4a497a9514..f2d1d19609 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -24,7 +24,7 @@ from openpype.lib import ( convert_ffprobe_fps_value, ) from openpype.lib.plugin_tools import parse_json -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectPublishedFiles(pyblish.api.ContextPlugin): @@ -78,7 +78,7 @@ class 
CollectPublishedFiles(pyblish.api.ContextPlugin): is_sequence, extension.replace(".", '')) - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( family, variant, task_name, diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index bdacdbdc26..948e86c23e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -10,7 +10,7 @@ import re import copy import pyblish.api -from openpype.pipeline.create import get_subset_name_with_asset_doc +from openpype.pipeline.create import get_subset_name class CollectTVPaintInstances(pyblish.api.ContextPlugin): @@ -47,7 +47,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): new_instances = [] # Workfile instance - workfile_subset_name = get_subset_name_with_asset_doc( + workfile_subset_name = get_subset_name( self.workfile_family, self.workfile_variant, task_name, @@ -62,7 +62,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): new_instances.append(workfile_instance) # Review instance - review_subset_name = get_subset_name_with_asset_doc( + review_subset_name = get_subset_name( self.review_family, self.review_variant, task_name, @@ -116,7 +116,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): "family": "render" } - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.render_pass_family, render_pass, task_name, @@ -140,7 +140,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): # Override family for subset name "family": "render" } - subset_name = get_subset_name_with_asset_doc( + subset_name = get_subset_name( self.render_layer_family, variant, task_name, diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 6534e7355f..065188625e 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -8,6 +8,7 @@ import json import warnings import functools +from openpype.client import get_asset_by_id from openpype.settings import get_project_settings log = logging.getLogger(__name__) @@ -65,7 +66,7 @@ def TaskNotSetError(*args, **kwargs): return TaskNotSetError(*args, **kwargs) -@deprecated("openpype.pipeline.create.get_subset_name_with_asset_doc") +@deprecated("openpype.pipeline.create.get_subset_name") def get_subset_name_with_asset_doc( family, variant, @@ -105,9 +106,9 @@ def get_subset_name_with_asset_doc( if 'asset_doc' is not passed. 
""" - from openpype.pipeline.create import get_subset_name_with_asset_doc + from openpype.pipeline.create import get_subset_name - return get_subset_name_with_asset_doc( + return get_subset_name( family, variant, task_name, @@ -119,7 +120,7 @@ def get_subset_name_with_asset_doc( ) -@deprecated("openpype.pipeline.create.get_subset_name") +@deprecated def get_subset_name( family, variant, @@ -144,11 +145,13 @@ def get_subset_name( if project_name is None: project_name = dbcon.project_name + asset_doc = get_asset_by_id(project_name, asset_id, fields=["data.tasks"]) + return get_subset_name( family, variant, task_name, - asset_id, + asset_doc, project_name, host_name, default_template, diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index 4f3d2c03e5..b698224924 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -6,7 +6,6 @@ from .constants import ( from .subset_name import ( TaskNotSetError, get_subset_name, - get_subset_name_with_asset_doc, ) from .creator_plugins import ( @@ -42,7 +41,6 @@ __all__ = ( "TaskNotSetError", "get_subset_name", - "get_subset_name_with_asset_doc", "CreatorError", diff --git a/openpype/pipeline/create/subset_name.py b/openpype/pipeline/create/subset_name.py index b6028d6427..f508263708 100644 --- a/openpype/pipeline/create/subset_name.py +++ b/openpype/pipeline/create/subset_name.py @@ -1,6 +1,5 @@ import os -from openpype.client import get_asset_by_id from openpype.settings import get_project_settings from openpype.lib import filter_profiles, prepare_template_data from openpype.pipeline import legacy_io @@ -15,7 +14,7 @@ class TaskNotSetError(KeyError): super(TaskNotSetError, self).__init__(msg) -def get_subset_name_with_asset_doc( +def get_subset_name( family, variant, task_name, @@ -108,40 +107,3 @@ def get_subset_name_with_asset_doc( fill_pairs[key] = value return template.format(**prepare_template_data(fill_pairs)) - - -def get_subset_name( - family, - variant, - task_name, - asset_id, - project_name=None, - host_name=None, - default_template=None, - dynamic_data=None, - project_settings=None -): - """Calculate subset name using OpenPype settings. - - This variant of function expects asset id as argument. - - This is legacy function should be replaced with - `get_subset_name_with_asset_doc` where asset document is expected. - """ - - if project_name is None: - project_name = legacy_io.Session["AVALON_PROJECT"] - - asset_doc = get_asset_by_id(project_name, asset_id, fields=["data.tasks"]) - - return get_subset_name_with_asset_doc( - family, - variant, - task_name, - asset_doc or {}, - project_name, - host_name, - default_template, - dynamic_data, - project_settings - ) From 65d785d100c986128a88f1dc2c77b5321b85d7da Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 24 Aug 2022 00:29:08 +0200 Subject: [PATCH 0607/2550] Draft stash for refactoring maya submit deadline to use `AbstractSubmitDeadline` base. - This does *NOT* work currently! 
--- .../deadline/abstract_submit_deadline.py | 22 + .../plugins/publish/submit_maya_deadline.py | 1297 ++++++++--------- 2 files changed, 623 insertions(+), 696 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 0bad981fdf..577378335e 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -359,6 +359,27 @@ class DeadlineJobInfo(object): def OutputDirectory(self, val): # noqa: N802 self._outputDirectory.append(val) + # Asset Dependency + # ---------------------------------------------- + _assetDependency = attr.ib(factory=list) + + @property + def AssetDependency(self): # noqa: N802 + """Return all OutputDirectory values formatted for Deadline. + + Returns: + dict: as `{'OutputDirectory0': 'dir'}` + + """ + out = {} + for index, v in enumerate(self._assetDependency): + out["AssetDependency{}".format(index)] = v + return out + + @OutputDirectory.setter + def AssetDependency(self, val): # noqa: N802 + self._assetDependency.append(val) + # Tile Job # ---------------------------------------------- TileJob = attr.ib(default=None) # Default: false @@ -396,6 +417,7 @@ class DeadlineJobInfo(object): serialized.update(self.OutputFilename) serialized.update(self.OutputFilenameTile) serialized.update(self.OutputDirectory) + serialized.update(self.AssetDependency) return serialized diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7966861358..6dfa48a9f8 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -18,7 +18,6 @@ Attributes: from __future__ import print_function import os -import json import getpass import copy import re @@ -27,252 +26,32 @@ from datetime import datetime import itertools from collections import OrderedDict +import attr import clique -import requests from maya import cmds -import pyblish.api - -from openpype.lib import requests_post from openpype.hosts.maya.api import lib from openpype.pipeline import legacy_io -# Documentation for keys available at: -# https://docs.thinkboxsoftware.com -# /products/deadline/8.0/1_User%20Manual/manual -# /manual-submission.html#job-info-file-options - -payload_skeleton_template = { - "JobInfo": { - "BatchName": None, # Top-level group name - "Name": None, # Job name, as seen in Monitor - "UserName": None, - "Plugin": "MayaBatch", - "Frames": "{start}-{end}x{step}", - "Comment": None, - "Priority": 50, - }, - "PluginInfo": { - "SceneFile": None, # Input - "OutputFilePath": None, # Output directory and filename - "OutputFilePrefix": None, - "Version": cmds.about(version=True), # Mandatory for Deadline - "UsingRenderLayers": True, - "RenderLayer": None, # Render only this layer - "Renderer": None, - "ProjectPath": None, # Resolve relative references - "RenderSetupIncludeLights": None, # Include all lights flag. - }, - "AuxFiles": [] # Mandatory for Deadline, may be empty -} +from openpype_modules.deadline import abstract_submit_deadline +from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo -def _format_tiles( - filename, index, tiles_x, tiles_y, - width, height, prefix): - """Generate tile entries for Deadline tile job. 
- - Returns two dictionaries - one that can be directly used in Deadline - job, second that can be used for Deadline Assembly job configuration - file. - - This will format tile names: - - Example:: - { - "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr", - "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr" - } - - And add tile prefixes like: - - Example:: - Image prefix is: - `maya///_` - - Result for tile 0 for 4x4 will be: - `maya///_tile_1x1_4x4__` - - Calculating coordinates is tricky as in Job they are defined as top, - left, bottom, right with zero being in top-left corner. But Assembler - configuration file takes tile coordinates as X, Y, Width and Height and - zero is bottom left corner. - - Args: - filename (str): Filename to process as tiles. - index (int): Index of that file if it is sequence. - tiles_x (int): Number of tiles in X. - tiles_y (int): Number if tikes in Y. - width (int): Width resolution of final image. - height (int): Height resolution of final image. - prefix (str): Image prefix. - - Returns: - (dict, dict): Tuple of two dictionaires - first can be used to - extend JobInfo, second has tiles x, y, width and height - used for assembler configuration. - - """ - tile = 0 - out = {"JobInfo": {}, "PluginInfo": {}} - cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y - - cfg["TilesCropped"] = "False" - - for tile_x in range(1, tiles_x + 1): - for tile_y in reversed(range(1, tiles_y + 1)): - tile_prefix = "_tile_{}x{}_{}x{}_".format( - tile_x, tile_y, - tiles_x, - tiles_y - ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) - new_filename = "{}/{}{}".format( - os.path.dirname(filename), - tile_prefix, - os.path.basename(filename) - ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 - - cfg["Tile{}".format(tile)] = new_filename - cfg["Tile{}Tile".format(tile)] = new_filename - cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) - - cfg["Tile{}Width".format(tile)] = w_space - cfg["Tile{}Height".format(tile)] = h_space - - tile += 1 - return out, cfg +@attr.s +class DeadlinePluginInfo(): + SceneFile = attr.ib(default=None) # Input + OutputFilePath = attr.ib(default=None) # Output directory and filename + OutputFilePrefix = attr.ib(default=None) + Version = attr.ib(default=None) # Mandatory for Deadline + UsingRenderLayers = attr.ib(default=True) + RenderLayer = attr.ib(default=None) # Render only this layer + Renderer = attr.ib(default=None) + ProjectPath = attr.ib(default=None) # Resolve relative references + RenderSetupIncludeLights = attr.ib(default=None) # Include all lights flag -def get_renderer_variables(renderlayer, root): - """Retrieve the extension which has been set in the VRay settings. - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. 
Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. - root (str): base path to render - - Returns: - dict - - """ - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings( - fullPath=True, - gin="#" * int(padding), - lut=True, - layer=renderlayer or lib.get_current_renderlayer())[0] - filename_0 = re.sub('_', '_beauty', - filename_0, flags=re.IGNORECASE) - prefix_attr = "defaultRenderGlobals.imageFilePrefix" - - scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) - - if renderer == "vray": - renderlayer = renderlayer.split("_")[-1] - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - if extension in ["exr (multichannel)", "exr (deep)"]: - extension = "exr" - - prefix_attr = "vraySettings.fileNamePrefix" - filename_prefix = cmds.getAttr(prefix_attr) - # we need to determine path for vray as maya `renderSettings` query - # does not work for vray. - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = "{}.{}.{}".format( - filename_0, "#" * int(padding), extension) - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "renderman": - prefix_attr = "rmanGlobals.imageFileFormat" - # NOTE: This is guessing extensions from renderman display types. - # Some of them are just framebuffers, d_texture format can be - # set in display setting. We set those now to None, but it - # should be handled more gracefully. - display_types = { - "d_deepexr": "exr", - "d_it": None, - "d_null": None, - "d_openexr": "exr", - "d_png": "png", - "d_pointcloud": "ptc", - "d_targa": "tga", - "d_texture": None, - "d_tiff": "tif" - } - - extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0], - "exr" - ) or "exr" - - filename_prefix = "{}/{}".format( - cmds.getAttr("rmanGlobals.imageOutputDir"), - cmds.getAttr("rmanGlobals.imageFileFormat") - ) - - renderlayer = renderlayer.split("_")[-1] - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "redshift": - # mapping redshift extension dropdown values to strings - ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] - extension = ext_mapping[ - cmds.getAttr("redshiftOptions.imageFormat") - ] - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. 
- filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - - filename_prefix = cmds.getAttr(prefix_attr) - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - -class MayaSubmitDeadline(pyblish.api.InstancePlugin): +class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): """Submit available render layers to Deadline. Renders are submitted to a Deadline Web Service as @@ -284,15 +63,12 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): """ - label = "Submit to Deadline" - order = pyblish.api.IntegratorOrder + 0.1 + label = "Submit Render to Deadline" hosts = ["maya"] families = ["renderlayer"] targets = ["local"] - use_published = True tile_assembler_plugin = "OpenPypeTileAssembler" - asset_dependencies = False priority = 50 tile_priority = 50 limit_groups = [] @@ -300,32 +76,173 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): pluginInfo = {} group = "none" - def process(self, instance): - """Plugin entry point.""" - instance.data["toBeRenderedOn"] = "deadline" + def get_job_info(self): + job_info = DeadlineJobInfo(Plugin="MayaBatch") + + # todo: test whether this works for existing production cases + # where custom jobInfo was stored in the project settings + for key, value in self.jobInfo.items(): + setattr(job_info, key, value) + + instance = self._instance context = instance.context - self._instance = instance - self.payload_skeleton = copy.deepcopy(payload_skeleton_template) + filepath = context.data["currentFile"] + filename = os.path.basename(filepath) - # get default deadline webservice url from deadline module - self.deadline_url = instance.context.data.get("defaultDeadline") - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - self.deadline_url = instance.data.get("deadlineUrl") - assert self.deadline_url, "Requires Deadline Webservice URL" + job_info.Name = "%s - %s" % (filename, instance.name) + job_info.BatchName = filename + job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") + job_info.UserName = context.data.get( + "deadlineUser", getpass.getuser()) - # just using existing names from Setting - self._job_info = self.jobInfo + # Deadline requires integers in frame range + frames = "{start}-{end}x{step}".format( + start=int(instance.data["frameStartHandle"]), + end=int(instance.data["frameEndHandle"]), + step=int(instance.data["byFrameStep"]), + ) + job_info.Frames = frames - self._plugin_info = self.pluginInfo + job_info.Pool = instance.data.get("primaryPool") + job_info.SecondaryPool = instance.data.get("secondaryPool") + job_info.ChunkSize = instance.data.get("chunkSize", 10) + job_info.Comment = context.data.get("comment") + job_info.Priority = instance.data.get("priority", self.priority) + + if self.group != "none" and self.group: + job_info.Group = self.group + + if self.limit_groups: + job_info.LimitGroups = ",".join(self.limit_groups) + + self.payload_skeleton["JobInfo"]["Name"] = jobname + self.payload_skeleton["JobInfo"]["BatchName"] = src_filename + + # Optional, enable double-click to preview rendered + # frames from Deadline Monitor + self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ + os.path.dirname(output_filename_0).replace("\\", "/") + self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ + output_filename_0.replace("\\", "/") + + # Add options from RenderGlobals------------------------------------- + render_globals = instance.data.get("renderGlobals", 
{}) + self.payload_skeleton["JobInfo"].update(render_globals) + + keys = [ + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "OPENPYPE_SG_USER", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "AVALON_APP_NAME", + "OPENPYPE_DEV", + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" + ] + # Add mongo url if it's enabled + if self._instance.context.data.get("deadlinePassMongoUrl"): + keys.append("OPENPYPE_MONGO") + + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **legacy_io.Session) + + + # TODO: Taken from old publish class - test whether still needed + environment["OPENPYPE_LOG_NO_COLORS"] = "1" + environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) + # to recognize job from PYPE for turning Event On/Off + environment["OPENPYPE_RENDER_JOB"] = "1" + + for key in keys: + val = environment.get(key) + if val: + job_info.EnvironmentKeyValue = "{key}={value}".format( + key=key, + value=val + ) + # to recognize job from PYPE for turning Event On/Off + job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + + for i, filepath in enumerate(instance.data["files"]): + dirname = os.path.dirname(filepath) + fname = os.path.basename(filepath) + job_info.OutputDirectory = dirname.replace("\\", "/") + job_info.OutputFilename = fname + + # Adding file dependencies. + if self.asset_dependencies: + dependencies = instance.context.data["fileDependencies"] + dependencies.append(context.data["currentFile"]) + for dependency in dependencies: + job_info.AssetDependency = dependency + + # Add list of expected files to job + # --------------------------------- + exp = instance.data.get("expectedFiles") + + def _get_output_filename(files): + col, rem = clique.assemble(files) + if not col and rem: + # we couldn't find any collections but have + # individual files. 
+ assert len(rem) == 1, ( + "Found multiple non related files " + "to render, don't know what to do " + "with them.") + return rem[0] + else: + return col[0].format('{head}{padding}{tail}') + + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + for _aov, files in exp[0].items(): + output_file = _get_output_filename(files) + job_info.OutputFilename = output_file + else: + output_file = _get_output_filename(exp) + job_info.OutputFilename = output_file + + return job_info + + def get_plugin_info(self): + + instance = self._instance + context = instance.context + + renderlayer = instance.data['setMembers'] # rs_beauty + + self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer + self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa + + # Output driver to render + plugin_info = DeadlinePluginInfo( + SceneFile=context.data["currentFile"], + Version=cmds.about(version=True), + ) + + return attr.asdict(plugin_info) + + def process_submission(self): + # Override to NOT submit by default when calling super process() method + pass + + def process(self, instance): + super(MayaSubmitDeadline, self).process(instance) + + # TODO: Avoid the need for this logic here, needed for submit publish + # Store output dir for unified publisher (filesequence) + output_dir = os.path.dirname(instance.data["files"][0]) + instance.data["outputDir"] = output_dir + instance.data["toBeRenderedOn"] = "deadline" self.limit_groups = self.limit context = instance.context workspace = context.data["workspaceDir"] - anatomy = context.data['anatomy'] - instance.data["toBeRenderedOn"] = "deadline" filepath = None patches = ( @@ -336,80 +253,24 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "scene_patches", {}) ) - # Handle render/export from published scene or not ------------------ - if self.use_published: - patched_files = [] - for i in context: - if "workfile" not in i.data["families"]: - continue - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("name") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - filepath = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - filepath)) + # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") + # todo: on self.use_published replace path for publishRenderMetadataFolder + # todo: on self.use_published apply scene patches to workfile instance + # rep = i.data.get("representations")[0].get("name") - if not os.path.exists(filepath): - self.log.error("published scene does not exist!") - raise - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext( - os.path.basename(filepath))[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - exp = instance.data.get("expectedFiles") + # if instance.data.get("publishRenderMetadataFolder"): + # instance.data["publishRenderMetadataFolder"] = \ + # instance.data["publishRenderMetadataFolder"].replace( + # orig_scene, new_scene) + # self.log.info("Scene name was switched {} -> {}".format( + # orig_scene, 
new_scene + # )) + # # patch workfile is needed + # if filepath not in patched_files: + # patched_file = self._patch_workfile(filepath, patches) + # patched_files.append(patched_file) - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - f.replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( - f.replace(orig_scene, new_scene) - ) - instance.data["expectedFiles"] = [new_exp] - - if instance.data.get("publishRenderMetadataFolder"): - instance.data["publishRenderMetadataFolder"] = \ - instance.data["publishRenderMetadataFolder"].replace( - orig_scene, new_scene) - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) - # patch workfile is needed - if filepath not in patched_files: - patched_file = self._patch_workfile(filepath, patches) - patched_files.append(patched_file) - - all_instances = [] - for result in context.data["results"]: - if (result["instance"] is not None and - result["instance"] not in all_instances): # noqa: E128 - all_instances.append(result["instance"]) - - # fallback if nothing was set - if not filepath: - self.log.warning("Falling back to workfile") - filepath = context.data["currentFile"] - - self.log.debug(filepath) + filepath = self.scene_path # collect by super().process # Gather needed data ------------------------------------------------ default_render_file = instance.context.data.get('project_settings')\ @@ -417,10 +278,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): .get('RenderSettings')\ .get('default_render_image_folder') filename = os.path.basename(filepath) - comment = context.data.get("comment", "") dirname = os.path.join(workspace, default_render_file) renderlayer = instance.data['setMembers'] # rs_beauty - deadline_user = context.data.get("user", getpass.getuser()) # Always use the original work file name for the Job name even when # rendering is done from the published Work File. 
The original work @@ -454,116 +313,34 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): pass # Fill in common data to payload ------------------------------------ - payload_data = {} - payload_data["filename"] = filename - payload_data["filepath"] = filepath - payload_data["jobname"] = jobname - payload_data["deadline_user"] = deadline_user - payload_data["comment"] = comment - payload_data["output_filename_0"] = output_filename_0 - payload_data["render_variables"] = render_variables - payload_data["renderlayer"] = renderlayer - payload_data["workspace"] = workspace - payload_data["dirname"] = dirname - - self.log.info("--- Submission data:") - for k, v in payload_data.items(): - self.log.info("- {}: {}".format(k, v)) - self.log.info("-" * 20) - - frame_pattern = self.payload_skeleton["JobInfo"]["Frames"] - self.payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format( - start=int(self._instance.data["frameStartHandle"]), - end=int(self._instance.data["frameEndHandle"]), - step=int(self._instance.data["byFrameStep"])) - - self.payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get( - "mayaRenderPlugin", "MayaBatch") - - self.payload_skeleton["JobInfo"]["BatchName"] = src_filename - # Job name, as seen in Monitor - self.payload_skeleton["JobInfo"]["Name"] = jobname - # Arbitrary username, for visualisation in Monitor - self.payload_skeleton["JobInfo"]["UserName"] = deadline_user - # Set job priority - self.payload_skeleton["JobInfo"]["Priority"] = \ - self._instance.data.get("priority", self.priority) - - if self.group != "none" and self.group: - self.payload_skeleton["JobInfo"]["Group"] = self.group - - if self.limit_groups: - self.payload_skeleton["JobInfo"]["LimitGroups"] = \ - ",".join(self.limit_groups) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ - os.path.dirname(output_filename_0).replace("\\", "/") - self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ - output_filename_0.replace("\\", "/") - - self.payload_skeleton["JobInfo"]["Comment"] = comment - self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa - # Adding file dependencies. 
- dependencies = instance.context.data["fileDependencies"] - dependencies.append(filepath) - if self.asset_dependencies: - for dependency in dependencies: - key = "AssetDependency" + str(dependencies.index(dependency)) - self.payload_skeleton["JobInfo"][key] = dependency - - # Handle environments ----------------------------------------------- - # We need those to pass them to pype for it to set correct context - keys = [ - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", - "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" - ] - # Add mongo url if it's enabled - if instance.context.data.get("deadlinePassMongoUrl"): - keys.append("OPENPYPE_MONGO") - - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) - # to recognize job from PYPE for turning Event On/Off - environment["OPENPYPE_RENDER_JOB"] = "1" - self.payload_skeleton["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - # Add options from RenderGlobals------------------------------------- - render_globals = instance.data.get("renderGlobals", {}) - self.payload_skeleton["JobInfo"].update(render_globals) + payload_data = { + "filename": filename, + "filepath": filepath, + "jobname": jobname, + "comment": comment, + "output_filename_0": output_filename_0, + "render_variables": render_variables, + "renderlayer": renderlayer, + "workspace": workspace, + "dirname": dirname, + } # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in instance.data["families"] for x in ['vrayscene', 'assscene']), ( "Vray Scene and Ass Scene options are mutually exclusive") - if "vrayscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "vray") - if "assscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "arnold") - - # Prepare main render job ------------------------------------------- if "vrayscene" in instance.data["families"]: + vray_export_payload = self._get_vray_export_payload(payload_data) + export_job = self.submit(vray_export_payload) + payload = self._get_vray_render_payload(payload_data) + elif "assscene" in instance.data["families"]: + ass_export_payload = self._get_arnold_export_payload(payload_data) + export_job = self.submit(ass_export_payload) + payload = self._get_arnold_render_payload(payload_data) else: payload = self._get_maya_payload(payload_data) @@ -572,267 +349,222 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): if export_job: payload["JobInfo"]["JobDependency0"] = export_job - # Add list of expected files to job --------------------------------- - exp = instance.data.get("expectedFiles") - exp_index = 0 - output_filenames = {} - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - for _aov, files in exp[0].items(): - col, rem = clique.assemble(files) - if not col and rem: - # we couldn't find any collections but have - # individual files. 
- assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - exp_index += 1 - else: - col, rem = clique.assemble(exp) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file - plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) - self.preflight_check(instance) - # add jobInfo and pluginInfo variables from Settings - payload["JobInfo"].update(self._job_info) - payload["PluginInfo"].update(self._plugin_info) + payload["JobInfo"].update(self.jobInfo) + payload["PluginInfo"].update(self.pluginInfo) - # Prepare tiles data ------------------------------------------------ if instance.data.get("tileRendering"): - # if we have sequence of files, we need to create tile job for - # every frame + # Prepare tiles data + self._tile_render(instance, payload) + else: + # Submit main render job + self.submit(payload) - payload["JobInfo"]["TileJob"] = True - payload["JobInfo"]["TileJobTilesInX"] = instance.data.get("tilesX") - payload["JobInfo"]["TileJobTilesInY"] = instance.data.get("tilesY") - payload["PluginInfo"]["ImageHeight"] = instance.data.get("resolutionHeight") # noqa: E501 - payload["PluginInfo"]["ImageWidth"] = instance.data.get("resolutionWidth") # noqa: E501 - payload["PluginInfo"]["RegionRendering"] = True + def _tile_render(self, instance, payload): - assembly_payload = { - "AuxFiles": [], - "JobInfo": { - "BatchName": payload["JobInfo"]["BatchName"], - "Frames": 1, - "Name": "{} - Tile Assembly Job".format( - payload["JobInfo"]["Name"]), - "OutputDirectory0": - payload["JobInfo"]["OutputDirectory0"].replace( - "\\", "/"), - "Plugin": self.tile_assembler_plugin, - "MachineLimit": 1 - }, - "PluginInfo": { - "CleanupTiles": 1, - "ErrorOnMissing": True - } + # As collected by super process() + job_info = self.job_info + plugin_info = self.pluginInfo + + # if we have sequence of files, we need to create tile job for + # every frame + + job_info.TileJob = True + job_info.TileJobTilesInX = instance.data.get("tilesX") + job_info.TileJobTilesInY = instance.data.get("tilesY") + + plugin_info["ImageHeight"] = instance.data.get("resolutionHeight") + plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") + plugin_info["RegionRendering"] = True + + assembly_payload = { + "AuxFiles": [], + "JobInfo": { + "BatchName": payload["JobInfo"]["BatchName"], + "Frames": 1, + "Name": "{} - Tile Assembly Job".format( + 
payload["JobInfo"]["Name"]), + "OutputDirectory0": + payload["JobInfo"]["OutputDirectory0"].replace( + "\\", "/"), + "Plugin": self.tile_assembler_plugin, + "MachineLimit": 1 + }, + "PluginInfo": { + "CleanupTiles": 1, + "ErrorOnMissing": True } - assembly_payload["JobInfo"].update(output_filenames) - assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( - "tile_priority", self.tile_priority) - assembly_payload["JobInfo"]["UserName"] = deadline_user + } + assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( + "tile_priority", self.tile_priority) - frame_payloads = [] - assembly_payloads = [] + frame_payloads = [] + assembly_payloads = [] - R_FRAME_NUMBER = re.compile(r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 - REPL_FRAME_NUMBER = re.compile(r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501 + R_FRAME_NUMBER = re.compile( + r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 + REPL_FRAME_NUMBER = re.compile( + r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501 - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - # get files from `beauty` - files = exp[0].get("beauty") - # assembly files are used for assembly jobs as we need to put - # together all AOVs - assembly_files = list( - itertools.chain.from_iterable( - [f for _, f in exp[0].items()])) - if not files: - # if beauty doesn't exists, use first aov we found - files = exp[0].get(list(exp[0].keys())[0]) - else: - files = exp - assembly_files = files + exp = instance.data["expectedFiles"] + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + # get files from `beauty` + files = exp[0].get("beauty") + # assembly files are used for assembly jobs as we need to put + # together all AOVs + assembly_files = list( + itertools.chain.from_iterable( + [f for _, f in exp[0].items()])) + if not files: + # if beauty doesn't exists, use first aov we found + files = exp[0].get(list(exp[0].keys())[0]) + else: + files = exp + assembly_files = files - frame_jobs = {} + frame_jobs = {} - file_index = 1 - for file in files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_payload = copy.deepcopy(payload) - new_payload["JobInfo"]["Name"] = \ - "{} (Frame {} - {} tiles)".format( - payload["JobInfo"]["Name"], - frame, - instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 + file_index = 1 + for file in files: + frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_payload = copy.deepcopy(payload) + new_payload["JobInfo"]["Name"] = \ + "{} (Frame {} - {} tiles)".format( + payload["JobInfo"]["Name"], + frame, + instance.data.get("tilesX") * instance.data.get("tilesY") + # noqa: E501 ) - self.log.info( - "... preparing job {}".format( - new_payload["JobInfo"]["Name"])) - new_payload["JobInfo"]["TileJobFrame"] = frame + self.log.info( + "... 
preparing job {}".format( + new_payload["JobInfo"]["Name"])) + new_payload["JobInfo"]["TileJobFrame"] = frame - tiles_data = _format_tiles( + tiles_data = _format_tiles( + file, 0, + instance.data.get("tilesX"), + instance.data.get("tilesY"), + instance.data.get("resolutionWidth"), + instance.data.get("resolutionHeight"), + payload["PluginInfo"]["OutputFilePrefix"] + )[0] + new_payload["JobInfo"].update(tiles_data["JobInfo"]) + new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + + self.log.info("hashing {} - {}".format(file_index, file)) + job_hash = hashlib.sha256( + ("{}_{}".format(file_index, file)).encode("utf-8")) + frame_jobs[frame] = job_hash.hexdigest() + new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() + new_payload["JobInfo"]["ExtraInfo1"] = file + + frame_payloads.append(new_payload) + file_index += 1 + + file_index = 1 + for file in assembly_files: + frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_assembly_payload = copy.deepcopy(assembly_payload) + new_assembly_payload["JobInfo"]["Name"] = \ + "{} (Frame {})".format( + assembly_payload["JobInfo"]["Name"], + frame) + new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( + REPL_FRAME_NUMBER, + "\\1{}\\3".format("#" * len(frame)), file) + + new_assembly_payload["PluginInfo"]["Renderer"] = \ + self._instance.data["renderer"] # noqa: E501 + new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[ + frame] # noqa: E501 + new_assembly_payload["JobInfo"]["ExtraInfo1"] = file + assembly_payloads.append(new_assembly_payload) + file_index += 1 + + self.log.info( + "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) + + url = "{}/api/jobs".format(self.deadline_url) + tiles_count = instance.data.get("tilesX") * instance.data.get( + "tilesY") # noqa: E501 + + for tile_job in frame_payloads: + response = self.submit(tile_job) + + job_id = response.json()["_id"] + hash = response.json()["Props"]["Ex0"] + + for assembly_job in assembly_payloads: + if assembly_job["JobInfo"]["ExtraInfo0"] == hash: + assembly_job["JobInfo"]["JobDependency0"] = job_id + + for assembly_job in assembly_payloads: + file = assembly_job["JobInfo"]["ExtraInfo1"] + # write assembly job config files + now = datetime.now() + + config_file = os.path.join( + os.path.dirname(output_filename_0), + "{}_config_{}.txt".format( + os.path.splitext(file)[0], + now.strftime("%Y_%m_%d_%H_%M_%S") + ) + ) + + try: + if not os.path.isdir(os.path.dirname(config_file)): + os.makedirs(os.path.dirname(config_file)) + except OSError: + # directory is not available + self.log.warning( + "Path is unreachable: `{}`".format( + os.path.dirname(config_file))) + + # add config file as job auxFile + assembly_job["AuxFiles"] = [config_file] + + with open(config_file, "w") as cf: + print("TileCount={}".format(tiles_count), file=cf) + print("ImageFileName={}".format(file), file=cf) + print("ImageWidth={}".format( + instance.data.get("resolutionWidth")), file=cf) + print("ImageHeight={}".format( + instance.data.get("resolutionHeight")), file=cf) + + tiles = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), payload["PluginInfo"]["OutputFilePrefix"] - )[0] - new_payload["JobInfo"].update(tiles_data["JobInfo"]) - new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + )[1] + sorted(tiles) + for k, v in tiles.items(): + print("{}={}".format(k, v), file=cf) - self.log.info("hashing {} - {}".format(file_index, file)) - job_hash = 
hashlib.sha256( - ("{}_{}".format(file_index, file)).encode("utf-8")) - frame_jobs[frame] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo1"] = file + job_idx = 1 + instance.data["assemblySubmissionJobs"] = [] + for ass_job in assembly_payloads: + self.log.info("submitting assembly job {} of {}".format( + job_idx, len(assembly_payloads) + )) + response = self.submit(ass_job) - frame_payloads.append(new_payload) - file_index += 1 + instance.data["assemblySubmissionJobs"].append( + response.json()["_id"]) + job_idx += 1 - file_index = 1 - for file in assembly_files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_assembly_payload = copy.deepcopy(assembly_payload) - new_assembly_payload["JobInfo"]["Name"] = \ - "{} (Frame {})".format( - assembly_payload["JobInfo"]["Name"], - frame) - new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( - REPL_FRAME_NUMBER, - "\\1{}\\3".format("#" * len(frame)), file) - - new_assembly_payload["PluginInfo"]["Renderer"] = self._instance.data["renderer"] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[frame] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo1"] = file - assembly_payloads.append(new_assembly_payload) - file_index += 1 - - self.log.info( - "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - - url = "{}/api/jobs".format(self.deadline_url) - tiles_count = instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 - - for tile_job in frame_payloads: - response = requests_post(url, json=tile_job) - if not response.ok: - raise Exception(response.text) - - job_id = response.json()["_id"] - hash = response.json()["Props"]["Ex0"] - - for assembly_job in assembly_payloads: - if assembly_job["JobInfo"]["ExtraInfo0"] == hash: - assembly_job["JobInfo"]["JobDependency0"] = job_id - - for assembly_job in assembly_payloads: - file = assembly_job["JobInfo"]["ExtraInfo1"] - # write assembly job config files - now = datetime.now() - - config_file = os.path.join( - os.path.dirname(output_filename_0), - "{}_config_{}.txt".format( - os.path.splitext(file)[0], - now.strftime("%Y_%m_%d_%H_%M_%S") - ) - ) - - try: - if not os.path.isdir(os.path.dirname(config_file)): - os.makedirs(os.path.dirname(config_file)) - except OSError: - # directory is not available - self.log.warning( - "Path is unreachable: `{}`".format( - os.path.dirname(config_file))) - - # add config file as job auxFile - assembly_job["AuxFiles"] = [config_file] - - with open(config_file, "w") as cf: - print("TileCount={}".format(tiles_count), file=cf) - print("ImageFileName={}".format(file), file=cf) - print("ImageWidth={}".format( - instance.data.get("resolutionWidth")), file=cf) - print("ImageHeight={}".format( - instance.data.get("resolutionHeight")), file=cf) - - tiles = _format_tiles( - file, 0, - instance.data.get("tilesX"), - instance.data.get("tilesY"), - instance.data.get("resolutionWidth"), - instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - )[1] - sorted(tiles) - for k, v in tiles.items(): - print("{}={}".format(k, v), file=cf) - - job_idx = 1 - instance.data["assemblySubmissionJobs"] = [] - for ass_job in assembly_payloads: - self.log.info("submitting assembly job {} of {}".format( - job_idx, len(assembly_payloads) - )) - self.log.debug(json.dumps(ass_job, indent=4, sort_keys=True)) - response = requests_post(url, json=ass_job) - if not response.ok: - raise Exception(response.text) - - 
instance.data["assemblySubmissionJobs"].append( - response.json()["_id"]) - job_idx += 1 - - instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] - self.log.info("Setting batch name on instance: {}".format( - instance.data["jobBatchName"])) - else: - # Submit job to farm -------------------------------------------- - self.log.info("Submitting ...") - self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) - - # E.g. http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - raise Exception(response.text) - instance.data["deadlineSubmissionJob"] = response.json() + instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] + self.log.info("Setting batch name on instance: {}".format( + instance.data["jobBatchName"])) def _get_maya_payload(self, data): payload = copy.deepcopy(self.payload_skeleton) @@ -1045,39 +777,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): payload["PluginInfo"].update(plugin_info) return payload - def _submit_export(self, data, format): - if format == "vray": - payload = self._get_vray_export_payload(data) - self.log.info("Submitting vrscene export job.") - elif format == "arnold": - payload = self._get_arnold_export_payload(data) - self.log.info("Submitting ass export job.") - - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - self.log.error("Submition failed!") - self.log.error(response.status_code) - self.log.error(response.content) - self.log.debug(payload) - raise RuntimeError(response.text) - - dependency = response.json() - return dependency["_id"] - - def preflight_check(self, instance): - """Ensure the startFrame, endFrame and byFrameStep are integers.""" - for key in ("frameStartHandle", "frameEndHandle", "byFrameStep"): - value = instance.data[key] - - if int(value) == value: - continue - - self.log.warning( - "%f=%d was rounded off to nearest integer" - % (value, int(value)) - ) - def format_vray_output_filename(self, filename, template, dir=False): """Format the expected output file of the Export job. @@ -1160,3 +859,209 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "Applied {} patch to scene.".format( patches[i]["name"])) return file + + +def _format_tiles( + filename, index, tiles_x, tiles_y, + width, height, prefix): + """Generate tile entries for Deadline tile job. + + Returns two dictionaries - one that can be directly used in Deadline + job, second that can be used for Deadline Assembly job configuration + file. + + This will format tile names: + + Example:: + { + "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr", + "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr" + } + + And add tile prefixes like: + + Example:: + Image prefix is: + `maya///_` + + Result for tile 0 for 4x4 will be: + `maya///_tile_1x1_4x4__` + + Calculating coordinates is tricky as in Job they are defined as top, + left, bottom, right with zero being in top-left corner. But Assembler + configuration file takes tile coordinates as X, Y, Width and Height and + zero is bottom left corner. + + Args: + filename (str): Filename to process as tiles. + index (int): Index of that file if it is sequence. + tiles_x (int): Number of tiles in X. + tiles_y (int): Number if tikes in Y. + width (int): Width resolution of final image. + height (int): Height resolution of final image. + prefix (str): Image prefix. 
+ + Returns: + (dict, dict): Tuple of two dictionaires - first can be used to + extend JobInfo, second has tiles x, y, width and height + used for assembler configuration. + + """ + tile = 0 + out = {"JobInfo": {}, "PluginInfo": {}} + cfg = OrderedDict() + w_space = width / tiles_x + h_space = height / tiles_y + + cfg["TilesCropped"] = "False" + + for tile_x in range(1, tiles_x + 1): + for tile_y in reversed(range(1, tiles_y + 1)): + tile_prefix = "_tile_{}x{}_{}x{}_".format( + tile_x, tile_y, + tiles_x, + tiles_y + ) + out_tile_index = "OutputFilename{}Tile{}".format( + str(index), tile + ) + new_filename = "{}/{}{}".format( + os.path.dirname(filename), + tile_prefix, + os.path.basename(filename) + ) + out["JobInfo"][out_tile_index] = new_filename + out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + + out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 + out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 + out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 + out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + + cfg["Tile{}".format(tile)] = new_filename + cfg["Tile{}Tile".format(tile)] = new_filename + cfg["Tile{}FileName".format(tile)] = new_filename + cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space + + cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) + + cfg["Tile{}Width".format(tile)] = w_space + cfg["Tile{}Height".format(tile)] = h_space + + tile += 1 + return out, cfg + + +def get_renderer_variables(renderlayer, root): + """Retrieve the extension which has been set in the VRay settings. + + Will return None if the current renderer is not VRay + For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which + start with `rs`. Use the actual node name, do NOT use the `nice name` + + Args: + renderlayer (str): the node name of the renderlayer. + root (str): base path to render + + Returns: + dict + + """ + renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) + render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) + + padding = cmds.getAttr("{}.{}".format(render_attrs["node"], + render_attrs["padding"])) + + filename_0 = cmds.renderSettings( + fullPath=True, + gin="#" * int(padding), + lut=True, + layer=renderlayer or lib.get_current_renderlayer())[0] + filename_0 = re.sub('_', '_beauty', + filename_0, flags=re.IGNORECASE) + prefix_attr = "defaultRenderGlobals.imageFilePrefix" + + scene = cmds.file(query=True, sceneName=True) + scene, _ = os.path.splitext(os.path.basename(scene)) + + if renderer == "vray": + renderlayer = renderlayer.split("_")[-1] + # Maya's renderSettings function does not return V-Ray file extension + # so we get the extension from vraySettings + extension = cmds.getAttr("vraySettings.imageFormatStr") + + # When V-Ray image format has not been switched once from default .png + # the getAttr command above returns None. As such we explicitly set + # it to `.png` + if extension is None: + extension = "png" + + if extension in ["exr (multichannel)", "exr (deep)"]: + extension = "exr" + + prefix_attr = "vraySettings.fileNamePrefix" + filename_prefix = cmds.getAttr(prefix_attr) + # we need to determine path for vray as maya `renderSettings` query + # does not work for vray. 
+ + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = "{}.{}.{}".format( + filename_0, "#" * int(padding), extension) + filename_0 = os.path.normpath(os.path.join(root, filename_0)) + elif renderer == "renderman": + prefix_attr = "rmanGlobals.imageFileFormat" + # NOTE: This is guessing extensions from renderman display types. + # Some of them are just framebuffers, d_texture format can be + # set in display setting. We set those now to None, but it + # should be handled more gracefully. + display_types = { + "d_deepexr": "exr", + "d_it": None, + "d_null": None, + "d_openexr": "exr", + "d_png": "png", + "d_pointcloud": "ptc", + "d_targa": "tga", + "d_texture": None, + "d_tiff": "tif" + } + + extension = display_types.get( + cmds.listConnections("rmanDefaultDisplay.displayType")[0], + "exr" + ) or "exr" + + filename_prefix = "{}/{}".format( + cmds.getAttr("rmanGlobals.imageOutputDir"), + cmds.getAttr("rmanGlobals.imageFileFormat") + ) + + renderlayer = renderlayer.split("_")[-1] + + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = os.path.normpath(os.path.join(root, filename_0)) + elif renderer == "redshift": + # mapping redshift extension dropdown values to strings + ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] + extension = ext_mapping[ + cmds.getAttr("redshiftOptions.imageFormat") + ] + else: + # Get the extension, getAttr defaultRenderGlobals.imageFormat + # returns an index number. 
+ filename_base = os.path.basename(filename_0) + extension = os.path.splitext(filename_base)[-1].strip(".") + + filename_prefix = cmds.getAttr(prefix_attr) + return {"ext": extension, + "filename_prefix": filename_prefix, + "padding": padding, + "filename_0": filename_0} + + From cdab361dd933781965be776389fb0af48af4cf72 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 24 Aug 2022 04:13:30 +0000 Subject: [PATCH 0608/2550] [Automated] Bump version --- CHANGELOG.md | 50 ++++++++++++++++++++++----------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 65a3cb27e6..a45f65b6f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,16 +1,40 @@ # Changelog -## [3.14.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD) +### 📖 Documentation + +- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) +- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) + **🚀 Enhancements** +- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) +- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) **🐛 Bug fixes** +- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) +- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) +- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) - RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) + +**🔀 Refactored code** + +- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) +- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) +- StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) +- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705) +- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702) + +**Merged pull requests:** + +- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) +- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) @@ -91,7 +115,6 @@ - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) -- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** @@ -111,32 +134,9 @@ [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) -### 📖 Documentation - -- Update website with more studios [\#3554](https://github.com/pypeclub/OpenPype/pull/3554) -- Documentation: Update publishing dev docs [\#3549](https://github.com/pypeclub/OpenPype/pull/3549) - -**🚀 Enhancements** - -- General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) - **🐛 Bug fixes** - Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) -- Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) -- NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) -- Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) -- General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) -- Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) -- Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) - -**🔀 Refactored code** - -- General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) - -**Merged pull requests:** - -- Maya: fix active pane loss [\#3566](https://github.com/pypeclub/OpenPype/pull/3566) ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) diff --git a/openpype/version.py b/openpype/version.py index 174aca1e6c..e738689c20 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.1-nightly.1" +__version__ = "3.14.1-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index e01cc71201..bfc570f597 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.1-nightly.1" # OpenPype +version = "3.14.1-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 0053a7ad7709327e5cce0cb0a3ead5e100c2c08e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 10:38:55 +0200 Subject: [PATCH 0609/2550] fix last saved object access --- openpype/settings/handlers.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 79ec6248ac..def8c16ea7 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -831,7 +831,10 @@ class MongoSettingsHandler(SettingsHandler): data_cache.update_last_saved_info(last_saved_info) self._save_project_data( - project_name, self._project_settings_key, data_cache + project_name, + self._project_settings_key, + data_cache, + last_saved_info ) def save_project_anatomy(self, project_name, anatomy_data): @@ -849,8 +852,16 @@ class MongoSettingsHandler(SettingsHandler): self._save_project_anatomy_data(project_name, data_cache) else: + last_saved_info = SettingsStateInfo.create_new( + self._current_version, + PROJECT_ANATOMY_KEY, + project_name + ) self._save_project_data( - project_name, self._project_anatomy_key, data_cache + project_name, + self._project_anatomy_key, + data_cache, + last_saved_info ) @classmethod @@ -931,14 +942,16 @@ class MongoSettingsHandler(SettingsHandler): {"$set": update_dict} ) - def _save_project_data(self, project_name, doc_type, data_cache): + def _save_project_data( + self, project_name, doc_type, data_cache, last_saved_info + ): is_default = bool(project_name is None) query_filter = { "type": doc_type, "is_default": is_default, "version": self._current_version } - last_saved_info = data_cache.last_saved_info + new_project_settings_doc = { "type": doc_type, "data": data_cache.data, @@ -946,6 +959,7 @@ class MongoSettingsHandler(SettingsHandler): "version": self._current_version, "last_saved_info": last_saved_info.to_data() } + if not is_default: query_filter["project_name"] = project_name new_project_settings_doc["project_name"] = project_name From 5ca80dbdea9d35a0610e91677b1d145675e20e70 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 11:17:43 +0200 Subject: [PATCH 0610/2550] hiero is converted to module --- openpype/hosts/hiero/__init__.py | 47 +++++---------------------- openpype/hosts/hiero/module.py | 54 ++++++++++++++++++++++++++++++++ 2 files changed, 62 insertions(+), 39 deletions(-) create mode 100644 openpype/hosts/hiero/module.py diff --git a/openpype/hosts/hiero/__init__.py b/openpype/hosts/hiero/__init__.py index d2ac82391b..a307e265d5 100644 --- a/openpype/hosts/hiero/__init__.py +++ b/openpype/hosts/hiero/__init__.py @@ -1,41 +1,10 @@ -import os -import platform +from .module import ( + HIERO_ROOT_DIR, + HieroModule, +) -def add_implementation_envs(env, _app): - # Add requirements to HIERO_PLUGIN_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_hiero_paths = [ - os.path.join(pype_root, "openpype", "hosts", "hiero", "api", "startup") - ] - old_hiero_path = env.get("HIERO_PLUGIN_PATH") or "" - for path in old_hiero_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_hiero_paths: - new_hiero_paths.append(norm_path) - - env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = 
env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "HIERO_ROOT_DIR", + "HieroModule", +) diff --git a/openpype/hosts/hiero/module.py b/openpype/hosts/hiero/module.py new file mode 100644 index 0000000000..373b89962d --- /dev/null +++ b/openpype/hosts/hiero/module.py @@ -0,0 +1,54 @@ +import os +import platform +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +HIERO_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HieroModule(OpenPypeModule, IHostModule): + name = "hiero" + host_name = "hiero" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to HIERO_PLUGIN_PATH + new_hiero_paths = [ + os.path.join(HIERO_ROOT_DIR, "api", "startup") + ] + old_hiero_path = env.get("HIERO_PLUGIN_PATH") or "" + for path in old_hiero_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_hiero_paths: + new_hiero_paths.append(norm_path) + + env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + # Try to add QuickTime to PATH + quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" + if platform.system() == "windows" and os.path.exists(quick_time_path): + path_value = env.get("PATH") or "" + path_paths = [ + path + for path in path_value.split(os.pathsep) + if path + ] + path_paths.append(quick_time_path) + env["PATH"] = os.pathsep.join(path_paths) + + def get_workfile_extensions(self): + return [".hrox"] From 8839adaf89477086f62539c0c40a6a8baa05120e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 11:18:15 +0200 Subject: [PATCH 0611/2550] added protobuf as vendorized module for hiero --- openpype/hosts/hiero/module.py | 9 + .../hiero/vendor/google/protobuf/__init__.py | 33 + .../hiero/vendor/google/protobuf/any_pb2.py | 26 + .../hiero/vendor/google/protobuf/api_pb2.py | 32 + .../google/protobuf/compiler/__init__.py | 0 .../google/protobuf/compiler/plugin_pb2.py | 35 + .../vendor/google/protobuf/descriptor.py | 1224 +++++++++++ .../google/protobuf/descriptor_database.py | 177 ++ .../vendor/google/protobuf/descriptor_pb2.py | 1925 +++++++++++++++++ .../vendor/google/protobuf/descriptor_pool.py | 1295 +++++++++++ .../vendor/google/protobuf/duration_pb2.py | 26 + .../hiero/vendor/google/protobuf/empty_pb2.py | 26 + .../vendor/google/protobuf/field_mask_pb2.py | 26 + .../google/protobuf/internal/__init__.py | 0 .../protobuf/internal/_parameterized.py | 443 ++++ .../protobuf/internal/api_implementation.py | 112 + .../google/protobuf/internal/builder.py | 130 ++ .../google/protobuf/internal/containers.py | 710 ++++++ .../google/protobuf/internal/decoder.py | 1029 +++++++++ .../google/protobuf/internal/encoder.py | 829 +++++++ .../protobuf/internal/enum_type_wrapper.py | 124 ++ .../protobuf/internal/extension_dict.py | 213 ++ .../protobuf/internal/message_listener.py | 78 + .../internal/message_set_extensions_pb2.py | 36 + 
.../internal/missing_enum_values_pb2.py | 37 + .../internal/more_extensions_dynamic_pb2.py | 29 + .../protobuf/internal/more_extensions_pb2.py | 41 + .../protobuf/internal/more_messages_pb2.py | 556 +++++ .../protobuf/internal/no_package_pb2.py | 27 + .../protobuf/internal/python_message.py | 1539 +++++++++++++ .../google/protobuf/internal/type_checkers.py | 435 ++++ .../protobuf/internal/well_known_types.py | 878 ++++++++ .../google/protobuf/internal/wire_format.py | 268 +++ .../vendor/google/protobuf/json_format.py | 912 ++++++++ .../hiero/vendor/google/protobuf/message.py | 424 ++++ .../vendor/google/protobuf/message_factory.py | 185 ++ .../vendor/google/protobuf/proto_builder.py | 134 ++ .../vendor/google/protobuf/pyext/__init__.py | 0 .../google/protobuf/pyext/cpp_message.py | 65 + .../google/protobuf/pyext/python_pb2.py | 34 + .../vendor/google/protobuf/reflection.py | 95 + .../hiero/vendor/google/protobuf/service.py | 228 ++ .../google/protobuf/service_reflection.py | 295 +++ .../google/protobuf/source_context_pb2.py | 26 + .../vendor/google/protobuf/struct_pb2.py | 36 + .../vendor/google/protobuf/symbol_database.py | 194 ++ .../vendor/google/protobuf/text_encoding.py | 110 + .../vendor/google/protobuf/text_format.py | 1795 +++++++++++++++ .../vendor/google/protobuf/timestamp_pb2.py | 26 + .../hiero/vendor/google/protobuf/type_pb2.py | 42 + .../vendor/google/protobuf/util/__init__.py | 0 .../google/protobuf/util/json_format_pb2.py | 72 + .../protobuf/util/json_format_proto3_pb2.py | 129 ++ .../vendor/google/protobuf/wrappers_pb2.py | 42 + 54 files changed, 17192 insertions(+) create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/__init__.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/compiler/__init__.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/descriptor.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/__init__.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/encoder.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/enum_type_wrapper.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py create 
mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/json_format.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/message.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/message_factory.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/pyext/__init__.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/reflection.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/service.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/text_format.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/util/__init__.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py create mode 100644 openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py diff --git a/openpype/hosts/hiero/module.py b/openpype/hosts/hiero/module.py index 373b89962d..375486e034 100644 --- a/openpype/hosts/hiero/module.py +++ b/openpype/hosts/hiero/module.py @@ -30,6 +30,15 @@ class HieroModule(OpenPypeModule, IHostModule): env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Add vendor to PYTHONPATH + python_path = env["PYTHONPATH"] + python_path_parts = [] + if python_path: + python_path_parts = python_path.split(os.pathsep) + vendor_path = os.path.join(HIERO_ROOT_DIR, "vendor") + python_path_parts.insert(0, vendor_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + # Set default values if are not already set via settings defaults = { "LOGLEVEL": "DEBUG" diff --git a/openpype/hosts/hiero/vendor/google/protobuf/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/__init__.py new file mode 100644 index 0000000000..03f3b29ee7 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/__init__.py @@ -0,0 
+1,33 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Copyright 2007 Google Inc. All Rights Reserved. + +__version__ = '3.20.1' diff --git a/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py new file mode 100644 index 0000000000..9121193d11 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/any_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/any.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _ANY._serialized_start=46 + _ANY._serialized_end=84 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py new file mode 100644 index 0000000000..1721b10a75 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/api_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/api.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _API._serialized_start=113 + _API._serialized_end=370 + _METHOD._serialized_start=373 + _METHOD._serialized_end=586 + _MIXIN._serialized_start=588 + _MIXIN._serialized_end=623 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/compiler/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/compiler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 0000000000..715a891370 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/compiler/plugin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' + _VERSION._serialized_start=101 + _VERSION._serialized_end=171 + _CODEGENERATORREQUEST._serialized_start=174 + _CODEGENERATORREQUEST._serialized_end=360 + _CODEGENERATORRESPONSE._serialized_start=363 + 
_CODEGENERATORRESPONSE._serialized_end=684 + _CODEGENERATORRESPONSE_FILE._serialized_start=499 + _CODEGENERATORRESPONSE_FILE._serialized_end=626 + _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 + _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py new file mode 100644 index 0000000000..ad70be9a11 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor.py @@ -0,0 +1,1224 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Descriptors essentially contain exactly the information found in a .proto +file, in types that make this information accessible in Python. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import threading +import warnings + +from google.protobuf.internal import api_implementation + +_USE_C_DESCRIPTORS = False +if api_implementation.Type() == 'cpp': + # Used by MakeDescriptor in cpp mode + import binascii + import os + from google.protobuf.pyext import _message + _USE_C_DESCRIPTORS = True + + +class Error(Exception): + """Base error for this module.""" + + +class TypeTransformationError(Error): + """Error transforming between python proto type and corresponding C++ type.""" + + +if _USE_C_DESCRIPTORS: + # This metaclass allows to override the behavior of code like + # isinstance(my_descriptor, FieldDescriptor) + # and make it return True when the descriptor is an instance of the extension + # type written in C++. + class DescriptorMetaclass(type): + def __instancecheck__(cls, obj): + if super(DescriptorMetaclass, cls).__instancecheck__(obj): + return True + if isinstance(obj, cls._C_DESCRIPTOR_CLASS): + return True + return False +else: + # The standard metaclass; nothing changes. 
+ DescriptorMetaclass = type + + +class _Lock(object): + """Wrapper class of threading.Lock(), which is allowed by 'with'.""" + + def __new__(cls): + self = object.__new__(cls) + self._lock = threading.Lock() # pylint: disable=protected-access + return self + + def __enter__(self): + self._lock.acquire() + + def __exit__(self, exc_type, exc_value, exc_tb): + self._lock.release() + + +_lock = threading.Lock() + + +def _Deprecated(name): + if _Deprecated.count > 0: + _Deprecated.count -= 1 + warnings.warn( + 'Call to deprecated create function %s(). Note: Create unlinked ' + 'descriptors is going to go away. Please use get/find descriptors from ' + 'generated code or query the descriptor_pool.' + % name, + category=DeprecationWarning, stacklevel=3) + + +# Deprecated warnings will print 100 times at most which should be enough for +# users to notice and do not cause timeout. +_Deprecated.count = 100 + + +_internal_create_key = object() + + +class DescriptorBase(metaclass=DescriptorMetaclass): + + """Descriptors base class. + + This class is the base of all descriptor classes. It provides common options + related functionality. + + Attributes: + has_options: True if the descriptor has non-default options. Usually it + is not necessary to read this -- just call GetOptions() which will + happily return the default instance. However, it's sometimes useful + for efficiency, and also useful inside the protobuf implementation to + avoid some bootstrapping issues. + """ + + if _USE_C_DESCRIPTORS: + # The class, or tuple of classes, that are considered as "virtual + # subclasses" of this descriptor class. + _C_DESCRIPTOR_CLASS = () + + def __init__(self, options, serialized_options, options_class_name): + """Initialize the descriptor given its options message and the name of the + class of the options message. The name of the class is required in case + the options message is None and has to be created. + """ + self._options = options + self._options_class_name = options_class_name + self._serialized_options = serialized_options + + # Does this descriptor have non-default options? + self.has_options = (options is not None) or (serialized_options is not None) + + def _SetOptions(self, options, options_class_name): + """Sets the descriptor's options + + This function is used in generated proto2 files to update descriptor + options. It must not be used outside proto2. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def GetOptions(self): + """Retrieves descriptor options. + + This method returns the options set or creates the default options for the + descriptor. + """ + if self._options: + return self._options + + from google.protobuf import descriptor_pb2 + try: + options_class = getattr(descriptor_pb2, + self._options_class_name) + except AttributeError: + raise RuntimeError('Unknown options class name %s!' % + (self._options_class_name)) + + with _lock: + if self._serialized_options is None: + self._options = options_class() + else: + self._options = _ParseOptions(options_class(), + self._serialized_options) + + return self._options + + +class _NestedDescriptorBase(DescriptorBase): + """Common class for descriptors that can be nested.""" + + def __init__(self, options, options_class_name, name, full_name, + file, containing_type, serialized_start=None, + serialized_end=None, serialized_options=None): + """Constructor. 
+ + Args: + options: Protocol message options or None + to use default message options. + options_class_name (str): The class name of the above options. + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + file (FileDescriptor): Reference to file info. + containing_type: if provided, this is a nested descriptor, with this + descriptor as parent, otherwise None. + serialized_start: The start index (inclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_end: The end index (exclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_options: Protocol message serialized options or None. + """ + super(_NestedDescriptorBase, self).__init__( + options, serialized_options, options_class_name) + + self.name = name + # TODO(falk): Add function to calculate full_name instead of having it in + # memory? + self.full_name = full_name + self.file = file + self.containing_type = containing_type + + self._serialized_start = serialized_start + self._serialized_end = serialized_end + + def CopyToProto(self, proto): + """Copies this to the matching proto in descriptor_pb2. + + Args: + proto: An empty proto instance from descriptor_pb2. + + Raises: + Error: If self couldn't be serialized, due to to few constructor + arguments. + """ + if (self.file is not None and + self._serialized_start is not None and + self._serialized_end is not None): + proto.ParseFromString(self.file.serialized_pb[ + self._serialized_start:self._serialized_end]) + else: + raise Error('Descriptor does not contain serialization.') + + +class Descriptor(_NestedDescriptorBase): + + """Descriptor for a protocol message type. + + Attributes: + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + containing_type (Descriptor): Reference to the descriptor of the type + containing us, or None if this is top-level. + fields (list[FieldDescriptor]): Field descriptors for all fields in + this type. + fields_by_number (dict(int, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed + by "number" attribute in each FieldDescriptor. + fields_by_name (dict(str, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by + "name" attribute in each :class:`FieldDescriptor`. + nested_types (list[Descriptor]): Descriptor references + for all protocol message types nested within this one. + nested_types_by_name (dict(str, Descriptor)): Same Descriptor + objects as in :attr:`nested_types`, but indexed by "name" attribute + in each Descriptor. + enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references + for all enums contained within this type. + enum_types_by_name (dict(str, EnumDescriptor)): Same + :class:`EnumDescriptor` objects as in :attr:`enum_types`, but + indexed by "name" attribute in each EnumDescriptor. + enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping + from enum value name to :class:`EnumValueDescriptor` for that value. + extensions (list[FieldDescriptor]): All extensions defined directly + within this message type (NOT within a nested type). 
+ extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor + objects as :attr:`extensions`, but indexed by "name" attribute of each + FieldDescriptor. + is_extendable (bool): Does this type define any extension ranges? + oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields + in this message. + oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in + :attr:`oneofs`, but indexed by "name" attribute. + file (FileDescriptor): Reference to file descriptor. + + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.Descriptor + + def __new__( + cls, + name=None, + full_name=None, + filename=None, + containing_type=None, + fields=None, + nested_types=None, + enum_types=None, + extensions=None, + options=None, + serialized_options=None, + is_extendable=True, + extension_ranges=None, + oneofs=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + syntax=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindMessageTypeByName(full_name) + + # NOTE(tmarek): The file argument redefining a builtin is nothing we can + # fix right now since we don't know how many clients already rely on the + # name of the argument. + def __init__(self, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + serialized_options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin + syntax=None, create_key=None): + """Arguments to __init__() are as described in the description + of Descriptor fields above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('Descriptor') + + super(Descriptor, self).__init__( + options, 'MessageOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + # We have fields in addition to fields_by_name and fields_by_number, + # so that: + # 1. Clients can index fields by "order in which they're listed." + # 2. Clients can easily iterate over all fields with the terse + # syntax: for f in descriptor.fields: ... 
+ self.fields = fields + for field in self.fields: + field.containing_type = self + self.fields_by_number = dict((f.number, f) for f in fields) + self.fields_by_name = dict((f.name, f) for f in fields) + self._fields_by_camelcase_name = None + + self.nested_types = nested_types + for nested_type in nested_types: + nested_type.containing_type = self + self.nested_types_by_name = dict((t.name, t) for t in nested_types) + + self.enum_types = enum_types + for enum_type in self.enum_types: + enum_type.containing_type = self + self.enum_types_by_name = dict((t.name, t) for t in enum_types) + self.enum_values_by_name = dict( + (v.name, v) for t in enum_types for v in t.values) + + self.extensions = extensions + for extension in self.extensions: + extension.extension_scope = self + self.extensions_by_name = dict((f.name, f) for f in extensions) + self.is_extendable = is_extendable + self.extension_ranges = extension_ranges + self.oneofs = oneofs if oneofs is not None else [] + self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) + for oneof in self.oneofs: + oneof.containing_type = self + self.syntax = syntax or "proto2" + + @property + def fields_by_camelcase_name(self): + """Same FieldDescriptor objects as in :attr:`fields`, but indexed by + :attr:`FieldDescriptor.camelcase_name`. + """ + if self._fields_by_camelcase_name is None: + self._fields_by_camelcase_name = dict( + (f.camelcase_name, f) for f in self.fields) + return self._fields_by_camelcase_name + + def EnumValueName(self, enum, value): + """Returns the string name of an enum value. + + This is just a small helper method to simplify a common operation. + + Args: + enum: string name of the Enum. + value: int, value of the enum. + + Returns: + string name of the enum value. + + Raises: + KeyError if either the Enum doesn't exist or the value is not a valid + value for the enum. + """ + return self.enum_types_by_name[enum].values_by_number[value].name + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.DescriptorProto. + + Args: + proto: An empty descriptor_pb2.DescriptorProto. + """ + # This function is overridden to give a better doc comment. + super(Descriptor, self).CopyToProto(proto) + + +# TODO(robinson): We should have aggressive checking here, +# for example: +# * If you specify a repeated field, you should not be allowed +# to specify a default value. +# * [Other examples here as needed]. +# +# TODO(robinson): for this and other *Descriptor classes, we +# might also want to lock things down aggressively (e.g., +# prevent clients from setting the attributes). Having +# stronger invariants here in general will reduce the number +# of runtime checks we must do in reflection.py... +class FieldDescriptor(DescriptorBase): + + """Descriptor for a single field in a .proto file. + + Attributes: + name (str): Name of this field, exactly as it appears in .proto. + full_name (str): Name of this field, including containing scope. This is + particularly relevant for extensions. + index (int): Dense, 0-indexed index giving the order that this + field textually appears within its message in the .proto file. + number (int): Tag number declared for this field in the .proto file. + + type (int): (One of the TYPE_* constants below) Declared type. + cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to + represent this field. + + label (int): (One of the LABEL_* constants below) Tells whether this + field is optional, required, or repeated. 
+ has_default_value (bool): True if this field has a default value defined, + otherwise false. + default_value (Varies): Default value of this field. Only + meaningful for non-repeated scalar fields. Repeated fields + should always set this to [], and non-repeated composite + fields should always set this to None. + + containing_type (Descriptor): Descriptor of the protocol message + type that contains this field. Set by the Descriptor constructor + if we're passed into one. + Somewhat confusingly, for extension fields, this is the + descriptor of the EXTENDED message, not the descriptor + of the message containing this field. (See is_extension and + extension_scope below). + message_type (Descriptor): If a composite field, a descriptor + of the message type contained in this field. Otherwise, this is None. + enum_type (EnumDescriptor): If this field contains an enum, a + descriptor of that enum. Otherwise, this is None. + + is_extension: True iff this describes an extension field. + extension_scope (Descriptor): Only meaningful if is_extension is True. + Gives the message that immediately contains this extension field. + Will be None iff we're a top-level (file-level) extension field. + + options (descriptor_pb2.FieldOptions): Protocol message field options or + None to use default field options. + + containing_oneof (OneofDescriptor): If the field is a member of a oneof + union, contains its descriptor. Otherwise, None. + + file (FileDescriptor): Reference to file descriptor. + """ + + # Must be consistent with C++ FieldDescriptor::Type enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + TYPE_DOUBLE = 1 + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + MAX_TYPE = 18 + + # Must be consistent with C++ FieldDescriptor::CppType enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + CPPTYPE_INT32 = 1 + CPPTYPE_INT64 = 2 + CPPTYPE_UINT32 = 3 + CPPTYPE_UINT64 = 4 + CPPTYPE_DOUBLE = 5 + CPPTYPE_FLOAT = 6 + CPPTYPE_BOOL = 7 + CPPTYPE_ENUM = 8 + CPPTYPE_STRING = 9 + CPPTYPE_MESSAGE = 10 + MAX_CPPTYPE = 10 + + _PYTHON_TO_CPP_PROTO_TYPE_MAP = { + TYPE_DOUBLE: CPPTYPE_DOUBLE, + TYPE_FLOAT: CPPTYPE_FLOAT, + TYPE_ENUM: CPPTYPE_ENUM, + TYPE_INT64: CPPTYPE_INT64, + TYPE_SINT64: CPPTYPE_INT64, + TYPE_SFIXED64: CPPTYPE_INT64, + TYPE_UINT64: CPPTYPE_UINT64, + TYPE_FIXED64: CPPTYPE_UINT64, + TYPE_INT32: CPPTYPE_INT32, + TYPE_SFIXED32: CPPTYPE_INT32, + TYPE_SINT32: CPPTYPE_INT32, + TYPE_UINT32: CPPTYPE_UINT32, + TYPE_FIXED32: CPPTYPE_UINT32, + TYPE_BYTES: CPPTYPE_STRING, + TYPE_STRING: CPPTYPE_STRING, + TYPE_BOOL: CPPTYPE_BOOL, + TYPE_MESSAGE: CPPTYPE_MESSAGE, + TYPE_GROUP: CPPTYPE_MESSAGE + } + + # Must be consistent with C++ FieldDescriptor::Label enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. 
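+  # For example, a `repeated sint64` field is described with
+  # type == TYPE_SINT64, cpp_type == CPPTYPE_INT64 (see the map above) and
+  # label == LABEL_REPEATED (defined below).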
+ LABEL_OPTIONAL = 1 + LABEL_REQUIRED = 2 + LABEL_REPEATED = 3 + MAX_LABEL = 3 + + # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, + # and kLastReservedNumber in descriptor.h + MAX_FIELD_NUMBER = (1 << 29) - 1 + FIRST_RESERVED_FIELD_NUMBER = 19000 + LAST_RESERVED_FIELD_NUMBER = 19999 + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FieldDescriptor + + def __new__(cls, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + _message.Message._CheckCalledFromGeneratedFile() + if is_extension: + return _message.default_pool.FindExtensionByName(full_name) + else: + return _message.default_pool.FindFieldByName(full_name) + + def __init__(self, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + """The arguments are as described in the description of FieldDescriptor + attributes above. + + Note that containing_type may be None, and may be set later if necessary + (to deal with circular references between message types, for example). + Likewise for extension_scope. + """ + if create_key is not _internal_create_key: + _Deprecated('FieldDescriptor') + + super(FieldDescriptor, self).__init__( + options, serialized_options, 'FieldOptions') + self.name = name + self.full_name = full_name + self.file = file + self._camelcase_name = None + if json_name is None: + self.json_name = _ToJsonName(name) + else: + self.json_name = json_name + self.index = index + self.number = number + self.type = type + self.cpp_type = cpp_type + self.label = label + self.has_default_value = has_default_value + self.default_value = default_value + self.containing_type = containing_type + self.message_type = message_type + self.enum_type = enum_type + self.is_extension = is_extension + self.extension_scope = extension_scope + self.containing_oneof = containing_oneof + if api_implementation.Type() == 'cpp': + if is_extension: + self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) + else: + self._cdescriptor = _message.default_pool.FindFieldByName(full_name) + else: + self._cdescriptor = None + + @property + def camelcase_name(self): + """Camelcase name of this field. + + Returns: + str: the name in CamelCase. + """ + if self._camelcase_name is None: + self._camelcase_name = _ToCamelCase(self.name) + return self._camelcase_name + + @property + def has_presence(self): + """Whether the field distinguishes between unpopulated and default values. + + Raises: + RuntimeError: singular field that is not linked with message nor file. 
+ """ + if self.label == FieldDescriptor.LABEL_REPEATED: + return False + if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or + self.containing_oneof): + return True + if hasattr(self.file, 'syntax'): + return self.file.syntax == 'proto2' + if hasattr(self.message_type, 'syntax'): + return self.message_type.syntax == 'proto2' + raise RuntimeError( + 'has_presence is not ready to use because field %s is not' + ' linked with message type nor file' % self.full_name) + + @staticmethod + def ProtoTypeToCppProtoType(proto_type): + """Converts from a Python proto type to a C++ Proto Type. + + The Python ProtocolBuffer classes specify both the 'Python' datatype and the + 'C++' datatype - and they're not the same. This helper method should + translate from one to another. + + Args: + proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) + Returns: + int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. + Raises: + TypeTransformationError: when the Python proto type isn't known. + """ + try: + return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] + except KeyError: + raise TypeTransformationError('Unknown proto_type: %s' % proto_type) + + +class EnumDescriptor(_NestedDescriptorBase): + + """Descriptor for an enum defined in a .proto file. + + Attributes: + name (str): Name of the enum type. + full_name (str): Full name of the type, including package name + and any enclosing type(s). + + values (list[EnumValueDescriptor]): List of the values + in this enum. + values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "name" field of each EnumValueDescriptor. + values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "number" field of each EnumValueDescriptor. + containing_type (Descriptor): Descriptor of the immediate containing + type of this enum, or None if this is an enum defined at the + top level in a .proto file. Set by Descriptor's constructor + if we're passed into one. + file (FileDescriptor): Reference to file descriptor. + options (descriptor_pb2.EnumOptions): Enum options message or + None to use default enum options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumDescriptor + + def __new__(cls, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindEnumTypeByName(full_name) + + def __init__(self, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + """Arguments are as described in the attribute description above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('EnumDescriptor') + + super(EnumDescriptor, self).__init__( + options, 'EnumOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + self.values = values + for value in self.values: + value.type = self + self.values_by_name = dict((v.name, v) for v in values) + # Values are reversed to ensure that the first alias is retained. 
+ self.values_by_number = dict((v.number, v) for v in reversed(values)) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.EnumDescriptorProto. + + Args: + proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(EnumDescriptor, self).CopyToProto(proto) + + +class EnumValueDescriptor(DescriptorBase): + + """Descriptor for a single value within an enum. + + Attributes: + name (str): Name of this value. + index (int): Dense, 0-indexed index giving the order that this + value appears textually within its enum in the .proto file. + number (int): Actual number assigned to this enum value. + type (EnumDescriptor): :class:`EnumDescriptor` to which this value + belongs. Set by :class:`EnumDescriptor`'s constructor if we're + passed into one. + options (descriptor_pb2.EnumValueOptions): Enum value options message or + None to use default enum value options options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor + + def __new__(cls, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + # There is no way we can build a complete EnumValueDescriptor with the + # given parameters (the name of the Enum is not known, for example). + # Fortunately generated files just pass it to the EnumDescriptor() + # constructor, which will ignore it, so returning None is good enough. + return None + + def __init__(self, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('EnumValueDescriptor') + + super(EnumValueDescriptor, self).__init__( + options, serialized_options, 'EnumValueOptions') + self.name = name + self.index = index + self.number = number + self.type = type + + +class OneofDescriptor(DescriptorBase): + """Descriptor for a oneof field. + + Attributes: + name (str): Name of the oneof field. + full_name (str): Full name of the oneof field, including package name. + index (int): 0-based index giving the order of the oneof field inside + its containing type. + containing_type (Descriptor): :class:`Descriptor` of the protocol message + type that contains this field. Set by the :class:`Descriptor` constructor + if we're passed into one. + fields (list[FieldDescriptor]): The list of field descriptors this + oneof can contain. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.OneofDescriptor + + def __new__( + cls, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindOneofByName(full_name) + + def __init__( + self, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('OneofDescriptor') + + super(OneofDescriptor, self).__init__( + options, serialized_options, 'OneofOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_type = containing_type + self.fields = fields + + +class ServiceDescriptor(_NestedDescriptorBase): + + """Descriptor for a service. 
+ + Attributes: + name (str): Name of the service. + full_name (str): Full name of the service, including package name. + index (int): 0-indexed index giving the order that this services + definition appears within the .proto file. + methods (list[MethodDescriptor]): List of methods provided by this + service. + methods_by_name (dict(str, MethodDescriptor)): Same + :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but + indexed by "name" attribute in each :class:`MethodDescriptor`. + options (descriptor_pb2.ServiceOptions): Service options message or + None to use default service options. + file (FileDescriptor): Reference to file info. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor + + def __new__( + cls, + name=None, + full_name=None, + index=None, + methods=None, + options=None, + serialized_options=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindServiceByName(full_name) + + def __init__(self, name, full_name, index, methods, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + if create_key is not _internal_create_key: + _Deprecated('ServiceDescriptor') + + super(ServiceDescriptor, self).__init__( + options, 'ServiceOptions', name, full_name, file, + None, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + self.index = index + self.methods = methods + self.methods_by_name = dict((m.name, m) for m in methods) + # Set the containing service for each method in this service. + for method in self.methods: + method.containing_service = self + + def FindMethodByName(self, name): + """Searches for the specified method, and returns its descriptor. + + Args: + name (str): Name of the method. + Returns: + MethodDescriptor or None: the descriptor for the requested method, if + found. + """ + return self.methods_by_name.get(name, None) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.ServiceDescriptorProto. + + Args: + proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(ServiceDescriptor, self).CopyToProto(proto) + + +class MethodDescriptor(DescriptorBase): + + """Descriptor for a method in a service. + + Attributes: + name (str): Name of the method within the service. + full_name (str): Full name of method. + index (int): 0-indexed index of the method inside the service. + containing_service (ServiceDescriptor): The service that contains this + method. + input_type (Descriptor): The descriptor of the message that this method + accepts. + output_type (Descriptor): The descriptor of the message that this method + returns. + client_streaming (bool): Whether this method uses client streaming. + server_streaming (bool): Whether this method uses server streaming. + options (descriptor_pb2.MethodOptions or None): Method options message, or + None to use default method options. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.MethodDescriptor + + def __new__(cls, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindMethodByName(full_name) + + def __init__(self, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + """The arguments are as described in the description of MethodDescriptor + attributes above. + + Note that containing_service may be None, and may be set later if necessary. + """ + if create_key is not _internal_create_key: + _Deprecated('MethodDescriptor') + + super(MethodDescriptor, self).__init__( + options, serialized_options, 'MethodOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_service = containing_service + self.input_type = input_type + self.output_type = output_type + self.client_streaming = client_streaming + self.server_streaming = server_streaming + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.MethodDescriptorProto. + + Args: + proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. + + Raises: + Error: If self couldn't be serialized, due to too few constructor + arguments. + """ + if self.containing_service is not None: + from google.protobuf import descriptor_pb2 + service_proto = descriptor_pb2.ServiceDescriptorProto() + self.containing_service.CopyToProto(service_proto) + proto.CopyFrom(service_proto.method[self.index]) + else: + raise Error('Descriptor does not contain a service.') + + +class FileDescriptor(DescriptorBase): + """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. + + Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and + :attr:`dependencies` fields are only set by the + :py:mod:`google.protobuf.message_factory` module, and not by the generated + proto code. + + Attributes: + name (str): Name of file, relative to root of source tree. + package (str): Name of the package + syntax (str): string indicating syntax of the file (can be "proto2" or + "proto3") + serialized_pb (bytes): Byte string of serialized + :class:`descriptor_pb2.FileDescriptorProto`. + dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` + objects this :class:`FileDescriptor` depends on. + public_dependencies (list[FileDescriptor]): A subset of + :attr:`dependencies`, which were declared as "public". + message_types_by_name (dict(str, Descriptor)): Mapping from message names + to their :class:`Descriptor`. + enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to + their :class:`EnumDescriptor`. + extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension + names declared at file scope to their :class:`FieldDescriptor`. + services_by_name (dict(str, ServiceDescriptor)): Mapping from services' + names to their :class:`ServiceDescriptor`. + pool (DescriptorPool): The pool this descriptor belongs to. When not + passed to the constructor, the global default pool is used. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FileDescriptor + + def __new__(cls, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + # FileDescriptor() is called from various places, not only from generated + # files, to register dynamic proto files and messages. + # pylint: disable=g-explicit-bool-comparison + if serialized_pb == b'': + # Cpp generated code must be linked in if serialized_pb is '' + try: + return _message.default_pool.FindFileByName(name) + except KeyError: + raise RuntimeError('Please link in cpp generated lib for %s' % (name)) + elif serialized_pb: + return _message.default_pool.AddSerializedFile(serialized_pb) + else: + return super(FileDescriptor, cls).__new__(cls) + + def __init__(self, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + """Constructor.""" + if create_key is not _internal_create_key: + _Deprecated('FileDescriptor') + + super(FileDescriptor, self).__init__( + options, serialized_options, 'FileOptions') + + if pool is None: + from google.protobuf import descriptor_pool + pool = descriptor_pool.Default() + self.pool = pool + self.message_types_by_name = {} + self.name = name + self.package = package + self.syntax = syntax or "proto2" + self.serialized_pb = serialized_pb + + self.enum_types_by_name = {} + self.extensions_by_name = {} + self.services_by_name = {} + self.dependencies = (dependencies or []) + self.public_dependencies = (public_dependencies or []) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.FileDescriptorProto. + + Args: + proto: An empty descriptor_pb2.FileDescriptorProto. + """ + proto.ParseFromString(self.serialized_pb) + + +def _ParseOptions(message, string): + """Parses serialized options. + + This helper function is used to parse serialized options in generated + proto2 files. It must not be used outside proto2. + """ + message.ParseFromString(string) + return message + + +def _ToCamelCase(name): + """Converts name to camel-case and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + if result: + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + # Lower-case the first letter. + if result and result[0].isupper(): + result[0] = result[0].lower() + return ''.join(result) + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _ToJsonName(name): + """Converts name to Json name and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + return ''.join(result) + + +def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, + syntax=None): + """Make a protobuf Descriptor given a DescriptorProto protobuf. + + Handles nested descriptors. Note that this is limited to the scope of defining + a message inside of another message. Composite fields can currently only be + resolved if the message is defined in the same scope as the field. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. 
+ package: Optional package name for the new message Descriptor (string). + build_file_if_cpp: Update the C++ descriptor pool if api matches. + Set to False on recursion, so no duplicates are created. + syntax: The syntax/semantics that should be used. Set to "proto3" to get + proto3 field presence semantics. + Returns: + A Descriptor for protobuf messages. + """ + if api_implementation.Type() == 'cpp' and build_file_if_cpp: + # The C++ implementation requires all descriptors to be backed by the same + # definition in the C++ descriptor pool. To do this, we build a + # FileDescriptorProto with the same definition as this descriptor and build + # it into the pool. + from google.protobuf import descriptor_pb2 + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + file_descriptor_proto.message_type.add().MergeFrom(desc_proto) + + # Generate a random name for this proto file to prevent conflicts with any + # imported ones. We need to specify a file name so the descriptor pool + # accepts our FileDescriptorProto, but it is not important what that file + # name is actually set to. + proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') + + if package: + file_descriptor_proto.name = os.path.join(package.replace('.', '/'), + proto_name + '.proto') + file_descriptor_proto.package = package + else: + file_descriptor_proto.name = proto_name + '.proto' + + _message.default_pool.Add(file_descriptor_proto) + result = _message.default_pool.FindFileByName(file_descriptor_proto.name) + + if _USE_C_DESCRIPTORS: + return result.message_types_by_name[desc_proto.name] + + full_message_name = [desc_proto.name] + if package: full_message_name.insert(0, package) + + # Create Descriptors for enum types + enum_types = {} + for enum_proto in desc_proto.enum_type: + full_name = '.'.join(full_message_name + [enum_proto.name]) + enum_desc = EnumDescriptor( + enum_proto.name, full_name, None, [ + EnumValueDescriptor(enum_val.name, ii, enum_val.number, + create_key=_internal_create_key) + for ii, enum_val in enumerate(enum_proto.value)], + create_key=_internal_create_key) + enum_types[full_name] = enum_desc + + # Create Descriptors for nested types + nested_types = {} + for nested_proto in desc_proto.nested_type: + full_name = '.'.join(full_message_name + [nested_proto.name]) + # Nested types are just those defined inside of the message, not all types + # used by fields in the message, so no loops are possible here. 
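+    # Recurse with build_file_if_cpp=False so the nested message is not
+    # added to the C++ descriptor pool a second time (see docstring above).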
+ nested_desc = MakeDescriptor(nested_proto, + package='.'.join(full_message_name), + build_file_if_cpp=False, + syntax=syntax) + nested_types[full_name] = nested_desc + + fields = [] + for field_proto in desc_proto.field: + full_name = '.'.join(full_message_name + [field_proto.name]) + enum_desc = None + nested_desc = None + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + if field_proto.HasField('type_name'): + type_name = field_proto.type_name + full_type_name = '.'.join(full_message_name + + [type_name[type_name.rfind('.')+1:]]) + if full_type_name in nested_types: + nested_desc = nested_types[full_type_name] + elif full_type_name in enum_types: + enum_desc = enum_types[full_type_name] + # Else type_name references a non-local type, which isn't implemented + field = FieldDescriptor( + field_proto.name, full_name, field_proto.number - 1, + field_proto.number, field_proto.type, + FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), + field_proto.label, None, nested_desc, enum_desc, None, False, None, + options=_OptionsOrNone(field_proto), has_default_value=False, + json_name=json_name, create_key=_internal_create_key) + fields.append(field) + + desc_name = '.'.join(full_message_name) + return Descriptor(desc_proto.name, desc_name, None, None, fields, + list(nested_types.values()), list(enum_types.values()), [], + options=_OptionsOrNone(desc_proto), + create_key=_internal_create_key) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py new file mode 100644 index 0000000000..073eddc711 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_database.py @@ -0,0 +1,177 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Provides a container for DescriptorProtos.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import warnings + + +class Error(Exception): + pass + + +class DescriptorDatabaseConflictingDefinitionError(Error): + """Raised when a proto is added with the same name & different descriptor.""" + + +class DescriptorDatabase(object): + """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" + + def __init__(self): + self._file_desc_protos_by_file = {} + self._file_desc_protos_by_symbol = {} + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this database. + + Args: + file_desc_proto: The FileDescriptorProto to add. + Raises: + DescriptorDatabaseConflictingDefinitionError: if an attempt is made to + add a proto with the same name but different definition than an + existing proto in the database. + """ + proto_name = file_desc_proto.name + if proto_name not in self._file_desc_protos_by_file: + self._file_desc_protos_by_file[proto_name] = file_desc_proto + elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: + raise DescriptorDatabaseConflictingDefinitionError( + '%s already added, but with different descriptor.' % proto_name) + else: + return + + # Add all the top-level descriptors to the index. + package = file_desc_proto.package + for message in file_desc_proto.message_type: + for name in _ExtractSymbols(message, package): + self._AddSymbol(name, file_desc_proto) + for enum in file_desc_proto.enum_type: + self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) + for enum_value in enum.value: + self._file_desc_protos_by_symbol[ + '.'.join((package, enum_value.name))] = file_desc_proto + for extension in file_desc_proto.extension: + self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) + for service in file_desc_proto.service: + self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) + + def FindFileByName(self, name): + """Finds the file descriptor proto by file name. + + Typically the file name is a relative path ending to a .proto file. The + proto with the given name will have to have been added to this database + using the Add method or else an error will be raised. + + Args: + name: The file name to find. + + Returns: + The file descriptor proto matching the name. + + Raises: + KeyError if no file by the given name was added. + """ + + return self._file_desc_protos_by_file[name] + + def FindFileContainingSymbol(self, symbol): + """Finds the file descriptor proto containing the specified symbol. + + The symbol should be a fully qualified name including the file descriptor's + package and any containing messages. Some examples: + + 'some.package.name.Message' + 'some.package.name.Message.NestedEnum' + 'some.package.name.Message.some_field' + + The file descriptor proto containing the specified symbol must be added to + this database using the Add method or else an error will be raised. + + Args: + symbol: The fully qualified symbol name. + + Returns: + The file descriptor proto containing the symbol. + + Raises: + KeyError if no file contains the specified symbol. + """ + try: + return self._file_desc_protos_by_symbol[symbol] + except KeyError: + # Fields, enum values, and nested extensions are not in + # _file_desc_protos_by_symbol. Try to find the top level + # descriptor. Non-existent nested symbol under a valid top level + # descriptor can also be found. The behavior is the same with + # protobuf C++. 
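+      # For example, 'some.package.name.Message.some_field' (from the
+      # docstring above) falls back to looking up 'some.package.name.Message'.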
+ top_level, _, _ = symbol.rpartition('.') + try: + return self._file_desc_protos_by_symbol[top_level] + except KeyError: + # Raise the original symbol as a KeyError for better diagnostics. + raise KeyError(symbol) + + def FindFileContainingExtension(self, extendee_name, extension_number): + # TODO(jieluo): implement this API. + return None + + def FindAllExtensionNumbers(self, extendee_name): + # TODO(jieluo): implement this API. + return [] + + def _AddSymbol(self, name, file_desc_proto): + if name in self._file_desc_protos_by_symbol: + warn_msg = ('Conflict register for file "' + file_desc_proto.name + + '": ' + name + + ' is already defined in file "' + + self._file_desc_protos_by_symbol[name].name + '"') + warnings.warn(warn_msg, RuntimeWarning) + self._file_desc_protos_by_symbol[name] = file_desc_proto + + +def _ExtractSymbols(desc_proto, package): + """Pulls out all the symbols from a descriptor proto. + + Args: + desc_proto: The proto to extract symbols from. + package: The package containing the descriptor type. + + Yields: + The fully qualified name found in the descriptor. + """ + message_name = package + '.' + desc_proto.name if package else desc_proto.name + yield message_name + for nested_type in desc_proto.nested_type: + for symbol in _ExtractSymbols(nested_type, message_name): + yield symbol + for enum_type in desc_proto.enum_type: + yield '.'.join((message_name, enum_type.name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py new file mode 100644 index 0000000000..f570386432 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pb2.py @@ -0,0 +1,1925 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/descriptor.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/descriptor.proto', + package='google.protobuf', + syntax='proto2', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' + ) +else: + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') + +if _descriptor._USE_C_DESCRIPTORS == False: + _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.protobuf.FieldDescriptorProto.Type', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', index=3, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT32', index=4, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=5, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=6, number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=7, number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=8, number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + 
_descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=9, number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=10, number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=11, number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=12, number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=13, number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=14, number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=15, number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=16, number=17, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=17, number=18, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) + + _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.protobuf.FieldDescriptorProto.Label', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LABEL_OPTIONAL', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REQUIRED', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REPEATED', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) + + _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( + name='OptimizeMode', + full_name='google.protobuf.FileOptions.OptimizeMode', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEED', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CODE_SIZE', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LITE_RUNTIME', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) + + _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( + name='CType', + full_name='google.protobuf.FieldOptions.CType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + 
name='STRING', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CORD', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STRING_PIECE', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) + + _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( + name='JSType', + full_name='google.protobuf.FieldOptions.JSType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='JS_NORMAL', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_STRING', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_NUMBER', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) + + _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( + name='IdempotencyLevel', + full_name='google.protobuf.MethodOptions.IdempotencyLevel', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='IDEMPOTENCY_UNKNOWN', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NO_SIDE_EFFECTS', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='IDEMPOTENT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) + + + _FILEDESCRIPTORSET = _descriptor.Descriptor( + name='FileDescriptorSet', + full_name='google.protobuf.FileDescriptorSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEDESCRIPTORPROTO = _descriptor.Descriptor( + name='FileDescriptorProto', + full_name='google.protobuf.FileDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, + number=10, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, + number=11, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, + number=8, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( + name='ExtensionRange', + full_name='google.protobuf.DescriptorProto.ExtensionRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( + name='ReservedRange', + full_name='google.protobuf.DescriptorProto.ReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO = _descriptor.Descriptor( + name='DescriptorProto', + full_name='google.protobuf.DescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.DescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='field', full_name='google.protobuf.DescriptorProto.field', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.options', index=7, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, + number=10, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( + name='ExtensionRangeOptions', + full_name='google.protobuf.ExtensionRangeOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( + name='FieldDescriptorProto', + full_name='google.protobuf.FieldDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='label', 
full_name='google.protobuf.FieldDescriptorProto.label', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, + number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDDESCRIPTORPROTO_TYPE, + _FIELDDESCRIPTORPROTO_LABEL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( + name='OneofDescriptorProto', + full_name='google.protobuf.OneofDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( + name='EnumReservedRange', + full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumDescriptorProto', + full_name='google.protobuf.EnumDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.protobuf.EnumDescriptorProto.value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumValueDescriptorProto', + full_name='google.protobuf.EnumValueDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( + name='ServiceDescriptorProto', + 
full_name='google.protobuf.ServiceDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _METHODDESCRIPTORPROTO = _descriptor.Descriptor( + name='MethodDescriptorProto', + full_name='google.protobuf.MethodDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEOPTIONS = _descriptor.Descriptor( + name='FileOptions', + full_name='google.protobuf.FileOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, + number=27, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, + number=9, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, + number=42, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, + number=31, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, + number=36, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
+ number=37, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, + number=39, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, + number=40, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, + number=41, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, + number=44, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, + number=45, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEOPTIONS_OPTIMIZEMODE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _MESSAGEOPTIONS = _descriptor.Descriptor( + name='MessageOptions', + full_name='google.protobuf.MessageOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDOPTIONS = _descriptor.Descriptor( + name='FieldOptions', + full_name='google.protobuf.FieldOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, 
default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, + number=15, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDOPTIONS_CTYPE, + _FIELDOPTIONS_JSTYPE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ONEOFOPTIONS = _descriptor.Descriptor( + name='OneofOptions', + full_name='google.protobuf.OneofOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMOPTIONS = _descriptor.Descriptor( + name='EnumOptions', + full_name='google.protobuf.EnumOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( 
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMVALUEOPTIONS = _descriptor.Descriptor( + name='EnumValueOptions', + full_name='google.protobuf.EnumValueOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _SERVICEOPTIONS = _descriptor.Descriptor( + name='ServiceOptions', + full_name='google.protobuf.ServiceOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _METHODOPTIONS = _descriptor.Descriptor( + name='MethodOptions', + 
full_name='google.protobuf.MethodOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, + number=34, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _METHODOPTIONS_IDEMPOTENCYLEVEL, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( + name='NamePart', + full_name='google.protobuf.UninterpretedOption.NamePart', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, + number=2, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _UNINTERPRETEDOPTION = _descriptor.Descriptor( + name='UninterpretedOption', + full_name='google.protobuf.UninterpretedOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='google.protobuf.SourceCodeInfo.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _SOURCECODEINFO = _descriptor.Descriptor( + name='SourceCodeInfo', + full_name='google.protobuf.SourceCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_SOURCECODEINFO_LOCATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( + name='Annotation', + full_name='google.protobuf.GeneratedCodeInfo.Annotation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _GENERATEDCODEINFO = _descriptor.Descriptor( + name='GeneratedCodeInfo', + full_name='google.protobuf.GeneratedCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS + _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO + _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS + _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE + _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS + _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE + _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL + _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE + _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS + _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO + _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO + _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS + _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE + _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS + _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO + _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS + _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS + _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE + _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS + _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE + _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE + _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS + _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS + _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL + _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS + _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION + _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART + _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO + _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION + _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO + _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION + DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET + DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO + 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS + DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS + DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS + DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS + DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS + DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS + DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS + DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION + DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO + DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + +else: + _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _FILEDESCRIPTORSET._serialized_start=53 + _FILEDESCRIPTORSET._serialized_end=124 + _FILEDESCRIPTORPROTO._serialized_start=127 + _FILEDESCRIPTORPROTO._serialized_end=602 + _DESCRIPTORPROTO._serialized_start=605 + _DESCRIPTORPROTO._serialized_end=1286 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 + _EXTENSIONRANGEOPTIONS._serialized_start=1288 + _EXTENSIONRANGEOPTIONS._serialized_end=1391 + _FIELDDESCRIPTORPROTO._serialized_start=1394 + _FIELDDESCRIPTORPROTO._serialized_end=2119 + _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 + _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 + _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 + _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 + _ONEOFDESCRIPTORPROTO._serialized_start=2121 + _ONEOFDESCRIPTORPROTO._serialized_end=2205 + _ENUMDESCRIPTORPROTO._serialized_start=2208 + _ENUMDESCRIPTORPROTO._serialized_end=2500 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 + _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 + _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 + _SERVICEDESCRIPTORPROTO._serialized_start=2613 + _SERVICEDESCRIPTORPROTO._serialized_end=2757 + _METHODDESCRIPTORPROTO._serialized_start=2760 + _METHODDESCRIPTORPROTO._serialized_end=2953 + _FILEOPTIONS._serialized_start=2956 + _FILEOPTIONS._serialized_end=3761 + _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 + _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 + _MESSAGEOPTIONS._serialized_start=3764 + _MESSAGEOPTIONS._serialized_end=4024 + _FIELDOPTIONS._serialized_start=4027 + _FIELDOPTIONS._serialized_end=4473 + _FIELDOPTIONS_CTYPE._serialized_start=4354 + _FIELDOPTIONS_CTYPE._serialized_end=4401 + _FIELDOPTIONS_JSTYPE._serialized_start=4403 + _FIELDOPTIONS_JSTYPE._serialized_end=4456 + _ONEOFOPTIONS._serialized_start=4475 + _ONEOFOPTIONS._serialized_end=4569 + _ENUMOPTIONS._serialized_start=4572 + _ENUMOPTIONS._serialized_end=4719 + _ENUMVALUEOPTIONS._serialized_start=4721 + _ENUMVALUEOPTIONS._serialized_end=4846 + _SERVICEOPTIONS._serialized_start=4848 + _SERVICEOPTIONS._serialized_end=4971 + 
_METHODOPTIONS._serialized_start=4974 + _METHODOPTIONS._serialized_end=5275 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 + _UNINTERPRETEDOPTION._serialized_start=5278 + _UNINTERPRETEDOPTION._serialized_end=5564 + _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 + _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 + _SOURCECODEINFO._serialized_start=5567 + _SOURCECODEINFO._serialized_end=5780 + _SOURCECODEINFO_LOCATION._serialized_start=5646 + _SOURCECODEINFO_LOCATION._serialized_end=5780 + _GENERATEDCODEINFO._serialized_start=5783 + _GENERATEDCODEINFO._serialized_end=5950 + _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 + _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py new file mode 100644 index 0000000000..911372a8b0 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/descriptor_pool.py @@ -0,0 +1,1295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides DescriptorPool to use as a container for proto2 descriptors. + +The DescriptorPool is used in conjection with a DescriptorDatabase to maintain +a collection of protocol buffer descriptors for use when dynamically creating +message types at runtime. + +For most applications protocol buffers should be used via modules generated by +the protocol buffer compiler tool. This should only be used when the type of +protocol buffers used in an application or library cannot be predetermined. + +Below is a straightforward example on how to use this class:: + + pool = DescriptorPool() + file_descriptor_protos = [ ... 
] + for file_descriptor_proto in file_descriptor_protos: + pool.Add(file_descriptor_proto) + my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') + +The message descriptor can be used in conjunction with the message_factory +module in order to create a protocol buffer class that can be encoded and +decoded. + +If you want to get a Python class for the specified proto, use the +helper functions inside google.protobuf.message_factory +directly instead of this class. +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import collections +import warnings + +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import text_encoding + + +_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access + + +def _Deprecated(func): + """Mark functions as deprecated.""" + + def NewFunc(*args, **kwargs): + warnings.warn( + 'Call to deprecated function %s(). Note: Do add unlinked descriptors ' + 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' + 'instead.' % func.__name__, + category=DeprecationWarning) + return func(*args, **kwargs) + NewFunc.__name__ = func.__name__ + NewFunc.__doc__ = func.__doc__ + NewFunc.__dict__.update(func.__dict__) + return NewFunc + + +def _NormalizeFullyQualifiedName(name): + """Remove leading period from fully-qualified type name. + + Due to b/13860351 in descriptor_database.py, types in the root namespace are + generated with a leading period. This function removes that prefix. + + Args: + name (str): The fully-qualified symbol name. + + Returns: + str: The normalized fully-qualified symbol name. + """ + return name.lstrip('.') + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) + + +class DescriptorPool(object): + """A collection of protobufs dynamically constructed by descriptor protos.""" + + if _USE_C_DESCRIPTORS: + + def __new__(cls, descriptor_db=None): + # pylint: disable=protected-access + return descriptor._message.DescriptorPool(descriptor_db) + + def __init__(self, descriptor_db=None): + """Initializes a Pool of proto buffs. + + The descriptor_db argument to the constructor is provided to allow + specialized file descriptor proto lookup code to be triggered on demand. An + example would be an implementation which will read and compile a file + specified in a call to FindFileByName() and not require the call to Add() + at all. Results from this database will be cached internally here as well. + + Args: + descriptor_db: A secondary source of file descriptors. + """ + + self._internal_db = descriptor_database.DescriptorDatabase() + self._descriptor_db = descriptor_db + self._descriptors = {} + self._enum_descriptors = {} + self._service_descriptors = {} + self._file_descriptors = {} + self._toplevel_extensions = {} + # TODO(jieluo): Remove _file_desc_by_toplevel_extension after + # maybe year 2020 for compatibility issue (with 3.4.1 only). 
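For reviewers of this vendored module, a minimal sketch of the workflow the docstring above describes (build a FileDescriptorProto, Add() it to a pool, then resolve types by full name). The file, package, and message names below are hypothetical and not part of the vendored file:

from google.protobuf import descriptor_pb2, descriptor_pool

# Hand-build a tiny FileDescriptorProto (hypothetical names).
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/thing.proto'
file_proto.package = 'example'

message = file_proto.message_type.add()
message.name = 'Thing'
field = message.field.add()
field.name = 'id'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT64
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

# Register the file with a pool, then look the message type up by full name.
pool = descriptor_pool.DescriptorPool()
pool.Add(file_proto)
thing_desc = pool.FindMessageTypeByName('example.Thing')
assert thing_desc.fields_by_name['id'].number == 1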
+ self._file_desc_by_toplevel_extension = {} + self._top_enum_values = {} + # We store extensions in two two-level mappings: The first key is the + # descriptor of the message being extended, the second key is the extension + # full name or its tag number. + self._extensions_by_name = collections.defaultdict(dict) + self._extensions_by_number = collections.defaultdict(dict) + + def _CheckConflictRegister(self, desc, desc_name, file_name): + """Check if the descriptor name conflicts with another of the same name. + + Args: + desc: Descriptor of a message, enum, service, extension or enum value. + desc_name (str): the full name of desc. + file_name (str): The file name of descriptor. + """ + for register, descriptor_type in [ + (self._descriptors, descriptor.Descriptor), + (self._enum_descriptors, descriptor.EnumDescriptor), + (self._service_descriptors, descriptor.ServiceDescriptor), + (self._toplevel_extensions, descriptor.FieldDescriptor), + (self._top_enum_values, descriptor.EnumValueDescriptor)]: + if desc_name in register: + old_desc = register[desc_name] + if isinstance(old_desc, descriptor.EnumValueDescriptor): + old_file = old_desc.type.file.name + else: + old_file = old_desc.file.name + + if not isinstance(desc, descriptor_type) or ( + old_file != file_name): + error_msg = ('Conflict register for file "' + file_name + + '": ' + desc_name + + ' is already defined in file "' + + old_file + '". Please fix the conflict by adding ' + 'package name on the proto file, or use different ' + 'name for the duplication.') + if isinstance(desc, descriptor.EnumValueDescriptor): + error_msg += ('\nNote: enum values appear as ' + 'siblings of the enum type instead of ' + 'children of it.') + + raise TypeError(error_msg) + + return + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + file_desc_proto (FileDescriptorProto): The file descriptor to add. + """ + + self._internal_db.Add(file_desc_proto) + + def AddSerializedFile(self, serialized_file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + serialized_file_desc_proto (bytes): A bytes string, serialization of the + :class:`FileDescriptorProto` to add. + + Returns: + FileDescriptor: Descriptor for the added file. + """ + + # pylint: disable=g-import-not-at-top + from google.protobuf import descriptor_pb2 + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + serialized_file_desc_proto) + file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto) + file_desc.serialized_pb = serialized_file_desc_proto + return file_desc + + # Add Descriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddDescriptor(self, desc): + self._AddDescriptor(desc) + + # Never call this method. It is for internal usage only. + def _AddDescriptor(self, desc): + """Adds a Descriptor to the pool, non-recursively. + + If the Descriptor contains nested messages or enums, the caller must + explicitly register them. This method also registers the FileDescriptor + associated with the message. + + Args: + desc: A Descriptor. + """ + if not isinstance(desc, descriptor.Descriptor): + raise TypeError('Expected instance of descriptor.Descriptor.') + + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + + self._descriptors[desc.full_name] = desc + self._AddFileDescriptor(desc.file) + + # Add EnumDescriptor to descriptor pool is dreprecated. 
Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddEnumDescriptor(self, enum_desc): + self._AddEnumDescriptor(enum_desc) + + # Never call this method. It is for internal usage only. + def _AddEnumDescriptor(self, enum_desc): + """Adds an EnumDescriptor to the pool. + + This method also registers the FileDescriptor associated with the enum. + + Args: + enum_desc: An EnumDescriptor. + """ + + if not isinstance(enum_desc, descriptor.EnumDescriptor): + raise TypeError('Expected instance of descriptor.EnumDescriptor.') + + file_name = enum_desc.file.name + self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name) + self._enum_descriptors[enum_desc.full_name] = enum_desc + + # Top enum values need to be indexed. + # Count the number of dots to see whether the enum is toplevel or nested + # in a message. We cannot use enum_desc.containing_type at this stage. + if enum_desc.file.package: + top_level = (enum_desc.full_name.count('.') + - enum_desc.file.package.count('.') == 1) + else: + top_level = enum_desc.full_name.count('.') == 0 + if top_level: + file_name = enum_desc.file.name + package = enum_desc.file.package + for enum_value in enum_desc.values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, enum_value.name))) + self._CheckConflictRegister(enum_value, full_name, file_name) + self._top_enum_values[full_name] = enum_value + self._AddFileDescriptor(enum_desc.file) + + # Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddServiceDescriptor(self, service_desc): + self._AddServiceDescriptor(service_desc) + + # Never call this method. It is for internal usage only. + def _AddServiceDescriptor(self, service_desc): + """Adds a ServiceDescriptor to the pool. + + Args: + service_desc: A ServiceDescriptor. + """ + + if not isinstance(service_desc, descriptor.ServiceDescriptor): + raise TypeError('Expected instance of descriptor.ServiceDescriptor.') + + self._CheckConflictRegister(service_desc, service_desc.full_name, + service_desc.file.name) + self._service_descriptors[service_desc.full_name] = service_desc + + # Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddExtensionDescriptor(self, extension): + self._AddExtensionDescriptor(extension) + + # Never call this method. It is for internal usage only. + def _AddExtensionDescriptor(self, extension): + """Adds a FieldDescriptor describing an extension to the pool. + + Args: + extension: A FieldDescriptor. + + Raises: + AssertionError: when another extension with the same number extends the + same message. + TypeError: when the specified extension is not a + descriptor.FieldDescriptor. + """ + if not (isinstance(extension, descriptor.FieldDescriptor) and + extension.is_extension): + raise TypeError('Expected an extension descriptor.') + + if extension.extension_scope is None: + self._toplevel_extensions[extension.full_name] = extension + + try: + existing_desc = self._extensions_by_number[ + extension.containing_type][extension.number] + except KeyError: + pass + else: + if extension is not existing_desc: + raise AssertionError( + 'Extensions "%s" and "%s" both try to extend message type "%s" ' + 'with field number %d.' 
% + (extension.full_name, existing_desc.full_name, + extension.containing_type.full_name, extension.number)) + + self._extensions_by_number[extension.containing_type][ + extension.number] = extension + self._extensions_by_name[extension.containing_type][ + extension.full_name] = extension + + # Also register MessageSet extensions with the type name. + if _IsMessageSetExtension(extension): + self._extensions_by_name[extension.containing_type][ + extension.message_type.full_name] = extension + + @_Deprecated + def AddFileDescriptor(self, file_desc): + self._InternalAddFileDescriptor(file_desc) + + # Never call this method. It is for internal usage only. + def _InternalAddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + self._AddFileDescriptor(file_desc) + # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. + # FieldDescriptor.file is added in code gen. Remove this solution after + # maybe 2020 for compatibility reason (with 3.4.1 only). + for extension in file_desc.extensions_by_name.values(): + self._file_desc_by_toplevel_extension[ + extension.full_name] = file_desc + + def _AddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + if not isinstance(file_desc, descriptor.FileDescriptor): + raise TypeError('Expected instance of descriptor.FileDescriptor.') + self._file_descriptors[file_desc.name] = file_desc + + def FindFileByName(self, file_name): + """Gets a FileDescriptor by file name. + + Args: + file_name (str): The path to the file to get a descriptor for. + + Returns: + FileDescriptor: The descriptor for the named file. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + try: + return self._file_descriptors[file_name] + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileByName(file_name) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file named %s' % file_name) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindFileContainingSymbol(self, symbol): + """Gets the FileDescriptor for the file containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + symbol = _NormalizeFullyQualifiedName(symbol) + try: + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + pass + + try: + # Try fallback database. Build and find again if possible. + self._FindFileContainingSymbolInDb(symbol) + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + raise KeyError('Cannot find a file containing %s' % symbol) + + def _InternalFindFileContainingSymbol(self, symbol): + """Gets the already built FileDescriptor containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. 
+ """ + try: + return self._descriptors[symbol].file + except KeyError: + pass + + try: + return self._enum_descriptors[symbol].file + except KeyError: + pass + + try: + return self._service_descriptors[symbol].file + except KeyError: + pass + + try: + return self._top_enum_values[symbol].type.file + except KeyError: + pass + + try: + return self._file_desc_by_toplevel_extension[symbol] + except KeyError: + pass + + # Try fields, enum values and nested extensions inside a message. + top_name, _, sub_name = symbol.rpartition('.') + try: + message = self.FindMessageTypeByName(top_name) + assert (sub_name in message.extensions_by_name or + sub_name in message.fields_by_name or + sub_name in message.enum_values_by_name) + return message.file + except (KeyError, AssertionError): + raise KeyError('Cannot find a file containing %s' % symbol) + + def FindMessageTypeByName(self, full_name): + """Loads the named descriptor from the pool. + + Args: + full_name (str): The full name of the descriptor to load. + + Returns: + Descriptor: The descriptor for the named type. + + Raises: + KeyError: if the message cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._descriptors[full_name] + + def FindEnumTypeByName(self, full_name): + """Loads the named enum descriptor from the pool. + + Args: + full_name (str): The full name of the enum descriptor to load. + + Returns: + EnumDescriptor: The enum descriptor for the named type. + + Raises: + KeyError: if the enum cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._enum_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._enum_descriptors[full_name] + + def FindFieldByName(self, full_name): + """Loads the named field descriptor from the pool. + + Args: + full_name (str): The full name of the field descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named field. + + Raises: + KeyError: if the field cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, field_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.fields_by_name[field_name] + + def FindOneofByName(self, full_name): + """Loads the named oneof descriptor from the pool. + + Args: + full_name (str): The full name of the oneof descriptor to load. + + Returns: + OneofDescriptor: The oneof descriptor for the named oneof. + + Raises: + KeyError: if the oneof cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, oneof_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.oneofs_by_name[oneof_name] + + def FindExtensionByName(self, full_name): + """Loads the named extension descriptor from the pool. + + Args: + full_name (str): The full name of the extension descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named extension. + + Raises: + KeyError: if the extension cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + try: + # The proto compiler does not give any link between the FileDescriptor + # and top-level extensions unless the FileDescriptorProto is added to + # the DescriptorDatabase, but this can impact memory usage. 
+ # So we registered these extensions by name explicitly. + return self._toplevel_extensions[full_name] + except KeyError: + pass + message_name, _, extension_name = full_name.rpartition('.') + try: + # Most extensions are nested inside a message. + scope = self.FindMessageTypeByName(message_name) + except KeyError: + # Some extensions are defined at file scope. + scope = self._FindFileContainingSymbolInDb(full_name) + return scope.extensions_by_name[extension_name] + + def FindExtensionByNumber(self, message_descriptor, number): + """Gets the extension of the specified message with the specified number. + + Extensions have to be registered to this pool by calling :func:`Add` or + :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): descriptor of the extended message. + number (int): Number of the extension field. + + Returns: + FieldDescriptor: The descriptor for the extension. + + Raises: + KeyError: when no extension with the given number is known for the + specified message. + """ + try: + return self._extensions_by_number[message_descriptor][number] + except KeyError: + self._TryLoadExtensionFromDB(message_descriptor, number) + return self._extensions_by_number[message_descriptor][number] + + def FindAllExtensions(self, message_descriptor): + """Gets all the known extensions of a given message. + + Extensions have to be registered to this pool by build related + :func:`Add` or :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): Descriptor of the extended message. + + Returns: + list[FieldDescriptor]: Field descriptors describing the extensions. + """ + # Fallback to descriptor db if FindAllExtensionNumbers is provided. + if self._descriptor_db and hasattr( + self._descriptor_db, 'FindAllExtensionNumbers'): + full_name = message_descriptor.full_name + all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) + for number in all_numbers: + if number in self._extensions_by_number[message_descriptor]: + continue + self._TryLoadExtensionFromDB(message_descriptor, number) + + return list(self._extensions_by_number[message_descriptor].values()) + + def _TryLoadExtensionFromDB(self, message_descriptor, number): + """Try to Load extensions from descriptor db. + + Args: + message_descriptor: descriptor of the extended message. + number: the extension number that needs to be loaded. + """ + if not self._descriptor_db: + return + # Only supported when FindFileContainingExtension is provided. + if not hasattr( + self._descriptor_db, 'FindFileContainingExtension'): + return + + full_name = message_descriptor.full_name + file_proto = self._descriptor_db.FindFileContainingExtension( + full_name, number) + + if file_proto is None: + return + + try: + self._ConvertFileProtoToFileDescriptor(file_proto) + except: + warn_msg = ('Unable to load proto file %s for extension number %d.' % + (file_proto.name, number)) + warnings.warn(warn_msg, RuntimeWarning) + + def FindServiceByName(self, full_name): + """Loads the named service descriptor from the pool. + + Args: + full_name (str): The full name of the service descriptor to load. + + Returns: + ServiceDescriptor: The service descriptor for the named service. + + Raises: + KeyError: if the service cannot be found in the pool. 
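A hedged sketch of the extension-lookup calls documented above (FindExtensionByNumber and FindAllExtensions). It assumes the hypothetical example.Thing from the earlier sketch plus a proto2 extension with tag 1000 have already been added to `pool`:

# Assumes `pool` already contains example/thing.proto plus a file declaring
#   extend example.Thing { optional string label = 1000; }
thing_desc = pool.FindMessageTypeByName('example.Thing')

# Look the extension up by its field number...
ext = pool.FindExtensionByNumber(thing_desc, 1000)

# ...or enumerate every known extension of the message.
all_extensions = pool.FindAllExtensions(thing_desc)
assert ext in all_extensions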
+ """ + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._service_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._service_descriptors[full_name] + + def FindMethodByName(self, full_name): + """Loads the named service method descriptor from the pool. + + Args: + full_name (str): The full name of the method descriptor to load. + + Returns: + MethodDescriptor: The method descriptor for the service method. + + Raises: + KeyError: if the method cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + service_name, _, method_name = full_name.rpartition('.') + service_descriptor = self.FindServiceByName(service_name) + return service_descriptor.methods_by_name[method_name] + + def _FindFileContainingSymbolInDb(self, symbol): + """Finds the file in descriptor DB containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: The file that contains the specified symbol. + + Raises: + KeyError: if the file cannot be found in the descriptor database. + """ + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file containing %s' % symbol) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def _ConvertFileProtoToFileDescriptor(self, file_proto): + """Creates a FileDescriptor from a proto or returns a cached copy. + + This method also has the side effect of loading all the symbols found in + the file into the appropriate dictionaries in the pool. + + Args: + file_proto: The proto to convert. + + Returns: + A FileDescriptor matching the passed in proto. + """ + if file_proto.name not in self._file_descriptors: + built_deps = list(self._GetDeps(file_proto.dependency)) + direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] + public_deps = [direct_deps[i] for i in file_proto.public_dependency] + + file_descriptor = descriptor.FileDescriptor( + pool=self, + name=file_proto.name, + package=file_proto.package, + syntax=file_proto.syntax, + options=_OptionsOrNone(file_proto), + serialized_pb=file_proto.SerializeToString(), + dependencies=direct_deps, + public_dependencies=public_deps, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope = {} + + # This loop extracts all the message and enum types from all the + # dependencies of the file_proto. This is necessary to create the + # scope of available message types when defining the passed in + # file proto. 
+ for dependency in built_deps: + scope.update(self._ExtractSymbols( + dependency.message_types_by_name.values())) + scope.update((_PrefixWithDot(enum.full_name), enum) + for enum in dependency.enum_types_by_name.values()) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( + message_type, file_proto.package, file_descriptor, scope, + file_proto.syntax) + file_descriptor.message_types_by_name[message_desc.name] = ( + message_desc) + + for enum_type in file_proto.enum_type: + file_descriptor.enum_types_by_name[enum_type.name] = ( + self._ConvertEnumDescriptor(enum_type, file_proto.package, + file_descriptor, None, scope, True)) + + for index, extension_proto in enumerate(file_proto.extension): + extension_desc = self._MakeFieldDescriptor( + extension_proto, file_proto.package, index, file_descriptor, + is_extension=True) + extension_desc.containing_type = self._GetTypeFromScope( + file_descriptor.package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, + file_descriptor.package, scope) + file_descriptor.extensions_by_name[extension_desc.name] = ( + extension_desc) + self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( + file_descriptor) + + for desc_proto in file_proto.message_type: + self._SetAllFieldTypes(file_proto.package, desc_proto, scope) + + if file_proto.package: + desc_proto_prefix = _PrefixWithDot(file_proto.package) + else: + desc_proto_prefix = '' + + for desc_proto in file_proto.message_type: + desc = self._GetTypeFromScope( + desc_proto_prefix, desc_proto.name, scope) + file_descriptor.message_types_by_name[desc_proto.name] = desc + + for index, service_proto in enumerate(file_proto.service): + file_descriptor.services_by_name[service_proto.name] = ( + self._MakeServiceDescriptor(service_proto, index, scope, + file_proto.package, file_descriptor)) + + self._file_descriptors[file_proto.name] = file_descriptor + + # Add extensions to the pool + file_desc = self._file_descriptors[file_proto.name] + for extension in file_desc.extensions_by_name.values(): + self._AddExtensionDescriptor(extension) + for message_type in file_desc.message_types_by_name.values(): + for extension in message_type.extensions: + self._AddExtensionDescriptor(extension) + + return file_desc + + def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, + scope=None, syntax=None): + """Adds the proto to the pool in the specified package. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: The package the proto should be located in. + file_desc: The file containing this message. + scope: Dict mapping short and full symbols to message and enum types. + syntax: string indicating syntax of the file ("proto2" or "proto3") + + Returns: + The added descriptor. 
+ """ + + if package: + desc_name = '.'.join((package, desc_proto.name)) + else: + desc_name = desc_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + if scope is None: + scope = {} + + nested = [ + self._ConvertMessageDescriptor( + nested, desc_name, file_desc, scope, syntax) + for nested in desc_proto.nested_type] + enums = [ + self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, + scope, False) + for enum in desc_proto.enum_type] + fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) + for index, field in enumerate(desc_proto.field)] + extensions = [ + self._MakeFieldDescriptor(extension, desc_name, index, file_desc, + is_extension=True) + for index, extension in enumerate(desc_proto.extension)] + oneofs = [ + # pylint: disable=g-complex-comprehension + descriptor.OneofDescriptor( + desc.name, + '.'.join((desc_name, desc.name)), + index, + None, + [], + _OptionsOrNone(desc), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for index, desc in enumerate(desc_proto.oneof_decl) + ] + extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] + if extension_ranges: + is_extendable = True + else: + is_extendable = False + desc = descriptor.Descriptor( + name=desc_proto.name, + full_name=desc_name, + filename=file_name, + containing_type=None, + fields=fields, + oneofs=oneofs, + nested_types=nested, + enum_types=enums, + extensions=extensions, + options=_OptionsOrNone(desc_proto), + is_extendable=is_extendable, + extension_ranges=extension_ranges, + file=file_desc, + serialized_start=None, + serialized_end=None, + syntax=syntax, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for nested in desc.nested_types: + nested.containing_type = desc + for enum in desc.enum_types: + enum.containing_type = desc + for field_index, field_desc in enumerate(desc_proto.field): + if field_desc.HasField('oneof_index'): + oneof_index = field_desc.oneof_index + oneofs[oneof_index].fields.append(fields[field_index]) + fields[field_index].containing_oneof = oneofs[oneof_index] + + scope[_PrefixWithDot(desc_name)] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._descriptors[desc_name] = desc + return desc + + def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, + containing_type=None, scope=None, top_level=False): + """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. + + Args: + enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the enum descriptor. + containing_type: The type containing this enum. + scope: Scope containing available types. + top_level: If True, the enum is a top level symbol. If False, the enum + is defined inside a message. 
+ + Returns: + The added descriptor + """ + + if package: + enum_name = '.'.join((package, enum_proto.name)) + else: + enum_name = enum_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + values = [self._MakeEnumValueDescriptor(value, index) + for index, value in enumerate(enum_proto.value)] + desc = descriptor.EnumDescriptor(name=enum_proto.name, + full_name=enum_name, + filename=file_name, + file=file_desc, + values=values, + containing_type=containing_type, + options=_OptionsOrNone(enum_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope['.%s' % enum_name] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._enum_descriptors[enum_name] = desc + + # Add top level enum values. + if top_level: + for value in values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, value.name))) + self._CheckConflictRegister(value, full_name, file_name) + self._top_enum_values[full_name] = value + + return desc + + def _MakeFieldDescriptor(self, field_proto, message_name, index, + file_desc, is_extension=False): + """Creates a field descriptor from a FieldDescriptorProto. + + For message and enum type fields, this method will do a look up + in the pool for the appropriate descriptor for that type. If it + is unavailable, it will fall back to the _source function to + create it. If this type is still unavailable, construction will + fail. + + Args: + field_proto: The proto describing the field. + message_name: The name of the containing message. + index: Index of the field + file_desc: The file containing the field descriptor. + is_extension: Indication that this field is for an extension. + + Returns: + An initialized FieldDescriptor object + """ + + if message_name: + full_name = '.'.join((message_name, field_proto.name)) + else: + full_name = field_proto.name + + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + + return descriptor.FieldDescriptor( + name=field_proto.name, + full_name=full_name, + index=index, + number=field_proto.number, + type=field_proto.type, + cpp_type=None, + message_type=None, + enum_type=None, + containing_type=None, + label=field_proto.label, + has_default_value=False, + default_value=None, + is_extension=is_extension, + extension_scope=None, + options=_OptionsOrNone(field_proto), + json_name=json_name, + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _SetAllFieldTypes(self, package, desc_proto, scope): + """Sets all the descriptor's fields's types. + + This method also sets the containing types on any extensions. + + Args: + package: The current package of desc_proto. + desc_proto: The message descriptor to update. + scope: Enclosing scope of available types. 
+ """ + + package = _PrefixWithDot(package) + + main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) + + if package == '.': + nested_package = _PrefixWithDot(desc_proto.name) + else: + nested_package = '.'.join([package, desc_proto.name]) + + for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): + self._SetFieldType(field_proto, field_desc, nested_package, scope) + + for extension_proto, extension_desc in ( + zip(desc_proto.extension, main_desc.extensions)): + extension_desc.containing_type = self._GetTypeFromScope( + nested_package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, nested_package, scope) + + for nested_type in desc_proto.nested_type: + self._SetAllFieldTypes(nested_package, nested_type, scope) + + def _SetFieldType(self, field_proto, field_desc, package, scope): + """Sets the field's type, cpp_type, message_type and enum_type. + + Args: + field_proto: Data about the field in proto format. + field_desc: The descriptor to modify. + package: The package the field's container is in. + scope: Enclosing scope of available types. + """ + if field_proto.type_name: + desc = self._GetTypeFromScope(package, field_proto.type_name, scope) + else: + desc = None + + if not field_proto.HasField('type'): + if isinstance(desc, descriptor.Descriptor): + field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE + else: + field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM + + field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( + field_proto.type) + + if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): + field_desc.message_type = desc + + if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.enum_type = desc + + if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: + field_desc.has_default_value = False + field_desc.default_value = [] + elif field_proto.HasField('default_value'): + field_desc.has_default_value = True + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = float(field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = field_proto.default_value + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = field_proto.default_value.lower() == 'true' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values_by_name[ + field_proto.default_value].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = text_encoding.CUnescape( + field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + else: + # All other types are of the "int" type. 
+ field_desc.default_value = int(field_proto.default_value) + else: + field_desc.has_default_value = False + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = 0.0 + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = u'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = False + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values[0].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = b'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: + field_desc.default_value = None + else: + # All other types are of the "int" type. + field_desc.default_value = 0 + + field_desc.type = field_proto.type + + def _MakeEnumValueDescriptor(self, value_proto, index): + """Creates a enum value descriptor object from a enum value proto. + + Args: + value_proto: The proto describing the enum value. + index: The index of the enum value. + + Returns: + An initialized EnumValueDescriptor object. + """ + + return descriptor.EnumValueDescriptor( + name=value_proto.name, + index=index, + number=value_proto.number, + options=_OptionsOrNone(value_proto), + type=None, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _MakeServiceDescriptor(self, service_proto, service_index, scope, + package, file_desc): + """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. + + Args: + service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. + service_index: The index of the service in the File. + scope: Dict mapping short and full symbols to message and enum types. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the service descriptor. + + Returns: + The added descriptor. + """ + + if package: + service_name = '.'.join((package, service_proto.name)) + else: + service_name = service_proto.name + + methods = [self._MakeMethodDescriptor(method_proto, service_name, package, + scope, index) + for index, method_proto in enumerate(service_proto.method)] + desc = descriptor.ServiceDescriptor( + name=service_proto.name, + full_name=service_name, + index=service_index, + methods=methods, + options=_OptionsOrNone(service_proto), + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._service_descriptors[service_name] = desc + return desc + + def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, + index): + """Creates a method descriptor from a MethodDescriptorProto. + + Args: + method_proto: The proto describing the method. + service_name: The name of the containing service. + package: Optional package name to look up for types. + scope: Scope containing available types. + index: Index of the method in the service. + + Returns: + An initialized MethodDescriptor object. 
+ """ + full_name = '.'.join((service_name, method_proto.name)) + input_type = self._GetTypeFromScope( + package, method_proto.input_type, scope) + output_type = self._GetTypeFromScope( + package, method_proto.output_type, scope) + return descriptor.MethodDescriptor( + name=method_proto.name, + full_name=full_name, + index=index, + containing_service=None, + input_type=input_type, + output_type=output_type, + client_streaming=method_proto.client_streaming, + server_streaming=method_proto.server_streaming, + options=_OptionsOrNone(method_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _ExtractSymbols(self, descriptors): + """Pulls out all the symbols from descriptor protos. + + Args: + descriptors: The messages to extract descriptors from. + Yields: + A two element tuple of the type name and descriptor object. + """ + + for desc in descriptors: + yield (_PrefixWithDot(desc.full_name), desc) + for symbol in self._ExtractSymbols(desc.nested_types): + yield symbol + for enum in desc.enum_types: + yield (_PrefixWithDot(enum.full_name), enum) + + def _GetDeps(self, dependencies, visited=None): + """Recursively finds dependencies for file protos. + + Args: + dependencies: The names of the files being depended on. + visited: The names of files already found. + + Yields: + Each direct and indirect dependency. + """ + + visited = visited or set() + for dependency in dependencies: + if dependency not in visited: + visited.add(dependency) + dep_desc = self.FindFileByName(dependency) + yield dep_desc + public_files = [d.name for d in dep_desc.public_dependencies] + yield from self._GetDeps(public_files, visited) + + def _GetTypeFromScope(self, package, type_name, scope): + """Finds a given type name in the current scope. + + Args: + package: The package the proto should be located in. + type_name: The name of the type to be found in the scope. + scope: Dict mapping short and full symbols to message and enum types. + + Returns: + The descriptor for the requested type. + """ + if type_name not in scope: + components = _PrefixWithDot(package).split('.') + while components: + possible_match = '.'.join(components + [type_name]) + if possible_match in scope: + type_name = possible_match + break + else: + components.pop(-1) + return scope[type_name] + + +def _PrefixWithDot(name): + return name if name.startswith('.') else '.%s' % name + + +if _USE_C_DESCRIPTORS: + # TODO(amauryfa): This pool could be constructed from Python code, when we + # support a flag like 'use_cpp_generated_pool=True'. + # pylint: disable=protected-access + _DEFAULT = descriptor._message.default_pool +else: + _DEFAULT = DescriptorPool() + + +def Default(): + return _DEFAULT diff --git a/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py new file mode 100644 index 0000000000..a8ecc07bdf --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/duration_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/duration.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DURATION._serialized_start=51 + _DURATION._serialized_end=93 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py new file mode 100644 index 0000000000..0b4d554db3 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/empty_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/empty.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _EMPTY._serialized_start=48 + _EMPTY._serialized_end=55 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py new file mode 100644 index 0000000000..80a4e96e59 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/field_mask_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/field_mask.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _FIELDMASK._serialized_start=53 + _FIELDMASK._serialized_end=79 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py new file mode 100644 index 0000000000..afdbb78c36 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/_parameterized.py @@ -0,0 +1,443 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Adds support for parameterized tests to Python's unittest TestCase class. + +A parameterized test is a method in a test case that is invoked with different +argument tuples. + +A simple example: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + (1, 2, 3), + (4, 5, 9), + (1, 1, 3)) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Each invocation is a separate test case and properly isolated just +like a normal test method, with its own setUp/tearDown cycle. In the +example above, there are three separate testcases, one of which will +fail due to an assertion error (1 + 1 != 3). + +Parameters for individual test cases can be tuples (with positional parameters) +or dictionaries (with named parameters): + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + {'op1': 1, 'op2': 2, 'result': 3}, + {'op1': 4, 'op2': 5, 'result': 9}, + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + +If a parameterized test fails, the error message will show the +original test name (which is modified internally) and the arguments +for the specific invocation, which are part of the string returned by +the shortDescription() method on test cases. + +The id method of the test, used internally by the unittest framework, +is also modified to show the arguments. To make sure that test names +stay the same across several invocations, object representations like + + >>> class Foo(object): + ... pass + >>> repr(Foo()) + '<__main__.Foo object at 0x23d8610>' + +are turned into '<__main__.Foo>'. For even more descriptive names, +especially in test logs, you can use the named_parameters decorator. In +this case, only tuples are supported, and the first parameters has to +be a string (or an object that returns an apt name when converted via +str()): + + class NamedExample(parameterized.TestCase): + @parameterized.named_parameters( + ('Normal', 'aa', 'aaa', True), + ('EmptyPrefix', '', 'abc', True), + ('BothEmpty', '', '', True)) + def testStartsWith(self, prefix, string, result): + self.assertEqual(result, strings.startswith(prefix)) + +Named tests also have the benefit that they can be run individually +from the command line: + + $ testmodule.py NamedExample.testStartsWithNormal + . + -------------------------------------------------------------------- + Ran 1 test in 0.000s + + OK + +Parameterized Classes +===================== +If invocation arguments are shared across test methods in a single +TestCase class, instead of decorating all test methods +individually, the class itself can be decorated: + + @parameterized.parameters( + (1, 2, 3) + (4, 5, 9)) + class ArithmeticTest(parameterized.TestCase): + def testAdd(self, arg1, arg2, result): + self.assertEqual(arg1 + arg2, result) + + def testSubtract(self, arg2, arg2, result): + self.assertEqual(result - arg1, arg2) + +Inputs from Iterables +===================== +If parameters should be shared across several test cases, or are dynamically +created from other sources, a single non-tuple iterable can be passed into +the decorator. 
This iterable will be used to obtain the test cases: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + c.op1, c.op2, c.result for c in testcases + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Single-Argument Test Methods +============================ +If a test method takes only one argument, the single argument does not need to +be wrapped into a tuple: + + class NegativeNumberExample(parameterized.TestCase): + @parameterized.parameters( + -1, -3, -4, -5 + ) + def testIsNegative(self, arg): + self.assertTrue(IsNegative(arg)) +""" + +__author__ = 'tmarek@google.com (Torsten Marek)' + +import functools +import re +import types +import unittest +import uuid + +try: + # Since python 3 + import collections.abc as collections_abc +except ImportError: + # Won't work after python 3.8 + import collections as collections_abc + +ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') +_SEPARATOR = uuid.uuid1().hex +_FIRST_ARG = object() +_ARGUMENT_REPR = object() + + +def _CleanRepr(obj): + return ADDR_RE.sub(r'<\1>', repr(obj)) + + +# Helper function formerly from the unittest module, removed from it in +# Python 2.7. +def _StrClass(cls): + return '%s.%s' % (cls.__module__, cls.__name__) + + +def _NonStringIterable(obj): + return (isinstance(obj, collections_abc.Iterable) and + not isinstance(obj, str)) + + +def _FormatParameterList(testcase_params): + if isinstance(testcase_params, collections_abc.Mapping): + return ', '.join('%s=%s' % (argname, _CleanRepr(value)) + for argname, value in testcase_params.items()) + elif _NonStringIterable(testcase_params): + return ', '.join(map(_CleanRepr, testcase_params)) + else: + return _FormatParameterList((testcase_params,)) + + +class _ParameterizedTestIter(object): + """Callable and iterable class for producing new test cases.""" + + def __init__(self, test_method, testcases, naming_type): + """Returns concrete test functions for a test and a list of parameters. + + The naming_type is used to determine the name of the concrete + functions as reported by the unittest framework. If naming_type is + _FIRST_ARG, the testcases must be tuples, and the first element must + have a string representation that is a valid Python identifier. + + Args: + test_method: The decorated test method. + testcases: (list of tuple/dict) A list of parameter + tuples/dicts for individual test invocations. + naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR. + """ + self._test_method = test_method + self.testcases = testcases + self._naming_type = naming_type + + def __call__(self, *args, **kwargs): + raise RuntimeError('You appear to be running a parameterized test case ' + 'without having inherited from parameterized.' + 'TestCase. This is bad because none of ' + 'your test cases are actually being run.') + + def __iter__(self): + test_method = self._test_method + naming_type = self._naming_type + + def MakeBoundParamTest(testcase_params): + @functools.wraps(test_method) + def BoundParamTest(self): + if isinstance(testcase_params, collections_abc.Mapping): + test_method(self, **testcase_params) + elif _NonStringIterable(testcase_params): + test_method(self, *testcase_params) + else: + test_method(self, testcase_params) + + if naming_type is _FIRST_ARG: + # Signal the metaclass that the name of the test function is unique + # and descriptive. 
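+        # (__x_use_name__ is read by _UpdateClassDictForParamTestCase, which
+        # then keeps this generated name instead of appending the
+        # nondescriptive '<separator><index>' suffix.)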
+ BoundParamTest.__x_use_name__ = True + BoundParamTest.__name__ += str(testcase_params[0]) + testcase_params = testcase_params[1:] + elif naming_type is _ARGUMENT_REPR: + # __x_extra_id__ is used to pass naming information to the __new__ + # method of TestGeneratorMetaclass. + # The metaclass will make sure to create a unique, but nondescriptive + # name for this test. + BoundParamTest.__x_extra_id__ = '(%s)' % ( + _FormatParameterList(testcase_params),) + else: + raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) + + BoundParamTest.__doc__ = '%s(%s)' % ( + BoundParamTest.__name__, _FormatParameterList(testcase_params)) + if test_method.__doc__: + BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) + return BoundParamTest + return (MakeBoundParamTest(c) for c in self.testcases) + + +def _IsSingletonList(testcases): + """True iff testcases contains only a single non-tuple element.""" + return len(testcases) == 1 and not isinstance(testcases[0], tuple) + + +def _ModifyClass(class_object, testcases, naming_type): + assert not getattr(class_object, '_id_suffix', None), ( + 'Cannot add parameters to %s,' + ' which already has parameterized methods.' % (class_object,)) + class_object._id_suffix = id_suffix = {} + # We change the size of __dict__ while we iterate over it, + # which Python 3.x will complain about, so use copy(). + for name, obj in class_object.__dict__.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) + and isinstance(obj, types.FunctionType)): + delattr(class_object, name) + methods = {} + _UpdateClassDictForParamTestCase( + methods, id_suffix, name, + _ParameterizedTestIter(obj, testcases, naming_type)) + for name, meth in methods.items(): + setattr(class_object, name, meth) + + +def _ParameterDecorator(naming_type, testcases): + """Implementation of the parameterization decorators. + + Args: + naming_type: The naming type. + testcases: Testcase parameters. + + Returns: + A function for modifying the decorated object. + """ + def _Apply(obj): + if isinstance(obj, type): + _ModifyClass( + obj, + list(testcases) if not isinstance(testcases, collections_abc.Sequence) + else testcases, + naming_type) + return obj + else: + return _ParameterizedTestIter(obj, testcases, naming_type) + + if _IsSingletonList(testcases): + assert _NonStringIterable(testcases[0]), ( + 'Single parameter argument must be a non-string iterable') + testcases = testcases[0] + + return _Apply + + +def parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples/dicts/objects (for tests + with only one argument). + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_ARGUMENT_REPR, testcases) + + +def named_parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. The first element of + each parameter tuple should be a string and will be appended to the + name of the test method. + + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples. + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_FIRST_ARG, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for test cases with test generators. 
+ + A test generator is an iterable in a testcase that produces callables. These + callables must be single-argument methods. These methods are injected into + the class namespace and the original iterable is removed. If the name of the + iterable conforms to the test pattern, the injected methods will be picked + up as tests by the unittest framework. + + In general, it is supposed to be used in conjunction with the + parameters decorator. + """ + + def __new__(mcs, class_name, bases, dct): + dct['_id_suffix'] = id_suffix = {} + for name, obj in dct.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) and + _NonStringIterable(obj)): + iterator = iter(obj) + dct.pop(name) + _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) + + return type.__new__(mcs, class_name, bases, dct) + + +def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): + """Adds individual test cases to a dictionary. + + Args: + dct: The target dictionary. + id_suffix: The dictionary for mapping names to test IDs. + name: The original name of the test case. + iterator: The iterator generating the individual test cases. + """ + for idx, func in enumerate(iterator): + assert callable(func), 'Test generators must yield callables, got %r' % ( + func,) + if getattr(func, '__x_use_name__', False): + new_name = func.__name__ + else: + new_name = '%s%s%d' % (name, _SEPARATOR, idx) + assert new_name not in dct, ( + 'Name of parameterized test case "%s" not unique' % (new_name,)) + dct[new_name] = func + id_suffix[new_name] = getattr(func, '__x_extra_id__', '') + + +class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): + """Base class for test cases using the parameters decorator.""" + + def _OriginalName(self): + return self._testMethodName.split(_SEPARATOR)[0] + + def __str__(self): + return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) + + def id(self): # pylint: disable=invalid-name + """Returns the descriptive ID of the test. + + This is used internally by the unittesting framework to get a name + for the test to be used in reports. + + Returns: + The test id. + """ + return '%s.%s%s' % (_StrClass(self.__class__), + self._OriginalName(), + self._id_suffix.get(self._testMethodName, '')) + + +def CoopTestCase(other_base_class): + """Returns a new base class with a cooperative metaclass base. + + This enables the TestCase to be used in combination + with other base classes that have custom metaclasses, such as + mox.MoxTestBase. + + Only works with metaclasses that do not override type.__new__. + + Example: + + import google3 + import mox + + from google3.testing.pybase import parameterized + + class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): + ... + + Args: + other_base_class: (class) A test case base class. + + Returns: + A new class object. + """ + metaclass = type( + 'CoopMetaclass', + (other_base_class.__metaclass__, + TestGeneratorMetaclass), {}) + return metaclass( + 'CoopTestCase', + (other_base_class, TestCase), {}) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py new file mode 100644 index 0000000000..7fef237670 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/api_implementation.py @@ -0,0 +1,112 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import os +import sys +import warnings + +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. + _api_version = _api_implementation.api_version +except ImportError: + _api_version = -1 # Unspecified by compiler flags. + +if _api_version == 1: + raise ValueError('api_version=1 is no longer supported.') + + +_default_implementation_type = ('cpp' if _api_version > 0 else 'python') + + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python' and 'cpp' are valid +# values. Any other value will be ignored. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _default_implementation_type) + +if _implementation_type != 'python': + _implementation_type = 'cpp' + +if 'PyPy' in sys.version and _implementation_type == 'cpp': + warnings.warn('PyPy does not work yet with cpp protocol buffers. ' + 'Falling back to the python implementation.') + _implementation_type = 'python' + + +# Detect if serialization should be deterministic by default +try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps. + # + # NOTE: Merely importing this automatically enables deterministic proto + # serialization for C++ code, but we still need to export it as a boolean so + # that we can do the same for `_implementation_type == 'python'`. + # + # NOTE2: It is possible for C++ code to enable deterministic serialization by + # default _without_ affecting Python code, if the C++ implementation is not in + # use by this module. 
That is intended behavior, so we don't actually expose + # this boolean outside of this module. + # + # pylint: disable=g-import-not-at-top,unused-import + from google.protobuf import enable_deterministic_proto_serialization + _python_deterministic_proto_serialization = True +except ImportError: + _python_deterministic_proto_serialization = False + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. +# Please don't use this function if possible. +def Type(): + return _implementation_type + + +def _SetType(implementation_type): + """Never use! Only for protobuf benchmark.""" + global _implementation_type + _implementation_type = implementation_type + + +# See comment on 'Type' above. +def Version(): + return 2 + + +# For internal use only +def IsPythonDefaultSerializationDeterministic(): + return _python_deterministic_proto_serialization diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py new file mode 100644 index 0000000000..64353ee4af --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/builder.py @@ -0,0 +1,130 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Builds descriptors, message classes and services for generated _pb2.py. + +This file is only called in python generated _pb2.py files. It builds +descriptors, message classes and services that users can directly use +in generated code. 
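+
+A generated _pb2 module typically invokes these helpers right after adding its
+serialized file descriptor, for example (as in the *_pb2.py modules vendored
+above):
+
+  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+  _builder.BuildTopDescriptorsAndMessages(
+      DESCRIPTOR, 'google.protobuf.duration_pb2', globals())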
+""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +_sym_db = _symbol_database.Default() + + +def BuildMessageAndEnumDescriptors(file_des, module): + """Builds message and enum descriptors. + + Args: + file_des: FileDescriptor of the .proto file + module: Generated _pb2 module + """ + + def BuildNestedDescriptors(msg_des, prefix): + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + module_name = prefix + name.upper() + module[module_name] = nested_msg + BuildNestedDescriptors(nested_msg, module_name + '_') + for enum_des in msg_des.enum_types: + module[prefix + enum_des.name.upper()] = enum_des + + for (name, msg_des) in file_des.message_types_by_name.items(): + module_name = '_' + name.upper() + module[module_name] = msg_des + BuildNestedDescriptors(msg_des, module_name + '_') + + +def BuildTopDescriptorsAndMessages(file_des, module_name, module): + """Builds top level descriptors and message classes. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + + def BuildMessage(msg_des): + create_dict = {} + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + create_dict[name] = BuildMessage(nested_msg) + create_dict['DESCRIPTOR'] = msg_des + create_dict['__module__'] = module_name + message_class = _reflection.GeneratedProtocolMessageType( + msg_des.name, (_message.Message,), create_dict) + _sym_db.RegisterMessage(message_class) + return message_class + + # top level enums + for (name, enum_des) in file_des.enum_types_by_name.items(): + module['_' + name.upper()] = enum_des + module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) + for enum_value in enum_des.values: + module[enum_value.name] = enum_value.number + + # top level extensions + for (name, extension_des) in file_des.extensions_by_name.items(): + module[name.upper() + '_FIELD_NUMBER'] = extension_des.number + module[name] = extension_des + + # services + for (name, service) in file_des.services_by_name.items(): + module['_' + name.upper()] = service + + # Build messages. + for (name, msg_des) in file_des.message_types_by_name.items(): + module[name] = BuildMessage(msg_des) + + +def BuildServices(file_des, module_name, module): + """Builds services classes and services stub class. 
+ + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + # pylint: disable=g-import-not-at-top + from google.protobuf import service as _service + from google.protobuf import service_reflection + # pylint: enable=g-import-not-at-top + for (name, service) in file_des.services_by_name.items(): + module[name] = service_reflection.GeneratedServiceType( + name, (_service.Service,), + dict(DESCRIPTOR=service, __module__=module_name)) + stub_name = name + '_Stub' + module[stub_name] = service_reflection.GeneratedServiceStubType( + stub_name, (module[name],), + dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py new file mode 100644 index 0000000000..29fbb53d2f --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/containers.py @@ -0,0 +1,710 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains container classes to represent different protocol buffer types. + +This file defines container classes which represent categories of protocol +buffer field types which need extra maintenance. Currently these categories +are: + +- Repeated scalar fields - These are all repeated fields which aren't + composite (e.g. they are of simple types like int32, string, etc). +- Repeated composite fields - Repeated fields which are composite. This + includes groups and nested messages. 
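+- Map fields - The dict-like ScalarMap and MessageMap containers defined
+  below, which implement map<key, value> fields.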
+""" + +import collections.abc +import copy +import pickle +from typing import ( + Any, + Iterable, + Iterator, + List, + MutableMapping, + MutableSequence, + NoReturn, + Optional, + Sequence, + TypeVar, + Union, + overload, +) + + +_T = TypeVar('_T') +_K = TypeVar('_K') +_V = TypeVar('_V') + + +class BaseContainer(Sequence[_T]): + """Base container class.""" + + # Minimizes memory usage and disallows assignment to other attributes. + __slots__ = ['_message_listener', '_values'] + + def __init__(self, message_listener: Any) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + """ + self._message_listener = message_listener + self._values = [] + + @overload + def __getitem__(self, key: int) -> _T: + ... + + @overload + def __getitem__(self, key: slice) -> List[_T]: + ... + + def __getitem__(self, key): + """Retrieves item by the specified key.""" + return self._values[key] + + def __len__(self) -> int: + """Returns the number of elements in the container.""" + return len(self._values) + + def __ne__(self, other: Any) -> bool: + """Checks if another instance isn't equal to this one.""" + # The concrete classes should define __eq__. + return not self == other + + __hash__ = None + + def __repr__(self) -> str: + return repr(self._values) + + def sort(self, *args, **kwargs) -> None: + # Continue to support the old sort_function keyword argument. + # This is expected to be a rare occurrence, so use LBYL to avoid + # the overhead of actually catching KeyError. + if 'sort_function' in kwargs: + kwargs['cmp'] = kwargs.pop('sort_function') + self._values.sort(*args, **kwargs) + + def reverse(self) -> None: + self._values.reverse() + + +# TODO(slebedev): Remove this. BaseContainer does *not* conform to +# MutableSequence, only its subclasses do. +collections.abc.MutableSequence.register(BaseContainer) + + +class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, type-checked, list-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_type_checker'] + + def __init__( + self, + message_listener: Any, + type_checker: Any, + ) -> None: + """Args: + + message_listener: A MessageListener implementation. The + RepeatedScalarFieldContainer will call this object's Modified() method + when it is modified. + type_checker: A type_checkers.ValueChecker instance to run on elements + inserted into this container. + """ + super().__init__(message_listener) + self._type_checker = type_checker + + def append(self, value: _T) -> None: + """Appends an item to the list. Similar to list.append().""" + self._values.append(self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position. Similar to list.insert().""" + self._values.insert(key, self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given iterable. Similar to list.extend().""" + if elem_seq is None: + return + try: + elem_seq_iter = iter(elem_seq) + except TypeError: + if not elem_seq: + # silently ignore falsy inputs :-/. + # TODO(ptucker): Deprecate this behavior. 
b/18413862 + return + raise + + new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] + if new_values: + self._values.extend(new_values) + self._message_listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one. We do not check the types of the individual fields. + """ + self._values.extend(other) + self._message_listener.Modified() + + def remove(self, elem: _T): + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value) -> None: + """Sets the item on the specified position.""" + if isinstance(key, slice): + if key.step is not None: + raise ValueError('Extended slices not supported') + self._values[key] = map(self._type_checker.CheckValue, value) + self._message_listener.Modified() + else: + self._values[key] = self._type_checker.CheckValue(value) + self._message_listener.Modified() + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + # Special case for the same type which should be common and fast. + if isinstance(other, self.__class__): + return other._values == self._values + # We are presumably comparing against some other sequence type. + return other == self._values + + def __deepcopy__( + self, + unused_memo: Any = None, + ) -> 'RepeatedScalarFieldContainer[_T]': + clone = RepeatedScalarFieldContainer( + copy.deepcopy(self._message_listener), self._type_checker) + clone.MergeFrom(self) + return clone + + def __reduce__(self, **kwargs) -> NoReturn: + raise pickle.PickleError( + "Can't pickle repeated scalar fields, convert to list first") + + +# TODO(slebedev): Constrain T to be a subtype of Message. +class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, list-like container for holding repeated composite fields.""" + + # Disallows assignment to other attributes. + __slots__ = ['_message_descriptor'] + + def __init__(self, message_listener: Any, message_descriptor: Any) -> None: + """ + Note that we pass in a descriptor instead of the generated directly, + since at the time we construct a _RepeatedCompositeFieldContainer we + haven't yet necessarily initialized the type that will be contained in the + container. + + Args: + message_listener: A MessageListener implementation. + The RepeatedCompositeFieldContainer will call this object's + Modified() method when it is modified. + message_descriptor: A Descriptor instance describing the protocol type + that should be present in this container. We'll use the + _concrete_class field of this descriptor when the client calls add(). + """ + super().__init__(message_listener) + self._message_descriptor = message_descriptor + + def add(self, **kwargs: Any) -> _T: + """Adds a new element at the end of the list and returns it. 
Keyword + arguments may be used to initialize the element. + """ + new_element = self._message_descriptor._concrete_class(**kwargs) + new_element._SetListener(self._message_listener) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + return new_element + + def append(self, value: _T) -> None: + """Appends one element by copying the message.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position by copying.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.insert(key, new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given sequence of elements of the same type + + as this one, copying each individual message. + """ + message_class = self._message_descriptor._concrete_class + listener = self._message_listener + values = self._values + for message in elem_seq: + new_element = message_class() + new_element._SetListener(listener) + new_element.MergeFrom(message) + values.append(new_element) + listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one, copying each individual message. + """ + self.extend(other) + + def remove(self, elem: _T) -> None: + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value): + # This method is implemented to make RepeatedCompositeFieldContainer + # structurally compatible with typing.MutableSequence. It is + # otherwise unsupported and will always raise an error. + raise TypeError( + f'{self.__class__.__name__} object does not support item assignment') + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + if not isinstance(other, self.__class__): + raise TypeError('Can only compare repeated composite fields against ' + 'other repeated composite fields.') + return self._values == other._values + + +class ScalarMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. 
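+  # (As with BaseContainer above, __slots__ also minimizes memory usage.
+  # Missing keys behave defaultdict-like: __getitem__ inserts and returns the
+  # value type's default; see the get() override below.)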
+ __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', + '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + key_checker: Any, + value_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. + key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._key_checker = key_checker + self._value_checker = value_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + try: + return self._values[key] + except KeyError: + key = self._key_checker.CheckValue(key) + val = self._value_checker.DefaultValue() + self._values[key] = val + return val + + def __contains__(self, item: _K) -> bool: + # We check the key's type to match the strong-typing flavor of the API. + # Also this makes it easier to match the behavior of the C++ implementation. + self._key_checker.CheckValue(item) + return item in self._values + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __setitem__(self, key: _K, value: _V) -> _T: + checked_key = self._key_checker.CheckValue(key) + checked_value = self._value_checker.CheckValue(value) + self._values[checked_key] = checked_value + self._message_listener.Modified() + + def __delitem__(self, key: _K) -> None: + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: + self._values.update(other._values) + self._message_listener.Modified() + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class MessageMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for with submessage values.""" + + # Disallows assignment to other attributes. + __slots__ = ['_key_checker', '_values', '_message_listener', + '_message_descriptor', '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + message_descriptor: Any, + key_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. 
+ key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._message_descriptor = message_descriptor + self._key_checker = key_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + key = self._key_checker.CheckValue(key) + try: + return self._values[key] + except KeyError: + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + self._values[key] = new_element + self._message_listener.Modified() + return new_element + + def get_or_create(self, key: _K) -> _V: + """get_or_create() is an alias for getitem (ie. map[key]). + + Args: + key: The key to get or create in the map. + + This is useful in cases where you want to be explicit that the call is + mutating the map. This can avoid lint errors for statements like this + that otherwise would appear to be pointless statements: + + msg.my_map[key] + """ + return self[key] + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __contains__(self, item: _K) -> bool: + item = self._key_checker.CheckValue(item) + return item in self._values + + def __setitem__(self, key: _K, value: _V) -> NoReturn: + raise ValueError('May not set values directly, call my_map[key].foo = 5') + + def __delitem__(self, key: _K) -> None: + key = self._key_checker.CheckValue(key) + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: + # pylint: disable=protected-access + for key in other._values: + # According to documentation: "When parsing from the wire or when merging, + # if there are duplicate map keys the last key seen is used". + if key in self: + del self[key] + self[key].CopyFrom(other[key]) + # self._message_listener.Modified() not required here, because + # mutations to submessages already propagate. + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class _UnknownField: + """A parsed unknown field.""" + + # Disallows assignment to other attributes. 
+ __slots__ = ['_field_number', '_wire_type', '_data'] + + def __init__(self, field_number, wire_type, data): + self._field_number = field_number + self._wire_type = wire_type + self._data = data + return + + def __lt__(self, other): + # pylint: disable=protected-access + return self._field_number < other._field_number + + def __eq__(self, other): + if self is other: + return True + # pylint: disable=protected-access + return (self._field_number == other._field_number and + self._wire_type == other._wire_type and + self._data == other._data) + + +class UnknownFieldRef: # pylint: disable=missing-class-docstring + + def __init__(self, parent, index): + self._parent = parent + self._index = index + + def _check_valid(self): + if not self._parent: + raise ValueError('UnknownField does not exist. ' + 'The parent message might be cleared.') + if self._index >= len(self._parent): + raise ValueError('UnknownField does not exist. ' + 'The parent message might be cleared.') + + @property + def field_number(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._field_number + + @property + def wire_type(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._wire_type + + @property + def data(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._data + + +class UnknownFieldSet: + """UnknownField container""" + + # Disallows assignment to other attributes. + __slots__ = ['_values'] + + def __init__(self): + self._values = [] + + def __getitem__(self, index): + if self._values is None: + raise ValueError('UnknownFields does not exist. ' + 'The parent message might be cleared.') + size = len(self._values) + if index < 0: + index += size + if index < 0 or index >= size: + raise IndexError('index %d out of range'.index) + + return UnknownFieldRef(self, index) + + def _internal_get(self, index): + return self._values[index] + + def __len__(self): + if self._values is None: + raise ValueError('UnknownFields does not exist. ' + 'The parent message might be cleared.') + return len(self._values) + + def _add(self, field_number, wire_type, data): + unknown_field = _UnknownField(field_number, wire_type, data) + self._values.append(unknown_field) + return unknown_field + + def __iter__(self): + for i in range(len(self)): + yield UnknownFieldRef(self, i) + + def _extend(self, other): + if other is None: + return + # pylint: disable=protected-access + self._values.extend(other._values) + + def __eq__(self, other): + if self is other: + return True + # Sort unknown fields because their order shouldn't + # affect equality test. + values = list(self._values) + if other is None: + return not values + values.sort() + # pylint: disable=protected-access + other_values = sorted(other._values) + return values == other_values + + def _clear(self): + for value in self._values: + # pylint: disable=protected-access + if isinstance(value._data, UnknownFieldSet): + value._data._clear() # pylint: disable=protected-access + self._values = None diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py new file mode 100644 index 0000000000..bc1b7b785c --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/decoder.py @@ -0,0 +1,1029 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Code for decoding protocol buffer primitives. + +This code is very similar to encoder.py -- read the docs for that module first. + +A "decoder" is a function with the signature: + Decode(buffer, pos, end, message, field_dict) +The arguments are: + buffer: The string containing the encoded message. + pos: The current position in the string. + end: The position in the string where the current message ends. May be + less than len(buffer) if we're reading a sub-message. + message: The message object into which we're parsing. + field_dict: message._fields (avoids a hashtable lookup). +The decoder reads the field and stores it into field_dict, returning the new +buffer position. A decoder for a repeated field may proactively decode all of +the elements of that field, if they appear consecutively. + +Note that decoders may throw any of the following: + IndexError: Indicates a truncated message. + struct.error: Unpacking of a fixed-width field failed. + message.DecodeError: Other errors. + +Decoders are expected to raise an exception if they are called with pos > end. +This allows callers to be lax about bounds checking: it's fineto read past +"end" as long as you are sure that someone else will notice and throw an +exception later on. + +Something up the call stack is expected to catch IndexError and struct.error +and convert them to message.DecodeError. + +Decoders are constructed using decoder constructors with the signature: + MakeDecoder(field_number, is_repeated, is_packed, key, new_default) +The arguments are: + field_number: The field number of the field we want to decode. + is_repeated: Is the field a repeated field? (bool) + is_packed: Is the field a packed field? (bool) + key: The key to use when looking up the field within field_dict. + (This is actually the FieldDescriptor but nothing in this + file should depend on that.) + new_default: A function which takes a message object as a parameter and + returns a new instance of the default value for this field. 
+ (This is called for repeated fields and sub-messages, when an + instance does not already exist.) + +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import math +import struct + +from google.protobuf.internal import containers +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import message + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return an encoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. + """ + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + + +def _SignedVarintDecoder(bits, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + signbit = 1 << (bits - 1) + mask = (1 << bits) - 1 + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = (result ^ signbit) - signbit + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# All 32-bit and 64-bit values are represented as int. +_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +_DecodeSignedVarint = _SignedVarintDecoder(64, int) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) + + +def ReadTag(buffer, pos): + """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. + + Args: + buffer: memoryview object of the encoded bytes + pos: int of the current position to start from + + Returns: + Tuple[bytes, int] of the tag data and new position. + """ + start = pos + while buffer[pos] & 0x80: + pos += 1 + pos += 1 + + tag_bytes = buffer[start:pos].tobytes() + return tag_bytes, pos + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type. 
+ + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (new_value, pos) = decode_value(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not new_value: + field_dict.pop(key, None) + else: + field_dict[key] = new_value + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized float to a float and new position. 
+ + Args: + buffer: memoryview of the serialized bytes + pos: int, position in the memory view to start at. + + Returns: + Tuple[float, int] of the deserialized float value and new position + in the serialized data. + """ + # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign + # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. + new_pos = pos + 4 + float_bytes = buffer[pos:new_pos].tobytes() + + # If this value has all its exponent bits set, then it's non-finite. + # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. + # To avoid that, we parse it specially. + if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): + # If at least one significand bit is set... + if float_bytes[0:3] != b'\x00\x00\x80': + return (math.nan, new_pos) + # If sign bit is set... + if float_bytes[3:4] == b'\xFF': + return (-math.inf, new_pos) + return (math.inf, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack('= b'\xF0') + and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): + return (math.nan, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack(' end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + value_start_pos = pos + (element, pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + field_number, wire_format.WIRETYPE_VARINT, element) + # pylint: enable=protected-access + if pos > endpoint: + if element in enum_type.values_by_number: + del value[-1] # Discard corrupt value. + else: + del message._unknown_fields[-1] + # pylint: disable=protected-access + del message._unknown_field_set._values[-1] + # pylint: enable=protected-access + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. 
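+
+        Enum values that are not known to the descriptor are not dropped: the
+        loop below records them in the message's unknown fields, so they are
+        preserved when the message is serialized again.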
+      """
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
+        # pylint: disable=protected-access
+        if element in enum_type.values_by_number:
+          value.append(element)
+        else:
+          if not message._unknown_fields:
+            message._unknown_fields = []
+          message._unknown_fields.append(
+              (tag_bytes, buffer[pos:new_pos].tobytes()))
+          if message._unknown_field_set is None:
+            message._unknown_field_set = containers.UnknownFieldSet()
+          message._unknown_field_set._add(
+              field_number, wire_format.WIRETYPE_VARINT, element)
+          # pylint: enable=protected-access
+        # Predict that the next tag is another copy of the same repeated
+        # field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
+          # Prediction failed.  Return.
+          if new_pos > end:
+            raise _DecodeError('Truncated message.')
+          return new_pos
+    return DecodeRepeatedField
+  else:
+    def DecodeField(buffer, pos, end, message, field_dict):
+      """Decode a serialized enum to its value and a new position.
+
+      Args:
+        buffer: memoryview of the serialized bytes.
+        pos: int, position in the memory view to start at.
+        end: int, end position of serialized data
+        message: Message object to store unknown fields in
+        field_dict: Map[Descriptor, Any] to store decoded values in.
+
+      Returns:
+        int, new position in serialized data.
+      """
+      value_start_pos = pos
+      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
+      if pos > end:
+        raise _DecodeError('Truncated message.')
+      if clear_if_default and not enum_value:
+        field_dict.pop(key, None)
+        return pos
+      # pylint: disable=protected-access
+      if enum_value in enum_type.values_by_number:
+        field_dict[key] = enum_value
+      else:
+        if not message._unknown_fields:
+          message._unknown_fields = []
+        tag_bytes = encoder.TagBytes(field_number,
+                                     wire_format.WIRETYPE_VARINT)
+        message._unknown_fields.append(
+            (tag_bytes, buffer[value_start_pos:pos].tobytes()))
+        if message._unknown_field_set is None:
+          message._unknown_field_set = containers.UnknownFieldSet()
+        message._unknown_field_set._add(
+            field_number, wire_format.WIRETYPE_VARINT, enum_value)
+        # pylint: enable=protected-access
+      return pos
+    return DecodeField
+
+
+# --------------------------------------------------------------------
+
+
+Int32Decoder = _SimpleDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)
+
+Int64Decoder = _SimpleDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)
+
+UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
+UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)
+
+SInt32Decoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
+SInt64Decoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)
+
+# Note that Python conveniently guarantees that when using the '<' prefix on
+# formats, they will also have the same size across all platforms (as opposed
+# to without the prefix, where their sizes depend on the C compiler's basic
+# type sizes).
+Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
+Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
+SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
+SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
+FloatDecoder = _FloatDecoder()
+DoubleDecoder = _DoubleDecoder()
+
+BoolDecoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
+
+
+def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
+                  clear_if_default=False):
+  """Returns a decoder for a string field."""
+
+  local_DecodeVarint = _DecodeVarint
+
+  def _ConvertToUnicode(memview):
+    """Convert byte to unicode."""
+    byte_str = memview.tobytes()
+    try:
+      value = str(byte_str, 'utf-8')
+    except UnicodeDecodeError as e:
+      # add more information to the error message and re-raise it.
+      e.reason = '%s in field: %s' % (e, key.full_name)
+      raise
+
+    return value
+
+  assert not is_packed
+  if is_repeated:
+    tag_bytes = encoder.TagBytes(field_number,
+                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (size, pos) = local_DecodeVarint(buffer, pos)
+        new_pos = pos + size
+        if new_pos > end:
+          raise _DecodeError('Truncated string.')
+        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
+        # Predict that the next tag is another copy of the same repeated field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+          # Prediction failed.  Return.
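+          # For illustration (hypothetical values, not from the original
+          # file): consecutive elements of one repeated field share a tag, so
+          # for a repeated string field number 1 the tag byte is 0x0A
+          # ((1 << 3) | wire type 2) and two values "foo" and "bar" arrive as
+          # b'\x0a\x03foo\x0a\x03bar'.  The loop above keeps appending
+          # elements while the upcoming bytes equal tag_bytes and falls
+          # through to the return below once they no longer do.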
+ return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos].tobytes()) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = buffer[pos:new_pos].tobytes() + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
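+      # For illustration (hypothetical field number, not from the original
+      # file): a group field numbered 1 is framed by a START_GROUP tag byte
+      # 0x0B ((1 << 3) | 3) and an END_GROUP tag byte 0x0C ((1 << 3) | 4), so
+      # end_tag_bytes is what must follow the nested fields parsed above.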
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(descriptor): + """Returns a decoder for a MessageSet item. + + The parameter is the message Descriptor. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + """Decode serialized message set to its value and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
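+    # For illustration (hypothetical values, not from the original file): an
+    # item for type_id 12345 with a 3-byte payload is laid out as
+    #   0x0b            item group start  (field 1, wire type 3)
+    #   0x10 0xb9 0x60  type_id = 12345   (field 2, varint)
+    #   0x1a 0x03 ...   message payload   (field 3, length-delimited)
+    #   0x0c            item group end    (field 1, wire type 4)
+    # and the loop below accepts type_id and message in either order.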
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = message.Extensions._FindExtensionByNumber(type_id) + # pylint: disable=protected-access + if extension is not None: + value = field_dict.get(extension) + if value is None: + message_type = extension.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + message._FACTORY.GetPrototype(message_type) + value = field_dict.setdefault( + extension, message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + type_id, + wire_format.WIRETYPE_LENGTH_DELIMITED, + buffer[message_start:message_end].tobytes()) + # pylint: enable=protected-access + + return pos + + return DecodeItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. + message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].CopyFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. 
Returns the new position."""
+  # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
+  # With this code, ord(b'') raises TypeError.  Both are handled in
+  # python_message.py to generate a 'Truncated message' error.
+  while ord(buffer[pos:pos+1].tobytes()) & 0x80:
+    pos += 1
+  pos += 1
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+def _SkipFixed64(buffer, pos, end):
+  """Skip a fixed64 value.  Returns the new position."""
+
+  pos += 8
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+
+def _DecodeFixed64(buffer, pos):
+  """Decode a fixed64."""
+  new_pos = pos + 8
+  return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
+
+
+def _SkipLengthDelimited(buffer, pos, end):
+  """Skip a length-delimited value.  Returns the new position."""
+
+  (size, pos) = _DecodeVarint(buffer, pos)
+  pos += size
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+
+def _SkipGroup(buffer, pos, end):
+  """Skip sub-group.  Returns the new position."""
+
+  while 1:
+    (tag_bytes, pos) = ReadTag(buffer, pos)
+    new_pos = SkipField(buffer, pos, end, tag_bytes)
+    if new_pos == -1:
+      return pos
+    pos = new_pos
+
+
+def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
+  """Decode UnknownFieldSet.  Returns the UnknownFieldSet and new position."""
+
+  unknown_field_set = containers.UnknownFieldSet()
+  while end_pos is None or pos < end_pos:
+    (tag_bytes, pos) = ReadTag(buffer, pos)
+    (tag, _) = _DecodeVarint(tag_bytes, 0)
+    field_number, wire_type = wire_format.UnpackTag(tag)
+    if wire_type == wire_format.WIRETYPE_END_GROUP:
+      break
+    (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
+    # pylint: disable=protected-access
+    unknown_field_set._add(field_number, wire_type, data)
+
+  return (unknown_field_set, pos)
+
+
+def _DecodeUnknownField(buffer, pos, wire_type):
+  """Decode an unknown field.  Returns the UnknownField and new position."""
+
+  if wire_type == wire_format.WIRETYPE_VARINT:
+    (data, pos) = _DecodeVarint(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_FIXED64:
+    (data, pos) = _DecodeFixed64(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_FIXED32:
+    (data, pos) = _DecodeFixed32(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
+    (size, pos) = _DecodeVarint(buffer, pos)
+    data = buffer[pos:pos+size].tobytes()
+    pos += size
+  elif wire_type == wire_format.WIRETYPE_START_GROUP:
+    (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_END_GROUP:
+    return (0, -1)
+  else:
+    raise _DecodeError('Wrong wire type in tag.')
+
+  return (data, pos)
+
+
+def _EndGroup(buffer, pos, end):
+  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
+
+  return -1
+
+
+def _SkipFixed32(buffer, pos, end):
+  """Skip a fixed32 value.
Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed32(buffer, pos): + """Decode a fixed32.""" + + new_pos = pos + 4 + return (struct.unpack('B').pack + + def EncodeVarint(write, value, unused_deterministic=None): + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeVarint + + +def _SignedVarintEncoder(): + """Return an encoder for a basic signed varint value (does not include + tag).""" + + local_int2byte = struct.Struct('>B').pack + + def EncodeSignedVarint(write, value, unused_deterministic=None): + if value < 0: + value += (1 << 64) + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeSignedVarint + + +_EncodeVarint = _VarintEncoder() +_EncodeSignedVarint = _SignedVarintEncoder() + + +def _VarintBytes(value): + """Encode the given integer as a varint and return the bytes. This is only + called at startup time so it doesn't need to be fast.""" + + pieces = [] + _EncodeVarint(pieces.append, value, True) + return b"".join(pieces) + + +def TagBytes(field_number, wire_type): + """Encode the given tag and return the bytes. Only called at startup.""" + + return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) + +# -------------------------------------------------------------------- +# As with sizers (see above), we have a number of common encoder +# implementations. + + +def _SimpleEncoder(wire_type, encode_value, compute_value_size): + """Return a constructor for an encoder for fields of a particular type. + + Args: + wire_type: The field's wire type, for encoding tags. + encode_value: A function which encodes an individual value, e.g. + _EncodeVarint(). + compute_value_size: A function which computes the size of an individual + value, e.g. _VarintSize(). + """ + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(element) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, element, deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, element, deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, value, deterministic) + return EncodeField + + return SpecificEncoder + + +def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): + """Like SimpleEncoder but additionally invokes modify_value on every value + before passing it to encode_value. 
Usually modify_value is ZigZagEncode.""" + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(modify_value(element)) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, modify_value(element), deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, modify_value(element), deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, modify_value(value), deterministic) + return EncodeField + + return SpecificEncoder + + +def _StructPackEncoder(wire_type, format): + """Return a constructor for an encoder for a fixed-width field. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + write(local_struct_pack(format, element)) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + write(local_struct_pack(format, element)) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + return write(local_struct_pack(format, value)) + return EncodeField + + return SpecificEncoder + + +def _FloatingPointEncoder(wire_type, format): + """Return a constructor for an encoder for float fields. + + This is like StructPackEncoder, but catches errors that may be due to + passing non-finite floating-point values to struct.pack, and makes a + second attempt to encode those values. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + if value_size == 4: + def EncodeNonFiniteOrRaise(write, value): + # Remember that the serialized form uses little-endian byte order. 
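+      # For illustration (not from the original file): +inf is the IEEE-754
+      # single-precision pattern 0x7F800000 and the quiet NaN written here is
+      # 0x7FC00000; in little-endian order those are exactly the byte strings
+      # below.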
+ if value == _POS_INF: + write(b'\x00\x00\x80\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x80\xFF') + elif value != value: # NaN + write(b'\x00\x00\xC0\x7F') + else: + raise + elif value_size == 8: + def EncodeNonFiniteOrRaise(write, value): + if value == _POS_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') + elif value != value: # NaN + write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') + else: + raise + else: + raise ValueError('Can\'t encode floating-point values that are ' + '%d bytes long (only 4 or 8)' % value_size) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + # This try/except block is going to be faster than any code that + # we could write to check whether element is finite. + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + try: + write(local_struct_pack(format, value)) + except SystemError: + EncodeNonFiniteOrRaise(write, value) + return EncodeField + + return SpecificEncoder + + +# ==================================================================== +# Here we declare an encoder constructor for each field type. These work +# very similarly to sizer constructors, described earlier. + + +Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) + +UInt32Encoder = UInt64Encoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) + +SInt32Encoder = SInt64Encoder = _ModifiedEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, + wire_format.ZigZagEncode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, ' str + ValueType = int + + def __init__(self, enum_type): + """Inits EnumTypeWrapper with an EnumDescriptor.""" + self._enum_type = enum_type + self.DESCRIPTOR = enum_type # pylint: disable=invalid-name + + def Name(self, number): # pylint: disable=invalid-name + """Returns a string containing the name of an enum value.""" + try: + return self._enum_type.values_by_number[number].name + except KeyError: + pass # fall out to break exception chaining + + if not isinstance(number, int): + raise TypeError( + 'Enum value for {} must be an int, but got {} {!r}.'.format( + self._enum_type.name, type(number), number)) + else: + # repr here to handle the odd case when you pass in a boolean. 
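+      # For illustration (not from the original file): bool is a subclass of
+      # int, so a call like Name(False) on an enum with no value 0 reaches
+      # this branch and {!r} reports "value False" instead of "value 0".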
+ raise ValueError('Enum {} has no name defined for value {!r}'.format( + self._enum_type.name, number)) + + def Value(self, name): # pylint: disable=invalid-name + """Returns the value corresponding to the given enum name.""" + try: + return self._enum_type.values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise ValueError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def keys(self): + """Return a list of the string names in the enum. + + Returns: + A list of strs, in the order they were defined in the .proto file. + """ + + return [value_descriptor.name + for value_descriptor in self._enum_type.values] + + def values(self): + """Return a list of the integer values in the enum. + + Returns: + A list of ints, in the order they were defined in the .proto file. + """ + + return [value_descriptor.number + for value_descriptor in self._enum_type.values] + + def items(self): + """Return a list of the (name, value) pairs of the enum. + + Returns: + A list of (str, int) pairs, in the order they were defined + in the .proto file. + """ + return [(value_descriptor.name, value_descriptor.number) + for value_descriptor in self._enum_type.values] + + def __getattr__(self, name): + """Returns the value corresponding to the given enum name.""" + try: + return super( + EnumTypeWrapper, + self).__getattribute__('_enum_type').values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise AttributeError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py new file mode 100644 index 0000000000..b346cf283e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/extension_dict.py @@ -0,0 +1,213 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains _ExtensionDict class to represent extensions. +""" + +from google.protobuf.internal import type_checkers +from google.protobuf.descriptor import FieldDescriptor + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' % extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +# TODO(robinson): Unify error handling of "unknown extension" crap. +# TODO(robinson): Support iteritems()-style iteration over all +# extensions with the "has" bits turned on? +class _ExtensionDict(object): + + """Dict-like container for Extension fields on proto instances. + + Note that in all cases we expect extension handles to be + FieldDescriptors. + """ + + def __init__(self, extended_message): + """ + Args: + extended_message: Message instance for which we are the Extensions dict. + """ + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + message_type = extension_handle.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + self._extended_message._FACTORY.GetPrototype(message_type) + assert getattr(extension_handle.message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (extension_handle.full_name, + extension_handle.message_type.full_name)) + result = extension_handle.message_type._concrete_class() + try: + result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. 
This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [field for field in my_fields if field.is_extension] + other_fields = [field for field in other_fields if field.is_extension] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __len__(self): + fields = self._extended_message.ListFields() + # Get rid of non-extension fields. + extension_fields = [field for field in fields if field[0].is_extension] + return len(extension_fields) + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. Note also that we may have to call _Modified() when we do + # successfully set a field this way, to set any necessary "has" bits in the + # ancestors of the extended message. + def __setitem__(self, extension_handle, value): + """If extension_handle specifies a non-repeated, scalar extension + field, sets the value of that field. + """ + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or + extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): + raise TypeError( + 'Cannot assign to extension "%s" because it is a repeated or ' + 'composite type.' % extension_handle.full_name) + + # It's slightly wasteful to lookup the type checker each time, + # but we expect this to be a vanishingly uncommon case anyway. + type_checker = type_checkers.GetTypeChecker(extension_handle) + # pylint: disable=protected-access + self._extended_message._fields[extension_handle] = ( + type_checker.CheckValue(value)) + self._extended_message._Modified() + + def __delitem__(self, extension_handle): + self._extended_message.ClearExtension(extension_handle) + + def _FindExtensionByName(self, name): + """Tries to find a known extension with the specified name. + + Args: + name: Extension full name. + + Returns: + Extension field descriptor. + """ + return self._extended_message._extensions_by_name.get(name, None) + + def _FindExtensionByNumber(self, number): + """Tries to find a known extension with the field number. + + Args: + number: Extension field number. + + Returns: + Extension field descriptor. 
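+      None is returned when no extension with that field number is known to
+      the message; MessageSetItemDecoder in decoder.py relies on this to keep
+      such payloads as unknown fields.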
+ """ + return self._extended_message._extensions_by_number.get(number, None) + + def __iter__(self): + # Return a generator over the populated extension fields + return (f[0] for f in self._extended_message.ListFields() + if f[0].is_extension) + + def __contains__(self, extension_handle): + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if extension_handle not in self._extended_message._fields: + return False + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + return bool(self._extended_message._fields.get(extension_handle)) + + if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + value = self._extended_message._fields.get(extension_handle) + # pylint: disable=protected-access + return value is not None and value._is_present_in_parent + + return True diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py new file mode 100644 index 0000000000..0fc255a774 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_listener.py @@ -0,0 +1,78 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Defines a listener interface for observing certain +state transitions on Message objects. + +Also defines a null implementation of this interface. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +class MessageListener(object): + + """Listens for modifications made to a message. Meant to be registered via + Message._SetListener(). + + Attributes: + dirty: If True, then calling Modified() would be a no-op. This can be + used to avoid these calls entirely in the common case. + """ + + def Modified(self): + """Called every time the message is modified in such a way that the parent + message may need to be updated. 
This currently means either: + (a) The message was modified for the first time, so the parent message + should henceforth mark the message as present. + (b) The message's cached byte size became dirty -- i.e. the message was + modified for the first time after a previous call to ByteSize(). + Therefore the parent should also mark its byte size as dirty. + Note that (a) implies (b), since new objects start out with a client cached + size (zero). However, we document (a) explicitly because it is important. + + Modified() will *only* be called in response to one of these two events -- + not every time the sub-message is modified. + + Note that if the listener's |dirty| attribute is true, then calling + Modified at the moment would be a no-op, so it can be skipped. Performance- + sensitive callers should check this attribute directly before calling since + it will be true most of the time. + """ + + raise NotImplementedError + + +class NullMessageListener(object): + + """No-op MessageListener implementation.""" + + def Modified(self): + pass diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py new file mode 100644 index 0000000000..63651a3f19 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/message_set_extensions_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/message_set_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageSet.RegisterExtension(message_set_extension3) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + + DESCRIPTOR._options = None + _TESTMESSAGESET._options = None + _TESTMESSAGESET._serialized_options = b'\010\001' + _TESTMESSAGESET._serialized_start=83 + _TESTMESSAGESET._serialized_end=113 + _TESTMESSAGESETEXTENSION1._serialized_start=116 + _TESTMESSAGESETEXTENSION1._serialized_end=281 + _TESTMESSAGESETEXTENSION2._serialized_start=284 + _TESTMESSAGESETEXTENSION2._serialized_end=451 + _TESTMESSAGESETEXTENSION3._serialized_start=453 + _TESTMESSAGESETEXTENSION3._serialized_end=493 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py new file mode 100644 index 0000000000..5497083197 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/missing_enum_values_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/missing_enum_values.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + 
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + _TESTENUMVALUES._serialized_start=88 + _TESTENUMVALUES._serialized_end=409 + _TESTENUMVALUES_NESTEDENUM._serialized_start=378 + _TESTENUMVALUES_NESTEDENUM._serialized_end=409 + _TESTMISSINGENUMVALUES._serialized_start=412 + _TESTMISSINGENUMVALUES._serialized_end=751 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 + _JUSTSTRING._serialized_start=753 + _JUSTSTRING._serialized_end=780 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py new file mode 100644 index 0000000000..0953706bac --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions_dynamic.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) + + DESCRIPTOR._options = None + _DYNAMICMESSAGETYPE._serialized_start=132 + _DYNAMICMESSAGETYPE._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py new file mode 100644 index 0000000000..1cfa1b7c8b --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_extensions_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + ExtendedMessage.RegisterExtension(optional_int_extension) + ExtendedMessage.RegisterExtension(optional_message_extension) + ExtendedMessage.RegisterExtension(repeated_int_extension) + ExtendedMessage.RegisterExtension(repeated_message_extension) + + DESCRIPTOR._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None + _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' + _NESTEDMESSAGE.fields_by_name['submessage']._options = None + _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE._serialized_start=77 + _TOPLEVELMESSAGE._serialized_end=230 + _NESTEDMESSAGE._serialized_start=232 + _NESTEDMESSAGE._serialized_end=314 + _EXTENDEDMESSAGE._serialized_start=316 + _EXTENDEDMESSAGE._serialized_end=391 + _FOREIGNMESSAGE._serialized_start=393 + _FOREIGNMESSAGE._serialized_end=438 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py new file mode 100644 index 0000000000..d7f7115609 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/more_messages_pb2.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_messages.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + OutOfOrderFields.RegisterExtension(optional_uint64) + OutOfOrderFields.RegisterExtension(optional_int64) + globals()['class'].RegisterExtension(globals()['continue']) + getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) + globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) + + DESCRIPTOR._options = None + _IS._serialized_start=2669 + _IS._serialized_end=2696 + _OUTOFORDERFIELDS._serialized_start=74 + _OUTOFORDERFIELDS._serialized_end=178 + _CLASS._serialized_start=181 + _CLASS._serialized_end=514 + _CLASS_TRY._serialized_start=448 + _CLASS_TRY._serialized_end=476 + _CLASS_FOR._serialized_start=478 + _CLASS_FOR._serialized_end=506 + _EXTENDCLASS._serialized_start=516 + _EXTENDCLASS._serialized_end=579 + _TESTFULLKEYWORD._serialized_start=581 + _TESTFULLKEYWORD._serialized_end=707 + _LOTSNESTEDMESSAGE._serialized_start=710 + _LOTSNESTEDMESSAGE._serialized_end=2667 + _LOTSNESTEDMESSAGE_B0._serialized_start=731 + _LOTSNESTEDMESSAGE_B0._serialized_end=735 + _LOTSNESTEDMESSAGE_B1._serialized_start=737 + 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 + _LOTSNESTEDMESSAGE_B2._serialized_start=743 + _LOTSNESTEDMESSAGE_B2._serialized_end=747 + _LOTSNESTEDMESSAGE_B3._serialized_start=749 + _LOTSNESTEDMESSAGE_B3._serialized_end=753 + _LOTSNESTEDMESSAGE_B4._serialized_start=755 + _LOTSNESTEDMESSAGE_B4._serialized_end=759 + _LOTSNESTEDMESSAGE_B5._serialized_start=761 + _LOTSNESTEDMESSAGE_B5._serialized_end=765 + _LOTSNESTEDMESSAGE_B6._serialized_start=767 + _LOTSNESTEDMESSAGE_B6._serialized_end=771 + _LOTSNESTEDMESSAGE_B7._serialized_start=773 + _LOTSNESTEDMESSAGE_B7._serialized_end=777 + _LOTSNESTEDMESSAGE_B8._serialized_start=779 + _LOTSNESTEDMESSAGE_B8._serialized_end=783 + _LOTSNESTEDMESSAGE_B9._serialized_start=785 + _LOTSNESTEDMESSAGE_B9._serialized_end=789 + _LOTSNESTEDMESSAGE_B10._serialized_start=791 + _LOTSNESTEDMESSAGE_B10._serialized_end=796 + _LOTSNESTEDMESSAGE_B11._serialized_start=798 + _LOTSNESTEDMESSAGE_B11._serialized_end=803 + _LOTSNESTEDMESSAGE_B12._serialized_start=805 + _LOTSNESTEDMESSAGE_B12._serialized_end=810 + _LOTSNESTEDMESSAGE_B13._serialized_start=812 + _LOTSNESTEDMESSAGE_B13._serialized_end=817 + _LOTSNESTEDMESSAGE_B14._serialized_start=819 + _LOTSNESTEDMESSAGE_B14._serialized_end=824 + _LOTSNESTEDMESSAGE_B15._serialized_start=826 + _LOTSNESTEDMESSAGE_B15._serialized_end=831 + _LOTSNESTEDMESSAGE_B16._serialized_start=833 + _LOTSNESTEDMESSAGE_B16._serialized_end=838 + _LOTSNESTEDMESSAGE_B17._serialized_start=840 + _LOTSNESTEDMESSAGE_B17._serialized_end=845 + _LOTSNESTEDMESSAGE_B18._serialized_start=847 + _LOTSNESTEDMESSAGE_B18._serialized_end=852 + _LOTSNESTEDMESSAGE_B19._serialized_start=854 + _LOTSNESTEDMESSAGE_B19._serialized_end=859 + _LOTSNESTEDMESSAGE_B20._serialized_start=861 + _LOTSNESTEDMESSAGE_B20._serialized_end=866 + _LOTSNESTEDMESSAGE_B21._serialized_start=868 + _LOTSNESTEDMESSAGE_B21._serialized_end=873 + _LOTSNESTEDMESSAGE_B22._serialized_start=875 + _LOTSNESTEDMESSAGE_B22._serialized_end=880 + _LOTSNESTEDMESSAGE_B23._serialized_start=882 + _LOTSNESTEDMESSAGE_B23._serialized_end=887 + _LOTSNESTEDMESSAGE_B24._serialized_start=889 + _LOTSNESTEDMESSAGE_B24._serialized_end=894 + _LOTSNESTEDMESSAGE_B25._serialized_start=896 + _LOTSNESTEDMESSAGE_B25._serialized_end=901 + _LOTSNESTEDMESSAGE_B26._serialized_start=903 + _LOTSNESTEDMESSAGE_B26._serialized_end=908 + _LOTSNESTEDMESSAGE_B27._serialized_start=910 + _LOTSNESTEDMESSAGE_B27._serialized_end=915 + _LOTSNESTEDMESSAGE_B28._serialized_start=917 + _LOTSNESTEDMESSAGE_B28._serialized_end=922 + _LOTSNESTEDMESSAGE_B29._serialized_start=924 + _LOTSNESTEDMESSAGE_B29._serialized_end=929 + _LOTSNESTEDMESSAGE_B30._serialized_start=931 + _LOTSNESTEDMESSAGE_B30._serialized_end=936 + _LOTSNESTEDMESSAGE_B31._serialized_start=938 + _LOTSNESTEDMESSAGE_B31._serialized_end=943 + _LOTSNESTEDMESSAGE_B32._serialized_start=945 + _LOTSNESTEDMESSAGE_B32._serialized_end=950 + _LOTSNESTEDMESSAGE_B33._serialized_start=952 + _LOTSNESTEDMESSAGE_B33._serialized_end=957 + _LOTSNESTEDMESSAGE_B34._serialized_start=959 + _LOTSNESTEDMESSAGE_B34._serialized_end=964 + _LOTSNESTEDMESSAGE_B35._serialized_start=966 + _LOTSNESTEDMESSAGE_B35._serialized_end=971 + _LOTSNESTEDMESSAGE_B36._serialized_start=973 + _LOTSNESTEDMESSAGE_B36._serialized_end=978 + _LOTSNESTEDMESSAGE_B37._serialized_start=980 + _LOTSNESTEDMESSAGE_B37._serialized_end=985 + _LOTSNESTEDMESSAGE_B38._serialized_start=987 + _LOTSNESTEDMESSAGE_B38._serialized_end=992 + _LOTSNESTEDMESSAGE_B39._serialized_start=994 + _LOTSNESTEDMESSAGE_B39._serialized_end=999 + 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 + _LOTSNESTEDMESSAGE_B40._serialized_end=1006 + _LOTSNESTEDMESSAGE_B41._serialized_start=1008 + _LOTSNESTEDMESSAGE_B41._serialized_end=1013 + _LOTSNESTEDMESSAGE_B42._serialized_start=1015 + _LOTSNESTEDMESSAGE_B42._serialized_end=1020 + _LOTSNESTEDMESSAGE_B43._serialized_start=1022 + _LOTSNESTEDMESSAGE_B43._serialized_end=1027 + _LOTSNESTEDMESSAGE_B44._serialized_start=1029 + _LOTSNESTEDMESSAGE_B44._serialized_end=1034 + _LOTSNESTEDMESSAGE_B45._serialized_start=1036 + _LOTSNESTEDMESSAGE_B45._serialized_end=1041 + _LOTSNESTEDMESSAGE_B46._serialized_start=1043 + _LOTSNESTEDMESSAGE_B46._serialized_end=1048 + _LOTSNESTEDMESSAGE_B47._serialized_start=1050 + _LOTSNESTEDMESSAGE_B47._serialized_end=1055 + _LOTSNESTEDMESSAGE_B48._serialized_start=1057 + _LOTSNESTEDMESSAGE_B48._serialized_end=1062 + _LOTSNESTEDMESSAGE_B49._serialized_start=1064 + _LOTSNESTEDMESSAGE_B49._serialized_end=1069 + _LOTSNESTEDMESSAGE_B50._serialized_start=1071 + _LOTSNESTEDMESSAGE_B50._serialized_end=1076 + _LOTSNESTEDMESSAGE_B51._serialized_start=1078 + _LOTSNESTEDMESSAGE_B51._serialized_end=1083 + _LOTSNESTEDMESSAGE_B52._serialized_start=1085 + _LOTSNESTEDMESSAGE_B52._serialized_end=1090 + _LOTSNESTEDMESSAGE_B53._serialized_start=1092 + _LOTSNESTEDMESSAGE_B53._serialized_end=1097 + _LOTSNESTEDMESSAGE_B54._serialized_start=1099 + _LOTSNESTEDMESSAGE_B54._serialized_end=1104 + _LOTSNESTEDMESSAGE_B55._serialized_start=1106 + _LOTSNESTEDMESSAGE_B55._serialized_end=1111 + _LOTSNESTEDMESSAGE_B56._serialized_start=1113 + _LOTSNESTEDMESSAGE_B56._serialized_end=1118 + _LOTSNESTEDMESSAGE_B57._serialized_start=1120 + _LOTSNESTEDMESSAGE_B57._serialized_end=1125 + _LOTSNESTEDMESSAGE_B58._serialized_start=1127 + _LOTSNESTEDMESSAGE_B58._serialized_end=1132 + _LOTSNESTEDMESSAGE_B59._serialized_start=1134 + _LOTSNESTEDMESSAGE_B59._serialized_end=1139 + _LOTSNESTEDMESSAGE_B60._serialized_start=1141 + _LOTSNESTEDMESSAGE_B60._serialized_end=1146 + _LOTSNESTEDMESSAGE_B61._serialized_start=1148 + _LOTSNESTEDMESSAGE_B61._serialized_end=1153 + _LOTSNESTEDMESSAGE_B62._serialized_start=1155 + _LOTSNESTEDMESSAGE_B62._serialized_end=1160 + _LOTSNESTEDMESSAGE_B63._serialized_start=1162 + _LOTSNESTEDMESSAGE_B63._serialized_end=1167 + _LOTSNESTEDMESSAGE_B64._serialized_start=1169 + _LOTSNESTEDMESSAGE_B64._serialized_end=1174 + _LOTSNESTEDMESSAGE_B65._serialized_start=1176 + _LOTSNESTEDMESSAGE_B65._serialized_end=1181 + _LOTSNESTEDMESSAGE_B66._serialized_start=1183 + _LOTSNESTEDMESSAGE_B66._serialized_end=1188 + _LOTSNESTEDMESSAGE_B67._serialized_start=1190 + _LOTSNESTEDMESSAGE_B67._serialized_end=1195 + _LOTSNESTEDMESSAGE_B68._serialized_start=1197 + _LOTSNESTEDMESSAGE_B68._serialized_end=1202 + _LOTSNESTEDMESSAGE_B69._serialized_start=1204 + _LOTSNESTEDMESSAGE_B69._serialized_end=1209 + _LOTSNESTEDMESSAGE_B70._serialized_start=1211 + _LOTSNESTEDMESSAGE_B70._serialized_end=1216 + _LOTSNESTEDMESSAGE_B71._serialized_start=1218 + _LOTSNESTEDMESSAGE_B71._serialized_end=1223 + _LOTSNESTEDMESSAGE_B72._serialized_start=1225 + _LOTSNESTEDMESSAGE_B72._serialized_end=1230 + _LOTSNESTEDMESSAGE_B73._serialized_start=1232 + _LOTSNESTEDMESSAGE_B73._serialized_end=1237 + _LOTSNESTEDMESSAGE_B74._serialized_start=1239 + _LOTSNESTEDMESSAGE_B74._serialized_end=1244 + _LOTSNESTEDMESSAGE_B75._serialized_start=1246 + _LOTSNESTEDMESSAGE_B75._serialized_end=1251 + _LOTSNESTEDMESSAGE_B76._serialized_start=1253 + _LOTSNESTEDMESSAGE_B76._serialized_end=1258 + _LOTSNESTEDMESSAGE_B77._serialized_start=1260 + 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 + _LOTSNESTEDMESSAGE_B78._serialized_start=1267 + _LOTSNESTEDMESSAGE_B78._serialized_end=1272 + _LOTSNESTEDMESSAGE_B79._serialized_start=1274 + _LOTSNESTEDMESSAGE_B79._serialized_end=1279 + _LOTSNESTEDMESSAGE_B80._serialized_start=1281 + _LOTSNESTEDMESSAGE_B80._serialized_end=1286 + _LOTSNESTEDMESSAGE_B81._serialized_start=1288 + _LOTSNESTEDMESSAGE_B81._serialized_end=1293 + _LOTSNESTEDMESSAGE_B82._serialized_start=1295 + _LOTSNESTEDMESSAGE_B82._serialized_end=1300 + _LOTSNESTEDMESSAGE_B83._serialized_start=1302 + _LOTSNESTEDMESSAGE_B83._serialized_end=1307 + _LOTSNESTEDMESSAGE_B84._serialized_start=1309 + _LOTSNESTEDMESSAGE_B84._serialized_end=1314 + _LOTSNESTEDMESSAGE_B85._serialized_start=1316 + _LOTSNESTEDMESSAGE_B85._serialized_end=1321 + _LOTSNESTEDMESSAGE_B86._serialized_start=1323 + _LOTSNESTEDMESSAGE_B86._serialized_end=1328 + _LOTSNESTEDMESSAGE_B87._serialized_start=1330 + _LOTSNESTEDMESSAGE_B87._serialized_end=1335 + _LOTSNESTEDMESSAGE_B88._serialized_start=1337 + _LOTSNESTEDMESSAGE_B88._serialized_end=1342 + _LOTSNESTEDMESSAGE_B89._serialized_start=1344 + _LOTSNESTEDMESSAGE_B89._serialized_end=1349 + _LOTSNESTEDMESSAGE_B90._serialized_start=1351 + _LOTSNESTEDMESSAGE_B90._serialized_end=1356 + _LOTSNESTEDMESSAGE_B91._serialized_start=1358 + _LOTSNESTEDMESSAGE_B91._serialized_end=1363 + _LOTSNESTEDMESSAGE_B92._serialized_start=1365 + _LOTSNESTEDMESSAGE_B92._serialized_end=1370 + _LOTSNESTEDMESSAGE_B93._serialized_start=1372 + _LOTSNESTEDMESSAGE_B93._serialized_end=1377 + _LOTSNESTEDMESSAGE_B94._serialized_start=1379 + _LOTSNESTEDMESSAGE_B94._serialized_end=1384 + _LOTSNESTEDMESSAGE_B95._serialized_start=1386 + _LOTSNESTEDMESSAGE_B95._serialized_end=1391 + _LOTSNESTEDMESSAGE_B96._serialized_start=1393 + _LOTSNESTEDMESSAGE_B96._serialized_end=1398 + _LOTSNESTEDMESSAGE_B97._serialized_start=1400 + _LOTSNESTEDMESSAGE_B97._serialized_end=1405 + _LOTSNESTEDMESSAGE_B98._serialized_start=1407 + _LOTSNESTEDMESSAGE_B98._serialized_end=1412 + _LOTSNESTEDMESSAGE_B99._serialized_start=1414 + _LOTSNESTEDMESSAGE_B99._serialized_end=1419 + _LOTSNESTEDMESSAGE_B100._serialized_start=1421 + _LOTSNESTEDMESSAGE_B100._serialized_end=1427 + _LOTSNESTEDMESSAGE_B101._serialized_start=1429 + _LOTSNESTEDMESSAGE_B101._serialized_end=1435 + _LOTSNESTEDMESSAGE_B102._serialized_start=1437 + _LOTSNESTEDMESSAGE_B102._serialized_end=1443 + _LOTSNESTEDMESSAGE_B103._serialized_start=1445 + _LOTSNESTEDMESSAGE_B103._serialized_end=1451 + _LOTSNESTEDMESSAGE_B104._serialized_start=1453 + _LOTSNESTEDMESSAGE_B104._serialized_end=1459 + _LOTSNESTEDMESSAGE_B105._serialized_start=1461 + _LOTSNESTEDMESSAGE_B105._serialized_end=1467 + _LOTSNESTEDMESSAGE_B106._serialized_start=1469 + _LOTSNESTEDMESSAGE_B106._serialized_end=1475 + _LOTSNESTEDMESSAGE_B107._serialized_start=1477 + _LOTSNESTEDMESSAGE_B107._serialized_end=1483 + _LOTSNESTEDMESSAGE_B108._serialized_start=1485 + _LOTSNESTEDMESSAGE_B108._serialized_end=1491 + _LOTSNESTEDMESSAGE_B109._serialized_start=1493 + _LOTSNESTEDMESSAGE_B109._serialized_end=1499 + _LOTSNESTEDMESSAGE_B110._serialized_start=1501 + _LOTSNESTEDMESSAGE_B110._serialized_end=1507 + _LOTSNESTEDMESSAGE_B111._serialized_start=1509 + _LOTSNESTEDMESSAGE_B111._serialized_end=1515 + _LOTSNESTEDMESSAGE_B112._serialized_start=1517 + _LOTSNESTEDMESSAGE_B112._serialized_end=1523 + _LOTSNESTEDMESSAGE_B113._serialized_start=1525 + _LOTSNESTEDMESSAGE_B113._serialized_end=1531 + _LOTSNESTEDMESSAGE_B114._serialized_start=1533 + _LOTSNESTEDMESSAGE_B114._serialized_end=1539 + 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 + _LOTSNESTEDMESSAGE_B115._serialized_end=1547 + _LOTSNESTEDMESSAGE_B116._serialized_start=1549 + _LOTSNESTEDMESSAGE_B116._serialized_end=1555 + _LOTSNESTEDMESSAGE_B117._serialized_start=1557 + _LOTSNESTEDMESSAGE_B117._serialized_end=1563 + _LOTSNESTEDMESSAGE_B118._serialized_start=1565 + _LOTSNESTEDMESSAGE_B118._serialized_end=1571 + _LOTSNESTEDMESSAGE_B119._serialized_start=1573 + _LOTSNESTEDMESSAGE_B119._serialized_end=1579 + _LOTSNESTEDMESSAGE_B120._serialized_start=1581 + _LOTSNESTEDMESSAGE_B120._serialized_end=1587 + _LOTSNESTEDMESSAGE_B121._serialized_start=1589 + _LOTSNESTEDMESSAGE_B121._serialized_end=1595 + _LOTSNESTEDMESSAGE_B122._serialized_start=1597 + _LOTSNESTEDMESSAGE_B122._serialized_end=1603 + _LOTSNESTEDMESSAGE_B123._serialized_start=1605 + _LOTSNESTEDMESSAGE_B123._serialized_end=1611 + _LOTSNESTEDMESSAGE_B124._serialized_start=1613 + _LOTSNESTEDMESSAGE_B124._serialized_end=1619 + _LOTSNESTEDMESSAGE_B125._serialized_start=1621 + _LOTSNESTEDMESSAGE_B125._serialized_end=1627 + _LOTSNESTEDMESSAGE_B126._serialized_start=1629 + _LOTSNESTEDMESSAGE_B126._serialized_end=1635 + _LOTSNESTEDMESSAGE_B127._serialized_start=1637 + _LOTSNESTEDMESSAGE_B127._serialized_end=1643 + _LOTSNESTEDMESSAGE_B128._serialized_start=1645 + _LOTSNESTEDMESSAGE_B128._serialized_end=1651 + _LOTSNESTEDMESSAGE_B129._serialized_start=1653 + _LOTSNESTEDMESSAGE_B129._serialized_end=1659 + _LOTSNESTEDMESSAGE_B130._serialized_start=1661 + _LOTSNESTEDMESSAGE_B130._serialized_end=1667 + _LOTSNESTEDMESSAGE_B131._serialized_start=1669 + _LOTSNESTEDMESSAGE_B131._serialized_end=1675 + _LOTSNESTEDMESSAGE_B132._serialized_start=1677 + _LOTSNESTEDMESSAGE_B132._serialized_end=1683 + _LOTSNESTEDMESSAGE_B133._serialized_start=1685 + _LOTSNESTEDMESSAGE_B133._serialized_end=1691 + _LOTSNESTEDMESSAGE_B134._serialized_start=1693 + _LOTSNESTEDMESSAGE_B134._serialized_end=1699 + _LOTSNESTEDMESSAGE_B135._serialized_start=1701 + _LOTSNESTEDMESSAGE_B135._serialized_end=1707 + _LOTSNESTEDMESSAGE_B136._serialized_start=1709 + _LOTSNESTEDMESSAGE_B136._serialized_end=1715 + _LOTSNESTEDMESSAGE_B137._serialized_start=1717 + _LOTSNESTEDMESSAGE_B137._serialized_end=1723 + _LOTSNESTEDMESSAGE_B138._serialized_start=1725 + _LOTSNESTEDMESSAGE_B138._serialized_end=1731 + _LOTSNESTEDMESSAGE_B139._serialized_start=1733 + _LOTSNESTEDMESSAGE_B139._serialized_end=1739 + _LOTSNESTEDMESSAGE_B140._serialized_start=1741 + _LOTSNESTEDMESSAGE_B140._serialized_end=1747 + _LOTSNESTEDMESSAGE_B141._serialized_start=1749 + _LOTSNESTEDMESSAGE_B141._serialized_end=1755 + _LOTSNESTEDMESSAGE_B142._serialized_start=1757 + _LOTSNESTEDMESSAGE_B142._serialized_end=1763 + _LOTSNESTEDMESSAGE_B143._serialized_start=1765 + _LOTSNESTEDMESSAGE_B143._serialized_end=1771 + _LOTSNESTEDMESSAGE_B144._serialized_start=1773 + _LOTSNESTEDMESSAGE_B144._serialized_end=1779 + _LOTSNESTEDMESSAGE_B145._serialized_start=1781 + _LOTSNESTEDMESSAGE_B145._serialized_end=1787 + _LOTSNESTEDMESSAGE_B146._serialized_start=1789 + _LOTSNESTEDMESSAGE_B146._serialized_end=1795 + _LOTSNESTEDMESSAGE_B147._serialized_start=1797 + _LOTSNESTEDMESSAGE_B147._serialized_end=1803 + _LOTSNESTEDMESSAGE_B148._serialized_start=1805 + _LOTSNESTEDMESSAGE_B148._serialized_end=1811 + _LOTSNESTEDMESSAGE_B149._serialized_start=1813 + _LOTSNESTEDMESSAGE_B149._serialized_end=1819 + _LOTSNESTEDMESSAGE_B150._serialized_start=1821 + _LOTSNESTEDMESSAGE_B150._serialized_end=1827 + _LOTSNESTEDMESSAGE_B151._serialized_start=1829 + _LOTSNESTEDMESSAGE_B151._serialized_end=1835 + 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 + _LOTSNESTEDMESSAGE_B152._serialized_end=1843 + _LOTSNESTEDMESSAGE_B153._serialized_start=1845 + _LOTSNESTEDMESSAGE_B153._serialized_end=1851 + _LOTSNESTEDMESSAGE_B154._serialized_start=1853 + _LOTSNESTEDMESSAGE_B154._serialized_end=1859 + _LOTSNESTEDMESSAGE_B155._serialized_start=1861 + _LOTSNESTEDMESSAGE_B155._serialized_end=1867 + _LOTSNESTEDMESSAGE_B156._serialized_start=1869 + _LOTSNESTEDMESSAGE_B156._serialized_end=1875 + _LOTSNESTEDMESSAGE_B157._serialized_start=1877 + _LOTSNESTEDMESSAGE_B157._serialized_end=1883 + _LOTSNESTEDMESSAGE_B158._serialized_start=1885 + _LOTSNESTEDMESSAGE_B158._serialized_end=1891 + _LOTSNESTEDMESSAGE_B159._serialized_start=1893 + _LOTSNESTEDMESSAGE_B159._serialized_end=1899 + _LOTSNESTEDMESSAGE_B160._serialized_start=1901 + _LOTSNESTEDMESSAGE_B160._serialized_end=1907 + _LOTSNESTEDMESSAGE_B161._serialized_start=1909 + _LOTSNESTEDMESSAGE_B161._serialized_end=1915 + _LOTSNESTEDMESSAGE_B162._serialized_start=1917 + _LOTSNESTEDMESSAGE_B162._serialized_end=1923 + _LOTSNESTEDMESSAGE_B163._serialized_start=1925 + _LOTSNESTEDMESSAGE_B163._serialized_end=1931 + _LOTSNESTEDMESSAGE_B164._serialized_start=1933 + _LOTSNESTEDMESSAGE_B164._serialized_end=1939 + _LOTSNESTEDMESSAGE_B165._serialized_start=1941 + _LOTSNESTEDMESSAGE_B165._serialized_end=1947 + _LOTSNESTEDMESSAGE_B166._serialized_start=1949 + _LOTSNESTEDMESSAGE_B166._serialized_end=1955 + _LOTSNESTEDMESSAGE_B167._serialized_start=1957 + _LOTSNESTEDMESSAGE_B167._serialized_end=1963 + _LOTSNESTEDMESSAGE_B168._serialized_start=1965 + _LOTSNESTEDMESSAGE_B168._serialized_end=1971 + _LOTSNESTEDMESSAGE_B169._serialized_start=1973 + _LOTSNESTEDMESSAGE_B169._serialized_end=1979 + _LOTSNESTEDMESSAGE_B170._serialized_start=1981 + _LOTSNESTEDMESSAGE_B170._serialized_end=1987 + _LOTSNESTEDMESSAGE_B171._serialized_start=1989 + _LOTSNESTEDMESSAGE_B171._serialized_end=1995 + _LOTSNESTEDMESSAGE_B172._serialized_start=1997 + _LOTSNESTEDMESSAGE_B172._serialized_end=2003 + _LOTSNESTEDMESSAGE_B173._serialized_start=2005 + _LOTSNESTEDMESSAGE_B173._serialized_end=2011 + _LOTSNESTEDMESSAGE_B174._serialized_start=2013 + _LOTSNESTEDMESSAGE_B174._serialized_end=2019 + _LOTSNESTEDMESSAGE_B175._serialized_start=2021 + _LOTSNESTEDMESSAGE_B175._serialized_end=2027 + _LOTSNESTEDMESSAGE_B176._serialized_start=2029 + _LOTSNESTEDMESSAGE_B176._serialized_end=2035 + _LOTSNESTEDMESSAGE_B177._serialized_start=2037 + _LOTSNESTEDMESSAGE_B177._serialized_end=2043 + _LOTSNESTEDMESSAGE_B178._serialized_start=2045 + _LOTSNESTEDMESSAGE_B178._serialized_end=2051 + _LOTSNESTEDMESSAGE_B179._serialized_start=2053 + _LOTSNESTEDMESSAGE_B179._serialized_end=2059 + _LOTSNESTEDMESSAGE_B180._serialized_start=2061 + _LOTSNESTEDMESSAGE_B180._serialized_end=2067 + _LOTSNESTEDMESSAGE_B181._serialized_start=2069 + _LOTSNESTEDMESSAGE_B181._serialized_end=2075 + _LOTSNESTEDMESSAGE_B182._serialized_start=2077 + _LOTSNESTEDMESSAGE_B182._serialized_end=2083 + _LOTSNESTEDMESSAGE_B183._serialized_start=2085 + _LOTSNESTEDMESSAGE_B183._serialized_end=2091 + _LOTSNESTEDMESSAGE_B184._serialized_start=2093 + _LOTSNESTEDMESSAGE_B184._serialized_end=2099 + _LOTSNESTEDMESSAGE_B185._serialized_start=2101 + _LOTSNESTEDMESSAGE_B185._serialized_end=2107 + _LOTSNESTEDMESSAGE_B186._serialized_start=2109 + _LOTSNESTEDMESSAGE_B186._serialized_end=2115 + _LOTSNESTEDMESSAGE_B187._serialized_start=2117 + _LOTSNESTEDMESSAGE_B187._serialized_end=2123 + _LOTSNESTEDMESSAGE_B188._serialized_start=2125 + _LOTSNESTEDMESSAGE_B188._serialized_end=2131 + 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 + _LOTSNESTEDMESSAGE_B189._serialized_end=2139 + _LOTSNESTEDMESSAGE_B190._serialized_start=2141 + _LOTSNESTEDMESSAGE_B190._serialized_end=2147 + _LOTSNESTEDMESSAGE_B191._serialized_start=2149 + _LOTSNESTEDMESSAGE_B191._serialized_end=2155 + _LOTSNESTEDMESSAGE_B192._serialized_start=2157 + _LOTSNESTEDMESSAGE_B192._serialized_end=2163 + _LOTSNESTEDMESSAGE_B193._serialized_start=2165 + _LOTSNESTEDMESSAGE_B193._serialized_end=2171 + _LOTSNESTEDMESSAGE_B194._serialized_start=2173 + _LOTSNESTEDMESSAGE_B194._serialized_end=2179 + _LOTSNESTEDMESSAGE_B195._serialized_start=2181 + _LOTSNESTEDMESSAGE_B195._serialized_end=2187 + _LOTSNESTEDMESSAGE_B196._serialized_start=2189 + _LOTSNESTEDMESSAGE_B196._serialized_end=2195 + _LOTSNESTEDMESSAGE_B197._serialized_start=2197 + _LOTSNESTEDMESSAGE_B197._serialized_end=2203 + _LOTSNESTEDMESSAGE_B198._serialized_start=2205 + _LOTSNESTEDMESSAGE_B198._serialized_end=2211 + _LOTSNESTEDMESSAGE_B199._serialized_start=2213 + _LOTSNESTEDMESSAGE_B199._serialized_end=2219 + _LOTSNESTEDMESSAGE_B200._serialized_start=2221 + _LOTSNESTEDMESSAGE_B200._serialized_end=2227 + _LOTSNESTEDMESSAGE_B201._serialized_start=2229 + _LOTSNESTEDMESSAGE_B201._serialized_end=2235 + _LOTSNESTEDMESSAGE_B202._serialized_start=2237 + _LOTSNESTEDMESSAGE_B202._serialized_end=2243 + _LOTSNESTEDMESSAGE_B203._serialized_start=2245 + _LOTSNESTEDMESSAGE_B203._serialized_end=2251 + _LOTSNESTEDMESSAGE_B204._serialized_start=2253 + _LOTSNESTEDMESSAGE_B204._serialized_end=2259 + _LOTSNESTEDMESSAGE_B205._serialized_start=2261 + _LOTSNESTEDMESSAGE_B205._serialized_end=2267 + _LOTSNESTEDMESSAGE_B206._serialized_start=2269 + _LOTSNESTEDMESSAGE_B206._serialized_end=2275 + _LOTSNESTEDMESSAGE_B207._serialized_start=2277 + _LOTSNESTEDMESSAGE_B207._serialized_end=2283 + _LOTSNESTEDMESSAGE_B208._serialized_start=2285 + _LOTSNESTEDMESSAGE_B208._serialized_end=2291 + _LOTSNESTEDMESSAGE_B209._serialized_start=2293 + _LOTSNESTEDMESSAGE_B209._serialized_end=2299 + _LOTSNESTEDMESSAGE_B210._serialized_start=2301 + _LOTSNESTEDMESSAGE_B210._serialized_end=2307 + _LOTSNESTEDMESSAGE_B211._serialized_start=2309 + _LOTSNESTEDMESSAGE_B211._serialized_end=2315 + _LOTSNESTEDMESSAGE_B212._serialized_start=2317 + _LOTSNESTEDMESSAGE_B212._serialized_end=2323 + _LOTSNESTEDMESSAGE_B213._serialized_start=2325 + _LOTSNESTEDMESSAGE_B213._serialized_end=2331 + _LOTSNESTEDMESSAGE_B214._serialized_start=2333 + _LOTSNESTEDMESSAGE_B214._serialized_end=2339 + _LOTSNESTEDMESSAGE_B215._serialized_start=2341 + _LOTSNESTEDMESSAGE_B215._serialized_end=2347 + _LOTSNESTEDMESSAGE_B216._serialized_start=2349 + _LOTSNESTEDMESSAGE_B216._serialized_end=2355 + _LOTSNESTEDMESSAGE_B217._serialized_start=2357 + _LOTSNESTEDMESSAGE_B217._serialized_end=2363 + _LOTSNESTEDMESSAGE_B218._serialized_start=2365 + _LOTSNESTEDMESSAGE_B218._serialized_end=2371 + _LOTSNESTEDMESSAGE_B219._serialized_start=2373 + _LOTSNESTEDMESSAGE_B219._serialized_end=2379 + _LOTSNESTEDMESSAGE_B220._serialized_start=2381 + _LOTSNESTEDMESSAGE_B220._serialized_end=2387 + _LOTSNESTEDMESSAGE_B221._serialized_start=2389 + _LOTSNESTEDMESSAGE_B221._serialized_end=2395 + _LOTSNESTEDMESSAGE_B222._serialized_start=2397 + _LOTSNESTEDMESSAGE_B222._serialized_end=2403 + _LOTSNESTEDMESSAGE_B223._serialized_start=2405 + _LOTSNESTEDMESSAGE_B223._serialized_end=2411 + _LOTSNESTEDMESSAGE_B224._serialized_start=2413 + _LOTSNESTEDMESSAGE_B224._serialized_end=2419 + _LOTSNESTEDMESSAGE_B225._serialized_start=2421 + _LOTSNESTEDMESSAGE_B225._serialized_end=2427 + 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 + _LOTSNESTEDMESSAGE_B226._serialized_end=2435 + _LOTSNESTEDMESSAGE_B227._serialized_start=2437 + _LOTSNESTEDMESSAGE_B227._serialized_end=2443 + _LOTSNESTEDMESSAGE_B228._serialized_start=2445 + _LOTSNESTEDMESSAGE_B228._serialized_end=2451 + _LOTSNESTEDMESSAGE_B229._serialized_start=2453 + _LOTSNESTEDMESSAGE_B229._serialized_end=2459 + _LOTSNESTEDMESSAGE_B230._serialized_start=2461 + _LOTSNESTEDMESSAGE_B230._serialized_end=2467 + _LOTSNESTEDMESSAGE_B231._serialized_start=2469 + _LOTSNESTEDMESSAGE_B231._serialized_end=2475 + _LOTSNESTEDMESSAGE_B232._serialized_start=2477 + _LOTSNESTEDMESSAGE_B232._serialized_end=2483 + _LOTSNESTEDMESSAGE_B233._serialized_start=2485 + _LOTSNESTEDMESSAGE_B233._serialized_end=2491 + _LOTSNESTEDMESSAGE_B234._serialized_start=2493 + _LOTSNESTEDMESSAGE_B234._serialized_end=2499 + _LOTSNESTEDMESSAGE_B235._serialized_start=2501 + _LOTSNESTEDMESSAGE_B235._serialized_end=2507 + _LOTSNESTEDMESSAGE_B236._serialized_start=2509 + _LOTSNESTEDMESSAGE_B236._serialized_end=2515 + _LOTSNESTEDMESSAGE_B237._serialized_start=2517 + _LOTSNESTEDMESSAGE_B237._serialized_end=2523 + _LOTSNESTEDMESSAGE_B238._serialized_start=2525 + _LOTSNESTEDMESSAGE_B238._serialized_end=2531 + _LOTSNESTEDMESSAGE_B239._serialized_start=2533 + _LOTSNESTEDMESSAGE_B239._serialized_end=2539 + _LOTSNESTEDMESSAGE_B240._serialized_start=2541 + _LOTSNESTEDMESSAGE_B240._serialized_end=2547 + _LOTSNESTEDMESSAGE_B241._serialized_start=2549 + _LOTSNESTEDMESSAGE_B241._serialized_end=2555 + _LOTSNESTEDMESSAGE_B242._serialized_start=2557 + _LOTSNESTEDMESSAGE_B242._serialized_end=2563 + _LOTSNESTEDMESSAGE_B243._serialized_start=2565 + _LOTSNESTEDMESSAGE_B243._serialized_end=2571 + _LOTSNESTEDMESSAGE_B244._serialized_start=2573 + _LOTSNESTEDMESSAGE_B244._serialized_end=2579 + _LOTSNESTEDMESSAGE_B245._serialized_start=2581 + _LOTSNESTEDMESSAGE_B245._serialized_end=2587 + _LOTSNESTEDMESSAGE_B246._serialized_start=2589 + _LOTSNESTEDMESSAGE_B246._serialized_end=2595 + _LOTSNESTEDMESSAGE_B247._serialized_start=2597 + _LOTSNESTEDMESSAGE_B247._serialized_end=2603 + _LOTSNESTEDMESSAGE_B248._serialized_start=2605 + _LOTSNESTEDMESSAGE_B248._serialized_end=2611 + _LOTSNESTEDMESSAGE_B249._serialized_start=2613 + _LOTSNESTEDMESSAGE_B249._serialized_end=2619 + _LOTSNESTEDMESSAGE_B250._serialized_start=2621 + _LOTSNESTEDMESSAGE_B250._serialized_end=2627 + _LOTSNESTEDMESSAGE_B251._serialized_start=2629 + _LOTSNESTEDMESSAGE_B251._serialized_end=2635 + _LOTSNESTEDMESSAGE_B252._serialized_start=2637 + _LOTSNESTEDMESSAGE_B252._serialized_end=2643 + _LOTSNESTEDMESSAGE_B253._serialized_start=2645 + _LOTSNESTEDMESSAGE_B253._serialized_end=2651 + _LOTSNESTEDMESSAGE_B254._serialized_start=2653 + _LOTSNESTEDMESSAGE_B254._serialized_end=2659 + _LOTSNESTEDMESSAGE_B255._serialized_start=2661 + _LOTSNESTEDMESSAGE_B255._serialized_end=2667 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py new file mode 100644 index 0000000000..d46dee080a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/no_package_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/no_package.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _NOPACKAGEENUM._serialized_start=106 + _NOPACKAGEENUM._serialized_end=169 + _NOPACKAGEMESSAGE._serialized_start=45 + _NOPACKAGEMESSAGE._serialized_end=104 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py new file mode 100644 index 0000000000..2921d5cb6e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/python_message.py @@ -0,0 +1,1539 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. +# +# TODO(robinson): Helpers for verbose, common checks like seeing if a +# descriptor's cpp_type is CPPTYPE_MESSAGE. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) 
+ +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +from io import BytesIO +import struct +import sys +import weakref + +# We use "as" to avoid name collisions with variables. +from google.protobuf.internal import api_implementation +from google.protobuf.internal import containers +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import enum_type_wrapper +from google.protobuf.internal import extension_dict +from google.protobuf.internal import message_listener as message_listener_mod +from google.protobuf.internal import type_checkers +from google.protobuf.internal import well_known_types +from google.protobuf.internal import wire_format +from google.protobuf import descriptor as descriptor_mod +from google.protobuf import message as message_mod +from google.protobuf import text_format + +_FieldDescriptor = descriptor_mod.FieldDescriptor +_AnyFullTypeName = 'google.protobuf.Any' +_ExtensionDict = extension_dict._ExtensionDict + +class GeneratedProtocolMessageType(type): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + We add implementations for all methods described in the Message class. We + also create properties to allow getting/setting all fields in the protocol + message. Finally, we create slots to prevent users from accidentally + "setting" nonexistent fields in the protocol message, which then wouldn't get + serialized / deserialized properly. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __new__(cls, name, bases, dictionary): + """Custom allocation for runtime-generated class types. + + We override __new__ because this is apparently the only place + where we can meaningfully set __slots__ on the class we're creating(?). + (The interplay between metaclasses and slots is not very well-documented). + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + + Returns: + Newly-allocated class. + + Raises: + RuntimeError: Generated code only work with python cpp extension. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + if isinstance(descriptor, str): + raise RuntimeError('The generated code only work with python cpp ' + 'extension, but it is using pure python runtime.') + + # If a concrete class already exists for this descriptor, don't try to + # create another. 
Doing so will break any messages that already exist with + # the existing class. + # + # The C++ implementation appears to have its own internal `PyMessageFactory` + # to achieve similar results. + # + # This most commonly happens in `text_format.py` when using descriptors from + # a custom pool; it calls symbol_database.Global().getPrototype() on a + # descriptor which already has an existing concrete class. + new_class = getattr(descriptor, '_concrete_class', None) + if new_class: + return new_class + + if descriptor.full_name in well_known_types.WKTBASES: + bases += (well_known_types.WKTBASES[descriptor.full_name],) + _AddClassAttributesForNestedExtensions(descriptor, dictionary) + _AddSlots(descriptor, dictionary) + + superclass = super(GeneratedProtocolMessageType, cls) + new_class = superclass.__new__(cls, name, bases, dictionary) + return new_class + + def __init__(cls, name, bases, dictionary): + """Here we perform the majority of our work on the class. + We add enum getters, an __init__ method, implementations + of all Message methods, and properties for all fields + in the protocol type. + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + # If this is an _existing_ class looked up via `_concrete_class` in the + # __new__ method above, then we don't need to re-initialize anything. + existing_class = getattr(descriptor, '_concrete_class', None) + if existing_class: + assert existing_class is cls, ( + 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' + % (descriptor.full_name)) + return + + cls._decoders_by_tag = {} + if (descriptor.has_options and + descriptor.GetOptions().message_set_wire_format): + cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( + decoder.MessageSetItemDecoder(descriptor), None) + + # Attach stuff to each FieldDescriptor for quick lookup later on. + for field in descriptor.fields: + _AttachFieldHelpers(cls, field) + + descriptor._concrete_class = cls # pylint: disable=protected-access + _AddEnumValues(descriptor, cls) + _AddInitMethod(descriptor, cls) + _AddPropertiesForFields(descriptor, cls) + _AddPropertiesForExtensions(descriptor, cls) + _AddStaticMethods(cls) + _AddMessageMethods(descriptor, cls) + _AddPrivateHelperMethods(descriptor, cls) + + superclass = super(GeneratedProtocolMessageType, cls) + superclass.__init__(name, bases, dictionary) + + +# Stateless helpers for GeneratedProtocolMessageType below. +# Outside clients should not access these directly. +# +# I opted not to make any of these methods on the metaclass, to make it more +# clear that I'm not really using any state there and to keep clients from +# thinking that they have direct access to these construction helpers. + + +def _PropertyName(proto_field_name): + """Returns the name of the public property attribute which + clients can use to get and (in some cases) set the value + of a protocol message field. + + Args: + proto_field_name: The protocol message field name, exactly + as it appears (or would appear) in a .proto file. + """ + # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. 
+ # nnorwitz makes my day by writing: + # """ + # FYI. See the keyword module in the stdlib. This could be as simple as: + # + # if keyword.iskeyword(proto_field_name): + # return proto_field_name + "_" + # return proto_field_name + # """ + # Kenton says: The above is a BAD IDEA. People rely on being able to use + # getattr() and setattr() to reflectively manipulate field values. If we + # rename the properties, then every such user has to also make sure to apply + # the same transformation. Note that currently if you name a field "yield", + # you can still access it just fine using getattr/setattr -- it's not even + # that cumbersome to do so. + # TODO(kenton): Remove this method entirely if/when everyone agrees with my + # position. + return proto_field_name + + +def _AddSlots(message_descriptor, dictionary): + """Adds a __slots__ entry to dictionary, containing the names of all valid + attributes for this message type. + + Args: + message_descriptor: A Descriptor instance describing this message type. + dictionary: Class dictionary to which we'll add a '__slots__' entry. + """ + dictionary['__slots__'] = ['_cached_byte_size', + '_cached_byte_size_dirty', + '_fields', + '_unknown_fields', + '_unknown_field_set', + '_is_present_in_parent', + '_listener', + '_listener_for_children', + '__weakref__', + '_oneofs'] + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == _FieldDescriptor.TYPE_MESSAGE and + field.label == _FieldDescriptor.LABEL_OPTIONAL) + + +def _IsMapField(field): + return (field.type == _FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def _IsMessageMapField(field): + value_type = field.message_type.fields_by_name['value'] + return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE + + +def _AttachFieldHelpers(cls, field_descriptor): + is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) + is_packable = (is_repeated and + wire_format.IsTypePackable(field_descriptor.type)) + is_proto3 = field_descriptor.containing_type.syntax == 'proto3' + if not is_packable: + is_packed = False + elif field_descriptor.containing_type.syntax == 'proto2': + is_packed = (field_descriptor.has_options and + field_descriptor.GetOptions().packed) + else: + has_packed_false = (field_descriptor.has_options and + field_descriptor.GetOptions().HasField('packed') and + field_descriptor.GetOptions().packed == False) + is_packed = not has_packed_false + is_map_entry = _IsMapField(field_descriptor) + + if is_map_entry: + field_encoder = encoder.MapEncoder(field_descriptor) + sizer = encoder.MapSizer(field_descriptor, + _IsMessageMapField(field_descriptor)) + elif _IsMessageSetExtension(field_descriptor): + field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) + sizer = encoder.MessageSetItemSizer(field_descriptor.number) + else: + field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + + field_descriptor._encoder = field_encoder + field_descriptor._sizer = sizer + field_descriptor._default_constructor = _DefaultValueConstructorForField( + field_descriptor) + + def AddDecoder(wiretype, is_packed): + tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) + decode_type = 
field_descriptor.type + if (decode_type == _FieldDescriptor.TYPE_ENUM and + type_checkers.SupportsOpenEnums(field_descriptor)): + decode_type = _FieldDescriptor.TYPE_INT32 + + oneof_descriptor = None + clear_if_default = False + if field_descriptor.containing_oneof is not None: + oneof_descriptor = field_descriptor + elif (is_proto3 and not is_repeated and + field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): + clear_if_default = True + + if is_map_entry: + is_message_map = _IsMessageMapField(field_descriptor) + + field_decoder = decoder.MapDecoder( + field_descriptor, _GetInitializeDefaultForMap(field_descriptor), + is_message_map) + elif decode_type == _FieldDescriptor.TYPE_STRING: + field_decoder = decoder.StringDecoder( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor) + else: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + # pylint: disable=protected-access + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + + cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) + + AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], + False) + + if is_repeated and wire_format.IsTypePackable(field_descriptor.type): + # To support wire compatibility of adding packed = true, add a decoder for + # packed values regardless of the field's options. + AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) + + +def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + + +def _AddEnumValues(descriptor, cls): + """Sets class-level attributes for all enum fields defined in this message. + + Also exporting a class-level object that can name enum values. + + Args: + descriptor: Descriptor object for this message type. + cls: Class we're constructing for this message type. + """ + for enum_type in descriptor.enum_types: + setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) + for enum_value in enum_type.values: + setattr(cls, enum_value.name, enum_value.number) + + +def _GetInitializeDefaultForMap(field): + if field.label != _FieldDescriptor.LABEL_REPEATED: + raise ValueError('map_entry set on non-repeated field %s' % ( + field.name)) + fields_by_name = field.message_type.fields_by_name + key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) + + value_field = fields_by_name['value'] + if _IsMessageMapField(field): + def MakeMessageMapDefault(message): + return containers.MessageMap( + message._listener_for_children, value_field.message_type, key_checker, + field.message_type) + return MakeMessageMapDefault + else: + value_checker = type_checkers.GetTypeChecker(value_field) + def MakePrimitiveMapDefault(message): + return containers.ScalarMap( + message._listener_for_children, key_checker, value_checker, + field.message_type) + return MakePrimitiveMapDefault + +def _DefaultValueConstructorForField(field): + """Returns a function which returns a default value for a field. 
+ + Args: + field: FieldDescriptor object for this field. + + The returned function has one argument: + message: Message instance containing this field, or a weakref proxy + of same. + + That function in turn returns a default value for this field. The default + value may refer back to |message| via a weak reference. + """ + + if _IsMapField(field): + return _GetInitializeDefaultForMap(field) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + if field.has_default_value and field.default_value != []: + raise ValueError('Repeated field default value not empty list: %s' % ( + field.default_value)) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # We can't look at _concrete_class yet since it might not have + # been set. (Depends on order in which we initialize the classes). + message_type = field.message_type + def MakeRepeatedMessageDefault(message): + return containers.RepeatedCompositeFieldContainer( + message._listener_for_children, field.message_type) + return MakeRepeatedMessageDefault + else: + type_checker = type_checkers.GetTypeChecker(field) + def MakeRepeatedScalarDefault(message): + return containers.RepeatedScalarFieldContainer( + message._listener_for_children, type_checker) + return MakeRepeatedScalarDefault + + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # _concrete_class may not yet be initialized. + message_type = field.message_type + def MakeSubMessageDefault(message): + assert getattr(message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (field.full_name, message_type.full_name)) + result = message_type._concrete_class() + result._SetListener( + _OneofListener(message, field) + if field.containing_oneof is not None + else message._listener_for_children) + return result + return MakeSubMessageDefault + + def MakeScalarDefault(message): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return field.default_value + return MakeScalarDefault + + +def _ReraiseTypeErrorWithFieldName(message_name, field_name): + """Re-raise the currently-handled TypeError with the field name added.""" + exc = sys.exc_info()[1] + if len(exc.args) == 1 and type(exc) is TypeError: + # simple TypeError; add field name to exception message + exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) + + # re-raise possibly-amended exception with original traceback: + raise exc.with_traceback(sys.exc_info()[2]) + + +def _AddInitMethod(message_descriptor, cls): + """Adds an __init__ method to cls.""" + + def _GetIntegerEnumValue(enum_type, value): + """Convert a string or integer enum value to an integer. + + If the value is a string, it is converted to the enum value in + enum_type with the same name. If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) + """ + if isinstance(value, str): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. 
+ self._unknown_fields = () + # _unknown_field_set is None when empty for efficiency, and will be + # turned into UnknownFieldSet struct if fields are added. + self._unknown_field_set = None # pylint: disable=protected-access + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError('%s() got an unexpected keyword argument "%s"' % + (message_descriptor.name, field_name)) + if field_value is None: + # field=None is the same as no field at all. + continue + if field.label == _FieldDescriptor.LABEL_REPEATED: + copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + copy[key].MergeFrom(field_value[key]) + else: + copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + copy.add(**val) + else: + copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + copy.extend(field_value) + self._fields[field] = copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + copy = field._default_constructor(self) + new_val = field_value + if isinstance(field_value, dict): + new_val = field.message_type._concrete_class(**field_value) + try: + copy.MergeFrom(new_val) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +class _FieldProperty(property): + __slots__ = ('DESCRIPTOR',) + + def __init__(self, descriptor, getter, setter, doc): + property.__init__(self, getter, setter, doc=doc) + self.DESCRIPTOR = descriptor + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + is_proto3 = field.containing_type.syntax == 'proto3' + + def getter(self): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' 
% proto_field_name + + clear_when_set_to_default = is_proto3 and not field.containing_oneof + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + try: + new_value = type_checker.CheckValue(new_value) + except TypeError as e: + raise TypeError( + 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) + if clear_when_set_to_default and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. + if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO(robinson): Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) + + # TODO(amauryfa): Migrate all users of these attributes to functions like + # pool.FindExtensionByNumber(descriptor). + if descriptor.file is not None: + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
+ pool = descriptor.file.pool + cls._extensions_by_number = pool._extensions_by_number[descriptor] + cls._extensions_by_name = pool._extensions_by_name[descriptor] + +def _AddStaticMethods(cls): + # TODO(robinson): This probably needs to be thread-safe(?) + def RegisterExtension(extension_handle): + extension_handle.containing_type = cls.DESCRIPTOR + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. + # pylint: disable=protected-access + cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) + _AttachFieldHelpers(cls, extension_handle) + cls.RegisterExtension = staticmethod(RegisterExtension) + + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + +_PROTO3_ERROR_TEMPLATE = \ + ('Protocol message %s has no non-repeated submessage field "%s" ' + 'nor marked as optional') +_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + is_proto3 = (message_descriptor.syntax == "proto3") + error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and + not field.containing_oneof): + continue + hassable_fields[field.name] = field + + # Has methods are supported for oneof descriptors. + for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError: + raise ValueError(error_msg % (message_descriptor.full_name, field_name)) + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. 
+ if hasattr(self._fields[field], 'InvalidateIterators'): + self._fields[field].InvalidateIterators() + + # Note: If the field is a sub-message, its listener will still point + # at us. That's fine, because the worst than can happen is that it + # will call _Modified() and invalidate our byte size. Big deal. + del self._fields[field] + + if self._oneofs.get(field.containing_oneof, None) is field: + del self._oneofs[field.containing_oneof] + + # Always call _Modified() -- even if nothing was changed, this is + # a mutating method, and thus calling it should cause the field to become + # present in the parent message. + self._Modified() + + cls.ClearField = ClearField + + +def _AddClearExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def ClearExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + + # Similar to ClearField(), above. + if extension_handle in self._fields: + del self._fields[extension_handle] + self._Modified() + cls.ClearExtension = ClearExtension + + +def _AddHasExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def HasExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: + raise KeyError('"%s" is repeated.' % extension_handle.full_name) + + if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(extension_handle) + return value is not None and value._is_present_in_parent + else: + return extension_handle in self._fields + cls.HasExtension = HasExtension + +def _InternalUnpackAny(msg): + """Unpacks Any message and returns the unpacked message. + + This internal method is different from public Any Unpack method which takes + the target message as argument. _InternalUnpackAny method does not have + target message type and need to find the message type in descriptor pool. + + Args: + msg: An Any message to be unpacked. + + Returns: + The unpacked message. + """ + # TODO(amauryfa): Don't use the factory of generated messages. + # To make Any work with custom factories, use the message factory of the + # parent message. + # pylint: disable=g-import-not-at-top + from google.protobuf import symbol_database + factory = symbol_database.Default() + + type_url = msg.type_url + + if not type_url: + return None + + # TODO(haberman): For now we just strip the hostname. Better logic will be + # required. + type_name = type_url.split('/')[-1] + descriptor = factory.pool.FindMessageTypeByName(type_name) + + if descriptor is None: + return None + + message_class = factory.GetPrototype(descriptor) + message = message_class() + + message.ParseFromString(msg.value) + return message + + +def _AddEqualsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __eq__(self, other): + if (not isinstance(other, message_mod.Message) or + other.DESCRIPTOR != self.DESCRIPTOR): + return False + + if self is other: + return True + + if self.DESCRIPTOR.full_name == _AnyFullTypeName: + any_a = _InternalUnpackAny(self) + any_b = _InternalUnpackAny(other) + if any_a and any_b: + return any_a == any_b + + if not self.ListFields() == other.ListFields(): + return False + + # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, + # then use it for the comparison. 
+ unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _BytesForNonRepeatedElement(value, field_number, field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. + """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + size = descriptor.fields_by_name['key']._sizer(self.key) + size += descriptor.fields_by_name['value']._sizer(self.value) + else: + for field_descriptor, field_value in self.ListFields(): + size += field_descriptor._sizer(field_value) + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self, **kwargs): + # Check if the message has all of its required fields set. 
+ if not self.IsInitialized(): + raise message_mod.EncodeError( + 'Message %s is missing required fields: %s' % ( + self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) + return self.SerializePartialToString(**kwargs) + cls.SerializeToString = SerializeToString + + +def _AddSerializePartialToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializePartialToString(self, **kwargs): + out = BytesIO() + self._InternalSerialize(out.write, **kwargs) + return out.getvalue() + cls.SerializePartialToString = SerializePartialToString + + def InternalSerialize(self, write_bytes, deterministic=None): + if deterministic is None: + deterministic = ( + api_implementation.IsPythonDefaultSerializationDeterministic()) + else: + deterministic = bool(deterministic) + + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + descriptor.fields_by_name['key']._encoder( + write_bytes, self.key, deterministic) + descriptor.fields_by_name['value']._encoder( + write_bytes, self.value, deterministic) + else: + for field_descriptor, field_value in self.ListFields(): + field_descriptor._encoder(write_bytes, field_value, deterministic) + for tag_bytes, value_bytes in self._unknown_fields: + write_bytes(tag_bytes) + write_bytes(value_bytes) + cls._InternalSerialize = InternalSerialize + + +def _AddMergeFromStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def MergeFromString(self, serialized): + serialized = memoryview(serialized) + length = len(serialized) + try: + if self._InternalParse(serialized, 0, length) != length: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise message_mod.DecodeError('Unexpected end-group tag.') + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') + except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. + cls.MergeFromString = MergeFromString + + local_ReadTag = decoder.ReadTag + local_SkipField = decoder.SkipField + decoders_by_tag = cls._decoders_by_tag + + def InternalParse(self, buffer, pos, end): + """Create a message from serialized bytes. + + Args: + self: Message, instance of the proto message object. + buffer: memoryview of the serialized data. + pos: int, position to start in the serialized data. + end: int, end position of the serialized data. + + Returns: + Message object. + """ + # Guard against internal misuse, since this function is called internally + # quite extensively, and its easy to accidentally pass bytes. 
+ assert isinstance(buffer, memoryview) + self._Modified() + field_dict = self._fields + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) + if field_decoder is None: + if not self._unknown_fields: # pylint: disable=protected-access + self._unknown_fields = [] # pylint: disable=protected-access + if unknown_field_set is None: + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + # pylint: disable=protected-access + (tag, _) = decoder._DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if field_number == 0: + raise message_mod.DecodeError('Field number 0 is illegal.') + # TODO(jieluo): remove old_pos. + old_pos = new_pos + (data, new_pos) = decoder._DecodeUnknownField( + buffer, new_pos, wire_type) # pylint: disable=protected-access + if new_pos == -1: + return pos + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + # TODO(jieluo): remove _unknown_fields. + new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) + if new_pos == -1: + return pos + self._unknown_fields.append( + (tag_bytes, buffer[old_pos:new_pos].tobytes())) + pos = new_pos + else: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_desc: + self._UpdateOneofState(field_desc) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). + + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type.has_options and + field.message_type.GetOptions().map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". 
+ """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = '(%s)' % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = '%s[%s].' % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = '%s[%d].' % (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + '.' + sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _FullyQualifiedClassName(klass): + module = klass.__module__ + name = getattr(klass, '__qualname__', klass.__name__) + if module in (None, 'builtins', '__builtin__'): + return name + return module + '.' + name + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + 'Parameter to MergeFrom() must be instance of same class: ' + 'expected %s got %s.' % (_FullyQualifiedClassName(cls), + _FullyQualifiedClassName(msg.__class__))) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + # pylint: disable=protected-access + if self._unknown_field_set is None: + self._unknown_field_set = containers.UnknownFieldSet() + self._unknown_field_set._extend(msg._unknown_field_set) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' % oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _Clear(self): + # Clear fields. 
+ self._fields = {} + self._unknown_fields = () + # pylint: disable=protected-access + if self._unknown_field_set is not None: + self._unknown_field_set._clear() + self._unknown_field_set = None + + self._oneofs = {} + self._Modified() + + +def _UnknownFields(self): + if self._unknown_field_set is None: # pylint: disable=protected-access + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + return self._unknown_field_set # pylint: disable=protected-access + + +def _DiscardUnknownFields(self): + self._unknown_fields = [] + self._unknown_field_set = None # pylint: disable=protected-access + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + value[key].DiscardUnknownFields() + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for sub_message in value: + sub_message.DiscardUnknownFields() + else: + value.DiscardUnknownFields() + + +def _SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + _AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + # Adds methods which do not depend on cls. + cls.Clear = _Clear + cls.UnknownFields = _UnknownFields + cls.DiscardUnknownFields = _DiscardUnknownFields + cls._SetListener = _SetListener + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. + + Will also delete currently active field in the oneof, if it is different + from the argument. Does not mark the message as modified. 
+ """ + other_field = self._oneofs.setdefault(field.containing_oneof, field) + if other_field is not field: + del self._fields[other_field] + self._oneofs[field.containing_oneof] = field + + cls._Modified = Modified + cls.SetInParent = Modified + cls._UpdateOneofState = _UpdateOneofState + + +class _Listener(object): + + """MessageListener implementation that a parent message registers with its + child message. + + In order to support semantics like: + + foo.bar.baz.qux = 23 + assert foo.HasField('bar') + + ...child objects must have back references to their parents. + This helper class is at the heart of this support. + """ + + def __init__(self, parent_message): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + """ + # This listener establishes a back reference from a child (contained) object + # to its parent (containing) object. We make this a weak reference to avoid + # creating cyclic garbage when the client finishes with the 'parent' object + # in the tree. + if isinstance(parent_message, weakref.ProxyType): + self._parent_message_weakref = parent_message + else: + self._parent_message_weakref = weakref.proxy(parent_message) + + # As an optimization, we also indicate directly on the listener whether + # or not the parent message is dirty. This way we can avoid traversing + # up the tree in the common case. + self.dirty = False + + def Modified(self): + if self.dirty: + return + try: + # Propagate the signal to our parents iff this is the first field set. + self._parent_message_weakref._Modified() + except ReferenceError: + # We can get here if a client has kept a reference to a child object, + # and is now setting a field on it, but the child's parent has been + # garbage-collected. This is not an error. + pass + + +class _OneofListener(_Listener): + """Special listener implementation for setting composite oneof fields.""" + + def __init__(self, parent_message, field): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + field: The descriptor of the field being set in the parent message. + """ + super(_OneofListener, self).__init__(parent_message) + self._field = field + + def Modified(self): + """Also updates the state of the containing oneof in the parent message.""" + try: + self._parent_message_weakref._UpdateOneofState(self._field) + super(_OneofListener, self).Modified() + except ReferenceError: + pass diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py new file mode 100644 index 0000000000..a53e71fe8e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/type_checkers.py @@ -0,0 +1,435 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides type checking routines. + +This module defines type checking utilities in the forms of dictionaries: + +VALUE_CHECKERS: A dictionary of field types and a value validation object. +TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing + function. +TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization + function. +FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their + corresponding wire types. +TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization + function. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import ctypes +import numbers + +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import descriptor + +_FieldDescriptor = descriptor.FieldDescriptor + + +def TruncateToFourByteFloat(original): + return ctypes.c_float(original).value + + +def ToShortestFloat(original): + """Returns the shortest float that has same value in wire.""" + # All 4 byte floats have between 6 and 9 significant digits, so we + # start with 6 as the lower bound. + # It has to be iterative because use '.9g' directly can not get rid + # of the noises for most values. For example if set a float_field=0.9 + # use '.9g' will print 0.899999976. + precision = 6 + rounded = float('{0:.{1}g}'.format(original, precision)) + while TruncateToFourByteFloat(rounded) != original: + precision += 1 + rounded = float('{0:.{1}g}'.format(original, precision)) + return rounded + + +def SupportsOpenEnums(field_descriptor): + return field_descriptor.containing_type.syntax == 'proto3' + + +def GetTypeChecker(field): + """Returns a type checker for a message field of the specified types. + + Args: + field: FieldDescriptor object for this field. + + Returns: + An instance of TypeChecker which can be used to verify the types + of values assigned to a field of the specified type. + """ + if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and + field.type == _FieldDescriptor.TYPE_STRING): + return UnicodeValueChecker() + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + if SupportsOpenEnums(field): + # When open enums are supported, any int32 can be assigned. + return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] + else: + return EnumValueChecker(field.enum_type) + return _VALUE_CHECKERS[field.cpp_type] + + +# None of the typecheckers below make any attempt to guard against people +# subclassing builtin types and doing weird things. 
We're not trying to +# protect against malicious clients here, just people accidentally shooting +# themselves in the foot in obvious ways. +class TypeChecker(object): + + """Type checker used to catch type errors as early as possible + when the client is setting scalar fields in protocol messages. + """ + + def __init__(self, *acceptable_types): + self._acceptable_types = acceptable_types + + def CheckValue(self, proposed_value): + """Type check the provided value and return it. + + The returned value might have been normalized to another type. + """ + if not isinstance(proposed_value, self._acceptable_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), self._acceptable_types)) + raise TypeError(message) + return proposed_value + + +class TypeCheckerWithDefault(TypeChecker): + + def __init__(self, default_value, *acceptable_types): + TypeChecker.__init__(self, *acceptable_types) + self._default_value = default_value + + def DefaultValue(self): + return self._default_value + + +class BoolValueChecker(object): + """Type checker used for bool fields.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bool, int))) + raise TypeError(message) + return bool(proposed_value) + + def DefaultValue(self): + return False + + +# IntValueChecker and its subclasses perform integer type-checks +# and bounds-checks. +class IntValueChecker(object): + + """Checker used for integer fields. Performs type-check and range check.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + + if not self._MIN <= int(proposed_value) <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) + # We force all values to int to make alternate implementations where the + # distinction is more significant (e.g. the C++ implementation) simpler. + proposed_value = int(proposed_value) + return proposed_value + + def DefaultValue(self): + return 0 + + +class EnumValueChecker(object): + + """Checker used for enum fields. Performs type-check and range check.""" + + def __init__(self, enum_type): + self._enum_type = enum_type + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, numbers.Integral): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + if int(proposed_value) not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) + return proposed_value + + def DefaultValue(self): + return self._enum_type.values[0].number + + +class UnicodeValueChecker(object): + + """Checker used for string fields. + + Always returns a unicode value, even if the input is of type str. + """ + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, (bytes, str)): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bytes, str))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
+ if isinstance(proposed_value, bytes): + try: + proposed_value = proposed_value.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' + 'encoding. Non-UTF-8 strings must be converted to ' + 'unicode objects before being added.' % + (proposed_value)) + else: + try: + proposed_value.encode('utf8') + except UnicodeEncodeError: + raise ValueError('%.1024r isn\'t a valid unicode string and ' + 'can\'t be encoded in UTF-8.'% + (proposed_value)) + + return proposed_value + + def DefaultValue(self): + return u"" + + +class Int32ValueChecker(IntValueChecker): + # We're sure to use ints instead of longs here since comparison may be more + # efficient. + _MIN = -2147483648 + _MAX = 2147483647 + + +class Uint32ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 32) - 1 + + +class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 + + +class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 + + +# The max 4 bytes float is about 3.4028234663852886e+38 +_FLOAT_MAX = float.fromhex('0x1.fffffep+127') +_FLOAT_MIN = -_FLOAT_MAX +_INF = float('inf') +_NEG_INF = float('-inf') + + +class DoubleValueChecker(object): + """Checker used for double fields. + + Performs type-check and range check. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + if (not hasattr(proposed_value, '__float__') and + not hasattr(proposed_value, '__index__')) or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: int, float' % + (proposed_value, type(proposed_value))) + raise TypeError(message) + return float(proposed_value) + + def DefaultValue(self): + return 0.0 + + +class FloatValueChecker(DoubleValueChecker): + """Checker used for float fields. + + Performs type-check and range check. + + Values exceeding a 32-bit float will be converted to inf/-inf. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + converted_value = super().CheckValue(proposed_value) + # This inf rounding matches the C++ proto SafeDoubleToFloat logic. + if converted_value > _FLOAT_MAX: + return _INF + if converted_value < _FLOAT_MIN: + return _NEG_INF + + return TruncateToFourByteFloat(converted_value) + +# Type-checkers for all scalar CPPTYPEs. +_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), + _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), + _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), + _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), +} + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". 
+TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. +TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. +TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. +FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py new file mode 100644 index 0000000000..b581ab750a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/well_known_types.py @@ -0,0 +1,878 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains well known classes. + +This files defines well known classes which need extra maintenance including: + - Any + - Duration + - FieldMask + - Struct + - Timestamp +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +import calendar +import collections.abc +import datetime + +from google.protobuf.descriptor import FieldDescriptor + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_NANOS_PER_SECOND = 1000000000 +_NANOS_PER_MILLISECOND = 1000000 +_NANOS_PER_MICROSECOND = 1000 +_MILLIS_PER_SECOND = 1000 +_MICROS_PER_SECOND = 1000000 +_SECONDS_PER_DAY = 24 * 3600 +_DURATION_SECONDS_MAX = 315576000000 + + +class Any(object): + """Class for Any Message type.""" + + __slots__ = () + + def Pack(self, msg, type_url_prefix='type.googleapis.com/', + deterministic=None): + """Packs the specified message into current Any message.""" + if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': + self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + else: + self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + self.value = msg.SerializeToString(deterministic=deterministic) + + def Unpack(self, msg): + """Unpacks the current Any message into specified message.""" + descriptor = msg.DESCRIPTOR + if not self.Is(descriptor): + return False + msg.ParseFromString(self.value) + return True + + def TypeName(self): + """Returns the protobuf type name of the inner message.""" + # Only last part is to be used: b/25630112 + return self.type_url.split('/')[-1] + + def Is(self, descriptor): + """Checks if this Any represents the given protobuf type.""" + return '/' in self.type_url and self.TypeName() == descriptor.full_name + + +_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) +_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( + 0, tz=datetime.timezone.utc) + + +class Timestamp(object): + """Class for Timestamp message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Timestamp to RFC 3339 date string format. + + Returns: + A string converted from timestamp. The string is always Z-normalized + and uses 3, 6 or 9 fractional digits as required to represent the + exact time. Example of the return format: '1972-01-01T10:00:20.021Z' + """ + nanos = self.nanos % _NANOS_PER_SECOND + total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND + seconds = total_sec % _SECONDS_PER_DAY + days = (total_sec - seconds) // _SECONDS_PER_DAY + dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) + + result = dt.isoformat() + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 'Z' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. 
+ return result + '.%03dZ' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06dZ' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09dZ' % nanos + + def FromJsonString(self, value): + """Parse a RFC 3339 date string format to Timestamp. + + Args: + value: A date string. Any fractional digits (or none) and any offset are + accepted as long as they fit into nano-seconds precision. + Example of accepted format: '1972-01-01T10:00:20.021-05:00' + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) + timezone_offset = value.find('Z') + if timezone_offset == -1: + timezone_offset = value.find('+') + if timezone_offset == -1: + timezone_offset = value.rfind('-') + if timezone_offset == -1: + raise ValueError( + 'Failed to parse timestamp: missing valid timezone offset.') + time_value = value[0:timezone_offset] + # Parse datetime and nanos. + point_position = time_value.find('.') + if point_position == -1: + second_value = time_value + nano_value = '' + else: + second_value = time_value[:point_position] + nano_value = time_value[point_position + 1:] + if 't' in second_value: + raise ValueError( + 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' + 'lowercase \'t\' is not accepted'.format(second_value)) + date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) + td = date_object - datetime.datetime(1970, 1, 1) + seconds = td.seconds + td.days * _SECONDS_PER_DAY + if len(nano_value) > 9: + raise ValueError( + 'Failed to parse Timestamp: nanos {0} more than ' + '9 fractional digits.'.format(nano_value)) + if nano_value: + nanos = round(float('0.' + nano_value) * 1e9) + else: + nanos = 0 + # Parse timezone offsets. 
+ if value[timezone_offset] == 'Z': + if len(value) != timezone_offset + 1: + raise ValueError('Failed to parse timestamp: invalid trailing' + ' data {0}.'.format(value)) + else: + timezone = value[timezone_offset:] + pos = timezone.find(':') + if pos == -1: + raise ValueError( + 'Invalid timezone offset value: {0}.'.format(timezone)) + if timezone[0] == '+': + seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + else: + seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + # Set seconds and nanos + self.seconds = int(seconds) + self.nanos = int(nanos) + + def GetCurrentTime(self): + """Get the current UTC into Timestamp.""" + self.FromDatetime(datetime.datetime.utcnow()) + + def ToNanoseconds(self): + """Converts Timestamp to nanoseconds since epoch.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts Timestamp to microseconds since epoch.""" + return (self.seconds * _MICROS_PER_SECOND + + self.nanos // _NANOS_PER_MICROSECOND) + + def ToMilliseconds(self): + """Converts Timestamp to milliseconds since epoch.""" + return (self.seconds * _MILLIS_PER_SECOND + + self.nanos // _NANOS_PER_MILLISECOND) + + def ToSeconds(self): + """Converts Timestamp to seconds since epoch.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds since epoch to Timestamp.""" + self.seconds = nanos // _NANOS_PER_SECOND + self.nanos = nanos % _NANOS_PER_SECOND + + def FromMicroseconds(self, micros): + """Converts microseconds since epoch to Timestamp.""" + self.seconds = micros // _MICROS_PER_SECOND + self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND + + def FromMilliseconds(self, millis): + """Converts milliseconds since epoch to Timestamp.""" + self.seconds = millis // _MILLIS_PER_SECOND + self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND + + def FromSeconds(self, seconds): + """Converts seconds since epoch to Timestamp.""" + self.seconds = seconds + self.nanos = 0 + + def ToDatetime(self, tzinfo=None): + """Converts Timestamp to a datetime. + + Args: + tzinfo: A datetime.tzinfo subclass; defaults to None. + + Returns: + If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone + information, i.e. not aware that it's UTC). + + Otherwise, returns a timezone-aware datetime in the input timezone. + """ + delta = datetime.timedelta( + seconds=self.seconds, + microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) + if tzinfo is None: + return _EPOCH_DATETIME_NAIVE + delta + else: + return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta + + def FromDatetime(self, dt): + """Converts datetime to Timestamp. + + Args: + dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. + """ + # Using this guide: http://wiki.python.org/moin/WorkingWithTime + # And this conversion guide: http://docs.python.org/library/time.html + + # Turn the date parameter into a tuple (struct_time) that can then be + # manipulated into a long value of seconds. During the conversion from + # struct_time to long, the source date in UTC, and so it follows that the + # correct transformation is calendar.timegm() + self.seconds = calendar.timegm(dt.utctimetuple()) + self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND + + +class Duration(object): + """Class for Duration message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Duration to string format. + + Returns: + A string converted from self. 
The string format will contains + 3, 6, or 9 fractional digits depending on the precision required to + represent the exact Duration value. For example: "1s", "1.010s", + "1.000000100s", "-3.100s" + """ + _CheckDurationValid(self.seconds, self.nanos) + if self.seconds < 0 or self.nanos < 0: + result = '-' + seconds = - self.seconds + int((0 - self.nanos) // 1e9) + nanos = (0 - self.nanos) % 1e9 + else: + result = '' + seconds = self.seconds + int(self.nanos // 1e9) + nanos = self.nanos % 1e9 + result += '%d' % seconds + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 's' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. + return result + '.%03ds' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06ds' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09ds' % nanos + + def FromJsonString(self, value): + """Converts a string to Duration. + + Args: + value: A string to be converted. The string must end with 's'. Any + fractional digits (or none) are accepted as long as they fit into + precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Duration JSON value not a string: {!r}'.format(value)) + if len(value) < 1 or value[-1] != 's': + raise ValueError( + 'Duration must end with letter "s": {0}.'.format(value)) + try: + pos = value.find('.') + if pos == -1: + seconds = int(value[:-1]) + nanos = 0 + else: + seconds = int(value[:pos]) + if value[0] == '-': + nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) + else: + nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) + _CheckDurationValid(seconds, nanos) + self.seconds = seconds + self.nanos = nanos + except ValueError as e: + raise ValueError( + 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) + + def ToNanoseconds(self): + """Converts a Duration to nanoseconds.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts a Duration to microseconds.""" + micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) + return self.seconds * _MICROS_PER_SECOND + micros + + def ToMilliseconds(self): + """Converts a Duration to milliseconds.""" + millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) + return self.seconds * _MILLIS_PER_SECOND + millis + + def ToSeconds(self): + """Converts a Duration to seconds.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds to Duration.""" + self._NormalizeDuration(nanos // _NANOS_PER_SECOND, + nanos % _NANOS_PER_SECOND) + + def FromMicroseconds(self, micros): + """Converts microseconds to Duration.""" + self._NormalizeDuration( + micros // _MICROS_PER_SECOND, + (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) + + def FromMilliseconds(self, millis): + """Converts milliseconds to Duration.""" + self._NormalizeDuration( + millis // _MILLIS_PER_SECOND, + (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) + + def FromSeconds(self, seconds): + """Converts seconds to Duration.""" + self.seconds = seconds + self.nanos = 0 + + def ToTimedelta(self): + """Converts Duration to timedelta.""" + return datetime.timedelta( + seconds=self.seconds, microseconds=_RoundTowardZero( + self.nanos, _NANOS_PER_MICROSECOND)) + + def FromTimedelta(self, td): + """Converts timedelta to Duration.""" + 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, + td.microseconds * _NANOS_PER_MICROSECOND) + + def _NormalizeDuration(self, seconds, nanos): + """Set Duration by seconds and nanos.""" + # Force nanos to be negative if the duration is negative. + if seconds < 0 and nanos > 0: + seconds += 1 + nanos -= _NANOS_PER_SECOND + self.seconds = seconds + self.nanos = nanos + + +def _CheckDurationValid(seconds, nanos): + if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: + raise ValueError( + 'Duration is not valid: Seconds {0} must be in range ' + '[-315576000000, 315576000000].'.format(seconds)) + if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: + raise ValueError( + 'Duration is not valid: Nanos {0} must be in range ' + '[-999999999, 999999999].'.format(nanos)) + if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): + raise ValueError( + 'Duration is not valid: Sign mismatch.') + + +def _RoundTowardZero(value, divider): + """Truncates the remainder part after division.""" + # For some languages, the sign of the remainder is implementation + # dependent if any of the operands is negative. Here we enforce + # "rounded toward zero" semantics. For example, for (-5) / 2 an + # implementation may give -3 as the result with the remainder being + # 1. This function ensures we always return -2 (closer to zero). + result = value // divider + remainder = value % divider + if result < 0 and remainder > 0: + return result + 1 + else: + return result + + +class FieldMask(object): + """Class for FieldMask message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts FieldMask to string according to proto3 JSON spec.""" + camelcase_paths = [] + for path in self.paths: + camelcase_paths.append(_SnakeCaseToCamelCase(path)) + return ','.join(camelcase_paths) + + def FromJsonString(self, value): + """Converts string to FieldMask according to proto3 JSON spec.""" + if not isinstance(value, str): + raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) + self.Clear() + if value: + for path in value.split(','): + self.paths.append(_CamelCaseToSnakeCase(path)) + + def IsValidForDescriptor(self, message_descriptor): + """Checks whether the FieldMask is valid for Message Descriptor.""" + for path in self.paths: + if not _IsValidPath(message_descriptor, path): + return False + return True + + def AllFieldsFromDescriptor(self, message_descriptor): + """Gets all direct fields of Message Descriptor to FieldMask.""" + self.Clear() + for field in message_descriptor.fields: + self.paths.append(field.name) + + def CanonicalFormFromMask(self, mask): + """Converts a FieldMask to the canonical form. + + Removes paths that are covered by another path. For example, + "foo.bar" is covered by "foo" and will be removed if "foo" + is also in the FieldMask. Then sorts all paths in alphabetical order. + + Args: + mask: The original FieldMask to be converted. 
+ """ + tree = _FieldMaskTree(mask) + tree.ToFieldMask(self) + + def Union(self, mask1, mask2): + """Merges mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + tree.MergeFromFieldMask(mask2) + tree.ToFieldMask(self) + + def Intersect(self, mask1, mask2): + """Intersects mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + intersection = _FieldMaskTree() + for path in mask2.paths: + tree.IntersectPath(path, intersection) + intersection.ToFieldMask(self) + + def MergeMessage( + self, source, destination, + replace_message_field=False, replace_repeated_field=False): + """Merges fields specified in FieldMask from source to destination. + + Args: + source: Source message. + destination: The destination message to be merged into. + replace_message_field: Replace message field if True. Merge message + field if False. + replace_repeated_field: Replace repeated field if True. Append + elements of repeated field if False. + """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name.get(name) + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +def _SnakeCaseToCamelCase(path_name): + """Converts a path name from snake_case to camelCase.""" + result = [] + after_underscore = False + for c in path_name: + if c.isupper(): + raise ValueError( + 'Fail to print FieldMask to Json string: Path name ' + '{0} must not contain uppercase letters.'.format(path_name)) + if after_underscore: + if c.islower(): + result.append(c.upper()) + after_underscore = False + else: + raise ValueError( + 'Fail to print FieldMask to Json string: The ' + 'character after a "_" must be a lowercase letter ' + 'in path name {0}.'.format(path_name)) + elif c == '_': + after_underscore = True + else: + result += c + + if after_underscore: + raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' + 'in path name {0}.'.format(path_name)) + return ''.join(result) + + +def _CamelCaseToSnakeCase(path_name): + """Converts a field name from camelCase to snake_case.""" + result = [] + for c in path_name: + if c == '_': + raise ValueError('Fail to parse FieldMask: Path name ' + '{0} must not contain "_"s.'.format(path_name)) + if c.isupper(): + result += '_' + result += c.lower() + else: + result += c + return ''.join(result) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. 
+ """ + + __slots__ = ('_root',) + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. + """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. + if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. 
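+      # e.g. a mask path 'foo.bar' is only meaningful when 'foo' is a
+      # non-repeated message field; anything else is rejected below.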
+ if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + if source.HasField(name): + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + repeated_destination.MergeFrom(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node and prefix: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' + name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) + + +def _SetStructValue(struct_value, value): + if value is None: + struct_value.null_value = 0 + elif isinstance(value, bool): + # Note: this check must come before the number check because in Python + # True and False are also considered numbers. + struct_value.bool_value = value + elif isinstance(value, str): + struct_value.string_value = value + elif isinstance(value, (int, float)): + struct_value.number_value = value + elif isinstance(value, (dict, Struct)): + struct_value.struct_value.Clear() + struct_value.struct_value.update(value) + elif isinstance(value, (list, ListValue)): + struct_value.list_value.Clear() + struct_value.list_value.extend(value) + else: + raise ValueError('Unexpected type') + + +def _GetStructValue(struct_value): + which = struct_value.WhichOneof('kind') + if which == 'struct_value': + return struct_value.struct_value + elif which == 'null_value': + return None + elif which == 'number_value': + return struct_value.number_value + elif which == 'string_value': + return struct_value.string_value + elif which == 'bool_value': + return struct_value.bool_value + elif which == 'list_value': + return struct_value.list_value + elif which is None: + raise ValueError('Value not set') + + +class Struct(object): + """Class for Struct message type.""" + + __slots__ = () + + def __getitem__(self, key): + return _GetStructValue(self.fields[key]) + + def __contains__(self, item): + return item in self.fields + + def __setitem__(self, key, value): + _SetStructValue(self.fields[key], value) + + def __delitem__(self, key): + del self.fields[key] + + def __len__(self): + return len(self.fields) + + def __iter__(self): + return iter(self.fields) + + def keys(self): # pylint: disable=invalid-name + return self.fields.keys() + + def values(self): # pylint: disable=invalid-name + return [self[key] for key in self] + + def items(self): # pylint: disable=invalid-name + return [(key, self[key]) for key in self] + + def get_or_create_list(self, key): + """Returns a list for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('list_value'): + # Clear will mark list_value modified which will indeed create a list. 
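+      # The 'list_value' field lives in Value's 'kind' oneof, so HasField()
+      # stays False until something marks it present.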
+ self.fields[key].list_value.Clear() + return self.fields[key].list_value + + def get_or_create_struct(self, key): + """Returns a struct for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('struct_value'): + # Clear will mark struct_value modified which will indeed create a struct. + self.fields[key].struct_value.Clear() + return self.fields[key].struct_value + + def update(self, dictionary): # pylint: disable=invalid-name + for key, value in dictionary.items(): + _SetStructValue(self.fields[key], value) + +collections.abc.MutableMapping.register(Struct) + + +class ListValue(object): + """Class for ListValue message type.""" + + __slots__ = () + + def __len__(self): + return len(self.values) + + def append(self, value): + _SetStructValue(self.values.add(), value) + + def extend(self, elem_seq): + for value in elem_seq: + self.append(value) + + def __getitem__(self, index): + """Retrieves item by the specified index.""" + return _GetStructValue(self.values.__getitem__(index)) + + def __setitem__(self, index, value): + _SetStructValue(self.values.__getitem__(index), value) + + def __delitem__(self, key): + del self.values[key] + + def items(self): + for i in range(len(self)): + yield self[i] + + def add_struct(self): + """Appends and returns a struct value as the next value in the list.""" + struct_value = self.values.add().struct_value + # Clear will mark struct_value modified which will indeed create a struct. + struct_value.Clear() + return struct_value + + def add_list(self): + """Appends and returns a list value as the next value in the list.""" + list_value = self.values.add().list_value + # Clear will mark list_value modified which will indeed create a list. + list_value.Clear() + return list_value + +collections.abc.MutableSequence.register(ListValue) + + +WKTBASES = { + 'google.protobuf.Any': Any, + 'google.protobuf.Duration': Duration, + 'google.protobuf.FieldMask': FieldMask, + 'google.protobuf.ListValue': ListValue, + 'google.protobuf.Struct': Struct, + 'google.protobuf.Timestamp': Timestamp, +} diff --git a/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py b/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py new file mode 100644 index 0000000000..883f525585 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/internal/wire_format.py @@ -0,0 +1,268 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Constants and static functions to support protocol buffer wire format."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import struct
+from google.protobuf import descriptor
+from google.protobuf import message
+
+
+TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
+TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7
+
+# These numbers identify the wire type of a protocol buffer value.
+# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
+# tag-and-type to store one of these WIRETYPE_* constants.
+# These values must match WireType enum in google/protobuf/wire_format.h.
+WIRETYPE_VARINT = 0
+WIRETYPE_FIXED64 = 1
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+WIRETYPE_END_GROUP = 4
+WIRETYPE_FIXED32 = 5
+_WIRETYPE_MAX = 5
+
+
+# Bounds for various integer types.
+INT32_MAX = int((1 << 31) - 1)
+INT32_MIN = int(-(1 << 31))
+UINT32_MAX = (1 << 32) - 1
+
+INT64_MAX = (1 << 63) - 1
+INT64_MIN = -(1 << 63)
+UINT64_MAX = (1 << 64) - 1
+
+# "struct" format strings that will encode/decode the specified formats.
+FORMAT_UINT32_LITTLE_ENDIAN = '<I'
+FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
+FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
+FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
+
+
+# We'll have to provide alternate implementations of AppendLittleEndian*() on
+# any architectures where these checks fail.
+if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
+  raise AssertionError('Format "I" is not a 32-bit number.')
+if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
+  raise AssertionError('Format "Q" is not a 64-bit number.')
+
+
+def PackTag(field_number, wire_type):
+  """Returns an unsigned 32-bit integer that encodes the field number and
+  wire type information in standard protocol message wire format.
+
+  Args:
+    field_number: Expected to be an integer in the range [1, 1 << 29)
+    wire_type: One of the WIRETYPE_* constants.
+  """
+  if not 0 <= wire_type <= _WIRETYPE_MAX:
+    raise message.EncodeError('Unknown wire type: %d' % wire_type)
+  return (field_number << TAG_TYPE_BITS) | wire_type
+
+
+def UnpackTag(tag):
+  """The inverse of PackTag().  Given an unsigned 32-bit number,
+  returns a (field_number, wire_type) tuple.
+  """
+  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
+
+
+def ZigZagEncode(value):
+  """ZigZag Transform: Encodes signed integers so that they can be
+  effectively used with varint encoding. See wire_format.h for
+  more details.
+  """
+  if value >= 0:
+    return value << 1
+  return (value << 1) ^ (~0)
+
+
+def ZigZagDecode(value):
+  """Inverse of ZigZagEncode()."""
+  if not value & 0x1:
+    return value >> 1
+  return (value >> 1) ^ (~0)
+
+
+
+# The *ByteSize() functions below return the number of bytes required to
+# serialize "field number + type" information and then serialize the value.
+
+
+def Int32ByteSize(field_number, int32):
+  return Int64ByteSize(field_number, int32)
+
+
+def Int32ByteSizeNoTag(int32):
+  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
+
+
+def Int64ByteSize(field_number, int64):
+  # Have to convert to uint before calling UInt64ByteSize().
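+  # Masking with 0xffffffffffffffff maps a negative int64 to its
+  # two's-complement uint64 encoding (e.g. -1 -> 0xffffffffffffffff).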
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) + + +def UInt32ByteSize(field_number, uint32): + return UInt64ByteSize(field_number, uint32) + + +def UInt64ByteSize(field_number, uint64): + return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) + + +def SInt32ByteSize(field_number, int32): + return UInt32ByteSize(field_number, ZigZagEncode(int32)) + + +def SInt64ByteSize(field_number, int64): + return UInt64ByteSize(field_number, ZigZagEncode(int64)) + + +def Fixed32ByteSize(field_number, fixed32): + return TagByteSize(field_number) + 4 + + +def Fixed64ByteSize(field_number, fixed64): + return TagByteSize(field_number) + 8 + + +def SFixed32ByteSize(field_number, sfixed32): + return TagByteSize(field_number) + 4 + + +def SFixed64ByteSize(field_number, sfixed64): + return TagByteSize(field_number) + 8 + + +def FloatByteSize(field_number, flt): + return TagByteSize(field_number) + 4 + + +def DoubleByteSize(field_number, double): + return TagByteSize(field_number) + 8 + + +def BoolByteSize(field_number, b): + return TagByteSize(field_number) + 1 + + +def EnumByteSize(field_number, enum): + return UInt32ByteSize(field_number, enum) + + +def StringByteSize(field_number, string): + return BytesByteSize(field_number, string.encode('utf-8')) + + +def BytesByteSize(field_number, b): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(len(b)) + + len(b)) + + +def GroupByteSize(field_number, message): + return (2 * TagByteSize(field_number) # START and END group. + + message.ByteSize()) + + +def MessageByteSize(field_number, message): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(message.ByteSize()) + + message.ByteSize()) + + +def MessageSetItemByteSize(field_number, msg): + # First compute the sizes of the tags. + # There are 2 tags for the beginning and ending of the repeated group, that + # is field number 1, one with field number 2 (type_id) and one with field + # number 3 (message). + total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) + + # Add the number of bytes for type_id. + total_size += _VarUInt64ByteSizeNoTag(field_number) + + message_size = msg.ByteSize() + + # The number of bytes for encoding the length of the message. + total_size += _VarUInt64ByteSizeNoTag(message_size) + + # The size of the message. + total_size += message_size + return total_size + + +def TagByteSize(field_number): + """Returns the bytes required to serialize a tag with this field number.""" + # Just pass in type 0, since the type won't affect the tag+type size. + return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) + + +# Private helper function for the *ByteSize() functions above. + +def _VarUInt64ByteSizeNoTag(uint64): + """Returns the number of bytes required to serialize a single varint + using boundary value comparisons. (unrolled loop optimization -WPierce) + uint64 must be unsigned. 
+ """ + if uint64 <= 0x7f: return 1 + if uint64 <= 0x3fff: return 2 + if uint64 <= 0x1fffff: return 3 + if uint64 <= 0xfffffff: return 4 + if uint64 <= 0x7ffffffff: return 5 + if uint64 <= 0x3ffffffffff: return 6 + if uint64 <= 0x1ffffffffffff: return 7 + if uint64 <= 0xffffffffffffff: return 8 + if uint64 <= 0x7fffffffffffffff: return 9 + if uint64 > UINT64_MAX: + raise message.EncodeError('Value out of range: %d' % uint64) + return 10 + + +NON_PACKABLE_TYPES = ( + descriptor.FieldDescriptor.TYPE_STRING, + descriptor.FieldDescriptor.TYPE_GROUP, + descriptor.FieldDescriptor.TYPE_MESSAGE, + descriptor.FieldDescriptor.TYPE_BYTES +) + + +def IsTypePackable(field_type): + """Return true iff packable = true is valid for fields of this type. + + Args: + field_type: a FieldDescriptor::Type value. + + Returns: + True iff fields of this type are packable. + """ + return field_type not in NON_PACKABLE_TYPES diff --git a/openpype/hosts/hiero/vendor/google/protobuf/json_format.py b/openpype/hosts/hiero/vendor/google/protobuf/json_format.py new file mode 100644 index 0000000000..5024ed89d7 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/json_format.py @@ -0,0 +1,912 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in JSON format. + +Simple usage example: + + # Create a proto object and serialize it to a json format string. + message = my_proto_pb2.MyMessage(foo='bar') + json_string = json_format.MessageToJson(message) + + # Parse a json format string to proto object. 
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + + +import base64 +from collections import OrderedDict +import json +import math +from operator import methodcaller +import re +import sys + +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import symbol_database + + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, + descriptor.FieldDescriptor.CPPTYPE_UINT32, + descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, + descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) +_INFINITY = 'Infinity' +_NEG_INFINITY = '-Infinity' +_NAN = 'NaN' + +_UNPAIRED_SURROGATE_PATTERN = re.compile( + u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: + raise ParseError('Message too deep. Max recursion depth is {0}'.format( + self.max_recursion_depth)) + message_descriptor = message.DESCRIPTOR + full_name = message_descriptor.full_name + if not path: + path = message_descriptor.name + if _IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value, message, path) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) + else: + self._ConvertFieldValuePair(value, message, path) + self.recursion_depth -= 1 + + def _ConvertFieldValuePair(self, js, message, path): + """Convert field value pairs into regular message. + + Args: + js: A JSON object to convert the field value pairs. + message: A regular protocol message to record the data. + path: parent path to log parse error info. + + Raises: + ParseError: In case of problems converting. + """ + names = [] + message_descriptor = message.DESCRIPTOR + fields_by_json_name = dict((f.json_name, f) + for f in message_descriptor.fields) + for name in js: + try: + field = fields_by_json_name.get(name, None) + if not field: + field = message_descriptor.fields_by_name.get(name, None) + if not field and _VALID_EXTENSION_NAME.match(name): + if not message_descriptor.is_extendable: + raise ParseError( + 'Message type {0} does not have extensions at {1}'.format( + message_descriptor.full_name, path)) + identifier = name[1:-1] # strip [] brackets + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + # Try looking for extension by the message type name, dropping the + # field name following the final . separator in full_name. + identifier = '.'.join(identifier.split('.')[:-1]) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + if self.ignore_unknown_fields: + continue + raise ParseError( + ('Message type "{0}" has no field named "{1}" at "{2}".\n' + ' Available Fields(except extensions): "{3}"').format( + message_descriptor.full_name, name, path, + [f.json_name for f in message_descriptor.fields])) + if name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" fields at "{2}".'.format( + message.DESCRIPTOR.full_name, name, path)) + names.append(name) + value = js[name] + # Check no other oneof field is parsed. 
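+        # Two JSON keys that resolve to members of the same oneof group are
+        # rejected as ambiguous.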
+ if field.containing_oneof is not None and value is not None: + oneof_name = field.containing_oneof.name + if oneof_name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" oneof fields at "{2}".'.format( + message.DESCRIPTOR.full_name, oneof_name, + path)) + names.append(oneof_name) + + if value is None: + if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.message_type.full_name == 'google.protobuf.Value'): + sub_message = getattr(message, field.name) + sub_message.null_value = 0 + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM + and field.enum_type.full_name == 'google.protobuf.NullValue'): + setattr(message, field.name, 0) + else: + message.ClearField(field.name) + continue + + # Parse field value. + if _IsMapEntry(field): + message.ClearField(field.name) + self._ConvertMapFieldValue(value, message, field, + '{0}.{1}'.format(path, name)) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + message.ClearField(field.name) + if not isinstance(value, list): + raise ParseError('repeated field {0} must be in [] which is ' + '{1} at {2}'.format(name, value, path)) + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + # Repeated message field. + for index, item in enumerate(value): + sub_message = getattr(message, field.name).add() + # None is a null_value in Value. + if (item is None and + sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + self.ConvertMessage(item, sub_message, + '{0}.{1}[{2}]'.format(path, name, index)) + else: + # Repeated scalar field. + for index, item in enumerate(value): + if item is None: + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + getattr(message, field.name).append( + _ConvertScalarFieldValue( + item, field, '{0}.{1}[{2}]'.format(path, name, index))) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + sub_message = message.Extensions[field] + else: + sub_message = getattr(message, field.name) + sub_message.SetInParent() + self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) + else: + if field.is_extension: + message.Extensions[field] = _ConvertScalarFieldValue( + value, field, '{0}.{1}'.format(path, name)) + else: + setattr( + message, field.name, + _ConvertScalarFieldValue(value, field, + '{0}.{1}'.format(path, name))) + except ParseError as e: + if field and field.containing_oneof is None: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + else: + raise ParseError(str(e)) + except ValueError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + except TypeError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + + def _ConvertAnyMessage(self, value, message, path): + """Convert a JSON representation into Any message.""" + if isinstance(value, dict) and not value: + return + try: + type_url = value['@type'] + except KeyError: + raise ParseError( + '@type is missing when parsing any message at {0}'.format(path)) + + try: + sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) + except TypeError as e: + raise ParseError('{0} at {1}'.format(e, path)) + message_descriptor = sub_message.DESCRIPTOR + full_name = message_descriptor.full_name + if 
_IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value['value'], sub_message, + '{0}.value'.format(path)) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, + '{0}.value'.format(path))( + self) + else: + del value['@type'] + self._ConvertFieldValuePair(value, sub_message, path) + value['@type'] = type_url + # Sets Any message + message.value = sub_message.SerializeToString() + message.type_url = type_url + + def _ConvertGenericMessage(self, value, message, path): + """Convert a JSON representation into message with FromJsonString.""" + # Duration, Timestamp, FieldMask have a FromJsonString method to do the + # conversion. Users can also call the method directly. + try: + message.FromJsonString(value) + except ValueError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + def _ConvertValueMessage(self, value, message, path): + """Convert a JSON representation into Value message.""" + if isinstance(value, dict): + self._ConvertStructMessage(value, message.struct_value, path) + elif isinstance(value, list): + self._ConvertListValueMessage(value, message.list_value, path) + elif value is None: + message.null_value = 0 + elif isinstance(value, bool): + message.bool_value = value + elif isinstance(value, str): + message.string_value = value + elif isinstance(value, _INT_OR_FLOAT): + message.number_value = value + else: + raise ParseError('Value {0} has unexpected type {1} at {2}'.format( + value, type(value), path)) + + def _ConvertListValueMessage(self, value, message, path): + """Convert a JSON representation into ListValue message.""" + if not isinstance(value, list): + raise ParseError('ListValue must be in [] which is {0} at {1}'.format( + value, path)) + message.ClearField('values') + for index, item in enumerate(value): + self._ConvertValueMessage(item, message.values.add(), + '{0}[{1}]'.format(path, index)) + + def _ConvertStructMessage(self, value, message, path): + """Convert a JSON representation into Struct message.""" + if not isinstance(value, dict): + raise ParseError('Struct must be in a dict which is {0} at {1}'.format( + value, path)) + # Clear will mark the struct as modified so it will be created even if + # there are no values. + message.Clear() + for key in value: + self._ConvertValueMessage(value[key], message.fields[key], + '{0}.{1}'.format(path, key)) + return + + def _ConvertWrapperMessage(self, value, message, path): + """Convert a JSON representation into Wrapper message.""" + field = message.DESCRIPTOR.fields_by_name['value'] + setattr( + message, 'value', + _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) + + def _ConvertMapFieldValue(self, value, message, field, path): + """Convert map field value for a message map field. + + Args: + value: A JSON object to convert the map field value. + message: A protocol message to record the converted data. + field: The descriptor of the map field to be converted. + path: parent path to log parse error info. + + Raises: + ParseError: In case of convert problems. 
+ """ + if not isinstance(value, dict): + raise ParseError( + 'Map field {0} must be in a dict which is {1} at {2}'.format( + field.name, value, path)) + key_field = field.message_type.fields_by_name['key'] + value_field = field.message_type.fields_by_name['value'] + for key in value: + key_value = _ConvertScalarFieldValue(key, key_field, + '{0}.key'.format(path), True) + if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self.ConvertMessage(value[key], + getattr(message, field.name)[key_value], + '{0}[{1}]'.format(path, key_value)) + else: + getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( + value[key], value_field, path='{0}[{1}]'.format(path, key_value)) + + +def _ConvertScalarFieldValue(value, field, path, require_str=False): + """Convert a single scalar field value. + + Args: + value: A scalar value to convert the scalar field value. + field: The descriptor of the field to convert. + path: parent path to log parse error info. + require_str: If True, the field value must be a str. + + Returns: + The converted scalar field value + + Raises: + ParseError: In case of convert problems. + """ + try: + if field.cpp_type in _INT_TYPES: + return _ConvertInteger(value) + elif field.cpp_type in _FLOAT_TYPES: + return _ConvertFloat(value, field) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + return _ConvertBool(value, require_str) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + if isinstance(value, str): + encoded = value.encode('utf-8') + else: + encoded = value + # Add extra padding '=' + padded_value = encoded + b'=' * (4 - len(encoded) % 4) + return base64.urlsafe_b64decode(padded_value) + else: + # Checking for unpaired surrogates appears to be unreliable, + # depending on the specific Python version, so we check manually. + if _UNPAIRED_SURROGATE_PATTERN.search(value): + raise ParseError('Unpaired surrogate') + return value + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + # Convert an enum value. + enum_value = field.enum_type.values_by_name.get(value, None) + if enum_value is None: + try: + number = int(value) + enum_value = field.enum_type.values_by_number.get(number, None) + except ValueError: + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + if enum_value is None: + if field.file.syntax == 'proto3': + # Proto3 accepts unknown enums. + return number + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + return enum_value.number + except ParseError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + +def _ConvertInteger(value): + """Convert an integer. + + Args: + value: A scalar value to convert. + + Returns: + The integer value. + + Raises: + ParseError: If an integer couldn't be consumed. 
+ """ + if isinstance(value, float) and not value.is_integer(): + raise ParseError('Couldn\'t parse integer: {0}'.format(value)) + + if isinstance(value, str) and value.find(' ') != -1: + raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) + + if isinstance(value, bool): + raise ParseError('Bool value {0} is not acceptable for ' + 'integer field'.format(value)) + + return int(value) + + +def _ConvertFloat(value, field): + """Convert an floating point number.""" + if isinstance(value, float): + if math.isnan(value): + raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') + if math.isinf(value): + if value > 0: + raise ParseError('Couldn\'t parse Infinity or value too large, ' + 'use quoted "Infinity" instead') + else: + raise ParseError('Couldn\'t parse -Infinity or value too small, ' + 'use quoted "-Infinity" instead') + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + # pylint: disable=protected-access + if value > type_checkers._FLOAT_MAX: + raise ParseError('Float value too large') + # pylint: disable=protected-access + if value < type_checkers._FLOAT_MIN: + raise ParseError('Float value too small') + if value == 'nan': + raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') + try: + # Assume Python compatible syntax. + return float(value) + except ValueError: + # Check alternative spellings. + if value == _NEG_INFINITY: + return float('-inf') + elif value == _INFINITY: + return float('inf') + elif value == _NAN: + return float('nan') + else: + raise ParseError('Couldn\'t parse float: {0}'.format(value)) + + +def _ConvertBool(value, require_str): + """Convert a boolean value. + + Args: + value: A scalar value to convert. + require_str: If True, value must be a str. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + if require_str: + if value == 'true': + return True + elif value == 'false': + return False + else: + raise ParseError('Expected "true" or "false", not {0}'.format(value)) + + if not isinstance(value, bool): + raise ParseError('Expected true or false without quotes') + return value + +_WKTJSONMETHODS = { + 'google.protobuf.Any': ['_AnyMessageToJsonObject', + '_ConvertAnyMessage'], + 'google.protobuf.Duration': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', + '_ConvertListValueMessage'], + 'google.protobuf.Struct': ['_StructMessageToJsonObject', + '_ConvertStructMessage'], + 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.Value': ['_ValueMessageToJsonObject', + '_ConvertValueMessage'] +} diff --git a/openpype/hosts/hiero/vendor/google/protobuf/message.py b/openpype/hosts/hiero/vendor/google/protobuf/message.py new file mode 100644 index 0000000000..76c6802f70 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/message.py @@ -0,0 +1,424 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): We should just make these methods all "pure-virtual" and move +# all implementation out, into reflection.py for now. + + +"""Contains an abstract base class for protocol messages.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +class Error(Exception): + """Base error type for this module.""" + pass + + +class DecodeError(Error): + """Exception raised when deserializing messages.""" + pass + + +class EncodeError(Error): + """Exception raised when serializing messages.""" + pass + + +class Message(object): + + """Abstract base class for protocol messages. + + Protocol message classes are almost always generated by the protocol + compiler. These generated types subclass Message and implement the methods + shown below. + """ + + # TODO(robinson): Link to an HTML document here. + + # TODO(robinson): Document that instances of this class will also + # have an Extensions attribute with __getitem__ and __setitem__. + # Again, not sure how to best convey this. + + # TODO(robinson): Document that the class must also have a static + # RegisterExtension(extension_field) method. + # Not sure how to best express at this point. + + # TODO(robinson): Document these fields and methods. + + __slots__ = [] + + #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. + DESCRIPTOR = None + + def __deepcopy__(self, memo=None): + clone = type(self)() + clone.MergeFrom(self) + return clone + + def __eq__(self, other_msg): + """Recursively compares two messages by value and structure.""" + raise NotImplementedError + + def __ne__(self, other_msg): + # Can't just say self != other_msg, since that would infinitely recurse. :) + return not self == other_msg + + def __hash__(self): + raise TypeError('unhashable object') + + def __str__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def __unicode__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def MergeFrom(self, other_msg): + """Merges the contents of the specified message into current message. + + This method merges the contents of the specified message into the current + message. Singular fields that are set in the specified message overwrite + the corresponding fields in the current message. Repeated fields are + appended. 
Singular sub-messages and groups are recursively merged. + + Args: + other_msg (Message): A message to merge into the current message. + """ + raise NotImplementedError + + def CopyFrom(self, other_msg): + """Copies the content of the specified message into the current message. + + The method clears the current message and then merges the specified + message using MergeFrom. + + Args: + other_msg (Message): A message to copy into the current one. + """ + if self is other_msg: + return + self.Clear() + self.MergeFrom(other_msg) + + def Clear(self): + """Clears all data that was set in the message.""" + raise NotImplementedError + + def SetInParent(self): + """Mark this as present in the parent. + + This normally happens automatically when you assign a field of a + sub-message, but sometimes you want to make the sub-message + present while keeping it empty. If you find yourself using this, + you may want to reconsider your design. + """ + raise NotImplementedError + + def IsInitialized(self): + """Checks if the message is initialized. + + Returns: + bool: The method returns True if the message is initialized (i.e. all of + its required fields are set). + """ + raise NotImplementedError + + # TODO(robinson): MergeFromString() should probably return None and be + # implemented in terms of a helper that returns the # of bytes read. Our + # deserialization routines would use the helper when recursively + # deserializing, but the end user would almost always just want the no-return + # MergeFromString(). + + def MergeFromString(self, serialized): + """Merges serialized protocol buffer data into this message. + + When we find a field in `serialized` that is already present + in this message: + + - If it's a "repeated" field, we append to the end of our list. + - Else, if it's a scalar, we overwrite our field. + - Else, (it's a nonrepeated composite), we recursively merge + into the existing composite. + + Args: + serialized (bytes): Any object that allows us to call + ``memoryview(serialized)`` to access a string of bytes using the + buffer interface. + + Returns: + int: The number of bytes read from `serialized`. + For non-group messages, this will always be `len(serialized)`, + but for messages which are actually groups, this will + generally be less than `len(serialized)`, since we must + stop when we reach an ``END_GROUP`` tag. Note that if + we *do* stop because of an ``END_GROUP`` tag, the number + of bytes returned does not include the bytes + for the ``END_GROUP`` tag information. + + Raises: + DecodeError: if the input cannot be parsed. + """ + # TODO(robinson): Document handling of unknown fields. + # TODO(robinson): When we switch to a helper, this will return None. + raise NotImplementedError + + def ParseFromString(self, serialized): + """Parse serialized protocol buffer data into this message. + + Like :func:`MergeFromString()`, except we clear the object first. + + Raises: + message.DecodeError if the input cannot be parsed. + """ + self.Clear() + return self.MergeFromString(serialized) + + def SerializeToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + A binary string representation of the message if all of the required + fields in the message are set (i.e. the message is initialized). + + Raises: + EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
+ """ + raise NotImplementedError + + def SerializePartialToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + This method is similar to SerializeToString but doesn't check if the + message is initialized. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + bytes: A serialized representation of the partial message. + """ + raise NotImplementedError + + # TODO(robinson): Decide whether we like these better + # than auto-generated has_foo() and clear_foo() methods + # on the instances themselves. This way is less consistent + # with C++, but it makes reflection-type access easier and + # reduces the number of magically autogenerated things. + # + # TODO(robinson): Be sure to document (and test) exactly + # which field names are accepted here. Are we case-sensitive? + # What do we do with fields that share names with Python keywords + # like 'lambda' and 'yield'? + # + # nnorwitz says: + # """ + # Typically (in python), an underscore is appended to names that are + # keywords. So they would become lambda_ or yield_. + # """ + def ListFields(self): + """Returns a list of (FieldDescriptor, value) tuples for present fields. + + A message field is non-empty if HasField() would return true. A singular + primitive field is non-empty if HasField() would return true in proto2 or it + is non zero in proto3. A repeated field is non-empty if it contains at least + one element. The fields are ordered by field number. + + Returns: + list[tuple(FieldDescriptor, value)]: field descriptors and values + for all fields in the message which are not empty. The values vary by + field type. + """ + raise NotImplementedError + + def HasField(self, field_name): + """Checks if a certain field is set for the message. + + For a oneof group, checks if any field inside is set. Note that if the + field_name is not defined in the message descriptor, :exc:`ValueError` will + be raised. + + Args: + field_name (str): The name of the field to check for presence. + + Returns: + bool: Whether a value has been set for the named field. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def ClearField(self, field_name): + """Clears the contents of a given field. + + Inside a oneof group, clears the field set. If the name neither refers to a + defined field or oneof group, :exc:`ValueError` is raised. + + Args: + field_name (str): The name of the field to check for presence. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def WhichOneof(self, oneof_group): + """Returns the name of the field that is set inside a oneof group. + + If no field is set, returns None. + + Args: + oneof_group (str): the name of the oneof group to check. + + Returns: + str or None: The name of the group that is set, or None. + + Raises: + ValueError: no group with the given name exists + """ + raise NotImplementedError + + def HasExtension(self, extension_handle): + """Checks if a certain extension is present for this message. + + Extensions are retrieved using the :attr:`Extensions` mapping (if present). + + Args: + extension_handle: The handle for the extension to check. + + Returns: + bool: Whether the extension is present for this message. + + Raises: + KeyError: if the extension is repeated. 
Similar to repeated fields, + there is no separate notion of presence: a "not present" repeated + extension is an empty list. + """ + raise NotImplementedError + + def ClearExtension(self, extension_handle): + """Clears the contents of a given extension. + + Args: + extension_handle: The handle for the extension to clear. + """ + raise NotImplementedError + + def UnknownFields(self): + """Returns the UnknownFieldSet. + + Returns: + UnknownFieldSet: The unknown fields stored in this message. + """ + raise NotImplementedError + + def DiscardUnknownFields(self): + """Clears all fields in the :class:`UnknownFieldSet`. + + This operation is recursive for nested message. + """ + raise NotImplementedError + + def ByteSize(self): + """Returns the serialized size of this message. + + Recursively calls ByteSize() on all contained messages. + + Returns: + int: The number of bytes required to serialize this message. + """ + raise NotImplementedError + + @classmethod + def FromString(cls, s): + raise NotImplementedError + + @staticmethod + def RegisterExtension(extension_handle): + raise NotImplementedError + + def _SetListener(self, message_listener): + """Internal method used by the protocol message implementation. + Clients should not call this directly. + + Sets a listener that this message will call on certain state transitions. + + The purpose of this method is to register back-edges from children to + parents at runtime, for the purpose of setting "has" bits and + byte-size-dirty bits in the parent and ancestor objects whenever a child or + descendant object is modified. + + If the client wants to disconnect this Message from the object tree, she + explicitly sets callback to None. + + If message_listener is None, unregisters any existing listener. Otherwise, + message_listener must implement the MessageListener interface in + internal/message_listener.py, and we discard any listener registered + via a previous _SetListener() call. + """ + raise NotImplementedError + + def __getstate__(self): + """Support the pickle protocol.""" + return dict(serialized=self.SerializePartialToString()) + + def __setstate__(self, state): + """Support the pickle protocol.""" + self.__init__() + serialized = state['serialized'] + # On Python 3, using encoding='latin1' is required for unpickling + # protos pickled by Python 2. + if not isinstance(serialized, bytes): + serialized = serialized.encode('latin1') + self.ParseFromString(serialized) + + def __reduce__(self): + message_descriptor = self.DESCRIPTOR + if message_descriptor.containing_type is None: + return type(self), (), self.__getstate__() + # the message type must be nested. + # Python does not pickle nested classes; use the symbol_database on the + # receiving end. + container = message_descriptor + return (_InternalConstructMessage, (container.full_name,), + self.__getstate__()) + + +def _InternalConstructMessage(full_name): + """Constructs a nested message.""" + from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top + + return symbol_database.Default().GetSymbol(full_name)() diff --git a/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py b/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py new file mode 100644 index 0000000000..3656fa6874 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/message_factory.py @@ -0,0 +1,185 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
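For reference, a minimal sketch of how the abstract Message interface described above composes in practice. The generated class my_pb2.Person and its fields are illustrative placeholders (proto2-style, so singular fields track presence) and are not part of this patch:

    # Illustrative only: my_pb2.Person is a hypothetical proto2-style message
    # with a singular string field `name` and a repeated string field `emails`.
    from my_pb2 import Person

    src = Person(name="alice")
    src.emails.append("alice@example.com")

    dst = Person(name="bob")
    dst.MergeFrom(src)             # scalars are overwritten, repeated fields appended
    assert dst.name == "alice"

    copy = Person()
    copy.CopyFrom(src)             # Clear() followed by MergeFrom(): an exact copy

    data = src.SerializeToString(deterministic=True)
    parsed = Person()
    parsed.ParseFromString(data)   # Clear() followed by MergeFromString()
    assert parsed.HasField("name")
    parsed.ClearField("name")
    assert not parsed.HasField("name")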
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides a factory class for generating dynamic messages. + +The easiest way to use this class is if you have access to the FileDescriptor +protos containing the messages you want to create you can just do the following: + +message_classes = message_factory.GetMessages(iterable_of_file_descriptors) +my_proto_instance = message_classes['some.proto.package.MessageName']() +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message + +if api_implementation.Type() == 'cpp': + from google.protobuf.pyext import cpp_message as message_impl +else: + from google.protobuf.internal import python_message as message_impl + + +# The type of all Message classes. +_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType + + +class MessageFactory(object): + """Factory for creating Proto2 messages from descriptors in a pool.""" + + def __init__(self, pool=None): + """Initializes a new factory.""" + self.pool = pool or descriptor_pool.DescriptorPool() + + # local cache of all classes built from protobuf descriptors + self._classes = {} + + def GetPrototype(self, descriptor): + """Obtains a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + if descriptor not in self._classes: + result_class = self.CreatePrototype(descriptor) + # The assignment to _classes is redundant for the base implementation, but + # might avoid confusion in cases where CreatePrototype gets overridden and + # does not call the base implementation. 
+ self._classes[descriptor] = result_class + return result_class + return self._classes[descriptor] + + def CreatePrototype(self, descriptor): + """Builds a proto2 message class based on the passed in descriptor. + + Don't call this function directly, it always creates a new class. Call + GetPrototype() instead. This method is meant to be overridden in subblasses + to perform additional operations on the newly constructed class. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + descriptor_name = descriptor.name + result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( + descriptor_name, + (message.Message,), + { + 'DESCRIPTOR': descriptor, + # If module not set, it wrongly points to message_factory module. + '__module__': None, + }) + result_class._FACTORY = self # pylint: disable=protected-access + # Assign in _classes before doing recursive calls to avoid infinite + # recursion. + self._classes[descriptor] = result_class + for field in descriptor.fields: + if field.message_type: + self.GetPrototype(field.message_type) + for extension in result_class.DESCRIPTOR.extensions: + if extension.containing_type not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type] + extended_class.RegisterExtension(extension) + return result_class + + def GetMessages(self, files): + """Gets all the messages from a specified file. + + This will find and resolve dependencies, failing if the descriptor + pool cannot satisfy them. + + Args: + files: The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for desc in file_desc.message_types_by_name.values(): + result[desc.full_name] = self.GetPrototype(desc) + + # While the extension FieldDescriptors are created by the descriptor pool, + # the python classes created in the factory need them to be registered + # explicitly, which is done below. + # + # The call to RegisterExtension will specifically check if the + # extension was already registered on the object and either + # ignore the registration if the original was the same, or raise + # an error if they were different. + + for extension in file_desc.extensions_by_name.values(): + if extension.containing_type not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type] + extended_class.RegisterExtension(extension) + return result + + +_FACTORY = MessageFactory() + + +def GetMessages(file_protos): + """Builds a dictionary of all the messages available in a set of files. + + Args: + file_protos: Iterable of FileDescriptorProto to build messages out of. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + # The cpp implementation of the protocol buffer library requires to add the + # message in topological order of the dependency graph. + file_by_name = {file_proto.name: file_proto for file_proto in file_protos} + def _AddFile(file_proto): + for dependency in file_proto.dependency: + if dependency in file_by_name: + # Remove from elements to be visited, in order to cut cycles. 
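As a quick reference for the factory API above, the sketch below builds a FileDescriptorProto by hand and asks the module-level GetMessages() helper for the resulting classes. The package and message names are illustrative only:

    from google.protobuf import descriptor_pb2
    from google.protobuf import message_factory

    # Describe a tiny file containing a single message `example.Simple`.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example/simple.proto'
    file_proto.package = 'example'
    msg_proto = file_proto.message_type.add()
    msg_proto.name = 'Simple'
    field = msg_proto.field.add()
    field.name = 'value'
    field.number = 1
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING

    classes = message_factory.GetMessages([file_proto])
    Simple = classes['example.Simple']
    msg = Simple(value='hello')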
+ _AddFile(file_by_name.pop(dependency)) + _FACTORY.pool.Add(file_proto) + while file_by_name: + _AddFile(file_by_name.popitem()[1]) + return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py b/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py new file mode 100644 index 0000000000..a4667ce63e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/proto_builder.py @@ -0,0 +1,134 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Dynamic Protobuf class creator.""" + +from collections import OrderedDict +import hashlib +import os + +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor +from google.protobuf import message_factory + + +def _GetMessageFromFactory(factory, full_name): + """Get a proto class from the MessageFactory by name. + + Args: + factory: a MessageFactory instance. + full_name: str, the fully qualified name of the proto type. + Returns: + A class, for the type identified by full_name. + Raises: + KeyError, if the proto is not found in the factory's descriptor pool. + """ + proto_descriptor = factory.pool.FindMessageTypeByName(full_name) + proto_cls = factory.GetPrototype(proto_descriptor) + return proto_cls + + +def MakeSimpleProtoClass(fields, full_name=None, pool=None): + """Create a Protobuf class whose fields are basic types. + + Note: this doesn't validate field names! + + Args: + fields: dict of {name: field_type} mappings for each field in the proto. If + this is an OrderedDict the order will be maintained, otherwise the + fields will be sorted by name. + full_name: optional str, the fully-qualified name of the proto type. + pool: optional DescriptorPool instance. + Returns: + a class, the new protobuf class with a FileDescriptor. 
+ """ + factory = message_factory.MessageFactory(pool=pool) + + if full_name is not None: + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # Get a list of (name, field_type) tuples from the fields dict. If fields was + # an OrderedDict we keep the order, but otherwise we sort the field to ensure + # consistent ordering. + field_items = fields.items() + if not isinstance(fields, OrderedDict): + field_items = sorted(field_items) + + # Use a consistent file name that is unlikely to conflict with any imported + # proto files. + fields_hash = hashlib.sha1() + for f_name, f_type in field_items: + fields_hash.update(f_name.encode('utf-8')) + fields_hash.update(str(f_type).encode('utf-8')) + proto_file_name = fields_hash.hexdigest() + '.proto' + + # If the proto is anonymous, use the same hash to name it. + if full_name is None: + full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + + fields_hash.hexdigest()) + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # This is the first time we see this proto: add a new descriptor to the pool. + factory.pool.Add( + _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) + return _GetMessageFromFactory(factory, full_name) + + +def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): + """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" + package, name = full_name.rsplit('.', 1) + file_proto = descriptor_pb2.FileDescriptorProto() + file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) + file_proto.package = package + desc_proto = file_proto.message_type.add() + desc_proto.name = name + for f_number, (f_name, f_type) in enumerate(field_items, 1): + field_proto = desc_proto.field.add() + field_proto.name = f_name + # # If the number falls in the reserved range, reassign it to the correct + # # number after the range. + if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: + f_number += ( + descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - + descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) + field_proto.number = f_number + field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL + field_proto.type = f_type + return file_proto diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py new file mode 100644 index 0000000000..fc8eb32d79 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/pyext/cpp_message.py @@ -0,0 +1,65 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
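MakeSimpleProtoClass() above only needs a mapping of field names to FieldDescriptorProto type values; a brief usage sketch follows, with an illustrative full_name:

    from collections import OrderedDict

    from google.protobuf import descriptor_pb2
    from google.protobuf import proto_builder

    # OrderedDict keeps the declared field order; a plain dict is sorted by name.
    fields = OrderedDict([
        ('name', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
        ('count', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
    ])
    DynamicProto = proto_builder.MakeSimpleProtoClass(
        fields, full_name='example.DynamicProto')

    msg = DynamicProto(name='widget', count=3)
    data = msg.SerializeToString()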
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Protocol message implementation hooks for C++ implementation. + +Contains helper functions used to create protocol message classes from +Descriptor objects at runtime backed by the protocol buffer C++ API. +""" + +__author__ = 'tibell@google.com (Johan Tibell)' + +from google.protobuf.pyext import _message + + +class GeneratedProtocolMessageType(_message.MessageMeta): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py new file mode 100644 index 0000000000..2c6ecf4c98 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/pyext/python_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/pyext/python.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestAllExtensions.RegisterExtension(optional_nested_message_extension) + TestAllExtensions.RegisterExtension(repeated_nested_message_extension) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'H\001' + _TESTALLTYPES._serialized_start=72 + _TESTALLTYPES._serialized_end=388 + _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 + _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 + _FOREIGNMESSAGE._serialized_start=390 + _FOREIGNMESSAGE._serialized_end=428 + _TESTALLEXTENSIONS._serialized_start=430 + _TESTALLEXTENSIONS._serialized_end=459 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/reflection.py b/openpype/hosts/hiero/vendor/google/protobuf/reflection.py new file mode 100644 index 0000000000..81e18859a8 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/reflection.py @@ -0,0 +1,95 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) + +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +from google.protobuf import message_factory +from google.protobuf import symbol_database + +# The type of all Message classes. +# Part of the public interface, but normally only used by message factories. +GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE + +MESSAGE_CLASS_CACHE = {} + + +# Deprecated. Please NEVER use reflection.ParseMessage(). +def ParseMessage(descriptor, byte_str): + """Generate a new Message instance from this Descriptor and a byte string. + + DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). + Please use MessageFactory.GetPrototype() instead. + + Args: + descriptor: Protobuf Descriptor object + byte_str: Serialized protocol buffer byte string + + Returns: + Newly created protobuf Message object. + """ + result_class = MakeClass(descriptor) + new_msg = result_class() + new_msg.ParseFromString(byte_str) + return new_msg + + +# Deprecated. Please NEVER use reflection.MakeClass(). +def MakeClass(descriptor): + """Construct a class object for a protobuf described by descriptor. + + DEPRECATED: use MessageFactory.GetPrototype() instead. + + Args: + descriptor: A descriptor.Descriptor object describing the protobuf. + Returns: + The Message class object described by the descriptor. + """ + # Original implementation leads to duplicate message classes, which won't play + # well with extensions. Message factory info is also missing. + # Redirect to message_factory. + return symbol_database.Default().GetPrototype(descriptor) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/service.py b/openpype/hosts/hiero/vendor/google/protobuf/service.py new file mode 100644 index 0000000000..5625246324 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/service.py @@ -0,0 +1,228 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
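Both reflection helpers above are deprecated compatibility shims; the sketch below contrasts them with the documented replacement. my_pb2 and data are illustrative placeholders:

    from google.protobuf import reflection
    from google.protobuf import symbol_database
    import my_pb2  # hypothetical generated module

    data = b''  # serialized bytes obtained elsewhere

    # Deprecated path, kept only for backwards compatibility:
    msg = reflection.ParseMessage(my_pb2.MyMessage.DESCRIPTOR, data)

    # Recommended replacement:
    cls = symbol_database.Default().GetPrototype(my_pb2.MyMessage.DESCRIPTOR)
    msg = cls()
    msg.ParseFromString(data)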
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""DEPRECATED: Declares the RPC service interfaces. + +This module declares the abstract interfaces underlying proto2 RPC +services. These are intended to be independent of any particular RPC +implementation, so that proto2 services can be used on top of a variety +of implementations. Starting with version 2.3.0, RPC implementations should +not try to build on these, but should instead provide code generator plugins +which generate code specific to the particular RPC implementation. This way +the generated code can be more appropriate for the implementation in use +and can avoid unnecessary layers of indirection. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class RpcException(Exception): + """Exception raised on failed blocking RPC method call.""" + pass + + +class Service(object): + + """Abstract base interface for protocol-buffer-based RPC services. + + Services themselves are abstract classes (implemented either by servers or as + stubs), but they subclass this base interface. The methods of this + interface can be used to call the methods of the service without knowing + its exact type at compile time (analogous to the Message interface). + """ + + def GetDescriptor(): + """Retrieves this service's descriptor.""" + raise NotImplementedError + + def CallMethod(self, method_descriptor, rpc_controller, + request, done): + """Calls a method of the service specified by method_descriptor. + + If "done" is None then the call is blocking and the response + message will be returned directly. Otherwise the call is asynchronous + and "done" will later be called with the response value. + + In the blocking case, RpcException will be raised on error. + + Preconditions: + + * method_descriptor.service == GetDescriptor + * request is of the exact same classes as returned by + GetRequestClass(method). + * After the call has started, the request must not be modified. + * "rpc_controller" is of the correct type for the RPC implementation being + used by this Service. 
For stubs, the "correct type" depends on the + RpcChannel which the stub is using. + + Postconditions: + + * "done" will be called when the method is complete. This may be + before CallMethod() returns or it may be at some point in the future. + * If the RPC failed, the response value passed to "done" will be None. + Further details about the failure can be found by querying the + RpcController. + """ + raise NotImplementedError + + def GetRequestClass(self, method_descriptor): + """Returns the class of the request message for the specified method. + + CallMethod() requires that the request is of a particular subclass of + Message. GetRequestClass() gets the default instance of this required + type. + + Example: + method = service.GetDescriptor().FindMethodByName("Foo") + request = stub.GetRequestClass(method)() + request.ParseFromString(input) + service.CallMethod(method, request, callback) + """ + raise NotImplementedError + + def GetResponseClass(self, method_descriptor): + """Returns the class of the response message for the specified method. + + This method isn't really needed, as the RpcChannel's CallMethod constructs + the response protocol message. It's provided anyway in case it is useful + for the caller to know the response type in advance. + """ + raise NotImplementedError + + +class RpcController(object): + + """An RpcController mediates a single method call. + + The primary purpose of the controller is to provide a way to manipulate + settings specific to the RPC implementation and to find out about RPC-level + errors. The methods provided by the RpcController interface are intended + to be a "least common denominator" set of features which we expect all + implementations to support. Specific implementations may provide more + advanced features (e.g. deadline propagation). + """ + + # Client-side methods below + + def Reset(self): + """Resets the RpcController to its initial state. + + After the RpcController has been reset, it may be reused in + a new call. Must not be called while an RPC is in progress. + """ + raise NotImplementedError + + def Failed(self): + """Returns true if the call failed. + + After a call has finished, returns true if the call failed. The possible + reasons for failure depend on the RPC implementation. Failed() must not + be called before a call has finished. If Failed() returns true, the + contents of the response message are undefined. + """ + raise NotImplementedError + + def ErrorText(self): + """If Failed is true, returns a human-readable description of the error.""" + raise NotImplementedError + + def StartCancel(self): + """Initiate cancellation. + + Advises the RPC system that the caller desires that the RPC call be + canceled. The RPC system may cancel it immediately, may wait awhile and + then cancel it, or may not even cancel the call at all. If the call is + canceled, the "done" callback will still be called and the RpcController + will indicate that the call failed at that time. + """ + raise NotImplementedError + + # Server-side methods below + + def SetFailed(self, reason): + """Sets a failure reason. + + Causes Failed() to return true on the client side. "reason" will be + incorporated into the message returned by ErrorText(). If you find + you need to return machine-readable information about failures, you + should incorporate it into your response protocol buffer and should + NOT call SetFailed(). + """ + raise NotImplementedError + + def IsCanceled(self): + """Checks if the client cancelled the RPC. 
+ + If true, indicates that the client canceled the RPC, so the server may + as well give up on replying to it. The server should still call the + final "done" callback. + """ + raise NotImplementedError + + def NotifyOnCancel(self, callback): + """Sets a callback to invoke on cancel. + + Asks that the given callback be called when the RPC is canceled. The + callback will always be called exactly once. If the RPC completes without + being canceled, the callback will be called after completion. If the RPC + has already been canceled when NotifyOnCancel() is called, the callback + will be called immediately. + + NotifyOnCancel() must be called no more than once per request. + """ + raise NotImplementedError + + +class RpcChannel(object): + + """Abstract interface for an RPC channel. + + An RpcChannel represents a communication line to a service which can be used + to call that service's methods. The service may be running on another + machine. Normally, you should not use an RpcChannel directly, but instead + construct a stub {@link Service} wrapping it. Example: + + Example: + RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") + RpcController controller = rpcImpl.Controller() + MyService service = MyService_Stub(channel) + service.MyMethod(controller, request, callback) + """ + + def CallMethod(self, method_descriptor, rpc_controller, + request, response_class, done): + """Calls the method identified by the descriptor. + + Call the given method of the remote service. The signature of this + procedure looks the same as Service.CallMethod(), but the requirements + are less strict in one important way: the request object doesn't have to + be of any specific class as long as its descriptor is method.input_type. + """ + raise NotImplementedError diff --git a/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py b/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py new file mode 100644 index 0000000000..f82ab7145a --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/service_reflection.py @@ -0,0 +1,295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
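RpcChannel.CallMethod() above is the single hook an RPC implementation has to provide. A deliberately minimal in-process sketch (not part of this patch) that satisfies the contract by dispatching straight to a local Service instance:

    from google.protobuf import service


    class LocalChannel(service.RpcChannel):
        """Toy channel that forwards calls to a local Service instance."""

        def __init__(self, local_service):
            self._service = local_service

        def CallMethod(self, method_descriptor, rpc_controller,
                       request, response_class, done):
            # A real channel would serialize `request`, send it over the wire
            # and decode the reply into `response_class`; here we just delegate.
            return self._service.CallMethod(
                method_descriptor, rpc_controller, request, done)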
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains metaclasses used to create protocol service and service stub +classes from ServiceDescriptor objects at runtime. + +The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to +inject all useful functionality into the classes output by the protocol +compiler at compile-time. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class GeneratedServiceType(type): + + """Metaclass for service classes created at runtime from ServiceDescriptors. + + Implementations for all methods described in the Service class are added here + by this class. We also create properties to allow getting/setting all fields + in the protocol message. + + The protocol compiler currently uses this metaclass to create protocol service + classes at runtime. Clients can also manually create their own classes at + runtime, as in this example:: + + mydescriptor = ServiceDescriptor(.....) + class MyProtoService(service.Service): + __metaclass__ = GeneratedServiceType + DESCRIPTOR = mydescriptor + myservice_instance = MyProtoService() + # ... + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service class. + + Args: + name: Name of the class (ignored, but required by the metaclass + protocol). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service class is subclassed. + if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] + service_builder = _ServiceBuilder(descriptor) + service_builder.BuildService(cls) + cls.DESCRIPTOR = descriptor + + +class GeneratedServiceStubType(GeneratedServiceType): + + """Metaclass for service stubs created at runtime from ServiceDescriptors. + + This class has similar responsibilities as GeneratedServiceType, except that + it creates the service stub classes. + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service stub class. + + Args: + name: Name of the class (ignored, here). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service stub is subclassed. 
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] + service_stub_builder = _ServiceStubBuilder(descriptor) + service_stub_builder.BuildServiceStub(cls) + + +class _ServiceBuilder(object): + + """This class constructs a protocol service class using a service descriptor. + + Given a service descriptor, this class constructs a class that represents + the specified service descriptor. One service builder instance constructs + exactly one service class. That means all instances of that class share the + same builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + service class. + """ + self.descriptor = service_descriptor + + def BuildService(builder, cls): + """Constructs the service class. + + Args: + cls: The class that will be constructed. + """ + + # CallMethod needs to operate with an instance of the Service class. This + # internal wrapper function exists only to be able to pass the service + # instance to the method that does the real CallMethod work. + # Making sure to use exact argument names from the abstract interface in + # service.py to match the type signature + def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): + return builder._CallMethod(self, method_descriptor, rpc_controller, + request, done) + + def _WrapGetRequestClass(self, method_descriptor): + return builder._GetRequestClass(method_descriptor) + + def _WrapGetResponseClass(self, method_descriptor): + return builder._GetResponseClass(method_descriptor) + + builder.cls = cls + cls.CallMethod = _WrapCallMethod + cls.GetDescriptor = staticmethod(lambda: builder.descriptor) + cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' + cls.GetRequestClass = _WrapGetRequestClass + cls.GetResponseClass = _WrapGetResponseClass + for method in builder.descriptor.methods: + setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) + + def _CallMethod(self, srvc, method_descriptor, + rpc_controller, request, callback): + """Calls the method described by a given method descriptor. + + Args: + srvc: Instance of the service for which this method is called. + method_descriptor: Descriptor that represent the method to call. + rpc_controller: RPC controller to use for this method's execution. + request: Request protocol message. + callback: A callback to invoke after the method has completed. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'CallMethod() given method descriptor for wrong service type.') + method = getattr(srvc, method_descriptor.name) + return method(rpc_controller, request, callback) + + def _GetRequestClass(self, method_descriptor): + """Returns the class of the request protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + request protocol message class. + + Returns: + A class that represents the input protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetRequestClass() given method descriptor for wrong service type.') + return method_descriptor.input_type._concrete_class + + def _GetResponseClass(self, method_descriptor): + """Returns the class of the response protocol message. 
+ + Args: + method_descriptor: Descriptor of the method for which to return the + response protocol message class. + + Returns: + A class that represents the output protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetResponseClass() given method descriptor for wrong service type.') + return method_descriptor.output_type._concrete_class + + def _GenerateNonImplementedMethod(self, method): + """Generates and returns a method that can be set for a service methods. + + Args: + method: Descriptor of the service method for which a method is to be + generated. + + Returns: + A method that can be added to the service class. + """ + return lambda inst, rpc_controller, request, callback: ( + self._NonImplementedMethod(method.name, rpc_controller, callback)) + + def _NonImplementedMethod(self, method_name, rpc_controller, callback): + """The body of all methods in the generated service class. + + Args: + method_name: Name of the method being executed. + rpc_controller: RPC controller used to execute this method. + callback: A callback which will be invoked when the method finishes. + """ + rpc_controller.SetFailed('Method %s not implemented.' % method_name) + callback(None) + + +class _ServiceStubBuilder(object): + + """Constructs a protocol service stub class using a service descriptor. + + Given a service descriptor, this class constructs a suitable stub class. + A stub is just a type-safe wrapper around an RpcChannel which emulates a + local implementation of the service. + + One service stub builder instance constructs exactly one class. It means all + instances of that class share the same service stub builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service stub class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + stub class. + """ + self.descriptor = service_descriptor + + def BuildServiceStub(self, cls): + """Constructs the stub class. + + Args: + cls: The class that will be constructed. + """ + + def _ServiceStubInit(stub, rpc_channel): + stub.rpc_channel = rpc_channel + self.cls = cls + cls.__init__ = _ServiceStubInit + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateStubMethod(method)) + + def _GenerateStubMethod(self, method): + return (lambda inst, rpc_controller, request, callback=None: + self._StubMethod(inst, method, rpc_controller, request, callback)) + + def _StubMethod(self, stub, method_descriptor, + rpc_controller, request, callback): + """The body of all service methods in the generated stub class. + + Args: + stub: Stub instance. + method_descriptor: Descriptor of the invoked method. + rpc_controller: Rpc controller to execute the method. + request: Request protocol message. + callback: A callback to execute when the method finishes. + Returns: + Response message (in case of blocking call). + """ + return stub.rpc_channel.CallMethod( + method_descriptor, rpc_controller, request, + method_descriptor.output_type._concrete_class, callback) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py new file mode 100644 index 0000000000..30cca2e06e --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/source_context_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
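_ServiceStubBuilder above turns every service method into a thin wrapper around the channel's CallMethod(). Wiring the pieces together might look like the following, where my_service_pb2, MyController and LocalChannel (from the earlier channel sketch) are illustrative stand-ins:

    channel = LocalChannel(my_service_impl)         # see the RpcChannel sketch above
    stub = my_service_pb2.MyService_Stub(channel)   # hypothetical generated stub

    method = stub.GetDescriptor().FindMethodByName('MyMethod')
    request = stub.GetRequestClass(method)()
    # Passing callback=None makes the call blocking, per Service.CallMethod().
    response = stub.MyMethod(MyController(), request, callback=None)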
+# source: google/protobuf/source_context.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SOURCECONTEXT._serialized_start=57 + _SOURCECONTEXT._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py new file mode 100644 index 0000000000..149728ca08 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/struct_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/struct.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _STRUCT_FIELDSENTRY._options = None + _STRUCT_FIELDSENTRY._serialized_options = b'8\001' + _NULLVALUE._serialized_start=474 + _NULLVALUE._serialized_end=501 + _STRUCT._serialized_start=50 + _STRUCT._serialized_end=182 + _STRUCT_FIELDSENTRY._serialized_start=113 + _STRUCT_FIELDSENTRY._serialized_end=182 + _VALUE._serialized_start=185 + _VALUE._serialized_end=419 + _LISTVALUE._serialized_start=421 + _LISTVALUE._serialized_end=472 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py b/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py new file mode 100644 index 0000000000..fdcf8cf06c --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/symbol_database.py @@ -0,0 +1,194 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A database of Python protocol buffer generated symbols. + +SymbolDatabase is the MessageFactory for messages generated at compile time, +and makes it easy to create new instances of a registered type, given only the +type's protocol buffer symbol name. + +Example usage:: + + db = symbol_database.SymbolDatabase() + + # Register symbols of interest, from one or multiple files. 
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) + db.RegisterMessage(my_proto_pb2.MyMessage) + db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) + + # The database can be used as a MessageFactory, to generate types based on + # their name: + types = db.GetMessages(['my_proto.proto']) + my_message_instance = types['MyMessage']() + + # The database's underlying descriptor pool can be queried, so it's not + # necessary to know a type's filename to be able to generate it: + filename = db.pool.FindFileContainingSymbol('MyMessage') + my_message_instance = db.GetMessages([filename])['MyMessage']() + + # This functionality is also provided directly via a convenience method: + my_message_instance = db.GetSymbol('MyMessage')() +""" + + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message_factory + + +class SymbolDatabase(message_factory.MessageFactory): + """A database of Python generated symbols.""" + + def RegisterMessage(self, message): + """Registers the given message type in the local database. + + Calls to GetSymbol() and GetMessages() will return messages registered here. + + Args: + message: A :class:`google.protobuf.message.Message` subclass (or + instance); its descriptor will be registered. + + Returns: + The provided message. + """ + + desc = message.DESCRIPTOR + self._classes[desc] = message + self.RegisterMessageDescriptor(desc) + return message + + def RegisterMessageDescriptor(self, message_descriptor): + """Registers the given message descriptor in the local database. + + Args: + message_descriptor (Descriptor): the message descriptor to add. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddDescriptor(message_descriptor) + + def RegisterEnumDescriptor(self, enum_descriptor): + """Registers the given enum descriptor in the local database. + + Args: + enum_descriptor (EnumDescriptor): The enum descriptor to register. + + Returns: + EnumDescriptor: The provided descriptor. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddEnumDescriptor(enum_descriptor) + return enum_descriptor + + def RegisterServiceDescriptor(self, service_descriptor): + """Registers the given service descriptor in the local database. + + Args: + service_descriptor (ServiceDescriptor): the service descriptor to + register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddServiceDescriptor(service_descriptor) + + def RegisterFileDescriptor(self, file_descriptor): + """Registers the given file descriptor in the local database. + + Args: + file_descriptor (FileDescriptor): The file descriptor to register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._InternalAddFileDescriptor(file_descriptor) + + def GetSymbol(self, symbol): + """Tries to find a symbol in the local database. + + Currently, this method only returns message.Message instances, however, if + may be extended in future to support other symbol types. + + Args: + symbol (str): a protocol buffer symbol. + + Returns: + A Python class corresponding to the symbol. + + Raises: + KeyError: if the symbol could not be found. + """ + + return self._classes[self.pool.FindMessageTypeByName(symbol)] + + def GetMessages(self, files): + # TODO(amauryfa): Fix the differences with MessageFactory. + """Gets all registered messages from a specified file. 
+ + Only messages already created and registered will be returned; (this is the + case for imported _pb2 modules) + But unlike MessageFactory, this version also returns already defined nested + messages, but does not register any message extensions. + + Args: + files (list[str]): The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. + + Raises: + KeyError: if a file could not be found. + """ + + def _GetAllMessages(desc): + """Walk a message Descriptor and recursively yields all message names.""" + yield desc + for msg_desc in desc.nested_types: + for nested_desc in _GetAllMessages(msg_desc): + yield nested_desc + + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for msg_desc in file_desc.message_types_by_name.values(): + for desc in _GetAllMessages(msg_desc): + try: + result[desc.full_name] = self._classes[desc] + except KeyError: + # This descriptor has no registered class, skip it. + pass + return result + + +_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) + + +def Default(): + """Returns the default SymbolDatabase.""" + return _DEFAULT diff --git a/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py b/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py new file mode 100644 index 0000000000..759cf11f62 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/text_encoding.py @@ -0,0 +1,110 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
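+# A minimal usage sketch of the helpers defined below ('raw' and 'escaped'
+# are just sample names for an arbitrary byte string and its escaped form):
+#
+#   raw = b'caf\xc3\xa9 "quoted"\n'
+#   escaped = CEscape(raw, as_utf8=False)   # ASCII-only text with octal escapes
+#   assert CUnescape(escaped) == raw        # round-trips back to the original bytes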
+ +"""Encoding related utilities.""" +import re + +_cescape_chr_to_symbol_map = {} +_cescape_chr_to_symbol_map[9] = r'\t' # optional escape +_cescape_chr_to_symbol_map[10] = r'\n' # optional escape +_cescape_chr_to_symbol_map[13] = r'\r' # optional escape +_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape +_cescape_chr_to_symbol_map[39] = r"\'" # optional escape +_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape + +# Lookup table for unicode +_cescape_unicode_to_str = [chr(i) for i in range(0, 256)] +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_unicode_to_str[byte] = string + +# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) +_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + + [chr(i) for i in range(32, 127)] + + [r'\%03o' % i for i in range(127, 256)]) +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_byte_to_str[byte] = string +del byte, string + + +def CEscape(text, as_utf8): + # type: (...) -> str + """Escape a bytes string for use in an text protocol buffer. + + Args: + text: A byte string to be escaped. + as_utf8: Specifies if result may contain non-ASCII characters. + In Python 3 this allows unescaped non-ASCII Unicode characters. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + Returns: + Escaped string (str). + """ + # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not + # satisfy our needs; they encodes unprintable characters using two-digit hex + # escapes whereas our C++ unescaping function allows hex escapes to be any + # length. So, "\0011".encode('string_escape') ends up being "\\x011", which + # will be decoded in C++ as a single-character string with char code 0x11. + text_is_unicode = isinstance(text, str) + if as_utf8 and text_is_unicode: + # We're already unicode, no processing beyond control char escapes. + return text.translate(_cescape_chr_to_symbol_map) + ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints. + if as_utf8: + return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) + return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) + + +_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') + + +def CUnescape(text): + # type: (str) -> bytes + """Unescape a text string with C-style escape sequences to UTF-8 bytes. + + Args: + text: The data to parse in a str. + Returns: + A byte string. + """ + + def ReplaceHex(m): + # Only replace the match if the number of leading back slashes is odd. i.e. + # the slash itself is not escaped. + if len(m.group(1)) & 1: + return m.group(1) + 'x0' + m.group(2) + return m.group(0) + + # This is required because the 'string_escape' encoding doesn't + # allow single-digit hex escapes (like '\xf'). + result = _CUNESCAPE_HEX.sub(ReplaceHex, text) + + return (result.encode('utf-8') # Make it bytes to allow decode. + .decode('unicode_escape') + # Make it bytes again to return the proper type. + .encode('raw_unicode_escape')) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/text_format.py b/openpype/hosts/hiero/vendor/google/protobuf/text_format.py new file mode 100644 index 0000000000..412385c26f --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/text_format.py @@ -0,0 +1,1795 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in text format. + +Simple usage example:: + + # Create a proto object and serialize it to a text proto string. + message = my_proto_pb2.MyMessage(foo='bar') + text_proto = text_format.MessageToString(message) + + # Parse a text proto string. + message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +# TODO(b/129989314) Import thread contention leads to test failures. 
+import encodings.raw_unicode_escape # pylint: disable=unused-import +import encodings.unicode_escape # pylint: disable=unused-import +import io +import math +import re + +from google.protobuf.internal import decoder +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import text_encoding + +# pylint: disable=g-import-not-at-top +__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', + 'PrintFieldValue', 'Merge', 'MessageToBytes'] + +_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), + type_checkers.Int32ValueChecker(), + type_checkers.Uint64ValueChecker(), + type_checkers.Int64ValueChecker()) +_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) +_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) +_QUOTES = frozenset(("'", '"')) +_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' + + +class Error(Exception): + """Top-level module error for text_format.""" + + +class ParseError(Error): + """Thrown in case of text parsing or tokenizing error.""" + + def __init__(self, message=None, line=None, column=None): + if message is not None and line is not None: + loc = str(line) + if column is not None: + loc += ':{0}'.format(column) + message = '{0} : {1}'.format(loc, message) + if message is not None: + super(ParseError, self).__init__(message) + else: + super(ParseError, self).__init__() + self._line = line + self._column = column + + def GetLine(self): + return self._line + + def GetColumn(self): + return self._column + + +class TextWriter(object): + + def __init__(self, as_utf8): + self._writer = io.StringIO() + + def write(self, val): + return self._writer.write(val) + + def close(self): + return self._writer.close() + + def getvalue(self): + return self._writer.getvalue() + + +def MessageToString( + message, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + indent=0, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + # type: (...) -> str + """Convert protobuf message to text format. + + Double values can be formatted compactly with 15 digits of + precision (which is the most that IEEE 754 "double" can guarantee) + using double_format='.15g'. To ensure that converting to text and back to a + proto will result in an identical value, double_format='.17g' should be used. + + Args: + message: The protocol buffers message. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, fields of a proto message will be printed using + the order defined in source code instead of the field number, extensions + will be printed at the end of the message and their relative order is + determined by the extension number. By default, use the field number + order. + float_format (str): If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest float + that has same value in wire will be printed. Also affect double field + if double_format is not set but float_format is set. 
+ double_format (str): If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, use ``str()`` + use_field_number: If True, print field numbers instead of names. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + indent (int): The initial indent level, in terms of spaces, for pretty + print. + message_formatter (function(message, indent, as_one_line) -> unicode|None): + Custom formatter for selected sub-messages (usually based on message + type). Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if the + field is a proto message. + + Returns: + str: A string of the text formatted protocol buffer message. + """ + out = TextWriter(as_utf8) + printer = _Printer( + out, + indent, + as_utf8, + as_one_line, + use_short_repeated_primitives, + pointy_brackets, + use_index_order, + float_format, + double_format, + use_field_number, + descriptor_pool, + message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + result = out.getvalue() + out.close() + if as_one_line: + return result.rstrip() + return result + + +def MessageToBytes(message, **kwargs): + # type: (...) -> bytes + """Convert protobuf message to encoded text format. See MessageToString.""" + text = MessageToString(message, **kwargs) + if isinstance(text, bytes): + return text + codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' + return text.encode(codec) + + +def _IsMapEntry(field): + return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def PrintMessage(message, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + printer = _Printer( + out=out, indent=indent, as_utf8=as_utf8, + as_one_line=as_one_line, + use_short_repeated_primitives=use_short_repeated_primitives, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format, + double_format=double_format, + use_field_number=use_field_number, + descriptor_pool=descriptor_pool, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + + +def PrintField(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field name/value pair.""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintField(field, value) + + +def PrintFieldValue(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, 
+ float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field value (not including name).""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintFieldValue(field, value) + + +def _BuildMessageFromTypeName(type_name, descriptor_pool): + """Returns a protobuf message instance. + + Args: + type_name: Fully-qualified protobuf message type name string. + descriptor_pool: DescriptorPool instance. + + Returns: + A Message instance of type matching type_name, or None if the a Descriptor + wasn't found matching type_name. + """ + # pylint: disable=g-import-not-at-top + if descriptor_pool is None: + from google.protobuf import descriptor_pool as pool_mod + descriptor_pool = pool_mod.Default() + from google.protobuf import symbol_database + database = symbol_database.Default() + try: + message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) + except KeyError: + return None + message_type = database.GetPrototype(message_descriptor) + return message_type() + + +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 + + +class _Printer(object): + """Text format printer for protocol message.""" + + def __init__( + self, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Initialize the Printer. + + Double values can be formatted compactly with 15 digits of precision + (which is the most that IEEE 754 "double" can guarantee) using + double_format='.15g'. To ensure that converting to text and back to a proto + will result in an identical value, double_format='.17g' should be used. + + Args: + out: To record the text format result. + indent: The initial indent level for pretty print. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, print fields of a proto message using the order + defined in source code instead of the field number. By default, use the + field number order. + float_format: If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest + float that has same value in wire will be printed. Also affect double + field if double_format is not set but float_format is set. + double_format: If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, str() is used. + use_field_number: If True, print field numbers instead of names. + descriptor_pool: A DescriptorPool used to resolve Any types. 
+ message_formatter: A function(message, indent, as_one_line): unicode|None + to custom format selected sub-messages (usually based on message type). + Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if + the field is a proto message. + """ + self.out = out + self.indent = indent + self.as_utf8 = as_utf8 + self.as_one_line = as_one_line + self.use_short_repeated_primitives = use_short_repeated_primitives + self.pointy_brackets = pointy_brackets + self.use_index_order = use_index_order + self.float_format = float_format + if double_format is not None: + self.double_format = double_format + else: + self.double_format = float_format + self.use_field_number = use_field_number + self.descriptor_pool = descriptor_pool + self.message_formatter = message_formatter + self.print_unknown_fields = print_unknown_fields + self.force_colon = force_colon + + def _TryPrintAsAnyMessage(self, message): + """Serializes if message is a google.protobuf.Any field.""" + if '/' not in message.type_url: + return False + packed_message = _BuildMessageFromTypeName(message.TypeName(), + self.descriptor_pool) + if packed_message: + packed_message.MergeFromString(message.value) + colon = ':' if self.force_colon else '' + self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) + self._PrintMessageFieldValue(packed_message) + self.out.write(' ' if self.as_one_line else '\n') + return True + else: + return False + + def _TryCustomFormatMessage(self, message): + formatted = self.message_formatter(message, self.indent, self.as_one_line) + if formatted is None: + return False + + out = self.out + out.write(' ' * self.indent) + out.write(formatted) + out.write(' ' if self.as_one_line else '\n') + return True + + def PrintMessage(self, message): + """Convert protobuf message to text format. + + Args: + message: The protocol buffers message. + """ + if self.message_formatter and self._TryCustomFormatMessage(message): + return + if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and + self._TryPrintAsAnyMessage(message)): + return + fields = message.ListFields() + if self.use_index_order: + fields.sort( + key=lambda x: x[0].number if x[0].is_extension else x[0].index) + for field, value in fields: + if _IsMapEntry(field): + for key in sorted(value): + # This is slow for maps with submessage entries because it copies the + # entire tree. Unfortunately this would take significant refactoring + # of this file to work around. + # + # TODO(haberman): refactor and optimize if this becomes an issue. 
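+          #
+          # GetEntryClass() returns the synthesized map-entry message type (a
+          # message with 'key' and 'value' fields), so each key/value pair can
+          # be printed through the regular field printer below.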
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key]) + self.PrintField(field, entry_submsg) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if (self.use_short_repeated_primitives + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): + self._PrintShortRepeatedPrimitivesValue(field, value) + else: + for element in value: + self.PrintField(field, element) + else: + self.PrintField(field, value) + + if self.print_unknown_fields: + self._PrintUnknownFields(message.UnknownFields()) + + def _PrintUnknownFields(self, unknown_fields): + """Print unknown fields.""" + out = self.out + for field in unknown_fields: + out.write(' ' * self.indent) + out.write(str(field.field_number)) + if field.wire_type == WIRETYPE_START_GROUP: + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(field.data) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: + try: + # If this field is parseable as a Message, it is probably + # an embedded message. + # pylint: disable=protected-access + (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( + memoryview(field.data), 0, len(field.data)) + except Exception: # pylint: disable=broad-except + pos = 0 + + if pos == len(field.data): + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(embedded_unknown_message) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + else: + # A string or bytes field. self.as_utf8 may not work. + out.write(': \"') + out.write(text_encoding.CEscape(field.data, False)) + out.write('\" ' if self.as_one_line else '\"\n') + else: + # varint, fixed32, fixed64 + out.write(': ') + out.write(str(field.data)) + out.write(' ' if self.as_one_line else '\n') + + def _PrintFieldName(self, field): + """Print field name.""" + out = self.out + out.write(' ' * self.indent) + if self.use_field_number: + out.write(str(field.number)) + else: + if field.is_extension: + out.write('[') + if (field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): + out.write(field.message_type.full_name) + else: + out.write(field.full_name) + out.write(']') + elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: + # For groups, use the capitalized name. + out.write(field.message_type.name) + else: + out.write(field.name) + + if (self.force_colon or + field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): + # The colon is optional in this case, but our cross-language golden files + # don't include it. Here, the colon is only included if force_colon is + # set to True + out.write(':') + + def PrintField(self, field, value): + """Print a single field name/value pair.""" + self._PrintFieldName(field) + self.out.write(' ') + self.PrintFieldValue(field, value) + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintShortRepeatedPrimitivesValue(self, field, value): + """"Prints short repeated primitives value.""" + # Note: this is called only when value has at least one element. 
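+    # Output has the form `name: [v1, v2, v3]`, mirroring the short repeated
+    # syntax accepted by the parser (see _MergeField).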
+ self._PrintFieldName(field) + self.out.write(' [') + for i in range(len(value) - 1): + self.PrintFieldValue(field, value[i]) + self.out.write(', ') + self.PrintFieldValue(field, value[-1]) + self.out.write(']') + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintMessageFieldValue(self, value): + if self.pointy_brackets: + openb = '<' + closeb = '>' + else: + openb = '{' + closeb = '}' + + if self.as_one_line: + self.out.write('%s ' % openb) + self.PrintMessage(value) + self.out.write(closeb) + else: + self.out.write('%s\n' % openb) + self.indent += 2 + self.PrintMessage(value) + self.indent -= 2 + self.out.write(' ' * self.indent + closeb) + + def PrintFieldValue(self, field, value): + """Print a single field value (not including name). + + For repeated fields, the value should be a single element. + + Args: + field: The descriptor of the field to be printed. + value: The value of the field. + """ + out = self.out + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self._PrintMessageFieldValue(value) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + enum_value = field.enum_type.values_by_number.get(value, None) + if enum_value is not None: + out.write(enum_value.name) + else: + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') + if isinstance(value, str) and not self.as_utf8: + out_value = value.encode('utf-8') + else: + out_value = value + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + # We always need to escape all binary data in TYPE_BYTES fields. + out_as_utf8 = False + else: + out_as_utf8 = self.as_utf8 + out.write(text_encoding.CEscape(out_value, out_as_utf8)) + out.write('\"') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + if value: + out.write('true') + else: + out.write('false') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + if self.float_format is not None: + out.write('{1:{0}}'.format(self.float_format, value)) + else: + if math.isnan(value): + out.write(str(value)) + else: + out.write(str(type_checkers.ToShortestFloat(value))) + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and + self.double_format is not None): + out.write('{1:{0}}'.format(self.double_format, value)) + else: + out.write(str(value)) + + +def Parse(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + NOTE: for historical reasons this function does not clear the input + message. This is different from what the binary msg.ParseFrom(...) does. + If text contains a field already set in message, the value is appended if the + field is repeated. Otherwise, an error is raised. + + Example:: + + a = MyProto() + a.repeated_field.append('test') + b = MyProto() + + # Repeated fields are combined + text_format.Parse(repr(a), b) + text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] + + # Non-repeated fields cannot be overwritten + a.singular_field = 1 + b.singular_field = 2 + text_format.Parse(repr(a), b) # ParseError + + # Binary version: + b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" + + Caller is responsible for clearing the message as needed. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def Merge(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + Like Parse(), but allows repeated values for a non-repeated field, and uses + the last one. This means any non-repeated, top-level fields specified in text + replace those in the message. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return MergeLines( + text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def ParseLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Parse() for caveats. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.ParseLines(lines, message) + + +def MergeLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Merge() for more details. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.MergeLines(lines, message) + + +class _Parser(object): + """Text format parser for protocol message.""" + + def __init__(self, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + self.allow_unknown_extension = allow_unknown_extension + self.allow_field_number = allow_field_number + self.descriptor_pool = descriptor_pool + self.allow_unknown_field = allow_unknown_field + + def ParseLines(self, lines, message): + """Parses a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = False + self._ParseOrMerge(lines, message) + return message + + def MergeLines(self, lines, message): + """Merges a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = True + self._ParseOrMerge(lines, message) + return message + + def _ParseOrMerge(self, lines, message): + """Converts a text representation of a protocol message into a message. + + Args: + lines: Lines of a message's text representation. + message: A protocol buffer message to merge into. + + Raises: + ParseError: On text parsing problems. + """ + # Tokenize expects native str lines. + str_lines = ( + line if isinstance(line, str) else line.decode('utf-8') + for line in lines) + tokenizer = Tokenizer(str_lines) + while not tokenizer.AtEnd(): + self._MergeField(tokenizer, message) + + def _MergeField(self, tokenizer, message): + """Merges a single protocol message field into a message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + message: A protocol message to record the data. + + Raises: + ParseError: In case of text parsing problems. + """ + message_descriptor = message.DESCRIPTOR + if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and + tokenizer.TryConsume('[')): + type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) + tokenizer.Consume(']') + tokenizer.TryConsume(':') + if tokenizer.TryConsume('<'): + expanded_any_end_token = '>' + else: + tokenizer.Consume('{') + expanded_any_end_token = '}' + expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, + self.descriptor_pool) + if not expanded_any_sub_message: + raise ParseError('Type %s not found in descriptor pool' % + packed_type_name) + while not tokenizer.TryConsume(expanded_any_end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% + (expanded_any_end_token,)) + self._MergeField(tokenizer, expanded_any_sub_message) + deterministic = False + + message.Pack(expanded_any_sub_message, + type_url_prefix=type_url_prefix, + deterministic=deterministic) + return + + if tokenizer.TryConsume('['): + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + name = '.'.join(name) + + if not message_descriptor.is_extendable: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" does not have extensions.' % + message_descriptor.full_name) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(name) + # pylint: enable=protected-access + + + if not field: + if self.allow_unknown_extension: + field = None + else: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" not registered. ' + 'Did you import the _pb2 module which defines it? ' + 'If you are trying to place the extension in the MessageSet ' + 'field of another message that is in an Any or MessageSet field, ' + 'that message\'s _pb2 module must be imported as well' % name) + elif message_descriptor != field.containing_type: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" does not extend message type "%s".' % + (name, message_descriptor.full_name)) + + tokenizer.Consume(']') + + else: + name = tokenizer.ConsumeIdentifierOrNumber() + if self.allow_field_number and name.isdigit(): + number = ParseInteger(name, True, True) + field = message_descriptor.fields_by_number.get(number, None) + if not field and message_descriptor.is_extendable: + field = message.Extensions._FindExtensionByNumber(number) + else: + field = message_descriptor.fields_by_name.get(name, None) + + # Group names are expected to be capitalized as they appear in the + # .proto file, which actually matches their type names, not their field + # names. + if not field: + field = message_descriptor.fields_by_name.get(name.lower(), None) + if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: + field = None + + if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and + field.message_type.name != name): + field = None + + if not field and not self.allow_unknown_field: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" has no field named "%s".' % + (message_descriptor.full_name, name)) + + if field: + if not self._allow_multiple_scalars and field.containing_oneof: + # Check if there's a different field set in this oneof. + # Note that we ignore the case if the same field was set before, and we + # apply _allow_multiple_scalars to non-scalar fields as well. + which_oneof = message.WhichOneof(field.containing_oneof.name) + if which_oneof is not None and which_oneof != field.name: + raise tokenizer.ParseErrorPreviousToken( + 'Field "%s" is specified along with field "%s", another member ' + 'of oneof "%s" for message type "%s".' % + (field.name, which_oneof, field.containing_oneof.name, + message_descriptor.full_name)) + + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + tokenizer.TryConsume(':') + merger = self._MergeMessageField + else: + tokenizer.Consume(':') + merger = self._MergeScalarField + + if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and + tokenizer.TryConsume('[')): + # Short repeated format, e.g. 
"foo: [1, 2, 3]" + if not tokenizer.TryConsume(']'): + while True: + merger(tokenizer, message, field) + if tokenizer.TryConsume(']'): + break + tokenizer.Consume(',') + + else: + merger(tokenizer, message, field) + + else: # Proto field is unknown. + assert (self.allow_unknown_extension or self.allow_unknown_field) + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + + def _ConsumeAnyTypeUrl(self, tokenizer): + """Consumes a google.protobuf.Any type URL and returns the type name.""" + # Consume "type.googleapis.com/". + prefix = [tokenizer.ConsumeIdentifier()] + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('/') + # Consume the fully-qualified type name. + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + return '.'.join(prefix), '.'.join(name) + + def _MergeMessageField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: The message of which field is a member. + field: The descriptor of the field to be merged. + + Raises: + ParseError: In case of text parsing problems. + """ + is_map_entry = _IsMapEntry(field) + + if tokenizer.TryConsume('<'): + end_token = '>' + else: + tokenizer.Consume('{') + end_token = '}' + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + sub_message = message.Extensions[field].add() + elif is_map_entry: + sub_message = getattr(message, field.name).GetEntryClass()() + else: + sub_message = getattr(message, field.name).add() + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + sub_message = message.Extensions[field] + else: + # Also apply _allow_multiple_scalars to message field. + # TODO(jieluo): Change to _allow_singular_overwrites. + if (not self._allow_multiple_scalars and + message.HasField(field.name)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + sub_message = getattr(message, field.name) + sub_message.SetInParent() + + while not tokenizer.TryConsume(end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) + self._MergeField(tokenizer, sub_message) + + if is_map_entry: + value_cpptype = field.message_type.fields_by_name['value'].cpp_type + if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + value = getattr(message, field.name)[sub_message.key] + value.CopyFrom(sub_message.value) + else: + getattr(message, field.name)[sub_message.key] = sub_message.value + + @staticmethod + def _IsProto3Syntax(message): + message_descriptor = message.DESCRIPTOR + return (hasattr(message_descriptor, 'syntax') and + message_descriptor.syntax == 'proto3') + + def _MergeScalarField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: A protocol message to record the data. + field: The descriptor of the field to be merged. 
+ + Raises: + ParseError: In case of text parsing problems. + RuntimeError: On runtime errors. + """ + _ = self.allow_unknown_extension + value = None + + if field.type in (descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_SFIXED32): + value = _ConsumeInt32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_SINT64, + descriptor.FieldDescriptor.TYPE_SFIXED64): + value = _ConsumeInt64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_FIXED32): + value = _ConsumeUint32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_FIXED64): + value = _ConsumeUint64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, + descriptor.FieldDescriptor.TYPE_DOUBLE): + value = tokenizer.ConsumeFloat() + elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: + value = tokenizer.ConsumeBool() + elif field.type == descriptor.FieldDescriptor.TYPE_STRING: + value = tokenizer.ConsumeString() + elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: + value = tokenizer.ConsumeByteString() + elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: + value = tokenizer.ConsumeEnum(field) + else: + raise RuntimeError('Unknown field type %d' % field.type) + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + message.Extensions[field].append(value) + else: + getattr(message, field.name).append(value) + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + not self._IsProto3Syntax(message) and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + else: + message.Extensions[field] = value + else: + duplicate_error = False + if not self._allow_multiple_scalars: + if self._IsProto3Syntax(message): + # Proto3 doesn't represent presence so we try best effort to check + # multiple scalars by compare to default values. + duplicate_error = bool(getattr(message, field.name)) + else: + duplicate_error = message.HasField(field.name) + + if duplicate_error: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + else: + setattr(message, field.name, value) + + +def _SkipFieldContents(tokenizer): + """Skips over contents (value or message) of a field. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + # Try to guess the type of this field. + # If this field is not a message, there should be a ":" between the + # field name and the field value and also the field value should not + # start with "{" or "<" which indicates the beginning of a message body. + # If there is no ":" or there is a "{" or "<" after ":", this field has + # to be a message or the input is ill-formed. + if tokenizer.TryConsume(':') and not tokenizer.LookingAt( + '{') and not tokenizer.LookingAt('<'): + _SkipFieldValue(tokenizer) + else: + _SkipFieldMessage(tokenizer) + + +def _SkipField(tokenizer): + """Skips over a complete field (name and value/message). + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + if tokenizer.TryConsume('['): + # Consume extension name. 
+ tokenizer.ConsumeIdentifier() + while tokenizer.TryConsume('.'): + tokenizer.ConsumeIdentifier() + tokenizer.Consume(']') + else: + tokenizer.ConsumeIdentifierOrNumber() + + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldMessage(tokenizer): + """Skips over a field message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + + if tokenizer.TryConsume('<'): + delimiter = '>' + else: + tokenizer.Consume('{') + delimiter = '}' + + while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): + _SkipField(tokenizer) + + tokenizer.Consume(delimiter) + + +def _SkipFieldValue(tokenizer): + """Skips over a field value. + + Args: + tokenizer: A tokenizer to parse the field name and values. + + Raises: + ParseError: In case an invalid field value is found. + """ + # String/bytes tokens can come in multiple adjacent string literals. + # If we can consume one, consume as many as we can. + if tokenizer.TryConsumeByteString(): + while tokenizer.TryConsumeByteString(): + pass + return + + if (not tokenizer.TryConsumeIdentifier() and + not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and + not tokenizer.TryConsumeFloat()): + raise ParseError('Invalid field value: ' + tokenizer.token) + + +class Tokenizer(object): + """Protocol buffer text representation tokenizer. + + This class handles the lower level string parsing by splitting it into + meaningful tokens. + + It was directly ported from the Java protocol buffer API. + """ + + _WHITESPACE = re.compile(r'\s+') + _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) + _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) + _TOKEN = re.compile('|'.join([ + r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier + r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number + ] + [ # quoted str for each quote mark + # Avoid backtracking! https://stackoverflow.com/a/844267 + r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) + for mark in _QUOTES + ])) + + _IDENTIFIER = re.compile(r'[^\d\W]\w*') + _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') + + def __init__(self, lines, skip_comments=True): + self._position = 0 + self._line = -1 + self._column = 0 + self._token_start = None + self.token = '' + self._lines = iter(lines) + self._current_line = '' + self._previous_line = 0 + self._previous_column = 0 + self._more_lines = True + self._skip_comments = skip_comments + self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT + or self._WHITESPACE) + self._SkipWhitespace() + self.NextToken() + + def LookingAt(self, token): + return self.token == token + + def AtEnd(self): + """Checks the end of the text was reached. + + Returns: + True iff the end was reached. + """ + return not self.token + + def _PopLine(self): + while len(self._current_line) <= self._column: + try: + self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False + return + else: + self._line += 1 + self._column = 0 + + def _SkipWhitespace(self): + while True: + self._PopLine() + match = self._whitespace_pattern.match(self._current_line, self._column) + if not match: + break + length = len(match.group(0)) + self._column += length + + def TryConsume(self, token): + """Tries to consume a given piece of text. + + Args: + token: Text to consume. + + Returns: + True iff the text was consumed. 
+ """ + if self.token == token: + self.NextToken() + return True + return False + + def Consume(self, token): + """Consumes a piece of text. + + Args: + token: Text to consume. + + Raises: + ParseError: If the text couldn't be consumed. + """ + if not self.TryConsume(token): + raise self.ParseError('Expected "%s".' % token) + + def ConsumeComment(self): + result = self.token + if not self._COMMENT.match(result): + raise self.ParseError('Expected comment.') + self.NextToken() + return result + + def ConsumeCommentOrTrailingComment(self): + """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" + + # Tokenizer initializes _previous_line and _previous_column to 0. As the + # tokenizer starts, it looks like there is a previous token on the line. + just_started = self._line == 0 and self._column == 0 + + before_parsing = self._previous_line + comment = self.ConsumeComment() + + # A trailing comment is a comment on the same line than the previous token. + trailing = (self._previous_line == before_parsing + and not just_started) + + return trailing, comment + + def TryConsumeIdentifier(self): + try: + self.ConsumeIdentifier() + return True + except ParseError: + return False + + def ConsumeIdentifier(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER.match(result): + raise self.ParseError('Expected identifier.') + self.NextToken() + return result + + def TryConsumeIdentifierOrNumber(self): + try: + self.ConsumeIdentifierOrNumber() + return True + except ParseError: + return False + + def ConsumeIdentifierOrNumber(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER_OR_NUMBER.match(result): + raise self.ParseError('Expected identifier or number, got %s.' % result) + self.NextToken() + return result + + def TryConsumeInteger(self): + try: + self.ConsumeInteger() + return True + except ParseError: + return False + + def ConsumeInteger(self): + """Consumes an integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer couldn't be consumed. + """ + try: + result = _ParseAbstractInteger(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeFloat(self): + try: + self.ConsumeFloat() + return True + except ParseError: + return False + + def ConsumeFloat(self): + """Consumes an floating point number. + + Returns: + The number parsed. + + Raises: + ParseError: If a floating point number couldn't be consumed. + """ + try: + result = ParseFloat(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeBool(self): + """Consumes a boolean value. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + try: + result = ParseBool(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeByteString(self): + try: + self.ConsumeByteString() + return True + except ParseError: + return False + + def ConsumeString(self): + """Consumes a string value. + + Returns: + The string parsed. + + Raises: + ParseError: If a string value couldn't be consumed. 
+ """ + the_bytes = self.ConsumeByteString() + try: + return str(the_bytes, 'utf-8') + except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): + """Consumes a byte array value. + + Returns: + The array parsed (as a string). + + Raises: + ParseError: If a byte array value couldn't be consumed. + """ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in _QUOTES: + the_list.append(self._ConsumeSingleByteString()) + return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. + + String literals (whether bytes or text) can come in multiple adjacent + tokens which are automatically concatenated, like in C or Python. This + method only consumes one token. + + Returns: + The token parsed. + Raises: + ParseError: When the wrong format data is found. + """ + text = self.token + if len(text) < 1 or text[0] not in _QUOTES: + raise self.ParseError('Expected string but found: %r' % (text,)) + + if len(text) < 2 or text[-1] != text[0]: + raise self.ParseError('String missing ending quote: %r' % (text,)) + + try: + result = text_encoding.CUnescape(text[1:-1]) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ParseErrorPreviousToken(self, message): + """Creates and *returns* a ParseError for the previously read token. + + Args: + message: A message to set for the exception. + + Returns: + A ParseError instance. + """ + return ParseError(message, self._previous_line + 1, + self._previous_column + 1) + + def ParseError(self, message): + """Creates and *returns* a ParseError for the current token.""" + return ParseError('\'' + self._current_line + '\': ' + message, + self._line + 1, self._column + 1) + + def _StringParseError(self, e): + return self.ParseError('Couldn\'t parse string: ' + str(e)) + + def NextToken(self): + """Reads the next meaningful token.""" + self._previous_line = self._line + self._previous_column = self._column + + self._column += len(self.token) + self._SkipWhitespace() + + if not self._more_lines: + self.token = '' + return + + match = self._TOKEN.match(self._current_line, self._column) + if not match and not self._skip_comments: + match = self._COMMENT.match(self._current_line, self._column) + if match: + token = match.group(0) + self.token = token + else: + self.token = self._current_line[self._column] + +# Aliased so it can still be accessed by current visibility violators. +# TODO(dbarnett): Migrate violators to textformat_tokenizer. +_Tokenizer = Tokenizer # pylint: disable=invalid-name + + +def _ConsumeInt32(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) + + +def _ConsumeUint32(tokenizer): + """Consumes an unsigned 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 32bit integer couldn't be consumed. 
+ """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) + + +def _TryConsumeInt64(tokenizer): + try: + _ConsumeInt64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeInt64(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) + + +def _TryConsumeUint64(tokenizer): + try: + _ConsumeUint64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeUint64(tokenizer): + """Consumes an unsigned 64bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 64bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) + + +def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): + """Consumes an integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer with given characteristics couldn't be consumed. + """ + try: + result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) + except ValueError as e: + raise tokenizer.ParseError(str(e)) + tokenizer.NextToken() + return result + + +def ParseInteger(text, is_signed=False, is_long=False): + """Parses an integer. + + Args: + text: The text to parse. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + result = _ParseAbstractInteger(text) + + # Check if the integer is sane. Exceptions handled by callers. + checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] + checker.CheckValue(result) + return result + + +def _ParseAbstractInteger(text): + """Parses an integer without checking size/signedness. + + Args: + text: The text to parse. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + orig_text = text + c_octal_match = re.match(r'(-?)0(\d+)$', text) + if c_octal_match: + # Python 3 no longer supports 0755 octal syntax without the 'o', so + # we always use the '0o' prefix for multi-digit numbers starting with 0. + text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) + try: + return int(text, 0) + except ValueError: + raise ValueError('Couldn\'t parse integer: %s' % orig_text) + + +def ParseFloat(text): + """Parse a floating point number. + + Args: + text: Text to parse. + + Returns: + The number parsed. + + Raises: + ValueError: If a floating point number couldn't be parsed. + """ + try: + # Assume Python compatible syntax. + return float(text) + except ValueError: + # Check alternative spellings. 
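+    # Accepted alternatives: 'inf'/'infinity' (optionally signed, optional
+    # trailing 'f'), 'nan'/'nanf', and C-style literals such as '1.0f'.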
+ if _FLOAT_INFINITY.match(text): + if text[0] == '-': + return float('-inf') + else: + return float('inf') + elif _FLOAT_NAN.match(text): + return float('nan') + else: + # assume '1.0f' format + try: + return float(text.rstrip('f')) + except ValueError: + raise ValueError('Couldn\'t parse float: %s' % text) + + +def ParseBool(text): + """Parse a boolean value. + + Args: + text: Text to parse. + + Returns: + Boolean values parsed + + Raises: + ValueError: If text is not a valid boolean. + """ + if text in ('true', 't', '1', 'True'): + return True + elif text in ('false', 'f', '0', 'False'): + return False + else: + raise ValueError('Expected "true" or "false".') + + +def ParseEnum(field, value): + """Parse an enum value. + + The value can be specified by a number (the enum value), or by + a string literal (the enum name). + + Args: + field: Enum field descriptor. + value: String value. + + Returns: + Enum value number. + + Raises: + ValueError: If the enum value could not be parsed. + """ + enum_descriptor = field.enum_type + try: + number = int(value, 0) + except ValueError: + # Identifier. + enum_value = enum_descriptor.values_by_name.get(value, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value named %s.' % + (enum_descriptor.full_name, value)) + else: + # Numeric value. + if hasattr(field.file, 'syntax'): + # Attribute is checked for compatibility. + if field.file.syntax == 'proto3': + # Proto3 accept numeric unknown enums. + return number + enum_value = enum_descriptor.values_by_number.get(number, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value with number %d.' % + (enum_descriptor.full_name, number)) + return enum_value.number diff --git a/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py new file mode 100644 index 0000000000..558d496941 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/timestamp_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/timestamp.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _TIMESTAMP._serialized_start=52 + _TIMESTAMP._serialized_end=95 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py new file mode 100644 index 0000000000..19903fb6b4 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/type_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SYNTAX._serialized_start=1413 + _SYNTAX._serialized_end=1459 + _TYPE._serialized_start=113 + _TYPE._serialized_end=328 + _FIELD._serialized_start=331 + _FIELD._serialized_end=1056 + _FIELD_KIND._serialized_start=610 + _FIELD_KIND._serialized_end=938 + _FIELD_CARDINALITY._serialized_start=940 + _FIELD_CARDINALITY._serialized_end=1056 + _ENUM._serialized_start=1059 + _ENUM._serialized_end=1265 + _ENUMVALUE._serialized_start=1267 + _ENUMVALUE._serialized_end=1350 + _OPTION._serialized_start=1352 + _OPTION._serialized_end=1411 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/__init__.py b/openpype/hosts/hiero/vendor/google/protobuf/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py new file mode 100644 index 0000000000..66a5836c82 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) + + DESCRIPTOR._options = None + _TESTBOOLMAP_BOOLMAPENTRY._options = None + _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' + _ENUMVALUE._serialized_start=1607 + _ENUMVALUE._serialized_end=1657 + _TESTFLAGSANDSTRINGS._serialized_start=62 + _TESTFLAGSANDSTRINGS._serialized_end=199 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 + _TESTBASE64BYTEARRAYS._serialized_start=201 + _TESTBASE64BYTEARRAYS._serialized_end=234 + _TESTJAVASCRIPTJSON._serialized_start=236 + _TESTJAVASCRIPTJSON._serialized_end=307 + _TESTJAVASCRIPTORDERJSON1._serialized_start=309 + _TESTJAVASCRIPTORDERJSON1._serialized_end=390 + _TESTJAVASCRIPTORDERJSON2._serialized_start=393 + _TESTJAVASCRIPTORDERJSON2._serialized_end=530 + _TESTLARGEINT._serialized_start=532 + _TESTLARGEINT._serialized_end=568 + _TESTNUMBERS._serialized_start=571 + _TESTNUMBERS._serialized_end=731 + _TESTNUMBERS_MYTYPE._serialized_start=691 + _TESTNUMBERS_MYTYPE._serialized_end=731 + _TESTCAMELCASE._serialized_start=733 + _TESTCAMELCASE._serialized_end=817 + _TESTBOOLMAP._serialized_start=819 + _TESTBOOLMAP._serialized_end=943 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 + _TESTRECURSION._serialized_start=945 + _TESTRECURSION._serialized_end=1024 + _TESTSTRINGMAP._serialized_start=1027 + _TESTSTRINGMAP._serialized_end=1161 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 + _TESTSTRINGSERIALIZER._serialized_start=1164 + _TESTSTRINGSERIALIZER._serialized_end=1360 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 + _TESTMESSAGEWITHEXTENSION._serialized_start=1362 + _TESTMESSAGEWITHEXTENSION._serialized_end=1398 + _TESTEXTENSION._serialized_start=1400 + _TESTEXTENSION._serialized_end=1522 + _TESTDEFAULTENUMVALUE._serialized_start=1524 + _TESTDEFAULTENUMVALUE._serialized_end=1605 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py new file mode 100644 index 0000000000..5498deafa9 --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/util/json_format_proto3_pb2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format_proto3.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' + _TESTMAP_BOOLMAPENTRY._options = None + _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT32MAPENTRY._options = None + _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT64MAPENTRY._options = None + _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT32MAPENTRY._options = None + _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT64MAPENTRY._options = None + _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_STRINGMAPENTRY._options = None + _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_BOOLMAPENTRY._options = None + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT32MAPENTRY._options = None + _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT64MAPENTRY._options = None + _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_UINT32MAPENTRY._options = None + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' + 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_STRINGMAPENTRY._options = None + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_MAPMAPENTRY._options = None + _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTBOOLVALUE_BOOLMAPENTRY._options = None + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' + _ENUMTYPE._serialized_start=4849 + _ENUMTYPE._serialized_end=4877 + _MESSAGETYPE._serialized_start=277 + _MESSAGETYPE._serialized_end=305 + _TESTMESSAGE._serialized_start=308 + _TESTMESSAGE._serialized_end=968 + _TESTONEOF._serialized_start=971 + _TESTONEOF._serialized_end=1239 + _TESTMAP._serialized_start=1242 + _TESTMAP._serialized_end=1851 + _TESTMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTMAP_INT32MAPENTRY._serialized_start=1605 + _TESTMAP_INT32MAPENTRY._serialized_end=1652 + _TESTMAP_INT64MAPENTRY._serialized_start=1654 + _TESTMAP_INT64MAPENTRY._serialized_end=1701 + _TESTMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP._serialized_start=1854 + _TESTNESTEDMAP._serialized_end=2627 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 + _TESTSTRINGMAP._serialized_start=2629 + _TESTSTRINGMAP._serialized_end=2752 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 + _TESTWRAPPER._serialized_start=2755 + _TESTWRAPPER._serialized_end=3761 + _TESTTIMESTAMP._serialized_start=3763 + _TESTTIMESTAMP._serialized_end=3873 + _TESTDURATION._serialized_start=3875 + _TESTDURATION._serialized_end=3982 + _TESTFIELDMASK._serialized_start=3984 + _TESTFIELDMASK._serialized_end=4042 + _TESTSTRUCT._serialized_start=4044 + _TESTSTRUCT._serialized_end=4145 + _TESTANY._serialized_start=4147 + _TESTANY._serialized_end=4239 + _TESTVALUE._serialized_start=4241 + _TESTVALUE._serialized_end=4339 + _TESTLISTVALUE._serialized_start=4341 + _TESTLISTVALUE._serialized_end=4451 + _TESTBOOLVALUE._serialized_start=4454 + _TESTBOOLVALUE._serialized_end=4591 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 + _TESTCUSTOMJSONNAME._serialized_start=4593 + _TESTCUSTOMJSONNAME._serialized_end=4636 + _TESTEXTENSIONS._serialized_start=4638 + _TESTEXTENSIONS._serialized_end=4712 + _TESTENUMVALUE._serialized_start=4715 + _TESTENUMVALUE._serialized_end=4847 +# @@protoc_insertion_point(module_scope) diff --git 
a/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py b/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py new file mode 100644 index 0000000000..e49eb4c15d --- /dev/null +++ b/openpype/hosts/hiero/vendor/google/protobuf/wrappers_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/wrappers.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DOUBLEVALUE._serialized_start=51 + _DOUBLEVALUE._serialized_end=79 + _FLOATVALUE._serialized_start=81 + _FLOATVALUE._serialized_end=108 + _INT64VALUE._serialized_start=110 + _INT64VALUE._serialized_end=137 + _UINT64VALUE._serialized_start=139 + _UINT64VALUE._serialized_end=167 + _INT32VALUE._serialized_start=169 + _INT32VALUE._serialized_end=196 + _UINT32VALUE._serialized_start=198 + _UINT32VALUE._serialized_end=226 + _BOOLVALUE._serialized_start=228 + _BOOLVALUE._serialized_end=254 + _STRINGVALUE._serialized_start=256 + _STRINGVALUE._serialized_end=284 + _BYTESVALUE._serialized_start=286 + _BYTESVALUE._serialized_end=313 +# @@protoc_insertion_point(module_scope) From 2687bbe2029353f32a05bd70321a9a65da990556 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 11:20:09 +0200 Subject: [PATCH 0612/2550] removed usage of HOST_WORKFILE_EXTENSIONS --- openpype/hosts/hiero/api/workio.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/hiero/api/workio.py b/openpype/hosts/hiero/api/workio.py index 394cb5e2ab..762e22804f 100644 --- a/openpype/hosts/hiero/api/workio.py +++ b/openpype/hosts/hiero/api/workio.py @@ -2,13 +2,12 @@ import os import hiero from openpype.api import Logger -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS log = Logger.get_logger(__name__) def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["hiero"] + return [".hrox"] def has_unsaved_changes(): From c238a9cbc15d516b1d2b662eb6b67aec5d341afa Mon Sep 17 00:00:00 2001 From: Simone 
Barbieri Date: Wed, 24 Aug 2022 10:51:12 +0100 Subject: [PATCH 0613/2550] Fix maya extractor for instance_name --- openpype/hosts/maya/plugins/publish/extract_layout.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 991217684a..92ca6c883f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -56,7 +56,7 @@ class ExtractLayout(openpype.api.Extractor): json_element = { "family": family, - "instance_name": cmds.getAttr(f"{container}.name"), + "instance_name": cmds.getAttr(f"{container}.namespace"), "representation": str(representation_id), "version": str(version_id) } From e3de88e4fe54f0e483d16841730883a6762a7f85 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 24 Aug 2022 10:52:22 +0100 Subject: [PATCH 0614/2550] Implemented loader for layouts for existing scenes --- .../plugins/load/load_layout_existing.py | 403 ++++++++++++++++++ 1 file changed, 403 insertions(+) create mode 100644 openpype/hosts/unreal/plugins/load/load_layout_existing.py diff --git a/openpype/hosts/unreal/plugins/load/load_layout_existing.py b/openpype/hosts/unreal/plugins/load/load_layout_existing.py new file mode 100644 index 0000000000..297e8d1a4c --- /dev/null +++ b/openpype/hosts/unreal/plugins/load/load_layout_existing.py @@ -0,0 +1,403 @@ +import json +from pathlib import Path + +import unreal +from unreal import EditorLevelLibrary + +from bson.objectid import ObjectId + +from openpype import pipeline +from openpype.pipeline import ( + discover_loader_plugins, + loaders_from_representation, + load_container, + AVALON_CONTAINER_ID, + legacy_io, +) +from openpype.hosts.unreal.api import plugin +from openpype.hosts.unreal.api import pipeline as upipeline + + +class ExistingLayoutLoader(plugin.Loader): + """ + Load Layout for an existing scene, and match the existing assets. 
+ """ + + families = ["layout"] + representations = ["json"] + + label = "Load Layout on Existing Scene" + icon = "code-fork" + color = "orange" + ASSET_ROOT = "/Game/OpenPype" + + @staticmethod + def _create_container( + asset_name, asset_dir, asset, representation, parent, family + ): + container_name = f"{asset_name}_CON" + + container = None + if not unreal.EditorAssetLibrary.does_asset_exist( + f"{asset_dir}/{container_name}" + ): + container = upipeline.create_container(container_name, asset_dir) + else: + ar = unreal.AssetRegistryHelpers.get_asset_registry() + obj = ar.get_asset_by_object_path( + f"{asset_dir}/{container_name}.{container_name}") + container = obj.get_asset() + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + # "loader": str(self.__class__.__name__), + "representation": representation, + "parent": parent, + "family": family + } + + upipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + return container.get_path_name() + + @staticmethod + def _get_current_level(): + ue_version = unreal.SystemLibrary.get_engine_version().split('.') + ue_major = ue_version[0] + + if ue_major == '4': + return EditorLevelLibrary.get_editor_world() + elif ue_major == '5': + return unreal.LevelEditorSubsystem().get_current_level() + + raise NotImplementedError( + f"Unreal version {ue_major} not supported") + + @staticmethod + def _transform_from_basis(transform, basis, conversion): + """Transform a transform from a basis to a new basis.""" + # Get the basis matrix + basis_matrix = unreal.Matrix( + basis[0], + basis[1], + basis[2], + basis[3] + ) + transform_matrix = unreal.Matrix( + transform[0], + transform[1], + transform[2], + transform[3] + ) + + new_transform = ( + basis_matrix.get_inverse() * transform_matrix * basis_matrix) + + return conversion.inverse() * new_transform.transform() + + def _get_transform(self, ext, import_data, lasset): + conversion = unreal.Matrix.IDENTITY.transform() + + # Check for the conversion settings. We cannot access + # the alembic conversion settings, so we assume that + # the maya ones have been applied. + if ext == '.fbx': + loc = import_data.import_translation + rot = import_data.import_rotation.to_vector() + scale = import_data.import_scale + conversion = unreal.Transform( + location=[loc.x, loc.y, loc.z], + rotation=[rot.x, rot.y, rot.z], + scale=[scale, scale, scale] + ) + elif ext == '.abc': + # This is the standard conversion settings for + # alembic files from Maya. 
+ conversion = unreal.Transform( + location=[0.0, 0.0, 0.0], + rotation=[0.0, 0.0, 0.0], + scale=[1.0, -1.0, 1.0] + ) + + transform = self._transform_from_basis( + lasset.get('transform_matrix'), + lasset.get('basis'), + conversion + ) + return transform + + @staticmethod + def _get_fbx_loader(loaders, family): + name = "" + if family == 'rig': + name = "SkeletalMeshFBXLoader" + elif family == 'model': + name = "StaticMeshFBXLoader" + elif family == 'camera': + name = "CameraLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + @staticmethod + def _get_abc_loader(loaders, family): + name = "" + if family == 'rig': + name = "SkeletalMeshAlembicLoader" + elif family == 'model': + name = "StaticMeshAlembicLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + def _load_asset(self, representation, version, instance_name, family): + valid_formats = ['fbx', 'abc'] + + repr_data = legacy_io.find_one({ + "type": "representation", + "parent": ObjectId(version), + "name": {"$in": valid_formats} + }) + repr_format = repr_data.get('name') + + all_loaders = discover_loader_plugins() + loaders = loaders_from_representation( + all_loaders, representation) + + loader = None + + if repr_format == 'fbx': + loader = self._get_fbx_loader(loaders, family) + elif repr_format == 'abc': + loader = self._get_abc_loader(loaders, family) + + if not loader: + raise AssertionError(f"No valid loader found for {representation}") + + assets = load_container( + loader, + representation, + namespace=instance_name + ) + + return assets + + def load(self, context, name, namespace, options): + print("Loading Layout and Match Assets") + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + asset = context.get('asset').get('name') + container_name = f"{asset}_{name}_CON" + + actors = EditorLevelLibrary.get_all_level_actors() + + with open(self.fname, "r") as fp: + data = json.load(fp) + + layout_data = [] + + # Get all the representations in the JSON from the database. + for element in data: + if element.get('representation'): + layout_data.append(( + pipeline.legacy_io.find_one({ + "_id": ObjectId(element.get('representation')) + }), + element + )) + + containers = [] + actors_matched = [] + + for (repr_data, lasset) in layout_data: + if not repr_data: + raise AssertionError("Representation not found") + if not (repr_data.get('data') or repr_data.get('data').get('path')): + raise AssertionError("Representation does not have path") + if not repr_data.get('context'): + raise AssertionError("Representation does not have context") + + # For every actor in the scene, check if it has a representation in + # those we got from the JSON. If so, create a container for it. + # Otherwise, remove it from the scene. + found = False + + for actor in actors: + if not actor.get_class().get_name() == 'StaticMeshActor': + continue + if actor in actors_matched: + continue + + # Get the original path of the file from which the asset has + # been imported. 
+ actor.set_actor_label(lasset.get('instance_name')) + smc = actor.get_editor_property('static_mesh_component') + mesh = smc.get_editor_property('static_mesh') + import_data = mesh.get_editor_property('asset_import_data') + filename = import_data.get_first_filename() + path = Path(filename) + + if not path.name in repr_data.get('data').get('path'): + continue + + asset_name = path.with_suffix('').name + mesh_path = Path(mesh.get_path_name()).parent.as_posix() + + # Create the container for the asset. + asset = repr_data.get('context').get('asset') + subset = repr_data.get('context').get('subset') + container = self._create_container( + f"{asset}_{subset}", mesh_path, asset, + repr_data.get('_id'), repr_data.get('parent'), + repr_data.get('context').get('family') + ) + containers.append(container) + + # Set the transform for the actor. + transform = self._get_transform( + path.suffix, import_data, lasset) + actor.set_actor_transform(transform, False, True) + + actors_matched.append(actor) + found = True + break + + # If an actor has not been found for this representation, + # we check if it has been loaded already by checking all the + # loaded containers. If so, we add it to the scene. Otherwise, + # we load it. + if found: + continue + + all_containers = upipeline.ls() + + loaded = False + + for container in all_containers: + repr = container.get('representation') + + if not repr == str(repr_data.get('_id')): + continue + + asset_dir = container.get('namespace') + + filter = unreal.ARFilter( + class_names=["StaticMesh"], + package_paths=[asset_dir], + recursive_paths=False) + assets = ar.get_assets(filter) + + for asset in assets: + obj = asset.get_asset() + actor = EditorLevelLibrary.spawn_actor_from_object( + obj, unreal.Vector(0.0, 0.0, 0.0) + ) + + actor.set_actor_label(lasset.get('instance_name')) + smc = actor.get_editor_property( + 'static_mesh_component') + mesh = smc.get_editor_property('static_mesh') + import_data = mesh.get_editor_property( + 'asset_import_data') + filename = import_data.get_first_filename() + path = Path(filename) + + transform = self._get_transform( + path.suffix, import_data, lasset) + + actor.set_actor_transform(transform, False, True) + + loaded = True + break + + # If the asset has not been loaded yet, we load it. + if loaded: + continue + + assets = self._load_asset( + lasset.get('representation'), + lasset.get('version'), + lasset.get('instance_name'), + lasset.get('family') + ) + + for asset in assets: + obj = ar.get_asset_by_object_path(asset).get_asset() + if not obj.get_class().get_name() == 'StaticMesh': + continue + actor = EditorLevelLibrary.spawn_actor_from_object( + obj, unreal.Vector(0.0, 0.0, 0.0) + ) + + actor.set_actor_label(lasset.get('instance_name')) + smc = actor.get_editor_property('static_mesh_component') + mesh = smc.get_editor_property('static_mesh') + import_data = mesh.get_editor_property('asset_import_data') + filename = import_data.get_first_filename() + path = Path(filename) + + transform = self._transform_from_basis( + lasset.get('transform_matrix'), + lasset.get('basis'), + unreal.Matrix.IDENTITY.transform() + ) + + actor.set_actor_transform(transform, False, True) + + break + + # Check if an actor was not matched to a representation. + # If so, remove it from the scene. 
+ for actor in actors: + if not actor.get_class().get_name() == 'StaticMeshActor': + continue + if actor not in actors_matched: + EditorLevelLibrary.destroy_actor(actor) + + curr_level = self._get_current_level() + + if not curr_level: + return + + curr_level_path = Path( + curr_level.get_outer().get_path_name()).parent.as_posix() + + if not unreal.EditorAssetLibrary.does_asset_exist( + f"{curr_level_path}/{container_name}" + ): + upipeline.create_container( + container=container_name, path=curr_level_path) + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "asset": asset, + "namespace": curr_level_path, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"], + "loaded_assets": containers + } + upipeline.imprint(f"{curr_level_path}/{container_name}", data) From d0036ac186fc72079bc4522baf9bce22d7e090fd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 11:59:27 +0200 Subject: [PATCH 0615/2550] close settings on tray exit to remove settings lock --- openpype/modules/settings_action.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 2b4b51e3ad..1e7eca4dec 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -23,6 +23,11 @@ class SettingsAction(OpenPypeModule, ITrayAction): """Initialization in tray implementation of ITrayAction.""" self.create_settings_window() + def tray_exit(self): + # Close settings UI to remove settings lock + if self.settings_window: + self.settings_window.close() + def on_action_trigger(self): """Implementation for action trigger of ITrayAction.""" self.show_settings_window() From a83f7b5811824daf2327734d8be382580707ed5e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 12:13:12 +0200 Subject: [PATCH 0616/2550] fix empty values from info --- openpype/tools/settings/settings/dialogs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/dialogs.py b/openpype/tools/settings/settings/dialogs.py index f25374a48c..b1b4daa1a0 100644 --- a/openpype/tools/settings/settings/dialogs.py +++ b/openpype/tools/settings/settings/dialogs.py @@ -39,7 +39,7 @@ class BaseInfoDialog(QtWidgets.QDialog): ): other_information_layout.addRow( label, - QtWidgets.QLabel(value, other_information) + QtWidgets.QLabel(value or "N/A", other_information) ) timestamp_label = QtWidgets.QLabel( From deec5e5c936abd1fca49cc586875e0d4f671e761 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Aug 2022 12:30:29 +0200 Subject: [PATCH 0617/2550] nuke: fixing setting colorspace --- openpype/hosts/nuke/api/lib.py | 38 ++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index a53d932db1..10ddfca51e 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1945,15 +1945,25 @@ class WorkfileSettings(object): if not write_node: return - # write all knobs to node - for knob in nuke_imageio_writes["knobs"]: - value = knob["value"] - if isinstance(value, six.text_type): - value = str(value) - if str(value).startswith("0x"): - value = int(value, 16) + try: + # write all knobs to node + for knob in nuke_imageio_writes["knobs"]: + value = 
knob["value"] + if isinstance(value, six.text_type): + value = str(value) + if str(value).startswith("0x"): + value = int(value, 16) - write_node[knob["name"]].setValue(value) + log.debug("knob: {}| value: {}".format( + knob["name"], value + )) + write_node[knob["name"]].setValue(value) + except TypeError: + log.warning( + "Legacy workflow didnt work, switching to current") + + set_node_knobs_from_settings( + write_node, nuke_imageio_writes["knobs"]) def set_reads_colorspace(self, read_clrs_inputs): """ Setting colorspace to Read nodes @@ -2010,12 +2020,14 @@ class WorkfileSettings(object): # get imageio nuke_colorspace = get_nuke_imageio_settings() + log.info("Setting colorspace to workfile...") try: self.set_root_colorspace(nuke_colorspace["workfile"]) except AttributeError: msg = "set_colorspace(): missing `workfile` settings in template" nuke.message(msg) + log.info("Setting colorspace to viewers...") try: self.set_viewers_colorspace(nuke_colorspace["viewer"]) except AttributeError: @@ -2023,24 +2035,18 @@ class WorkfileSettings(object): nuke.message(msg) log.error(msg) + log.info("Setting colorspace to write nodes...") try: self.set_writes_colorspace() except AttributeError as _error: nuke.message(_error) log.error(_error) + log.info("Setting colorspace to read nodes...") read_clrs_inputs = nuke_colorspace["regexInputs"].get("inputs", []) if read_clrs_inputs: self.set_reads_colorspace(read_clrs_inputs) - try: - for key in nuke_colorspace: - log.debug("Preset's colorspace key: {}".format(key)) - except TypeError: - msg = "Nuke is not in templates! Contact your supervisor!" - nuke.message(msg) - log.error(msg) - def reset_frame_range_handles(self): """Set frame range to current asset""" From b3cd5e1ea060a533d983d7cfb4b231f240430226 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 24 Aug 2022 11:46:18 +0100 Subject: [PATCH 0618/2550] Hound fixes --- openpype/hosts/unreal/plugins/load/load_layout_existing.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout_existing.py b/openpype/hosts/unreal/plugins/load/load_layout_existing.py index 297e8d1a4c..c20af950d9 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout_existing.py +++ b/openpype/hosts/unreal/plugins/load/load_layout_existing.py @@ -231,7 +231,8 @@ class ExistingLayoutLoader(plugin.Loader): for (repr_data, lasset) in layout_data: if not repr_data: raise AssertionError("Representation not found") - if not (repr_data.get('data') or repr_data.get('data').get('path')): + if not (repr_data.get('data') or + repr_data.get('data').get('path')): raise AssertionError("Representation does not have path") if not repr_data.get('context'): raise AssertionError("Representation does not have context") @@ -256,7 +257,7 @@ class ExistingLayoutLoader(plugin.Loader): filename = import_data.get_first_filename() path = Path(filename) - if not path.name in repr_data.get('data').get('path'): + if path.name not in repr_data.get('data').get('path'): continue asset_name = path.with_suffix('').name From aaa95efb3c80080166de20c9f48bbfe6a8c11548 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Aug 2022 13:45:53 +0200 Subject: [PATCH 0619/2550] Removed submodule vendor/configs/OpenColorIO-Configs --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- 
a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 3ab974f98c96c823953e349fe499b48c5e4a0a7c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Aug 2022 13:46:19 +0200 Subject: [PATCH 0620/2550] Removed submodule vendor/configs/OpenColorIO-Configs --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 4c16b8930fd38d6c9f792c5983d6e2e88f1587ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 24 Aug 2022 14:14:47 +0200 Subject: [PATCH 0621/2550] Update openpype/plugins/publish/collect_audio.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/plugins/publish/collect_audio.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index e2fb766ec4..0825c281ad 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -66,7 +66,10 @@ class CollectAudio(pyblish.api.InstancePlugin): self.log.info("Audio Data added to instance ...") def _get_repre_doc(self, instance): - cache = instance.context.data.get("__cache_asset_audio", {}) + cache = instance.context.data.get("__cache_asset_audio") + if cache is None: + cache = {} + instance.context.data["__cache_asset_audio"] = cache asset_name = instance.data["asset"] # first try to get it from cache From 6ff16a6d25f2b81f5634d526059734225c556ede Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 24 Aug 2022 14:14:58 +0200 Subject: [PATCH 0622/2550] Update openpype/plugins/publish/collect_audio.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/plugins/publish/collect_audio.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 0825c281ad..7d53b24e54 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -101,6 +101,5 @@ class CollectAudio(pyblish.api.InstancePlugin): # update cache cache[asset_name] = repre_doc - instance.context.data["__cache_asset_audio"].update(cache) return repre_doc From da5353aa2dd1d9e774a72dbc57a6ac5d7368afdd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Aug 2022 14:20:35 +0200 Subject: [PATCH 0623/2550] git: update gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index ea5b20eb69..4b773e97ed 100644 --- a/.gitignore +++ b/.gitignore @@ -107,3 +107,6 @@ website/.docusaurus mypy.ini tools/run_eventserver.* + +# Developer tools +tools/dev_* From 0f95f87d773ddcbe979fe28d5f0196f1befad38e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 24 Aug 2022 14:59:14 +0200 Subject: [PATCH 0624/2550] More draft refactoring - still not functional (WIP commit for my own sanity) --- .../plugins/publish/submit_maya_deadline.py | 185 ++++-------------- 1 file changed, 35 insertions(+), 150 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 
6dfa48a9f8..5a7d0b98c6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -31,7 +31,6 @@ import clique from maya import cmds -from openpype.hosts.maya.api import lib from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline @@ -87,11 +86,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - filepath = context.data["currentFile"] - filename = os.path.basename(filepath) + # Always use the original work file name for the Job name even when + # rendering is done from the published Work File. The original work + # file name is clearer because it can also have subversion strings, + # etc. which are stripped for the published file. + src_filepath = context.data["currentFile"] + src_filename = os.path.basename(src_filepath) - job_info.Name = "%s - %s" % (filename, instance.name) - job_info.BatchName = filename + job_info.Name = "%s - %s" % (src_filename, instance.name) + job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") job_info.UserName = context.data.get( "deadlineUser", getpass.getuser()) @@ -116,9 +119,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if self.limit_groups: job_info.LimitGroups = ",".join(self.limit_groups) - self.payload_skeleton["JobInfo"]["Name"] = jobname - self.payload_skeleton["JobInfo"]["BatchName"] = src_filename - # Optional, enable double-click to preview rendered # frames from Deadline Monitor self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ @@ -227,11 +227,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return attr.asdict(plugin_info) def process_submission(self): - # Override to NOT submit by default when calling super process() method - pass - def process(self, instance): - super(MayaSubmitDeadline, self).process(instance) + instance = self._instance + context = instance.context + + # Generated by AbstractSubmitDeadline. 
The `job_info`, `plugin_info` + # and `aux_files` are the skeleton payloads that are the basis for + # all the maya submissions + job_info = self.job_info + plugin_info = self.plugin_info + aux_files = self.aux_files + filepath = self.scene_path # publish if `use_publish` else workfile # TODO: Avoid the need for this logic here, needed for submit publish # Store output dir for unified publisher (filesequence) @@ -241,21 +247,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.limit_groups = self.limit - context = instance.context - workspace = context.data["workspaceDir"] - - filepath = None - patches = ( - context.data["project_settings"].get( - "deadline", {}).get( - "publish", {}).get( - "MayaSubmitDeadline", {}).get( - "scene_patches", {}) - ) + # Patch workfile (only when use_published is enabled) + if self.use_published: + patches = ( + context.data["project_settings"].get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "scene_patches", {}) + ) + self._patch_workfile(filepath, patches) # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") # todo: on self.use_published replace path for publishRenderMetadataFolder - # todo: on self.use_published apply scene patches to workfile instance # rep = i.data.get("representations")[0].get("name") # if instance.data.get("publishRenderMetadataFolder"): @@ -270,9 +274,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # patched_file = self._patch_workfile(filepath, patches) # patched_files.append(patched_file) - filepath = self.scene_path # collect by super().process - # Gather needed data ------------------------------------------------ + workspace = context.data["workspaceDir"] default_render_file = instance.context.data.get('project_settings')\ .get('maya')\ .get('RenderSettings')\ @@ -281,14 +284,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): dirname = os.path.join(workspace, default_render_file) renderlayer = instance.data['setMembers'] # rs_beauty - # Always use the original work file name for the Job name even when - # rendering is done from the published Work File. The original work - # file name is clearer because it can also have subversion strings, - # etc. which are stripped for the published file. 
- src_filename = os.path.basename(context.data["currentFile"]) - jobname = "%s - %s" % (src_filename, instance.name) - # Get the variables depending on the renderer + # TODO: Find replacement logic for `get_renderer_variables` through + # what is collected for the render or is implemented in maya + # api `lib_renderproducts` render_variables = get_renderer_variables(renderlayer, dirname) filename_0 = render_variables["filename_0"] if self.use_published: @@ -842,8 +841,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): str: Patched file path or None """ - if os.path.splitext(file)[1].lower() != ".ma" or not patches: - return None + if not patches or os.path.splitext(file)[1].lower() != ".ma": + return compiled_regex = [re.compile(p["regex"]) for p in patches] with open(file, "r+") as pf: @@ -931,7 +930,7 @@ def _format_tiles( os.path.basename(filename) ) out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + out["PluginInfo"]["RegionPrefix{}".format(tile)] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 @@ -951,117 +950,3 @@ def _format_tiles( tile += 1 return out, cfg - - -def get_renderer_variables(renderlayer, root): - """Retrieve the extension which has been set in the VRay settings. - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. - root (str): base path to render - - Returns: - dict - - """ - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings( - fullPath=True, - gin="#" * int(padding), - lut=True, - layer=renderlayer or lib.get_current_renderlayer())[0] - filename_0 = re.sub('_', '_beauty', - filename_0, flags=re.IGNORECASE) - prefix_attr = "defaultRenderGlobals.imageFilePrefix" - - scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) - - if renderer == "vray": - renderlayer = renderlayer.split("_")[-1] - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - if extension in ["exr (multichannel)", "exr (deep)"]: - extension = "exr" - - prefix_attr = "vraySettings.fileNamePrefix" - filename_prefix = cmds.getAttr(prefix_attr) - # we need to determine path for vray as maya `renderSettings` query - # does not work for vray. - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = "{}.{}.{}".format( - filename_0, "#" * int(padding), extension) - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "renderman": - prefix_attr = "rmanGlobals.imageFileFormat" - # NOTE: This is guessing extensions from renderman display types. 
- # Some of them are just framebuffers, d_texture format can be - # set in display setting. We set those now to None, but it - # should be handled more gracefully. - display_types = { - "d_deepexr": "exr", - "d_it": None, - "d_null": None, - "d_openexr": "exr", - "d_png": "png", - "d_pointcloud": "ptc", - "d_targa": "tga", - "d_texture": None, - "d_tiff": "tif" - } - - extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0], - "exr" - ) or "exr" - - filename_prefix = "{}/{}".format( - cmds.getAttr("rmanGlobals.imageOutputDir"), - cmds.getAttr("rmanGlobals.imageFileFormat") - ) - - renderlayer = renderlayer.split("_")[-1] - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "redshift": - # mapping redshift extension dropdown values to strings - ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] - extension = ext_mapping[ - cmds.getAttr("redshiftOptions.imageFormat") - ] - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. - filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - - filename_prefix = cmds.getAttr(prefix_attr) - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - From 2b9d3bded2fb9b2af2a589c186760f884fdcb752 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 15:00:43 +0200 Subject: [PATCH 0625/2550] created nuke module --- openpype/hosts/nuke/__init__.py | 47 +++++----------------------- openpype/hosts/nuke/module.py | 54 +++++++++++++++++++++++++++++++++ 2 files changed, 62 insertions(+), 39 deletions(-) create mode 100644 openpype/hosts/nuke/module.py diff --git a/openpype/hosts/nuke/__init__.py b/openpype/hosts/nuke/__init__.py index 134a6621c4..718307583e 100644 --- a/openpype/hosts/nuke/__init__.py +++ b/openpype/hosts/nuke/__init__.py @@ -1,41 +1,10 @@ -import os -import platform +from .module import ( + NUKE_ROOT_DIR, + NukeModule, +) -def add_implementation_envs(env, _app): - # Add requirements to NUKE_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - new_nuke_paths = [ - os.path.join(pype_root, "openpype", "hosts", "nuke", "startup") - ] - old_nuke_path = env.get("NUKE_PATH") or "" - for path in old_nuke_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_nuke_paths: - new_nuke_paths.append(norm_path) - - env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +__all__ = ( + "NUKE_ROOT_DIR", + "NukeModule", +) diff --git 
a/openpype/hosts/nuke/module.py b/openpype/hosts/nuke/module.py new file mode 100644 index 0000000000..a50444f817 --- /dev/null +++ b/openpype/hosts/nuke/module.py @@ -0,0 +1,54 @@ +import os +import platform +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class NukeModule(OpenPypeModule, IHostModule): + name = "nuke" + host_name = "nuke" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to NUKE_PATH + new_nuke_paths = [ + os.path.join(NUKE_ROOT_DIR, "startup") + ] + old_nuke_path = env.get("NUKE_PATH") or "" + for path in old_nuke_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_nuke_paths: + new_nuke_paths.append(norm_path) + + env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + # Try to add QuickTime to PATH + quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" + if platform.system() == "windows" and os.path.exists(quick_time_path): + path_value = env.get("PATH") or "" + path_paths = [ + path + for path in path_value.split(os.pathsep) + if path + ] + path_paths.append(quick_time_path) + env["PATH"] = os.pathsep.join(path_paths) + + def get_workfile_extensions(self): + return [".nk"] From 9b4654a1c8b8f2dfdbd840553a69781282a496ff Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 15:01:49 +0200 Subject: [PATCH 0626/2550] removed usage of HOST_WORKFILE_EXTENSIONS in nuke --- openpype/hosts/nuke/api/workio.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/workio.py b/openpype/hosts/nuke/api/workio.py index 68fcb0927f..65b86bf01b 100644 --- a/openpype/hosts/nuke/api/workio.py +++ b/openpype/hosts/nuke/api/workio.py @@ -2,11 +2,9 @@ import os import nuke -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["nuke"] + return [".nk"] def has_unsaved_changes(): From 013e37b44d3c0937ff748a6bcee5d42eef62219a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 15:02:04 +0200 Subject: [PATCH 0627/2550] added protobuf into nuke vendor --- openpype/hosts/nuke/module.py | 9 + .../nuke/vendor/google/protobuf/__init__.py | 33 + .../nuke/vendor/google/protobuf/any_pb2.py | 26 + .../nuke/vendor/google/protobuf/api_pb2.py | 32 + .../google/protobuf/compiler/__init__.py | 0 .../google/protobuf/compiler/plugin_pb2.py | 35 + .../nuke/vendor/google/protobuf/descriptor.py | 1224 +++++++++++ .../google/protobuf/descriptor_database.py | 177 ++ .../vendor/google/protobuf/descriptor_pb2.py | 1925 +++++++++++++++++ .../vendor/google/protobuf/descriptor_pool.py | 1295 +++++++++++ .../vendor/google/protobuf/duration_pb2.py | 26 + .../nuke/vendor/google/protobuf/empty_pb2.py | 26 + .../vendor/google/protobuf/field_mask_pb2.py | 26 + .../google/protobuf/internal/__init__.py | 0 .../protobuf/internal/_parameterized.py | 443 ++++ .../protobuf/internal/api_implementation.py | 112 + .../google/protobuf/internal/builder.py | 130 ++ .../google/protobuf/internal/containers.py | 710 ++++++ .../google/protobuf/internal/decoder.py | 1029 +++++++++ .../google/protobuf/internal/encoder.py | 829 
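The NUKE_PATH handling in `add_implementation_envs` above reduces to: put the OpenPype startup directory first, then keep any pre-existing entries in their original order, normalized, de-duplicated and with empty items dropped. A stand-alone sketch of that ordering, using a hypothetical `prepend_path` helper (the studio paths in the example are made up):

import os


def prepend_path(env_value, new_entry):
    # Mirrors the NUKE_PATH logic above: new entry first, existing entries
    # kept in order, empty items skipped, duplicates (after normpath) removed.
    paths = [new_entry]
    for path in (env_value or "").split(os.pathsep):
        if not path:
            continue
        norm_path = os.path.normpath(path)
        if norm_path not in paths:
            paths.append(norm_path)
    return os.pathsep.join(paths)


# The startup dir ends up first; the studio path is preserved exactly once.
startup = os.path.join("openpype", "hosts", "nuke", "startup")
existing = os.pathsep.join(["/studio/nuke/tools", "/studio/nuke/tools", ""])
print(prepend_path(existing, startup))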
+++++++ .../protobuf/internal/enum_type_wrapper.py | 124 ++ .../protobuf/internal/extension_dict.py | 213 ++ .../protobuf/internal/message_listener.py | 78 + .../internal/message_set_extensions_pb2.py | 36 + .../internal/missing_enum_values_pb2.py | 37 + .../internal/more_extensions_dynamic_pb2.py | 29 + .../protobuf/internal/more_extensions_pb2.py | 41 + .../protobuf/internal/more_messages_pb2.py | 556 +++++ .../protobuf/internal/no_package_pb2.py | 27 + .../protobuf/internal/python_message.py | 1539 +++++++++++++ .../google/protobuf/internal/type_checkers.py | 435 ++++ .../protobuf/internal/well_known_types.py | 878 ++++++++ .../google/protobuf/internal/wire_format.py | 268 +++ .../vendor/google/protobuf/json_format.py | 912 ++++++++ .../nuke/vendor/google/protobuf/message.py | 424 ++++ .../vendor/google/protobuf/message_factory.py | 185 ++ .../vendor/google/protobuf/proto_builder.py | 134 ++ .../vendor/google/protobuf/pyext/__init__.py | 0 .../google/protobuf/pyext/cpp_message.py | 65 + .../google/protobuf/pyext/python_pb2.py | 34 + .../nuke/vendor/google/protobuf/reflection.py | 95 + .../nuke/vendor/google/protobuf/service.py | 228 ++ .../google/protobuf/service_reflection.py | 295 +++ .../google/protobuf/source_context_pb2.py | 26 + .../nuke/vendor/google/protobuf/struct_pb2.py | 36 + .../vendor/google/protobuf/symbol_database.py | 194 ++ .../vendor/google/protobuf/text_encoding.py | 110 + .../vendor/google/protobuf/text_format.py | 1795 +++++++++++++++ .../vendor/google/protobuf/timestamp_pb2.py | 26 + .../nuke/vendor/google/protobuf/type_pb2.py | 42 + .../vendor/google/protobuf/util/__init__.py | 0 .../google/protobuf/util/json_format_pb2.py | 72 + .../protobuf/util/json_format_proto3_pb2.py | 129 ++ .../vendor/google/protobuf/wrappers_pb2.py | 42 + 54 files changed, 17192 insertions(+) create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/__init__.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/compiler/__init__.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/descriptor.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/__init__.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/encoder.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/enum_type_wrapper.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py create 
mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/json_format.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/message.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/message_factory.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/pyext/__init__.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/reflection.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/service.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/text_format.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/util/__init__.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py create mode 100644 openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py diff --git a/openpype/hosts/nuke/module.py b/openpype/hosts/nuke/module.py index a50444f817..e4706a36cb 100644 --- a/openpype/hosts/nuke/module.py +++ b/openpype/hosts/nuke/module.py @@ -30,6 +30,15 @@ class NukeModule(OpenPypeModule, IHostModule): env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Add vendor to PYTHONPATH + python_path = env["PYTHONPATH"] + python_path_parts = [] + if python_path: + python_path_parts = python_path.split(os.pathsep) + vendor_path = os.path.join(NUKE_ROOT_DIR, "vendor") + python_path_parts.insert(0, vendor_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + # Set default values if are not already set via settings defaults = { "LOGLEVEL": "DEBUG" diff --git a/openpype/hosts/nuke/vendor/google/protobuf/__init__.py 
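Because the vendor directory is inserted at index 0 of PYTHONPATH, the bundled google.protobuf (version 3.20.1 per its __init__.py below) is expected to take precedence over any protobuf copy already present in Nuke's Python. A quick smoke test one might run in Nuke's Script Editor once the environment has been prepared this way; the expected file location is an assumption:

import google.protobuf
print(google.protobuf.__version__)   # expected: 3.20.1, the vendored release
print(google.protobuf.__file__)      # should resolve inside .../hosts/nuke/vendor

# Round-trip a well-known type to confirm the pure-Python runtime works.
from google.protobuf import any_pb2
payload = any_pb2.Any(type_url="type.googleapis.com/Example", value=b"\x01\x02")
assert any_pb2.Any.FromString(payload.SerializeToString()) == payload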
b/openpype/hosts/nuke/vendor/google/protobuf/__init__.py new file mode 100644 index 0000000000..03f3b29ee7 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/__init__.py @@ -0,0 +1,33 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Copyright 2007 Google Inc. All Rights Reserved. + +__version__ = '3.20.1' diff --git a/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py new file mode 100644 index 0000000000..9121193d11 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/any_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/any.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _ANY._serialized_start=46 + _ANY._serialized_end=84 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py new file mode 100644 index 0000000000..1721b10a75 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/api_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/api.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _API._serialized_start=113 + _API._serialized_end=370 + _METHOD._serialized_start=373 + _METHOD._serialized_end=586 + _MIXIN._serialized_start=588 + _MIXIN._serialized_end=623 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/compiler/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/compiler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 0000000000..715a891370 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/compiler/plugin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' + _VERSION._serialized_start=101 + _VERSION._serialized_end=171 + _CODEGENERATORREQUEST._serialized_start=174 + _CODEGENERATORREQUEST._serialized_end=360 + _CODEGENERATORRESPONSE._serialized_start=363 + 
_CODEGENERATORRESPONSE._serialized_end=684 + _CODEGENERATORRESPONSE_FILE._serialized_start=499 + _CODEGENERATORRESPONSE_FILE._serialized_end=626 + _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 + _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py new file mode 100644 index 0000000000..ad70be9a11 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor.py @@ -0,0 +1,1224 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Descriptors essentially contain exactly the information found in a .proto +file, in types that make this information accessible in Python. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import threading +import warnings + +from google.protobuf.internal import api_implementation + +_USE_C_DESCRIPTORS = False +if api_implementation.Type() == 'cpp': + # Used by MakeDescriptor in cpp mode + import binascii + import os + from google.protobuf.pyext import _message + _USE_C_DESCRIPTORS = True + + +class Error(Exception): + """Base error for this module.""" + + +class TypeTransformationError(Error): + """Error transforming between python proto type and corresponding C++ type.""" + + +if _USE_C_DESCRIPTORS: + # This metaclass allows to override the behavior of code like + # isinstance(my_descriptor, FieldDescriptor) + # and make it return True when the descriptor is an instance of the extension + # type written in C++. + class DescriptorMetaclass(type): + def __instancecheck__(cls, obj): + if super(DescriptorMetaclass, cls).__instancecheck__(obj): + return True + if isinstance(obj, cls._C_DESCRIPTOR_CLASS): + return True + return False +else: + # The standard metaclass; nothing changes. 
+ DescriptorMetaclass = type + + +class _Lock(object): + """Wrapper class of threading.Lock(), which is allowed by 'with'.""" + + def __new__(cls): + self = object.__new__(cls) + self._lock = threading.Lock() # pylint: disable=protected-access + return self + + def __enter__(self): + self._lock.acquire() + + def __exit__(self, exc_type, exc_value, exc_tb): + self._lock.release() + + +_lock = threading.Lock() + + +def _Deprecated(name): + if _Deprecated.count > 0: + _Deprecated.count -= 1 + warnings.warn( + 'Call to deprecated create function %s(). Note: Create unlinked ' + 'descriptors is going to go away. Please use get/find descriptors from ' + 'generated code or query the descriptor_pool.' + % name, + category=DeprecationWarning, stacklevel=3) + + +# Deprecated warnings will print 100 times at most which should be enough for +# users to notice and do not cause timeout. +_Deprecated.count = 100 + + +_internal_create_key = object() + + +class DescriptorBase(metaclass=DescriptorMetaclass): + + """Descriptors base class. + + This class is the base of all descriptor classes. It provides common options + related functionality. + + Attributes: + has_options: True if the descriptor has non-default options. Usually it + is not necessary to read this -- just call GetOptions() which will + happily return the default instance. However, it's sometimes useful + for efficiency, and also useful inside the protobuf implementation to + avoid some bootstrapping issues. + """ + + if _USE_C_DESCRIPTORS: + # The class, or tuple of classes, that are considered as "virtual + # subclasses" of this descriptor class. + _C_DESCRIPTOR_CLASS = () + + def __init__(self, options, serialized_options, options_class_name): + """Initialize the descriptor given its options message and the name of the + class of the options message. The name of the class is required in case + the options message is None and has to be created. + """ + self._options = options + self._options_class_name = options_class_name + self._serialized_options = serialized_options + + # Does this descriptor have non-default options? + self.has_options = (options is not None) or (serialized_options is not None) + + def _SetOptions(self, options, options_class_name): + """Sets the descriptor's options + + This function is used in generated proto2 files to update descriptor + options. It must not be used outside proto2. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def GetOptions(self): + """Retrieves descriptor options. + + This method returns the options set or creates the default options for the + descriptor. + """ + if self._options: + return self._options + + from google.protobuf import descriptor_pb2 + try: + options_class = getattr(descriptor_pb2, + self._options_class_name) + except AttributeError: + raise RuntimeError('Unknown options class name %s!' % + (self._options_class_name)) + + with _lock: + if self._serialized_options is None: + self._options = options_class() + else: + self._options = _ParseOptions(options_class(), + self._serialized_options) + + return self._options + + +class _NestedDescriptorBase(DescriptorBase): + """Common class for descriptors that can be nested.""" + + def __init__(self, options, options_class_name, name, full_name, + file, containing_type, serialized_start=None, + serialized_end=None, serialized_options=None): + """Constructor. 
+ + Args: + options: Protocol message options or None + to use default message options. + options_class_name (str): The class name of the above options. + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + file (FileDescriptor): Reference to file info. + containing_type: if provided, this is a nested descriptor, with this + descriptor as parent, otherwise None. + serialized_start: The start index (inclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_end: The end index (exclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_options: Protocol message serialized options or None. + """ + super(_NestedDescriptorBase, self).__init__( + options, serialized_options, options_class_name) + + self.name = name + # TODO(falk): Add function to calculate full_name instead of having it in + # memory? + self.full_name = full_name + self.file = file + self.containing_type = containing_type + + self._serialized_start = serialized_start + self._serialized_end = serialized_end + + def CopyToProto(self, proto): + """Copies this to the matching proto in descriptor_pb2. + + Args: + proto: An empty proto instance from descriptor_pb2. + + Raises: + Error: If self couldn't be serialized, due to to few constructor + arguments. + """ + if (self.file is not None and + self._serialized_start is not None and + self._serialized_end is not None): + proto.ParseFromString(self.file.serialized_pb[ + self._serialized_start:self._serialized_end]) + else: + raise Error('Descriptor does not contain serialization.') + + +class Descriptor(_NestedDescriptorBase): + + """Descriptor for a protocol message type. + + Attributes: + name (str): Name of this protocol message type. + full_name (str): Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + containing_type (Descriptor): Reference to the descriptor of the type + containing us, or None if this is top-level. + fields (list[FieldDescriptor]): Field descriptors for all fields in + this type. + fields_by_number (dict(int, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed + by "number" attribute in each FieldDescriptor. + fields_by_name (dict(str, FieldDescriptor)): Same + :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by + "name" attribute in each :class:`FieldDescriptor`. + nested_types (list[Descriptor]): Descriptor references + for all protocol message types nested within this one. + nested_types_by_name (dict(str, Descriptor)): Same Descriptor + objects as in :attr:`nested_types`, but indexed by "name" attribute + in each Descriptor. + enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references + for all enums contained within this type. + enum_types_by_name (dict(str, EnumDescriptor)): Same + :class:`EnumDescriptor` objects as in :attr:`enum_types`, but + indexed by "name" attribute in each EnumDescriptor. + enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping + from enum value name to :class:`EnumValueDescriptor` for that value. + extensions (list[FieldDescriptor]): All extensions defined directly + within this message type (NOT within a nested type). 
+ extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor + objects as :attr:`extensions`, but indexed by "name" attribute of each + FieldDescriptor. + is_extendable (bool): Does this type define any extension ranges? + oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields + in this message. + oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in + :attr:`oneofs`, but indexed by "name" attribute. + file (FileDescriptor): Reference to file descriptor. + + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.Descriptor + + def __new__( + cls, + name=None, + full_name=None, + filename=None, + containing_type=None, + fields=None, + nested_types=None, + enum_types=None, + extensions=None, + options=None, + serialized_options=None, + is_extendable=True, + extension_ranges=None, + oneofs=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + syntax=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindMessageTypeByName(full_name) + + # NOTE(tmarek): The file argument redefining a builtin is nothing we can + # fix right now since we don't know how many clients already rely on the + # name of the argument. + def __init__(self, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + serialized_options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin + syntax=None, create_key=None): + """Arguments to __init__() are as described in the description + of Descriptor fields above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('Descriptor') + + super(Descriptor, self).__init__( + options, 'MessageOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + # We have fields in addition to fields_by_name and fields_by_number, + # so that: + # 1. Clients can index fields by "order in which they're listed." + # 2. Clients can easily iterate over all fields with the terse + # syntax: for f in descriptor.fields: ... 
+ self.fields = fields + for field in self.fields: + field.containing_type = self + self.fields_by_number = dict((f.number, f) for f in fields) + self.fields_by_name = dict((f.name, f) for f in fields) + self._fields_by_camelcase_name = None + + self.nested_types = nested_types + for nested_type in nested_types: + nested_type.containing_type = self + self.nested_types_by_name = dict((t.name, t) for t in nested_types) + + self.enum_types = enum_types + for enum_type in self.enum_types: + enum_type.containing_type = self + self.enum_types_by_name = dict((t.name, t) for t in enum_types) + self.enum_values_by_name = dict( + (v.name, v) for t in enum_types for v in t.values) + + self.extensions = extensions + for extension in self.extensions: + extension.extension_scope = self + self.extensions_by_name = dict((f.name, f) for f in extensions) + self.is_extendable = is_extendable + self.extension_ranges = extension_ranges + self.oneofs = oneofs if oneofs is not None else [] + self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) + for oneof in self.oneofs: + oneof.containing_type = self + self.syntax = syntax or "proto2" + + @property + def fields_by_camelcase_name(self): + """Same FieldDescriptor objects as in :attr:`fields`, but indexed by + :attr:`FieldDescriptor.camelcase_name`. + """ + if self._fields_by_camelcase_name is None: + self._fields_by_camelcase_name = dict( + (f.camelcase_name, f) for f in self.fields) + return self._fields_by_camelcase_name + + def EnumValueName(self, enum, value): + """Returns the string name of an enum value. + + This is just a small helper method to simplify a common operation. + + Args: + enum: string name of the Enum. + value: int, value of the enum. + + Returns: + string name of the enum value. + + Raises: + KeyError if either the Enum doesn't exist or the value is not a valid + value for the enum. + """ + return self.enum_types_by_name[enum].values_by_number[value].name + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.DescriptorProto. + + Args: + proto: An empty descriptor_pb2.DescriptorProto. + """ + # This function is overridden to give a better doc comment. + super(Descriptor, self).CopyToProto(proto) + + +# TODO(robinson): We should have aggressive checking here, +# for example: +# * If you specify a repeated field, you should not be allowed +# to specify a default value. +# * [Other examples here as needed]. +# +# TODO(robinson): for this and other *Descriptor classes, we +# might also want to lock things down aggressively (e.g., +# prevent clients from setting the attributes). Having +# stronger invariants here in general will reduce the number +# of runtime checks we must do in reflection.py... +class FieldDescriptor(DescriptorBase): + + """Descriptor for a single field in a .proto file. + + Attributes: + name (str): Name of this field, exactly as it appears in .proto. + full_name (str): Name of this field, including containing scope. This is + particularly relevant for extensions. + index (int): Dense, 0-indexed index giving the order that this + field textually appears within its message in the .proto file. + number (int): Tag number declared for this field in the .proto file. + + type (int): (One of the TYPE_* constants below) Declared type. + cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to + represent this field. + + label (int): (One of the LABEL_* constants below) Tells whether this + field is optional, required, or repeated. 
+ has_default_value (bool): True if this field has a default value defined, + otherwise false. + default_value (Varies): Default value of this field. Only + meaningful for non-repeated scalar fields. Repeated fields + should always set this to [], and non-repeated composite + fields should always set this to None. + + containing_type (Descriptor): Descriptor of the protocol message + type that contains this field. Set by the Descriptor constructor + if we're passed into one. + Somewhat confusingly, for extension fields, this is the + descriptor of the EXTENDED message, not the descriptor + of the message containing this field. (See is_extension and + extension_scope below). + message_type (Descriptor): If a composite field, a descriptor + of the message type contained in this field. Otherwise, this is None. + enum_type (EnumDescriptor): If this field contains an enum, a + descriptor of that enum. Otherwise, this is None. + + is_extension: True iff this describes an extension field. + extension_scope (Descriptor): Only meaningful if is_extension is True. + Gives the message that immediately contains this extension field. + Will be None iff we're a top-level (file-level) extension field. + + options (descriptor_pb2.FieldOptions): Protocol message field options or + None to use default field options. + + containing_oneof (OneofDescriptor): If the field is a member of a oneof + union, contains its descriptor. Otherwise, None. + + file (FileDescriptor): Reference to file descriptor. + """ + + # Must be consistent with C++ FieldDescriptor::Type enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + TYPE_DOUBLE = 1 + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + MAX_TYPE = 18 + + # Must be consistent with C++ FieldDescriptor::CppType enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + CPPTYPE_INT32 = 1 + CPPTYPE_INT64 = 2 + CPPTYPE_UINT32 = 3 + CPPTYPE_UINT64 = 4 + CPPTYPE_DOUBLE = 5 + CPPTYPE_FLOAT = 6 + CPPTYPE_BOOL = 7 + CPPTYPE_ENUM = 8 + CPPTYPE_STRING = 9 + CPPTYPE_MESSAGE = 10 + MAX_CPPTYPE = 10 + + _PYTHON_TO_CPP_PROTO_TYPE_MAP = { + TYPE_DOUBLE: CPPTYPE_DOUBLE, + TYPE_FLOAT: CPPTYPE_FLOAT, + TYPE_ENUM: CPPTYPE_ENUM, + TYPE_INT64: CPPTYPE_INT64, + TYPE_SINT64: CPPTYPE_INT64, + TYPE_SFIXED64: CPPTYPE_INT64, + TYPE_UINT64: CPPTYPE_UINT64, + TYPE_FIXED64: CPPTYPE_UINT64, + TYPE_INT32: CPPTYPE_INT32, + TYPE_SFIXED32: CPPTYPE_INT32, + TYPE_SINT32: CPPTYPE_INT32, + TYPE_UINT32: CPPTYPE_UINT32, + TYPE_FIXED32: CPPTYPE_UINT32, + TYPE_BYTES: CPPTYPE_STRING, + TYPE_STRING: CPPTYPE_STRING, + TYPE_BOOL: CPPTYPE_BOOL, + TYPE_MESSAGE: CPPTYPE_MESSAGE, + TYPE_GROUP: CPPTYPE_MESSAGE + } + + # Must be consistent with C++ FieldDescriptor::Label enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. 
+ LABEL_OPTIONAL = 1 + LABEL_REQUIRED = 2 + LABEL_REPEATED = 3 + MAX_LABEL = 3 + + # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, + # and kLastReservedNumber in descriptor.h + MAX_FIELD_NUMBER = (1 << 29) - 1 + FIRST_RESERVED_FIELD_NUMBER = 19000 + LAST_RESERVED_FIELD_NUMBER = 19999 + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FieldDescriptor + + def __new__(cls, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + _message.Message._CheckCalledFromGeneratedFile() + if is_extension: + return _message.default_pool.FindExtensionByName(full_name) + else: + return _message.default_pool.FindFieldByName(full_name) + + def __init__(self, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + serialized_options=None, + has_default_value=True, containing_oneof=None, json_name=None, + file=None, create_key=None): # pylint: disable=redefined-builtin + """The arguments are as described in the description of FieldDescriptor + attributes above. + + Note that containing_type may be None, and may be set later if necessary + (to deal with circular references between message types, for example). + Likewise for extension_scope. + """ + if create_key is not _internal_create_key: + _Deprecated('FieldDescriptor') + + super(FieldDescriptor, self).__init__( + options, serialized_options, 'FieldOptions') + self.name = name + self.full_name = full_name + self.file = file + self._camelcase_name = None + if json_name is None: + self.json_name = _ToJsonName(name) + else: + self.json_name = json_name + self.index = index + self.number = number + self.type = type + self.cpp_type = cpp_type + self.label = label + self.has_default_value = has_default_value + self.default_value = default_value + self.containing_type = containing_type + self.message_type = message_type + self.enum_type = enum_type + self.is_extension = is_extension + self.extension_scope = extension_scope + self.containing_oneof = containing_oneof + if api_implementation.Type() == 'cpp': + if is_extension: + self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) + else: + self._cdescriptor = _message.default_pool.FindFieldByName(full_name) + else: + self._cdescriptor = None + + @property + def camelcase_name(self): + """Camelcase name of this field. + + Returns: + str: the name in CamelCase. + """ + if self._camelcase_name is None: + self._camelcase_name = _ToCamelCase(self.name) + return self._camelcase_name + + @property + def has_presence(self): + """Whether the field distinguishes between unpopulated and default values. + + Raises: + RuntimeError: singular field that is not linked with message nor file. 
+ """ + if self.label == FieldDescriptor.LABEL_REPEATED: + return False + if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or + self.containing_oneof): + return True + if hasattr(self.file, 'syntax'): + return self.file.syntax == 'proto2' + if hasattr(self.message_type, 'syntax'): + return self.message_type.syntax == 'proto2' + raise RuntimeError( + 'has_presence is not ready to use because field %s is not' + ' linked with message type nor file' % self.full_name) + + @staticmethod + def ProtoTypeToCppProtoType(proto_type): + """Converts from a Python proto type to a C++ Proto Type. + + The Python ProtocolBuffer classes specify both the 'Python' datatype and the + 'C++' datatype - and they're not the same. This helper method should + translate from one to another. + + Args: + proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) + Returns: + int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. + Raises: + TypeTransformationError: when the Python proto type isn't known. + """ + try: + return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] + except KeyError: + raise TypeTransformationError('Unknown proto_type: %s' % proto_type) + + +class EnumDescriptor(_NestedDescriptorBase): + + """Descriptor for an enum defined in a .proto file. + + Attributes: + name (str): Name of the enum type. + full_name (str): Full name of the type, including package name + and any enclosing type(s). + + values (list[EnumValueDescriptor]): List of the values + in this enum. + values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "name" field of each EnumValueDescriptor. + values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, + but indexed by the "number" field of each EnumValueDescriptor. + containing_type (Descriptor): Descriptor of the immediate containing + type of this enum, or None if this is an enum defined at the + top level in a .proto file. Set by Descriptor's constructor + if we're passed into one. + file (FileDescriptor): Reference to file descriptor. + options (descriptor_pb2.EnumOptions): Enum options message or + None to use default enum options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumDescriptor + + def __new__(cls, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindEnumTypeByName(full_name) + + def __init__(self, name, full_name, filename, values, + containing_type=None, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + """Arguments are as described in the attribute description above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + if create_key is not _internal_create_key: + _Deprecated('EnumDescriptor') + + super(EnumDescriptor, self).__init__( + options, 'EnumOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + + self.values = values + for value in self.values: + value.type = self + self.values_by_name = dict((v.name, v) for v in values) + # Values are reversed to ensure that the first alias is retained. 
+ self.values_by_number = dict((v.number, v) for v in reversed(values)) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.EnumDescriptorProto. + + Args: + proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(EnumDescriptor, self).CopyToProto(proto) + + +class EnumValueDescriptor(DescriptorBase): + + """Descriptor for a single value within an enum. + + Attributes: + name (str): Name of this value. + index (int): Dense, 0-indexed index giving the order that this + value appears textually within its enum in the .proto file. + number (int): Actual number assigned to this enum value. + type (EnumDescriptor): :class:`EnumDescriptor` to which this value + belongs. Set by :class:`EnumDescriptor`'s constructor if we're + passed into one. + options (descriptor_pb2.EnumValueOptions): Enum value options message or + None to use default enum value options options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor + + def __new__(cls, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + # There is no way we can build a complete EnumValueDescriptor with the + # given parameters (the name of the Enum is not known, for example). + # Fortunately generated files just pass it to the EnumDescriptor() + # constructor, which will ignore it, so returning None is good enough. + return None + + def __init__(self, name, index, number, + type=None, # pylint: disable=redefined-builtin + options=None, serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('EnumValueDescriptor') + + super(EnumValueDescriptor, self).__init__( + options, serialized_options, 'EnumValueOptions') + self.name = name + self.index = index + self.number = number + self.type = type + + +class OneofDescriptor(DescriptorBase): + """Descriptor for a oneof field. + + Attributes: + name (str): Name of the oneof field. + full_name (str): Full name of the oneof field, including package name. + index (int): 0-based index giving the order of the oneof field inside + its containing type. + containing_type (Descriptor): :class:`Descriptor` of the protocol message + type that contains this field. Set by the :class:`Descriptor` constructor + if we're passed into one. + fields (list[FieldDescriptor]): The list of field descriptors this + oneof can contain. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.OneofDescriptor + + def __new__( + cls, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindOneofByName(full_name) + + def __init__( + self, name, full_name, index, containing_type, fields, options=None, + serialized_options=None, create_key=None): + """Arguments are as described in the attribute description above.""" + if create_key is not _internal_create_key: + _Deprecated('OneofDescriptor') + + super(OneofDescriptor, self).__init__( + options, serialized_options, 'OneofOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_type = containing_type + self.fields = fields + + +class ServiceDescriptor(_NestedDescriptorBase): + + """Descriptor for a service. 
+ + Attributes: + name (str): Name of the service. + full_name (str): Full name of the service, including package name. + index (int): 0-indexed index giving the order that this services + definition appears within the .proto file. + methods (list[MethodDescriptor]): List of methods provided by this + service. + methods_by_name (dict(str, MethodDescriptor)): Same + :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but + indexed by "name" attribute in each :class:`MethodDescriptor`. + options (descriptor_pb2.ServiceOptions): Service options message or + None to use default service options. + file (FileDescriptor): Reference to file info. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor + + def __new__( + cls, + name=None, + full_name=None, + index=None, + methods=None, + options=None, + serialized_options=None, + file=None, # pylint: disable=redefined-builtin + serialized_start=None, + serialized_end=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindServiceByName(full_name) + + def __init__(self, name, full_name, index, methods, options=None, + serialized_options=None, file=None, # pylint: disable=redefined-builtin + serialized_start=None, serialized_end=None, create_key=None): + if create_key is not _internal_create_key: + _Deprecated('ServiceDescriptor') + + super(ServiceDescriptor, self).__init__( + options, 'ServiceOptions', name, full_name, file, + None, serialized_start=serialized_start, + serialized_end=serialized_end, serialized_options=serialized_options) + self.index = index + self.methods = methods + self.methods_by_name = dict((m.name, m) for m in methods) + # Set the containing service for each method in this service. + for method in self.methods: + method.containing_service = self + + def FindMethodByName(self, name): + """Searches for the specified method, and returns its descriptor. + + Args: + name (str): Name of the method. + Returns: + MethodDescriptor or None: the descriptor for the requested method, if + found. + """ + return self.methods_by_name.get(name, None) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.ServiceDescriptorProto. + + Args: + proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. + """ + # This function is overridden to give a better doc comment. + super(ServiceDescriptor, self).CopyToProto(proto) + + +class MethodDescriptor(DescriptorBase): + + """Descriptor for a method in a service. + + Attributes: + name (str): Name of the method within the service. + full_name (str): Full name of method. + index (int): 0-indexed index of the method inside the service. + containing_service (ServiceDescriptor): The service that contains this + method. + input_type (Descriptor): The descriptor of the message that this method + accepts. + output_type (Descriptor): The descriptor of the message that this method + returns. + client_streaming (bool): Whether this method uses client streaming. + server_streaming (bool): Whether this method uses server streaming. + options (descriptor_pb2.MethodOptions or None): Method options message, or + None to use default method options. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.MethodDescriptor + + def __new__(cls, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access + return _message.default_pool.FindMethodByName(full_name) + + def __init__(self, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=False, + server_streaming=False, + options=None, + serialized_options=None, + create_key=None): + """The arguments are as described in the description of MethodDescriptor + attributes above. + + Note that containing_service may be None, and may be set later if necessary. + """ + if create_key is not _internal_create_key: + _Deprecated('MethodDescriptor') + + super(MethodDescriptor, self).__init__( + options, serialized_options, 'MethodOptions') + self.name = name + self.full_name = full_name + self.index = index + self.containing_service = containing_service + self.input_type = input_type + self.output_type = output_type + self.client_streaming = client_streaming + self.server_streaming = server_streaming + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.MethodDescriptorProto. + + Args: + proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. + + Raises: + Error: If self couldn't be serialized, due to too few constructor + arguments. + """ + if self.containing_service is not None: + from google.protobuf import descriptor_pb2 + service_proto = descriptor_pb2.ServiceDescriptorProto() + self.containing_service.CopyToProto(service_proto) + proto.CopyFrom(service_proto.method[self.index]) + else: + raise Error('Descriptor does not contain a service.') + + +class FileDescriptor(DescriptorBase): + """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. + + Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and + :attr:`dependencies` fields are only set by the + :py:mod:`google.protobuf.message_factory` module, and not by the generated + proto code. + + Attributes: + name (str): Name of file, relative to root of source tree. + package (str): Name of the package + syntax (str): string indicating syntax of the file (can be "proto2" or + "proto3") + serialized_pb (bytes): Byte string of serialized + :class:`descriptor_pb2.FileDescriptorProto`. + dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` + objects this :class:`FileDescriptor` depends on. + public_dependencies (list[FileDescriptor]): A subset of + :attr:`dependencies`, which were declared as "public". + message_types_by_name (dict(str, Descriptor)): Mapping from message names + to their :class:`Descriptor`. + enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to + their :class:`EnumDescriptor`. + extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension + names declared at file scope to their :class:`FieldDescriptor`. + services_by_name (dict(str, ServiceDescriptor)): Mapping from services' + names to their :class:`ServiceDescriptor`. + pool (DescriptorPool): The pool this descriptor belongs to. When not + passed to the constructor, the global default pool is used. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FileDescriptor + + def __new__(cls, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + # FileDescriptor() is called from various places, not only from generated + # files, to register dynamic proto files and messages. + # pylint: disable=g-explicit-bool-comparison + if serialized_pb == b'': + # Cpp generated code must be linked in if serialized_pb is '' + try: + return _message.default_pool.FindFileByName(name) + except KeyError: + raise RuntimeError('Please link in cpp generated lib for %s' % (name)) + elif serialized_pb: + return _message.default_pool.AddSerializedFile(serialized_pb) + else: + return super(FileDescriptor, cls).__new__(cls) + + def __init__(self, name, package, options=None, + serialized_options=None, serialized_pb=None, + dependencies=None, public_dependencies=None, + syntax=None, pool=None, create_key=None): + """Constructor.""" + if create_key is not _internal_create_key: + _Deprecated('FileDescriptor') + + super(FileDescriptor, self).__init__( + options, serialized_options, 'FileOptions') + + if pool is None: + from google.protobuf import descriptor_pool + pool = descriptor_pool.Default() + self.pool = pool + self.message_types_by_name = {} + self.name = name + self.package = package + self.syntax = syntax or "proto2" + self.serialized_pb = serialized_pb + + self.enum_types_by_name = {} + self.extensions_by_name = {} + self.services_by_name = {} + self.dependencies = (dependencies or []) + self.public_dependencies = (public_dependencies or []) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.FileDescriptorProto. + + Args: + proto: An empty descriptor_pb2.FileDescriptorProto. + """ + proto.ParseFromString(self.serialized_pb) + + +def _ParseOptions(message, string): + """Parses serialized options. + + This helper function is used to parse serialized options in generated + proto2 files. It must not be used outside proto2. + """ + message.ParseFromString(string) + return message + + +def _ToCamelCase(name): + """Converts name to camel-case and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + if result: + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + # Lower-case the first letter. + if result and result[0].isupper(): + result[0] = result[0].lower() + return ''.join(result) + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _ToJsonName(name): + """Converts name to Json name and returns it.""" + capitalize_next = False + result = [] + + for c in name: + if c == '_': + capitalize_next = True + elif capitalize_next: + result.append(c.upper()) + capitalize_next = False + else: + result += c + + return ''.join(result) + + +def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, + syntax=None): + """Make a protobuf Descriptor given a DescriptorProto protobuf. + + Handles nested descriptors. Note that this is limited to the scope of defining + a message inside of another message. Composite fields can currently only be + resolved if the message is defined in the same scope as the field. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. 
+ package: Optional package name for the new message Descriptor (string). + build_file_if_cpp: Update the C++ descriptor pool if api matches. + Set to False on recursion, so no duplicates are created. + syntax: The syntax/semantics that should be used. Set to "proto3" to get + proto3 field presence semantics. + Returns: + A Descriptor for protobuf messages. + """ + if api_implementation.Type() == 'cpp' and build_file_if_cpp: + # The C++ implementation requires all descriptors to be backed by the same + # definition in the C++ descriptor pool. To do this, we build a + # FileDescriptorProto with the same definition as this descriptor and build + # it into the pool. + from google.protobuf import descriptor_pb2 + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + file_descriptor_proto.message_type.add().MergeFrom(desc_proto) + + # Generate a random name for this proto file to prevent conflicts with any + # imported ones. We need to specify a file name so the descriptor pool + # accepts our FileDescriptorProto, but it is not important what that file + # name is actually set to. + proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') + + if package: + file_descriptor_proto.name = os.path.join(package.replace('.', '/'), + proto_name + '.proto') + file_descriptor_proto.package = package + else: + file_descriptor_proto.name = proto_name + '.proto' + + _message.default_pool.Add(file_descriptor_proto) + result = _message.default_pool.FindFileByName(file_descriptor_proto.name) + + if _USE_C_DESCRIPTORS: + return result.message_types_by_name[desc_proto.name] + + full_message_name = [desc_proto.name] + if package: full_message_name.insert(0, package) + + # Create Descriptors for enum types + enum_types = {} + for enum_proto in desc_proto.enum_type: + full_name = '.'.join(full_message_name + [enum_proto.name]) + enum_desc = EnumDescriptor( + enum_proto.name, full_name, None, [ + EnumValueDescriptor(enum_val.name, ii, enum_val.number, + create_key=_internal_create_key) + for ii, enum_val in enumerate(enum_proto.value)], + create_key=_internal_create_key) + enum_types[full_name] = enum_desc + + # Create Descriptors for nested types + nested_types = {} + for nested_proto in desc_proto.nested_type: + full_name = '.'.join(full_message_name + [nested_proto.name]) + # Nested types are just those defined inside of the message, not all types + # used by fields in the message, so no loops are possible here. 
+ nested_desc = MakeDescriptor(nested_proto, + package='.'.join(full_message_name), + build_file_if_cpp=False, + syntax=syntax) + nested_types[full_name] = nested_desc + + fields = [] + for field_proto in desc_proto.field: + full_name = '.'.join(full_message_name + [field_proto.name]) + enum_desc = None + nested_desc = None + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + if field_proto.HasField('type_name'): + type_name = field_proto.type_name + full_type_name = '.'.join(full_message_name + + [type_name[type_name.rfind('.')+1:]]) + if full_type_name in nested_types: + nested_desc = nested_types[full_type_name] + elif full_type_name in enum_types: + enum_desc = enum_types[full_type_name] + # Else type_name references a non-local type, which isn't implemented + field = FieldDescriptor( + field_proto.name, full_name, field_proto.number - 1, + field_proto.number, field_proto.type, + FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), + field_proto.label, None, nested_desc, enum_desc, None, False, None, + options=_OptionsOrNone(field_proto), has_default_value=False, + json_name=json_name, create_key=_internal_create_key) + fields.append(field) + + desc_name = '.'.join(full_message_name) + return Descriptor(desc_proto.name, desc_name, None, None, fields, + list(nested_types.values()), list(enum_types.values()), [], + options=_OptionsOrNone(desc_proto), + create_key=_internal_create_key) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py new file mode 100644 index 0000000000..073eddc711 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_database.py @@ -0,0 +1,177 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
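For reference, a minimal usage sketch of the MakeDescriptor() helper added above, kept outside the vendored sources (the "Pair" message and "example" package are hypothetical; only the API visible in this patch is assumed):

    from google.protobuf import descriptor_pb2
    from google.protobuf.descriptor import MakeDescriptor

    # Describe a hypothetical message "example.Pair" with one string field.
    proto = descriptor_pb2.DescriptorProto()
    proto.name = 'Pair'
    field = proto.field.add()
    field.name = 'key'
    field.number = 1
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    # Works with both implementations: with the C++ backend, MakeDescriptor
    # first registers a synthetic FileDescriptorProto in the default pool.
    desc = MakeDescriptor(proto, package='example')
    print(desc.full_name)                 # example.Pair
    print([f.name for f in desc.fields])  # ['key']

As the docstring above notes, composite (message/enum) field types can only be resolved when they are defined in the same scope as the field, so this sketch sticks to a scalar field.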
+ +"""Provides a container for DescriptorProtos.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import warnings + + +class Error(Exception): + pass + + +class DescriptorDatabaseConflictingDefinitionError(Error): + """Raised when a proto is added with the same name & different descriptor.""" + + +class DescriptorDatabase(object): + """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" + + def __init__(self): + self._file_desc_protos_by_file = {} + self._file_desc_protos_by_symbol = {} + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this database. + + Args: + file_desc_proto: The FileDescriptorProto to add. + Raises: + DescriptorDatabaseConflictingDefinitionError: if an attempt is made to + add a proto with the same name but different definition than an + existing proto in the database. + """ + proto_name = file_desc_proto.name + if proto_name not in self._file_desc_protos_by_file: + self._file_desc_protos_by_file[proto_name] = file_desc_proto + elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: + raise DescriptorDatabaseConflictingDefinitionError( + '%s already added, but with different descriptor.' % proto_name) + else: + return + + # Add all the top-level descriptors to the index. + package = file_desc_proto.package + for message in file_desc_proto.message_type: + for name in _ExtractSymbols(message, package): + self._AddSymbol(name, file_desc_proto) + for enum in file_desc_proto.enum_type: + self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) + for enum_value in enum.value: + self._file_desc_protos_by_symbol[ + '.'.join((package, enum_value.name))] = file_desc_proto + for extension in file_desc_proto.extension: + self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) + for service in file_desc_proto.service: + self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) + + def FindFileByName(self, name): + """Finds the file descriptor proto by file name. + + Typically the file name is a relative path ending to a .proto file. The + proto with the given name will have to have been added to this database + using the Add method or else an error will be raised. + + Args: + name: The file name to find. + + Returns: + The file descriptor proto matching the name. + + Raises: + KeyError if no file by the given name was added. + """ + + return self._file_desc_protos_by_file[name] + + def FindFileContainingSymbol(self, symbol): + """Finds the file descriptor proto containing the specified symbol. + + The symbol should be a fully qualified name including the file descriptor's + package and any containing messages. Some examples: + + 'some.package.name.Message' + 'some.package.name.Message.NestedEnum' + 'some.package.name.Message.some_field' + + The file descriptor proto containing the specified symbol must be added to + this database using the Add method or else an error will be raised. + + Args: + symbol: The fully qualified symbol name. + + Returns: + The file descriptor proto containing the symbol. + + Raises: + KeyError if no file contains the specified symbol. + """ + try: + return self._file_desc_protos_by_symbol[symbol] + except KeyError: + # Fields, enum values, and nested extensions are not in + # _file_desc_protos_by_symbol. Try to find the top level + # descriptor. Non-existent nested symbol under a valid top level + # descriptor can also be found. The behavior is the same with + # protobuf C++. 
+ top_level, _, _ = symbol.rpartition('.') + try: + return self._file_desc_protos_by_symbol[top_level] + except KeyError: + # Raise the original symbol as a KeyError for better diagnostics. + raise KeyError(symbol) + + def FindFileContainingExtension(self, extendee_name, extension_number): + # TODO(jieluo): implement this API. + return None + + def FindAllExtensionNumbers(self, extendee_name): + # TODO(jieluo): implement this API. + return [] + + def _AddSymbol(self, name, file_desc_proto): + if name in self._file_desc_protos_by_symbol: + warn_msg = ('Conflict register for file "' + file_desc_proto.name + + '": ' + name + + ' is already defined in file "' + + self._file_desc_protos_by_symbol[name].name + '"') + warnings.warn(warn_msg, RuntimeWarning) + self._file_desc_protos_by_symbol[name] = file_desc_proto + + +def _ExtractSymbols(desc_proto, package): + """Pulls out all the symbols from a descriptor proto. + + Args: + desc_proto: The proto to extract symbols from. + package: The package containing the descriptor type. + + Yields: + The fully qualified name found in the descriptor. + """ + message_name = package + '.' + desc_proto.name if package else desc_proto.name + yield message_name + for nested_type in desc_proto.nested_type: + for symbol in _ExtractSymbols(nested_type, message_name): + yield symbol + for enum_type in desc_proto.enum_type: + yield '.'.join((message_name, enum_type.name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py new file mode 100644 index 0000000000..f570386432 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pb2.py @@ -0,0 +1,1925 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
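Before the generated descriptor_pb2 module below, a short usage sketch of the DescriptorDatabase class added above (the file name "example/greeting.proto" and the "Greeting" message are hypothetical):

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_database

    db = descriptor_database.DescriptorDatabase()

    # Register a file proto declaring a single top-level message.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example/greeting.proto'
    file_proto.package = 'example'
    file_proto.message_type.add().name = 'Greeting'
    db.Add(file_proto)

    # Lookups by file name and by fully qualified symbol both resolve to the
    # FileDescriptorProto instance that was added.
    assert db.FindFileByName('example/greeting.proto') is file_proto
    assert db.FindFileContainingSymbol('example.Greeting') is file_proto

Per the Add() implementation above, registering the same file name again with a different definition raises DescriptorDatabaseConflictingDefinitionError, while re-adding an identical proto is a no-op.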
+# source: google/protobuf/descriptor.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/descriptor.proto', + package='google.protobuf', + syntax='proto2', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' + ) +else: + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') + +if _descriptor._USE_C_DESCRIPTORS == False: + _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.protobuf.FieldDescriptorProto.Type', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', index=3, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_INT32', index=4, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=5, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=6, number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=7, number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=8, number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + 
_descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=9, number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=10, number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=11, number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=12, number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=13, number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=14, number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=15, number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=16, number=17, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=17, number=18, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) + + _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.protobuf.FieldDescriptorProto.Label', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LABEL_OPTIONAL', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REQUIRED', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LABEL_REPEATED', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) + + _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( + name='OptimizeMode', + full_name='google.protobuf.FileOptions.OptimizeMode', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEED', index=0, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CODE_SIZE', index=1, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='LITE_RUNTIME', index=2, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) + + _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( + name='CType', + full_name='google.protobuf.FieldOptions.CType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + 
name='STRING', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CORD', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STRING_PIECE', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) + + _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( + name='JSType', + full_name='google.protobuf.FieldOptions.JSType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='JS_NORMAL', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_STRING', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JS_NUMBER', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) + + _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( + name='IdempotencyLevel', + full_name='google.protobuf.MethodOptions.IdempotencyLevel', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='IDEMPOTENCY_UNKNOWN', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NO_SIDE_EFFECTS', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='IDEMPOTENT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + ) + _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) + + + _FILEDESCRIPTORSET = _descriptor.Descriptor( + name='FileDescriptorSet', + full_name='google.protobuf.FileDescriptorSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEDESCRIPTORPROTO = _descriptor.Descriptor( + name='FileDescriptorProto', + full_name='google.protobuf.FileDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, + number=10, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, + number=11, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, + number=8, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( + name='ExtensionRange', + full_name='google.protobuf.DescriptorProto.ExtensionRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( + name='ReservedRange', + full_name='google.protobuf.DescriptorProto.ReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _DESCRIPTORPROTO = _descriptor.Descriptor( + name='DescriptorProto', + full_name='google.protobuf.DescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.DescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='field', full_name='google.protobuf.DescriptorProto.field', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.options', index=7, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, + number=10, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( + name='ExtensionRangeOptions', + full_name='google.protobuf.ExtensionRangeOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( + name='FieldDescriptorProto', + full_name='google.protobuf.FieldDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='label', 
full_name='google.protobuf.FieldDescriptorProto.label', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, + number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDDESCRIPTORPROTO_TYPE, + _FIELDDESCRIPTORPROTO_LABEL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( + name='OneofDescriptorProto', + full_name='google.protobuf.OneofDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( + name='EnumReservedRange', + full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumDescriptorProto', + full_name='google.protobuf.EnumDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.protobuf.EnumDescriptorProto.value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumValueDescriptorProto', + full_name='google.protobuf.EnumValueDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( + name='ServiceDescriptorProto', + 
full_name='google.protobuf.ServiceDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _METHODDESCRIPTORPROTO = _descriptor.Descriptor( + name='MethodDescriptorProto', + full_name='google.protobuf.MethodDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _FILEOPTIONS = _descriptor.Descriptor( + name='FileOptions', + full_name='google.protobuf.FileOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, + number=27, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, + number=9, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, + number=42, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, + number=31, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, + number=36, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
+ number=37, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, + number=39, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, + number=40, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, + number=41, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, + number=44, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, + number=45, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEOPTIONS_OPTIMIZEMODE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _MESSAGEOPTIONS = _descriptor.Descriptor( + name='MessageOptions', + full_name='google.protobuf.MessageOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _FIELDOPTIONS = _descriptor.Descriptor( + name='FieldOptions', + full_name='google.protobuf.FieldOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, 
default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, + number=15, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDOPTIONS_CTYPE, + _FIELDOPTIONS_JSTYPE, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ONEOFOPTIONS = _descriptor.Descriptor( + name='OneofOptions', + full_name='google.protobuf.OneofOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMOPTIONS = _descriptor.Descriptor( + name='EnumOptions', + full_name='google.protobuf.EnumOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( 
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _ENUMVALUEOPTIONS = _descriptor.Descriptor( + name='EnumValueOptions', + full_name='google.protobuf.EnumValueOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _SERVICEOPTIONS = _descriptor.Descriptor( + name='ServiceOptions', + full_name='google.protobuf.ServiceOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _METHODOPTIONS = _descriptor.Descriptor( + name='MethodOptions', + 
full_name='google.protobuf.MethodOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, + number=34, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _METHODOPTIONS_IDEMPOTENCYLEVEL, + ], + serialized_options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + ) + + + _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( + name='NamePart', + full_name='google.protobuf.UninterpretedOption.NamePart', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, + number=2, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _UNINTERPRETEDOPTION = _descriptor.Descriptor( + name='UninterpretedOption', + full_name='google.protobuf.UninterpretedOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='google.protobuf.SourceCodeInfo.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _SOURCECODEINFO = _descriptor.Descriptor( + name='SourceCodeInfo', + full_name='google.protobuf.SourceCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_SOURCECODEINFO_LOCATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + + _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( + name='Annotation', + full_name='google.protobuf.GeneratedCodeInfo.Annotation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _GENERATEDCODEINFO = _descriptor.Descriptor( + name='GeneratedCodeInfo', + full_name='google.protobuf.GeneratedCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + ) + + _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS + _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO + _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS + _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE + _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO + _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS + _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE + _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL + _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE + _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS + _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO + _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO + _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO + _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS + _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE + _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS + _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO + _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS + _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS + _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE + _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS + _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE + _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE + _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS + _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS + _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL + _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION + _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS + _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION + _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART + _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO + _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION + _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO + _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION + DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET + DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO + 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO + DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS + DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS + DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS + DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS + DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS + DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS + DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS + DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS + DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION + DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO + DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + +else: + _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _FILEDESCRIPTORSET._serialized_start=53 + _FILEDESCRIPTORSET._serialized_end=124 + _FILEDESCRIPTORPROTO._serialized_start=127 + _FILEDESCRIPTORPROTO._serialized_end=602 + _DESCRIPTORPROTO._serialized_start=605 + _DESCRIPTORPROTO._serialized_end=1286 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 + _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 + _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 + _EXTENSIONRANGEOPTIONS._serialized_start=1288 + _EXTENSIONRANGEOPTIONS._serialized_end=1391 + _FIELDDESCRIPTORPROTO._serialized_start=1394 + _FIELDDESCRIPTORPROTO._serialized_end=2119 + _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 + _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 + _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 + _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 + _ONEOFDESCRIPTORPROTO._serialized_start=2121 + _ONEOFDESCRIPTORPROTO._serialized_end=2205 + _ENUMDESCRIPTORPROTO._serialized_start=2208 + _ENUMDESCRIPTORPROTO._serialized_end=2500 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 + _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 + _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 + _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 + _SERVICEDESCRIPTORPROTO._serialized_start=2613 + _SERVICEDESCRIPTORPROTO._serialized_end=2757 + _METHODDESCRIPTORPROTO._serialized_start=2760 + _METHODDESCRIPTORPROTO._serialized_end=2953 + _FILEOPTIONS._serialized_start=2956 + _FILEOPTIONS._serialized_end=3761 + _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 + _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 + _MESSAGEOPTIONS._serialized_start=3764 + _MESSAGEOPTIONS._serialized_end=4024 + _FIELDOPTIONS._serialized_start=4027 + _FIELDOPTIONS._serialized_end=4473 + _FIELDOPTIONS_CTYPE._serialized_start=4354 + _FIELDOPTIONS_CTYPE._serialized_end=4401 + _FIELDOPTIONS_JSTYPE._serialized_start=4403 + _FIELDOPTIONS_JSTYPE._serialized_end=4456 + _ONEOFOPTIONS._serialized_start=4475 + _ONEOFOPTIONS._serialized_end=4569 + _ENUMOPTIONS._serialized_start=4572 + _ENUMOPTIONS._serialized_end=4719 + _ENUMVALUEOPTIONS._serialized_start=4721 + _ENUMVALUEOPTIONS._serialized_end=4846 + _SERVICEOPTIONS._serialized_start=4848 + _SERVICEOPTIONS._serialized_end=4971 + 
_METHODOPTIONS._serialized_start=4974 + _METHODOPTIONS._serialized_end=5275 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 + _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 + _UNINTERPRETEDOPTION._serialized_start=5278 + _UNINTERPRETEDOPTION._serialized_end=5564 + _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 + _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 + _SOURCECODEINFO._serialized_start=5567 + _SOURCECODEINFO._serialized_end=5780 + _SOURCECODEINFO_LOCATION._serialized_start=5646 + _SOURCECODEINFO_LOCATION._serialized_end=5780 + _GENERATEDCODEINFO._serialized_start=5783 + _GENERATEDCODEINFO._serialized_end=5950 + _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 + _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py new file mode 100644 index 0000000000..911372a8b0 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/descriptor_pool.py @@ -0,0 +1,1295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides DescriptorPool to use as a container for proto2 descriptors. + +The DescriptorPool is used in conjection with a DescriptorDatabase to maintain +a collection of protocol buffer descriptors for use when dynamically creating +message types at runtime. + +For most applications protocol buffers should be used via modules generated by +the protocol buffer compiler tool. This should only be used when the type of +protocol buffers used in an application or library cannot be predetermined. + +Below is a straightforward example on how to use this class:: + + pool = DescriptorPool() + file_descriptor_protos = [ ... 
] + for file_descriptor_proto in file_descriptor_protos: + pool.Add(file_descriptor_proto) + my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') + +The message descriptor can be used in conjunction with the message_factory +module in order to create a protocol buffer class that can be encoded and +decoded. + +If you want to get a Python class for the specified proto, use the +helper functions inside google.protobuf.message_factory +directly instead of this class. +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import collections +import warnings + +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import text_encoding + + +_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access + + +def _Deprecated(func): + """Mark functions as deprecated.""" + + def NewFunc(*args, **kwargs): + warnings.warn( + 'Call to deprecated function %s(). Note: Do add unlinked descriptors ' + 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' + 'instead.' % func.__name__, + category=DeprecationWarning) + return func(*args, **kwargs) + NewFunc.__name__ = func.__name__ + NewFunc.__doc__ = func.__doc__ + NewFunc.__dict__.update(func.__dict__) + return NewFunc + + +def _NormalizeFullyQualifiedName(name): + """Remove leading period from fully-qualified type name. + + Due to b/13860351 in descriptor_database.py, types in the root namespace are + generated with a leading period. This function removes that prefix. + + Args: + name (str): The fully-qualified symbol name. + + Returns: + str: The normalized fully-qualified symbol name. + """ + return name.lstrip('.') + + +def _OptionsOrNone(descriptor_proto): + """Returns the value of the field `options`, or None if it is not set.""" + if descriptor_proto.HasField('options'): + return descriptor_proto.options + else: + return None + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) + + +class DescriptorPool(object): + """A collection of protobufs dynamically constructed by descriptor protos.""" + + if _USE_C_DESCRIPTORS: + + def __new__(cls, descriptor_db=None): + # pylint: disable=protected-access + return descriptor._message.DescriptorPool(descriptor_db) + + def __init__(self, descriptor_db=None): + """Initializes a Pool of proto buffs. + + The descriptor_db argument to the constructor is provided to allow + specialized file descriptor proto lookup code to be triggered on demand. An + example would be an implementation which will read and compile a file + specified in a call to FindFileByName() and not require the call to Add() + at all. Results from this database will be cached internally here as well. + + Args: + descriptor_db: A secondary source of file descriptors. + """ + + self._internal_db = descriptor_database.DescriptorDatabase() + self._descriptor_db = descriptor_db + self._descriptors = {} + self._enum_descriptors = {} + self._service_descriptors = {} + self._file_descriptors = {} + self._toplevel_extensions = {} + # TODO(jieluo): Remove _file_desc_by_toplevel_extension after + # maybe year 2020 for compatibility issue (with 3.4.1 only). 
+ self._file_desc_by_toplevel_extension = {} + self._top_enum_values = {} + # We store extensions in two two-level mappings: The first key is the + # descriptor of the message being extended, the second key is the extension + # full name or its tag number. + self._extensions_by_name = collections.defaultdict(dict) + self._extensions_by_number = collections.defaultdict(dict) + + def _CheckConflictRegister(self, desc, desc_name, file_name): + """Check if the descriptor name conflicts with another of the same name. + + Args: + desc: Descriptor of a message, enum, service, extension or enum value. + desc_name (str): the full name of desc. + file_name (str): The file name of descriptor. + """ + for register, descriptor_type in [ + (self._descriptors, descriptor.Descriptor), + (self._enum_descriptors, descriptor.EnumDescriptor), + (self._service_descriptors, descriptor.ServiceDescriptor), + (self._toplevel_extensions, descriptor.FieldDescriptor), + (self._top_enum_values, descriptor.EnumValueDescriptor)]: + if desc_name in register: + old_desc = register[desc_name] + if isinstance(old_desc, descriptor.EnumValueDescriptor): + old_file = old_desc.type.file.name + else: + old_file = old_desc.file.name + + if not isinstance(desc, descriptor_type) or ( + old_file != file_name): + error_msg = ('Conflict register for file "' + file_name + + '": ' + desc_name + + ' is already defined in file "' + + old_file + '". Please fix the conflict by adding ' + 'package name on the proto file, or use different ' + 'name for the duplication.') + if isinstance(desc, descriptor.EnumValueDescriptor): + error_msg += ('\nNote: enum values appear as ' + 'siblings of the enum type instead of ' + 'children of it.') + + raise TypeError(error_msg) + + return + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + file_desc_proto (FileDescriptorProto): The file descriptor to add. + """ + + self._internal_db.Add(file_desc_proto) + + def AddSerializedFile(self, serialized_file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + serialized_file_desc_proto (bytes): A bytes string, serialization of the + :class:`FileDescriptorProto` to add. + + Returns: + FileDescriptor: Descriptor for the added file. + """ + + # pylint: disable=g-import-not-at-top + from google.protobuf import descriptor_pb2 + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + serialized_file_desc_proto) + file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto) + file_desc.serialized_pb = serialized_file_desc_proto + return file_desc + + # Add Descriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddDescriptor(self, desc): + self._AddDescriptor(desc) + + # Never call this method. It is for internal usage only. + def _AddDescriptor(self, desc): + """Adds a Descriptor to the pool, non-recursively. + + If the Descriptor contains nested messages or enums, the caller must + explicitly register them. This method also registers the FileDescriptor + associated with the message. + + Args: + desc: A Descriptor. + """ + if not isinstance(desc, descriptor.Descriptor): + raise TypeError('Expected instance of descriptor.Descriptor.') + + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + + self._descriptors[desc.full_name] = desc + self._AddFileDescriptor(desc.file) + + # Add EnumDescriptor to descriptor pool is dreprecated. 
Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddEnumDescriptor(self, enum_desc): + self._AddEnumDescriptor(enum_desc) + + # Never call this method. It is for internal usage only. + def _AddEnumDescriptor(self, enum_desc): + """Adds an EnumDescriptor to the pool. + + This method also registers the FileDescriptor associated with the enum. + + Args: + enum_desc: An EnumDescriptor. + """ + + if not isinstance(enum_desc, descriptor.EnumDescriptor): + raise TypeError('Expected instance of descriptor.EnumDescriptor.') + + file_name = enum_desc.file.name + self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name) + self._enum_descriptors[enum_desc.full_name] = enum_desc + + # Top enum values need to be indexed. + # Count the number of dots to see whether the enum is toplevel or nested + # in a message. We cannot use enum_desc.containing_type at this stage. + if enum_desc.file.package: + top_level = (enum_desc.full_name.count('.') + - enum_desc.file.package.count('.') == 1) + else: + top_level = enum_desc.full_name.count('.') == 0 + if top_level: + file_name = enum_desc.file.name + package = enum_desc.file.package + for enum_value in enum_desc.values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, enum_value.name))) + self._CheckConflictRegister(enum_value, full_name, file_name) + self._top_enum_values[full_name] = enum_value + self._AddFileDescriptor(enum_desc.file) + + # Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddServiceDescriptor(self, service_desc): + self._AddServiceDescriptor(service_desc) + + # Never call this method. It is for internal usage only. + def _AddServiceDescriptor(self, service_desc): + """Adds a ServiceDescriptor to the pool. + + Args: + service_desc: A ServiceDescriptor. + """ + + if not isinstance(service_desc, descriptor.ServiceDescriptor): + raise TypeError('Expected instance of descriptor.ServiceDescriptor.') + + self._CheckConflictRegister(service_desc, service_desc.full_name, + service_desc.file.name) + self._service_descriptors[service_desc.full_name] = service_desc + + # Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add() + # or AddSerializedFile() to add a FileDescriptorProto instead. + @_Deprecated + def AddExtensionDescriptor(self, extension): + self._AddExtensionDescriptor(extension) + + # Never call this method. It is for internal usage only. + def _AddExtensionDescriptor(self, extension): + """Adds a FieldDescriptor describing an extension to the pool. + + Args: + extension: A FieldDescriptor. + + Raises: + AssertionError: when another extension with the same number extends the + same message. + TypeError: when the specified extension is not a + descriptor.FieldDescriptor. + """ + if not (isinstance(extension, descriptor.FieldDescriptor) and + extension.is_extension): + raise TypeError('Expected an extension descriptor.') + + if extension.extension_scope is None: + self._toplevel_extensions[extension.full_name] = extension + + try: + existing_desc = self._extensions_by_number[ + extension.containing_type][extension.number] + except KeyError: + pass + else: + if extension is not existing_desc: + raise AssertionError( + 'Extensions "%s" and "%s" both try to extend message type "%s" ' + 'with field number %d.' 
% + (extension.full_name, existing_desc.full_name, + extension.containing_type.full_name, extension.number)) + + self._extensions_by_number[extension.containing_type][ + extension.number] = extension + self._extensions_by_name[extension.containing_type][ + extension.full_name] = extension + + # Also register MessageSet extensions with the type name. + if _IsMessageSetExtension(extension): + self._extensions_by_name[extension.containing_type][ + extension.message_type.full_name] = extension + + @_Deprecated + def AddFileDescriptor(self, file_desc): + self._InternalAddFileDescriptor(file_desc) + + # Never call this method. It is for internal usage only. + def _InternalAddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + self._AddFileDescriptor(file_desc) + # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. + # FieldDescriptor.file is added in code gen. Remove this solution after + # maybe 2020 for compatibility reason (with 3.4.1 only). + for extension in file_desc.extensions_by_name.values(): + self._file_desc_by_toplevel_extension[ + extension.full_name] = file_desc + + def _AddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + if not isinstance(file_desc, descriptor.FileDescriptor): + raise TypeError('Expected instance of descriptor.FileDescriptor.') + self._file_descriptors[file_desc.name] = file_desc + + def FindFileByName(self, file_name): + """Gets a FileDescriptor by file name. + + Args: + file_name (str): The path to the file to get a descriptor for. + + Returns: + FileDescriptor: The descriptor for the named file. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + try: + return self._file_descriptors[file_name] + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileByName(file_name) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file named %s' % file_name) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindFileContainingSymbol(self, symbol): + """Gets the FileDescriptor for the file containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. + """ + + symbol = _NormalizeFullyQualifiedName(symbol) + try: + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + pass + + try: + # Try fallback database. Build and find again if possible. + self._FindFileContainingSymbolInDb(symbol) + return self._InternalFindFileContainingSymbol(symbol) + except KeyError: + raise KeyError('Cannot find a file containing %s' % symbol) + + def _InternalFindFileContainingSymbol(self, symbol): + """Gets the already built FileDescriptor containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: Descriptor for the file that contains the specified + symbol. + + Raises: + KeyError: if the file cannot be found in the pool. 
+ """ + try: + return self._descriptors[symbol].file + except KeyError: + pass + + try: + return self._enum_descriptors[symbol].file + except KeyError: + pass + + try: + return self._service_descriptors[symbol].file + except KeyError: + pass + + try: + return self._top_enum_values[symbol].type.file + except KeyError: + pass + + try: + return self._file_desc_by_toplevel_extension[symbol] + except KeyError: + pass + + # Try fields, enum values and nested extensions inside a message. + top_name, _, sub_name = symbol.rpartition('.') + try: + message = self.FindMessageTypeByName(top_name) + assert (sub_name in message.extensions_by_name or + sub_name in message.fields_by_name or + sub_name in message.enum_values_by_name) + return message.file + except (KeyError, AssertionError): + raise KeyError('Cannot find a file containing %s' % symbol) + + def FindMessageTypeByName(self, full_name): + """Loads the named descriptor from the pool. + + Args: + full_name (str): The full name of the descriptor to load. + + Returns: + Descriptor: The descriptor for the named type. + + Raises: + KeyError: if the message cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._descriptors[full_name] + + def FindEnumTypeByName(self, full_name): + """Loads the named enum descriptor from the pool. + + Args: + full_name (str): The full name of the enum descriptor to load. + + Returns: + EnumDescriptor: The enum descriptor for the named type. + + Raises: + KeyError: if the enum cannot be found in the pool. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._enum_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._enum_descriptors[full_name] + + def FindFieldByName(self, full_name): + """Loads the named field descriptor from the pool. + + Args: + full_name (str): The full name of the field descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named field. + + Raises: + KeyError: if the field cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, field_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.fields_by_name[field_name] + + def FindOneofByName(self, full_name): + """Loads the named oneof descriptor from the pool. + + Args: + full_name (str): The full name of the oneof descriptor to load. + + Returns: + OneofDescriptor: The oneof descriptor for the named oneof. + + Raises: + KeyError: if the oneof cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, oneof_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.oneofs_by_name[oneof_name] + + def FindExtensionByName(self, full_name): + """Loads the named extension descriptor from the pool. + + Args: + full_name (str): The full name of the extension descriptor to load. + + Returns: + FieldDescriptor: The field descriptor for the named extension. + + Raises: + KeyError: if the extension cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + try: + # The proto compiler does not give any link between the FileDescriptor + # and top-level extensions unless the FileDescriptorProto is added to + # the DescriptorDatabase, but this can impact memory usage. 
+ # So we registered these extensions by name explicitly. + return self._toplevel_extensions[full_name] + except KeyError: + pass + message_name, _, extension_name = full_name.rpartition('.') + try: + # Most extensions are nested inside a message. + scope = self.FindMessageTypeByName(message_name) + except KeyError: + # Some extensions are defined at file scope. + scope = self._FindFileContainingSymbolInDb(full_name) + return scope.extensions_by_name[extension_name] + + def FindExtensionByNumber(self, message_descriptor, number): + """Gets the extension of the specified message with the specified number. + + Extensions have to be registered to this pool by calling :func:`Add` or + :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): descriptor of the extended message. + number (int): Number of the extension field. + + Returns: + FieldDescriptor: The descriptor for the extension. + + Raises: + KeyError: when no extension with the given number is known for the + specified message. + """ + try: + return self._extensions_by_number[message_descriptor][number] + except KeyError: + self._TryLoadExtensionFromDB(message_descriptor, number) + return self._extensions_by_number[message_descriptor][number] + + def FindAllExtensions(self, message_descriptor): + """Gets all the known extensions of a given message. + + Extensions have to be registered to this pool by build related + :func:`Add` or :func:`AddExtensionDescriptor`. + + Args: + message_descriptor (Descriptor): Descriptor of the extended message. + + Returns: + list[FieldDescriptor]: Field descriptors describing the extensions. + """ + # Fallback to descriptor db if FindAllExtensionNumbers is provided. + if self._descriptor_db and hasattr( + self._descriptor_db, 'FindAllExtensionNumbers'): + full_name = message_descriptor.full_name + all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) + for number in all_numbers: + if number in self._extensions_by_number[message_descriptor]: + continue + self._TryLoadExtensionFromDB(message_descriptor, number) + + return list(self._extensions_by_number[message_descriptor].values()) + + def _TryLoadExtensionFromDB(self, message_descriptor, number): + """Try to Load extensions from descriptor db. + + Args: + message_descriptor: descriptor of the extended message. + number: the extension number that needs to be loaded. + """ + if not self._descriptor_db: + return + # Only supported when FindFileContainingExtension is provided. + if not hasattr( + self._descriptor_db, 'FindFileContainingExtension'): + return + + full_name = message_descriptor.full_name + file_proto = self._descriptor_db.FindFileContainingExtension( + full_name, number) + + if file_proto is None: + return + + try: + self._ConvertFileProtoToFileDescriptor(file_proto) + except: + warn_msg = ('Unable to load proto file %s for extension number %d.' % + (file_proto.name, number)) + warnings.warn(warn_msg, RuntimeWarning) + + def FindServiceByName(self, full_name): + """Loads the named service descriptor from the pool. + + Args: + full_name (str): The full name of the service descriptor to load. + + Returns: + ServiceDescriptor: The service descriptor for the named service. + + Raises: + KeyError: if the service cannot be found in the pool. 
+ """ + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._service_descriptors: + self._FindFileContainingSymbolInDb(full_name) + return self._service_descriptors[full_name] + + def FindMethodByName(self, full_name): + """Loads the named service method descriptor from the pool. + + Args: + full_name (str): The full name of the method descriptor to load. + + Returns: + MethodDescriptor: The method descriptor for the service method. + + Raises: + KeyError: if the method cannot be found in the pool. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + service_name, _, method_name = full_name.rpartition('.') + service_descriptor = self.FindServiceByName(service_name) + return service_descriptor.methods_by_name[method_name] + + def _FindFileContainingSymbolInDb(self, symbol): + """Finds the file in descriptor DB containing the specified symbol. + + Args: + symbol (str): The name of the symbol to search for. + + Returns: + FileDescriptor: The file that contains the specified symbol. + + Raises: + KeyError: if the file cannot be found in the descriptor database. + """ + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file containing %s' % symbol) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def _ConvertFileProtoToFileDescriptor(self, file_proto): + """Creates a FileDescriptor from a proto or returns a cached copy. + + This method also has the side effect of loading all the symbols found in + the file into the appropriate dictionaries in the pool. + + Args: + file_proto: The proto to convert. + + Returns: + A FileDescriptor matching the passed in proto. + """ + if file_proto.name not in self._file_descriptors: + built_deps = list(self._GetDeps(file_proto.dependency)) + direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] + public_deps = [direct_deps[i] for i in file_proto.public_dependency] + + file_descriptor = descriptor.FileDescriptor( + pool=self, + name=file_proto.name, + package=file_proto.package, + syntax=file_proto.syntax, + options=_OptionsOrNone(file_proto), + serialized_pb=file_proto.SerializeToString(), + dependencies=direct_deps, + public_dependencies=public_deps, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope = {} + + # This loop extracts all the message and enum types from all the + # dependencies of the file_proto. This is necessary to create the + # scope of available message types when defining the passed in + # file proto. 
+ for dependency in built_deps: + scope.update(self._ExtractSymbols( + dependency.message_types_by_name.values())) + scope.update((_PrefixWithDot(enum.full_name), enum) + for enum in dependency.enum_types_by_name.values()) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( + message_type, file_proto.package, file_descriptor, scope, + file_proto.syntax) + file_descriptor.message_types_by_name[message_desc.name] = ( + message_desc) + + for enum_type in file_proto.enum_type: + file_descriptor.enum_types_by_name[enum_type.name] = ( + self._ConvertEnumDescriptor(enum_type, file_proto.package, + file_descriptor, None, scope, True)) + + for index, extension_proto in enumerate(file_proto.extension): + extension_desc = self._MakeFieldDescriptor( + extension_proto, file_proto.package, index, file_descriptor, + is_extension=True) + extension_desc.containing_type = self._GetTypeFromScope( + file_descriptor.package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, + file_descriptor.package, scope) + file_descriptor.extensions_by_name[extension_desc.name] = ( + extension_desc) + self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( + file_descriptor) + + for desc_proto in file_proto.message_type: + self._SetAllFieldTypes(file_proto.package, desc_proto, scope) + + if file_proto.package: + desc_proto_prefix = _PrefixWithDot(file_proto.package) + else: + desc_proto_prefix = '' + + for desc_proto in file_proto.message_type: + desc = self._GetTypeFromScope( + desc_proto_prefix, desc_proto.name, scope) + file_descriptor.message_types_by_name[desc_proto.name] = desc + + for index, service_proto in enumerate(file_proto.service): + file_descriptor.services_by_name[service_proto.name] = ( + self._MakeServiceDescriptor(service_proto, index, scope, + file_proto.package, file_descriptor)) + + self._file_descriptors[file_proto.name] = file_descriptor + + # Add extensions to the pool + file_desc = self._file_descriptors[file_proto.name] + for extension in file_desc.extensions_by_name.values(): + self._AddExtensionDescriptor(extension) + for message_type in file_desc.message_types_by_name.values(): + for extension in message_type.extensions: + self._AddExtensionDescriptor(extension) + + return file_desc + + def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, + scope=None, syntax=None): + """Adds the proto to the pool in the specified package. + + Args: + desc_proto: The descriptor_pb2.DescriptorProto protobuf message. + package: The package the proto should be located in. + file_desc: The file containing this message. + scope: Dict mapping short and full symbols to message and enum types. + syntax: string indicating syntax of the file ("proto2" or "proto3") + + Returns: + The added descriptor. 
+ """ + + if package: + desc_name = '.'.join((package, desc_proto.name)) + else: + desc_name = desc_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + if scope is None: + scope = {} + + nested = [ + self._ConvertMessageDescriptor( + nested, desc_name, file_desc, scope, syntax) + for nested in desc_proto.nested_type] + enums = [ + self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, + scope, False) + for enum in desc_proto.enum_type] + fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) + for index, field in enumerate(desc_proto.field)] + extensions = [ + self._MakeFieldDescriptor(extension, desc_name, index, file_desc, + is_extension=True) + for index, extension in enumerate(desc_proto.extension)] + oneofs = [ + # pylint: disable=g-complex-comprehension + descriptor.OneofDescriptor( + desc.name, + '.'.join((desc_name, desc.name)), + index, + None, + [], + _OptionsOrNone(desc), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for index, desc in enumerate(desc_proto.oneof_decl) + ] + extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] + if extension_ranges: + is_extendable = True + else: + is_extendable = False + desc = descriptor.Descriptor( + name=desc_proto.name, + full_name=desc_name, + filename=file_name, + containing_type=None, + fields=fields, + oneofs=oneofs, + nested_types=nested, + enum_types=enums, + extensions=extensions, + options=_OptionsOrNone(desc_proto), + is_extendable=is_extendable, + extension_ranges=extension_ranges, + file=file_desc, + serialized_start=None, + serialized_end=None, + syntax=syntax, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + for nested in desc.nested_types: + nested.containing_type = desc + for enum in desc.enum_types: + enum.containing_type = desc + for field_index, field_desc in enumerate(desc_proto.field): + if field_desc.HasField('oneof_index'): + oneof_index = field_desc.oneof_index + oneofs[oneof_index].fields.append(fields[field_index]) + fields[field_index].containing_oneof = oneofs[oneof_index] + + scope[_PrefixWithDot(desc_name)] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._descriptors[desc_name] = desc + return desc + + def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, + containing_type=None, scope=None, top_level=False): + """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. + + Args: + enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the enum descriptor. + containing_type: The type containing this enum. + scope: Scope containing available types. + top_level: If True, the enum is a top level symbol. If False, the enum + is defined inside a message. 
+ + Returns: + The added descriptor + """ + + if package: + enum_name = '.'.join((package, enum_proto.name)) + else: + enum_name = enum_proto.name + + if file_desc is None: + file_name = None + else: + file_name = file_desc.name + + values = [self._MakeEnumValueDescriptor(value, index) + for index, value in enumerate(enum_proto.value)] + desc = descriptor.EnumDescriptor(name=enum_proto.name, + full_name=enum_name, + filename=file_name, + file=file_desc, + values=values, + containing_type=containing_type, + options=_OptionsOrNone(enum_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + scope['.%s' % enum_name] = desc + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._enum_descriptors[enum_name] = desc + + # Add top level enum values. + if top_level: + for value in values: + full_name = _NormalizeFullyQualifiedName( + '.'.join((package, value.name))) + self._CheckConflictRegister(value, full_name, file_name) + self._top_enum_values[full_name] = value + + return desc + + def _MakeFieldDescriptor(self, field_proto, message_name, index, + file_desc, is_extension=False): + """Creates a field descriptor from a FieldDescriptorProto. + + For message and enum type fields, this method will do a look up + in the pool for the appropriate descriptor for that type. If it + is unavailable, it will fall back to the _source function to + create it. If this type is still unavailable, construction will + fail. + + Args: + field_proto: The proto describing the field. + message_name: The name of the containing message. + index: Index of the field + file_desc: The file containing the field descriptor. + is_extension: Indication that this field is for an extension. + + Returns: + An initialized FieldDescriptor object + """ + + if message_name: + full_name = '.'.join((message_name, field_proto.name)) + else: + full_name = field_proto.name + + if field_proto.json_name: + json_name = field_proto.json_name + else: + json_name = None + + return descriptor.FieldDescriptor( + name=field_proto.name, + full_name=full_name, + index=index, + number=field_proto.number, + type=field_proto.type, + cpp_type=None, + message_type=None, + enum_type=None, + containing_type=None, + label=field_proto.label, + has_default_value=False, + default_value=None, + is_extension=is_extension, + extension_scope=None, + options=_OptionsOrNone(field_proto), + json_name=json_name, + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _SetAllFieldTypes(self, package, desc_proto, scope): + """Sets all the descriptor's fields's types. + + This method also sets the containing types on any extensions. + + Args: + package: The current package of desc_proto. + desc_proto: The message descriptor to update. + scope: Enclosing scope of available types. 
+ """ + + package = _PrefixWithDot(package) + + main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) + + if package == '.': + nested_package = _PrefixWithDot(desc_proto.name) + else: + nested_package = '.'.join([package, desc_proto.name]) + + for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): + self._SetFieldType(field_proto, field_desc, nested_package, scope) + + for extension_proto, extension_desc in ( + zip(desc_proto.extension, main_desc.extensions)): + extension_desc.containing_type = self._GetTypeFromScope( + nested_package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, nested_package, scope) + + for nested_type in desc_proto.nested_type: + self._SetAllFieldTypes(nested_package, nested_type, scope) + + def _SetFieldType(self, field_proto, field_desc, package, scope): + """Sets the field's type, cpp_type, message_type and enum_type. + + Args: + field_proto: Data about the field in proto format. + field_desc: The descriptor to modify. + package: The package the field's container is in. + scope: Enclosing scope of available types. + """ + if field_proto.type_name: + desc = self._GetTypeFromScope(package, field_proto.type_name, scope) + else: + desc = None + + if not field_proto.HasField('type'): + if isinstance(desc, descriptor.Descriptor): + field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE + else: + field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM + + field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( + field_proto.type) + + if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): + field_desc.message_type = desc + + if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.enum_type = desc + + if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: + field_desc.has_default_value = False + field_desc.default_value = [] + elif field_proto.HasField('default_value'): + field_desc.has_default_value = True + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = float(field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = field_proto.default_value + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = field_proto.default_value.lower() == 'true' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values_by_name[ + field_proto.default_value].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = text_encoding.CUnescape( + field_proto.default_value) + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + else: + # All other types are of the "int" type. 
+ field_desc.default_value = int(field_proto.default_value) + else: + field_desc.has_default_value = False + if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or + field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): + field_desc.default_value = 0.0 + elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: + field_desc.default_value = u'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: + field_desc.default_value = False + elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: + field_desc.default_value = field_desc.enum_type.values[0].number + elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: + field_desc.default_value = b'' + elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: + field_desc.default_value = None + elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: + field_desc.default_value = None + else: + # All other types are of the "int" type. + field_desc.default_value = 0 + + field_desc.type = field_proto.type + + def _MakeEnumValueDescriptor(self, value_proto, index): + """Creates a enum value descriptor object from a enum value proto. + + Args: + value_proto: The proto describing the enum value. + index: The index of the enum value. + + Returns: + An initialized EnumValueDescriptor object. + """ + + return descriptor.EnumValueDescriptor( + name=value_proto.name, + index=index, + number=value_proto.number, + options=_OptionsOrNone(value_proto), + type=None, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _MakeServiceDescriptor(self, service_proto, service_index, scope, + package, file_desc): + """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. + + Args: + service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. + service_index: The index of the service in the File. + scope: Dict mapping short and full symbols to message and enum types. + package: Optional package name for the new message EnumDescriptor. + file_desc: The file containing the service descriptor. + + Returns: + The added descriptor. + """ + + if package: + service_name = '.'.join((package, service_proto.name)) + else: + service_name = service_proto.name + + methods = [self._MakeMethodDescriptor(method_proto, service_name, package, + scope, index) + for index, method_proto in enumerate(service_proto.method)] + desc = descriptor.ServiceDescriptor( + name=service_proto.name, + full_name=service_name, + index=service_index, + methods=methods, + options=_OptionsOrNone(service_proto), + file=file_desc, + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + self._CheckConflictRegister(desc, desc.full_name, desc.file.name) + self._service_descriptors[service_name] = desc + return desc + + def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, + index): + """Creates a method descriptor from a MethodDescriptorProto. + + Args: + method_proto: The proto describing the method. + service_name: The name of the containing service. + package: Optional package name to look up for types. + scope: Scope containing available types. + index: Index of the method in the service. + + Returns: + An initialized MethodDescriptor object. 
+ """ + full_name = '.'.join((service_name, method_proto.name)) + input_type = self._GetTypeFromScope( + package, method_proto.input_type, scope) + output_type = self._GetTypeFromScope( + package, method_proto.output_type, scope) + return descriptor.MethodDescriptor( + name=method_proto.name, + full_name=full_name, + index=index, + containing_service=None, + input_type=input_type, + output_type=output_type, + client_streaming=method_proto.client_streaming, + server_streaming=method_proto.server_streaming, + options=_OptionsOrNone(method_proto), + # pylint: disable=protected-access + create_key=descriptor._internal_create_key) + + def _ExtractSymbols(self, descriptors): + """Pulls out all the symbols from descriptor protos. + + Args: + descriptors: The messages to extract descriptors from. + Yields: + A two element tuple of the type name and descriptor object. + """ + + for desc in descriptors: + yield (_PrefixWithDot(desc.full_name), desc) + for symbol in self._ExtractSymbols(desc.nested_types): + yield symbol + for enum in desc.enum_types: + yield (_PrefixWithDot(enum.full_name), enum) + + def _GetDeps(self, dependencies, visited=None): + """Recursively finds dependencies for file protos. + + Args: + dependencies: The names of the files being depended on. + visited: The names of files already found. + + Yields: + Each direct and indirect dependency. + """ + + visited = visited or set() + for dependency in dependencies: + if dependency not in visited: + visited.add(dependency) + dep_desc = self.FindFileByName(dependency) + yield dep_desc + public_files = [d.name for d in dep_desc.public_dependencies] + yield from self._GetDeps(public_files, visited) + + def _GetTypeFromScope(self, package, type_name, scope): + """Finds a given type name in the current scope. + + Args: + package: The package the proto should be located in. + type_name: The name of the type to be found in the scope. + scope: Dict mapping short and full symbols to message and enum types. + + Returns: + The descriptor for the requested type. + """ + if type_name not in scope: + components = _PrefixWithDot(package).split('.') + while components: + possible_match = '.'.join(components + [type_name]) + if possible_match in scope: + type_name = possible_match + break + else: + components.pop(-1) + return scope[type_name] + + +def _PrefixWithDot(name): + return name if name.startswith('.') else '.%s' % name + + +if _USE_C_DESCRIPTORS: + # TODO(amauryfa): This pool could be constructed from Python code, when we + # support a flag like 'use_cpp_generated_pool=True'. + # pylint: disable=protected-access + _DEFAULT = descriptor._message.default_pool +else: + _DEFAULT = DescriptorPool() + + +def Default(): + return _DEFAULT diff --git a/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py new file mode 100644 index 0000000000..a8ecc07bdf --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/duration_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/duration.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DURATION._serialized_start=51 + _DURATION._serialized_end=93 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py new file mode 100644 index 0000000000..0b4d554db3 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/empty_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/empty.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _EMPTY._serialized_start=48 + _EMPTY._serialized_end=55 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py new file mode 100644 index 0000000000..80a4e96e59 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/field_mask_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/field_mask.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _FIELDMASK._serialized_start=53 + _FIELDMASK._serialized_end=79 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py new file mode 100644 index 0000000000..afdbb78c36 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/_parameterized.py @@ -0,0 +1,443 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
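# Minimal, self-contained usage sketch for the module added below; it is not
# part of the vendored file. The import alias, class name and test data are
# illustrative assumptions only: they show how the parameterized helpers
# documented in the following docstring plug into ordinary unittest runs.
import unittest

from google.protobuf.internal import _parameterized as parameterized


class SquareTest(parameterized.TestCase):

  @parameterized.parameters((2, 4), (3, 9), (-4, 16))
  def testSquare(self, value, expected):
    # Each tuple above becomes one isolated test invocation.
    self.assertEqual(expected, value * value)


if __name__ == '__main__':
  unittest.main()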
+ +"""Adds support for parameterized tests to Python's unittest TestCase class. + +A parameterized test is a method in a test case that is invoked with different +argument tuples. + +A simple example: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + (1, 2, 3), + (4, 5, 9), + (1, 1, 3)) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Each invocation is a separate test case and properly isolated just +like a normal test method, with its own setUp/tearDown cycle. In the +example above, there are three separate testcases, one of which will +fail due to an assertion error (1 + 1 != 3). + +Parameters for individual test cases can be tuples (with positional parameters) +or dictionaries (with named parameters): + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + {'op1': 1, 'op2': 2, 'result': 3}, + {'op1': 4, 'op2': 5, 'result': 9}, + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + +If a parameterized test fails, the error message will show the +original test name (which is modified internally) and the arguments +for the specific invocation, which are part of the string returned by +the shortDescription() method on test cases. + +The id method of the test, used internally by the unittest framework, +is also modified to show the arguments. To make sure that test names +stay the same across several invocations, object representations like + + >>> class Foo(object): + ... pass + >>> repr(Foo()) + '<__main__.Foo object at 0x23d8610>' + +are turned into '<__main__.Foo>'. For even more descriptive names, +especially in test logs, you can use the named_parameters decorator. In +this case, only tuples are supported, and the first parameters has to +be a string (or an object that returns an apt name when converted via +str()): + + class NamedExample(parameterized.TestCase): + @parameterized.named_parameters( + ('Normal', 'aa', 'aaa', True), + ('EmptyPrefix', '', 'abc', True), + ('BothEmpty', '', '', True)) + def testStartsWith(self, prefix, string, result): + self.assertEqual(result, strings.startswith(prefix)) + +Named tests also have the benefit that they can be run individually +from the command line: + + $ testmodule.py NamedExample.testStartsWithNormal + . + -------------------------------------------------------------------- + Ran 1 test in 0.000s + + OK + +Parameterized Classes +===================== +If invocation arguments are shared across test methods in a single +TestCase class, instead of decorating all test methods +individually, the class itself can be decorated: + + @parameterized.parameters( + (1, 2, 3) + (4, 5, 9)) + class ArithmeticTest(parameterized.TestCase): + def testAdd(self, arg1, arg2, result): + self.assertEqual(arg1 + arg2, result) + + def testSubtract(self, arg2, arg2, result): + self.assertEqual(result - arg1, arg2) + +Inputs from Iterables +===================== +If parameters should be shared across several test cases, or are dynamically +created from other sources, a single non-tuple iterable can be passed into +the decorator. 
This iterable will be used to obtain the test cases: + + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + c.op1, c.op2, c.result for c in testcases + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Single-Argument Test Methods +============================ +If a test method takes only one argument, the single argument does not need to +be wrapped into a tuple: + + class NegativeNumberExample(parameterized.TestCase): + @parameterized.parameters( + -1, -3, -4, -5 + ) + def testIsNegative(self, arg): + self.assertTrue(IsNegative(arg)) +""" + +__author__ = 'tmarek@google.com (Torsten Marek)' + +import functools +import re +import types +import unittest +import uuid + +try: + # Since python 3 + import collections.abc as collections_abc +except ImportError: + # Won't work after python 3.8 + import collections as collections_abc + +ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') +_SEPARATOR = uuid.uuid1().hex +_FIRST_ARG = object() +_ARGUMENT_REPR = object() + + +def _CleanRepr(obj): + return ADDR_RE.sub(r'<\1>', repr(obj)) + + +# Helper function formerly from the unittest module, removed from it in +# Python 2.7. +def _StrClass(cls): + return '%s.%s' % (cls.__module__, cls.__name__) + + +def _NonStringIterable(obj): + return (isinstance(obj, collections_abc.Iterable) and + not isinstance(obj, str)) + + +def _FormatParameterList(testcase_params): + if isinstance(testcase_params, collections_abc.Mapping): + return ', '.join('%s=%s' % (argname, _CleanRepr(value)) + for argname, value in testcase_params.items()) + elif _NonStringIterable(testcase_params): + return ', '.join(map(_CleanRepr, testcase_params)) + else: + return _FormatParameterList((testcase_params,)) + + +class _ParameterizedTestIter(object): + """Callable and iterable class for producing new test cases.""" + + def __init__(self, test_method, testcases, naming_type): + """Returns concrete test functions for a test and a list of parameters. + + The naming_type is used to determine the name of the concrete + functions as reported by the unittest framework. If naming_type is + _FIRST_ARG, the testcases must be tuples, and the first element must + have a string representation that is a valid Python identifier. + + Args: + test_method: The decorated test method. + testcases: (list of tuple/dict) A list of parameter + tuples/dicts for individual test invocations. + naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR. + """ + self._test_method = test_method + self.testcases = testcases + self._naming_type = naming_type + + def __call__(self, *args, **kwargs): + raise RuntimeError('You appear to be running a parameterized test case ' + 'without having inherited from parameterized.' + 'TestCase. This is bad because none of ' + 'your test cases are actually being run.') + + def __iter__(self): + test_method = self._test_method + naming_type = self._naming_type + + def MakeBoundParamTest(testcase_params): + @functools.wraps(test_method) + def BoundParamTest(self): + if isinstance(testcase_params, collections_abc.Mapping): + test_method(self, **testcase_params) + elif _NonStringIterable(testcase_params): + test_method(self, *testcase_params) + else: + test_method(self, testcase_params) + + if naming_type is _FIRST_ARG: + # Signal the metaclass that the name of the test function is unique + # and descriptive. 
+ BoundParamTest.__x_use_name__ = True + BoundParamTest.__name__ += str(testcase_params[0]) + testcase_params = testcase_params[1:] + elif naming_type is _ARGUMENT_REPR: + # __x_extra_id__ is used to pass naming information to the __new__ + # method of TestGeneratorMetaclass. + # The metaclass will make sure to create a unique, but nondescriptive + # name for this test. + BoundParamTest.__x_extra_id__ = '(%s)' % ( + _FormatParameterList(testcase_params),) + else: + raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) + + BoundParamTest.__doc__ = '%s(%s)' % ( + BoundParamTest.__name__, _FormatParameterList(testcase_params)) + if test_method.__doc__: + BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) + return BoundParamTest + return (MakeBoundParamTest(c) for c in self.testcases) + + +def _IsSingletonList(testcases): + """True iff testcases contains only a single non-tuple element.""" + return len(testcases) == 1 and not isinstance(testcases[0], tuple) + + +def _ModifyClass(class_object, testcases, naming_type): + assert not getattr(class_object, '_id_suffix', None), ( + 'Cannot add parameters to %s,' + ' which already has parameterized methods.' % (class_object,)) + class_object._id_suffix = id_suffix = {} + # We change the size of __dict__ while we iterate over it, + # which Python 3.x will complain about, so use copy(). + for name, obj in class_object.__dict__.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) + and isinstance(obj, types.FunctionType)): + delattr(class_object, name) + methods = {} + _UpdateClassDictForParamTestCase( + methods, id_suffix, name, + _ParameterizedTestIter(obj, testcases, naming_type)) + for name, meth in methods.items(): + setattr(class_object, name, meth) + + +def _ParameterDecorator(naming_type, testcases): + """Implementation of the parameterization decorators. + + Args: + naming_type: The naming type. + testcases: Testcase parameters. + + Returns: + A function for modifying the decorated object. + """ + def _Apply(obj): + if isinstance(obj, type): + _ModifyClass( + obj, + list(testcases) if not isinstance(testcases, collections_abc.Sequence) + else testcases, + naming_type) + return obj + else: + return _ParameterizedTestIter(obj, testcases, naming_type) + + if _IsSingletonList(testcases): + assert _NonStringIterable(testcases[0]), ( + 'Single parameter argument must be a non-string iterable') + testcases = testcases[0] + + return _Apply + + +def parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples/dicts/objects (for tests + with only one argument). + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_ARGUMENT_REPR, testcases) + + +def named_parameters(*testcases): # pylint: disable=invalid-name + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. The first element of + each parameter tuple should be a string and will be appended to the + name of the test method. + + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples. + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_FIRST_ARG, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for test cases with test generators. 
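The observable effect of this metaclass can be summarized with a small sketch; the generated method names contain a random separator, so the ids shown in the comment are illustrative of the format rather than literal output:

    class PairTest(parameterized.TestCase):

      @parameterized.parameters((1, 2), (3, 4))
      def testPair(self, a, b):
        self.assertLess(a, b)

    # After class creation the plain `testPair` attribute is gone; two
    # generated methods take its place, and their id() embeds the arguments:
    #   __main__.PairTest.testPair(1, 2)
    #   __main__.PairTest.testPair(3, 4)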
+ + A test generator is an iterable in a testcase that produces callables. These + callables must be single-argument methods. These methods are injected into + the class namespace and the original iterable is removed. If the name of the + iterable conforms to the test pattern, the injected methods will be picked + up as tests by the unittest framework. + + In general, it is supposed to be used in conjunction with the + parameters decorator. + """ + + def __new__(mcs, class_name, bases, dct): + dct['_id_suffix'] = id_suffix = {} + for name, obj in dct.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) and + _NonStringIterable(obj)): + iterator = iter(obj) + dct.pop(name) + _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) + + return type.__new__(mcs, class_name, bases, dct) + + +def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): + """Adds individual test cases to a dictionary. + + Args: + dct: The target dictionary. + id_suffix: The dictionary for mapping names to test IDs. + name: The original name of the test case. + iterator: The iterator generating the individual test cases. + """ + for idx, func in enumerate(iterator): + assert callable(func), 'Test generators must yield callables, got %r' % ( + func,) + if getattr(func, '__x_use_name__', False): + new_name = func.__name__ + else: + new_name = '%s%s%d' % (name, _SEPARATOR, idx) + assert new_name not in dct, ( + 'Name of parameterized test case "%s" not unique' % (new_name,)) + dct[new_name] = func + id_suffix[new_name] = getattr(func, '__x_extra_id__', '') + + +class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): + """Base class for test cases using the parameters decorator.""" + + def _OriginalName(self): + return self._testMethodName.split(_SEPARATOR)[0] + + def __str__(self): + return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) + + def id(self): # pylint: disable=invalid-name + """Returns the descriptive ID of the test. + + This is used internally by the unittesting framework to get a name + for the test to be used in reports. + + Returns: + The test id. + """ + return '%s.%s%s' % (_StrClass(self.__class__), + self._OriginalName(), + self._id_suffix.get(self._testMethodName, '')) + + +def CoopTestCase(other_base_class): + """Returns a new base class with a cooperative metaclass base. + + This enables the TestCase to be used in combination + with other base classes that have custom metaclasses, such as + mox.MoxTestBase. + + Only works with metaclasses that do not override type.__new__. + + Example: + + import google3 + import mox + + from google3.testing.pybase import parameterized + + class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): + ... + + Args: + other_base_class: (class) A test case base class. + + Returns: + A new class object. + """ + metaclass = type( + 'CoopMetaclass', + (other_base_class.__metaclass__, + TestGeneratorMetaclass), {}) + return metaclass( + 'CoopTestCase', + (other_base_class, TestCase), {}) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py new file mode 100644 index 0000000000..7fef237670 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/api_implementation.py @@ -0,0 +1,112 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import os +import sys +import warnings + +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. + _api_version = _api_implementation.api_version +except ImportError: + _api_version = -1 # Unspecified by compiler flags. + +if _api_version == 1: + raise ValueError('api_version=1 is no longer supported.') + + +_default_implementation_type = ('cpp' if _api_version > 0 else 'python') + + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python' and 'cpp' are valid +# values. Any other value will be ignored. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _default_implementation_type) + +if _implementation_type != 'python': + _implementation_type = 'cpp' + +if 'PyPy' in sys.version and _implementation_type == 'cpp': + warnings.warn('PyPy does not work yet with cpp protocol buffers. ' + 'Falling back to the python implementation.') + _implementation_type = 'python' + + +# Detect if serialization should be deterministic by default +try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps. + # + # NOTE: Merely importing this automatically enables deterministic proto + # serialization for C++ code, but we still need to export it as a boolean so + # that we can do the same for `_implementation_type == 'python'`. + # + # NOTE2: It is possible for C++ code to enable deterministic serialization by + # default _without_ affecting Python code, if the C++ implementation is not in + # use by this module. 
That is intended behavior, so we don't actually expose + # this boolean outside of this module. + # + # pylint: disable=g-import-not-at-top,unused-import + from google.protobuf import enable_deterministic_proto_serialization + _python_deterministic_proto_serialization = True +except ImportError: + _python_deterministic_proto_serialization = False + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. +# Please don't use this function if possible. +def Type(): + return _implementation_type + + +def _SetType(implementation_type): + """Never use! Only for protobuf benchmark.""" + global _implementation_type + _implementation_type = implementation_type + + +# See comment on 'Type' above. +def Version(): + return 2 + + +# For internal use only +def IsPythonDefaultSerializationDeterministic(): + return _python_deterministic_proto_serialization diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py new file mode 100644 index 0000000000..64353ee4af --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/builder.py @@ -0,0 +1,130 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Builds descriptors, message classes and services for generated _pb2.py. + +This file is only called in python generated _pb2.py files. It builds +descriptors, message classes and services that users can directly use +in generated code. 
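Schematically, a generated _pb2 module drives these helpers roughly as follows (the exact code emitted by protoc varies between versions, and the serialized descriptor bytes are elided here):

    from google.protobuf import descriptor_pool as _descriptor_pool
    from google.protobuf.internal import builder as _builder

    # Bytes of the serialized FileDescriptorProto, emitted by protoc (elided).
    DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'...')

    # globals() is passed as the `module` mapping, so descriptors and message
    # classes are injected directly into the generated module's namespace.
    _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
    _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'example_pb2', globals())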
+""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +_sym_db = _symbol_database.Default() + + +def BuildMessageAndEnumDescriptors(file_des, module): + """Builds message and enum descriptors. + + Args: + file_des: FileDescriptor of the .proto file + module: Generated _pb2 module + """ + + def BuildNestedDescriptors(msg_des, prefix): + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + module_name = prefix + name.upper() + module[module_name] = nested_msg + BuildNestedDescriptors(nested_msg, module_name + '_') + for enum_des in msg_des.enum_types: + module[prefix + enum_des.name.upper()] = enum_des + + for (name, msg_des) in file_des.message_types_by_name.items(): + module_name = '_' + name.upper() + module[module_name] = msg_des + BuildNestedDescriptors(msg_des, module_name + '_') + + +def BuildTopDescriptorsAndMessages(file_des, module_name, module): + """Builds top level descriptors and message classes. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + + def BuildMessage(msg_des): + create_dict = {} + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + create_dict[name] = BuildMessage(nested_msg) + create_dict['DESCRIPTOR'] = msg_des + create_dict['__module__'] = module_name + message_class = _reflection.GeneratedProtocolMessageType( + msg_des.name, (_message.Message,), create_dict) + _sym_db.RegisterMessage(message_class) + return message_class + + # top level enums + for (name, enum_des) in file_des.enum_types_by_name.items(): + module['_' + name.upper()] = enum_des + module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) + for enum_value in enum_des.values: + module[enum_value.name] = enum_value.number + + # top level extensions + for (name, extension_des) in file_des.extensions_by_name.items(): + module[name.upper() + '_FIELD_NUMBER'] = extension_des.number + module[name] = extension_des + + # services + for (name, service) in file_des.services_by_name.items(): + module['_' + name.upper()] = service + + # Build messages. + for (name, msg_des) in file_des.message_types_by_name.items(): + module[name] = BuildMessage(msg_des) + + +def BuildServices(file_des, module_name, module): + """Builds services classes and services stub class. 
+ + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + # pylint: disable=g-import-not-at-top + from google.protobuf import service as _service + from google.protobuf import service_reflection + # pylint: enable=g-import-not-at-top + for (name, service) in file_des.services_by_name.items(): + module[name] = service_reflection.GeneratedServiceType( + name, (_service.Service,), + dict(DESCRIPTOR=service, __module__=module_name)) + stub_name = name + '_Stub' + module[stub_name] = service_reflection.GeneratedServiceStubType( + stub_name, (module[name],), + dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py new file mode 100644 index 0000000000..29fbb53d2f --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/containers.py @@ -0,0 +1,710 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains container classes to represent different protocol buffer types. + +This file defines container classes which represent categories of protocol +buffer field types which need extra maintenance. Currently these categories +are: + +- Repeated scalar fields - These are all repeated fields which aren't + composite (e.g. they are of simple types like int32, string, etc). +- Repeated composite fields - Repeated fields which are composite. This + includes groups and nested messages. 
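In day-to-day use these containers are only reached through generated message objects. A rough sketch of the behavior they provide (``Example`` is a hypothetical generated message with a repeated int32 field ``values`` and a repeated message field ``items``):

    msg = Example()

    # Repeated scalar field: type-checked, list-like container.
    msg.values.append(1)
    msg.values.extend([2, 3])
    # msg.values.append('x')   # rejected by the value checker (TypeError)

    # Repeated composite field: elements are constructed by the container.
    item = msg.items.add(name='first')   # add() creates and returns the element
    msg.items.append(item)               # append() stores a copy of the message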
+""" + +import collections.abc +import copy +import pickle +from typing import ( + Any, + Iterable, + Iterator, + List, + MutableMapping, + MutableSequence, + NoReturn, + Optional, + Sequence, + TypeVar, + Union, + overload, +) + + +_T = TypeVar('_T') +_K = TypeVar('_K') +_V = TypeVar('_V') + + +class BaseContainer(Sequence[_T]): + """Base container class.""" + + # Minimizes memory usage and disallows assignment to other attributes. + __slots__ = ['_message_listener', '_values'] + + def __init__(self, message_listener: Any) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + """ + self._message_listener = message_listener + self._values = [] + + @overload + def __getitem__(self, key: int) -> _T: + ... + + @overload + def __getitem__(self, key: slice) -> List[_T]: + ... + + def __getitem__(self, key): + """Retrieves item by the specified key.""" + return self._values[key] + + def __len__(self) -> int: + """Returns the number of elements in the container.""" + return len(self._values) + + def __ne__(self, other: Any) -> bool: + """Checks if another instance isn't equal to this one.""" + # The concrete classes should define __eq__. + return not self == other + + __hash__ = None + + def __repr__(self) -> str: + return repr(self._values) + + def sort(self, *args, **kwargs) -> None: + # Continue to support the old sort_function keyword argument. + # This is expected to be a rare occurrence, so use LBYL to avoid + # the overhead of actually catching KeyError. + if 'sort_function' in kwargs: + kwargs['cmp'] = kwargs.pop('sort_function') + self._values.sort(*args, **kwargs) + + def reverse(self) -> None: + self._values.reverse() + + +# TODO(slebedev): Remove this. BaseContainer does *not* conform to +# MutableSequence, only its subclasses do. +collections.abc.MutableSequence.register(BaseContainer) + + +class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, type-checked, list-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_type_checker'] + + def __init__( + self, + message_listener: Any, + type_checker: Any, + ) -> None: + """Args: + + message_listener: A MessageListener implementation. The + RepeatedScalarFieldContainer will call this object's Modified() method + when it is modified. + type_checker: A type_checkers.ValueChecker instance to run on elements + inserted into this container. + """ + super().__init__(message_listener) + self._type_checker = type_checker + + def append(self, value: _T) -> None: + """Appends an item to the list. Similar to list.append().""" + self._values.append(self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position. Similar to list.insert().""" + self._values.insert(key, self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given iterable. Similar to list.extend().""" + if elem_seq is None: + return + try: + elem_seq_iter = iter(elem_seq) + except TypeError: + if not elem_seq: + # silently ignore falsy inputs :-/. + # TODO(ptucker): Deprecate this behavior. 
b/18413862 + return + raise + + new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] + if new_values: + self._values.extend(new_values) + self._message_listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one. We do not check the types of the individual fields. + """ + self._values.extend(other) + self._message_listener.Modified() + + def remove(self, elem: _T): + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value) -> None: + """Sets the item on the specified position.""" + if isinstance(key, slice): + if key.step is not None: + raise ValueError('Extended slices not supported') + self._values[key] = map(self._type_checker.CheckValue, value) + self._message_listener.Modified() + else: + self._values[key] = self._type_checker.CheckValue(value) + self._message_listener.Modified() + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + # Special case for the same type which should be common and fast. + if isinstance(other, self.__class__): + return other._values == self._values + # We are presumably comparing against some other sequence type. + return other == self._values + + def __deepcopy__( + self, + unused_memo: Any = None, + ) -> 'RepeatedScalarFieldContainer[_T]': + clone = RepeatedScalarFieldContainer( + copy.deepcopy(self._message_listener), self._type_checker) + clone.MergeFrom(self) + return clone + + def __reduce__(self, **kwargs) -> NoReturn: + raise pickle.PickleError( + "Can't pickle repeated scalar fields, convert to list first") + + +# TODO(slebedev): Constrain T to be a subtype of Message. +class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): + """Simple, list-like container for holding repeated composite fields.""" + + # Disallows assignment to other attributes. + __slots__ = ['_message_descriptor'] + + def __init__(self, message_listener: Any, message_descriptor: Any) -> None: + """ + Note that we pass in a descriptor instead of the generated directly, + since at the time we construct a _RepeatedCompositeFieldContainer we + haven't yet necessarily initialized the type that will be contained in the + container. + + Args: + message_listener: A MessageListener implementation. + The RepeatedCompositeFieldContainer will call this object's + Modified() method when it is modified. + message_descriptor: A Descriptor instance describing the protocol type + that should be present in this container. We'll use the + _concrete_class field of this descriptor when the client calls add(). + """ + super().__init__(message_listener) + self._message_descriptor = message_descriptor + + def add(self, **kwargs: Any) -> _T: + """Adds a new element at the end of the list and returns it. 
Keyword + arguments may be used to initialize the element. + """ + new_element = self._message_descriptor._concrete_class(**kwargs) + new_element._SetListener(self._message_listener) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + return new_element + + def append(self, value: _T) -> None: + """Appends one element by copying the message.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key: int, value: _T) -> None: + """Inserts the item at the specified position by copying.""" + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + new_element.CopyFrom(value) + self._values.insert(key, new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq: Iterable[_T]) -> None: + """Extends by appending the given sequence of elements of the same type + + as this one, copying each individual message. + """ + message_class = self._message_descriptor._concrete_class + listener = self._message_listener + values = self._values + for message in elem_seq: + new_element = message_class() + new_element._SetListener(listener) + new_element.MergeFrom(message) + values.append(new_element) + listener.Modified() + + def MergeFrom( + self, + other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], + ) -> None: + """Appends the contents of another repeated field of the same type to this + one, copying each individual message. + """ + self.extend(other) + + def remove(self, elem: _T) -> None: + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key: Optional[int] = -1) -> _T: + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + @overload + def __setitem__(self, key: int, value: _T) -> None: + ... + + @overload + def __setitem__(self, key: slice, value: Iterable[_T]) -> None: + ... + + def __setitem__(self, key, value): + # This method is implemented to make RepeatedCompositeFieldContainer + # structurally compatible with typing.MutableSequence. It is + # otherwise unsupported and will always raise an error. + raise TypeError( + f'{self.__class__.__name__} object does not support item assignment') + + def __delitem__(self, key: Union[int, slice]) -> None: + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __eq__(self, other: Any) -> bool: + """Compares the current instance with another one.""" + if self is other: + return True + if not isinstance(other, self.__class__): + raise TypeError('Can only compare repeated composite fields against ' + 'other repeated composite fields.') + return self._values == other._values + + +class ScalarMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. 
+ __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', + '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + key_checker: Any, + value_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. + key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._key_checker = key_checker + self._value_checker = value_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + try: + return self._values[key] + except KeyError: + key = self._key_checker.CheckValue(key) + val = self._value_checker.DefaultValue() + self._values[key] = val + return val + + def __contains__(self, item: _K) -> bool: + # We check the key's type to match the strong-typing flavor of the API. + # Also this makes it easier to match the behavior of the C++ implementation. + self._key_checker.CheckValue(item) + return item in self._values + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __setitem__(self, key: _K, value: _V) -> _T: + checked_key = self._key_checker.CheckValue(key) + checked_value = self._value_checker.CheckValue(value) + self._values[checked_key] = checked_value + self._message_listener.Modified() + + def __delitem__(self, key: _K) -> None: + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: + self._values.update(other._values) + self._message_listener.Modified() + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class MessageMap(MutableMapping[_K, _V]): + """Simple, type-checked, dict-like container for with submessage values.""" + + # Disallows assignment to other attributes. + __slots__ = ['_key_checker', '_values', '_message_listener', + '_message_descriptor', '_entry_descriptor'] + + def __init__( + self, + message_listener: Any, + message_descriptor: Any, + key_checker: Any, + entry_descriptor: Any, + ) -> None: + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. 
+ key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + entry_descriptor: The MessageDescriptor of a map entry: key and value. + """ + self._message_listener = message_listener + self._message_descriptor = message_descriptor + self._key_checker = key_checker + self._entry_descriptor = entry_descriptor + self._values = {} + + def __getitem__(self, key: _K) -> _V: + key = self._key_checker.CheckValue(key) + try: + return self._values[key] + except KeyError: + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + self._values[key] = new_element + self._message_listener.Modified() + return new_element + + def get_or_create(self, key: _K) -> _V: + """get_or_create() is an alias for getitem (ie. map[key]). + + Args: + key: The key to get or create in the map. + + This is useful in cases where you want to be explicit that the call is + mutating the map. This can avoid lint errors for statements like this + that otherwise would appear to be pointless statements: + + msg.my_map[key] + """ + return self[key] + + @overload + def get(self, key: _K) -> Optional[_V]: + ... + + @overload + def get(self, key: _K, default: _T) -> Union[_V, _T]: + ... + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __contains__(self, item: _K) -> bool: + item = self._key_checker.CheckValue(item) + return item in self._values + + def __setitem__(self, key: _K, value: _V) -> NoReturn: + raise ValueError('May not set values directly, call my_map[key].foo = 5') + + def __delitem__(self, key: _K) -> None: + key = self._key_checker.CheckValue(key) + del self._values[key] + self._message_listener.Modified() + + def __len__(self) -> int: + return len(self._values) + + def __iter__(self) -> Iterator[_K]: + return iter(self._values) + + def __repr__(self) -> str: + return repr(self._values) + + def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: + # pylint: disable=protected-access + for key in other._values: + # According to documentation: "When parsing from the wire or when merging, + # if there are duplicate map keys the last key seen is used". + if key in self: + del self[key] + self[key].CopyFrom(other[key]) + # self._message_listener.Modified() not required here, because + # mutations to submessages already propagate. + + def InvalidateIterators(self) -> None: + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self) -> None: + self._values.clear() + self._message_listener.Modified() + + def GetEntryClass(self) -> Any: + return self._entry_descriptor._concrete_class + + +class _UnknownField: + """A parsed unknown field.""" + + # Disallows assignment to other attributes. 
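Seen from a generated message, both map containers behave like defaultdicts. A short sketch against a hypothetical message with a ``map<string, int32> counts`` field and a ``map<string, Item> items`` field:

    msg = Example()

    # ScalarMap: plain assignment works, and reading a missing key
    # inserts and returns the default value (0 here).
    msg.counts['a'] = 1
    total = msg.counts['missing']   # total == 0

    # MessageMap: values cannot be assigned directly; mutate them in place.
    msg.items['first'].name = 'first item'
    # msg.items['first'] = Item()   # raises ValueError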
+ __slots__ = ['_field_number', '_wire_type', '_data'] + + def __init__(self, field_number, wire_type, data): + self._field_number = field_number + self._wire_type = wire_type + self._data = data + return + + def __lt__(self, other): + # pylint: disable=protected-access + return self._field_number < other._field_number + + def __eq__(self, other): + if self is other: + return True + # pylint: disable=protected-access + return (self._field_number == other._field_number and + self._wire_type == other._wire_type and + self._data == other._data) + + +class UnknownFieldRef: # pylint: disable=missing-class-docstring + + def __init__(self, parent, index): + self._parent = parent + self._index = index + + def _check_valid(self): + if not self._parent: + raise ValueError('UnknownField does not exist. ' + 'The parent message might be cleared.') + if self._index >= len(self._parent): + raise ValueError('UnknownField does not exist. ' + 'The parent message might be cleared.') + + @property + def field_number(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._field_number + + @property + def wire_type(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._wire_type + + @property + def data(self): + self._check_valid() + # pylint: disable=protected-access + return self._parent._internal_get(self._index)._data + + +class UnknownFieldSet: + """UnknownField container""" + + # Disallows assignment to other attributes. + __slots__ = ['_values'] + + def __init__(self): + self._values = [] + + def __getitem__(self, index): + if self._values is None: + raise ValueError('UnknownFields does not exist. ' + 'The parent message might be cleared.') + size = len(self._values) + if index < 0: + index += size + if index < 0 or index >= size: + raise IndexError('index %d out of range'.index) + + return UnknownFieldRef(self, index) + + def _internal_get(self, index): + return self._values[index] + + def __len__(self): + if self._values is None: + raise ValueError('UnknownFields does not exist. ' + 'The parent message might be cleared.') + return len(self._values) + + def _add(self, field_number, wire_type, data): + unknown_field = _UnknownField(field_number, wire_type, data) + self._values.append(unknown_field) + return unknown_field + + def __iter__(self): + for i in range(len(self)): + yield UnknownFieldRef(self, i) + + def _extend(self, other): + if other is None: + return + # pylint: disable=protected-access + self._values.extend(other._values) + + def __eq__(self, other): + if self is other: + return True + # Sort unknown fields because their order shouldn't + # affect equality test. + values = list(self._values) + if other is None: + return not values + values.sort() + # pylint: disable=protected-access + other_values = sorted(other._values) + return values == other_values + + def _clear(self): + for value in self._values: + # pylint: disable=protected-access + if isinstance(value._data, UnknownFieldSet): + value._data._clear() # pylint: disable=protected-access + self._values = None diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py new file mode 100644 index 0000000000..bc1b7b785c --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/decoder.py @@ -0,0 +1,1029 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Code for decoding protocol buffer primitives.
+
+This code is very similar to encoder.py -- read the docs for that module first.
+
+A "decoder" is a function with the signature:
+  Decode(buffer, pos, end, message, field_dict)
+The arguments are:
+  buffer: The string containing the encoded message.
+  pos: The current position in the string.
+  end: The position in the string where the current message ends. May be
+    less than len(buffer) if we're reading a sub-message.
+  message: The message object into which we're parsing.
+  field_dict: message._fields (avoids a hashtable lookup).
+The decoder reads the field and stores it into field_dict, returning the new
+buffer position. A decoder for a repeated field may proactively decode all of
+the elements of that field, if they appear consecutively.
+
+Note that decoders may throw any of the following:
+  IndexError: Indicates a truncated message.
+  struct.error: Unpacking of a fixed-width field failed.
+  message.DecodeError: Other errors.
+
+Decoders are expected to raise an exception if they are called with pos > end.
+This allows callers to be lax about bounds checking: it's fine to read past
+"end" as long as you are sure that someone else will notice and throw an
+exception later on.
+
+Something up the call stack is expected to catch IndexError and struct.error
+and convert them to message.DecodeError.
+
+Decoders are constructed using decoder constructors with the signature:
+  MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
+The arguments are:
+  field_number: The field number of the field we want to decode.
+  is_repeated: Is the field a repeated field? (bool)
+  is_packed: Is the field a packed field? (bool)
+  key: The key to use when looking up the field within field_dict.
+    (This is actually the FieldDescriptor but nothing in this
+     file should depend on that.)
+  new_default: A function which takes a message object as a parameter and
+    returns a new instance of the default value for this field.
+ (This is called for repeated fields and sub-messages, when an + instance does not already exist.) + +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import math +import struct + +from google.protobuf.internal import containers +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import message + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return an encoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. + """ + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + + +def _SignedVarintDecoder(bits, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + signbit = 1 << (bits - 1) + mask = (1 << bits) - 1 + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = (result ^ signbit) - signbit + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# All 32-bit and 64-bit values are represented as int. +_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +_DecodeSignedVarint = _SignedVarintDecoder(64, int) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) + + +def ReadTag(buffer, pos): + """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. + + Args: + buffer: memoryview object of the encoded bytes + pos: int of the current position to start from + + Returns: + Tuple[bytes, int] of the tag data and new position. + """ + start = pos + while buffer[pos] & 0x80: + pos += 1 + pos += 1 + + tag_bytes = buffer[start:pos].tobytes() + return tag_bytes, pos + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type. 
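The varint helpers defined above can be exercised directly, which is handy when inspecting wire data by hand. A small sketch (the byte strings are ordinary protobuf varint encodings; the import assumes the regular `google.protobuf` package layout, which this vendored copy mirrors):

    from google.protobuf.internal import decoder

    # 150 is encoded low 7 bits first, with the MSB set on all but the last
    # byte: 150 = 0b1_0010110 -> b'\x96\x01'.
    value, pos = decoder._DecodeVarint(b'\x96\x01', 0)
    assert (value, pos) == (150, 2)

    # ReadTag operates on a memoryview and returns the raw tag bytes plus the
    # position just past them; field 1 with wire type 0 encodes as b'\x08'.
    tag_bytes, pos = decoder.ReadTag(memoryview(b'\x08\x96\x01'), 0)
    assert tag_bytes == b'\x08' and pos == 1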
+ + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (new_value, pos) = decode_value(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not new_value: + field_dict.pop(key, None) + else: + field_dict[key] = new_value + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized float to a float and new position. 
+
+    Args:
+      buffer: memoryview of the serialized bytes
+      pos: int, position in the memory view to start at.
+
+    Returns:
+      Tuple[float, int] of the deserialized float value and new position
+      in the serialized data.
+    """
+    # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign
+    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
+    new_pos = pos + 4
+    float_bytes = buffer[pos:new_pos].tobytes()
+
+    # If this value has all its exponent bits set, then it's non-finite.
+    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
+    # To avoid that, we parse it specially.
+    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
+      # If at least one significand bit is set...
+      if float_bytes[0:3] != b'\x00\x00\x80':
+        return (math.nan, new_pos)
+      # If sign bit is set...
+      if float_bytes[3:4] == b'\xFF':
+        return (-math.inf, new_pos)
+      return (math.inf, new_pos)
+
+    # Note that we expect someone up-stack to catch struct.error and convert
+    # it to _DecodeError -- this way we don't have to set up exception-
+    # handling blocks every time we parse one value.
+    result = local_unpack('<f', float_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
+
+
+def _DoubleDecoder():
+  """Returns a decoder for a double field.
+
+  This code works around a bug in struct.unpack for not-a-number.
+  """
+
+  local_unpack = struct.unpack
+
+  def InnerDecode(buffer, pos):
+    """Decode serialized double to a double and new position.
+
+    Args:
+      buffer: memoryview of the serialized bytes.
+      pos: int, position in the memory view to start at.
+
+    Returns:
+      Tuple[float, int] of the decoded double value and new position
+      in the serialized data.
+    """
+    # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign
+    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
+    new_pos = pos + 8
+    double_bytes = buffer[pos:new_pos].tobytes()
+
+    # If this value has all its exponent bits set and at least one significand
+    # bit set, it's not a number. In Python 2.4, struct.unpack will treat it
+    # as inf or -inf. To avoid that, we treat it specially.
+    if ((double_bytes[7:8] in b'\x7F\xFF')
+        and (double_bytes[6:7] >= b'\xF0')
+        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
+      return (math.nan, new_pos)
+
+    # Note that we expect someone up-stack to catch struct.error and convert
+    # it to _DecodeError -- this way we don't have to set up exception-
+    # handling blocks every time we parse one value.
+    result = local_unpack('<d', double_bytes)[0]
+    return (result, new_pos)
+  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
+
+
+def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
+                clear_if_default=False):
+  """Returns a decoder for enum field."""
+  enum_type = key.enum_type
+
+  if is_packed:
+    local_DecodeVarint = _DecodeVarint
+    def DecodePackedField(buffer, pos, end, message, field_dict):
+      """Decode serialized packed enum to its value and a new position.
+
+      Args:
+        buffer: memoryview of the serialized bytes.
+        pos: int, position in the memory view to start at.
+        end: int, end position of serialized data
+        message: Message object to store unknown fields in
+        field_dict: Map[Descriptor, Any] to store decoded values in.
+
+      Returns:
+        int, new position in serialized data.
+      """
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      (endpoint, pos) = local_DecodeVarint(buffer, pos)
+      endpoint += pos
+      if endpoint > end:
+        raise _DecodeError('Truncated message.')
+      while pos < endpoint:
+        value_start_pos = pos
+        (element, pos) = _DecodeSignedVarint32(buffer, pos)
+        # pylint: disable=protected-access
+        if element in enum_type.values_by_number:
+          value.append(element)
+        else:
+          if not message._unknown_fields:
+            message._unknown_fields = []
+          tag_bytes = encoder.TagBytes(field_number,
+                                       wire_format.WIRETYPE_VARINT)
+
+          message._unknown_fields.append(
+              (tag_bytes, buffer[value_start_pos:pos].tobytes()))
+          if message._unknown_field_set is None:
+            message._unknown_field_set = containers.UnknownFieldSet()
+          message._unknown_field_set._add(
+              field_number, wire_format.WIRETYPE_VARINT, element)
+        # pylint: enable=protected-access
+      if pos > endpoint:
+        if element in enum_type.values_by_number:
+          del value[-1]  # Discard corrupt value.
+        else:
+          del message._unknown_fields[-1]
+          # pylint: disable=protected-access
+          del message._unknown_field_set._values[-1]
+          # pylint: enable=protected-access
+        raise _DecodeError('Packed element was truncated.')
+      return pos
+    return DecodePackedField
+  elif is_repeated:
+    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      """Decode serialized repeated enum to its value and a new position.
+
+      Args:
+        buffer: memoryview of the serialized bytes.
+        pos: int, position in the memory view to start at.
+        end: int, end position of serialized data
+        message: Message object to store unknown fields in
+        field_dict: Map[Descriptor, Any] to store decoded values in.
+
+      Returns:
+        int, new position in serialized data.
+      """
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
+        # pylint: disable=protected-access
+        if element in enum_type.values_by_number:
+          value.append(element)
+        else:
+          if not message._unknown_fields:
+            message._unknown_fields = []
+          message._unknown_fields.append(
+              (tag_bytes, buffer[pos:new_pos].tobytes()))
+          if message._unknown_field_set is None:
+            message._unknown_field_set = containers.UnknownFieldSet()
+          message._unknown_field_set._add(
+              field_number, wire_format.WIRETYPE_VARINT, element)
+          # pylint: enable=protected-access
+        # Predict that the next tag is another copy of the same repeated
+        # field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
+          # Prediction failed. Return.
+          if new_pos > end:
+            raise _DecodeError('Truncated message.')
+          return new_pos
+    return DecodeRepeatedField
+  else:
+    def DecodeField(buffer, pos, end, message, field_dict):
+      """Decode serialized repeated enum to its value and a new position.
+
+      Args:
+        buffer: memoryview of the serialized bytes.
+        pos: int, position in the memory view to start at.
+        end: int, end position of serialized data
+        message: Message object to store unknown fields in
+        field_dict: Map[Descriptor, Any] to store decoded values in.
+
+      Returns:
+        int, new position in serialized data.
+      """
+      value_start_pos = pos
+      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
+      if pos > end:
+        raise _DecodeError('Truncated message.')
+      if clear_if_default and not enum_value:
+        field_dict.pop(key, None)
+        return pos
+      # pylint: disable=protected-access
+      if enum_value in enum_type.values_by_number:
+        field_dict[key] = enum_value
+      else:
+        if not message._unknown_fields:
+          message._unknown_fields = []
+        tag_bytes = encoder.TagBytes(field_number,
+                                     wire_format.WIRETYPE_VARINT)
+        message._unknown_fields.append(
+            (tag_bytes, buffer[value_start_pos:pos].tobytes()))
+        if message._unknown_field_set is None:
+          message._unknown_field_set = containers.UnknownFieldSet()
+        message._unknown_field_set._add(
+            field_number, wire_format.WIRETYPE_VARINT, enum_value)
+      # pylint: enable=protected-access
+      return pos
+    return DecodeField
+
+
+# --------------------------------------------------------------------
+
+
+Int32Decoder = _SimpleDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)
+
+Int64Decoder = _SimpleDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)
+
+UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
+UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)
+
+SInt32Decoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
+SInt64Decoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)
+
+# Note that Python conveniently guarantees that when using the '<' prefix on
+# formats, they will also have the same size across all platforms (as opposed
+# to without the prefix, where their sizes depend on the C compiler's basic
+# type sizes).
+Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
+Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
+SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
+SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
+FloatDecoder = _FloatDecoder()
+DoubleDecoder = _DoubleDecoder()
+
+BoolDecoder = _ModifiedDecoder(
+    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
+
+
+def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
+                  clear_if_default=False):
+  """Returns a decoder for a string field."""
+
+  local_DecodeVarint = _DecodeVarint
+
+  def _ConvertToUnicode(memview):
+    """Convert byte to unicode."""
+    byte_str = memview.tobytes()
+    try:
+      value = str(byte_str, 'utf-8')
+    except UnicodeDecodeError as e:
+      # add more information to the error message and re-raise it.
+      e.reason = '%s in field: %s' % (e, key.full_name)
+      raise
+
+    return value
+
+  assert not is_packed
+  if is_repeated:
+    tag_bytes = encoder.TagBytes(field_number,
+                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
+    tag_len = len(tag_bytes)
+    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+      value = field_dict.get(key)
+      if value is None:
+        value = field_dict.setdefault(key, new_default(message))
+      while 1:
+        (size, pos) = local_DecodeVarint(buffer, pos)
+        new_pos = pos + size
+        if new_pos > end:
+          raise _DecodeError('Truncated string.')
+        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
+        # Predict that the next tag is another copy of the same repeated field.
+        pos = new_pos + tag_len
+        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+          # Prediction failed. Return.
+ return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos].tobytes()) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = buffer[pos:new_pos].tobytes() + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
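+      # Groups are the legacy delimited encoding: instead of a length prefix,
+      # the nested fields simply run until an END_GROUP tag with the same
+      # field number, which is the end_tag_bytes checked below.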
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(descriptor): + """Returns a decoder for a MessageSet item. + + The parameter is the message Descriptor. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + """Decode serialized message set to its value and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
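+    # Illustrative aside (not part of the upstream protobuf source): an item
+    # carrying type_id 12345 and a two-byte payload is laid out on the wire as
+    #   0x0B            start-group tag for Item   (field 1, wire type 3)
+    #   0x10 0xB9 0x60  type_id = 12345            (field 2, varint)
+    #   0x1A 0x02 ...   message, length 2          (field 3, length-delimited)
+    #   0x0C            end-group tag for Item     (field 1, wire type 4)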
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = message.Extensions._FindExtensionByNumber(type_id) + # pylint: disable=protected-access + if extension is not None: + value = field_dict.get(extension) + if value is None: + message_type = extension.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + message._FACTORY.GetPrototype(message_type) + value = field_dict.setdefault( + extension, message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) + if message._unknown_field_set is None: + message._unknown_field_set = containers.UnknownFieldSet() + message._unknown_field_set._add( + type_id, + wire_format.WIRETYPE_LENGTH_DELIMITED, + buffer[message_start:message_end].tobytes()) + # pylint: enable=protected-access + + return pos + + return DecodeItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. + message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].CopyFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. 
Returns the new position."""
+  # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
+  # With this code, ord(b'') raises TypeError.  Both are handled in
+  # python_message.py to generate a 'Truncated message' error.
+  while ord(buffer[pos:pos+1].tobytes()) & 0x80:
+    pos += 1
+  pos += 1
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+def _SkipFixed64(buffer, pos, end):
+  """Skip a fixed64 value.  Returns the new position."""
+
+  pos += 8
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+
+def _DecodeFixed64(buffer, pos):
+  """Decode a fixed64."""
+  new_pos = pos + 8
+  return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
+
+
+def _SkipLengthDelimited(buffer, pos, end):
+  """Skip a length-delimited value.  Returns the new position."""
+
+  (size, pos) = _DecodeVarint(buffer, pos)
+  pos += size
+  if pos > end:
+    raise _DecodeError('Truncated message.')
+  return pos
+
+
+def _SkipGroup(buffer, pos, end):
+  """Skip sub-group.  Returns the new position."""
+
+  while 1:
+    (tag_bytes, pos) = ReadTag(buffer, pos)
+    new_pos = SkipField(buffer, pos, end, tag_bytes)
+    if new_pos == -1:
+      return pos
+    pos = new_pos
+
+
+def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
+  """Decode UnknownFieldSet.  Returns the UnknownFieldSet and new position."""
+
+  unknown_field_set = containers.UnknownFieldSet()
+  while end_pos is None or pos < end_pos:
+    (tag_bytes, pos) = ReadTag(buffer, pos)
+    (tag, _) = _DecodeVarint(tag_bytes, 0)
+    field_number, wire_type = wire_format.UnpackTag(tag)
+    if wire_type == wire_format.WIRETYPE_END_GROUP:
+      break
+    (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
+    # pylint: disable=protected-access
+    unknown_field_set._add(field_number, wire_type, data)
+
+  return (unknown_field_set, pos)
+
+
+def _DecodeUnknownField(buffer, pos, wire_type):
+  """Decode a unknown field.  Returns the UnknownField and new position."""
+
+  if wire_type == wire_format.WIRETYPE_VARINT:
+    (data, pos) = _DecodeVarint(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_FIXED64:
+    (data, pos) = _DecodeFixed64(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_FIXED32:
+    (data, pos) = _DecodeFixed32(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
+    (size, pos) = _DecodeVarint(buffer, pos)
+    data = buffer[pos:pos+size].tobytes()
+    pos += size
+  elif wire_type == wire_format.WIRETYPE_START_GROUP:
+    (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
+  elif wire_type == wire_format.WIRETYPE_END_GROUP:
+    return (0, -1)
+  else:
+    raise _DecodeError('Wrong wire type in tag.')
+
+  return (data, pos)
+
+
+def _EndGroup(buffer, pos, end):
+  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
+
+  return -1
+
+
+def _SkipFixed32(buffer, pos, end):
+  """Skip a fixed32 value. 
Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed32(buffer, pos): + """Decode a fixed32.""" + + new_pos = pos + 4 + return (struct.unpack('B').pack + + def EncodeVarint(write, value, unused_deterministic=None): + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeVarint + + +def _SignedVarintEncoder(): + """Return an encoder for a basic signed varint value (does not include + tag).""" + + local_int2byte = struct.Struct('>B').pack + + def EncodeSignedVarint(write, value, unused_deterministic=None): + if value < 0: + value += (1 << 64) + bits = value & 0x7f + value >>= 7 + while value: + write(local_int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 + return write(local_int2byte(bits)) + + return EncodeSignedVarint + + +_EncodeVarint = _VarintEncoder() +_EncodeSignedVarint = _SignedVarintEncoder() + + +def _VarintBytes(value): + """Encode the given integer as a varint and return the bytes. This is only + called at startup time so it doesn't need to be fast.""" + + pieces = [] + _EncodeVarint(pieces.append, value, True) + return b"".join(pieces) + + +def TagBytes(field_number, wire_type): + """Encode the given tag and return the bytes. Only called at startup.""" + + return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) + +# -------------------------------------------------------------------- +# As with sizers (see above), we have a number of common encoder +# implementations. + + +def _SimpleEncoder(wire_type, encode_value, compute_value_size): + """Return a constructor for an encoder for fields of a particular type. + + Args: + wire_type: The field's wire type, for encoding tags. + encode_value: A function which encodes an individual value, e.g. + _EncodeVarint(). + compute_value_size: A function which computes the size of an individual + value, e.g. _VarintSize(). + """ + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(element) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, element, deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, element, deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, value, deterministic) + return EncodeField + + return SpecificEncoder + + +def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): + """Like SimpleEncoder but additionally invokes modify_value on every value + before passing it to encode_value. 
Usually modify_value is ZigZagEncode.""" + + def SpecificEncoder(field_number, is_repeated, is_packed): + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + size = 0 + for element in value: + size += compute_value_size(modify_value(element)) + local_EncodeVarint(write, size, deterministic) + for element in value: + encode_value(write, modify_value(element), deterministic) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, deterministic): + for element in value: + write(tag_bytes) + encode_value(write, modify_value(element), deterministic) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, deterministic): + write(tag_bytes) + return encode_value(write, modify_value(value), deterministic) + return EncodeField + + return SpecificEncoder + + +def _StructPackEncoder(wire_type, format): + """Return a constructor for an encoder for a fixed-width field. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + write(local_struct_pack(format, element)) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + write(local_struct_pack(format, element)) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + return write(local_struct_pack(format, value)) + return EncodeField + + return SpecificEncoder + + +def _FloatingPointEncoder(wire_type, format): + """Return a constructor for an encoder for float fields. + + This is like StructPackEncoder, but catches errors that may be due to + passing non-finite floating-point values to struct.pack, and makes a + second attempt to encode those values. + + Args: + wire_type: The field's wire type, for encoding tags. + format: The format string to pass to struct.pack(). + """ + + value_size = struct.calcsize(format) + if value_size == 4: + def EncodeNonFiniteOrRaise(write, value): + # Remember that the serialized form uses little-endian byte order. 
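+      # For reference: these byte strings are the little-endian IEEE-754
+      # single-precision encodings, e.g. struct.pack('<f', float('inf')) is
+      # b'\x00\x00\x80\x7f' and the quiet NaN 0x7FC00000 serializes as
+      # b'\x00\x00\xc0\x7f'.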
+ if value == _POS_INF: + write(b'\x00\x00\x80\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x80\xFF') + elif value != value: # NaN + write(b'\x00\x00\xC0\x7F') + else: + raise + elif value_size == 8: + def EncodeNonFiniteOrRaise(write, value): + if value == _POS_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') + elif value == _NEG_INF: + write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') + elif value != value: # NaN + write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') + else: + raise + else: + raise ValueError('Can\'t encode floating-point values that are ' + '%d bytes long (only 4 or 8)' % value_size) + + def SpecificEncoder(field_number, is_repeated, is_packed): + local_struct_pack = struct.pack + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint + def EncodePackedField(write, value, deterministic): + write(tag_bytes) + local_EncodeVarint(write, len(value) * value_size, deterministic) + for element in value: + # This try/except block is going to be faster than any code that + # we could write to check whether element is finite. + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodePackedField + elif is_repeated: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeRepeatedField(write, value, unused_deterministic=None): + for element in value: + write(tag_bytes) + try: + write(local_struct_pack(format, element)) + except SystemError: + EncodeNonFiniteOrRaise(write, element) + return EncodeRepeatedField + else: + tag_bytes = TagBytes(field_number, wire_type) + def EncodeField(write, value, unused_deterministic=None): + write(tag_bytes) + try: + write(local_struct_pack(format, value)) + except SystemError: + EncodeNonFiniteOrRaise(write, value) + return EncodeField + + return SpecificEncoder + + +# ==================================================================== +# Here we declare an encoder constructor for each field type. These work +# very similarly to sizer constructors, described earlier. + + +Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) + +UInt32Encoder = UInt64Encoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) + +SInt32Encoder = SInt64Encoder = _ModifiedEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, + wire_format.ZigZagEncode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, ' str + ValueType = int + + def __init__(self, enum_type): + """Inits EnumTypeWrapper with an EnumDescriptor.""" + self._enum_type = enum_type + self.DESCRIPTOR = enum_type # pylint: disable=invalid-name + + def Name(self, number): # pylint: disable=invalid-name + """Returns a string containing the name of an enum value.""" + try: + return self._enum_type.values_by_number[number].name + except KeyError: + pass # fall out to break exception chaining + + if not isinstance(number, int): + raise TypeError( + 'Enum value for {} must be an int, but got {} {!r}.'.format( + self._enum_type.name, type(number), number)) + else: + # repr here to handle the odd case when you pass in a boolean. 
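+      # (bool is a subclass of int, so True and False reach this branch and
+      # {!r} keeps them distinguishable from 1 and 0.)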
+ raise ValueError('Enum {} has no name defined for value {!r}'.format( + self._enum_type.name, number)) + + def Value(self, name): # pylint: disable=invalid-name + """Returns the value corresponding to the given enum name.""" + try: + return self._enum_type.values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise ValueError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def keys(self): + """Return a list of the string names in the enum. + + Returns: + A list of strs, in the order they were defined in the .proto file. + """ + + return [value_descriptor.name + for value_descriptor in self._enum_type.values] + + def values(self): + """Return a list of the integer values in the enum. + + Returns: + A list of ints, in the order they were defined in the .proto file. + """ + + return [value_descriptor.number + for value_descriptor in self._enum_type.values] + + def items(self): + """Return a list of the (name, value) pairs of the enum. + + Returns: + A list of (str, int) pairs, in the order they were defined + in the .proto file. + """ + return [(value_descriptor.name, value_descriptor.number) + for value_descriptor in self._enum_type.values] + + def __getattr__(self, name): + """Returns the value corresponding to the given enum name.""" + try: + return super( + EnumTypeWrapper, + self).__getattribute__('_enum_type').values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise AttributeError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py new file mode 100644 index 0000000000..b346cf283e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/extension_dict.py @@ -0,0 +1,213 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains _ExtensionDict class to represent extensions. +""" + +from google.protobuf.internal import type_checkers +from google.protobuf.descriptor import FieldDescriptor + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' % extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +# TODO(robinson): Unify error handling of "unknown extension" crap. +# TODO(robinson): Support iteritems()-style iteration over all +# extensions with the "has" bits turned on? +class _ExtensionDict(object): + + """Dict-like container for Extension fields on proto instances. + + Note that in all cases we expect extension handles to be + FieldDescriptors. + """ + + def __init__(self, extended_message): + """ + Args: + extended_message: Message instance for which we are the Extensions dict. + """ + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + message_type = extension_handle.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=protected-access + self._extended_message._FACTORY.GetPrototype(message_type) + assert getattr(extension_handle.message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (extension_handle.full_name, + extension_handle.message_type.full_name)) + result = extension_handle.message_type._concrete_class() + try: + result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. 
This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [field for field in my_fields if field.is_extension] + other_fields = [field for field in other_fields if field.is_extension] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __len__(self): + fields = self._extended_message.ListFields() + # Get rid of non-extension fields. + extension_fields = [field for field in fields if field[0].is_extension] + return len(extension_fields) + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. Note also that we may have to call _Modified() when we do + # successfully set a field this way, to set any necessary "has" bits in the + # ancestors of the extended message. + def __setitem__(self, extension_handle, value): + """If extension_handle specifies a non-repeated, scalar extension + field, sets the value of that field. + """ + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or + extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): + raise TypeError( + 'Cannot assign to extension "%s" because it is a repeated or ' + 'composite type.' % extension_handle.full_name) + + # It's slightly wasteful to lookup the type checker each time, + # but we expect this to be a vanishingly uncommon case anyway. + type_checker = type_checkers.GetTypeChecker(extension_handle) + # pylint: disable=protected-access + self._extended_message._fields[extension_handle] = ( + type_checker.CheckValue(value)) + self._extended_message._Modified() + + def __delitem__(self, extension_handle): + self._extended_message.ClearExtension(extension_handle) + + def _FindExtensionByName(self, name): + """Tries to find a known extension with the specified name. + + Args: + name: Extension full name. + + Returns: + Extension field descriptor. + """ + return self._extended_message._extensions_by_name.get(name, None) + + def _FindExtensionByNumber(self, number): + """Tries to find a known extension with the field number. + + Args: + number: Extension field number. + + Returns: + Extension field descriptor. 
+ """ + return self._extended_message._extensions_by_number.get(number, None) + + def __iter__(self): + # Return a generator over the populated extension fields + return (f[0] for f in self._extended_message.ListFields() + if f[0].is_extension) + + def __contains__(self, extension_handle): + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if extension_handle not in self._extended_message._fields: + return False + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + return bool(self._extended_message._fields.get(extension_handle)) + + if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + value = self._extended_message._fields.get(extension_handle) + # pylint: disable=protected-access + return value is not None and value._is_present_in_parent + + return True diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py new file mode 100644 index 0000000000..0fc255a774 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_listener.py @@ -0,0 +1,78 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Defines a listener interface for observing certain +state transitions on Message objects. + +Also defines a null implementation of this interface. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +class MessageListener(object): + + """Listens for modifications made to a message. Meant to be registered via + Message._SetListener(). + + Attributes: + dirty: If True, then calling Modified() would be a no-op. This can be + used to avoid these calls entirely in the common case. + """ + + def Modified(self): + """Called every time the message is modified in such a way that the parent + message may need to be updated. 
This currently means either: + (a) The message was modified for the first time, so the parent message + should henceforth mark the message as present. + (b) The message's cached byte size became dirty -- i.e. the message was + modified for the first time after a previous call to ByteSize(). + Therefore the parent should also mark its byte size as dirty. + Note that (a) implies (b), since new objects start out with a client cached + size (zero). However, we document (a) explicitly because it is important. + + Modified() will *only* be called in response to one of these two events -- + not every time the sub-message is modified. + + Note that if the listener's |dirty| attribute is true, then calling + Modified at the moment would be a no-op, so it can be skipped. Performance- + sensitive callers should check this attribute directly before calling since + it will be true most of the time. + """ + + raise NotImplementedError + + +class NullMessageListener(object): + + """No-op MessageListener implementation.""" + + def Modified(self): + pass diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py new file mode 100644 index 0000000000..63651a3f19 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/message_set_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageSet.RegisterExtension(message_set_extension3) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) + TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + + DESCRIPTOR._options = None + _TESTMESSAGESET._options = None + _TESTMESSAGESET._serialized_options = b'\010\001' + _TESTMESSAGESET._serialized_start=83 + _TESTMESSAGESET._serialized_end=113 + _TESTMESSAGESETEXTENSION1._serialized_start=116 + _TESTMESSAGESETEXTENSION1._serialized_end=281 + _TESTMESSAGESETEXTENSION2._serialized_start=284 + _TESTMESSAGESETEXTENSION2._serialized_end=451 + _TESTMESSAGESETEXTENSION3._serialized_start=453 + _TESTMESSAGESETEXTENSION3._serialized_end=493 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py new file mode 100644 index 0000000000..5497083197 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/missing_enum_values.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + 
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None + _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' + _TESTENUMVALUES._serialized_start=88 + _TESTENUMVALUES._serialized_end=409 + _TESTENUMVALUES_NESTEDENUM._serialized_start=378 + _TESTENUMVALUES_NESTEDENUM._serialized_end=409 + _TESTMISSINGENUMVALUES._serialized_start=412 + _TESTMISSINGENUMVALUES._serialized_end=751 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 + _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 + _JUSTSTRING._serialized_start=753 + _JUSTSTRING._serialized_end=780 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py new file mode 100644 index 0000000000..0953706bac --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions_dynamic.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) + google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) + + DESCRIPTOR._options = None + _DYNAMICMESSAGETYPE._serialized_start=132 + _DYNAMICMESSAGETYPE._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py new file mode 100644 index 0000000000..1cfa1b7c8b --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_extensions_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_extensions.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + ExtendedMessage.RegisterExtension(optional_int_extension) + ExtendedMessage.RegisterExtension(optional_message_extension) + ExtendedMessage.RegisterExtension(repeated_int_extension) + ExtendedMessage.RegisterExtension(repeated_message_extension) + + DESCRIPTOR._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None + _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None + _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' + _NESTEDMESSAGE.fields_by_name['submessage']._options = None + _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' + _TOPLEVELMESSAGE._serialized_start=77 + _TOPLEVELMESSAGE._serialized_end=230 + _NESTEDMESSAGE._serialized_start=232 + _NESTEDMESSAGE._serialized_end=314 + _EXTENDEDMESSAGE._serialized_start=316 + _EXTENDEDMESSAGE._serialized_end=391 + _FOREIGNMESSAGE._serialized_start=393 + _FOREIGNMESSAGE._serialized_end=438 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py new file mode 100644 index 0000000000..d7f7115609 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/more_messages_pb2.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_messages.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + OutOfOrderFields.RegisterExtension(optional_uint64) + OutOfOrderFields.RegisterExtension(optional_int64) + globals()['class'].RegisterExtension(globals()['continue']) + getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) + globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) + + DESCRIPTOR._options = None + _IS._serialized_start=2669 + _IS._serialized_end=2696 + _OUTOFORDERFIELDS._serialized_start=74 + _OUTOFORDERFIELDS._serialized_end=178 + _CLASS._serialized_start=181 + _CLASS._serialized_end=514 + _CLASS_TRY._serialized_start=448 + _CLASS_TRY._serialized_end=476 + _CLASS_FOR._serialized_start=478 + _CLASS_FOR._serialized_end=506 + _EXTENDCLASS._serialized_start=516 + _EXTENDCLASS._serialized_end=579 + _TESTFULLKEYWORD._serialized_start=581 + _TESTFULLKEYWORD._serialized_end=707 + _LOTSNESTEDMESSAGE._serialized_start=710 + _LOTSNESTEDMESSAGE._serialized_end=2667 + _LOTSNESTEDMESSAGE_B0._serialized_start=731 + _LOTSNESTEDMESSAGE_B0._serialized_end=735 + _LOTSNESTEDMESSAGE_B1._serialized_start=737 + 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 + _LOTSNESTEDMESSAGE_B2._serialized_start=743 + _LOTSNESTEDMESSAGE_B2._serialized_end=747 + _LOTSNESTEDMESSAGE_B3._serialized_start=749 + _LOTSNESTEDMESSAGE_B3._serialized_end=753 + _LOTSNESTEDMESSAGE_B4._serialized_start=755 + _LOTSNESTEDMESSAGE_B4._serialized_end=759 + _LOTSNESTEDMESSAGE_B5._serialized_start=761 + _LOTSNESTEDMESSAGE_B5._serialized_end=765 + _LOTSNESTEDMESSAGE_B6._serialized_start=767 + _LOTSNESTEDMESSAGE_B6._serialized_end=771 + _LOTSNESTEDMESSAGE_B7._serialized_start=773 + _LOTSNESTEDMESSAGE_B7._serialized_end=777 + _LOTSNESTEDMESSAGE_B8._serialized_start=779 + _LOTSNESTEDMESSAGE_B8._serialized_end=783 + _LOTSNESTEDMESSAGE_B9._serialized_start=785 + _LOTSNESTEDMESSAGE_B9._serialized_end=789 + _LOTSNESTEDMESSAGE_B10._serialized_start=791 + _LOTSNESTEDMESSAGE_B10._serialized_end=796 + _LOTSNESTEDMESSAGE_B11._serialized_start=798 + _LOTSNESTEDMESSAGE_B11._serialized_end=803 + _LOTSNESTEDMESSAGE_B12._serialized_start=805 + _LOTSNESTEDMESSAGE_B12._serialized_end=810 + _LOTSNESTEDMESSAGE_B13._serialized_start=812 + _LOTSNESTEDMESSAGE_B13._serialized_end=817 + _LOTSNESTEDMESSAGE_B14._serialized_start=819 + _LOTSNESTEDMESSAGE_B14._serialized_end=824 + _LOTSNESTEDMESSAGE_B15._serialized_start=826 + _LOTSNESTEDMESSAGE_B15._serialized_end=831 + _LOTSNESTEDMESSAGE_B16._serialized_start=833 + _LOTSNESTEDMESSAGE_B16._serialized_end=838 + _LOTSNESTEDMESSAGE_B17._serialized_start=840 + _LOTSNESTEDMESSAGE_B17._serialized_end=845 + _LOTSNESTEDMESSAGE_B18._serialized_start=847 + _LOTSNESTEDMESSAGE_B18._serialized_end=852 + _LOTSNESTEDMESSAGE_B19._serialized_start=854 + _LOTSNESTEDMESSAGE_B19._serialized_end=859 + _LOTSNESTEDMESSAGE_B20._serialized_start=861 + _LOTSNESTEDMESSAGE_B20._serialized_end=866 + _LOTSNESTEDMESSAGE_B21._serialized_start=868 + _LOTSNESTEDMESSAGE_B21._serialized_end=873 + _LOTSNESTEDMESSAGE_B22._serialized_start=875 + _LOTSNESTEDMESSAGE_B22._serialized_end=880 + _LOTSNESTEDMESSAGE_B23._serialized_start=882 + _LOTSNESTEDMESSAGE_B23._serialized_end=887 + _LOTSNESTEDMESSAGE_B24._serialized_start=889 + _LOTSNESTEDMESSAGE_B24._serialized_end=894 + _LOTSNESTEDMESSAGE_B25._serialized_start=896 + _LOTSNESTEDMESSAGE_B25._serialized_end=901 + _LOTSNESTEDMESSAGE_B26._serialized_start=903 + _LOTSNESTEDMESSAGE_B26._serialized_end=908 + _LOTSNESTEDMESSAGE_B27._serialized_start=910 + _LOTSNESTEDMESSAGE_B27._serialized_end=915 + _LOTSNESTEDMESSAGE_B28._serialized_start=917 + _LOTSNESTEDMESSAGE_B28._serialized_end=922 + _LOTSNESTEDMESSAGE_B29._serialized_start=924 + _LOTSNESTEDMESSAGE_B29._serialized_end=929 + _LOTSNESTEDMESSAGE_B30._serialized_start=931 + _LOTSNESTEDMESSAGE_B30._serialized_end=936 + _LOTSNESTEDMESSAGE_B31._serialized_start=938 + _LOTSNESTEDMESSAGE_B31._serialized_end=943 + _LOTSNESTEDMESSAGE_B32._serialized_start=945 + _LOTSNESTEDMESSAGE_B32._serialized_end=950 + _LOTSNESTEDMESSAGE_B33._serialized_start=952 + _LOTSNESTEDMESSAGE_B33._serialized_end=957 + _LOTSNESTEDMESSAGE_B34._serialized_start=959 + _LOTSNESTEDMESSAGE_B34._serialized_end=964 + _LOTSNESTEDMESSAGE_B35._serialized_start=966 + _LOTSNESTEDMESSAGE_B35._serialized_end=971 + _LOTSNESTEDMESSAGE_B36._serialized_start=973 + _LOTSNESTEDMESSAGE_B36._serialized_end=978 + _LOTSNESTEDMESSAGE_B37._serialized_start=980 + _LOTSNESTEDMESSAGE_B37._serialized_end=985 + _LOTSNESTEDMESSAGE_B38._serialized_start=987 + _LOTSNESTEDMESSAGE_B38._serialized_end=992 + _LOTSNESTEDMESSAGE_B39._serialized_start=994 + _LOTSNESTEDMESSAGE_B39._serialized_end=999 + 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 + _LOTSNESTEDMESSAGE_B40._serialized_end=1006 + _LOTSNESTEDMESSAGE_B41._serialized_start=1008 + _LOTSNESTEDMESSAGE_B41._serialized_end=1013 + _LOTSNESTEDMESSAGE_B42._serialized_start=1015 + _LOTSNESTEDMESSAGE_B42._serialized_end=1020 + _LOTSNESTEDMESSAGE_B43._serialized_start=1022 + _LOTSNESTEDMESSAGE_B43._serialized_end=1027 + _LOTSNESTEDMESSAGE_B44._serialized_start=1029 + _LOTSNESTEDMESSAGE_B44._serialized_end=1034 + _LOTSNESTEDMESSAGE_B45._serialized_start=1036 + _LOTSNESTEDMESSAGE_B45._serialized_end=1041 + _LOTSNESTEDMESSAGE_B46._serialized_start=1043 + _LOTSNESTEDMESSAGE_B46._serialized_end=1048 + _LOTSNESTEDMESSAGE_B47._serialized_start=1050 + _LOTSNESTEDMESSAGE_B47._serialized_end=1055 + _LOTSNESTEDMESSAGE_B48._serialized_start=1057 + _LOTSNESTEDMESSAGE_B48._serialized_end=1062 + _LOTSNESTEDMESSAGE_B49._serialized_start=1064 + _LOTSNESTEDMESSAGE_B49._serialized_end=1069 + _LOTSNESTEDMESSAGE_B50._serialized_start=1071 + _LOTSNESTEDMESSAGE_B50._serialized_end=1076 + _LOTSNESTEDMESSAGE_B51._serialized_start=1078 + _LOTSNESTEDMESSAGE_B51._serialized_end=1083 + _LOTSNESTEDMESSAGE_B52._serialized_start=1085 + _LOTSNESTEDMESSAGE_B52._serialized_end=1090 + _LOTSNESTEDMESSAGE_B53._serialized_start=1092 + _LOTSNESTEDMESSAGE_B53._serialized_end=1097 + _LOTSNESTEDMESSAGE_B54._serialized_start=1099 + _LOTSNESTEDMESSAGE_B54._serialized_end=1104 + _LOTSNESTEDMESSAGE_B55._serialized_start=1106 + _LOTSNESTEDMESSAGE_B55._serialized_end=1111 + _LOTSNESTEDMESSAGE_B56._serialized_start=1113 + _LOTSNESTEDMESSAGE_B56._serialized_end=1118 + _LOTSNESTEDMESSAGE_B57._serialized_start=1120 + _LOTSNESTEDMESSAGE_B57._serialized_end=1125 + _LOTSNESTEDMESSAGE_B58._serialized_start=1127 + _LOTSNESTEDMESSAGE_B58._serialized_end=1132 + _LOTSNESTEDMESSAGE_B59._serialized_start=1134 + _LOTSNESTEDMESSAGE_B59._serialized_end=1139 + _LOTSNESTEDMESSAGE_B60._serialized_start=1141 + _LOTSNESTEDMESSAGE_B60._serialized_end=1146 + _LOTSNESTEDMESSAGE_B61._serialized_start=1148 + _LOTSNESTEDMESSAGE_B61._serialized_end=1153 + _LOTSNESTEDMESSAGE_B62._serialized_start=1155 + _LOTSNESTEDMESSAGE_B62._serialized_end=1160 + _LOTSNESTEDMESSAGE_B63._serialized_start=1162 + _LOTSNESTEDMESSAGE_B63._serialized_end=1167 + _LOTSNESTEDMESSAGE_B64._serialized_start=1169 + _LOTSNESTEDMESSAGE_B64._serialized_end=1174 + _LOTSNESTEDMESSAGE_B65._serialized_start=1176 + _LOTSNESTEDMESSAGE_B65._serialized_end=1181 + _LOTSNESTEDMESSAGE_B66._serialized_start=1183 + _LOTSNESTEDMESSAGE_B66._serialized_end=1188 + _LOTSNESTEDMESSAGE_B67._serialized_start=1190 + _LOTSNESTEDMESSAGE_B67._serialized_end=1195 + _LOTSNESTEDMESSAGE_B68._serialized_start=1197 + _LOTSNESTEDMESSAGE_B68._serialized_end=1202 + _LOTSNESTEDMESSAGE_B69._serialized_start=1204 + _LOTSNESTEDMESSAGE_B69._serialized_end=1209 + _LOTSNESTEDMESSAGE_B70._serialized_start=1211 + _LOTSNESTEDMESSAGE_B70._serialized_end=1216 + _LOTSNESTEDMESSAGE_B71._serialized_start=1218 + _LOTSNESTEDMESSAGE_B71._serialized_end=1223 + _LOTSNESTEDMESSAGE_B72._serialized_start=1225 + _LOTSNESTEDMESSAGE_B72._serialized_end=1230 + _LOTSNESTEDMESSAGE_B73._serialized_start=1232 + _LOTSNESTEDMESSAGE_B73._serialized_end=1237 + _LOTSNESTEDMESSAGE_B74._serialized_start=1239 + _LOTSNESTEDMESSAGE_B74._serialized_end=1244 + _LOTSNESTEDMESSAGE_B75._serialized_start=1246 + _LOTSNESTEDMESSAGE_B75._serialized_end=1251 + _LOTSNESTEDMESSAGE_B76._serialized_start=1253 + _LOTSNESTEDMESSAGE_B76._serialized_end=1258 + _LOTSNESTEDMESSAGE_B77._serialized_start=1260 + 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 + _LOTSNESTEDMESSAGE_B78._serialized_start=1267 + _LOTSNESTEDMESSAGE_B78._serialized_end=1272 + _LOTSNESTEDMESSAGE_B79._serialized_start=1274 + _LOTSNESTEDMESSAGE_B79._serialized_end=1279 + _LOTSNESTEDMESSAGE_B80._serialized_start=1281 + _LOTSNESTEDMESSAGE_B80._serialized_end=1286 + _LOTSNESTEDMESSAGE_B81._serialized_start=1288 + _LOTSNESTEDMESSAGE_B81._serialized_end=1293 + _LOTSNESTEDMESSAGE_B82._serialized_start=1295 + _LOTSNESTEDMESSAGE_B82._serialized_end=1300 + _LOTSNESTEDMESSAGE_B83._serialized_start=1302 + _LOTSNESTEDMESSAGE_B83._serialized_end=1307 + _LOTSNESTEDMESSAGE_B84._serialized_start=1309 + _LOTSNESTEDMESSAGE_B84._serialized_end=1314 + _LOTSNESTEDMESSAGE_B85._serialized_start=1316 + _LOTSNESTEDMESSAGE_B85._serialized_end=1321 + _LOTSNESTEDMESSAGE_B86._serialized_start=1323 + _LOTSNESTEDMESSAGE_B86._serialized_end=1328 + _LOTSNESTEDMESSAGE_B87._serialized_start=1330 + _LOTSNESTEDMESSAGE_B87._serialized_end=1335 + _LOTSNESTEDMESSAGE_B88._serialized_start=1337 + _LOTSNESTEDMESSAGE_B88._serialized_end=1342 + _LOTSNESTEDMESSAGE_B89._serialized_start=1344 + _LOTSNESTEDMESSAGE_B89._serialized_end=1349 + _LOTSNESTEDMESSAGE_B90._serialized_start=1351 + _LOTSNESTEDMESSAGE_B90._serialized_end=1356 + _LOTSNESTEDMESSAGE_B91._serialized_start=1358 + _LOTSNESTEDMESSAGE_B91._serialized_end=1363 + _LOTSNESTEDMESSAGE_B92._serialized_start=1365 + _LOTSNESTEDMESSAGE_B92._serialized_end=1370 + _LOTSNESTEDMESSAGE_B93._serialized_start=1372 + _LOTSNESTEDMESSAGE_B93._serialized_end=1377 + _LOTSNESTEDMESSAGE_B94._serialized_start=1379 + _LOTSNESTEDMESSAGE_B94._serialized_end=1384 + _LOTSNESTEDMESSAGE_B95._serialized_start=1386 + _LOTSNESTEDMESSAGE_B95._serialized_end=1391 + _LOTSNESTEDMESSAGE_B96._serialized_start=1393 + _LOTSNESTEDMESSAGE_B96._serialized_end=1398 + _LOTSNESTEDMESSAGE_B97._serialized_start=1400 + _LOTSNESTEDMESSAGE_B97._serialized_end=1405 + _LOTSNESTEDMESSAGE_B98._serialized_start=1407 + _LOTSNESTEDMESSAGE_B98._serialized_end=1412 + _LOTSNESTEDMESSAGE_B99._serialized_start=1414 + _LOTSNESTEDMESSAGE_B99._serialized_end=1419 + _LOTSNESTEDMESSAGE_B100._serialized_start=1421 + _LOTSNESTEDMESSAGE_B100._serialized_end=1427 + _LOTSNESTEDMESSAGE_B101._serialized_start=1429 + _LOTSNESTEDMESSAGE_B101._serialized_end=1435 + _LOTSNESTEDMESSAGE_B102._serialized_start=1437 + _LOTSNESTEDMESSAGE_B102._serialized_end=1443 + _LOTSNESTEDMESSAGE_B103._serialized_start=1445 + _LOTSNESTEDMESSAGE_B103._serialized_end=1451 + _LOTSNESTEDMESSAGE_B104._serialized_start=1453 + _LOTSNESTEDMESSAGE_B104._serialized_end=1459 + _LOTSNESTEDMESSAGE_B105._serialized_start=1461 + _LOTSNESTEDMESSAGE_B105._serialized_end=1467 + _LOTSNESTEDMESSAGE_B106._serialized_start=1469 + _LOTSNESTEDMESSAGE_B106._serialized_end=1475 + _LOTSNESTEDMESSAGE_B107._serialized_start=1477 + _LOTSNESTEDMESSAGE_B107._serialized_end=1483 + _LOTSNESTEDMESSAGE_B108._serialized_start=1485 + _LOTSNESTEDMESSAGE_B108._serialized_end=1491 + _LOTSNESTEDMESSAGE_B109._serialized_start=1493 + _LOTSNESTEDMESSAGE_B109._serialized_end=1499 + _LOTSNESTEDMESSAGE_B110._serialized_start=1501 + _LOTSNESTEDMESSAGE_B110._serialized_end=1507 + _LOTSNESTEDMESSAGE_B111._serialized_start=1509 + _LOTSNESTEDMESSAGE_B111._serialized_end=1515 + _LOTSNESTEDMESSAGE_B112._serialized_start=1517 + _LOTSNESTEDMESSAGE_B112._serialized_end=1523 + _LOTSNESTEDMESSAGE_B113._serialized_start=1525 + _LOTSNESTEDMESSAGE_B113._serialized_end=1531 + _LOTSNESTEDMESSAGE_B114._serialized_start=1533 + _LOTSNESTEDMESSAGE_B114._serialized_end=1539 + 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 + _LOTSNESTEDMESSAGE_B115._serialized_end=1547 + _LOTSNESTEDMESSAGE_B116._serialized_start=1549 + _LOTSNESTEDMESSAGE_B116._serialized_end=1555 + _LOTSNESTEDMESSAGE_B117._serialized_start=1557 + _LOTSNESTEDMESSAGE_B117._serialized_end=1563 + _LOTSNESTEDMESSAGE_B118._serialized_start=1565 + _LOTSNESTEDMESSAGE_B118._serialized_end=1571 + _LOTSNESTEDMESSAGE_B119._serialized_start=1573 + _LOTSNESTEDMESSAGE_B119._serialized_end=1579 + _LOTSNESTEDMESSAGE_B120._serialized_start=1581 + _LOTSNESTEDMESSAGE_B120._serialized_end=1587 + _LOTSNESTEDMESSAGE_B121._serialized_start=1589 + _LOTSNESTEDMESSAGE_B121._serialized_end=1595 + _LOTSNESTEDMESSAGE_B122._serialized_start=1597 + _LOTSNESTEDMESSAGE_B122._serialized_end=1603 + _LOTSNESTEDMESSAGE_B123._serialized_start=1605 + _LOTSNESTEDMESSAGE_B123._serialized_end=1611 + _LOTSNESTEDMESSAGE_B124._serialized_start=1613 + _LOTSNESTEDMESSAGE_B124._serialized_end=1619 + _LOTSNESTEDMESSAGE_B125._serialized_start=1621 + _LOTSNESTEDMESSAGE_B125._serialized_end=1627 + _LOTSNESTEDMESSAGE_B126._serialized_start=1629 + _LOTSNESTEDMESSAGE_B126._serialized_end=1635 + _LOTSNESTEDMESSAGE_B127._serialized_start=1637 + _LOTSNESTEDMESSAGE_B127._serialized_end=1643 + _LOTSNESTEDMESSAGE_B128._serialized_start=1645 + _LOTSNESTEDMESSAGE_B128._serialized_end=1651 + _LOTSNESTEDMESSAGE_B129._serialized_start=1653 + _LOTSNESTEDMESSAGE_B129._serialized_end=1659 + _LOTSNESTEDMESSAGE_B130._serialized_start=1661 + _LOTSNESTEDMESSAGE_B130._serialized_end=1667 + _LOTSNESTEDMESSAGE_B131._serialized_start=1669 + _LOTSNESTEDMESSAGE_B131._serialized_end=1675 + _LOTSNESTEDMESSAGE_B132._serialized_start=1677 + _LOTSNESTEDMESSAGE_B132._serialized_end=1683 + _LOTSNESTEDMESSAGE_B133._serialized_start=1685 + _LOTSNESTEDMESSAGE_B133._serialized_end=1691 + _LOTSNESTEDMESSAGE_B134._serialized_start=1693 + _LOTSNESTEDMESSAGE_B134._serialized_end=1699 + _LOTSNESTEDMESSAGE_B135._serialized_start=1701 + _LOTSNESTEDMESSAGE_B135._serialized_end=1707 + _LOTSNESTEDMESSAGE_B136._serialized_start=1709 + _LOTSNESTEDMESSAGE_B136._serialized_end=1715 + _LOTSNESTEDMESSAGE_B137._serialized_start=1717 + _LOTSNESTEDMESSAGE_B137._serialized_end=1723 + _LOTSNESTEDMESSAGE_B138._serialized_start=1725 + _LOTSNESTEDMESSAGE_B138._serialized_end=1731 + _LOTSNESTEDMESSAGE_B139._serialized_start=1733 + _LOTSNESTEDMESSAGE_B139._serialized_end=1739 + _LOTSNESTEDMESSAGE_B140._serialized_start=1741 + _LOTSNESTEDMESSAGE_B140._serialized_end=1747 + _LOTSNESTEDMESSAGE_B141._serialized_start=1749 + _LOTSNESTEDMESSAGE_B141._serialized_end=1755 + _LOTSNESTEDMESSAGE_B142._serialized_start=1757 + _LOTSNESTEDMESSAGE_B142._serialized_end=1763 + _LOTSNESTEDMESSAGE_B143._serialized_start=1765 + _LOTSNESTEDMESSAGE_B143._serialized_end=1771 + _LOTSNESTEDMESSAGE_B144._serialized_start=1773 + _LOTSNESTEDMESSAGE_B144._serialized_end=1779 + _LOTSNESTEDMESSAGE_B145._serialized_start=1781 + _LOTSNESTEDMESSAGE_B145._serialized_end=1787 + _LOTSNESTEDMESSAGE_B146._serialized_start=1789 + _LOTSNESTEDMESSAGE_B146._serialized_end=1795 + _LOTSNESTEDMESSAGE_B147._serialized_start=1797 + _LOTSNESTEDMESSAGE_B147._serialized_end=1803 + _LOTSNESTEDMESSAGE_B148._serialized_start=1805 + _LOTSNESTEDMESSAGE_B148._serialized_end=1811 + _LOTSNESTEDMESSAGE_B149._serialized_start=1813 + _LOTSNESTEDMESSAGE_B149._serialized_end=1819 + _LOTSNESTEDMESSAGE_B150._serialized_start=1821 + _LOTSNESTEDMESSAGE_B150._serialized_end=1827 + _LOTSNESTEDMESSAGE_B151._serialized_start=1829 + _LOTSNESTEDMESSAGE_B151._serialized_end=1835 + 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 + _LOTSNESTEDMESSAGE_B152._serialized_end=1843 + _LOTSNESTEDMESSAGE_B153._serialized_start=1845 + _LOTSNESTEDMESSAGE_B153._serialized_end=1851 + _LOTSNESTEDMESSAGE_B154._serialized_start=1853 + _LOTSNESTEDMESSAGE_B154._serialized_end=1859 + _LOTSNESTEDMESSAGE_B155._serialized_start=1861 + _LOTSNESTEDMESSAGE_B155._serialized_end=1867 + _LOTSNESTEDMESSAGE_B156._serialized_start=1869 + _LOTSNESTEDMESSAGE_B156._serialized_end=1875 + _LOTSNESTEDMESSAGE_B157._serialized_start=1877 + _LOTSNESTEDMESSAGE_B157._serialized_end=1883 + _LOTSNESTEDMESSAGE_B158._serialized_start=1885 + _LOTSNESTEDMESSAGE_B158._serialized_end=1891 + _LOTSNESTEDMESSAGE_B159._serialized_start=1893 + _LOTSNESTEDMESSAGE_B159._serialized_end=1899 + _LOTSNESTEDMESSAGE_B160._serialized_start=1901 + _LOTSNESTEDMESSAGE_B160._serialized_end=1907 + _LOTSNESTEDMESSAGE_B161._serialized_start=1909 + _LOTSNESTEDMESSAGE_B161._serialized_end=1915 + _LOTSNESTEDMESSAGE_B162._serialized_start=1917 + _LOTSNESTEDMESSAGE_B162._serialized_end=1923 + _LOTSNESTEDMESSAGE_B163._serialized_start=1925 + _LOTSNESTEDMESSAGE_B163._serialized_end=1931 + _LOTSNESTEDMESSAGE_B164._serialized_start=1933 + _LOTSNESTEDMESSAGE_B164._serialized_end=1939 + _LOTSNESTEDMESSAGE_B165._serialized_start=1941 + _LOTSNESTEDMESSAGE_B165._serialized_end=1947 + _LOTSNESTEDMESSAGE_B166._serialized_start=1949 + _LOTSNESTEDMESSAGE_B166._serialized_end=1955 + _LOTSNESTEDMESSAGE_B167._serialized_start=1957 + _LOTSNESTEDMESSAGE_B167._serialized_end=1963 + _LOTSNESTEDMESSAGE_B168._serialized_start=1965 + _LOTSNESTEDMESSAGE_B168._serialized_end=1971 + _LOTSNESTEDMESSAGE_B169._serialized_start=1973 + _LOTSNESTEDMESSAGE_B169._serialized_end=1979 + _LOTSNESTEDMESSAGE_B170._serialized_start=1981 + _LOTSNESTEDMESSAGE_B170._serialized_end=1987 + _LOTSNESTEDMESSAGE_B171._serialized_start=1989 + _LOTSNESTEDMESSAGE_B171._serialized_end=1995 + _LOTSNESTEDMESSAGE_B172._serialized_start=1997 + _LOTSNESTEDMESSAGE_B172._serialized_end=2003 + _LOTSNESTEDMESSAGE_B173._serialized_start=2005 + _LOTSNESTEDMESSAGE_B173._serialized_end=2011 + _LOTSNESTEDMESSAGE_B174._serialized_start=2013 + _LOTSNESTEDMESSAGE_B174._serialized_end=2019 + _LOTSNESTEDMESSAGE_B175._serialized_start=2021 + _LOTSNESTEDMESSAGE_B175._serialized_end=2027 + _LOTSNESTEDMESSAGE_B176._serialized_start=2029 + _LOTSNESTEDMESSAGE_B176._serialized_end=2035 + _LOTSNESTEDMESSAGE_B177._serialized_start=2037 + _LOTSNESTEDMESSAGE_B177._serialized_end=2043 + _LOTSNESTEDMESSAGE_B178._serialized_start=2045 + _LOTSNESTEDMESSAGE_B178._serialized_end=2051 + _LOTSNESTEDMESSAGE_B179._serialized_start=2053 + _LOTSNESTEDMESSAGE_B179._serialized_end=2059 + _LOTSNESTEDMESSAGE_B180._serialized_start=2061 + _LOTSNESTEDMESSAGE_B180._serialized_end=2067 + _LOTSNESTEDMESSAGE_B181._serialized_start=2069 + _LOTSNESTEDMESSAGE_B181._serialized_end=2075 + _LOTSNESTEDMESSAGE_B182._serialized_start=2077 + _LOTSNESTEDMESSAGE_B182._serialized_end=2083 + _LOTSNESTEDMESSAGE_B183._serialized_start=2085 + _LOTSNESTEDMESSAGE_B183._serialized_end=2091 + _LOTSNESTEDMESSAGE_B184._serialized_start=2093 + _LOTSNESTEDMESSAGE_B184._serialized_end=2099 + _LOTSNESTEDMESSAGE_B185._serialized_start=2101 + _LOTSNESTEDMESSAGE_B185._serialized_end=2107 + _LOTSNESTEDMESSAGE_B186._serialized_start=2109 + _LOTSNESTEDMESSAGE_B186._serialized_end=2115 + _LOTSNESTEDMESSAGE_B187._serialized_start=2117 + _LOTSNESTEDMESSAGE_B187._serialized_end=2123 + _LOTSNESTEDMESSAGE_B188._serialized_start=2125 + _LOTSNESTEDMESSAGE_B188._serialized_end=2131 + 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 + _LOTSNESTEDMESSAGE_B189._serialized_end=2139 + _LOTSNESTEDMESSAGE_B190._serialized_start=2141 + _LOTSNESTEDMESSAGE_B190._serialized_end=2147 + _LOTSNESTEDMESSAGE_B191._serialized_start=2149 + _LOTSNESTEDMESSAGE_B191._serialized_end=2155 + _LOTSNESTEDMESSAGE_B192._serialized_start=2157 + _LOTSNESTEDMESSAGE_B192._serialized_end=2163 + _LOTSNESTEDMESSAGE_B193._serialized_start=2165 + _LOTSNESTEDMESSAGE_B193._serialized_end=2171 + _LOTSNESTEDMESSAGE_B194._serialized_start=2173 + _LOTSNESTEDMESSAGE_B194._serialized_end=2179 + _LOTSNESTEDMESSAGE_B195._serialized_start=2181 + _LOTSNESTEDMESSAGE_B195._serialized_end=2187 + _LOTSNESTEDMESSAGE_B196._serialized_start=2189 + _LOTSNESTEDMESSAGE_B196._serialized_end=2195 + _LOTSNESTEDMESSAGE_B197._serialized_start=2197 + _LOTSNESTEDMESSAGE_B197._serialized_end=2203 + _LOTSNESTEDMESSAGE_B198._serialized_start=2205 + _LOTSNESTEDMESSAGE_B198._serialized_end=2211 + _LOTSNESTEDMESSAGE_B199._serialized_start=2213 + _LOTSNESTEDMESSAGE_B199._serialized_end=2219 + _LOTSNESTEDMESSAGE_B200._serialized_start=2221 + _LOTSNESTEDMESSAGE_B200._serialized_end=2227 + _LOTSNESTEDMESSAGE_B201._serialized_start=2229 + _LOTSNESTEDMESSAGE_B201._serialized_end=2235 + _LOTSNESTEDMESSAGE_B202._serialized_start=2237 + _LOTSNESTEDMESSAGE_B202._serialized_end=2243 + _LOTSNESTEDMESSAGE_B203._serialized_start=2245 + _LOTSNESTEDMESSAGE_B203._serialized_end=2251 + _LOTSNESTEDMESSAGE_B204._serialized_start=2253 + _LOTSNESTEDMESSAGE_B204._serialized_end=2259 + _LOTSNESTEDMESSAGE_B205._serialized_start=2261 + _LOTSNESTEDMESSAGE_B205._serialized_end=2267 + _LOTSNESTEDMESSAGE_B206._serialized_start=2269 + _LOTSNESTEDMESSAGE_B206._serialized_end=2275 + _LOTSNESTEDMESSAGE_B207._serialized_start=2277 + _LOTSNESTEDMESSAGE_B207._serialized_end=2283 + _LOTSNESTEDMESSAGE_B208._serialized_start=2285 + _LOTSNESTEDMESSAGE_B208._serialized_end=2291 + _LOTSNESTEDMESSAGE_B209._serialized_start=2293 + _LOTSNESTEDMESSAGE_B209._serialized_end=2299 + _LOTSNESTEDMESSAGE_B210._serialized_start=2301 + _LOTSNESTEDMESSAGE_B210._serialized_end=2307 + _LOTSNESTEDMESSAGE_B211._serialized_start=2309 + _LOTSNESTEDMESSAGE_B211._serialized_end=2315 + _LOTSNESTEDMESSAGE_B212._serialized_start=2317 + _LOTSNESTEDMESSAGE_B212._serialized_end=2323 + _LOTSNESTEDMESSAGE_B213._serialized_start=2325 + _LOTSNESTEDMESSAGE_B213._serialized_end=2331 + _LOTSNESTEDMESSAGE_B214._serialized_start=2333 + _LOTSNESTEDMESSAGE_B214._serialized_end=2339 + _LOTSNESTEDMESSAGE_B215._serialized_start=2341 + _LOTSNESTEDMESSAGE_B215._serialized_end=2347 + _LOTSNESTEDMESSAGE_B216._serialized_start=2349 + _LOTSNESTEDMESSAGE_B216._serialized_end=2355 + _LOTSNESTEDMESSAGE_B217._serialized_start=2357 + _LOTSNESTEDMESSAGE_B217._serialized_end=2363 + _LOTSNESTEDMESSAGE_B218._serialized_start=2365 + _LOTSNESTEDMESSAGE_B218._serialized_end=2371 + _LOTSNESTEDMESSAGE_B219._serialized_start=2373 + _LOTSNESTEDMESSAGE_B219._serialized_end=2379 + _LOTSNESTEDMESSAGE_B220._serialized_start=2381 + _LOTSNESTEDMESSAGE_B220._serialized_end=2387 + _LOTSNESTEDMESSAGE_B221._serialized_start=2389 + _LOTSNESTEDMESSAGE_B221._serialized_end=2395 + _LOTSNESTEDMESSAGE_B222._serialized_start=2397 + _LOTSNESTEDMESSAGE_B222._serialized_end=2403 + _LOTSNESTEDMESSAGE_B223._serialized_start=2405 + _LOTSNESTEDMESSAGE_B223._serialized_end=2411 + _LOTSNESTEDMESSAGE_B224._serialized_start=2413 + _LOTSNESTEDMESSAGE_B224._serialized_end=2419 + _LOTSNESTEDMESSAGE_B225._serialized_start=2421 + _LOTSNESTEDMESSAGE_B225._serialized_end=2427 + 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 + _LOTSNESTEDMESSAGE_B226._serialized_end=2435 + _LOTSNESTEDMESSAGE_B227._serialized_start=2437 + _LOTSNESTEDMESSAGE_B227._serialized_end=2443 + _LOTSNESTEDMESSAGE_B228._serialized_start=2445 + _LOTSNESTEDMESSAGE_B228._serialized_end=2451 + _LOTSNESTEDMESSAGE_B229._serialized_start=2453 + _LOTSNESTEDMESSAGE_B229._serialized_end=2459 + _LOTSNESTEDMESSAGE_B230._serialized_start=2461 + _LOTSNESTEDMESSAGE_B230._serialized_end=2467 + _LOTSNESTEDMESSAGE_B231._serialized_start=2469 + _LOTSNESTEDMESSAGE_B231._serialized_end=2475 + _LOTSNESTEDMESSAGE_B232._serialized_start=2477 + _LOTSNESTEDMESSAGE_B232._serialized_end=2483 + _LOTSNESTEDMESSAGE_B233._serialized_start=2485 + _LOTSNESTEDMESSAGE_B233._serialized_end=2491 + _LOTSNESTEDMESSAGE_B234._serialized_start=2493 + _LOTSNESTEDMESSAGE_B234._serialized_end=2499 + _LOTSNESTEDMESSAGE_B235._serialized_start=2501 + _LOTSNESTEDMESSAGE_B235._serialized_end=2507 + _LOTSNESTEDMESSAGE_B236._serialized_start=2509 + _LOTSNESTEDMESSAGE_B236._serialized_end=2515 + _LOTSNESTEDMESSAGE_B237._serialized_start=2517 + _LOTSNESTEDMESSAGE_B237._serialized_end=2523 + _LOTSNESTEDMESSAGE_B238._serialized_start=2525 + _LOTSNESTEDMESSAGE_B238._serialized_end=2531 + _LOTSNESTEDMESSAGE_B239._serialized_start=2533 + _LOTSNESTEDMESSAGE_B239._serialized_end=2539 + _LOTSNESTEDMESSAGE_B240._serialized_start=2541 + _LOTSNESTEDMESSAGE_B240._serialized_end=2547 + _LOTSNESTEDMESSAGE_B241._serialized_start=2549 + _LOTSNESTEDMESSAGE_B241._serialized_end=2555 + _LOTSNESTEDMESSAGE_B242._serialized_start=2557 + _LOTSNESTEDMESSAGE_B242._serialized_end=2563 + _LOTSNESTEDMESSAGE_B243._serialized_start=2565 + _LOTSNESTEDMESSAGE_B243._serialized_end=2571 + _LOTSNESTEDMESSAGE_B244._serialized_start=2573 + _LOTSNESTEDMESSAGE_B244._serialized_end=2579 + _LOTSNESTEDMESSAGE_B245._serialized_start=2581 + _LOTSNESTEDMESSAGE_B245._serialized_end=2587 + _LOTSNESTEDMESSAGE_B246._serialized_start=2589 + _LOTSNESTEDMESSAGE_B246._serialized_end=2595 + _LOTSNESTEDMESSAGE_B247._serialized_start=2597 + _LOTSNESTEDMESSAGE_B247._serialized_end=2603 + _LOTSNESTEDMESSAGE_B248._serialized_start=2605 + _LOTSNESTEDMESSAGE_B248._serialized_end=2611 + _LOTSNESTEDMESSAGE_B249._serialized_start=2613 + _LOTSNESTEDMESSAGE_B249._serialized_end=2619 + _LOTSNESTEDMESSAGE_B250._serialized_start=2621 + _LOTSNESTEDMESSAGE_B250._serialized_end=2627 + _LOTSNESTEDMESSAGE_B251._serialized_start=2629 + _LOTSNESTEDMESSAGE_B251._serialized_end=2635 + _LOTSNESTEDMESSAGE_B252._serialized_start=2637 + _LOTSNESTEDMESSAGE_B252._serialized_end=2643 + _LOTSNESTEDMESSAGE_B253._serialized_start=2645 + _LOTSNESTEDMESSAGE_B253._serialized_end=2651 + _LOTSNESTEDMESSAGE_B254._serialized_start=2653 + _LOTSNESTEDMESSAGE_B254._serialized_end=2659 + _LOTSNESTEDMESSAGE_B255._serialized_start=2661 + _LOTSNESTEDMESSAGE_B255._serialized_end=2667 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py new file mode 100644 index 0000000000..d46dee080a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/no_package_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/no_package.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _NOPACKAGEENUM._serialized_start=106 + _NOPACKAGEENUM._serialized_end=169 + _NOPACKAGEMESSAGE._serialized_start=45 + _NOPACKAGEMESSAGE._serialized_end=104 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py new file mode 100644 index 0000000000..2921d5cb6e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/python_message.py @@ -0,0 +1,1539 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. +# +# TODO(robinson): Helpers for verbose, common checks like seeing if a +# descriptor's cpp_type is CPPTYPE_MESSAGE. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) 
+ +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +from io import BytesIO +import struct +import sys +import weakref + +# We use "as" to avoid name collisions with variables. +from google.protobuf.internal import api_implementation +from google.protobuf.internal import containers +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import enum_type_wrapper +from google.protobuf.internal import extension_dict +from google.protobuf.internal import message_listener as message_listener_mod +from google.protobuf.internal import type_checkers +from google.protobuf.internal import well_known_types +from google.protobuf.internal import wire_format +from google.protobuf import descriptor as descriptor_mod +from google.protobuf import message as message_mod +from google.protobuf import text_format + +_FieldDescriptor = descriptor_mod.FieldDescriptor +_AnyFullTypeName = 'google.protobuf.Any' +_ExtensionDict = extension_dict._ExtensionDict + +class GeneratedProtocolMessageType(type): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + We add implementations for all methods described in the Message class. We + also create properties to allow getting/setting all fields in the protocol + message. Finally, we create slots to prevent users from accidentally + "setting" nonexistent fields in the protocol message, which then wouldn't get + serialized / deserialized properly. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __new__(cls, name, bases, dictionary): + """Custom allocation for runtime-generated class types. + + We override __new__ because this is apparently the only place + where we can meaningfully set __slots__ on the class we're creating(?). + (The interplay between metaclasses and slots is not very well-documented). + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + + Returns: + Newly-allocated class. + + Raises: + RuntimeError: Generated code only work with python cpp extension. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + if isinstance(descriptor, str): + raise RuntimeError('The generated code only work with python cpp ' + 'extension, but it is using pure python runtime.') + + # If a concrete class already exists for this descriptor, don't try to + # create another. 
Doing so will break any messages that already exist with + # the existing class. + # + # The C++ implementation appears to have its own internal `PyMessageFactory` + # to achieve similar results. + # + # This most commonly happens in `text_format.py` when using descriptors from + # a custom pool; it calls symbol_database.Global().getPrototype() on a + # descriptor which already has an existing concrete class. + new_class = getattr(descriptor, '_concrete_class', None) + if new_class: + return new_class + + if descriptor.full_name in well_known_types.WKTBASES: + bases += (well_known_types.WKTBASES[descriptor.full_name],) + _AddClassAttributesForNestedExtensions(descriptor, dictionary) + _AddSlots(descriptor, dictionary) + + superclass = super(GeneratedProtocolMessageType, cls) + new_class = superclass.__new__(cls, name, bases, dictionary) + return new_class + + def __init__(cls, name, bases, dictionary): + """Here we perform the majority of our work on the class. + We add enum getters, an __init__ method, implementations + of all Message methods, and properties for all fields + in the protocol type. + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + + # If this is an _existing_ class looked up via `_concrete_class` in the + # __new__ method above, then we don't need to re-initialize anything. + existing_class = getattr(descriptor, '_concrete_class', None) + if existing_class: + assert existing_class is cls, ( + 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' + % (descriptor.full_name)) + return + + cls._decoders_by_tag = {} + if (descriptor.has_options and + descriptor.GetOptions().message_set_wire_format): + cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( + decoder.MessageSetItemDecoder(descriptor), None) + + # Attach stuff to each FieldDescriptor for quick lookup later on. + for field in descriptor.fields: + _AttachFieldHelpers(cls, field) + + descriptor._concrete_class = cls # pylint: disable=protected-access + _AddEnumValues(descriptor, cls) + _AddInitMethod(descriptor, cls) + _AddPropertiesForFields(descriptor, cls) + _AddPropertiesForExtensions(descriptor, cls) + _AddStaticMethods(cls) + _AddMessageMethods(descriptor, cls) + _AddPrivateHelperMethods(descriptor, cls) + + superclass = super(GeneratedProtocolMessageType, cls) + superclass.__init__(name, bases, dictionary) + + +# Stateless helpers for GeneratedProtocolMessageType below. +# Outside clients should not access these directly. +# +# I opted not to make any of these methods on the metaclass, to make it more +# clear that I'm not really using any state there and to keep clients from +# thinking that they have direct access to these construction helpers. + + +def _PropertyName(proto_field_name): + """Returns the name of the public property attribute which + clients can use to get and (in some cases) set the value + of a protocol message field. + + Args: + proto_field_name: The protocol message field name, exactly + as it appears (or would appear) in a .proto file. + """ + # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. 
+ # nnorwitz makes my day by writing: + # """ + # FYI. See the keyword module in the stdlib. This could be as simple as: + # + # if keyword.iskeyword(proto_field_name): + # return proto_field_name + "_" + # return proto_field_name + # """ + # Kenton says: The above is a BAD IDEA. People rely on being able to use + # getattr() and setattr() to reflectively manipulate field values. If we + # rename the properties, then every such user has to also make sure to apply + # the same transformation. Note that currently if you name a field "yield", + # you can still access it just fine using getattr/setattr -- it's not even + # that cumbersome to do so. + # TODO(kenton): Remove this method entirely if/when everyone agrees with my + # position. + return proto_field_name + + +def _AddSlots(message_descriptor, dictionary): + """Adds a __slots__ entry to dictionary, containing the names of all valid + attributes for this message type. + + Args: + message_descriptor: A Descriptor instance describing this message type. + dictionary: Class dictionary to which we'll add a '__slots__' entry. + """ + dictionary['__slots__'] = ['_cached_byte_size', + '_cached_byte_size_dirty', + '_fields', + '_unknown_fields', + '_unknown_field_set', + '_is_present_in_parent', + '_listener', + '_listener_for_children', + '__weakref__', + '_oneofs'] + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == _FieldDescriptor.TYPE_MESSAGE and + field.label == _FieldDescriptor.LABEL_OPTIONAL) + + +def _IsMapField(field): + return (field.type == _FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def _IsMessageMapField(field): + value_type = field.message_type.fields_by_name['value'] + return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE + + +def _AttachFieldHelpers(cls, field_descriptor): + is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) + is_packable = (is_repeated and + wire_format.IsTypePackable(field_descriptor.type)) + is_proto3 = field_descriptor.containing_type.syntax == 'proto3' + if not is_packable: + is_packed = False + elif field_descriptor.containing_type.syntax == 'proto2': + is_packed = (field_descriptor.has_options and + field_descriptor.GetOptions().packed) + else: + has_packed_false = (field_descriptor.has_options and + field_descriptor.GetOptions().HasField('packed') and + field_descriptor.GetOptions().packed == False) + is_packed = not has_packed_false + is_map_entry = _IsMapField(field_descriptor) + + if is_map_entry: + field_encoder = encoder.MapEncoder(field_descriptor) + sizer = encoder.MapSizer(field_descriptor, + _IsMessageMapField(field_descriptor)) + elif _IsMessageSetExtension(field_descriptor): + field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) + sizer = encoder.MessageSetItemSizer(field_descriptor.number) + else: + field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + + field_descriptor._encoder = field_encoder + field_descriptor._sizer = sizer + field_descriptor._default_constructor = _DefaultValueConstructorForField( + field_descriptor) + + def AddDecoder(wiretype, is_packed): + tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) + decode_type = 
field_descriptor.type + if (decode_type == _FieldDescriptor.TYPE_ENUM and + type_checkers.SupportsOpenEnums(field_descriptor)): + decode_type = _FieldDescriptor.TYPE_INT32 + + oneof_descriptor = None + clear_if_default = False + if field_descriptor.containing_oneof is not None: + oneof_descriptor = field_descriptor + elif (is_proto3 and not is_repeated and + field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): + clear_if_default = True + + if is_map_entry: + is_message_map = _IsMessageMapField(field_descriptor) + + field_decoder = decoder.MapDecoder( + field_descriptor, _GetInitializeDefaultForMap(field_descriptor), + is_message_map) + elif decode_type == _FieldDescriptor.TYPE_STRING: + field_decoder = decoder.StringDecoder( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor) + else: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + # pylint: disable=protected-access + field_descriptor, field_descriptor._default_constructor, + clear_if_default) + + cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) + + AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], + False) + + if is_repeated and wire_format.IsTypePackable(field_descriptor.type): + # To support wire compatibility of adding packed = true, add a decoder for + # packed values regardless of the field's options. + AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) + + +def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + + +def _AddEnumValues(descriptor, cls): + """Sets class-level attributes for all enum fields defined in this message. + + Also exporting a class-level object that can name enum values. + + Args: + descriptor: Descriptor object for this message type. + cls: Class we're constructing for this message type. + """ + for enum_type in descriptor.enum_types: + setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) + for enum_value in enum_type.values: + setattr(cls, enum_value.name, enum_value.number) + + +def _GetInitializeDefaultForMap(field): + if field.label != _FieldDescriptor.LABEL_REPEATED: + raise ValueError('map_entry set on non-repeated field %s' % ( + field.name)) + fields_by_name = field.message_type.fields_by_name + key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) + + value_field = fields_by_name['value'] + if _IsMessageMapField(field): + def MakeMessageMapDefault(message): + return containers.MessageMap( + message._listener_for_children, value_field.message_type, key_checker, + field.message_type) + return MakeMessageMapDefault + else: + value_checker = type_checkers.GetTypeChecker(value_field) + def MakePrimitiveMapDefault(message): + return containers.ScalarMap( + message._listener_for_children, key_checker, value_checker, + field.message_type) + return MakePrimitiveMapDefault + +def _DefaultValueConstructorForField(field): + """Returns a function which returns a default value for a field. 
+ + Args: + field: FieldDescriptor object for this field. + + The returned function has one argument: + message: Message instance containing this field, or a weakref proxy + of same. + + That function in turn returns a default value for this field. The default + value may refer back to |message| via a weak reference. + """ + + if _IsMapField(field): + return _GetInitializeDefaultForMap(field) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + if field.has_default_value and field.default_value != []: + raise ValueError('Repeated field default value not empty list: %s' % ( + field.default_value)) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # We can't look at _concrete_class yet since it might not have + # been set. (Depends on order in which we initialize the classes). + message_type = field.message_type + def MakeRepeatedMessageDefault(message): + return containers.RepeatedCompositeFieldContainer( + message._listener_for_children, field.message_type) + return MakeRepeatedMessageDefault + else: + type_checker = type_checkers.GetTypeChecker(field) + def MakeRepeatedScalarDefault(message): + return containers.RepeatedScalarFieldContainer( + message._listener_for_children, type_checker) + return MakeRepeatedScalarDefault + + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # _concrete_class may not yet be initialized. + message_type = field.message_type + def MakeSubMessageDefault(message): + assert getattr(message_type, '_concrete_class', None), ( + 'Uninitialized concrete class found for field %r (message type %r)' + % (field.full_name, message_type.full_name)) + result = message_type._concrete_class() + result._SetListener( + _OneofListener(message, field) + if field.containing_oneof is not None + else message._listener_for_children) + return result + return MakeSubMessageDefault + + def MakeScalarDefault(message): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return field.default_value + return MakeScalarDefault + + +def _ReraiseTypeErrorWithFieldName(message_name, field_name): + """Re-raise the currently-handled TypeError with the field name added.""" + exc = sys.exc_info()[1] + if len(exc.args) == 1 and type(exc) is TypeError: + # simple TypeError; add field name to exception message + exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) + + # re-raise possibly-amended exception with original traceback: + raise exc.with_traceback(sys.exc_info()[2]) + + +def _AddInitMethod(message_descriptor, cls): + """Adds an __init__ method to cls.""" + + def _GetIntegerEnumValue(enum_type, value): + """Convert a string or integer enum value to an integer. + + If the value is a string, it is converted to the enum value in + enum_type with the same name. If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) + """ + if isinstance(value, str): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. 
+ self._unknown_fields = () + # _unknown_field_set is None when empty for efficiency, and will be + # turned into UnknownFieldSet struct if fields are added. + self._unknown_field_set = None # pylint: disable=protected-access + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError('%s() got an unexpected keyword argument "%s"' % + (message_descriptor.name, field_name)) + if field_value is None: + # field=None is the same as no field at all. + continue + if field.label == _FieldDescriptor.LABEL_REPEATED: + copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + copy[key].MergeFrom(field_value[key]) + else: + copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + copy.add(**val) + else: + copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + copy.extend(field_value) + self._fields[field] = copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + copy = field._default_constructor(self) + new_val = field_value + if isinstance(field_value, dict): + new_val = field.message_type._concrete_class(**field_value) + try: + copy.MergeFrom(new_val) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +class _FieldProperty(property): + __slots__ = ('DESCRIPTOR',) + + def __init__(self, descriptor, getter, setter, doc): + property.__init__(self, getter, setter, doc=doc) + self.DESCRIPTOR = descriptor + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + is_proto3 = field.containing_type.syntax == 'proto3' + + def getter(self): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' 
% proto_field_name + + clear_when_set_to_default = is_proto3 and not field.containing_oneof + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + try: + new_value = type_checker.CheckValue(new_value) + except TypeError as e: + raise TypeError( + 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) + if clear_when_set_to_default and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. + if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO(robinson): Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) + + # TODO(amauryfa): Migrate all users of these attributes to functions like + # pool.FindExtensionByNumber(descriptor). + if descriptor.file is not None: + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
+ pool = descriptor.file.pool + cls._extensions_by_number = pool._extensions_by_number[descriptor] + cls._extensions_by_name = pool._extensions_by_name[descriptor] + +def _AddStaticMethods(cls): + # TODO(robinson): This probably needs to be thread-safe(?) + def RegisterExtension(extension_handle): + extension_handle.containing_type = cls.DESCRIPTOR + # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. + # pylint: disable=protected-access + cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) + _AttachFieldHelpers(cls, extension_handle) + cls.RegisterExtension = staticmethod(RegisterExtension) + + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + +_PROTO3_ERROR_TEMPLATE = \ + ('Protocol message %s has no non-repeated submessage field "%s" ' + 'nor marked as optional') +_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + is_proto3 = (message_descriptor.syntax == "proto3") + error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and + not field.containing_oneof): + continue + hassable_fields[field.name] = field + + # Has methods are supported for oneof descriptors. + for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError: + raise ValueError(error_msg % (message_descriptor.full_name, field_name)) + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. 
+ if hasattr(self._fields[field], 'InvalidateIterators'): + self._fields[field].InvalidateIterators() + + # Note: If the field is a sub-message, its listener will still point + # at us. That's fine, because the worst than can happen is that it + # will call _Modified() and invalidate our byte size. Big deal. + del self._fields[field] + + if self._oneofs.get(field.containing_oneof, None) is field: + del self._oneofs[field.containing_oneof] + + # Always call _Modified() -- even if nothing was changed, this is + # a mutating method, and thus calling it should cause the field to become + # present in the parent message. + self._Modified() + + cls.ClearField = ClearField + + +def _AddClearExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def ClearExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + + # Similar to ClearField(), above. + if extension_handle in self._fields: + del self._fields[extension_handle] + self._Modified() + cls.ClearExtension = ClearExtension + + +def _AddHasExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def HasExtension(self, extension_handle): + extension_dict._VerifyExtensionHandle(self, extension_handle) + if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: + raise KeyError('"%s" is repeated.' % extension_handle.full_name) + + if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(extension_handle) + return value is not None and value._is_present_in_parent + else: + return extension_handle in self._fields + cls.HasExtension = HasExtension + +def _InternalUnpackAny(msg): + """Unpacks Any message and returns the unpacked message. + + This internal method is different from public Any Unpack method which takes + the target message as argument. _InternalUnpackAny method does not have + target message type and need to find the message type in descriptor pool. + + Args: + msg: An Any message to be unpacked. + + Returns: + The unpacked message. + """ + # TODO(amauryfa): Don't use the factory of generated messages. + # To make Any work with custom factories, use the message factory of the + # parent message. + # pylint: disable=g-import-not-at-top + from google.protobuf import symbol_database + factory = symbol_database.Default() + + type_url = msg.type_url + + if not type_url: + return None + + # TODO(haberman): For now we just strip the hostname. Better logic will be + # required. + type_name = type_url.split('/')[-1] + descriptor = factory.pool.FindMessageTypeByName(type_name) + + if descriptor is None: + return None + + message_class = factory.GetPrototype(descriptor) + message = message_class() + + message.ParseFromString(msg.value) + return message + + +def _AddEqualsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __eq__(self, other): + if (not isinstance(other, message_mod.Message) or + other.DESCRIPTOR != self.DESCRIPTOR): + return False + + if self is other: + return True + + if self.DESCRIPTOR.full_name == _AnyFullTypeName: + any_a = _InternalUnpackAny(self) + any_b = _InternalUnpackAny(other) + if any_a and any_b: + return any_a == any_b + + if not self.ListFields() == other.ListFields(): + return False + + # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, + # then use it for the comparison. 
+ unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _BytesForNonRepeatedElement(value, field_number, field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. + """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + size = descriptor.fields_by_name['key']._sizer(self.key) + size += descriptor.fields_by_name['value']._sizer(self.value) + else: + for field_descriptor, field_value in self.ListFields(): + size += field_descriptor._sizer(field_value) + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self, **kwargs): + # Check if the message has all of its required fields set. 
+ if not self.IsInitialized(): + raise message_mod.EncodeError( + 'Message %s is missing required fields: %s' % ( + self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) + return self.SerializePartialToString(**kwargs) + cls.SerializeToString = SerializeToString + + +def _AddSerializePartialToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializePartialToString(self, **kwargs): + out = BytesIO() + self._InternalSerialize(out.write, **kwargs) + return out.getvalue() + cls.SerializePartialToString = SerializePartialToString + + def InternalSerialize(self, write_bytes, deterministic=None): + if deterministic is None: + deterministic = ( + api_implementation.IsPythonDefaultSerializationDeterministic()) + else: + deterministic = bool(deterministic) + + descriptor = self.DESCRIPTOR + if descriptor.GetOptions().map_entry: + # Fields of map entry should always be serialized. + descriptor.fields_by_name['key']._encoder( + write_bytes, self.key, deterministic) + descriptor.fields_by_name['value']._encoder( + write_bytes, self.value, deterministic) + else: + for field_descriptor, field_value in self.ListFields(): + field_descriptor._encoder(write_bytes, field_value, deterministic) + for tag_bytes, value_bytes in self._unknown_fields: + write_bytes(tag_bytes) + write_bytes(value_bytes) + cls._InternalSerialize = InternalSerialize + + +def _AddMergeFromStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def MergeFromString(self, serialized): + serialized = memoryview(serialized) + length = len(serialized) + try: + if self._InternalParse(serialized, 0, length) != length: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise message_mod.DecodeError('Unexpected end-group tag.') + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') + except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. + cls.MergeFromString = MergeFromString + + local_ReadTag = decoder.ReadTag + local_SkipField = decoder.SkipField + decoders_by_tag = cls._decoders_by_tag + + def InternalParse(self, buffer, pos, end): + """Create a message from serialized bytes. + + Args: + self: Message, instance of the proto message object. + buffer: memoryview of the serialized data. + pos: int, position to start in the serialized data. + end: int, end position of the serialized data. + + Returns: + Message object. + """ + # Guard against internal misuse, since this function is called internally + # quite extensively, and its easy to accidentally pass bytes. 
+ assert isinstance(buffer, memoryview) + self._Modified() + field_dict = self._fields + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) + if field_decoder is None: + if not self._unknown_fields: # pylint: disable=protected-access + self._unknown_fields = [] # pylint: disable=protected-access + if unknown_field_set is None: + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + # pylint: disable=protected-access + unknown_field_set = self._unknown_field_set + # pylint: disable=protected-access + (tag, _) = decoder._DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if field_number == 0: + raise message_mod.DecodeError('Field number 0 is illegal.') + # TODO(jieluo): remove old_pos. + old_pos = new_pos + (data, new_pos) = decoder._DecodeUnknownField( + buffer, new_pos, wire_type) # pylint: disable=protected-access + if new_pos == -1: + return pos + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + # TODO(jieluo): remove _unknown_fields. + new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) + if new_pos == -1: + return pos + self._unknown_fields.append( + (tag_bytes, buffer[old_pos:new_pos].tobytes())) + pos = new_pos + else: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_desc: + self._UpdateOneofState(field_desc) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). + + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type.has_options and + field.message_type.GetOptions().map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". 
+ """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = '(%s)' % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = '%s[%s].' % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = '%s[%d].' % (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + '.' + sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _FullyQualifiedClassName(klass): + module = klass.__module__ + name = getattr(klass, '__qualname__', klass.__name__) + if module in (None, 'builtins', '__builtin__'): + return name + return module + '.' + name + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + 'Parameter to MergeFrom() must be instance of same class: ' + 'expected %s got %s.' % (_FullyQualifiedClassName(cls), + _FullyQualifiedClassName(msg.__class__))) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + # pylint: disable=protected-access + if self._unknown_field_set is None: + self._unknown_field_set = containers.UnknownFieldSet() + self._unknown_field_set._extend(msg._unknown_field_set) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' % oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _Clear(self): + # Clear fields. 
+ self._fields = {} + self._unknown_fields = () + # pylint: disable=protected-access + if self._unknown_field_set is not None: + self._unknown_field_set._clear() + self._unknown_field_set = None + + self._oneofs = {} + self._Modified() + + +def _UnknownFields(self): + if self._unknown_field_set is None: # pylint: disable=protected-access + # pylint: disable=protected-access + self._unknown_field_set = containers.UnknownFieldSet() + return self._unknown_field_set # pylint: disable=protected-access + + +def _DiscardUnknownFields(self): + self._unknown_fields = [] + self._unknown_field_set = None # pylint: disable=protected-access + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + value[key].DiscardUnknownFields() + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for sub_message in value: + sub_message.DiscardUnknownFields() + else: + value.DiscardUnknownFields() + + +def _SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + _AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + # Adds methods which do not depend on cls. + cls.Clear = _Clear + cls.UnknownFields = _UnknownFields + cls.DiscardUnknownFields = _DiscardUnknownFields + cls._SetListener = _SetListener + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. + + Will also delete currently active field in the oneof, if it is different + from the argument. Does not mark the message as modified. 
+ """ + other_field = self._oneofs.setdefault(field.containing_oneof, field) + if other_field is not field: + del self._fields[other_field] + self._oneofs[field.containing_oneof] = field + + cls._Modified = Modified + cls.SetInParent = Modified + cls._UpdateOneofState = _UpdateOneofState + + +class _Listener(object): + + """MessageListener implementation that a parent message registers with its + child message. + + In order to support semantics like: + + foo.bar.baz.qux = 23 + assert foo.HasField('bar') + + ...child objects must have back references to their parents. + This helper class is at the heart of this support. + """ + + def __init__(self, parent_message): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + """ + # This listener establishes a back reference from a child (contained) object + # to its parent (containing) object. We make this a weak reference to avoid + # creating cyclic garbage when the client finishes with the 'parent' object + # in the tree. + if isinstance(parent_message, weakref.ProxyType): + self._parent_message_weakref = parent_message + else: + self._parent_message_weakref = weakref.proxy(parent_message) + + # As an optimization, we also indicate directly on the listener whether + # or not the parent message is dirty. This way we can avoid traversing + # up the tree in the common case. + self.dirty = False + + def Modified(self): + if self.dirty: + return + try: + # Propagate the signal to our parents iff this is the first field set. + self._parent_message_weakref._Modified() + except ReferenceError: + # We can get here if a client has kept a reference to a child object, + # and is now setting a field on it, but the child's parent has been + # garbage-collected. This is not an error. + pass + + +class _OneofListener(_Listener): + """Special listener implementation for setting composite oneof fields.""" + + def __init__(self, parent_message, field): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + field: The descriptor of the field being set in the parent message. + """ + super(_OneofListener, self).__init__(parent_message) + self._field = field + + def Modified(self): + """Also updates the state of the containing oneof in the parent message.""" + try: + self._parent_message_weakref._UpdateOneofState(self._field) + super(_OneofListener, self).Modified() + except ReferenceError: + pass diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py new file mode 100644 index 0000000000..a53e71fe8e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/type_checkers.py @@ -0,0 +1,435 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides type checking routines. + +This module defines type checking utilities in the forms of dictionaries: + +VALUE_CHECKERS: A dictionary of field types and a value validation object. +TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing + function. +TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization + function. +FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their + corresponding wire types. +TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization + function. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import ctypes +import numbers + +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import descriptor + +_FieldDescriptor = descriptor.FieldDescriptor + + +def TruncateToFourByteFloat(original): + return ctypes.c_float(original).value + + +def ToShortestFloat(original): + """Returns the shortest float that has same value in wire.""" + # All 4 byte floats have between 6 and 9 significant digits, so we + # start with 6 as the lower bound. + # It has to be iterative because use '.9g' directly can not get rid + # of the noises for most values. For example if set a float_field=0.9 + # use '.9g' will print 0.899999976. + precision = 6 + rounded = float('{0:.{1}g}'.format(original, precision)) + while TruncateToFourByteFloat(rounded) != original: + precision += 1 + rounded = float('{0:.{1}g}'.format(original, precision)) + return rounded + + +def SupportsOpenEnums(field_descriptor): + return field_descriptor.containing_type.syntax == 'proto3' + + +def GetTypeChecker(field): + """Returns a type checker for a message field of the specified types. + + Args: + field: FieldDescriptor object for this field. + + Returns: + An instance of TypeChecker which can be used to verify the types + of values assigned to a field of the specified type. + """ + if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and + field.type == _FieldDescriptor.TYPE_STRING): + return UnicodeValueChecker() + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + if SupportsOpenEnums(field): + # When open enums are supported, any int32 can be assigned. + return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] + else: + return EnumValueChecker(field.enum_type) + return _VALUE_CHECKERS[field.cpp_type] + + +# None of the typecheckers below make any attempt to guard against people +# subclassing builtin types and doing weird things. 
We're not trying to +# protect against malicious clients here, just people accidentally shooting +# themselves in the foot in obvious ways. +class TypeChecker(object): + + """Type checker used to catch type errors as early as possible + when the client is setting scalar fields in protocol messages. + """ + + def __init__(self, *acceptable_types): + self._acceptable_types = acceptable_types + + def CheckValue(self, proposed_value): + """Type check the provided value and return it. + + The returned value might have been normalized to another type. + """ + if not isinstance(proposed_value, self._acceptable_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), self._acceptable_types)) + raise TypeError(message) + return proposed_value + + +class TypeCheckerWithDefault(TypeChecker): + + def __init__(self, default_value, *acceptable_types): + TypeChecker.__init__(self, *acceptable_types) + self._default_value = default_value + + def DefaultValue(self): + return self._default_value + + +class BoolValueChecker(object): + """Type checker used for bool fields.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bool, int))) + raise TypeError(message) + return bool(proposed_value) + + def DefaultValue(self): + return False + + +# IntValueChecker and its subclasses perform integer type-checks +# and bounds-checks. +class IntValueChecker(object): + + """Checker used for integer fields. Performs type-check and range check.""" + + def CheckValue(self, proposed_value): + if not hasattr(proposed_value, '__index__') or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + + if not self._MIN <= int(proposed_value) <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) + # We force all values to int to make alternate implementations where the + # distinction is more significant (e.g. the C++ implementation) simpler. + proposed_value = int(proposed_value) + return proposed_value + + def DefaultValue(self): + return 0 + + +class EnumValueChecker(object): + + """Checker used for enum fields. Performs type-check and range check.""" + + def __init__(self, enum_type): + self._enum_type = enum_type + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, numbers.Integral): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (int,))) + raise TypeError(message) + if int(proposed_value) not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) + return proposed_value + + def DefaultValue(self): + return self._enum_type.values[0].number + + +class UnicodeValueChecker(object): + + """Checker used for string fields. + + Always returns a unicode value, even if the input is of type str. + """ + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, (bytes, str)): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bytes, str))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
+ if isinstance(proposed_value, bytes): + try: + proposed_value = proposed_value.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' + 'encoding. Non-UTF-8 strings must be converted to ' + 'unicode objects before being added.' % + (proposed_value)) + else: + try: + proposed_value.encode('utf8') + except UnicodeEncodeError: + raise ValueError('%.1024r isn\'t a valid unicode string and ' + 'can\'t be encoded in UTF-8.'% + (proposed_value)) + + return proposed_value + + def DefaultValue(self): + return u"" + + +class Int32ValueChecker(IntValueChecker): + # We're sure to use ints instead of longs here since comparison may be more + # efficient. + _MIN = -2147483648 + _MAX = 2147483647 + + +class Uint32ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 32) - 1 + + +class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 + + +class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 + + +# The max 4 bytes float is about 3.4028234663852886e+38 +_FLOAT_MAX = float.fromhex('0x1.fffffep+127') +_FLOAT_MIN = -_FLOAT_MAX +_INF = float('inf') +_NEG_INF = float('-inf') + + +class DoubleValueChecker(object): + """Checker used for double fields. + + Performs type-check and range check. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + if (not hasattr(proposed_value, '__float__') and + not hasattr(proposed_value, '__index__')) or ( + type(proposed_value).__module__ == 'numpy' and + type(proposed_value).__name__ == 'ndarray'): + message = ('%.1024r has type %s, but expected one of: int, float' % + (proposed_value, type(proposed_value))) + raise TypeError(message) + return float(proposed_value) + + def DefaultValue(self): + return 0.0 + + +class FloatValueChecker(DoubleValueChecker): + """Checker used for float fields. + + Performs type-check and range check. + + Values exceeding a 32-bit float will be converted to inf/-inf. + """ + + def CheckValue(self, proposed_value): + """Check and convert proposed_value to float.""" + converted_value = super().CheckValue(proposed_value) + # This inf rounding matches the C++ proto SafeDoubleToFloat logic. + if converted_value > _FLOAT_MAX: + return _INF + if converted_value < _FLOAT_MIN: + return _NEG_INF + + return TruncateToFourByteFloat(converted_value) + +# Type-checkers for all scalar CPPTYPEs. +_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), + _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), + _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), + _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), +} + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". 
+TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. +TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. +TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. +FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py new file mode 100644 index 0000000000..b581ab750a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/well_known_types.py @@ -0,0 +1,878 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains well known classes. + +This files defines well known classes which need extra maintenance including: + - Any + - Duration + - FieldMask + - Struct + - Timestamp +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +import calendar +import collections.abc +import datetime + +from google.protobuf.descriptor import FieldDescriptor + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_NANOS_PER_SECOND = 1000000000 +_NANOS_PER_MILLISECOND = 1000000 +_NANOS_PER_MICROSECOND = 1000 +_MILLIS_PER_SECOND = 1000 +_MICROS_PER_SECOND = 1000000 +_SECONDS_PER_DAY = 24 * 3600 +_DURATION_SECONDS_MAX = 315576000000 + + +class Any(object): + """Class for Any Message type.""" + + __slots__ = () + + def Pack(self, msg, type_url_prefix='type.googleapis.com/', + deterministic=None): + """Packs the specified message into current Any message.""" + if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': + self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + else: + self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) + self.value = msg.SerializeToString(deterministic=deterministic) + + def Unpack(self, msg): + """Unpacks the current Any message into specified message.""" + descriptor = msg.DESCRIPTOR + if not self.Is(descriptor): + return False + msg.ParseFromString(self.value) + return True + + def TypeName(self): + """Returns the protobuf type name of the inner message.""" + # Only last part is to be used: b/25630112 + return self.type_url.split('/')[-1] + + def Is(self, descriptor): + """Checks if this Any represents the given protobuf type.""" + return '/' in self.type_url and self.TypeName() == descriptor.full_name + + +_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) +_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( + 0, tz=datetime.timezone.utc) + + +class Timestamp(object): + """Class for Timestamp message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Timestamp to RFC 3339 date string format. + + Returns: + A string converted from timestamp. The string is always Z-normalized + and uses 3, 6 or 9 fractional digits as required to represent the + exact time. Example of the return format: '1972-01-01T10:00:20.021Z' + """ + nanos = self.nanos % _NANOS_PER_SECOND + total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND + seconds = total_sec % _SECONDS_PER_DAY + days = (total_sec - seconds) // _SECONDS_PER_DAY + dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) + + result = dt.isoformat() + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 'Z' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. 
+ return result + '.%03dZ' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06dZ' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09dZ' % nanos + + def FromJsonString(self, value): + """Parse a RFC 3339 date string format to Timestamp. + + Args: + value: A date string. Any fractional digits (or none) and any offset are + accepted as long as they fit into nano-seconds precision. + Example of accepted format: '1972-01-01T10:00:20.021-05:00' + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) + timezone_offset = value.find('Z') + if timezone_offset == -1: + timezone_offset = value.find('+') + if timezone_offset == -1: + timezone_offset = value.rfind('-') + if timezone_offset == -1: + raise ValueError( + 'Failed to parse timestamp: missing valid timezone offset.') + time_value = value[0:timezone_offset] + # Parse datetime and nanos. + point_position = time_value.find('.') + if point_position == -1: + second_value = time_value + nano_value = '' + else: + second_value = time_value[:point_position] + nano_value = time_value[point_position + 1:] + if 't' in second_value: + raise ValueError( + 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' + 'lowercase \'t\' is not accepted'.format(second_value)) + date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) + td = date_object - datetime.datetime(1970, 1, 1) + seconds = td.seconds + td.days * _SECONDS_PER_DAY + if len(nano_value) > 9: + raise ValueError( + 'Failed to parse Timestamp: nanos {0} more than ' + '9 fractional digits.'.format(nano_value)) + if nano_value: + nanos = round(float('0.' + nano_value) * 1e9) + else: + nanos = 0 + # Parse timezone offsets. 
+ if value[timezone_offset] == 'Z': + if len(value) != timezone_offset + 1: + raise ValueError('Failed to parse timestamp: invalid trailing' + ' data {0}.'.format(value)) + else: + timezone = value[timezone_offset:] + pos = timezone.find(':') + if pos == -1: + raise ValueError( + 'Invalid timezone offset value: {0}.'.format(timezone)) + if timezone[0] == '+': + seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + else: + seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + # Set seconds and nanos + self.seconds = int(seconds) + self.nanos = int(nanos) + + def GetCurrentTime(self): + """Get the current UTC into Timestamp.""" + self.FromDatetime(datetime.datetime.utcnow()) + + def ToNanoseconds(self): + """Converts Timestamp to nanoseconds since epoch.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts Timestamp to microseconds since epoch.""" + return (self.seconds * _MICROS_PER_SECOND + + self.nanos // _NANOS_PER_MICROSECOND) + + def ToMilliseconds(self): + """Converts Timestamp to milliseconds since epoch.""" + return (self.seconds * _MILLIS_PER_SECOND + + self.nanos // _NANOS_PER_MILLISECOND) + + def ToSeconds(self): + """Converts Timestamp to seconds since epoch.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds since epoch to Timestamp.""" + self.seconds = nanos // _NANOS_PER_SECOND + self.nanos = nanos % _NANOS_PER_SECOND + + def FromMicroseconds(self, micros): + """Converts microseconds since epoch to Timestamp.""" + self.seconds = micros // _MICROS_PER_SECOND + self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND + + def FromMilliseconds(self, millis): + """Converts milliseconds since epoch to Timestamp.""" + self.seconds = millis // _MILLIS_PER_SECOND + self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND + + def FromSeconds(self, seconds): + """Converts seconds since epoch to Timestamp.""" + self.seconds = seconds + self.nanos = 0 + + def ToDatetime(self, tzinfo=None): + """Converts Timestamp to a datetime. + + Args: + tzinfo: A datetime.tzinfo subclass; defaults to None. + + Returns: + If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone + information, i.e. not aware that it's UTC). + + Otherwise, returns a timezone-aware datetime in the input timezone. + """ + delta = datetime.timedelta( + seconds=self.seconds, + microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) + if tzinfo is None: + return _EPOCH_DATETIME_NAIVE + delta + else: + return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta + + def FromDatetime(self, dt): + """Converts datetime to Timestamp. + + Args: + dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. + """ + # Using this guide: http://wiki.python.org/moin/WorkingWithTime + # And this conversion guide: http://docs.python.org/library/time.html + + # Turn the date parameter into a tuple (struct_time) that can then be + # manipulated into a long value of seconds. During the conversion from + # struct_time to long, the source date in UTC, and so it follows that the + # correct transformation is calendar.timegm() + self.seconds = calendar.timegm(dt.utctimetuple()) + self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND + + +class Duration(object): + """Class for Duration message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts Duration to string format. + + Returns: + A string converted from self. 
The string format will contains + 3, 6, or 9 fractional digits depending on the precision required to + represent the exact Duration value. For example: "1s", "1.010s", + "1.000000100s", "-3.100s" + """ + _CheckDurationValid(self.seconds, self.nanos) + if self.seconds < 0 or self.nanos < 0: + result = '-' + seconds = - self.seconds + int((0 - self.nanos) // 1e9) + nanos = (0 - self.nanos) % 1e9 + else: + result = '' + seconds = self.seconds + int(self.nanos // 1e9) + nanos = self.nanos % 1e9 + result += '%d' % seconds + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 's' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. + return result + '.%03ds' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06ds' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09ds' % nanos + + def FromJsonString(self, value): + """Converts a string to Duration. + + Args: + value: A string to be converted. The string must end with 's'. Any + fractional digits (or none) are accepted as long as they fit into + precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s + + Raises: + ValueError: On parsing problems. + """ + if not isinstance(value, str): + raise ValueError('Duration JSON value not a string: {!r}'.format(value)) + if len(value) < 1 or value[-1] != 's': + raise ValueError( + 'Duration must end with letter "s": {0}.'.format(value)) + try: + pos = value.find('.') + if pos == -1: + seconds = int(value[:-1]) + nanos = 0 + else: + seconds = int(value[:pos]) + if value[0] == '-': + nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) + else: + nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) + _CheckDurationValid(seconds, nanos) + self.seconds = seconds + self.nanos = nanos + except ValueError as e: + raise ValueError( + 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) + + def ToNanoseconds(self): + """Converts a Duration to nanoseconds.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts a Duration to microseconds.""" + micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) + return self.seconds * _MICROS_PER_SECOND + micros + + def ToMilliseconds(self): + """Converts a Duration to milliseconds.""" + millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) + return self.seconds * _MILLIS_PER_SECOND + millis + + def ToSeconds(self): + """Converts a Duration to seconds.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds to Duration.""" + self._NormalizeDuration(nanos // _NANOS_PER_SECOND, + nanos % _NANOS_PER_SECOND) + + def FromMicroseconds(self, micros): + """Converts microseconds to Duration.""" + self._NormalizeDuration( + micros // _MICROS_PER_SECOND, + (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) + + def FromMilliseconds(self, millis): + """Converts milliseconds to Duration.""" + self._NormalizeDuration( + millis // _MILLIS_PER_SECOND, + (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) + + def FromSeconds(self, seconds): + """Converts seconds to Duration.""" + self.seconds = seconds + self.nanos = 0 + + def ToTimedelta(self): + """Converts Duration to timedelta.""" + return datetime.timedelta( + seconds=self.seconds, microseconds=_RoundTowardZero( + self.nanos, _NANOS_PER_MICROSECOND)) + + def FromTimedelta(self, td): + """Converts timedelta to Duration.""" + 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, + td.microseconds * _NANOS_PER_MICROSECOND) + + def _NormalizeDuration(self, seconds, nanos): + """Set Duration by seconds and nanos.""" + # Force nanos to be negative if the duration is negative. + if seconds < 0 and nanos > 0: + seconds += 1 + nanos -= _NANOS_PER_SECOND + self.seconds = seconds + self.nanos = nanos + + +def _CheckDurationValid(seconds, nanos): + if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: + raise ValueError( + 'Duration is not valid: Seconds {0} must be in range ' + '[-315576000000, 315576000000].'.format(seconds)) + if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: + raise ValueError( + 'Duration is not valid: Nanos {0} must be in range ' + '[-999999999, 999999999].'.format(nanos)) + if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): + raise ValueError( + 'Duration is not valid: Sign mismatch.') + + +def _RoundTowardZero(value, divider): + """Truncates the remainder part after division.""" + # For some languages, the sign of the remainder is implementation + # dependent if any of the operands is negative. Here we enforce + # "rounded toward zero" semantics. For example, for (-5) / 2 an + # implementation may give -3 as the result with the remainder being + # 1. This function ensures we always return -2 (closer to zero). + result = value // divider + remainder = value % divider + if result < 0 and remainder > 0: + return result + 1 + else: + return result + + +class FieldMask(object): + """Class for FieldMask message type.""" + + __slots__ = () + + def ToJsonString(self): + """Converts FieldMask to string according to proto3 JSON spec.""" + camelcase_paths = [] + for path in self.paths: + camelcase_paths.append(_SnakeCaseToCamelCase(path)) + return ','.join(camelcase_paths) + + def FromJsonString(self, value): + """Converts string to FieldMask according to proto3 JSON spec.""" + if not isinstance(value, str): + raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) + self.Clear() + if value: + for path in value.split(','): + self.paths.append(_CamelCaseToSnakeCase(path)) + + def IsValidForDescriptor(self, message_descriptor): + """Checks whether the FieldMask is valid for Message Descriptor.""" + for path in self.paths: + if not _IsValidPath(message_descriptor, path): + return False + return True + + def AllFieldsFromDescriptor(self, message_descriptor): + """Gets all direct fields of Message Descriptor to FieldMask.""" + self.Clear() + for field in message_descriptor.fields: + self.paths.append(field.name) + + def CanonicalFormFromMask(self, mask): + """Converts a FieldMask to the canonical form. + + Removes paths that are covered by another path. For example, + "foo.bar" is covered by "foo" and will be removed if "foo" + is also in the FieldMask. Then sorts all paths in alphabetical order. + + Args: + mask: The original FieldMask to be converted. 
+ """ + tree = _FieldMaskTree(mask) + tree.ToFieldMask(self) + + def Union(self, mask1, mask2): + """Merges mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + tree.MergeFromFieldMask(mask2) + tree.ToFieldMask(self) + + def Intersect(self, mask1, mask2): + """Intersects mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + intersection = _FieldMaskTree() + for path in mask2.paths: + tree.IntersectPath(path, intersection) + intersection.ToFieldMask(self) + + def MergeMessage( + self, source, destination, + replace_message_field=False, replace_repeated_field=False): + """Merges fields specified in FieldMask from source to destination. + + Args: + source: Source message. + destination: The destination message to be merged into. + replace_message_field: Replace message field if True. Merge message + field if False. + replace_repeated_field: Replace repeated field if True. Append + elements of repeated field if False. + """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name.get(name) + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +def _SnakeCaseToCamelCase(path_name): + """Converts a path name from snake_case to camelCase.""" + result = [] + after_underscore = False + for c in path_name: + if c.isupper(): + raise ValueError( + 'Fail to print FieldMask to Json string: Path name ' + '{0} must not contain uppercase letters.'.format(path_name)) + if after_underscore: + if c.islower(): + result.append(c.upper()) + after_underscore = False + else: + raise ValueError( + 'Fail to print FieldMask to Json string: The ' + 'character after a "_" must be a lowercase letter ' + 'in path name {0}.'.format(path_name)) + elif c == '_': + after_underscore = True + else: + result += c + + if after_underscore: + raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' + 'in path name {0}.'.format(path_name)) + return ''.join(result) + + +def _CamelCaseToSnakeCase(path_name): + """Converts a field name from camelCase to snake_case.""" + result = [] + for c in path_name: + if c == '_': + raise ValueError('Fail to parse FieldMask: Path name ' + '{0} must not contain "_"s.'.format(path_name)) + if c.isupper(): + result += '_' + result += c.lower() + else: + result += c + return ''.join(result) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. 
+ """ + + __slots__ = ('_root',) + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. + """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. + if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. 
+ if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + if source.HasField(name): + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + repeated_destination.MergeFrom(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node and prefix: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' + name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) + + +def _SetStructValue(struct_value, value): + if value is None: + struct_value.null_value = 0 + elif isinstance(value, bool): + # Note: this check must come before the number check because in Python + # True and False are also considered numbers. + struct_value.bool_value = value + elif isinstance(value, str): + struct_value.string_value = value + elif isinstance(value, (int, float)): + struct_value.number_value = value + elif isinstance(value, (dict, Struct)): + struct_value.struct_value.Clear() + struct_value.struct_value.update(value) + elif isinstance(value, (list, ListValue)): + struct_value.list_value.Clear() + struct_value.list_value.extend(value) + else: + raise ValueError('Unexpected type') + + +def _GetStructValue(struct_value): + which = struct_value.WhichOneof('kind') + if which == 'struct_value': + return struct_value.struct_value + elif which == 'null_value': + return None + elif which == 'number_value': + return struct_value.number_value + elif which == 'string_value': + return struct_value.string_value + elif which == 'bool_value': + return struct_value.bool_value + elif which == 'list_value': + return struct_value.list_value + elif which is None: + raise ValueError('Value not set') + + +class Struct(object): + """Class for Struct message type.""" + + __slots__ = () + + def __getitem__(self, key): + return _GetStructValue(self.fields[key]) + + def __contains__(self, item): + return item in self.fields + + def __setitem__(self, key, value): + _SetStructValue(self.fields[key], value) + + def __delitem__(self, key): + del self.fields[key] + + def __len__(self): + return len(self.fields) + + def __iter__(self): + return iter(self.fields) + + def keys(self): # pylint: disable=invalid-name + return self.fields.keys() + + def values(self): # pylint: disable=invalid-name + return [self[key] for key in self] + + def items(self): # pylint: disable=invalid-name + return [(key, self[key]) for key in self] + + def get_or_create_list(self, key): + """Returns a list for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('list_value'): + # Clear will mark list_value modified which will indeed create a list. 
+ self.fields[key].list_value.Clear() + return self.fields[key].list_value + + def get_or_create_struct(self, key): + """Returns a struct for this key, creating if it didn't exist already.""" + if not self.fields[key].HasField('struct_value'): + # Clear will mark struct_value modified which will indeed create a struct. + self.fields[key].struct_value.Clear() + return self.fields[key].struct_value + + def update(self, dictionary): # pylint: disable=invalid-name + for key, value in dictionary.items(): + _SetStructValue(self.fields[key], value) + +collections.abc.MutableMapping.register(Struct) + + +class ListValue(object): + """Class for ListValue message type.""" + + __slots__ = () + + def __len__(self): + return len(self.values) + + def append(self, value): + _SetStructValue(self.values.add(), value) + + def extend(self, elem_seq): + for value in elem_seq: + self.append(value) + + def __getitem__(self, index): + """Retrieves item by the specified index.""" + return _GetStructValue(self.values.__getitem__(index)) + + def __setitem__(self, index, value): + _SetStructValue(self.values.__getitem__(index), value) + + def __delitem__(self, key): + del self.values[key] + + def items(self): + for i in range(len(self)): + yield self[i] + + def add_struct(self): + """Appends and returns a struct value as the next value in the list.""" + struct_value = self.values.add().struct_value + # Clear will mark struct_value modified which will indeed create a struct. + struct_value.Clear() + return struct_value + + def add_list(self): + """Appends and returns a list value as the next value in the list.""" + list_value = self.values.add().list_value + # Clear will mark list_value modified which will indeed create a list. + list_value.Clear() + return list_value + +collections.abc.MutableSequence.register(ListValue) + + +WKTBASES = { + 'google.protobuf.Any': Any, + 'google.protobuf.Duration': Duration, + 'google.protobuf.FieldMask': FieldMask, + 'google.protobuf.ListValue': ListValue, + 'google.protobuf.Struct': Struct, + 'google.protobuf.Timestamp': Timestamp, +} diff --git a/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py b/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py new file mode 100644 index 0000000000..883f525585 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/internal/wire_format.py @@ -0,0 +1,268 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Constants and static functions to support protocol buffer wire format."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import struct
+from google.protobuf import descriptor
+from google.protobuf import message
+
+
+TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
+TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7
+
+# These numbers identify the wire type of a protocol buffer value.
+# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
+# tag-and-type to store one of these WIRETYPE_* constants.
+# These values must match WireType enum in google/protobuf/wire_format.h.
+WIRETYPE_VARINT = 0
+WIRETYPE_FIXED64 = 1
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+WIRETYPE_END_GROUP = 4
+WIRETYPE_FIXED32 = 5
+_WIRETYPE_MAX = 5
+
+
+# Bounds for various integer types.
+INT32_MAX = int((1 << 31) - 1)
+INT32_MIN = int(-(1 << 31))
+UINT32_MAX = (1 << 32) - 1
+
+INT64_MAX = (1 << 63) - 1
+INT64_MIN = -(1 << 63)
+UINT64_MAX = (1 << 64) - 1
+
+# "struct" format strings that will encode/decode the specified formats.
+FORMAT_UINT32_LITTLE_ENDIAN = '<I'
+FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
+FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
+FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
+
+
+# We'll have to provide alternate implementations of AppendLittleEndian*() on
+# any architectures where these checks fail.
+if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
+  raise AssertionError('Format "I" is not a 32-bit number.')
+if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
+  raise AssertionError('Format "Q" is not a 64-bit number.')
+
+
+def PackTag(field_number, wire_type):
+  """Returns an unsigned 32-bit integer that encodes the field number and
+  wire type information in standard protocol message wire format.
+
+  Args:
+    field_number: Expected to be an integer in the range [1, 1 << 29)
+    wire_type: One of the WIRETYPE_* constants.
+  """
+  if not 0 <= wire_type <= _WIRETYPE_MAX:
+    raise message.EncodeError('Unknown wire type: %d' % wire_type)
+  return (field_number << TAG_TYPE_BITS) | wire_type
+
+
+def UnpackTag(tag):
+  """The inverse of PackTag().  Given an unsigned 32-bit number,
+  returns a (field_number, wire_type) tuple.
+  """
+  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
+
+
+def ZigZagEncode(value):
+  """ZigZag Transform: Encodes signed integers so that they can be
+  effectively used with varint encoding. See wire_format.h for
+  more details.
+  """
+  if value >= 0:
+    return value << 1
+  return (value << 1) ^ (~0)
+
+
+def ZigZagDecode(value):
+  """Inverse of ZigZagEncode()."""
+  if not value & 0x1:
+    return value >> 1
+  return (value >> 1) ^ (~0)
+
+
+
+# The *ByteSize() functions below return the number of bytes required to
+# serialize "field number + type" information and then serialize the value.
+
+
+def Int32ByteSize(field_number, int32):
+  return Int64ByteSize(field_number, int32)
+
+
+def Int32ByteSizeNoTag(int32):
+  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
+
+
+def Int64ByteSize(field_number, int64):
+  # Have to convert to uint before calling UInt64ByteSize().
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) + + +def UInt32ByteSize(field_number, uint32): + return UInt64ByteSize(field_number, uint32) + + +def UInt64ByteSize(field_number, uint64): + return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) + + +def SInt32ByteSize(field_number, int32): + return UInt32ByteSize(field_number, ZigZagEncode(int32)) + + +def SInt64ByteSize(field_number, int64): + return UInt64ByteSize(field_number, ZigZagEncode(int64)) + + +def Fixed32ByteSize(field_number, fixed32): + return TagByteSize(field_number) + 4 + + +def Fixed64ByteSize(field_number, fixed64): + return TagByteSize(field_number) + 8 + + +def SFixed32ByteSize(field_number, sfixed32): + return TagByteSize(field_number) + 4 + + +def SFixed64ByteSize(field_number, sfixed64): + return TagByteSize(field_number) + 8 + + +def FloatByteSize(field_number, flt): + return TagByteSize(field_number) + 4 + + +def DoubleByteSize(field_number, double): + return TagByteSize(field_number) + 8 + + +def BoolByteSize(field_number, b): + return TagByteSize(field_number) + 1 + + +def EnumByteSize(field_number, enum): + return UInt32ByteSize(field_number, enum) + + +def StringByteSize(field_number, string): + return BytesByteSize(field_number, string.encode('utf-8')) + + +def BytesByteSize(field_number, b): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(len(b)) + + len(b)) + + +def GroupByteSize(field_number, message): + return (2 * TagByteSize(field_number) # START and END group. + + message.ByteSize()) + + +def MessageByteSize(field_number, message): + return (TagByteSize(field_number) + + _VarUInt64ByteSizeNoTag(message.ByteSize()) + + message.ByteSize()) + + +def MessageSetItemByteSize(field_number, msg): + # First compute the sizes of the tags. + # There are 2 tags for the beginning and ending of the repeated group, that + # is field number 1, one with field number 2 (type_id) and one with field + # number 3 (message). + total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) + + # Add the number of bytes for type_id. + total_size += _VarUInt64ByteSizeNoTag(field_number) + + message_size = msg.ByteSize() + + # The number of bytes for encoding the length of the message. + total_size += _VarUInt64ByteSizeNoTag(message_size) + + # The size of the message. + total_size += message_size + return total_size + + +def TagByteSize(field_number): + """Returns the bytes required to serialize a tag with this field number.""" + # Just pass in type 0, since the type won't affect the tag+type size. + return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) + + +# Private helper function for the *ByteSize() functions above. + +def _VarUInt64ByteSizeNoTag(uint64): + """Returns the number of bytes required to serialize a single varint + using boundary value comparisons. (unrolled loop optimization -WPierce) + uint64 must be unsigned. 
+ """ + if uint64 <= 0x7f: return 1 + if uint64 <= 0x3fff: return 2 + if uint64 <= 0x1fffff: return 3 + if uint64 <= 0xfffffff: return 4 + if uint64 <= 0x7ffffffff: return 5 + if uint64 <= 0x3ffffffffff: return 6 + if uint64 <= 0x1ffffffffffff: return 7 + if uint64 <= 0xffffffffffffff: return 8 + if uint64 <= 0x7fffffffffffffff: return 9 + if uint64 > UINT64_MAX: + raise message.EncodeError('Value out of range: %d' % uint64) + return 10 + + +NON_PACKABLE_TYPES = ( + descriptor.FieldDescriptor.TYPE_STRING, + descriptor.FieldDescriptor.TYPE_GROUP, + descriptor.FieldDescriptor.TYPE_MESSAGE, + descriptor.FieldDescriptor.TYPE_BYTES +) + + +def IsTypePackable(field_type): + """Return true iff packable = true is valid for fields of this type. + + Args: + field_type: a FieldDescriptor::Type value. + + Returns: + True iff fields of this type are packable. + """ + return field_type not in NON_PACKABLE_TYPES diff --git a/openpype/hosts/nuke/vendor/google/protobuf/json_format.py b/openpype/hosts/nuke/vendor/google/protobuf/json_format.py new file mode 100644 index 0000000000..5024ed89d7 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/json_format.py @@ -0,0 +1,912 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in JSON format. + +Simple usage example: + + # Create a proto object and serialize it to a json format string. + message = my_proto_pb2.MyMessage(foo='bar') + json_string = json_format.MessageToJson(message) + + # Parse a json format string to proto object. 
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + + +import base64 +from collections import OrderedDict +import json +import math +from operator import methodcaller +import re +import sys + +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import symbol_database + + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, + descriptor.FieldDescriptor.CPPTYPE_UINT32, + descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, + descriptor.FieldDescriptor.CPPTYPE_UINT64]) +_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, + descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) +_INFINITY = 'Infinity' +_NEG_INFINITY = '-Infinity' +_NAN = 'NaN' + +_UNPAIRED_SURROGATE_PATTERN = re.compile( + u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: + raise ParseError('Message too deep. Max recursion depth is {0}'.format( + self.max_recursion_depth)) + message_descriptor = message.DESCRIPTOR + full_name = message_descriptor.full_name + if not path: + path = message_descriptor.name + if _IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value, message, path) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) + else: + self._ConvertFieldValuePair(value, message, path) + self.recursion_depth -= 1 + + def _ConvertFieldValuePair(self, js, message, path): + """Convert field value pairs into regular message. + + Args: + js: A JSON object to convert the field value pairs. + message: A regular protocol message to record the data. + path: parent path to log parse error info. + + Raises: + ParseError: In case of problems converting. + """ + names = [] + message_descriptor = message.DESCRIPTOR + fields_by_json_name = dict((f.json_name, f) + for f in message_descriptor.fields) + for name in js: + try: + field = fields_by_json_name.get(name, None) + if not field: + field = message_descriptor.fields_by_name.get(name, None) + if not field and _VALID_EXTENSION_NAME.match(name): + if not message_descriptor.is_extendable: + raise ParseError( + 'Message type {0} does not have extensions at {1}'.format( + message_descriptor.full_name, path)) + identifier = name[1:-1] # strip [] brackets + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + # Try looking for extension by the message type name, dropping the + # field name following the final . separator in full_name. + identifier = '.'.join(identifier.split('.')[:-1]) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(identifier) + # pylint: enable=protected-access + if not field: + if self.ignore_unknown_fields: + continue + raise ParseError( + ('Message type "{0}" has no field named "{1}" at "{2}".\n' + ' Available Fields(except extensions): "{3}"').format( + message_descriptor.full_name, name, path, + [f.json_name for f in message_descriptor.fields])) + if name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" fields at "{2}".'.format( + message.DESCRIPTOR.full_name, name, path)) + names.append(name) + value = js[name] + # Check no other oneof field is parsed. 
+ if field.containing_oneof is not None and value is not None: + oneof_name = field.containing_oneof.name + if oneof_name in names: + raise ParseError('Message type "{0}" should not have multiple ' + '"{1}" oneof fields at "{2}".'.format( + message.DESCRIPTOR.full_name, oneof_name, + path)) + names.append(oneof_name) + + if value is None: + if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.message_type.full_name == 'google.protobuf.Value'): + sub_message = getattr(message, field.name) + sub_message.null_value = 0 + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM + and field.enum_type.full_name == 'google.protobuf.NullValue'): + setattr(message, field.name, 0) + else: + message.ClearField(field.name) + continue + + # Parse field value. + if _IsMapEntry(field): + message.ClearField(field.name) + self._ConvertMapFieldValue(value, message, field, + '{0}.{1}'.format(path, name)) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + message.ClearField(field.name) + if not isinstance(value, list): + raise ParseError('repeated field {0} must be in [] which is ' + '{1} at {2}'.format(name, value, path)) + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + # Repeated message field. + for index, item in enumerate(value): + sub_message = getattr(message, field.name).add() + # None is a null_value in Value. + if (item is None and + sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + self.ConvertMessage(item, sub_message, + '{0}.{1}[{2}]'.format(path, name, index)) + else: + # Repeated scalar field. + for index, item in enumerate(value): + if item is None: + raise ParseError('null is not allowed to be used as an element' + ' in a repeated field at {0}.{1}[{2}]'.format( + path, name, index)) + getattr(message, field.name).append( + _ConvertScalarFieldValue( + item, field, '{0}.{1}[{2}]'.format(path, name, index))) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + sub_message = message.Extensions[field] + else: + sub_message = getattr(message, field.name) + sub_message.SetInParent() + self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) + else: + if field.is_extension: + message.Extensions[field] = _ConvertScalarFieldValue( + value, field, '{0}.{1}'.format(path, name)) + else: + setattr( + message, field.name, + _ConvertScalarFieldValue(value, field, + '{0}.{1}'.format(path, name))) + except ParseError as e: + if field and field.containing_oneof is None: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + else: + raise ParseError(str(e)) + except ValueError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + except TypeError as e: + raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) + + def _ConvertAnyMessage(self, value, message, path): + """Convert a JSON representation into Any message.""" + if isinstance(value, dict) and not value: + return + try: + type_url = value['@type'] + except KeyError: + raise ParseError( + '@type is missing when parsing any message at {0}'.format(path)) + + try: + sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) + except TypeError as e: + raise ParseError('{0} at {1}'.format(e, path)) + message_descriptor = sub_message.DESCRIPTOR + full_name = message_descriptor.full_name + if 
_IsWrapperMessage(message_descriptor): + self._ConvertWrapperMessage(value['value'], sub_message, + '{0}.value'.format(path)) + elif full_name in _WKTJSONMETHODS: + methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, + '{0}.value'.format(path))( + self) + else: + del value['@type'] + self._ConvertFieldValuePair(value, sub_message, path) + value['@type'] = type_url + # Sets Any message + message.value = sub_message.SerializeToString() + message.type_url = type_url + + def _ConvertGenericMessage(self, value, message, path): + """Convert a JSON representation into message with FromJsonString.""" + # Duration, Timestamp, FieldMask have a FromJsonString method to do the + # conversion. Users can also call the method directly. + try: + message.FromJsonString(value) + except ValueError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + def _ConvertValueMessage(self, value, message, path): + """Convert a JSON representation into Value message.""" + if isinstance(value, dict): + self._ConvertStructMessage(value, message.struct_value, path) + elif isinstance(value, list): + self._ConvertListValueMessage(value, message.list_value, path) + elif value is None: + message.null_value = 0 + elif isinstance(value, bool): + message.bool_value = value + elif isinstance(value, str): + message.string_value = value + elif isinstance(value, _INT_OR_FLOAT): + message.number_value = value + else: + raise ParseError('Value {0} has unexpected type {1} at {2}'.format( + value, type(value), path)) + + def _ConvertListValueMessage(self, value, message, path): + """Convert a JSON representation into ListValue message.""" + if not isinstance(value, list): + raise ParseError('ListValue must be in [] which is {0} at {1}'.format( + value, path)) + message.ClearField('values') + for index, item in enumerate(value): + self._ConvertValueMessage(item, message.values.add(), + '{0}[{1}]'.format(path, index)) + + def _ConvertStructMessage(self, value, message, path): + """Convert a JSON representation into Struct message.""" + if not isinstance(value, dict): + raise ParseError('Struct must be in a dict which is {0} at {1}'.format( + value, path)) + # Clear will mark the struct as modified so it will be created even if + # there are no values. + message.Clear() + for key in value: + self._ConvertValueMessage(value[key], message.fields[key], + '{0}.{1}'.format(path, key)) + return + + def _ConvertWrapperMessage(self, value, message, path): + """Convert a JSON representation into Wrapper message.""" + field = message.DESCRIPTOR.fields_by_name['value'] + setattr( + message, 'value', + _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) + + def _ConvertMapFieldValue(self, value, message, field, path): + """Convert map field value for a message map field. + + Args: + value: A JSON object to convert the map field value. + message: A protocol message to record the converted data. + field: The descriptor of the map field to be converted. + path: parent path to log parse error info. + + Raises: + ParseError: In case of convert problems. 
+ """ + if not isinstance(value, dict): + raise ParseError( + 'Map field {0} must be in a dict which is {1} at {2}'.format( + field.name, value, path)) + key_field = field.message_type.fields_by_name['key'] + value_field = field.message_type.fields_by_name['value'] + for key in value: + key_value = _ConvertScalarFieldValue(key, key_field, + '{0}.key'.format(path), True) + if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self.ConvertMessage(value[key], + getattr(message, field.name)[key_value], + '{0}[{1}]'.format(path, key_value)) + else: + getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( + value[key], value_field, path='{0}[{1}]'.format(path, key_value)) + + +def _ConvertScalarFieldValue(value, field, path, require_str=False): + """Convert a single scalar field value. + + Args: + value: A scalar value to convert the scalar field value. + field: The descriptor of the field to convert. + path: parent path to log parse error info. + require_str: If True, the field value must be a str. + + Returns: + The converted scalar field value + + Raises: + ParseError: In case of convert problems. + """ + try: + if field.cpp_type in _INT_TYPES: + return _ConvertInteger(value) + elif field.cpp_type in _FLOAT_TYPES: + return _ConvertFloat(value, field) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + return _ConvertBool(value, require_str) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + if isinstance(value, str): + encoded = value.encode('utf-8') + else: + encoded = value + # Add extra padding '=' + padded_value = encoded + b'=' * (4 - len(encoded) % 4) + return base64.urlsafe_b64decode(padded_value) + else: + # Checking for unpaired surrogates appears to be unreliable, + # depending on the specific Python version, so we check manually. + if _UNPAIRED_SURROGATE_PATTERN.search(value): + raise ParseError('Unpaired surrogate') + return value + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + # Convert an enum value. + enum_value = field.enum_type.values_by_name.get(value, None) + if enum_value is None: + try: + number = int(value) + enum_value = field.enum_type.values_by_number.get(number, None) + except ValueError: + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + if enum_value is None: + if field.file.syntax == 'proto3': + # Proto3 accepts unknown enums. + return number + raise ParseError('Invalid enum value {0} for enum type {1}'.format( + value, field.enum_type.full_name)) + return enum_value.number + except ParseError as e: + raise ParseError('{0} at {1}'.format(e, path)) + + +def _ConvertInteger(value): + """Convert an integer. + + Args: + value: A scalar value to convert. + + Returns: + The integer value. + + Raises: + ParseError: If an integer couldn't be consumed. 
+ """ + if isinstance(value, float) and not value.is_integer(): + raise ParseError('Couldn\'t parse integer: {0}'.format(value)) + + if isinstance(value, str) and value.find(' ') != -1: + raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) + + if isinstance(value, bool): + raise ParseError('Bool value {0} is not acceptable for ' + 'integer field'.format(value)) + + return int(value) + + +def _ConvertFloat(value, field): + """Convert an floating point number.""" + if isinstance(value, float): + if math.isnan(value): + raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') + if math.isinf(value): + if value > 0: + raise ParseError('Couldn\'t parse Infinity or value too large, ' + 'use quoted "Infinity" instead') + else: + raise ParseError('Couldn\'t parse -Infinity or value too small, ' + 'use quoted "-Infinity" instead') + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + # pylint: disable=protected-access + if value > type_checkers._FLOAT_MAX: + raise ParseError('Float value too large') + # pylint: disable=protected-access + if value < type_checkers._FLOAT_MIN: + raise ParseError('Float value too small') + if value == 'nan': + raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') + try: + # Assume Python compatible syntax. + return float(value) + except ValueError: + # Check alternative spellings. + if value == _NEG_INFINITY: + return float('-inf') + elif value == _INFINITY: + return float('inf') + elif value == _NAN: + return float('nan') + else: + raise ParseError('Couldn\'t parse float: {0}'.format(value)) + + +def _ConvertBool(value, require_str): + """Convert a boolean value. + + Args: + value: A scalar value to convert. + require_str: If True, value must be a str. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + if require_str: + if value == 'true': + return True + elif value == 'false': + return False + else: + raise ParseError('Expected "true" or "false", not {0}'.format(value)) + + if not isinstance(value, bool): + raise ParseError('Expected true or false without quotes') + return value + +_WKTJSONMETHODS = { + 'google.protobuf.Any': ['_AnyMessageToJsonObject', + '_ConvertAnyMessage'], + 'google.protobuf.Duration': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', + '_ConvertListValueMessage'], + 'google.protobuf.Struct': ['_StructMessageToJsonObject', + '_ConvertStructMessage'], + 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', + '_ConvertGenericMessage'], + 'google.protobuf.Value': ['_ValueMessageToJsonObject', + '_ConvertValueMessage'] +} diff --git a/openpype/hosts/nuke/vendor/google/protobuf/message.py b/openpype/hosts/nuke/vendor/google/protobuf/message.py new file mode 100644 index 0000000000..76c6802f70 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/message.py @@ -0,0 +1,424 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): We should just make these methods all "pure-virtual" and move +# all implementation out, into reflection.py for now. + + +"""Contains an abstract base class for protocol messages.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +class Error(Exception): + """Base error type for this module.""" + pass + + +class DecodeError(Error): + """Exception raised when deserializing messages.""" + pass + + +class EncodeError(Error): + """Exception raised when serializing messages.""" + pass + + +class Message(object): + + """Abstract base class for protocol messages. + + Protocol message classes are almost always generated by the protocol + compiler. These generated types subclass Message and implement the methods + shown below. + """ + + # TODO(robinson): Link to an HTML document here. + + # TODO(robinson): Document that instances of this class will also + # have an Extensions attribute with __getitem__ and __setitem__. + # Again, not sure how to best convey this. + + # TODO(robinson): Document that the class must also have a static + # RegisterExtension(extension_field) method. + # Not sure how to best express at this point. + + # TODO(robinson): Document these fields and methods. + + __slots__ = [] + + #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. + DESCRIPTOR = None + + def __deepcopy__(self, memo=None): + clone = type(self)() + clone.MergeFrom(self) + return clone + + def __eq__(self, other_msg): + """Recursively compares two messages by value and structure.""" + raise NotImplementedError + + def __ne__(self, other_msg): + # Can't just say self != other_msg, since that would infinitely recurse. :) + return not self == other_msg + + def __hash__(self): + raise TypeError('unhashable object') + + def __str__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def __unicode__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def MergeFrom(self, other_msg): + """Merges the contents of the specified message into current message. + + This method merges the contents of the specified message into the current + message. Singular fields that are set in the specified message overwrite + the corresponding fields in the current message. Repeated fields are + appended. 
Singular sub-messages and groups are recursively merged. + + Args: + other_msg (Message): A message to merge into the current message. + """ + raise NotImplementedError + + def CopyFrom(self, other_msg): + """Copies the content of the specified message into the current message. + + The method clears the current message and then merges the specified + message using MergeFrom. + + Args: + other_msg (Message): A message to copy into the current one. + """ + if self is other_msg: + return + self.Clear() + self.MergeFrom(other_msg) + + def Clear(self): + """Clears all data that was set in the message.""" + raise NotImplementedError + + def SetInParent(self): + """Mark this as present in the parent. + + This normally happens automatically when you assign a field of a + sub-message, but sometimes you want to make the sub-message + present while keeping it empty. If you find yourself using this, + you may want to reconsider your design. + """ + raise NotImplementedError + + def IsInitialized(self): + """Checks if the message is initialized. + + Returns: + bool: The method returns True if the message is initialized (i.e. all of + its required fields are set). + """ + raise NotImplementedError + + # TODO(robinson): MergeFromString() should probably return None and be + # implemented in terms of a helper that returns the # of bytes read. Our + # deserialization routines would use the helper when recursively + # deserializing, but the end user would almost always just want the no-return + # MergeFromString(). + + def MergeFromString(self, serialized): + """Merges serialized protocol buffer data into this message. + + When we find a field in `serialized` that is already present + in this message: + + - If it's a "repeated" field, we append to the end of our list. + - Else, if it's a scalar, we overwrite our field. + - Else, (it's a nonrepeated composite), we recursively merge + into the existing composite. + + Args: + serialized (bytes): Any object that allows us to call + ``memoryview(serialized)`` to access a string of bytes using the + buffer interface. + + Returns: + int: The number of bytes read from `serialized`. + For non-group messages, this will always be `len(serialized)`, + but for messages which are actually groups, this will + generally be less than `len(serialized)`, since we must + stop when we reach an ``END_GROUP`` tag. Note that if + we *do* stop because of an ``END_GROUP`` tag, the number + of bytes returned does not include the bytes + for the ``END_GROUP`` tag information. + + Raises: + DecodeError: if the input cannot be parsed. + """ + # TODO(robinson): Document handling of unknown fields. + # TODO(robinson): When we switch to a helper, this will return None. + raise NotImplementedError + + def ParseFromString(self, serialized): + """Parse serialized protocol buffer data into this message. + + Like :func:`MergeFromString()`, except we clear the object first. + + Raises: + message.DecodeError if the input cannot be parsed. + """ + self.Clear() + return self.MergeFromString(serialized) + + def SerializeToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + A binary string representation of the message if all of the required + fields in the message are set (i.e. the message is initialized). + + Raises: + EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
+ """ + raise NotImplementedError + + def SerializePartialToString(self, **kwargs): + """Serializes the protocol message to a binary string. + + This method is similar to SerializeToString but doesn't check if the + message is initialized. + + Keyword Args: + deterministic (bool): If true, requests deterministic serialization + of the protobuf, with predictable ordering of map keys. + + Returns: + bytes: A serialized representation of the partial message. + """ + raise NotImplementedError + + # TODO(robinson): Decide whether we like these better + # than auto-generated has_foo() and clear_foo() methods + # on the instances themselves. This way is less consistent + # with C++, but it makes reflection-type access easier and + # reduces the number of magically autogenerated things. + # + # TODO(robinson): Be sure to document (and test) exactly + # which field names are accepted here. Are we case-sensitive? + # What do we do with fields that share names with Python keywords + # like 'lambda' and 'yield'? + # + # nnorwitz says: + # """ + # Typically (in python), an underscore is appended to names that are + # keywords. So they would become lambda_ or yield_. + # """ + def ListFields(self): + """Returns a list of (FieldDescriptor, value) tuples for present fields. + + A message field is non-empty if HasField() would return true. A singular + primitive field is non-empty if HasField() would return true in proto2 or it + is non zero in proto3. A repeated field is non-empty if it contains at least + one element. The fields are ordered by field number. + + Returns: + list[tuple(FieldDescriptor, value)]: field descriptors and values + for all fields in the message which are not empty. The values vary by + field type. + """ + raise NotImplementedError + + def HasField(self, field_name): + """Checks if a certain field is set for the message. + + For a oneof group, checks if any field inside is set. Note that if the + field_name is not defined in the message descriptor, :exc:`ValueError` will + be raised. + + Args: + field_name (str): The name of the field to check for presence. + + Returns: + bool: Whether a value has been set for the named field. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def ClearField(self, field_name): + """Clears the contents of a given field. + + Inside a oneof group, clears the field set. If the name neither refers to a + defined field or oneof group, :exc:`ValueError` is raised. + + Args: + field_name (str): The name of the field to check for presence. + + Raises: + ValueError: if the `field_name` is not a member of this message. + """ + raise NotImplementedError + + def WhichOneof(self, oneof_group): + """Returns the name of the field that is set inside a oneof group. + + If no field is set, returns None. + + Args: + oneof_group (str): the name of the oneof group to check. + + Returns: + str or None: The name of the group that is set, or None. + + Raises: + ValueError: no group with the given name exists + """ + raise NotImplementedError + + def HasExtension(self, extension_handle): + """Checks if a certain extension is present for this message. + + Extensions are retrieved using the :attr:`Extensions` mapping (if present). + + Args: + extension_handle: The handle for the extension to check. + + Returns: + bool: Whether the extension is present for this message. + + Raises: + KeyError: if the extension is repeated. 
Similar to repeated fields, + there is no separate notion of presence: a "not present" repeated + extension is an empty list. + """ + raise NotImplementedError + + def ClearExtension(self, extension_handle): + """Clears the contents of a given extension. + + Args: + extension_handle: The handle for the extension to clear. + """ + raise NotImplementedError + + def UnknownFields(self): + """Returns the UnknownFieldSet. + + Returns: + UnknownFieldSet: The unknown fields stored in this message. + """ + raise NotImplementedError + + def DiscardUnknownFields(self): + """Clears all fields in the :class:`UnknownFieldSet`. + + This operation is recursive for nested message. + """ + raise NotImplementedError + + def ByteSize(self): + """Returns the serialized size of this message. + + Recursively calls ByteSize() on all contained messages. + + Returns: + int: The number of bytes required to serialize this message. + """ + raise NotImplementedError + + @classmethod + def FromString(cls, s): + raise NotImplementedError + + @staticmethod + def RegisterExtension(extension_handle): + raise NotImplementedError + + def _SetListener(self, message_listener): + """Internal method used by the protocol message implementation. + Clients should not call this directly. + + Sets a listener that this message will call on certain state transitions. + + The purpose of this method is to register back-edges from children to + parents at runtime, for the purpose of setting "has" bits and + byte-size-dirty bits in the parent and ancestor objects whenever a child or + descendant object is modified. + + If the client wants to disconnect this Message from the object tree, she + explicitly sets callback to None. + + If message_listener is None, unregisters any existing listener. Otherwise, + message_listener must implement the MessageListener interface in + internal/message_listener.py, and we discard any listener registered + via a previous _SetListener() call. + """ + raise NotImplementedError + + def __getstate__(self): + """Support the pickle protocol.""" + return dict(serialized=self.SerializePartialToString()) + + def __setstate__(self, state): + """Support the pickle protocol.""" + self.__init__() + serialized = state['serialized'] + # On Python 3, using encoding='latin1' is required for unpickling + # protos pickled by Python 2. + if not isinstance(serialized, bytes): + serialized = serialized.encode('latin1') + self.ParseFromString(serialized) + + def __reduce__(self): + message_descriptor = self.DESCRIPTOR + if message_descriptor.containing_type is None: + return type(self), (), self.__getstate__() + # the message type must be nested. + # Python does not pickle nested classes; use the symbol_database on the + # receiving end. + container = message_descriptor + return (_InternalConstructMessage, (container.full_name,), + self.__getstate__()) + + +def _InternalConstructMessage(full_name): + """Constructs a nested message.""" + from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top + + return symbol_database.Default().GetSymbol(full_name)() diff --git a/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py b/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py new file mode 100644 index 0000000000..3656fa6874 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/message_factory.py @@ -0,0 +1,185 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides a factory class for generating dynamic messages. + +The easiest way to use this class is if you have access to the FileDescriptor +protos containing the messages you want to create you can just do the following: + +message_classes = message_factory.GetMessages(iterable_of_file_descriptors) +my_proto_instance = message_classes['some.proto.package.MessageName']() +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message + +if api_implementation.Type() == 'cpp': + from google.protobuf.pyext import cpp_message as message_impl +else: + from google.protobuf.internal import python_message as message_impl + + +# The type of all Message classes. +_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType + + +class MessageFactory(object): + """Factory for creating Proto2 messages from descriptors in a pool.""" + + def __init__(self, pool=None): + """Initializes a new factory.""" + self.pool = pool or descriptor_pool.DescriptorPool() + + # local cache of all classes built from protobuf descriptors + self._classes = {} + + def GetPrototype(self, descriptor): + """Obtains a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + if descriptor not in self._classes: + result_class = self.CreatePrototype(descriptor) + # The assignment to _classes is redundant for the base implementation, but + # might avoid confusion in cases where CreatePrototype gets overridden and + # does not call the base implementation. 
+ self._classes[descriptor] = result_class + return result_class + return self._classes[descriptor] + + def CreatePrototype(self, descriptor): + """Builds a proto2 message class based on the passed in descriptor. + + Don't call this function directly, it always creates a new class. Call + GetPrototype() instead. This method is meant to be overridden in subblasses + to perform additional operations on the newly constructed class. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + descriptor_name = descriptor.name + result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( + descriptor_name, + (message.Message,), + { + 'DESCRIPTOR': descriptor, + # If module not set, it wrongly points to message_factory module. + '__module__': None, + }) + result_class._FACTORY = self # pylint: disable=protected-access + # Assign in _classes before doing recursive calls to avoid infinite + # recursion. + self._classes[descriptor] = result_class + for field in descriptor.fields: + if field.message_type: + self.GetPrototype(field.message_type) + for extension in result_class.DESCRIPTOR.extensions: + if extension.containing_type not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type] + extended_class.RegisterExtension(extension) + return result_class + + def GetMessages(self, files): + """Gets all the messages from a specified file. + + This will find and resolve dependencies, failing if the descriptor + pool cannot satisfy them. + + Args: + files: The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for desc in file_desc.message_types_by_name.values(): + result[desc.full_name] = self.GetPrototype(desc) + + # While the extension FieldDescriptors are created by the descriptor pool, + # the python classes created in the factory need them to be registered + # explicitly, which is done below. + # + # The call to RegisterExtension will specifically check if the + # extension was already registered on the object and either + # ignore the registration if the original was the same, or raise + # an error if they were different. + + for extension in file_desc.extensions_by_name.values(): + if extension.containing_type not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type] + extended_class.RegisterExtension(extension) + return result + + +_FACTORY = MessageFactory() + + +def GetMessages(file_protos): + """Builds a dictionary of all the messages available in a set of files. + + Args: + file_protos: Iterable of FileDescriptorProto to build messages out of. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + # The cpp implementation of the protocol buffer library requires to add the + # message in topological order of the dependency graph. + file_by_name = {file_proto.name: file_proto for file_proto in file_protos} + def _AddFile(file_proto): + for dependency in file_proto.dependency: + if dependency in file_by_name: + # Remove from elements to be visited, in order to cut cycles. 
+ _AddFile(file_by_name.pop(dependency)) + _FACTORY.pool.Add(file_proto) + while file_by_name: + _AddFile(file_by_name.popitem()[1]) + return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py b/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py new file mode 100644 index 0000000000..a4667ce63e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/proto_builder.py @@ -0,0 +1,134 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Dynamic Protobuf class creator.""" + +from collections import OrderedDict +import hashlib +import os + +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor +from google.protobuf import message_factory + + +def _GetMessageFromFactory(factory, full_name): + """Get a proto class from the MessageFactory by name. + + Args: + factory: a MessageFactory instance. + full_name: str, the fully qualified name of the proto type. + Returns: + A class, for the type identified by full_name. + Raises: + KeyError, if the proto is not found in the factory's descriptor pool. + """ + proto_descriptor = factory.pool.FindMessageTypeByName(full_name) + proto_cls = factory.GetPrototype(proto_descriptor) + return proto_cls + + +def MakeSimpleProtoClass(fields, full_name=None, pool=None): + """Create a Protobuf class whose fields are basic types. + + Note: this doesn't validate field names! + + Args: + fields: dict of {name: field_type} mappings for each field in the proto. If + this is an OrderedDict the order will be maintained, otherwise the + fields will be sorted by name. + full_name: optional str, the fully-qualified name of the proto type. + pool: optional DescriptorPool instance. + Returns: + a class, the new protobuf class with a FileDescriptor. 
+ """ + factory = message_factory.MessageFactory(pool=pool) + + if full_name is not None: + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # Get a list of (name, field_type) tuples from the fields dict. If fields was + # an OrderedDict we keep the order, but otherwise we sort the field to ensure + # consistent ordering. + field_items = fields.items() + if not isinstance(fields, OrderedDict): + field_items = sorted(field_items) + + # Use a consistent file name that is unlikely to conflict with any imported + # proto files. + fields_hash = hashlib.sha1() + for f_name, f_type in field_items: + fields_hash.update(f_name.encode('utf-8')) + fields_hash.update(str(f_type).encode('utf-8')) + proto_file_name = fields_hash.hexdigest() + '.proto' + + # If the proto is anonymous, use the same hash to name it. + if full_name is None: + full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + + fields_hash.hexdigest()) + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # This is the first time we see this proto: add a new descriptor to the pool. + factory.pool.Add( + _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) + return _GetMessageFromFactory(factory, full_name) + + +def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): + """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" + package, name = full_name.rsplit('.', 1) + file_proto = descriptor_pb2.FileDescriptorProto() + file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) + file_proto.package = package + desc_proto = file_proto.message_type.add() + desc_proto.name = name + for f_number, (f_name, f_type) in enumerate(field_items, 1): + field_proto = desc_proto.field.add() + field_proto.name = f_name + # # If the number falls in the reserved range, reassign it to the correct + # # number after the range. + if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: + f_number += ( + descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - + descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) + field_proto.number = f_number + field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL + field_proto.type = f_type + return file_proto diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py new file mode 100644 index 0000000000..fc8eb32d79 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/pyext/cpp_message.py @@ -0,0 +1,65 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Protocol message implementation hooks for C++ implementation. + +Contains helper functions used to create protocol message classes from +Descriptor objects at runtime backed by the protocol buffer C++ API. +""" + +__author__ = 'tibell@google.com (Johan Tibell)' + +from google.protobuf.pyext import _message + + +class GeneratedProtocolMessageType(_message.MessageMeta): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + factory = symbol_database.Default() + factory.pool.AddDescriptor(mydescriptor) + MyProtoClass = factory.GetPrototype(mydescriptor) + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py new file mode 100644 index 0000000000..2c6ecf4c98 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/pyext/python_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
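The cpp_message metaclass above only comes into play when the C++ extension backs the Python API. A quick way to check which backend is active, using the existing api_implementation helper (a sketch, not part of this patch):

    from google.protobuf.internal import api_implementation

    # 'cpp' means descriptors and messages are backed by the _message extension;
    # 'python' means the pure-Python implementation is in use.
    print(api_implementation.Type())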
+# source: google/protobuf/pyext/python.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestAllExtensions.RegisterExtension(optional_nested_message_extension) + TestAllExtensions.RegisterExtension(repeated_nested_message_extension) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'H\001' + _TESTALLTYPES._serialized_start=72 + _TESTALLTYPES._serialized_end=388 + _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 + _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 + _FOREIGNMESSAGE._serialized_start=390 + _FOREIGNMESSAGE._serialized_end=428 + _TESTALLEXTENSIONS._serialized_start=430 + _TESTALLEXTENSIONS._serialized_end=459 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/reflection.py b/openpype/hosts/nuke/vendor/google/protobuf/reflection.py new file mode 100644 index 0000000000..81e18859a8 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/reflection.py @@ -0,0 +1,95 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
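python.proto is an internal protobuf test fixture, but the generated module above is a handy illustration of how top-level extension symbols are exposed; the snippet below is illustrative only:

    from google.protobuf.pyext import python_pb2

    # TestAllExtensions reserves an extension range; the extensions generated
    # above are registered against it and set through the Extensions map.
    container = python_pb2.TestAllExtensions()
    container.Extensions[python_pb2.optional_nested_message_extension].bb = 7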
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) + +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +from google.protobuf import message_factory +from google.protobuf import symbol_database + +# The type of all Message classes. +# Part of the public interface, but normally only used by message factories. +GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE + +MESSAGE_CLASS_CACHE = {} + + +# Deprecated. Please NEVER use reflection.ParseMessage(). +def ParseMessage(descriptor, byte_str): + """Generate a new Message instance from this Descriptor and a byte string. + + DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). + Please use MessageFactory.GetPrototype() instead. + + Args: + descriptor: Protobuf Descriptor object + byte_str: Serialized protocol buffer byte string + + Returns: + Newly created protobuf Message object. + """ + result_class = MakeClass(descriptor) + new_msg = result_class() + new_msg.ParseFromString(byte_str) + return new_msg + + +# Deprecated. Please NEVER use reflection.MakeClass(). +def MakeClass(descriptor): + """Construct a class object for a protobuf described by descriptor. + + DEPRECATED: use MessageFactory.GetPrototype() instead. + + Args: + descriptor: A descriptor.Descriptor object describing the protobuf. + Returns: + The Message class object described by the descriptor. + """ + # Original implementation leads to duplicate message classes, which won't play + # well with extensions. Message factory info is also missing. + # Redirect to message_factory. + return symbol_database.Default().GetPrototype(descriptor) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/service.py b/openpype/hosts/nuke/vendor/google/protobuf/service.py new file mode 100644 index 0000000000..5625246324 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/service.py @@ -0,0 +1,228 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
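The recommended replacement for the deprecated reflection helpers above is to go through the default symbol database; 'msg_descriptor' and 'payload' below are assumed inputs (a descriptor.Descriptor and serialized bytes of that type):

    from google.protobuf import symbol_database

    MsgClass = symbol_database.Default().GetPrototype(msg_descriptor)
    message = MsgClass.FromString(payload)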
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""DEPRECATED: Declares the RPC service interfaces. + +This module declares the abstract interfaces underlying proto2 RPC +services. These are intended to be independent of any particular RPC +implementation, so that proto2 services can be used on top of a variety +of implementations. Starting with version 2.3.0, RPC implementations should +not try to build on these, but should instead provide code generator plugins +which generate code specific to the particular RPC implementation. This way +the generated code can be more appropriate for the implementation in use +and can avoid unnecessary layers of indirection. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class RpcException(Exception): + """Exception raised on failed blocking RPC method call.""" + pass + + +class Service(object): + + """Abstract base interface for protocol-buffer-based RPC services. + + Services themselves are abstract classes (implemented either by servers or as + stubs), but they subclass this base interface. The methods of this + interface can be used to call the methods of the service without knowing + its exact type at compile time (analogous to the Message interface). + """ + + def GetDescriptor(): + """Retrieves this service's descriptor.""" + raise NotImplementedError + + def CallMethod(self, method_descriptor, rpc_controller, + request, done): + """Calls a method of the service specified by method_descriptor. + + If "done" is None then the call is blocking and the response + message will be returned directly. Otherwise the call is asynchronous + and "done" will later be called with the response value. + + In the blocking case, RpcException will be raised on error. + + Preconditions: + + * method_descriptor.service == GetDescriptor + * request is of the exact same classes as returned by + GetRequestClass(method). + * After the call has started, the request must not be modified. + * "rpc_controller" is of the correct type for the RPC implementation being + used by this Service. 
For stubs, the "correct type" depends on the + RpcChannel which the stub is using. + + Postconditions: + + * "done" will be called when the method is complete. This may be + before CallMethod() returns or it may be at some point in the future. + * If the RPC failed, the response value passed to "done" will be None. + Further details about the failure can be found by querying the + RpcController. + """ + raise NotImplementedError + + def GetRequestClass(self, method_descriptor): + """Returns the class of the request message for the specified method. + + CallMethod() requires that the request is of a particular subclass of + Message. GetRequestClass() gets the default instance of this required + type. + + Example: + method = service.GetDescriptor().FindMethodByName("Foo") + request = stub.GetRequestClass(method)() + request.ParseFromString(input) + service.CallMethod(method, request, callback) + """ + raise NotImplementedError + + def GetResponseClass(self, method_descriptor): + """Returns the class of the response message for the specified method. + + This method isn't really needed, as the RpcChannel's CallMethod constructs + the response protocol message. It's provided anyway in case it is useful + for the caller to know the response type in advance. + """ + raise NotImplementedError + + +class RpcController(object): + + """An RpcController mediates a single method call. + + The primary purpose of the controller is to provide a way to manipulate + settings specific to the RPC implementation and to find out about RPC-level + errors. The methods provided by the RpcController interface are intended + to be a "least common denominator" set of features which we expect all + implementations to support. Specific implementations may provide more + advanced features (e.g. deadline propagation). + """ + + # Client-side methods below + + def Reset(self): + """Resets the RpcController to its initial state. + + After the RpcController has been reset, it may be reused in + a new call. Must not be called while an RPC is in progress. + """ + raise NotImplementedError + + def Failed(self): + """Returns true if the call failed. + + After a call has finished, returns true if the call failed. The possible + reasons for failure depend on the RPC implementation. Failed() must not + be called before a call has finished. If Failed() returns true, the + contents of the response message are undefined. + """ + raise NotImplementedError + + def ErrorText(self): + """If Failed is true, returns a human-readable description of the error.""" + raise NotImplementedError + + def StartCancel(self): + """Initiate cancellation. + + Advises the RPC system that the caller desires that the RPC call be + canceled. The RPC system may cancel it immediately, may wait awhile and + then cancel it, or may not even cancel the call at all. If the call is + canceled, the "done" callback will still be called and the RpcController + will indicate that the call failed at that time. + """ + raise NotImplementedError + + # Server-side methods below + + def SetFailed(self, reason): + """Sets a failure reason. + + Causes Failed() to return true on the client side. "reason" will be + incorporated into the message returned by ErrorText(). If you find + you need to return machine-readable information about failures, you + should incorporate it into your response protocol buffer and should + NOT call SetFailed(). + """ + raise NotImplementedError + + def IsCanceled(self): + """Checks if the client cancelled the RPC. 
+ + If true, indicates that the client canceled the RPC, so the server may + as well give up on replying to it. The server should still call the + final "done" callback. + """ + raise NotImplementedError + + def NotifyOnCancel(self, callback): + """Sets a callback to invoke on cancel. + + Asks that the given callback be called when the RPC is canceled. The + callback will always be called exactly once. If the RPC completes without + being canceled, the callback will be called after completion. If the RPC + has already been canceled when NotifyOnCancel() is called, the callback + will be called immediately. + + NotifyOnCancel() must be called no more than once per request. + """ + raise NotImplementedError + + +class RpcChannel(object): + + """Abstract interface for an RPC channel. + + An RpcChannel represents a communication line to a service which can be used + to call that service's methods. The service may be running on another + machine. Normally, you should not use an RpcChannel directly, but instead + construct a stub {@link Service} wrapping it. Example: + + Example: + RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") + RpcController controller = rpcImpl.Controller() + MyService service = MyService_Stub(channel) + service.MyMethod(controller, request, callback) + """ + + def CallMethod(self, method_descriptor, rpc_controller, + request, response_class, done): + """Calls the method identified by the descriptor. + + Call the given method of the remote service. The signature of this + procedure looks the same as Service.CallMethod(), but the requirements + are less strict in one important way: the request object doesn't have to + be of any specific class as long as its descriptor is method.input_type. + """ + raise NotImplementedError diff --git a/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py b/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py new file mode 100644 index 0000000000..f82ab7145a --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/service_reflection.py @@ -0,0 +1,295 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains metaclasses used to create protocol service and service stub +classes from ServiceDescriptor objects at runtime. + +The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to +inject all useful functionality into the classes output by the protocol +compiler at compile-time. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class GeneratedServiceType(type): + + """Metaclass for service classes created at runtime from ServiceDescriptors. + + Implementations for all methods described in the Service class are added here + by this class. We also create properties to allow getting/setting all fields + in the protocol message. + + The protocol compiler currently uses this metaclass to create protocol service + classes at runtime. Clients can also manually create their own classes at + runtime, as in this example:: + + mydescriptor = ServiceDescriptor(.....) + class MyProtoService(service.Service): + __metaclass__ = GeneratedServiceType + DESCRIPTOR = mydescriptor + myservice_instance = MyProtoService() + # ... + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service class. + + Args: + name: Name of the class (ignored, but required by the metaclass + protocol). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service class is subclassed. + if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] + service_builder = _ServiceBuilder(descriptor) + service_builder.BuildService(cls) + cls.DESCRIPTOR = descriptor + + +class GeneratedServiceStubType(GeneratedServiceType): + + """Metaclass for service stubs created at runtime from ServiceDescriptors. + + This class has similar responsibilities as GeneratedServiceType, except that + it creates the service stub classes. + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service stub class. + + Args: + name: Name of the class (ignored, here). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service stub is subclassed. 
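Generated *_pb2 modules apply these metaclasses roughly as follows (a Python 3 sketch; MY_SERVICE_DESCRIPTOR stands in for a ServiceDescriptor obtained from a descriptor pool or generated module):

    from google.protobuf import service
    from google.protobuf import service_reflection

    class MyService(service.Service,
                    metaclass=service_reflection.GeneratedServiceType):
        DESCRIPTOR = MY_SERVICE_DESCRIPTOR

    class MyService_Stub(MyService,
                         metaclass=service_reflection.GeneratedServiceStubType):
        DESCRIPTOR = MY_SERVICE_DESCRIPTOR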
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: + return + + descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] + service_stub_builder = _ServiceStubBuilder(descriptor) + service_stub_builder.BuildServiceStub(cls) + + +class _ServiceBuilder(object): + + """This class constructs a protocol service class using a service descriptor. + + Given a service descriptor, this class constructs a class that represents + the specified service descriptor. One service builder instance constructs + exactly one service class. That means all instances of that class share the + same builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + service class. + """ + self.descriptor = service_descriptor + + def BuildService(builder, cls): + """Constructs the service class. + + Args: + cls: The class that will be constructed. + """ + + # CallMethod needs to operate with an instance of the Service class. This + # internal wrapper function exists only to be able to pass the service + # instance to the method that does the real CallMethod work. + # Making sure to use exact argument names from the abstract interface in + # service.py to match the type signature + def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): + return builder._CallMethod(self, method_descriptor, rpc_controller, + request, done) + + def _WrapGetRequestClass(self, method_descriptor): + return builder._GetRequestClass(method_descriptor) + + def _WrapGetResponseClass(self, method_descriptor): + return builder._GetResponseClass(method_descriptor) + + builder.cls = cls + cls.CallMethod = _WrapCallMethod + cls.GetDescriptor = staticmethod(lambda: builder.descriptor) + cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' + cls.GetRequestClass = _WrapGetRequestClass + cls.GetResponseClass = _WrapGetResponseClass + for method in builder.descriptor.methods: + setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) + + def _CallMethod(self, srvc, method_descriptor, + rpc_controller, request, callback): + """Calls the method described by a given method descriptor. + + Args: + srvc: Instance of the service for which this method is called. + method_descriptor: Descriptor that represent the method to call. + rpc_controller: RPC controller to use for this method's execution. + request: Request protocol message. + callback: A callback to invoke after the method has completed. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'CallMethod() given method descriptor for wrong service type.') + method = getattr(srvc, method_descriptor.name) + return method(rpc_controller, request, callback) + + def _GetRequestClass(self, method_descriptor): + """Returns the class of the request protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + request protocol message class. + + Returns: + A class that represents the input protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetRequestClass() given method descriptor for wrong service type.') + return method_descriptor.input_type._concrete_class + + def _GetResponseClass(self, method_descriptor): + """Returns the class of the response protocol message. 
+ + Args: + method_descriptor: Descriptor of the method for which to return the + response protocol message class. + + Returns: + A class that represents the output protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetResponseClass() given method descriptor for wrong service type.') + return method_descriptor.output_type._concrete_class + + def _GenerateNonImplementedMethod(self, method): + """Generates and returns a method that can be set for a service methods. + + Args: + method: Descriptor of the service method for which a method is to be + generated. + + Returns: + A method that can be added to the service class. + """ + return lambda inst, rpc_controller, request, callback: ( + self._NonImplementedMethod(method.name, rpc_controller, callback)) + + def _NonImplementedMethod(self, method_name, rpc_controller, callback): + """The body of all methods in the generated service class. + + Args: + method_name: Name of the method being executed. + rpc_controller: RPC controller used to execute this method. + callback: A callback which will be invoked when the method finishes. + """ + rpc_controller.SetFailed('Method %s not implemented.' % method_name) + callback(None) + + +class _ServiceStubBuilder(object): + + """Constructs a protocol service stub class using a service descriptor. + + Given a service descriptor, this class constructs a suitable stub class. + A stub is just a type-safe wrapper around an RpcChannel which emulates a + local implementation of the service. + + One service stub builder instance constructs exactly one class. It means all + instances of that class share the same service stub builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service stub class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + stub class. + """ + self.descriptor = service_descriptor + + def BuildServiceStub(self, cls): + """Constructs the stub class. + + Args: + cls: The class that will be constructed. + """ + + def _ServiceStubInit(stub, rpc_channel): + stub.rpc_channel = rpc_channel + self.cls = cls + cls.__init__ = _ServiceStubInit + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateStubMethod(method)) + + def _GenerateStubMethod(self, method): + return (lambda inst, rpc_controller, request, callback=None: + self._StubMethod(inst, method, rpc_controller, request, callback)) + + def _StubMethod(self, stub, method_descriptor, + rpc_controller, request, callback): + """The body of all service methods in the generated stub class. + + Args: + stub: Stub instance. + method_descriptor: Descriptor of the invoked method. + rpc_controller: Rpc controller to execute the method. + request: Request protocol message. + callback: A callback to execute when the method finishes. + Returns: + Response message (in case of blocking call). + """ + return stub.rpc_channel.CallMethod( + method_descriptor, rpc_controller, request, + method_descriptor.output_type._concrete_class, callback) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py new file mode 100644 index 0000000000..30cca2e06e --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/source_context_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
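Continuing that sketch, a stub class built by _ServiceStubBuilder is used by wrapping an RpcChannel; 'channel', 'controller', 'request', and the MyMethod name are all assumed to exist and are not defined by this patch:

    # Each stub method forwards to channel.CallMethod(); whether passing no
    # callback makes the call blocking depends on the channel implementation.
    stub = MyService_Stub(channel)
    response = stub.MyMethod(controller, request, None)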
+# source: google/protobuf/source_context.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SOURCECONTEXT._serialized_start=57 + _SOURCECONTEXT._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py new file mode 100644 index 0000000000..149728ca08 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/struct_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/struct.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _STRUCT_FIELDSENTRY._options = None + _STRUCT_FIELDSENTRY._serialized_options = b'8\001' + _NULLVALUE._serialized_start=474 + _NULLVALUE._serialized_end=501 + _STRUCT._serialized_start=50 + _STRUCT._serialized_end=182 + _STRUCT_FIELDSENTRY._serialized_start=113 + _STRUCT_FIELDSENTRY._serialized_end=182 + _VALUE._serialized_start=185 + _VALUE._serialized_end=419 + _LISTVALUE._serialized_start=421 + _LISTVALUE._serialized_end=472 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py b/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py new file mode 100644 index 0000000000..fdcf8cf06c --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/symbol_database.py @@ -0,0 +1,194 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A database of Python protocol buffer generated symbols. + +SymbolDatabase is the MessageFactory for messages generated at compile time, +and makes it easy to create new instances of a registered type, given only the +type's protocol buffer symbol name. + +Example usage:: + + db = symbol_database.SymbolDatabase() + + # Register symbols of interest, from one or multiple files. 
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) + db.RegisterMessage(my_proto_pb2.MyMessage) + db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) + + # The database can be used as a MessageFactory, to generate types based on + # their name: + types = db.GetMessages(['my_proto.proto']) + my_message_instance = types['MyMessage']() + + # The database's underlying descriptor pool can be queried, so it's not + # necessary to know a type's filename to be able to generate it: + filename = db.pool.FindFileContainingSymbol('MyMessage') + my_message_instance = db.GetMessages([filename])['MyMessage']() + + # This functionality is also provided directly via a convenience method: + my_message_instance = db.GetSymbol('MyMessage')() +""" + + +from google.protobuf.internal import api_implementation +from google.protobuf import descriptor_pool +from google.protobuf import message_factory + + +class SymbolDatabase(message_factory.MessageFactory): + """A database of Python generated symbols.""" + + def RegisterMessage(self, message): + """Registers the given message type in the local database. + + Calls to GetSymbol() and GetMessages() will return messages registered here. + + Args: + message: A :class:`google.protobuf.message.Message` subclass (or + instance); its descriptor will be registered. + + Returns: + The provided message. + """ + + desc = message.DESCRIPTOR + self._classes[desc] = message + self.RegisterMessageDescriptor(desc) + return message + + def RegisterMessageDescriptor(self, message_descriptor): + """Registers the given message descriptor in the local database. + + Args: + message_descriptor (Descriptor): the message descriptor to add. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddDescriptor(message_descriptor) + + def RegisterEnumDescriptor(self, enum_descriptor): + """Registers the given enum descriptor in the local database. + + Args: + enum_descriptor (EnumDescriptor): The enum descriptor to register. + + Returns: + EnumDescriptor: The provided descriptor. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddEnumDescriptor(enum_descriptor) + return enum_descriptor + + def RegisterServiceDescriptor(self, service_descriptor): + """Registers the given service descriptor in the local database. + + Args: + service_descriptor (ServiceDescriptor): the service descriptor to + register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._AddServiceDescriptor(service_descriptor) + + def RegisterFileDescriptor(self, file_descriptor): + """Registers the given file descriptor in the local database. + + Args: + file_descriptor (FileDescriptor): The file descriptor to register. + """ + if api_implementation.Type() == 'python': + # pylint: disable=protected-access + self.pool._InternalAddFileDescriptor(file_descriptor) + + def GetSymbol(self, symbol): + """Tries to find a symbol in the local database. + + Currently, this method only returns message.Message instances, however, if + may be extended in future to support other symbol types. + + Args: + symbol (str): a protocol buffer symbol. + + Returns: + A Python class corresponding to the symbol. + + Raises: + KeyError: if the symbol could not be found. + """ + + return self._classes[self.pool.FindMessageTypeByName(symbol)] + + def GetMessages(self, files): + # TODO(amauryfa): Fix the differences with MessageFactory. + """Gets all registered messages from a specified file. 
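A condensed version of the registration flow described in the module docstring above; 'my_proto_pb2', 'MyMessage', and the full name 'mypackage.MyMessage' are placeholders for any generated module and its package:

    from google.protobuf import symbol_database

    db = symbol_database.Default()
    db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
    db.RegisterMessage(my_proto_pb2.MyMessage)

    # Look the registered class up by its assumed fully-qualified name.
    instance = db.GetSymbol('mypackage.MyMessage')()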
+ + Only messages already created and registered will be returned; (this is the + case for imported _pb2 modules) + But unlike MessageFactory, this version also returns already defined nested + messages, but does not register any message extensions. + + Args: + files (list[str]): The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. + + Raises: + KeyError: if a file could not be found. + """ + + def _GetAllMessages(desc): + """Walk a message Descriptor and recursively yields all message names.""" + yield desc + for msg_desc in desc.nested_types: + for nested_desc in _GetAllMessages(msg_desc): + yield nested_desc + + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for msg_desc in file_desc.message_types_by_name.values(): + for desc in _GetAllMessages(msg_desc): + try: + result[desc.full_name] = self._classes[desc] + except KeyError: + # This descriptor has no registered class, skip it. + pass + return result + + +_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) + + +def Default(): + """Returns the default SymbolDatabase.""" + return _DEFAULT diff --git a/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py b/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py new file mode 100644 index 0000000000..759cf11f62 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/text_encoding.py @@ -0,0 +1,110 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Encoding related utilities.""" +import re + +_cescape_chr_to_symbol_map = {} +_cescape_chr_to_symbol_map[9] = r'\t' # optional escape +_cescape_chr_to_symbol_map[10] = r'\n' # optional escape +_cescape_chr_to_symbol_map[13] = r'\r' # optional escape +_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape +_cescape_chr_to_symbol_map[39] = r"\'" # optional escape +_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape + +# Lookup table for unicode +_cescape_unicode_to_str = [chr(i) for i in range(0, 256)] +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_unicode_to_str[byte] = string + +# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) +_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + + [chr(i) for i in range(32, 127)] + + [r'\%03o' % i for i in range(127, 256)]) +for byte, string in _cescape_chr_to_symbol_map.items(): + _cescape_byte_to_str[byte] = string +del byte, string + + +def CEscape(text, as_utf8): + # type: (...) -> str + """Escape a bytes string for use in an text protocol buffer. + + Args: + text: A byte string to be escaped. + as_utf8: Specifies if result may contain non-ASCII characters. + In Python 3 this allows unescaped non-ASCII Unicode characters. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + Returns: + Escaped string (str). + """ + # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not + # satisfy our needs; they encodes unprintable characters using two-digit hex + # escapes whereas our C++ unescaping function allows hex escapes to be any + # length. So, "\0011".encode('string_escape') ends up being "\\x011", which + # will be decoded in C++ as a single-character string with char code 0x11. + text_is_unicode = isinstance(text, str) + if as_utf8 and text_is_unicode: + # We're already unicode, no processing beyond control char escapes. + return text.translate(_cescape_chr_to_symbol_map) + ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints. + if as_utf8: + return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) + return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) + + +_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') + + +def CUnescape(text): + # type: (str) -> bytes + """Unescape a text string with C-style escape sequences to UTF-8 bytes. + + Args: + text: The data to parse in a str. + Returns: + A byte string. + """ + + def ReplaceHex(m): + # Only replace the match if the number of leading back slashes is odd. i.e. + # the slash itself is not escaped. + if len(m.group(1)) & 1: + return m.group(1) + 'x0' + m.group(2) + return m.group(0) + + # This is required because the 'string_escape' encoding doesn't + # allow single-digit hex escapes (like '\xf'). + result = _CUNESCAPE_HEX.sub(ReplaceHex, text) + + return (result.encode('utf-8') # Make it bytes to allow decode. + .decode('unicode_escape') + # Make it bytes again to return the proper type. + .encode('raw_unicode_escape')) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/text_format.py b/openpype/hosts/nuke/vendor/google/protobuf/text_format.py new file mode 100644 index 0000000000..412385c26f --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/text_format.py @@ -0,0 +1,1795 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
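A small round-trip sketch for the CEscape()/CUnescape() pair defined above:

    from google.protobuf import text_encoding

    raw = b'\x00\xffabc'
    # Non-printable bytes come back as octal escapes in a plain str.
    escaped = text_encoding.CEscape(raw, as_utf8=False)
    assert text_encoding.CUnescape(escaped) == raw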
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in text format. + +Simple usage example:: + + # Create a proto object and serialize it to a text proto string. + message = my_proto_pb2.MyMessage(foo='bar') + text_proto = text_format.MessageToString(message) + + # Parse a text proto string. + message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +# TODO(b/129989314) Import thread contention leads to test failures. 
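A slightly fuller version of the round trip shown in the module docstring, using the one-line form; 'my_proto_pb2.MyMessage' is a stand-in for any generated message type:

    from google.protobuf import text_format

    message = my_proto_pb2.MyMessage(foo='bar')
    one_line = text_format.MessageToString(message, as_one_line=True)
    parsed = text_format.Parse(one_line, my_proto_pb2.MyMessage())
    assert parsed == message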
+import encodings.raw_unicode_escape # pylint: disable=unused-import +import encodings.unicode_escape # pylint: disable=unused-import +import io +import math +import re + +from google.protobuf.internal import decoder +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import text_encoding + +# pylint: disable=g-import-not-at-top +__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', + 'PrintFieldValue', 'Merge', 'MessageToBytes'] + +_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), + type_checkers.Int32ValueChecker(), + type_checkers.Uint64ValueChecker(), + type_checkers.Int64ValueChecker()) +_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) +_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) +_QUOTES = frozenset(("'", '"')) +_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' + + +class Error(Exception): + """Top-level module error for text_format.""" + + +class ParseError(Error): + """Thrown in case of text parsing or tokenizing error.""" + + def __init__(self, message=None, line=None, column=None): + if message is not None and line is not None: + loc = str(line) + if column is not None: + loc += ':{0}'.format(column) + message = '{0} : {1}'.format(loc, message) + if message is not None: + super(ParseError, self).__init__(message) + else: + super(ParseError, self).__init__() + self._line = line + self._column = column + + def GetLine(self): + return self._line + + def GetColumn(self): + return self._column + + +class TextWriter(object): + + def __init__(self, as_utf8): + self._writer = io.StringIO() + + def write(self, val): + return self._writer.write(val) + + def close(self): + return self._writer.close() + + def getvalue(self): + return self._writer.getvalue() + + +def MessageToString( + message, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + indent=0, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + # type: (...) -> str + """Convert protobuf message to text format. + + Double values can be formatted compactly with 15 digits of + precision (which is the most that IEEE 754 "double" can guarantee) + using double_format='.15g'. To ensure that converting to text and back to a + proto will result in an identical value, double_format='.17g' should be used. + + Args: + message: The protocol buffers message. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than only ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, fields of a proto message will be printed using + the order defined in source code instead of the field number, extensions + will be printed at the end of the message and their relative order is + determined by the extension number. By default, use the field number + order. + float_format (str): If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest float + that has same value in wire will be printed. Also affect double field + if double_format is not set but float_format is set. 
+ double_format (str): If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, use ``str()`` + use_field_number: If True, print field numbers instead of names. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + indent (int): The initial indent level, in terms of spaces, for pretty + print. + message_formatter (function(message, indent, as_one_line) -> unicode|None): + Custom formatter for selected sub-messages (usually based on message + type). Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if the + field is a proto message. + + Returns: + str: A string of the text formatted protocol buffer message. + """ + out = TextWriter(as_utf8) + printer = _Printer( + out, + indent, + as_utf8, + as_one_line, + use_short_repeated_primitives, + pointy_brackets, + use_index_order, + float_format, + double_format, + use_field_number, + descriptor_pool, + message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + result = out.getvalue() + out.close() + if as_one_line: + return result.rstrip() + return result + + +def MessageToBytes(message, **kwargs): + # type: (...) -> bytes + """Convert protobuf message to encoded text format. See MessageToString.""" + text = MessageToString(message, **kwargs) + if isinstance(text, bytes): + return text + codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' + return text.encode(codec) + + +def _IsMapEntry(field): + return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def PrintMessage(message, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + printer = _Printer( + out=out, indent=indent, as_utf8=as_utf8, + as_one_line=as_one_line, + use_short_repeated_primitives=use_short_repeated_primitives, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format, + double_format=double_format, + use_field_number=use_field_number, + descriptor_pool=descriptor_pool, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintMessage(message) + + +def PrintField(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field name/value pair.""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintField(field, value) + + +def PrintFieldValue(field, + value, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, 
+ float_format=None, + double_format=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Print a single field value (not including name).""" + printer = _Printer(out, indent, as_utf8, as_one_line, + use_short_repeated_primitives, pointy_brackets, + use_index_order, float_format, double_format, + message_formatter=message_formatter, + print_unknown_fields=print_unknown_fields, + force_colon=force_colon) + printer.PrintFieldValue(field, value) + + +def _BuildMessageFromTypeName(type_name, descriptor_pool): + """Returns a protobuf message instance. + + Args: + type_name: Fully-qualified protobuf message type name string. + descriptor_pool: DescriptorPool instance. + + Returns: + A Message instance of type matching type_name, or None if the a Descriptor + wasn't found matching type_name. + """ + # pylint: disable=g-import-not-at-top + if descriptor_pool is None: + from google.protobuf import descriptor_pool as pool_mod + descriptor_pool = pool_mod.Default() + from google.protobuf import symbol_database + database = symbol_database.Default() + try: + message_descriptor = descriptor_pool.FindMessageTypeByName(type_name) + except KeyError: + return None + message_type = database.GetPrototype(message_descriptor) + return message_type() + + +# These values must match WireType enum in google/protobuf/wire_format.h. +WIRETYPE_LENGTH_DELIMITED = 2 +WIRETYPE_START_GROUP = 3 + + +class _Printer(object): + """Text format printer for protocol message.""" + + def __init__( + self, + out, + indent=0, + as_utf8=False, + as_one_line=False, + use_short_repeated_primitives=False, + pointy_brackets=False, + use_index_order=False, + float_format=None, + double_format=None, + use_field_number=False, + descriptor_pool=None, + message_formatter=None, + print_unknown_fields=False, + force_colon=False): + """Initialize the Printer. + + Double values can be formatted compactly with 15 digits of precision + (which is the most that IEEE 754 "double" can guarantee) using + double_format='.15g'. To ensure that converting to text and back to a proto + will result in an identical value, double_format='.17g' should be used. + + Args: + out: To record the text format result. + indent: The initial indent level for pretty print. + as_utf8: Return unescaped Unicode for non-ASCII characters. + In Python 3 actual Unicode characters may appear as is in strings. + In Python 2 the return value will be valid UTF-8 rather than ASCII. + as_one_line: Don't introduce newlines between fields. + use_short_repeated_primitives: Use short repeated format for primitives. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, print fields of a proto message using the order + defined in source code instead of the field number. By default, use the + field number order. + float_format: If set, use this to specify float field formatting + (per the "Format Specification Mini-Language"); otherwise, shortest + float that has same value in wire will be printed. Also affect double + field if double_format is not set but float_format is set. + double_format: If set, use this to specify double field formatting + (per the "Format Specification Mini-Language"); if it is not set but + float_format is set, use float_format. Otherwise, str() is used. + use_field_number: If True, print field numbers instead of names. + descriptor_pool: A DescriptorPool used to resolve Any types. 
+ message_formatter: A function(message, indent, as_one_line): unicode|None + to custom format selected sub-messages (usually based on message type). + Use to pretty print parts of the protobuf for easier diffing. + print_unknown_fields: If True, unknown fields will be printed. + force_colon: If set, a colon will be added after the field name even if + the field is a proto message. + """ + self.out = out + self.indent = indent + self.as_utf8 = as_utf8 + self.as_one_line = as_one_line + self.use_short_repeated_primitives = use_short_repeated_primitives + self.pointy_brackets = pointy_brackets + self.use_index_order = use_index_order + self.float_format = float_format + if double_format is not None: + self.double_format = double_format + else: + self.double_format = float_format + self.use_field_number = use_field_number + self.descriptor_pool = descriptor_pool + self.message_formatter = message_formatter + self.print_unknown_fields = print_unknown_fields + self.force_colon = force_colon + + def _TryPrintAsAnyMessage(self, message): + """Serializes if message is a google.protobuf.Any field.""" + if '/' not in message.type_url: + return False + packed_message = _BuildMessageFromTypeName(message.TypeName(), + self.descriptor_pool) + if packed_message: + packed_message.MergeFromString(message.value) + colon = ':' if self.force_colon else '' + self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) + self._PrintMessageFieldValue(packed_message) + self.out.write(' ' if self.as_one_line else '\n') + return True + else: + return False + + def _TryCustomFormatMessage(self, message): + formatted = self.message_formatter(message, self.indent, self.as_one_line) + if formatted is None: + return False + + out = self.out + out.write(' ' * self.indent) + out.write(formatted) + out.write(' ' if self.as_one_line else '\n') + return True + + def PrintMessage(self, message): + """Convert protobuf message to text format. + + Args: + message: The protocol buffers message. + """ + if self.message_formatter and self._TryCustomFormatMessage(message): + return + if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and + self._TryPrintAsAnyMessage(message)): + return + fields = message.ListFields() + if self.use_index_order: + fields.sort( + key=lambda x: x[0].number if x[0].is_extension else x[0].index) + for field, value in fields: + if _IsMapEntry(field): + for key in sorted(value): + # This is slow for maps with submessage entries because it copies the + # entire tree. Unfortunately this would take significant refactoring + # of this file to work around. + # + # TODO(haberman): refactor and optimize if this becomes an issue. 
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key]) + self.PrintField(field, entry_submsg) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if (self.use_short_repeated_primitives + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE + and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): + self._PrintShortRepeatedPrimitivesValue(field, value) + else: + for element in value: + self.PrintField(field, element) + else: + self.PrintField(field, value) + + if self.print_unknown_fields: + self._PrintUnknownFields(message.UnknownFields()) + + def _PrintUnknownFields(self, unknown_fields): + """Print unknown fields.""" + out = self.out + for field in unknown_fields: + out.write(' ' * self.indent) + out.write(str(field.field_number)) + if field.wire_type == WIRETYPE_START_GROUP: + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(field.data) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: + try: + # If this field is parseable as a Message, it is probably + # an embedded message. + # pylint: disable=protected-access + (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( + memoryview(field.data), 0, len(field.data)) + except Exception: # pylint: disable=broad-except + pos = 0 + + if pos == len(field.data): + if self.as_one_line: + out.write(' { ') + else: + out.write(' {\n') + self.indent += 2 + + self._PrintUnknownFields(embedded_unknown_message) + + if self.as_one_line: + out.write('} ') + else: + self.indent -= 2 + out.write(' ' * self.indent + '}\n') + else: + # A string or bytes field. self.as_utf8 may not work. + out.write(': \"') + out.write(text_encoding.CEscape(field.data, False)) + out.write('\" ' if self.as_one_line else '\"\n') + else: + # varint, fixed32, fixed64 + out.write(': ') + out.write(str(field.data)) + out.write(' ' if self.as_one_line else '\n') + + def _PrintFieldName(self, field): + """Print field name.""" + out = self.out + out.write(' ' * self.indent) + if self.use_field_number: + out.write(str(field.number)) + else: + if field.is_extension: + out.write('[') + if (field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): + out.write(field.message_type.full_name) + else: + out.write(field.full_name) + out.write(']') + elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: + # For groups, use the capitalized name. + out.write(field.message_type.name) + else: + out.write(field.name) + + if (self.force_colon or + field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): + # The colon is optional in this case, but our cross-language golden files + # don't include it. Here, the colon is only included if force_colon is + # set to True + out.write(':') + + def PrintField(self, field, value): + """Print a single field name/value pair.""" + self._PrintFieldName(field) + self.out.write(' ') + self.PrintFieldValue(field, value) + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintShortRepeatedPrimitivesValue(self, field, value): + """"Prints short repeated primitives value.""" + # Note: this is called only when value has at least one element. 
+ self._PrintFieldName(field) + self.out.write(' [') + for i in range(len(value) - 1): + self.PrintFieldValue(field, value[i]) + self.out.write(', ') + self.PrintFieldValue(field, value[-1]) + self.out.write(']') + self.out.write(' ' if self.as_one_line else '\n') + + def _PrintMessageFieldValue(self, value): + if self.pointy_brackets: + openb = '<' + closeb = '>' + else: + openb = '{' + closeb = '}' + + if self.as_one_line: + self.out.write('%s ' % openb) + self.PrintMessage(value) + self.out.write(closeb) + else: + self.out.write('%s\n' % openb) + self.indent += 2 + self.PrintMessage(value) + self.indent -= 2 + self.out.write(' ' * self.indent + closeb) + + def PrintFieldValue(self, field, value): + """Print a single field value (not including name). + + For repeated fields, the value should be a single element. + + Args: + field: The descriptor of the field to be printed. + value: The value of the field. + """ + out = self.out + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + self._PrintMessageFieldValue(value) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + enum_value = field.enum_type.values_by_number.get(value, None) + if enum_value is not None: + out.write(enum_value.name) + else: + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') + if isinstance(value, str) and not self.as_utf8: + out_value = value.encode('utf-8') + else: + out_value = value + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + # We always need to escape all binary data in TYPE_BYTES fields. + out_as_utf8 = False + else: + out_as_utf8 = self.as_utf8 + out.write(text_encoding.CEscape(out_value, out_as_utf8)) + out.write('\"') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + if value: + out.write('true') + else: + out.write('false') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: + if self.float_format is not None: + out.write('{1:{0}}'.format(self.float_format, value)) + else: + if math.isnan(value): + out.write(str(value)) + else: + out.write(str(type_checkers.ToShortestFloat(value))) + elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and + self.double_format is not None): + out.write('{1:{0}}'.format(self.double_format, value)) + else: + out.write(str(value)) + + +def Parse(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + NOTE: for historical reasons this function does not clear the input + message. This is different from what the binary msg.ParseFrom(...) does. + If text contains a field already set in message, the value is appended if the + field is repeated. Otherwise, an error is raised. + + Example:: + + a = MyProto() + a.repeated_field.append('test') + b = MyProto() + + # Repeated fields are combined + text_format.Parse(repr(a), b) + text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] + + # Non-repeated fields cannot be overwritten + a.singular_field = 1 + b.singular_field = 2 + text_format.Parse(repr(a), b) # ParseError + + # Binary version: + b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" + + Caller is responsible for clearing the message as needed. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def Merge(text, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + Like Parse(), but allows repeated values for a non-repeated field, and uses + the last one. This means any non-repeated, top-level fields specified in text + replace those in the message. + + Args: + text (str): Message text representation. + message (Message): A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + Message: The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return MergeLines( + text.split(b'\n' if isinstance(text, bytes) else u'\n'), + message, + allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + + +def ParseLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Parse() for caveats. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.ParseLines(lines, message) + + +def MergeLines(lines, + message, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + """Parses a text representation of a protocol message into a message. + + See Merge() for more details. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + allow_field_number: if True, both field number and field name are allowed. + descriptor_pool: A DescriptorPool used to resolve Any types. + allow_unknown_field: if True, skip over unknown field and keep + parsing. Avoid to use this option if possible. It may hide some + errors (e.g. spelling error on field name) + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + parser = _Parser(allow_unknown_extension, + allow_field_number, + descriptor_pool=descriptor_pool, + allow_unknown_field=allow_unknown_field) + return parser.MergeLines(lines, message) + + +class _Parser(object): + """Text format parser for protocol message.""" + + def __init__(self, + allow_unknown_extension=False, + allow_field_number=False, + descriptor_pool=None, + allow_unknown_field=False): + self.allow_unknown_extension = allow_unknown_extension + self.allow_field_number = allow_field_number + self.descriptor_pool = descriptor_pool + self.allow_unknown_field = allow_unknown_field + + def ParseLines(self, lines, message): + """Parses a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = False + self._ParseOrMerge(lines, message) + return message + + def MergeLines(self, lines, message): + """Merges a text representation of a protocol message into a message.""" + self._allow_multiple_scalars = True + self._ParseOrMerge(lines, message) + return message + + def _ParseOrMerge(self, lines, message): + """Converts a text representation of a protocol message into a message. + + Args: + lines: Lines of a message's text representation. + message: A protocol buffer message to merge into. + + Raises: + ParseError: On text parsing problems. + """ + # Tokenize expects native str lines. + str_lines = ( + line if isinstance(line, str) else line.decode('utf-8') + for line in lines) + tokenizer = Tokenizer(str_lines) + while not tokenizer.AtEnd(): + self._MergeField(tokenizer, message) + + def _MergeField(self, tokenizer, message): + """Merges a single protocol message field into a message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + message: A protocol message to record the data. + + Raises: + ParseError: In case of text parsing problems. + """ + message_descriptor = message.DESCRIPTOR + if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and + tokenizer.TryConsume('[')): + type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) + tokenizer.Consume(']') + tokenizer.TryConsume(':') + if tokenizer.TryConsume('<'): + expanded_any_end_token = '>' + else: + tokenizer.Consume('{') + expanded_any_end_token = '}' + expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, + self.descriptor_pool) + if not expanded_any_sub_message: + raise ParseError('Type %s not found in descriptor pool' % + packed_type_name) + while not tokenizer.TryConsume(expanded_any_end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% + (expanded_any_end_token,)) + self._MergeField(tokenizer, expanded_any_sub_message) + deterministic = False + + message.Pack(expanded_any_sub_message, + type_url_prefix=type_url_prefix, + deterministic=deterministic) + return + + if tokenizer.TryConsume('['): + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + name = '.'.join(name) + + if not message_descriptor.is_extendable: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" does not have extensions.' % + message_descriptor.full_name) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(name) + # pylint: enable=protected-access + + + if not field: + if self.allow_unknown_extension: + field = None + else: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" not registered. ' + 'Did you import the _pb2 module which defines it? ' + 'If you are trying to place the extension in the MessageSet ' + 'field of another message that is in an Any or MessageSet field, ' + 'that message\'s _pb2 module must be imported as well' % name) + elif message_descriptor != field.containing_type: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" does not extend message type "%s".' % + (name, message_descriptor.full_name)) + + tokenizer.Consume(']') + + else: + name = tokenizer.ConsumeIdentifierOrNumber() + if self.allow_field_number and name.isdigit(): + number = ParseInteger(name, True, True) + field = message_descriptor.fields_by_number.get(number, None) + if not field and message_descriptor.is_extendable: + field = message.Extensions._FindExtensionByNumber(number) + else: + field = message_descriptor.fields_by_name.get(name, None) + + # Group names are expected to be capitalized as they appear in the + # .proto file, which actually matches their type names, not their field + # names. + if not field: + field = message_descriptor.fields_by_name.get(name.lower(), None) + if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: + field = None + + if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and + field.message_type.name != name): + field = None + + if not field and not self.allow_unknown_field: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" has no field named "%s".' % + (message_descriptor.full_name, name)) + + if field: + if not self._allow_multiple_scalars and field.containing_oneof: + # Check if there's a different field set in this oneof. + # Note that we ignore the case if the same field was set before, and we + # apply _allow_multiple_scalars to non-scalar fields as well. + which_oneof = message.WhichOneof(field.containing_oneof.name) + if which_oneof is not None and which_oneof != field.name: + raise tokenizer.ParseErrorPreviousToken( + 'Field "%s" is specified along with field "%s", another member ' + 'of oneof "%s" for message type "%s".' % + (field.name, which_oneof, field.containing_oneof.name, + message_descriptor.full_name)) + + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + tokenizer.TryConsume(':') + merger = self._MergeMessageField + else: + tokenizer.Consume(':') + merger = self._MergeScalarField + + if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and + tokenizer.TryConsume('[')): + # Short repeated format, e.g. 
"foo: [1, 2, 3]" + if not tokenizer.TryConsume(']'): + while True: + merger(tokenizer, message, field) + if tokenizer.TryConsume(']'): + break + tokenizer.Consume(',') + + else: + merger(tokenizer, message, field) + + else: # Proto field is unknown. + assert (self.allow_unknown_extension or self.allow_unknown_field) + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + + def _ConsumeAnyTypeUrl(self, tokenizer): + """Consumes a google.protobuf.Any type URL and returns the type name.""" + # Consume "type.googleapis.com/". + prefix = [tokenizer.ConsumeIdentifier()] + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('.') + prefix.append(tokenizer.ConsumeIdentifier()) + tokenizer.Consume('/') + # Consume the fully-qualified type name. + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + return '.'.join(prefix), '.'.join(name) + + def _MergeMessageField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: The message of which field is a member. + field: The descriptor of the field to be merged. + + Raises: + ParseError: In case of text parsing problems. + """ + is_map_entry = _IsMapEntry(field) + + if tokenizer.TryConsume('<'): + end_token = '>' + else: + tokenizer.Consume('{') + end_token = '}' + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + sub_message = message.Extensions[field].add() + elif is_map_entry: + sub_message = getattr(message, field.name).GetEntryClass()() + else: + sub_message = getattr(message, field.name).add() + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + sub_message = message.Extensions[field] + else: + # Also apply _allow_multiple_scalars to message field. + # TODO(jieluo): Change to _allow_singular_overwrites. + if (not self._allow_multiple_scalars and + message.HasField(field.name)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + sub_message = getattr(message, field.name) + sub_message.SetInParent() + + while not tokenizer.TryConsume(end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) + self._MergeField(tokenizer, sub_message) + + if is_map_entry: + value_cpptype = field.message_type.fields_by_name['value'].cpp_type + if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + value = getattr(message, field.name)[sub_message.key] + value.CopyFrom(sub_message.value) + else: + getattr(message, field.name)[sub_message.key] = sub_message.value + + @staticmethod + def _IsProto3Syntax(message): + message_descriptor = message.DESCRIPTOR + return (hasattr(message_descriptor, 'syntax') and + message_descriptor.syntax == 'proto3') + + def _MergeScalarField(self, tokenizer, message, field): + """Merges a single scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: A protocol message to record the data. + field: The descriptor of the field to be merged. 
+ + Raises: + ParseError: In case of text parsing problems. + RuntimeError: On runtime errors. + """ + _ = self.allow_unknown_extension + value = None + + if field.type in (descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_SFIXED32): + value = _ConsumeInt32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_SINT64, + descriptor.FieldDescriptor.TYPE_SFIXED64): + value = _ConsumeInt64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_FIXED32): + value = _ConsumeUint32(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_FIXED64): + value = _ConsumeUint64(tokenizer) + elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, + descriptor.FieldDescriptor.TYPE_DOUBLE): + value = tokenizer.ConsumeFloat() + elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: + value = tokenizer.ConsumeBool() + elif field.type == descriptor.FieldDescriptor.TYPE_STRING: + value = tokenizer.ConsumeString() + elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: + value = tokenizer.ConsumeByteString() + elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: + value = tokenizer.ConsumeEnum(field) + else: + raise RuntimeError('Unknown field type %d' % field.type) + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + message.Extensions[field].append(value) + else: + getattr(message, field.name).append(value) + else: + if field.is_extension: + if (not self._allow_multiple_scalars and + not self._IsProto3Syntax(message) and + message.HasExtension(field)): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + else: + message.Extensions[field] = value + else: + duplicate_error = False + if not self._allow_multiple_scalars: + if self._IsProto3Syntax(message): + # Proto3 doesn't represent presence so we try best effort to check + # multiple scalars by compare to default values. + duplicate_error = bool(getattr(message, field.name)) + else: + duplicate_error = message.HasField(field.name) + + if duplicate_error: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' % + (message.DESCRIPTOR.full_name, field.name)) + else: + setattr(message, field.name, value) + + +def _SkipFieldContents(tokenizer): + """Skips over contents (value or message) of a field. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + # Try to guess the type of this field. + # If this field is not a message, there should be a ":" between the + # field name and the field value and also the field value should not + # start with "{" or "<" which indicates the beginning of a message body. + # If there is no ":" or there is a "{" or "<" after ":", this field has + # to be a message or the input is ill-formed. + if tokenizer.TryConsume(':') and not tokenizer.LookingAt( + '{') and not tokenizer.LookingAt('<'): + _SkipFieldValue(tokenizer) + else: + _SkipFieldMessage(tokenizer) + + +def _SkipField(tokenizer): + """Skips over a complete field (name and value/message). + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + if tokenizer.TryConsume('['): + # Consume extension name. 
+ tokenizer.ConsumeIdentifier() + while tokenizer.TryConsume('.'): + tokenizer.ConsumeIdentifier() + tokenizer.Consume(']') + else: + tokenizer.ConsumeIdentifierOrNumber() + + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldMessage(tokenizer): + """Skips over a field message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + + if tokenizer.TryConsume('<'): + delimiter = '>' + else: + tokenizer.Consume('{') + delimiter = '}' + + while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): + _SkipField(tokenizer) + + tokenizer.Consume(delimiter) + + +def _SkipFieldValue(tokenizer): + """Skips over a field value. + + Args: + tokenizer: A tokenizer to parse the field name and values. + + Raises: + ParseError: In case an invalid field value is found. + """ + # String/bytes tokens can come in multiple adjacent string literals. + # If we can consume one, consume as many as we can. + if tokenizer.TryConsumeByteString(): + while tokenizer.TryConsumeByteString(): + pass + return + + if (not tokenizer.TryConsumeIdentifier() and + not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and + not tokenizer.TryConsumeFloat()): + raise ParseError('Invalid field value: ' + tokenizer.token) + + +class Tokenizer(object): + """Protocol buffer text representation tokenizer. + + This class handles the lower level string parsing by splitting it into + meaningful tokens. + + It was directly ported from the Java protocol buffer API. + """ + + _WHITESPACE = re.compile(r'\s+') + _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) + _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) + _TOKEN = re.compile('|'.join([ + r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier + r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number + ] + [ # quoted str for each quote mark + # Avoid backtracking! https://stackoverflow.com/a/844267 + r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) + for mark in _QUOTES + ])) + + _IDENTIFIER = re.compile(r'[^\d\W]\w*') + _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') + + def __init__(self, lines, skip_comments=True): + self._position = 0 + self._line = -1 + self._column = 0 + self._token_start = None + self.token = '' + self._lines = iter(lines) + self._current_line = '' + self._previous_line = 0 + self._previous_column = 0 + self._more_lines = True + self._skip_comments = skip_comments + self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT + or self._WHITESPACE) + self._SkipWhitespace() + self.NextToken() + + def LookingAt(self, token): + return self.token == token + + def AtEnd(self): + """Checks the end of the text was reached. + + Returns: + True iff the end was reached. + """ + return not self.token + + def _PopLine(self): + while len(self._current_line) <= self._column: + try: + self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False + return + else: + self._line += 1 + self._column = 0 + + def _SkipWhitespace(self): + while True: + self._PopLine() + match = self._whitespace_pattern.match(self._current_line, self._column) + if not match: + break + length = len(match.group(0)) + self._column += length + + def TryConsume(self, token): + """Tries to consume a given piece of text. + + Args: + token: Text to consume. + + Returns: + True iff the text was consumed. 
+ """ + if self.token == token: + self.NextToken() + return True + return False + + def Consume(self, token): + """Consumes a piece of text. + + Args: + token: Text to consume. + + Raises: + ParseError: If the text couldn't be consumed. + """ + if not self.TryConsume(token): + raise self.ParseError('Expected "%s".' % token) + + def ConsumeComment(self): + result = self.token + if not self._COMMENT.match(result): + raise self.ParseError('Expected comment.') + self.NextToken() + return result + + def ConsumeCommentOrTrailingComment(self): + """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" + + # Tokenizer initializes _previous_line and _previous_column to 0. As the + # tokenizer starts, it looks like there is a previous token on the line. + just_started = self._line == 0 and self._column == 0 + + before_parsing = self._previous_line + comment = self.ConsumeComment() + + # A trailing comment is a comment on the same line than the previous token. + trailing = (self._previous_line == before_parsing + and not just_started) + + return trailing, comment + + def TryConsumeIdentifier(self): + try: + self.ConsumeIdentifier() + return True + except ParseError: + return False + + def ConsumeIdentifier(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER.match(result): + raise self.ParseError('Expected identifier.') + self.NextToken() + return result + + def TryConsumeIdentifierOrNumber(self): + try: + self.ConsumeIdentifierOrNumber() + return True + except ParseError: + return False + + def ConsumeIdentifierOrNumber(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER_OR_NUMBER.match(result): + raise self.ParseError('Expected identifier or number, got %s.' % result) + self.NextToken() + return result + + def TryConsumeInteger(self): + try: + self.ConsumeInteger() + return True + except ParseError: + return False + + def ConsumeInteger(self): + """Consumes an integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer couldn't be consumed. + """ + try: + result = _ParseAbstractInteger(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeFloat(self): + try: + self.ConsumeFloat() + return True + except ParseError: + return False + + def ConsumeFloat(self): + """Consumes an floating point number. + + Returns: + The number parsed. + + Raises: + ParseError: If a floating point number couldn't be consumed. + """ + try: + result = ParseFloat(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeBool(self): + """Consumes a boolean value. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + try: + result = ParseBool(self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeByteString(self): + try: + self.ConsumeByteString() + return True + except ParseError: + return False + + def ConsumeString(self): + """Consumes a string value. + + Returns: + The string parsed. + + Raises: + ParseError: If a string value couldn't be consumed. 
+ """ + the_bytes = self.ConsumeByteString() + try: + return str(the_bytes, 'utf-8') + except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): + """Consumes a byte array value. + + Returns: + The array parsed (as a string). + + Raises: + ParseError: If a byte array value couldn't be consumed. + """ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in _QUOTES: + the_list.append(self._ConsumeSingleByteString()) + return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. + + String literals (whether bytes or text) can come in multiple adjacent + tokens which are automatically concatenated, like in C or Python. This + method only consumes one token. + + Returns: + The token parsed. + Raises: + ParseError: When the wrong format data is found. + """ + text = self.token + if len(text) < 1 or text[0] not in _QUOTES: + raise self.ParseError('Expected string but found: %r' % (text,)) + + if len(text) < 2 or text[-1] != text[0]: + raise self.ParseError('String missing ending quote: %r' % (text,)) + + try: + result = text_encoding.CUnescape(text[1:-1]) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) + except ValueError as e: + raise self.ParseError(str(e)) + self.NextToken() + return result + + def ParseErrorPreviousToken(self, message): + """Creates and *returns* a ParseError for the previously read token. + + Args: + message: A message to set for the exception. + + Returns: + A ParseError instance. + """ + return ParseError(message, self._previous_line + 1, + self._previous_column + 1) + + def ParseError(self, message): + """Creates and *returns* a ParseError for the current token.""" + return ParseError('\'' + self._current_line + '\': ' + message, + self._line + 1, self._column + 1) + + def _StringParseError(self, e): + return self.ParseError('Couldn\'t parse string: ' + str(e)) + + def NextToken(self): + """Reads the next meaningful token.""" + self._previous_line = self._line + self._previous_column = self._column + + self._column += len(self.token) + self._SkipWhitespace() + + if not self._more_lines: + self.token = '' + return + + match = self._TOKEN.match(self._current_line, self._column) + if not match and not self._skip_comments: + match = self._COMMENT.match(self._current_line, self._column) + if match: + token = match.group(0) + self.token = token + else: + self.token = self._current_line[self._column] + +# Aliased so it can still be accessed by current visibility violators. +# TODO(dbarnett): Migrate violators to textformat_tokenizer. +_Tokenizer = Tokenizer # pylint: disable=invalid-name + + +def _ConsumeInt32(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) + + +def _ConsumeUint32(tokenizer): + """Consumes an unsigned 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 32bit integer couldn't be consumed. 
+ """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) + + +def _TryConsumeInt64(tokenizer): + try: + _ConsumeInt64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeInt64(tokenizer): + """Consumes a signed 32bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) + + +def _TryConsumeUint64(tokenizer): + try: + _ConsumeUint64(tokenizer) + return True + except ParseError: + return False + + +def _ConsumeUint64(tokenizer): + """Consumes an unsigned 64bit integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 64bit integer couldn't be consumed. + """ + return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) + + +def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): + """Consumes an integer number from tokenizer. + + Args: + tokenizer: A tokenizer used to parse the number. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer parsed. + + Raises: + ParseError: If an integer with given characteristics couldn't be consumed. + """ + try: + result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) + except ValueError as e: + raise tokenizer.ParseError(str(e)) + tokenizer.NextToken() + return result + + +def ParseInteger(text, is_signed=False, is_long=False): + """Parses an integer. + + Args: + text: The text to parse. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + result = _ParseAbstractInteger(text) + + # Check if the integer is sane. Exceptions handled by callers. + checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] + checker.CheckValue(result) + return result + + +def _ParseAbstractInteger(text): + """Parses an integer without checking size/signedness. + + Args: + text: The text to parse. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + orig_text = text + c_octal_match = re.match(r'(-?)0(\d+)$', text) + if c_octal_match: + # Python 3 no longer supports 0755 octal syntax without the 'o', so + # we always use the '0o' prefix for multi-digit numbers starting with 0. + text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) + try: + return int(text, 0) + except ValueError: + raise ValueError('Couldn\'t parse integer: %s' % orig_text) + + +def ParseFloat(text): + """Parse a floating point number. + + Args: + text: Text to parse. + + Returns: + The number parsed. + + Raises: + ValueError: If a floating point number couldn't be parsed. + """ + try: + # Assume Python compatible syntax. + return float(text) + except ValueError: + # Check alternative spellings. 
+ if _FLOAT_INFINITY.match(text): + if text[0] == '-': + return float('-inf') + else: + return float('inf') + elif _FLOAT_NAN.match(text): + return float('nan') + else: + # assume '1.0f' format + try: + return float(text.rstrip('f')) + except ValueError: + raise ValueError('Couldn\'t parse float: %s' % text) + + +def ParseBool(text): + """Parse a boolean value. + + Args: + text: Text to parse. + + Returns: + Boolean values parsed + + Raises: + ValueError: If text is not a valid boolean. + """ + if text in ('true', 't', '1', 'True'): + return True + elif text in ('false', 'f', '0', 'False'): + return False + else: + raise ValueError('Expected "true" or "false".') + + +def ParseEnum(field, value): + """Parse an enum value. + + The value can be specified by a number (the enum value), or by + a string literal (the enum name). + + Args: + field: Enum field descriptor. + value: String value. + + Returns: + Enum value number. + + Raises: + ValueError: If the enum value could not be parsed. + """ + enum_descriptor = field.enum_type + try: + number = int(value, 0) + except ValueError: + # Identifier. + enum_value = enum_descriptor.values_by_name.get(value, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value named %s.' % + (enum_descriptor.full_name, value)) + else: + # Numeric value. + if hasattr(field.file, 'syntax'): + # Attribute is checked for compatibility. + if field.file.syntax == 'proto3': + # Proto3 accept numeric unknown enums. + return number + enum_value = enum_descriptor.values_by_number.get(number, None) + if enum_value is None: + raise ValueError('Enum type "%s" has no value with number %d.' % + (enum_descriptor.full_name, number)) + return enum_value.number diff --git a/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py new file mode 100644 index 0000000000..558d496941 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/timestamp_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/timestamp.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _TIMESTAMP._serialized_start=52 + _TIMESTAMP._serialized_end=95 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py new file mode 100644 index 0000000000..19903fb6b4 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/type_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _SYNTAX._serialized_start=1413 + _SYNTAX._serialized_end=1459 + _TYPE._serialized_start=113 + _TYPE._serialized_end=328 + _FIELD._serialized_start=331 + _FIELD._serialized_end=1056 + _FIELD_KIND._serialized_start=610 + _FIELD_KIND._serialized_end=938 + _FIELD_CARDINALITY._serialized_start=940 + _FIELD_CARDINALITY._serialized_end=1056 + _ENUM._serialized_start=1059 + _ENUM._serialized_end=1265 + _ENUMVALUE._serialized_start=1267 + _ENUMVALUE._serialized_end=1350 + _OPTION._serialized_start=1352 + _OPTION._serialized_end=1411 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/__init__.py b/openpype/hosts/nuke/vendor/google/protobuf/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py new file mode 100644 index 0000000000..66a5836c82 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) + + DESCRIPTOR._options = None + _TESTBOOLMAP_BOOLMAPENTRY._options = None + _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' + _ENUMVALUE._serialized_start=1607 + _ENUMVALUE._serialized_end=1657 + _TESTFLAGSANDSTRINGS._serialized_start=62 + _TESTFLAGSANDSTRINGS._serialized_end=199 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 + _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 + _TESTBASE64BYTEARRAYS._serialized_start=201 + _TESTBASE64BYTEARRAYS._serialized_end=234 + _TESTJAVASCRIPTJSON._serialized_start=236 + _TESTJAVASCRIPTJSON._serialized_end=307 + _TESTJAVASCRIPTORDERJSON1._serialized_start=309 + _TESTJAVASCRIPTORDERJSON1._serialized_end=390 + _TESTJAVASCRIPTORDERJSON2._serialized_start=393 + _TESTJAVASCRIPTORDERJSON2._serialized_end=530 + _TESTLARGEINT._serialized_start=532 + _TESTLARGEINT._serialized_end=568 + _TESTNUMBERS._serialized_start=571 + _TESTNUMBERS._serialized_end=731 + _TESTNUMBERS_MYTYPE._serialized_start=691 + _TESTNUMBERS_MYTYPE._serialized_end=731 + _TESTCAMELCASE._serialized_start=733 + _TESTCAMELCASE._serialized_end=817 + _TESTBOOLMAP._serialized_start=819 + _TESTBOOLMAP._serialized_end=943 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 + _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 + _TESTRECURSION._serialized_start=945 + _TESTRECURSION._serialized_end=1024 + _TESTSTRINGMAP._serialized_start=1027 + _TESTSTRINGMAP._serialized_end=1161 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 + _TESTSTRINGSERIALIZER._serialized_start=1164 + _TESTSTRINGSERIALIZER._serialized_end=1360 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 + _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 + _TESTMESSAGEWITHEXTENSION._serialized_start=1362 + _TESTMESSAGEWITHEXTENSION._serialized_end=1398 + _TESTEXTENSION._serialized_start=1400 + _TESTEXTENSION._serialized_end=1522 + _TESTDEFAULTENUMVALUE._serialized_start=1524 + _TESTDEFAULTENUMVALUE._serialized_end=1605 +# @@protoc_insertion_point(module_scope) diff --git a/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py new file mode 100644 index 0000000000..5498deafa9 --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/util/json_format_proto3.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' + _TESTMAP_BOOLMAPENTRY._options = None + _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT32MAPENTRY._options = None + _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_INT64MAPENTRY._options = None + _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT32MAPENTRY._options = None + _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' + _TESTMAP_UINT64MAPENTRY._options = None + _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTMAP_STRINGMAPENTRY._options = None + _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_BOOLMAPENTRY._options = None + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT32MAPENTRY._options = None + _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_INT64MAPENTRY._options = None + _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_UINT32MAPENTRY._options = None + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' + 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_STRINGMAPENTRY._options = None + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTNESTEDMAP_MAPMAPENTRY._options = None + _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' + _TESTSTRINGMAP_STRINGMAPENTRY._options = None + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' + _TESTBOOLVALUE_BOOLMAPENTRY._options = None + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' + _ENUMTYPE._serialized_start=4849 + _ENUMTYPE._serialized_end=4877 + _MESSAGETYPE._serialized_start=277 + _MESSAGETYPE._serialized_end=305 + _TESTMESSAGE._serialized_start=308 + _TESTMESSAGE._serialized_end=968 + _TESTONEOF._serialized_start=971 + _TESTONEOF._serialized_end=1239 + _TESTMAP._serialized_start=1242 + _TESTMAP._serialized_end=1851 + _TESTMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTMAP_INT32MAPENTRY._serialized_start=1605 + _TESTMAP_INT32MAPENTRY._serialized_end=1652 + _TESTMAP_INT64MAPENTRY._serialized_start=1654 + _TESTMAP_INT64MAPENTRY._serialized_end=1701 + _TESTMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP._serialized_start=1854 + _TESTNESTEDMAP._serialized_end=2627 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 + _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 + _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 + _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 + _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 + _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 + _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 + _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 + _TESTSTRINGMAP._serialized_start=2629 + _TESTSTRINGMAP._serialized_end=2752 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 + _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 + _TESTWRAPPER._serialized_start=2755 + _TESTWRAPPER._serialized_end=3761 + _TESTTIMESTAMP._serialized_start=3763 + _TESTTIMESTAMP._serialized_end=3873 + _TESTDURATION._serialized_start=3875 + _TESTDURATION._serialized_end=3982 + _TESTFIELDMASK._serialized_start=3984 + _TESTFIELDMASK._serialized_end=4042 + _TESTSTRUCT._serialized_start=4044 + _TESTSTRUCT._serialized_end=4145 + _TESTANY._serialized_start=4147 + _TESTANY._serialized_end=4239 + _TESTVALUE._serialized_start=4241 + _TESTVALUE._serialized_end=4339 + _TESTLISTVALUE._serialized_start=4341 + _TESTLISTVALUE._serialized_end=4451 + _TESTBOOLVALUE._serialized_start=4454 + _TESTBOOLVALUE._serialized_end=4591 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 + _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 + _TESTCUSTOMJSONNAME._serialized_start=4593 + _TESTCUSTOMJSONNAME._serialized_end=4636 + _TESTEXTENSIONS._serialized_start=4638 + _TESTEXTENSIONS._serialized_end=4712 + _TESTENUMVALUE._serialized_start=4715 + _TESTENUMVALUE._serialized_end=4847 +# @@protoc_insertion_point(module_scope) diff --git 
a/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py b/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py new file mode 100644 index 0000000000..e49eb4c15d --- /dev/null +++ b/openpype/hosts/nuke/vendor/google/protobuf/wrappers_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/wrappers.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' + _DOUBLEVALUE._serialized_start=51 + _DOUBLEVALUE._serialized_end=79 + _FLOATVALUE._serialized_start=81 + _FLOATVALUE._serialized_end=108 + _INT64VALUE._serialized_start=110 + _INT64VALUE._serialized_end=137 + _UINT64VALUE._serialized_start=139 + _UINT64VALUE._serialized_end=167 + _INT32VALUE._serialized_start=169 + _INT32VALUE._serialized_end=196 + _UINT32VALUE._serialized_start=198 + _UINT32VALUE._serialized_end=226 + _BOOLVALUE._serialized_start=228 + _BOOLVALUE._serialized_end=254 + _STRINGVALUE._serialized_start=256 + _STRINGVALUE._serialized_end=284 + _BYTESVALUE._serialized_start=286 + _BYTESVALUE._serialized_end=313 +# @@protoc_insertion_point(module_scope) From 1615f74af3e8daddb8c625329f67f0756deb9bc9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 24 Aug 2022 15:40:02 +0200 Subject: [PATCH 0628/2550] OP-3214 - updated format of addon info response When downloading it should go through each source until it succeeds --- distribution/addon_distribution.py | 37 +++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 11 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 3cc2374b93..2efbb34274 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -4,6 +4,7 @@ from abc import abstractmethod import attr import logging import requests +import platform from distribution.file_handler import RemoteFileHandler @@ -14,13 +15,26 @@ class UrlType(Enum): FILESYSTEM = "filesystem" +@attr.s +class 
MultiPlatformPath(object): + windows = attr.ib(default=None) + linux = attr.ib(default=None) + darwin = attr.ib(default=None) + + +@attr.s +class AddonSource(object): + type = attr.ib() + url = attr.ib(default=None) + path = attr.ib(default=attr.Factory(MultiPlatformPath)) + + @attr.s class AddonInfo(object): """Object matching json payload from Server""" - name = attr.ib(default=None) - version = attr.ib(default=None) - addon_url = attr.ib(default=None) - type = attr.ib(default=None) + name = attr.ib() + version = attr.ib() + sources = attr.ib(default=attr.Factory(list), type=AddonSource) hash = attr.ib(default=None) description = attr.ib(default=None) license = attr.ib(default=None) @@ -44,12 +58,11 @@ class AddonDownloader: @classmethod @abstractmethod - def download(cls, addon_url, destination): + def download(cls, source, destination): """Returns url to downloaded addon zip file. Args: - addon_url (str): http or OS or any supported protocol url to addon - zip file + source (dict): {type:"http", "url":"https://} ...} destination (str): local folder to unzip Retursn: (str) local path to addon zip file @@ -90,8 +103,9 @@ class AddonDownloader: class OSAddonDownloader(AddonDownloader): @classmethod - def download(cls, addon_url, destination): + def download(cls, source, destination): # OS doesnt need to download, unzip directly + addon_url = source["path"].get(platform.system().lower()) if not os.path.exists(addon_url): raise ValueError("{} is not accessible".format(addon_url)) return addon_url @@ -101,14 +115,15 @@ class HTTPAddonDownloader(AddonDownloader): CHUNK_SIZE = 100000 @classmethod - def download(cls, addon_url, destination): - cls.log.debug(f"Downloading {addon_url} to {destination}") + def download(cls, source, destination): + source_url = source["url"] + cls.log.debug(f"Downloading {source_url} to {destination}") file_name = os.path.basename(destination) _, ext = os.path.splitext(file_name) if (ext.replace(".", '') not in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)): file_name += ".zip" - RemoteFileHandler.download_url(addon_url, + RemoteFileHandler.download_url(source_url, destination, filename=file_name) From 34c15c24292a3ae434b509987acb9b28f8176106 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 24 Aug 2022 15:41:03 +0200 Subject: [PATCH 0629/2550] OP-3214 - fixed update_addon_state Should be able to update whatever can. 
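A rough usage sketch of the behaviour this commit introduces (all names — `update_addon_state`, `addon_infos`, `destination_folder`, `factory`, `log` — come from the patch below; the surrounding caller code is assumed, not part of the patch): the function now reports a per-addon state of "exists", "updated" or "failed", where "failed" means no entry in `addon.sources` could be downloaded.

# Sketch only, assuming the signature changed by this patch:
#   update_addon_state(addon_infos, destination_folder, factory, log=None)
states = update_addon_state(addon_infos, destination_folder, factory, log=log)
for full_name, state in states.items():
    if state == "exists":
        log.debug("{} already present, skipped".format(full_name))
    elif state == "updated":
        log.info("{} downloaded and unzipped".format(full_name))
    else:  # "failed" - every source for this addon raised during download
        log.error("{} could not be distributed".format(full_name))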
--- distribution/addon_distribution.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 2efbb34274..f5af0f77ed 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -170,26 +170,36 @@ def update_addon_state(addon_infos, destination_folder, factory, factory (AddonDownloader): factory to get appropriate downloader per addon type log (logging.Logger) + Returns: + (dict): {"addon_full_name":"exists"|"updated"|"failed" """ if not log: log = logging.getLogger(__name__) + download_states = {} for addon in addon_infos: full_name = "{}_{}".format(addon.name, addon.version) addon_dest = os.path.join(destination_folder, full_name) if os.path.isdir(addon_dest): log.debug(f"Addon version folder {addon_dest} already exists.") + download_states[full_name] = "exists" continue - try: - downloader = factory.get_downloader(addon.type) - zip_file_path = downloader.download(addon.addon_url, addon_dest) - downloader.check_hash(zip_file_path, addon.hash) - downloader.unzip(zip_file_path, addon_dest) - except Exception: - log.warning(f"Error happened during updating {addon.name}", - exc_info=True) + for source in addon.sources: + download_states[full_name] = "failed" + try: + downloader = factory.get_downloader(source["type"]) + zip_file_path = downloader.download(source, addon_dest) + downloader.check_hash(zip_file_path, addon.hash) + downloader.unzip(zip_file_path, addon_dest) + download_states[full_name] = "updated" + break + except Exception: + log.warning(f"Error happened during updating {addon.name}", + exc_info=True) + + return download_states def check_addons(server_endpoint, addon_folder, downloaders): From 882e00baefda3be849c5b92be897d0b9d27ad3e1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 15:52:47 +0200 Subject: [PATCH 0630/2550] use Logger instead of PypeLogger --- openpype/client/entities.py | 2 +- openpype/hosts/maya/api/plugin.py | 5 +-- openpype/hosts/tvpaint/worker/worker_job.py | 4 +- .../webserver_service/webpublish_routes.py | 6 +-- .../webserver_service/webserver_cli.py | 4 +- openpype/lib/applications.py | 10 ++--- openpype/lib/execute.py | 4 +- openpype/lib/path_templates.py | 5 --- openpype/lib/remote_publish.py | 4 +- openpype/modules/base.py | 12 ++--- openpype/modules/deadline/deadline_module.py | 4 +- .../ftrack/ftrack_server/ftrack_server.py | 21 ++++----- openpype/modules/log_viewer/tray/models.py | 14 +++--- openpype/modules/royalrender/api.py | 12 +++-- openpype/modules/sync_server/sync_server.py | 5 +-- .../modules/sync_server/sync_server_module.py | 4 +- openpype/modules/sync_server/tray/app.py | 3 -- .../modules/sync_server/tray/delegates.py | 5 +-- openpype/modules/sync_server/tray/lib.py | 5 --- openpype/modules/sync_server/tray/models.py | 5 +-- openpype/modules/sync_server/tray/widgets.py | 5 +-- .../modules/timers_manager/idle_threads.py | 4 +- openpype/modules/webserver/server.py | 45 ++++++++++++------- openpype/pipeline/anatomy.py | 4 +- openpype/pype_commands.py | 24 +++++----- openpype/settings/entities/base_entity.py | 4 +- 26 files changed, 109 insertions(+), 111 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index f1f1d30214..3d2730a17c 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1455,7 +1455,7 @@ def get_workfile_info( """ ## Custom data storage: - Settings - OP settings overrides and local settings -- Logging - 
logs from PypeLogger +- Logging - logs from Logger - Webpublisher - jobs - Ftrack - events - Maya - Shaders diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index e50ebfccad..39d821f620 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -4,6 +4,7 @@ from maya import cmds import qargparse +from openpype.lib import Logger from openpype.pipeline import ( LegacyCreator, LoaderPlugin, @@ -50,9 +51,7 @@ def get_reference_node(members, log=None): # Warn the user when we're taking the highest reference node if len(references) > 1: if not log: - from openpype.lib import PypeLogger - - log = PypeLogger().get_logger(__name__) + log = Logger.get_logger(__name__) log.warning("More than one reference node found in " "container, using highest reference node: " diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 1c785ab2ee..95c0a678bc 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -9,7 +9,7 @@ from abc import ABCMeta, abstractmethod, abstractproperty import six -from openpype.api import PypeLogger +from openpype.lib import Logger from openpype.modules import ModulesManager @@ -328,7 +328,7 @@ class TVPaintCommands: def log(self): """Access to logger object.""" if self._log is None: - self._log = PypeLogger.get_logger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log @property diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 6444a5191d..2e9d460a98 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -12,9 +12,7 @@ from openpype.client import ( get_assets, OpenPypeMongoConnection, ) -from openpype.lib import ( - PypeLogger, -) +from openpype.lib import Logger from openpype.lib.remote_publish import ( get_task_data, ERROR_STATUS, @@ -23,7 +21,7 @@ from openpype.lib.remote_publish import ( from openpype.settings import get_project_settings from openpype_modules.webserver.base_routes import RestApiEndpoint -log = PypeLogger.get_logger("WebpublishRoutes") +log = Logger.get_logger("WebpublishRoutes") class ResourceRestApiEndpoint(RestApiEndpoint): diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 6620e5d5cf..936bd9735f 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -7,7 +7,7 @@ import json import subprocess from openpype.client import OpenPypeMongoConnection -from openpype.lib import PypeLogger +from openpype.lib import Logger from .webpublish_routes import ( RestApiResource, @@ -28,7 +28,7 @@ from openpype.lib.remote_publish import ( ) -log = PypeLogger.get_logger("webserver_gui") +log = Logger.get_logger("webserver_gui") def run_webserver(*args, **kwargs): diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 074e815160..73f9607835 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -24,7 +24,7 @@ from openpype.settings.constants import ( METADATA_KEYS, M_DYNAMIC_KEY_LABEL ) -from . 
import PypeLogger +from .log import Logger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username @@ -138,7 +138,7 @@ def get_logger(): """Global lib.applications logger getter.""" global _logger if _logger is None: - _logger = PypeLogger.get_logger(__name__) + _logger = Logger.get_logger(__name__) return _logger @@ -373,7 +373,7 @@ class ApplicationManager: """ def __init__(self, system_settings=None): - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.app_groups = {} self.applications = {} @@ -735,7 +735,7 @@ class LaunchHook: Always should be called """ - self.log = PypeLogger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.launch_context = launch_context @@ -877,7 +877,7 @@ class ApplicationLaunchContext: # Logger logger_name = "{}-{}".format(self.__class__.__name__, self.app_name) - self.log = PypeLogger.get_logger(logger_name) + self.log = Logger.get_logger(logger_name) self.executable = executable diff --git a/openpype/lib/execute.py b/openpype/lib/execute.py index c3e35772f3..f1f2a4fa0a 100644 --- a/openpype/lib/execute.py +++ b/openpype/lib/execute.py @@ -5,7 +5,7 @@ import platform import json import tempfile -from .log import PypeLogger as Logger +from .log import Logger from .vendor_bin_utils import find_executable # MSDN process creation flag (Windows only) @@ -40,7 +40,7 @@ def execute(args, log_levels = ['DEBUG:', 'INFO:', 'ERROR:', 'WARNING:', 'CRITICAL:'] - log = Logger().get_logger('execute') + log = Logger.get_logger('execute') log.info("Executing ({})".format(" ".join(args))) popen = subprocess.Popen( args, diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index e4b18ec258..b160054e38 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -6,11 +6,6 @@ import collections import six -from .log import PypeLogger - -log = PypeLogger.get_logger(__name__) - - KEY_PATTERN = re.compile(r"(\{.*?[^{0]*\})") KEY_PADDING_PATTERN = re.compile(r"([^:]+)\S+[><]\S+") SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)") diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index b4b05c053b..2a901544cc 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -66,7 +66,7 @@ def publish(log, close_plugin_name=None, raise_error=False): """Loops through all plugins, logs to console. Used for tests. 
Args: - log (OpenPypeLogger) + log (openpype.lib.Logger) close_plugin_name (str): name of plugin with responsibility to close host app """ @@ -98,7 +98,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): Args: dbcon (OpenPypeMongoConnection) _id (str) - id of current job in DB - log (OpenPypeLogger) + log (openpype.lib.Logger) batch_id (str) - id sent from frontend close_plugin_name (str): name of plugin with responsibility to close host app diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1316d7f734..7fc848af2d 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -26,7 +26,7 @@ from openpype.settings.lib import ( get_studio_system_settings_overrides, load_json_file ) -from openpype.lib import PypeLogger +from openpype.lib import Logger # Files that will be always ignored on modules import IGNORED_FILENAMES = ( @@ -93,7 +93,7 @@ class _ModuleClass(object): def log(self): if self._log is None: super(_ModuleClass, self).__setattr__( - "_log", PypeLogger.get_logger(self.name) + "_log", Logger.get_logger(self.name) ) return self._log @@ -290,7 +290,7 @@ def _load_modules(): # Change `sys.modules` sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) - log = PypeLogger.get_logger("ModulesLoader") + log = Logger.get_logger("ModulesLoader") # Look for OpenPype modules in paths defined with `get_module_dirs` # - dynamically imported OpenPype modules and addons @@ -440,7 +440,7 @@ class OpenPypeModule: def __init__(self, manager, settings): self.manager = manager - self.log = PypeLogger.get_logger(self.name) + self.log = Logger.get_logger(self.name) self.initialize(settings) @@ -1059,7 +1059,7 @@ class TrayModulesManager(ModulesManager): ) def __init__(self): - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.modules = [] self.modules_by_id = {} @@ -1235,7 +1235,7 @@ def get_module_settings_defs(): settings_defs = [] - log = PypeLogger.get_logger("ModuleSettingsLoad") + log = Logger.get_logger("ModuleSettingsLoad") for raw_module in openpype_modules: for attr_name in dir(raw_module): diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index c30db75188..bbd0f74e8a 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -3,7 +3,7 @@ import requests import six import sys -from openpype.lib import requests_get, PypeLogger +from openpype.lib import requests_get, Logger from openpype.modules import OpenPypeModule from openpype_interfaces import IPluginPaths @@ -58,7 +58,7 @@ class DeadlineModule(OpenPypeModule, IPluginPaths): """ if not log: - log = PypeLogger.get_logger(__name__) + log = Logger.get_logger(__name__) argument = "{}/api/pools?NamesOnly=true".format(webservice) try: diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py b/openpype/modules/ftrack/ftrack_server/ftrack_server.py index 8944591b71..c75b8f7172 100644 --- a/openpype/modules/ftrack/ftrack_server/ftrack_server.py +++ b/openpype/modules/ftrack/ftrack_server/ftrack_server.py @@ -7,12 +7,10 @@ import traceback import ftrack_api from openpype.lib import ( - PypeLogger, + Logger, modules_from_path ) -log = PypeLogger.get_logger(__name__) - """ # Required - Needed for connection to Ftrack FTRACK_SERVER # Ftrack server e.g. "https://myFtrack.ftrackapp.com" @@ -43,10 +41,13 @@ class FtrackServer: server.run_server() .. 
""" + # set Ftrack logging to Warning only - OPTIONAL ftrack_log = logging.getLogger("ftrack_api") ftrack_log.setLevel(logging.WARNING) + self.log = Logger.get_logger(__name__) + self.stopped = True self.is_running = False @@ -72,7 +73,7 @@ class FtrackServer: # Get all modules with functions modules, crashed = modules_from_path(path) for filepath, exc_info in crashed: - log.warning("Filepath load crashed {}.\n{}".format( + self.log.warning("Filepath load crashed {}.\n{}".format( filepath, traceback.format_exception(*exc_info) )) @@ -87,7 +88,7 @@ class FtrackServer: break if not register_function: - log.warning( + self.log.warning( "\"{}\" - Missing register method".format(filepath) ) continue @@ -97,7 +98,7 @@ class FtrackServer: ) if not register_functions: - log.warning(( + self.log.warning(( "There are no events with `register` function" " in registered paths: \"{}\"" ).format("| ".join(paths))) @@ -106,7 +107,7 @@ class FtrackServer: try: register_func(self.session) except Exception: - log.warning( + self.log.warning( "\"{}\" - register was not successful".format(filepath), exc_info=True ) @@ -141,7 +142,7 @@ class FtrackServer: self.session = session if load_files: if not self.handler_paths: - log.warning(( + self.log.warning(( "Paths to event handlers are not set." " Ftrack server won't launch." )) @@ -151,8 +152,8 @@ class FtrackServer: self.set_files(self.handler_paths) msg = "Registration of event handlers has finished!" - log.info(len(msg) * "*") - log.info(msg) + self.log.info(len(msg) * "*") + self.log.info(msg) # keep event_hub on session running self.session.event_hub.wait() diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/log_viewer/tray/models.py index aea62c381b..d369ffeb64 100644 --- a/openpype/modules/log_viewer/tray/models.py +++ b/openpype/modules/log_viewer/tray/models.py @@ -1,6 +1,6 @@ import collections from Qt import QtCore, QtGui -from openpype.lib.log import PypeLogger +from openpype.lib import Logger class LogModel(QtGui.QStandardItemModel): @@ -41,14 +41,14 @@ class LogModel(QtGui.QStandardItemModel): self.dbcon = None # Crash if connection is not possible to skip this module - if not PypeLogger.initialized: - PypeLogger.initialize() + if not Logger.initialized: + Logger.initialize() - connection = PypeLogger.get_log_mongo_connection() + connection = Logger.get_log_mongo_connection() if connection: - PypeLogger.bootstrap_mongo_log() - database = connection[PypeLogger.log_database_name] - self.dbcon = database[PypeLogger.log_collection_name] + Logger.bootstrap_mongo_log() + database = connection[Logger.log_database_name] + self.dbcon = database[Logger.log_collection_name] def headerData(self, section, orientation, role): if ( diff --git a/openpype/modules/royalrender/api.py b/openpype/modules/royalrender/api.py index ed9e71f240..de1dba8724 100644 --- a/openpype/modules/royalrender/api.py +++ b/openpype/modules/royalrender/api.py @@ -5,13 +5,10 @@ import os from openpype.settings import get_project_settings from openpype.lib.local_settings import OpenPypeSettingsRegistry -from openpype.lib import PypeLogger, run_subprocess +from openpype.lib import Logger, run_subprocess from .rr_job import RRJob, SubmitFile, SubmitterParameter -log = PypeLogger.get_logger("RoyalRender") - - class Api: _settings = None @@ -19,6 +16,7 @@ class Api: RR_SUBMIT_API = 2 def __init__(self, settings, project=None): + self.log = Logger.get_logger("RoyalRender") self._settings = settings self._initialize_rr(project) @@ -137,7 +135,7 @@ class Api: rr_console += 
".exe" args = [rr_console, file] - run_subprocess(" ".join(args), logger=log) + run_subprocess(" ".join(args), logger=self.log) def _submit_using_api(self, file): # type: (SubmitFile) -> None @@ -159,11 +157,11 @@ class Api: rr_server = tcp.getRRServer() if len(rr_server) == 0: - log.info("Got RR IP address {}".format(rr_server)) + self.log.info("Got RR IP address {}".format(rr_server)) # TODO: Port is hardcoded in RR? If not, move it to Settings if not tcp.setServer(rr_server, 7773): - log.error( + self.log.error( "Can not set RR server: {}".format(tcp.errorMessage())) raise RoyalRenderException(tcp.errorMessage()) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 97538fcd4e..d01ab1d3a0 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -6,12 +6,11 @@ import concurrent.futures from concurrent.futures._base import CancelledError from .providers import lib -from openpype.lib import PypeLogger +from openpype.lib import Logger from .utils import SyncStatus, ResumableError - -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") async def upload(module, project_name, file, representation, provider_name, diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index c7f9484e55..c72b310907 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -13,7 +13,7 @@ from openpype.settings import ( get_project_settings, get_system_settings, ) -from openpype.lib import PypeLogger, get_local_site_id +from openpype.lib import Logger, get_local_site_id from openpype.pipeline import AvalonMongoDB, Anatomy from openpype.settings.lib import ( get_default_anatomy_settings, @@ -28,7 +28,7 @@ from .utils import time_function, SyncStatus, SiteAlreadyPresentError from openpype.client import get_representations, get_representation_by_id -log = PypeLogger.get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class SyncServerModule(OpenPypeModule, ITrayModule): diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py index 96fad6a247..9b9768327e 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/sync_server/tray/app.py @@ -2,7 +2,6 @@ from Qt import QtWidgets, QtCore, QtGui from openpype.tools.settings import style -from openpype.lib import PypeLogger from openpype import resources from .widgets import ( @@ -10,8 +9,6 @@ from .widgets import ( SyncRepresentationSummaryWidget ) -log = PypeLogger().get_logger("SyncServer") - class SyncServerWindow(QtWidgets.QDialog): """ diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py index 5ab809a816..988eb40d28 100644 --- a/openpype/modules/sync_server/tray/delegates.py +++ b/openpype/modules/sync_server/tray/delegates.py @@ -1,8 +1,7 @@ import os from Qt import QtCore, QtWidgets, QtGui -from openpype.lib import PypeLogger -from . 
import lib +from openpype.lib import Logger from openpype.tools.utils.constants import ( LOCAL_PROVIDER_ROLE, @@ -16,7 +15,7 @@ from openpype.tools.utils.constants import ( EDIT_ICON_ROLE ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class PriorityDelegate(QtWidgets.QStyledItemDelegate): diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/sync_server/tray/lib.py index 87344be634..ff93815639 100644 --- a/openpype/modules/sync_server/tray/lib.py +++ b/openpype/modules/sync_server/tray/lib.py @@ -2,11 +2,6 @@ import attr import abc import six -from openpype.lib import PypeLogger - - -log = PypeLogger().get_logger("SyncServer") - STATUS = { 0: 'In Progress', 1: 'Queued', diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index 629c4cbbf1..d63d046508 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -9,8 +9,7 @@ import qtawesome from openpype.tools.utils.delegates import pretty_timestamp -from openpype.lib import PypeLogger -from openpype.api import get_local_site_id +from openpype.lib import Logger, get_local_site_id from openpype.client import get_representation_by_id from . import lib @@ -33,7 +32,7 @@ from openpype.tools.utils.constants import ( ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class _SyncRepresentationModel(QtCore.QAbstractTableModel): diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py index b4ee447ac4..c40aa98f24 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/sync_server/tray/widgets.py @@ -9,8 +9,7 @@ import qtawesome from openpype.tools.settings import style -from openpype.api import get_local_site_id -from openpype.lib import PypeLogger +from openpype.lib import Logger, get_local_site_id from openpype.tools.utils.delegates import pretty_timestamp @@ -36,7 +35,7 @@ from openpype.tools.utils.constants import ( TRIES_ROLE ) -log = PypeLogger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") class SyncProjectListWidget(QtWidgets.QWidget): diff --git a/openpype/modules/timers_manager/idle_threads.py b/openpype/modules/timers_manager/idle_threads.py index 9ec27e659b..7242761143 100644 --- a/openpype/modules/timers_manager/idle_threads.py +++ b/openpype/modules/timers_manager/idle_threads.py @@ -2,7 +2,7 @@ import time from Qt import QtCore from pynput import mouse, keyboard -from openpype.lib import PypeLogger +from openpype.lib import Logger class IdleItem: @@ -31,7 +31,7 @@ class IdleManager(QtCore.QThread): def __init__(self): super(IdleManager, self).__init__() - self.log = PypeLogger.get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.signal_reset_timer.connect(self._reset_time) self.idle_item = IdleItem() diff --git a/openpype/modules/webserver/server.py b/openpype/modules/webserver/server.py index 82b681f406..44b14acbb6 100644 --- a/openpype/modules/webserver/server.py +++ b/openpype/modules/webserver/server.py @@ -4,16 +4,16 @@ import asyncio from aiohttp import web -from openpype.lib import PypeLogger +from openpype.lib import Logger from .cors_middleware import cors_middleware -log = PypeLogger.get_logger("WebServer") - class WebServerManager: """Manger that care about web server thread.""" def __init__(self, port=None, host=None): + self._log = None + self.port = port or 8079 self.host = host or "localhost" @@ 
-33,6 +33,12 @@ class WebServerManager: self.webserver_thread = WebServerThread(self) + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @property def url(self): return "http://{}:{}".format(self.host, self.port) @@ -51,12 +57,12 @@ class WebServerManager: if not self.is_running: return try: - log.debug("Stopping Web server") + self.log.debug("Stopping Web server") self.webserver_thread.is_running = False self.webserver_thread.stop() except Exception: - log.warning( + self.log.warning( "Error has happened during Killing Web server", exc_info=True ) @@ -74,7 +80,10 @@ class WebServerManager: class WebServerThread(threading.Thread): """ Listener for requests in thread.""" + def __init__(self, manager): + self._log = None + super(WebServerThread, self).__init__() self.is_running = False @@ -84,6 +93,12 @@ class WebServerThread(threading.Thread): self.site = None self.tasks = [] + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @property def port(self): return self.manager.port @@ -96,13 +111,13 @@ class WebServerThread(threading.Thread): self.is_running = True try: - log.info("Starting WebServer server") + self.log.info("Starting WebServer server") self.loop = asyncio.new_event_loop() # create new loop for thread asyncio.set_event_loop(self.loop) self.loop.run_until_complete(self.start_server()) - log.debug( + self.log.debug( "Running Web server on URL: \"localhost:{}\"".format(self.port) ) @@ -110,7 +125,7 @@ class WebServerThread(threading.Thread): self.loop.run_forever() except Exception: - log.warning( + self.log.warning( "Web Server service has failed", exc_info=True ) finally: @@ -118,7 +133,7 @@ class WebServerThread(threading.Thread): self.is_running = False self.manager.thread_stopped() - log.info("Web server stopped") + self.log.info("Web server stopped") async def start_server(self): """ Starts runner and TCPsite """ @@ -138,17 +153,17 @@ class WebServerThread(threading.Thread): while self.is_running: while self.tasks: task = self.tasks.pop(0) - log.debug("waiting for task {}".format(task)) + self.log.debug("waiting for task {}".format(task)) await task - log.debug("returned value {}".format(task.result)) + self.log.debug("returned value {}".format(task.result)) await asyncio.sleep(0.5) - log.debug("Starting shutdown") + self.log.debug("Starting shutdown") await self.site.stop() - log.debug("Site stopped") + self.log.debug("Site stopped") await self.runner.cleanup() - log.debug("Runner stopped") + self.log.debug("Runner stopped") tasks = [ task for task in asyncio.all_tasks() @@ -156,7 +171,7 @@ class WebServerThread(threading.Thread): ] list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks results = await asyncio.gather(*tasks, return_exceptions=True) - log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + self.log.debug(f'Finished awaiting cancelled tasks, results: {results}...') await self.loop.shutdown_asyncgens() # to really make sure everything else has time to stop await asyncio.sleep(0.07) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 08db4749b3..cb6e07154b 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -14,9 +14,9 @@ from openpype.lib.path_templates import ( TemplatesDict, FormatObject, ) -from openpype.lib.log import PypeLogger +from openpype.lib.log import Logger -log = PypeLogger.get_logger(__name__) +log = 
Logger.get_logger(__name__) class ProjectNotSet(Exception): diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 66bf5e9bb4..71fa7fb6c0 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -5,7 +5,6 @@ import sys import json import time -from openpype.lib import PypeLogger from openpype.api import get_app_environments_for_context from openpype.lib.plugin_tools import get_batch_asset_task_info from openpype.lib.remote_publish import ( @@ -27,10 +26,11 @@ class PypeCommands: """ @staticmethod def launch_tray(): - PypeLogger.set_process_name("Tray") - + from openpype.lib import Logger from openpype.tools import tray + Logger.set_process_name("Tray") + tray.main() @staticmethod @@ -47,10 +47,12 @@ class PypeCommands: @staticmethod def add_modules(click_func): """Modules/Addons can add their cli commands dynamically.""" + + from openpype.lib import Logger from openpype.modules import ModulesManager manager = ModulesManager() - log = PypeLogger.get_logger("AddModulesCLI") + log = Logger.get_logger("CLI-AddModules") for module in manager.modules: try: module.cli(click_func) @@ -96,10 +98,10 @@ class PypeCommands: Raises: RuntimeError: When there is no path to process. """ + + from openpype.lib import Logger from openpype.modules import ModulesManager from openpype.pipeline import install_openpype_plugins - - from openpype.api import Logger from openpype.tools.utils.host_tools import show_publish from openpype.tools.utils.lib import qt_app_context @@ -107,7 +109,7 @@ class PypeCommands: import pyblish.api import pyblish.util - log = Logger.get_logger() + log = Logger.get_logger("CLI-publish") install_openpype_plugins() @@ -195,11 +197,12 @@ class PypeCommands: targets (list): Pyblish targets (to choose validator for example) """ + import pyblish.api - from openpype.api import Logger from openpype.lib import ApplicationManager - log = Logger.get_logger() + from openpype.lib import Logger + log = Logger.get_logger("CLI-remotepublishfromapp") log.info("remotepublishphotoshop command") @@ -311,10 +314,11 @@ class PypeCommands: import pyblish.api import pyblish.util + from openpype.lib import Logger from openpype.pipeline import install_host from openpype.hosts.webpublisher import api as webpublisher - log = PypeLogger.get_logger() + log = Logger.get_logger("remotepublish") log.info("remotepublish command") diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 741f13c49b..f28fefdf5a 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -15,7 +15,7 @@ from .exceptions import ( EntitySchemaError ) -from openpype.lib import PypeLogger +from openpype.lib import Logger @six.add_metaclass(ABCMeta) @@ -478,7 +478,7 @@ class BaseItemEntity(BaseEntity): def log(self): """Auto created logger for debugging or warnings.""" if self._log is None: - self._log = PypeLogger.get_logger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log @abstractproperty From 3cdfc5b350dd1ccc3e940967f13cdda42c987739 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 15:54:07 +0200 Subject: [PATCH 0631/2550] use class log attribues instead of global loggers --- openpype/modules/sync_server/sync_server.py | 61 ++++++++-------- .../modules/sync_server/sync_server_module.py | 69 +++++++++++-------- 2 files changed, 73 insertions(+), 57 deletions(-) diff --git a/openpype/modules/sync_server/sync_server.py 
b/openpype/modules/sync_server/sync_server.py index d01ab1d3a0..8b11055e65 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -10,8 +10,6 @@ from openpype.lib import Logger from .utils import SyncStatus, ResumableError -log = Logger.get_logger("SyncServer") - async def upload(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): @@ -237,6 +235,7 @@ class SyncServerThread(threading.Thread): Stopped when tray is closed. """ def __init__(self, module): + self.log = Logger.get_logger(self.__class__.__name__) super(SyncServerThread, self).__init__() self.module = module self.loop = None @@ -248,17 +247,17 @@ class SyncServerThread(threading.Thread): self.is_running = True try: - log.info("Starting Sync Server") + self.log.info("Starting Sync Server") self.loop = asyncio.new_event_loop() # create new loop for thread asyncio.set_event_loop(self.loop) self.loop.set_default_executor(self.executor) asyncio.ensure_future(self.check_shutdown(), loop=self.loop) asyncio.ensure_future(self.sync_loop(), loop=self.loop) - log.info("Sync Server Started") + self.log.info("Sync Server Started") self.loop.run_forever() except Exception: - log.warning( + self.log.warning( "Sync Server service has failed", exc_info=True ) finally: @@ -378,8 +377,9 @@ class SyncServerThread(threading.Thread): )) processed_file_path.add(file_path) - log.debug("Sync tasks count {}". - format(len(task_files_to_process))) + self.log.debug("Sync tasks count {}".format( + len(task_files_to_process) + )) files_created = await asyncio.gather( *task_files_to_process, return_exceptions=True) @@ -398,28 +398,31 @@ class SyncServerThread(threading.Thread): error) duration = time.time() - start_time - log.debug("One loop took {:.2f}s".format(duration)) + self.log.debug("One loop took {:.2f}s".format(duration)) delay = self.module.get_loop_delay(project_name) - log.debug("Waiting for {} seconds to new loop".format(delay)) + self.log.debug( + "Waiting for {} seconds to new loop".format(delay) + ) self.timer = asyncio.create_task(self.run_timer(delay)) await asyncio.gather(self.timer) except ConnectionResetError: - log.warning("ConnectionResetError in sync loop, " - "trying next loop", - exc_info=True) + self.log.warning( + "ConnectionResetError in sync loop, trying next loop", + exc_info=True) except CancelledError: # just stopping server pass except ResumableError: - log.warning("ResumableError in sync loop, " - "trying next loop", - exc_info=True) + self.log.warning( + "ResumableError in sync loop, trying next loop", + exc_info=True) except Exception: self.stop() - log.warning("Unhandled except. in sync loop, stopping server", - exc_info=True) + self.log.warning( + "Unhandled except. 
in sync loop, stopping server", + exc_info=True) def stop(self): """Sets is_running flag to false, 'check_shutdown' shuts server down""" @@ -432,16 +435,17 @@ class SyncServerThread(threading.Thread): while self.is_running: if self.module.long_running_tasks: task = self.module.long_running_tasks.pop() - log.info("starting long running") + self.log.info("starting long running") await self.loop.run_in_executor(None, task["func"]) - log.info("finished long running") + self.log.info("finished long running") self.module.projects_processed.remove(task["project_name"]) await asyncio.sleep(0.5) tasks = [task for task in asyncio.all_tasks() if task is not asyncio.current_task()] list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks results = await asyncio.gather(*tasks, return_exceptions=True) - log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + self.log.debug( + f'Finished awaiting cancelled tasks, results: {results}...') await self.loop.shutdown_asyncgens() # to really make sure everything else has time to stop self.executor.shutdown(wait=True) @@ -454,29 +458,32 @@ class SyncServerThread(threading.Thread): def reset_timer(self): """Called when waiting for next loop should be skipped""" - log.debug("Resetting timer") + self.log.debug("Resetting timer") if self.timer: self.timer.cancel() self.timer = None def _working_sites(self, project_name): if self.module.is_project_paused(project_name): - log.debug("Both sites same, skipping") + self.log.debug("Both sites same, skipping") return None, None local_site = self.module.get_active_site(project_name) remote_site = self.module.get_remote_site(project_name) if local_site == remote_site: - log.debug("{}-{} sites same, skipping".format(local_site, - remote_site)) + self.log.debug("{}-{} sites same, skipping".format( + local_site, remote_site)) return None, None configured_sites = _get_configured_sites(self.module, project_name) if not all([local_site in configured_sites, remote_site in configured_sites]): - log.debug("Some of the sites {} - {} is not ".format(local_site, - remote_site) + - "working properly") + self.log.debug( + "Some of the sites {} - {} is not working properly".format( + local_site, remote_site + ) + ) + return None, None return local_site, remote_site diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index c72b310907..3ef680c5a6 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -462,7 +462,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. """ - log.info("Pausing SyncServer for {}".format(representation_id)) + self.log.info("Pausing SyncServer for {}".format(representation_id)) self._paused_representations.add(representation_id) self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=True) @@ -479,7 +479,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. 
""" - log.info("Unpausing SyncServer for {}".format(representation_id)) + self.log.info("Unpausing SyncServer for {}".format(representation_id)) try: self._paused_representations.remove(representation_id) except KeyError: @@ -518,7 +518,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Args: project_name (string): project_name name """ - log.info("Pausing SyncServer for {}".format(project_name)) + self.log.info("Pausing SyncServer for {}".format(project_name)) self._paused_projects.add(project_name) def unpause_project(self, project_name): @@ -530,7 +530,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Args: project_name (string): """ - log.info("Unpausing SyncServer for {}".format(project_name)) + self.log.info("Unpausing SyncServer for {}".format(project_name)) try: self._paused_projects.remove(project_name) except KeyError: @@ -558,14 +558,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): It won't check anything, not uploading/downloading... """ - log.info("Pausing SyncServer") + self.log.info("Pausing SyncServer") self._paused = True def unpause_server(self): """ Unpause server """ - log.info("Unpausing SyncServer") + self.log.info("Unpausing SyncServer") self._paused = False def is_paused(self): @@ -876,7 +876,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): # val = val[platform.system().lower()] # except KeyError: # st = "{}'s field value {} should be".format(key, val) # noqa: E501 - # log.error(st + " multiplatform dict") + # self.log.error(st + " multiplatform dict") # # item["namespace"] = item["namespace"].replace('{site}', # site_name) @@ -1148,7 +1148,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if self.enabled: self.sync_server_thread.start() else: - log.info("No presets or active providers. " + + self.log.info("No presets or active providers. 
" + "Synchronization not possible.") def tray_exit(self): @@ -1166,12 +1166,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if not self.is_running: return try: - log.info("Stopping sync server server") + self.log.info("Stopping sync server server") self.sync_server_thread.is_running = False self.sync_server_thread.stop() - log.info("Sync server stopped") + self.log.info("Sync server stopped") except Exception: - log.warning( + self.log.warning( "Error has happened during Killing sync server", exc_info=True ) @@ -1256,7 +1256,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): sync_project_settings[project_name] = proj_settings if not sync_project_settings: - log.info("No enabled and configured projects for sync.") + self.log.info("No enabled and configured projects for sync.") return sync_project_settings def get_sync_project_setting(self, project_name, exclude_locals=False, @@ -1387,7 +1387,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: (list) of dictionaries """ - log.debug("Check representations for : {}".format(project_name)) + self.log.debug("Check representations for : {}".format(project_name)) self.connection.Session["AVALON_PROJECT"] = project_name # retry_cnt - number of attempts to sync specific file before giving up retries_arr = self._get_retries_arr(project_name) @@ -1466,9 +1466,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): }}, {"$sort": {'priority': -1, '_id': 1}}, ] - log.debug("active_site:{} - remote_site:{}".format(active_site, - remote_site)) - log.debug("query: {}".format(aggr)) + self.log.debug("active_site:{} - remote_site:{}".format( + active_site, remote_site + )) + self.log.debug("query: {}".format(aggr)) representations = self.connection.aggregate(aggr) return representations @@ -1503,7 +1504,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if get_local_site_id() not in (local_site, remote_site): # don't do upload/download for studio sites - log.debug("No local site {} - {}".format(local_site, remote_site)) + self.log.debug("No local site {} - {}".format(local_site, remote_site)) return SyncStatus.DO_NOTHING _, remote_rec = self._get_site_rec(sites, remote_site) or {} @@ -1594,11 +1595,16 @@ class SyncServerModule(OpenPypeModule, ITrayModule): error_str = '' source_file = file.get("path", "") - log.debug("File for {} - {source_file} process {status} {error_str}". 
- format(representation_id, - status=status, - source_file=source_file, - error_str=error_str)) + self.log.debug( + ( + "File for {} - {source_file} process {status} {error_str}" + ).format( + representation_id, + status=status, + source_file=source_file, + error_str=error_str + ) + ) def _get_file_info(self, files, _id): """ @@ -1772,7 +1778,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): break if not found: msg = "Site {} not found".format(site_name) - log.info(msg) + self.log.info(msg) raise ValueError(msg) update = { @@ -1799,7 +1805,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): break if not found: msg = "Site {} not found".format(site_name) - log.info(msg) + self.log.info(msg) raise ValueError(msg) if pause: @@ -1834,7 +1840,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): reset_existing = False files = representation.get("files", []) if not files: - log.debug("No files for {}".format(representation_id)) + self.log.debug("No files for {}".format(representation_id)) return for repre_file in files: @@ -1851,7 +1857,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): reset_existing = True else: msg = "Site {} already present".format(site_name) - log.info(msg) + self.log.info(msg) raise SiteAlreadyPresentError(msg) if reset_existing: @@ -1951,16 +1957,19 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.widget = SyncServerWindow(self) no_errors = True except ValueError: - log.info("No system setting for sync. Not syncing.", exc_info=True) + self.log.info( + "No system setting for sync. Not syncing.", exc_info=True + ) except KeyError: - log.info(( + self.log.info(( "There are not set presets for SyncServer OR " "Credentials provided are invalid, " "no syncing possible"). format(str(self.sync_project_settings)), exc_info=True) except: - log.error("Uncaught exception durin start of SyncServer", - exc_info=True) + self.log.error( + "Uncaught exception durin start of SyncServer", + exc_info=True) self.enabled = no_errors self.widget.show() From c352ae5bcc713405794a3234ca38065624b9c119 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 15:57:52 +0200 Subject: [PATCH 0632/2550] add deprecation warning for PypeLogger and added docstring about deprecation --- openpype/lib/log.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index e77edea0e9..26dcd86eec 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -486,12 +486,18 @@ class Logger: class PypeLogger(Logger): + """Duplicate of 'Logger'. + + Deprecated: + Class will be removed after release version 3.16.* + """ + @classmethod def get_logger(cls, *args, **kwargs): logger = Logger.get_logger(*args, **kwargs) # TODO uncomment when replaced most of places - # logger.warning(( - # "'openpype.lib.PypeLogger' is deprecated class." - # " Please use 'openpype.lib.Logger' instead." - # )) + logger.warning(( + "'openpype.lib.PypeLogger' is deprecated class." + " Please use 'openpype.lib.Logger' instead." 
+ )) return logger From 437ead97762c861172f19901d0d83d8ea11b7b2b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 24 Aug 2022 15:54:06 +0200 Subject: [PATCH 0633/2550] OP-3214 - introduced update state enum --- distribution/addon_distribution.py | 33 ++++++++++++++++++++++-------- 1 file changed, 25 insertions(+), 8 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index f5af0f77ed..4ca3f5687a 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -15,6 +15,11 @@ class UrlType(Enum): FILESYSTEM = "filesystem" +class UpdateState(Enum): + EXISTS = "exists" + UPDATED = "updated" + FAILED = "failed" + @attr.s class MultiPlatformPath(object): windows = attr.ib(default=None) @@ -171,7 +176,8 @@ def update_addon_state(addon_infos, destination_folder, factory, addon type log (logging.Logger) Returns: - (dict): {"addon_full_name":"exists"|"updated"|"failed" + (dict): {"addon_full_name": UpdateState.value + (eg. "exists"|"updated"|"failed") """ if not log: log = logging.getLogger(__name__) @@ -183,17 +189,17 @@ def update_addon_state(addon_infos, destination_folder, factory, if os.path.isdir(addon_dest): log.debug(f"Addon version folder {addon_dest} already exists.") - download_states[full_name] = "exists" + download_states[full_name] = UpdateState.EXISTS.value continue for source in addon.sources: - download_states[full_name] = "failed" + download_states[full_name] = UpdateState.FAILED.value try: downloader = factory.get_downloader(source["type"]) zip_file_path = downloader.download(source, addon_dest) downloader.check_hash(zip_file_path, addon.hash) downloader.unzip(zip_file_path, addon_dest) - download_states[full_name] = "updated" + download_states[full_name] = UpdateState.UPDATED.value break except Exception: log.warning(f"Error happened during updating {addon.name}", @@ -203,11 +209,22 @@ def update_addon_state(addon_infos, destination_folder, factory, def check_addons(server_endpoint, addon_folder, downloaders): - """Main entry point to compare existing addons with those on server.""" + """Main entry point to compare existing addons with those on server. 
+ + Args: + server_endpoint (str): url to v4 server endpoint + addon_folder (str): local dir path for addons + downloaders (AddonDownloader): factory of downloaders + + Raises: + (RuntimeError) if any addon failed update + """ addons_info = get_addons_info(server_endpoint) - update_addon_state(addons_info, - addon_folder, - downloaders) + result = update_addon_state(addons_info, + addon_folder, + downloaders) + if UpdateState.FAILED.value in result.values(): + raise RuntimeError(f"Unable to update some addons {result}") def cli(args): From 0d495a36834d0e5aec062841c3f3820f68900c0a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 24 Aug 2022 16:01:37 +0200 Subject: [PATCH 0634/2550] OP-3214 - added unit tests --- distribution/__init__.py | 0 .../tests/test_addon_distributtion.py | 121 ++++++++++++++++++ 2 files changed, 121 insertions(+) create mode 100644 distribution/__init__.py create mode 100644 distribution/tests/test_addon_distributtion.py diff --git a/distribution/__init__.py b/distribution/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/distribution/tests/test_addon_distributtion.py b/distribution/tests/test_addon_distributtion.py new file mode 100644 index 0000000000..2e81bc4ef9 --- /dev/null +++ b/distribution/tests/test_addon_distributtion.py @@ -0,0 +1,121 @@ +import pytest +import attr +import tempfile + +from distribution.addon_distribution import ( + AddonDownloader, + UrlType, + OSAddonDownloader, + HTTPAddonDownloader, + AddonInfo, + update_addon_state, + UpdateState +) + + +@pytest.fixture +def addon_downloader(): + addon_downloader = AddonDownloader() + addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader) + addon_downloader.register_format(UrlType.HTTP, HTTPAddonDownloader) + + yield addon_downloader + + +@pytest.fixture +def http_downloader(addon_downloader): + yield addon_downloader.get_downloader(UrlType.HTTP.value) + + +@pytest.fixture +def temp_folder(): + yield tempfile.mkdtemp() + + +@pytest.fixture +def sample_addon_info(): + addon_info = { + "name": "openpype_slack", + "version": "1.0.0", + "sources": [ + { + "type": "http", + "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" + }, + { + "type": "filesystem", + "path": { + "windows": ["P:/sources/some_file.zip", "W:/sources/some_file.zip"], + "linux": ["/mnt/srv/sources/some_file.zip"], + "darwin": ["/Volumes/srv/sources/some_file.zip"] + } + } + ], + "hash": "4f6b8568eb9dd6f510fd7c4dcb676788" + } + yield addon_info + + +def test_register(printer): + addon_downloader = AddonDownloader() + + assert len(addon_downloader._downloaders) == 0, "Contains registered" + + addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader) + assert len(addon_downloader._downloaders) == 1, "Should contain one" + + +def test_get_downloader(printer, addon_downloader): + assert addon_downloader.get_downloader(UrlType.FILESYSTEM.value), "Should find" # noqa + + with pytest.raises(ValueError): + addon_downloader.get_downloader("unknown"), "Shouldn't find" + + +def test_addon_info(printer, sample_addon_info): + valid_minimum = {"name": "openpype_slack", "version": "1.0.0"} + + assert AddonInfo(**valid_minimum), "Missing required fields" + assert AddonInfo(name=valid_minimum["name"], + version=valid_minimum["version"]), \ + "Missing required fields" + + with pytest.raises(TypeError): + # TODO should be probably implemented + assert AddonInfo(valid_minimum), "Wrong argument format" + + addon = AddonInfo(**sample_addon_info) + assert addon, 
"Should be created" + assert addon.name == "openpype_slack", "Incorrect name" + assert addon.version == "1.0.0", "Incorrect version" + + with pytest.raises(TypeError): + assert addon["name"], "Dict approach not implemented" + + addon_as_dict = attr.asdict(addon) + assert addon_as_dict["name"], "Dict approach should work" + + with pytest.raises(AttributeError): + # TODO should be probably implemented as . not dict + first_source = addon.sources[0] + assert first_source.type == "http", "Not implemented" + + +def test_update_addon_state(printer, sample_addon_info, + temp_folder, addon_downloader): + addon_info = AddonInfo(**sample_addon_info) + orig_hash = addon_info.hash + + addon_info.hash = "brokenhash" + result = update_addon_state([addon_info], temp_folder, addon_downloader) + assert (result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, + "Hashes not matching") + + addon_info.hash = orig_hash + result = update_addon_state([addon_info], temp_folder, addon_downloader) + assert (result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, + "Failed updating") + + result = update_addon_state([addon_info], temp_folder, addon_downloader) + assert (result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, + "Tried to update") From be5dbd6512362c58abb5ec9415414903e8badb20 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 24 Aug 2022 16:03:57 +0200 Subject: [PATCH 0635/2550] OP-3214 - Hound --- distribution/addon_distribution.py | 1 + distribution/tests/test_addon_distributtion.py | 16 ++++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 4ca3f5687a..95d0b5e397 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -20,6 +20,7 @@ class UpdateState(Enum): UPDATED = "updated" FAILED = "failed" + @attr.s class MultiPlatformPath(object): windows = attr.ib(default=None) diff --git a/distribution/tests/test_addon_distributtion.py b/distribution/tests/test_addon_distributtion.py index 2e81bc4ef9..e67ca3c479 100644 --- a/distribution/tests/test_addon_distributtion.py +++ b/distribution/tests/test_addon_distributtion.py @@ -40,12 +40,12 @@ def sample_addon_info(): "sources": [ { "type": "http", - "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" + "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" # noqa }, { "type": "filesystem", "path": { - "windows": ["P:/sources/some_file.zip", "W:/sources/some_file.zip"], + "windows": ["P:/sources/some_file.zip", "W:/sources/some_file.zip"], # noqa "linux": ["/mnt/srv/sources/some_file.zip"], "darwin": ["/Volumes/srv/sources/some_file.zip"] } @@ -108,14 +108,14 @@ def test_update_addon_state(printer, sample_addon_info, addon_info.hash = "brokenhash" result = update_addon_state([addon_info], temp_folder, addon_downloader) - assert (result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, - "Hashes not matching") + assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \ + "Hashes not matching" addon_info.hash = orig_hash result = update_addon_state([addon_info], temp_folder, addon_downloader) - assert (result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, - "Failed updating") + assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \ + "Failed updating" result = update_addon_state([addon_info], temp_folder, addon_downloader) - assert (result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, - "Tried 
to update") + assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \ + "Tried to update" From 08efc477caa31e3ee064ce755ff7336322a9bc2b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 16:21:04 +0200 Subject: [PATCH 0636/2550] small tweaks in usage of Logger --- openpype/hosts/celaction/api/cli.py | 2 +- openpype/hosts/fusion/api/pipeline.py | 4 ++-- .../fusion/utility_scripts/__OpenPype_Menu__.py | 5 ++--- openpype/hosts/hiero/api/events.py | 5 ++--- openpype/hosts/hiero/api/lib.py | 4 ++-- openpype/hosts/hiero/api/pipeline.py | 4 ++-- openpype/hosts/hiero/api/plugin.py | 3 ++- openpype/hosts/nuke/plugins/load/actions.py | 4 ++-- openpype/hosts/nuke/startup/clear_rendered.py | 5 +++-- openpype/hosts/nuke/startup/write_to_read.py | 4 ++-- .../modules/ftrack/ftrack_server/socket_thread.py | 6 +++--- openpype/modules/ftrack/lib/ftrack_base_handler.py | 4 ++-- .../modules/ftrack/scripts/sub_event_processor.py | 11 +++++------ .../modules/ftrack/scripts/sub_legacy_server.py | 4 ++-- openpype/modules/ftrack/scripts/sub_user_server.py | 5 ++--- openpype/modules/ftrack/tray/ftrack_tray.py | 5 +++-- .../sync_server/providers/abstract_provider.py | 4 ++-- openpype/modules/sync_server/utils.py | 6 ++++-- openpype/modules/timers_manager/rest_api.py | 14 +++++++++----- 19 files changed, 52 insertions(+), 47 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 8c7b3a2e74..eb91def090 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -14,7 +14,7 @@ from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins -log = Logger().get_logger("Celaction_cli_publisher") +log = Logger.get_logger("Celaction_cli_publisher") publish_host = "celaction" diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 54002f9f51..54a6c94b60 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -8,7 +8,7 @@ import contextlib import pyblish.api -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -20,7 +20,7 @@ from openpype.pipeline import ( ) import openpype.hosts.fusion -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") diff --git a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py index de8fc4b3b4..870e74280a 100644 --- a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py +++ b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py @@ -1,14 +1,12 @@ import os import sys -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( install_host, registered_host, ) -log = Logger().get_logger(__name__) - def main(env): from openpype.hosts.fusion import api @@ -17,6 +15,7 @@ def main(env): # activate resolve from pype install_host(api) + log = Logger.get_logger(__name__) log.info(f"Registered host: {registered_host()}") menu.launch_openpype_menu() diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py index 59fd278a81..862a2607c1 100644 --- a/openpype/hosts/hiero/api/events.py +++ b/openpype/hosts/hiero/api/events.py @@ -1,7 +1,6 @@ import os import hiero.core.events -from openpype.api import 
Logger -from openpype.lib import register_event_callback +from openpype.lib import Logger, register_event_callback from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, @@ -11,7 +10,7 @@ from .lib import ( from .tags import add_tags_to_workfile from .menu import update_menu_task_label -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def startupCompleted(event): diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 2f66f3ddd7..e288cea2b1 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -21,7 +21,7 @@ from openpype.client import ( ) from openpype.settings import get_anatomy_settings from openpype.pipeline import legacy_io, Anatomy -from openpype.api import Logger +from openpype.lib import Logger from . import tags try: @@ -34,7 +34,7 @@ except ImportError: # from opentimelineio import opentime # from pprint import pformat -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) self = sys.modules[__name__] self._has_been_setup = False diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index b243a38b06..dacfd338bb 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -6,7 +6,7 @@ import contextlib from collections import OrderedDict from pyblish import api as pyblish -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( schema, register_creator_plugin_path, @@ -18,7 +18,7 @@ from openpype.pipeline import ( from openpype.tools.utils import host_tools from . import lib, menu, events -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) # plugin paths API_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index 28a9dfb492..77fedbbbdc 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -9,11 +9,12 @@ from Qt import QtWidgets, QtCore import qargparse import openpype.api as openpype +from openpype.lib import Logger from openpype.pipeline import LoaderPlugin, LegacyCreator from openpype.pipeline.context_tools import get_current_project_asset from . 
import lib -log = openpype.Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def load_stylesheet(): diff --git a/openpype/hosts/nuke/plugins/load/actions.py b/openpype/hosts/nuke/plugins/load/actions.py index d364a4f3a1..69f56c7305 100644 --- a/openpype/hosts/nuke/plugins/load/actions.py +++ b/openpype/hosts/nuke/plugins/load/actions.py @@ -2,10 +2,10 @@ """ -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import load -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) class SetFrameRangeLoader(load.LoaderPlugin): diff --git a/openpype/hosts/nuke/startup/clear_rendered.py b/openpype/hosts/nuke/startup/clear_rendered.py index cf1d8ce170..744af71034 100644 --- a/openpype/hosts/nuke/startup/clear_rendered.py +++ b/openpype/hosts/nuke/startup/clear_rendered.py @@ -1,10 +1,11 @@ import os -from openpype.api import Logger -log = Logger().get_logger(__name__) +from openpype.lib import Logger def clear_rendered(dir_path): + log = Logger.get_logger(__name__) + for _f in os.listdir(dir_path): _f_path = os.path.join(dir_path, _f) log.info("Removing: `{}`".format(_f_path)) diff --git a/openpype/hosts/nuke/startup/write_to_read.py b/openpype/hosts/nuke/startup/write_to_read.py index f5cf66b357..b7add40f47 100644 --- a/openpype/hosts/nuke/startup/write_to_read.py +++ b/openpype/hosts/nuke/startup/write_to_read.py @@ -2,8 +2,8 @@ import re import os import glob import nuke -from openpype.api import Logger -log = Logger().get_logger(__name__) +from openpype.lib import Logger +log = Logger.get_logger(__name__) SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v', 'm2v'] diff --git a/openpype/modules/ftrack/ftrack_server/socket_thread.py b/openpype/modules/ftrack/ftrack_server/socket_thread.py index f49ca5557e..3ef55f8daa 100644 --- a/openpype/modules/ftrack/ftrack_server/socket_thread.py +++ b/openpype/modules/ftrack/ftrack_server/socket_thread.py @@ -5,8 +5,8 @@ import socket import threading import traceback import subprocess -from openpype.api import Logger -from openpype.lib import get_openpype_execute_args + +from openpype.lib import get_openpype_execute_args, Logger class SocketThread(threading.Thread): @@ -16,7 +16,7 @@ class SocketThread(threading.Thread): def __init__(self, name, port, filepath, additional_args=[]): super(SocketThread, self).__init__() - self.log = Logger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) self.setName(name) self.name = name self.port = port diff --git a/openpype/modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/ftrack/lib/ftrack_base_handler.py index c0fad6aadc..c0b03f8a41 100644 --- a/openpype/modules/ftrack/lib/ftrack_base_handler.py +++ b/openpype/modules/ftrack/lib/ftrack_base_handler.py @@ -6,7 +6,7 @@ import uuid import datetime import traceback import time -from openpype.api import Logger +from openpype.lib import Logger from openpype.settings import get_project_settings import ftrack_api @@ -52,7 +52,7 @@ class BaseHandler(object): def __init__(self, session): '''Expects a ftrack_api.Session instance''' - self.log = Logger().get_logger(self.__class__.__name__) + self.log = Logger.get_logger(self.__class__.__name__) if not( isinstance(session, ftrack_api.session.Session) or isinstance(session, ftrack_server.lib.SocketSession) diff --git a/openpype/modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/ftrack/scripts/sub_event_processor.py index d1e2e3aaeb..a5ce0511b8 100644 --- 
a/openpype/modules/ftrack/scripts/sub_event_processor.py +++ b/openpype/modules/ftrack/scripts/sub_event_processor.py @@ -4,6 +4,8 @@ import signal import socket import datetime +import ftrack_api + from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -12,17 +14,12 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype.modules import ModulesManager -from openpype.api import Logger from openpype.lib import ( + Logger, get_openpype_version, get_build_version ) - -import ftrack_api - -log = Logger().get_logger("Event processor") - subprocess_started = datetime.datetime.now() @@ -68,6 +65,8 @@ def register(session): def main(args): + log = Logger.get_logger("Event processor") + port = int(args[-1]) # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) diff --git a/openpype/modules/ftrack/scripts/sub_legacy_server.py b/openpype/modules/ftrack/scripts/sub_legacy_server.py index e3a623c376..1f0fc1b369 100644 --- a/openpype/modules/ftrack/scripts/sub_legacy_server.py +++ b/openpype/modules/ftrack/scripts/sub_legacy_server.py @@ -5,11 +5,11 @@ import signal import threading import ftrack_api -from openpype.api import Logger +from openpype.lib import Logger from openpype.modules import ModulesManager from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer -log = Logger().get_logger("Event Server Legacy") +log = Logger.get_logger("Event Server Legacy") class TimerChecker(threading.Thread): diff --git a/openpype/modules/ftrack/scripts/sub_user_server.py b/openpype/modules/ftrack/scripts/sub_user_server.py index a3701a0950..930a2d51e2 100644 --- a/openpype/modules/ftrack/scripts/sub_user_server.py +++ b/openpype/modules/ftrack/scripts/sub_user_server.py @@ -2,6 +2,7 @@ import sys import signal import socket +from openpype.lib import Logger from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, @@ -9,9 +10,7 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype.modules import ModulesManager -from openpype.api import Logger - -log = Logger().get_logger("FtrackUserServer") +log = Logger.get_logger("FtrackUserServer") def main(args): diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index 2919ae22fb..501d837a4c 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -12,10 +12,11 @@ from ..lib import credentials from ..ftrack_module import FTRACK_MODULE_DIR from . 
import login_dialog -from openpype.api import Logger, resources +from openpype import resources +from openpype.lib import Logger -log = Logger().get_logger("FtrackModule") +log = Logger.get_logger("FtrackModule") class FtrackTrayWrapper: diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 8c2fe1cad9..9c808dc80e 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -1,8 +1,8 @@ import abc import six -from openpype.api import Logger +from openpype.lib import Logger -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer") @six.add_metaclass(abc.ABCMeta) diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py index 03f362202f..4caa01e9d7 100644 --- a/openpype/modules/sync_server/utils.py +++ b/openpype/modules/sync_server/utils.py @@ -1,6 +1,8 @@ import time -from openpype.api import Logger -log = Logger().get_logger("SyncServer") + +from openpype.lib import Logger + +log = Logger.get_logger("SyncServer") class ResumableError(Exception): diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index f16cb316c3..9bde19aec9 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -1,9 +1,7 @@ import json from aiohttp.web_response import Response -from openpype.api import Logger - -log = Logger().get_logger("Event processor") +from openpype.lib import Logger class TimersManagerModuleRestApi: @@ -12,6 +10,7 @@ class TimersManagerModuleRestApi: happens in Workfile app. """ def __init__(self, user_module, server_manager): + self.log = None self.module = user_module self.server_manager = server_manager @@ -19,6 +18,11 @@ class TimersManagerModuleRestApi: self.register() + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__ckass__.__name__) + return self._log + def register(self): self.server_manager.add_route( "POST", @@ -47,7 +51,7 @@ class TimersManagerModuleRestApi: "Payload must contain fields 'project_name," " 'asset_name' and 'task_name'" ) - log.error(msg) + self.log.error(msg) return Response(status=400, message=msg) self.module.stop_timers() @@ -73,7 +77,7 @@ class TimersManagerModuleRestApi: "Payload must contain fields 'project_name, 'asset_name'," " 'task_name'" ) - log.warning(message) + self.log.warning(message) return Response(text=message, status=404) time = self.module.get_task_time(project_name, asset_name, task_name) From 840fbaa38086b110f9ad72584aa3e37fa1bf1178 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 16:21:17 +0200 Subject: [PATCH 0637/2550] cleanup imports in modules --- openpype/modules/base.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 7fc848af2d..8ac4e7ddac 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -13,7 +13,6 @@ from uuid import uuid4 from abc import ABCMeta, abstractmethod import six -import openpype from openpype.settings import ( get_system_settings, SYSTEM_SETTINGS_KEY, @@ -26,7 +25,12 @@ from openpype.settings.lib import ( get_studio_system_settings_overrides, load_json_file ) -from openpype.lib import Logger + +from openpype.lib import ( + Logger, + import_filepath, + import_module_from_dirpath +) # Files that will be always ignored on modules import 
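
The `TimersManagerModuleRestApi` change above, together with the "fix property" commit that follows, is aiming at a lazily created, per-class logger. Written out on its own, the intended pattern looks roughly like this sketch; the class name is illustrative, and the `self.__ckass__` and `self.log = None` slips from the diff are written as `self.__class__` and `self._log = None` here:

```python
from openpype.lib import Logger


class ExampleRestApi(object):
    """Sketch of the lazily created logger the two commits converge on."""

    def __init__(self):
        # The logger is not built until something actually logs
        self._log = None

    @property
    def log(self):
        if self._log is None:
            self._log = Logger.get_logger(self.__class__.__name__)
        return self._log
```
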
IGNORED_FILENAMES = ( @@ -278,12 +282,6 @@ def load_modules(force=False): def _load_modules(): - # Import helper functions from lib - from openpype.lib import ( - import_filepath, - import_module_from_dirpath - ) - # Key under which will be modules imported in `sys.modules` modules_key = "openpype_modules" From bf8e2207e07dd18f1fc8e2d8026ec719886666ba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 16:58:38 +0200 Subject: [PATCH 0638/2550] fix property --- openpype/modules/timers_manager/rest_api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index 9bde19aec9..6686407350 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -18,6 +18,7 @@ class TimersManagerModuleRestApi: self.register() + @property def log(self): if self._log is None: self._log = Logger.get_logger(self.__ckass__.__name__) From 310e9bf50f59a3f39adf54d18047eb7a422c5843 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 17:19:01 +0200 Subject: [PATCH 0639/2550] fix line lengths --- openpype/modules/sync_server/sync_server_module.py | 4 +++- openpype/modules/webserver/server.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 3ef680c5a6..634b68c55f 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1504,7 +1504,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if get_local_site_id() not in (local_site, remote_site): # don't do upload/download for studio sites - self.log.debug("No local site {} - {}".format(local_site, remote_site)) + self.log.debug( + "No local site {} - {}".format(local_site, remote_site) + ) return SyncStatus.DO_NOTHING _, remote_rec = self._get_site_rec(sites, remote_site) or {} diff --git a/openpype/modules/webserver/server.py b/openpype/modules/webserver/server.py index 44b14acbb6..120925a362 100644 --- a/openpype/modules/webserver/server.py +++ b/openpype/modules/webserver/server.py @@ -171,7 +171,9 @@ class WebServerThread(threading.Thread): ] list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks results = await asyncio.gather(*tasks, return_exceptions=True) - self.log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + self.log.debug( + f'Finished awaiting cancelled tasks, results: {results}...' 
+ ) await self.loop.shutdown_asyncgens() # to really make sure everything else has time to stop await asyncio.sleep(0.07) From 8539c03d72c246c125334f059522b14073cd6ed8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 17:53:21 +0200 Subject: [PATCH 0640/2550] remove getattrs on instance and context --- openpype/tools/publisher/widgets/publish_widget.py | 2 -- openpype/tools/pyblish_pype/model.py | 5 +---- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/tools/publisher/widgets/publish_widget.py b/openpype/tools/publisher/widgets/publish_widget.py index 80d0265dd3..b32b5381d1 100644 --- a/openpype/tools/publisher/widgets/publish_widget.py +++ b/openpype/tools/publisher/widgets/publish_widget.py @@ -335,14 +335,12 @@ class PublishFrame(QtWidgets.QFrame): if instance is None: new_name = ( context.data.get("label") - or getattr(context, "label", None) or context.data.get("name") or "Context" ) else: new_name = ( instance.data.get("label") - or getattr(instance, "label", None) or instance.data["name"] ) diff --git a/openpype/tools/pyblish_pype/model.py b/openpype/tools/pyblish_pype/model.py index 309126a884..1479d91bb5 100644 --- a/openpype/tools/pyblish_pype/model.py +++ b/openpype/tools/pyblish_pype/model.py @@ -613,10 +613,7 @@ class InstanceItem(QtGui.QStandardItem): if role == QtCore.Qt.DisplayRole: label = None if settings.UseLabel: - label = ( - self.instance.data.get("label") - or getattr(self.instance, "label", None) - ) + label = self.instance.data.get("label") if not label: if self.is_context: From a42f7278c2ec1123b2f19b103933182f2e3cd92d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 17:59:46 +0200 Subject: [PATCH 0641/2550] define resolve as module --- openpype/hosts/resolve/__init__.py | 6 ++++++ openpype/hosts/resolve/module.py | 24 ++++++++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 openpype/hosts/resolve/module.py diff --git a/openpype/hosts/resolve/__init__.py b/openpype/hosts/resolve/__init__.py index e69de29bb2..a02e07794d 100644 --- a/openpype/hosts/resolve/__init__.py +++ b/openpype/hosts/resolve/__init__.py @@ -0,0 +1,6 @@ +from .module import ResolveModule + + +__all__ = ( + "ResolveModule", +) diff --git a/openpype/hosts/resolve/module.py b/openpype/hosts/resolve/module.py new file mode 100644 index 0000000000..8f2824df75 --- /dev/null +++ b/openpype/hosts/resolve/module.py @@ -0,0 +1,24 @@ +import os + +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +from .utils import RESOLVE_ROOT_DIR + + +class ResolveModule(OpenPypeModule, IHostModule): + name = "resolve" + host_name = "resolve" + + def initialize(self, module_settings): + self.enabled = True + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(RESOLVE_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".drp"] From 6a1979b6b2b852be227ff0b254cc27797ba8b3f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 18:09:28 +0200 Subject: [PATCH 0642/2550] created aftereffects module --- openpype/hosts/aftereffects/__init__.py | 15 ++++++--------- openpype/hosts/aftereffects/module.py | 24 ++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 9 deletions(-) create mode 100644 openpype/hosts/aftereffects/module.py diff --git a/openpype/hosts/aftereffects/__init__.py b/openpype/hosts/aftereffects/__init__.py index deae48d122..c9ad6aaeeb 100644 --- a/openpype/hosts/aftereffects/__init__.py 
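
With `ResolveModule` above (and the host modules added in the following commits) exposing `host_name` and `get_workfile_extensions()`, callers can resolve workfile extensions through the modules system instead of a hard-coded mapping. A rough sketch of such a lookup, assuming `ModulesManager()` can be constructed directly and that iterating enabled modules, as in the `openpype/modules/base.py` hunks elsewhere in this series, is the intended discovery route:

```python
from openpype.modules import ModulesManager
from openpype.modules.interfaces import IHostModule


def workfile_extensions_for_host(host_name):
    """Sketch: ask the enabled host module for its workfile extensions."""
    manager = ModulesManager()
    for module in manager.get_enabled_modules():
        if isinstance(module, IHostModule) and module.host_name == host_name:
            return module.get_workfile_extensions()
    return []


# e.g. workfile_extensions_for_host("resolve") would return [".drp"]
```
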
+++ b/openpype/hosts/aftereffects/__init__.py @@ -1,9 +1,6 @@ -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True", - "WEBSOCKET_URL": "ws://localhost:8097/ws/" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +from .module import AfterEffectsModule + + +__all__ = ( + "AfterEffectsModule", +) diff --git a/openpype/hosts/aftereffects/module.py b/openpype/hosts/aftereffects/module.py new file mode 100644 index 0000000000..33e42b451b --- /dev/null +++ b/openpype/hosts/aftereffects/module.py @@ -0,0 +1,24 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + + +class AfterEffectsModule(OpenPypeModule, IHostModule): + name = "aftereffects" + host_name = "aftereffects" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True", + "WEBSOCKET_URL": "ws://localhost:8097/ws/" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".aep"] From 7e1ba966ce2188e5a492255bae7f9216eaac8833 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 18:09:38 +0200 Subject: [PATCH 0643/2550] workio is not using HOST_WORKFILE_EXTENSIONS --- openpype/hosts/aftereffects/api/workio.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/api/workio.py b/openpype/hosts/aftereffects/api/workio.py index d6c732285a..18b40af5dc 100644 --- a/openpype/hosts/aftereffects/api/workio.py +++ b/openpype/hosts/aftereffects/api/workio.py @@ -1,12 +1,11 @@ """Host API required Work Files tool""" import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from .launch_logic import get_stub def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["aftereffects"] + return [".aep"] def has_unsaved_changes(): From 18f22f4d0fa522c50bba4797471ebd5ab4446e43 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 18:12:49 +0200 Subject: [PATCH 0644/2550] removed unused import --- openpype/hosts/aftereffects/module.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/aftereffects/module.py b/openpype/hosts/aftereffects/module.py index 33e42b451b..93d575c186 100644 --- a/openpype/hosts/aftereffects/module.py +++ b/openpype/hosts/aftereffects/module.py @@ -1,4 +1,3 @@ -import os from openpype.modules import OpenPypeModule from openpype.modules.interfaces import IHostModule From 6282719d9a7d0a773ebb075a801d496083b908be Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 18:16:25 +0200 Subject: [PATCH 0645/2550] added blender module --- openpype/hosts/blender/__init__.py | 54 ++-------------------- openpype/hosts/blender/module.py | 73 ++++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 50 deletions(-) create mode 100644 openpype/hosts/blender/module.py diff --git a/openpype/hosts/blender/__init__.py b/openpype/hosts/blender/__init__.py index 0f27882c7e..58d7ac656f 100644 --- a/openpype/hosts/blender/__init__.py +++ b/openpype/hosts/blender/__init__.py @@ -1,52 +1,6 @@ -import os +from .module import BlenderModule -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - # Prepare path to implementation script - 
implementation_user_script_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "blender_addon" - ) - - # Add blender implementation script path to PYTHONPATH - python_path = env.get("PYTHONPATH") or "" - python_path_parts = [ - path - for path in python_path.split(os.pathsep) - if path - ] - python_path_parts.insert(0, implementation_user_script_path) - env["PYTHONPATH"] = os.pathsep.join(python_path_parts) - - # Modify Blender user scripts path - previous_user_scripts = set() - # Implementation path is added to set for easier paths check inside loops - # - will be removed at the end - previous_user_scripts.add(implementation_user_script_path) - - openpype_blender_user_scripts = ( - env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or "" - ) - for path in openpype_blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or "" - for path in blender_user_scripts.split(os.pathsep): - if path: - previous_user_scripts.add(os.path.normpath(path)) - - # Remove implementation path from user script paths as is set to - # `BLENDER_USER_SCRIPTS` - previous_user_scripts.remove(implementation_user_script_path) - env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path - - # Set custom user scripts env - env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join( - previous_user_scripts - ) - - # Define Qt binding if not defined - if not env.get("QT_PREFERRED_BINDING"): - env["QT_PREFERRED_BINDING"] = "PySide2" +__all__ = ( + "BlenderModule", +) diff --git a/openpype/hosts/blender/module.py b/openpype/hosts/blender/module.py new file mode 100644 index 0000000000..73865184c0 --- /dev/null +++ b/openpype/hosts/blender/module.py @@ -0,0 +1,73 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class BlenderModule(OpenPypeModule, IHostModule): + name = "nuke" + host_name = "nuke" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + # Prepare path to implementation script + implementation_user_script_path = os.path.join( + BLENDER_ROOT_DIR, + "blender_addon" + ) + + # Add blender implementation script path to PYTHONPATH + python_path = env.get("PYTHONPATH") or "" + python_path_parts = [ + path + for path in python_path.split(os.pathsep) + if path + ] + python_path_parts.insert(0, implementation_user_script_path) + env["PYTHONPATH"] = os.pathsep.join(python_path_parts) + + # Modify Blender user scripts path + previous_user_scripts = set() + # Implementation path is added to set for easier paths check inside loops + # - will be removed at the end + previous_user_scripts.add(implementation_user_script_path) + + openpype_blender_user_scripts = ( + env.get("OPENPYPE_BLENDER_USER_SCRIPTS") or "" + ) + for path in openpype_blender_user_scripts.split(os.pathsep): + if path: + previous_user_scripts.add(os.path.normpath(path)) + + blender_user_scripts = env.get("BLENDER_USER_SCRIPTS") or "" + for path in blender_user_scripts.split(os.pathsep): + if path: + previous_user_scripts.add(os.path.normpath(path)) + + # Remove implementation path from user script paths as is set to + # `BLENDER_USER_SCRIPTS` + previous_user_scripts.remove(implementation_user_script_path) + env["BLENDER_USER_SCRIPTS"] = implementation_user_script_path + + # Set custom 
user scripts env + env["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join( + previous_user_scripts + ) + + # Define Qt binding if not defined + if not env.get("QT_PREFERRED_BINDING"): + env["QT_PREFERRED_BINDING"] = "PySide2" + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(BLENDER_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".blend"] From f36b8f49a2202b10e5f17fc4ecb26cfa1ab6e428 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 18:16:39 +0200 Subject: [PATCH 0646/2550] don't use HOST_WORKFILE_EXTENSIONS in blender workio --- openpype/hosts/blender/api/workio.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/blender/api/workio.py b/openpype/hosts/blender/api/workio.py index 5eb9f82999..a8f6193abc 100644 --- a/openpype/hosts/blender/api/workio.py +++ b/openpype/hosts/blender/api/workio.py @@ -5,8 +5,6 @@ from typing import List, Optional import bpy -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - class OpenFileCacher: """Store information about opening file. @@ -78,7 +76,7 @@ def has_unsaved_changes() -> bool: def file_extensions() -> List[str]: """Return the supported file extensions for Blender scene files.""" - return HOST_WORKFILE_EXTENSIONS["blender"] + return [".blend"] def work_root(session: dict) -> str: From 78d107f485894726c87b502d2e88eb2ecf0d8e38 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 24 Aug 2022 18:19:08 +0200 Subject: [PATCH 0647/2550] hound fix --- openpype/hosts/blender/module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/blender/module.py b/openpype/hosts/blender/module.py index 73865184c0..0cb2f5c44b 100644 --- a/openpype/hosts/blender/module.py +++ b/openpype/hosts/blender/module.py @@ -32,8 +32,8 @@ class BlenderModule(OpenPypeModule, IHostModule): # Modify Blender user scripts path previous_user_scripts = set() - # Implementation path is added to set for easier paths check inside loops - # - will be removed at the end + # Implementation path is added to set for easier paths check inside + # loops - will be removed at the end previous_user_scripts.add(implementation_user_script_path) openpype_blender_user_scripts = ( From 28bdf2f2caa5ce4c80ffa9f3bb2a470cb9769ba7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 10:55:57 +0200 Subject: [PATCH 0648/2550] fix host name --- openpype/hosts/blender/module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/blender/module.py b/openpype/hosts/blender/module.py index 0cb2f5c44b..d6ff3b111c 100644 --- a/openpype/hosts/blender/module.py +++ b/openpype/hosts/blender/module.py @@ -6,8 +6,8 @@ BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) class BlenderModule(OpenPypeModule, IHostModule): - name = "nuke" - host_name = "nuke" + name = "blender" + host_name = "blender" def initialize(self, module_settings): self.enabled = True From 2f2cbd41465c882519ef6831e5ad81373f7a3615 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 25 Aug 2022 11:24:53 +0200 Subject: [PATCH 0649/2550] OP-3722 - added check for empty context to basic publish --- openpype/pype_commands.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index b266479bb1..391244d185 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -230,7 +230,6 @@ class PypeCommands: format("\n".join(running_batches)) 
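
The PYTHONPATH handling in `BlenderModule.add_implementation_envs` a few hunks above reduces to one idea: keep the existing entries, drop empty fragments, and put the implementation path first. A stripped-down standalone sketch of that idea (the paths are invented):

```python
import os


def prepend_env_path(env, key, new_path):
    # Keep existing entries, drop empty fragments, put new_path first
    parts = [p for p in (env.get(key) or "").split(os.pathsep) if p]
    parts.insert(0, new_path)
    env[key] = os.pathsep.join(parts)


env = {"PYTHONPATH": "/studio/tools"}  # invented starting value
prepend_env_path(env, "PYTHONPATH", "/openpype/hosts/blender/blender_addon")
print(env["PYTHONPATH"])
# -> /openpype/hosts/blender/blender_addon:/studio/tools (separator is os.pathsep)
```
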
msg += "Ask admin to check them and reprocess current batch" fail_batch(_id, dbcon, msg) - print("Another batch running, probably stuck, ask admin for help") if not task_data["context"]: msg = "Batch manifest must contain context data" @@ -351,6 +350,12 @@ class PypeCommands: dbcon = get_webpublish_conn() _id = start_webpublish_log(dbcon, batch_id, user_email) + task_data = get_task_data(batch_path) + if not task_data["context"]: + msg = "Batch manifest must contain context data" + msg += "Create new batch and set context properly." + fail_batch(_id, dbcon, msg) + publish_and_log(dbcon, _id, log, batch_id=batch_id) log.info("Publish finished.") From d623dfa857be9b6650a7c4cd285f73b02be32808 Mon Sep 17 00:00:00 2001 From: Kaa Maurice Date: Thu, 25 Aug 2022 11:54:09 +0200 Subject: [PATCH 0650/2550] fix validator invalid return --- .../blender/plugins/publish/validate_mesh_no_negative_scale.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 3c5c7c11eb..329a8d80c3 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -24,6 +24,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): if isinstance(obj, bpy.types.Object) and obj.type == 'MESH': if any(v < 0 for v in obj.scale): invalid.append(obj) + return invalid def process(self, instance): invalid = self.get_invalid(instance) From 382ec674a8d044f2f4f1650773b78192062618d2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 12:02:22 +0200 Subject: [PATCH 0651/2550] copied 'get_unique_layer_name' and 'get_background_layers' into ae lib --- openpype/hosts/aftereffects/api/lib.py | 56 ++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py index ce4cbf09af..dc16aaeac5 100644 --- a/openpype/hosts/aftereffects/api/lib.py +++ b/openpype/hosts/aftereffects/api/lib.py @@ -1,5 +1,7 @@ import os import sys +import re +import json import contextlib import traceback import logging @@ -68,3 +70,57 @@ def get_extension_manifest_path(): "CSXS", "manifest.xml" ) + + +def get_unique_layer_name(layers, name): + """ + Gets all layer names and if 'name' is present in them, increases + suffix by 1 (eg. creates unique layer name - for Loader) + Args: + layers (list): of strings, names only + name (string): checked value + + Returns: + (string): name_00X (without version) + """ + names = {} + for layer in layers: + layer_name = re.sub(r'_\d{3}$', '', layer) + if layer_name in names.keys(): + names[layer_name] = names[layer_name] + 1 + else: + names[layer_name] = 1 + occurrences = names.get(name, 0) + + return "{}_{:0>3d}".format(name, occurrences + 1) + + +def get_background_layers(file_url): + """ + Pulls file name from background json file, enrich with folder url for + AE to be able import files. + + Order is important, follows order in json. + + Args: + file_url (str): abs url of background json + + Returns: + (list): of abs paths to images + """ + with open(file_url) as json_file: + data = json.load(json_file) + + layers = list() + bg_folder = os.path.dirname(file_url) + for child in data['children']: + if child.get("filename"): + layers.append(os.path.join(bg_folder, child.get("filename")). 
+ replace("\\", "/")) + else: + for layer in child['children']: + if layer.get("filename"): + layers.append(os.path.join(bg_folder, + layer.get("filename")). + replace("\\", "/")) + return layers From 5d83a428d9f7fd11fdf8002dc231c3577b7de7a3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 12:02:37 +0200 Subject: [PATCH 0652/2550] change imports to new location in loaders --- .../hosts/aftereffects/plugins/load/load_background.py | 8 ++++---- openpype/hosts/aftereffects/plugins/load/load_file.py | 7 +++---- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/load/load_background.py b/openpype/hosts/aftereffects/plugins/load/load_background.py index d346df504a..260e780be0 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_background.py +++ b/openpype/hosts/aftereffects/plugins/load/load_background.py @@ -1,14 +1,14 @@ import re -from openpype.lib import ( - get_background_layers, - get_unique_layer_name -) from openpype.pipeline import get_representation_path from openpype.hosts.aftereffects.api import ( AfterEffectsLoader, containerise ) +from openpype.hosts.aftereffects.api.lib import ( + get_background_layers, + get_unique_layer_name, +) class BackgroundLoader(AfterEffectsLoader): diff --git a/openpype/hosts/aftereffects/plugins/load/load_file.py b/openpype/hosts/aftereffects/plugins/load/load_file.py index 6ab69c6bfa..2ddc9825e5 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_file.py +++ b/openpype/hosts/aftereffects/plugins/load/load_file.py @@ -1,12 +1,11 @@ import re -from openpype import lib - from openpype.pipeline import get_representation_path from openpype.hosts.aftereffects.api import ( AfterEffectsLoader, containerise ) +from openpype.hosts.aftereffects.api.lib import get_unique_layer_name class FileLoader(AfterEffectsLoader): @@ -28,7 +27,7 @@ class FileLoader(AfterEffectsLoader): stub = self.get_stub() layers = stub.get_items(comps=True, folders=True, footages=True) existing_layers = [layer.name for layer in layers] - comp_name = lib.get_unique_layer_name( + comp_name = get_unique_layer_name( existing_layers, "{}_{}".format(context["asset"]["name"], name)) import_options = {} @@ -87,7 +86,7 @@ class FileLoader(AfterEffectsLoader): if namespace_from_container != layer_name: layers = stub.get_items(comps=True) existing_layers = [layer.name for layer in layers] - layer_name = lib.get_unique_layer_name( + layer_name = get_unique_layer_name( existing_layers, "{}_{}".format(context["asset"], context["subset"])) else: # switching version - keep same name From d263a8ef9df1aa496a44c08d6c0b7e69810153d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 12:02:51 +0200 Subject: [PATCH 0653/2550] remove functions from openpype lib --- openpype/lib/__init__.py | 4 --- openpype/lib/plugin_tools.py | 54 ------------------------------------ 2 files changed, 58 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 3d3e425a86..adb857a056 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -189,8 +189,6 @@ from .plugin_tools import ( filter_pyblish_plugins, set_plugin_attributes_from_settings, source_hash, - get_unique_layer_name, - get_background_layers, ) from .path_tools import ( @@ -354,8 +352,6 @@ __all__ = [ "filter_pyblish_plugins", "set_plugin_attributes_from_settings", "source_hash", - "get_unique_layer_name", - "get_background_layers", "create_hard_link", "version_up", diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py 
index 060db94ae0..9080918dfa 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -375,60 +375,6 @@ def source_hash(filepath, *args): return "|".join([file_name, time, size] + list(args)).replace(".", ",") -def get_unique_layer_name(layers, name): - """ - Gets all layer names and if 'name' is present in them, increases - suffix by 1 (eg. creates unique layer name - for Loader) - Args: - layers (list): of strings, names only - name (string): checked value - - Returns: - (string): name_00X (without version) - """ - names = {} - for layer in layers: - layer_name = re.sub(r'_\d{3}$', '', layer) - if layer_name in names.keys(): - names[layer_name] = names[layer_name] + 1 - else: - names[layer_name] = 1 - occurrences = names.get(name, 0) - - return "{}_{:0>3d}".format(name, occurrences + 1) - - -def get_background_layers(file_url): - """ - Pulls file name from background json file, enrich with folder url for - AE to be able import files. - - Order is important, follows order in json. - - Args: - file_url (str): abs url of background json - - Returns: - (list): of abs paths to images - """ - with open(file_url) as json_file: - data = json.load(json_file) - - layers = list() - bg_folder = os.path.dirname(file_url) - for child in data['children']: - if child.get("filename"): - layers.append(os.path.join(bg_folder, child.get("filename")). - replace("\\", "/")) - else: - for layer in child['children']: - if layer.get("filename"): - layers.append(os.path.join(bg_folder, - layer.get("filename")). - replace("\\", "/")) - return layers - - def parse_json(path): """Parses json file at 'path' location From 5372c016eadfe2dd09cfc1803a6984cfca24d61b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 12:19:47 +0200 Subject: [PATCH 0654/2550] moved 'OpenPypeInterface' into interfaces.py --- openpype/modules/__init__.py | 2 -- openpype/modules/base.py | 42 +++++++--------------------------- openpype/modules/interfaces.py | 29 +++++++++++++++++++++-- 3 files changed, 35 insertions(+), 38 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 68b5f6c247..02e7dc13ab 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -2,7 +2,6 @@ from .base import ( OpenPypeModule, OpenPypeAddOn, - OpenPypeInterface, load_modules, @@ -20,7 +19,6 @@ from .base import ( __all__ = ( "OpenPypeModule", "OpenPypeAddOn", - "OpenPypeInterface", "load_modules", diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1316d7f734..1b8cf5d769 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -28,6 +28,14 @@ from openpype.settings.lib import ( ) from openpype.lib import PypeLogger +from .interfaces import ( + OpenPypeInterface, + IPluginPaths, + IHostModule, + ITrayModule, + ITrayService +) + # Files that will be always ignored on modules import IGNORED_FILENAMES = ( "__pycache__", @@ -391,29 +399,7 @@ def _load_modules(): log.error(msg, exc_info=True) -class _OpenPypeInterfaceMeta(ABCMeta): - """OpenPypeInterface meta class to print proper string.""" - def __str__(self): - return "<'OpenPypeInterface.{}'>".format(self.__name__) - - def __repr__(self): - return str(self) - - -@six.add_metaclass(_OpenPypeInterfaceMeta) -class OpenPypeInterface: - """Base class of Interface that can be used as Mixin with abstract parts. - - This is way how OpenPype module or addon can tell that has implementation - for specific part or for other module/addon. 
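
With `OpenPypeInterface` and its relatives moving into `openpype/modules/interfaces.py` in this commit, module code is expected to import interfaces from that path rather than from `openpype_interfaces`. A minimal sketch of a module implementing one of them after the move; the `DemoModule` name and the empty plugin path lists are invented:

```python
from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IPluginPaths


class DemoModule(OpenPypeModule, IPluginPaths):
    name = "demo"

    def initialize(self, module_settings):
        self.enabled = True

    def get_plugin_paths(self):
        # IPluginPaths implementations return plugin folders by type
        return {"publish": [], "create": [], "load": [], "actions": []}
```
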
- - Child classes of OpenPypeInterface may be used as mixin in different - OpenPype modules which means they have to have implemented methods defined - in the interface. By default interface does not have any abstract parts. - """ - - pass @six.add_metaclass(ABCMeta) @@ -749,8 +735,6 @@ class ModulesManager: and "actions" each containing list of paths. """ # Output structure - from openpype_interfaces import IPluginPaths - output = { "publish": [], "create": [], @@ -807,8 +791,6 @@ class ModulesManager: list: List of creator plugin paths. """ # Output structure - from openpype_interfaces import IPluginPaths - output = [] for module in self.get_enabled_modules(): # Skip module that do not inherit from `IPluginPaths` @@ -897,8 +879,6 @@ class ModulesManager: host name set to passed 'host_name'. """ - from openpype_interfaces import IHostModule - for module in self.get_enabled_modules(): if ( isinstance(module, IHostModule) @@ -915,8 +895,6 @@ class ModulesManager: inheriting 'IHostModule'. """ - from openpype_interfaces import IHostModule - host_names = { module.host_name for module in self.get_enabled_modules() @@ -1098,8 +1076,6 @@ class TrayModulesManager(ModulesManager): self.tray_menu(tray_menu) def get_enabled_tray_modules(self): - from openpype_interfaces import ITrayModule - output = [] for module in self.modules: if module.enabled and isinstance(module, ITrayModule): @@ -1175,8 +1151,6 @@ class TrayModulesManager(ModulesManager): self._report["Tray menu"] = report def start_modules(self): - from openpype_interfaces import ITrayService - report = {} time_start = time.time() prev_start_time = time_start diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 14f49204ee..8221db4d05 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -1,8 +1,33 @@ -from abc import abstractmethod, abstractproperty +from abc import ABCMeta, abstractmethod, abstractproperty + +import six from openpype import resources -from openpype.modules import OpenPypeInterface + +class _OpenPypeInterfaceMeta(ABCMeta): + """OpenPypeInterface meta class to print proper string.""" + + def __str__(self): + return "<'OpenPypeInterface.{}'>".format(self.__name__) + + def __repr__(self): + return str(self) + + +@six.add_metaclass(_OpenPypeInterfaceMeta) +class OpenPypeInterface: + """Base class of Interface that can be used as Mixin with abstract parts. + + This is way how OpenPype module or addon can tell OpenPype that contain + implementation for specific functionality. + + Child classes of OpenPypeInterface may be used as mixin in different + OpenPype modules which means they have to have implemented methods defined + in the interface. By default interface does not have any abstract parts. 
+ """ + + pass class IPluginPaths(OpenPypeInterface): From 8cc6086e92a6c5135428898717e6d9057f567e8c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 12:22:19 +0200 Subject: [PATCH 0655/2550] removed usage of 'ILaunchHookPaths' --- openpype/modules/ftrack/ftrack_module.py | 5 ++--- openpype/modules/shotgrid/shotgrid_module.py | 5 +---- openpype/modules/slack/slack_module.py | 10 ++++------ openpype/modules/timers_manager/timers_manager.py | 11 ++++------- 4 files changed, 11 insertions(+), 20 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index f99e189082..cb4f204523 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -9,7 +9,6 @@ from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, IPluginPaths, - ILaunchHookPaths, ISettingsChangeListener ) from openpype.settings import SaveWarningExc @@ -21,7 +20,6 @@ class FtrackModule( OpenPypeModule, ITrayModule, IPluginPaths, - ILaunchHookPaths, ISettingsChangeListener ): name = "ftrack" @@ -85,7 +83,8 @@ class FtrackModule( } def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" + """Implementation for applications launch hooks.""" + return os.path.join(FTRACK_MODULE_DIR, "launch_hooks") def modify_application_launch_arguments(self, application, env): diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py index 5644f0c35f..281c6fdcad 100644 --- a/openpype/modules/shotgrid/shotgrid_module.py +++ b/openpype/modules/shotgrid/shotgrid_module.py @@ -3,7 +3,6 @@ import os from openpype_interfaces import ( ITrayModule, IPluginPaths, - ILaunchHookPaths, ) from openpype.modules import OpenPypeModule @@ -11,9 +10,7 @@ from openpype.modules import OpenPypeModule SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class ShotgridModule( - OpenPypeModule, ITrayModule, IPluginPaths, ILaunchHookPaths -): +class ShotgridModule(OpenPypeModule, ITrayModule, IPluginPaths): leecher_manager_url = None name = "shotgrid" enabled = False diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/slack/slack_module.py index 9b2976d766..499c1c19ce 100644 --- a/openpype/modules/slack/slack_module.py +++ b/openpype/modules/slack/slack_module.py @@ -1,14 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype_interfaces import ( - IPluginPaths, - ILaunchHookPaths -) +from openpype.modules.interfaces import IPluginPaths SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): +class SlackIntegrationModule(OpenPypeModule, IPluginPaths): """Allows sending notification to Slack channels during publishing.""" name = "slack" @@ -18,7 +15,8 @@ class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): self.enabled = slack_settings["enabled"] def get_launch_hook_paths(self): - """Implementation of `ILaunchHookPaths`.""" + """Implementation for applications launch hooks.""" + return os.path.join(SLACK_MODULE_DIR, "launch_hooks") def get_plugin_paths(self): diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 93332ace4f..c168e9534d 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -6,7 +6,6 @@ from openpype.client import get_asset_by_name from openpype.modules 
import OpenPypeModule
 from openpype_interfaces import (
     ITrayService,
-    ILaunchHookPaths,
     IPluginPaths
 )
 from openpype.lib.events import register_event_callback
@@ -79,7 +78,6 @@ class ExampleTimersManagerConnector:
 class TimersManager(
     OpenPypeModule,
     ITrayService,
-    ILaunchHookPaths,
     IPluginPaths
 ):
     """ Handles about Timers.
@@ -185,12 +183,11 @@ class TimersManager(
         )
 
     def get_launch_hook_paths(self):
-        """Implementation of `ILaunchHookPaths`."""
+        """Implementation for applications launch hooks."""
 
-        return os.path.join(
-            TIMER_MODULE_DIR,
-            "launch_hooks"
-        )
+        return [
+            os.path.join(TIMER_MODULE_DIR, "launch_hooks")
+        ]
 
     def get_plugin_paths(self):
         """Implementation of `IPluginPaths`."""

From 8acb96c572a58c779b544dac8b79aab9d71b6663 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 25 Aug 2022 12:27:02 +0200
Subject: [PATCH 0656/2550] added deprecation warning to 'ILaunchHookPaths'

---
 openpype/modules/interfaces.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py
index 8221db4d05..13655773dd 100644
--- a/openpype/modules/interfaces.py
+++ b/openpype/modules/interfaces.py
@@ -81,6 +81,13 @@ class ILaunchHookPaths(OpenPypeInterface):
 
     Expected result is list of paths.
     ["path/to/launch_hooks_dir"]
+
+    Deprecated:
+        This interface is not needed since OpenPype 3.14.*. Addons just have
+        to implement 'get_launch_hook_paths', which can expect an Application
+        object or nothing as argument.
+
+        Interface class will be removed after 3.16.*.
     """
 
     @abstractmethod

From 7356fc666d7aa4cca7251849f4eaf4d91dfc0e75 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 25 Aug 2022 12:31:47 +0200
Subject: [PATCH 0657/2550] moved collection of launch hooks from modules into
 applications logic

---
 openpype/lib/applications.py | 61 +++++++++++++++++++++++++++++++++--
 openpype/modules/base.py     | 62 ------------------------------------
 2 files changed, 58 insertions(+), 65 deletions(-)

diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py
index 074e815160..b389bc2539 100644
--- a/openpype/lib/applications.py
+++ b/openpype/lib/applications.py
@@ -950,6 +950,63 @@ class ApplicationLaunchContext:
         )
         self.kwargs["env"] = value
 
+    def _collect_addons_launch_hook_paths(self):
+        """Helper to collect application launch hooks from addons.
+
+        Modules have to implement the 'get_launch_hook_paths' method, which
+        can expect an application as argument or nothing.
+
+        Returns:
+            List[str]: Paths to launch hook directories.
+        """
+
+        expected_types = (list, tuple, set)
+
+        output = []
+        for module in self.modules_manager.get_enabled_modules():
+            # Skip module if it does not implement 'get_launch_hook_paths'
+            func = getattr(module, "get_launch_hook_paths", None)
+            if func is None:
+                continue
+
+            func = module.get_launch_hook_paths
+            if hasattr(inspect, "signature"):
+                sig = inspect.signature(func)
+                expect_args = len(sig.parameters) > 0
+            else:
+                expect_args = len(inspect.getargspec(func)[0]) > 0
+
+            # Pass application argument if the method expects it.
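# Editor's note (illustrative only, not part of the patch): both call styles
# handled below are valid for an addon. A hypothetical addon could implement
# either form and be picked up by this helper, e.g.:
#
#     def get_launch_hook_paths(self):
#         return [os.path.join(MY_ADDON_DIR, "launch_hooks")]
#
#     # or, filtered per application:
#     def get_launch_hook_paths(self, application):
#         if application.host_name != "myhost":
#             return []
#         return [os.path.join(MY_ADDON_DIR, "launch_hooks")]
#
# 'MY_ADDON_DIR' and "myhost" are placeholder names used only in this sketch.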
+ try: + if expect_args: + hook_paths = func(self.application) + else: + hook_paths = func() + except Exception: + self.log.warning( + "Failed to call 'get_launch_hook_paths'", + exc_info=True + ) + continue + + if not hook_paths: + continue + + # Convert string to list + if isinstance(hook_paths, six.string_types): + hook_paths = [hook_paths] + + # Skip invalid types + if not isinstance(hook_paths, expected_types): + self.log.warning(( + "Result of `get_launch_hook_paths`" + " has invalid type {}. Expected {}" + ).format(type(hook_paths), expected_types)) + continue + + output.extend(hook_paths) + return output + def paths_to_launch_hooks(self): """Directory paths where to look for launch hooks.""" # This method has potential to be part of application manager (maybe). @@ -983,9 +1040,7 @@ class ApplicationLaunchContext: paths.append(path) # Load modules paths - paths.extend( - self.modules_manager.collect_launch_hook_paths(self.application) - ) + paths.extend(self._collect_addons_launch_hook_paths()) return paths diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1b8cf5d769..25355cbd9c 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -805,68 +805,6 @@ class ModulesManager: output.extend(paths) return output - def collect_launch_hook_paths(self, app): - """Helper to collect application launch hooks. - - It used to be based on 'ILaunchHookPaths' which is not true anymore. - Module just have to have implemented 'get_launch_hook_paths' method. - - Args: - app (Application): Application object which can be used for - filtering of which launch hook paths are returned. - - Returns: - list: Paths to launch hook directories. - """ - - str_type = type("") - expected_types = (list, tuple, set) - - output = [] - for module in self.get_enabled_modules(): - # Skip module if does not have implemented 'get_launch_hook_paths' - func = getattr(module, "get_launch_hook_paths", None) - if func is None: - continue - - func = module.get_launch_hook_paths - if hasattr(inspect, "signature"): - sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 - else: - expect_args = len(inspect.getargspec(func)[0]) > 0 - - # Pass application argument if method expect it. - try: - if expect_args: - hook_paths = func(app) - else: - hook_paths = func() - except Exception: - self.log.warning( - "Failed to call 'get_launch_hook_paths'", - exc_info=True - ) - continue - - if not hook_paths: - continue - - # Convert string to list - if isinstance(hook_paths, str_type): - hook_paths = [hook_paths] - - # Skip invalid types - if not isinstance(hook_paths, expected_types): - self.log.warning(( - "Result of `get_launch_hook_paths`" - " has invalid type {}. Expected {}" - ).format(type(hook_paths), expected_types)) - continue - - output.extend(hook_paths) - return output - def get_host_module(self, host_name): """Find host module by host name. From 390dbb6320f97ba1b05e1de895905b57c62ce1e3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 25 Aug 2022 14:51:21 +0200 Subject: [PATCH 0658/2550] OP-3682 - added readme to highlight it is for v4 --- distribution/README.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 distribution/README.md diff --git a/distribution/README.md b/distribution/README.md new file mode 100644 index 0000000000..212eb267b8 --- /dev/null +++ b/distribution/README.md @@ -0,0 +1,18 @@ +Addon distribution tool +------------------------ + +Code in this folder is backend portion of Addon distribution logic for v4 server. 
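Editor's note: below is an illustrative sketch (not part of the patch) of the client-side flow that the rest of this README describes: fetch the enabled addons from the v4 server, check which versions are already unzipped locally, and download/extract the missing ones. The `AddonInfo` field names and the download helper are assumptions made only for this example.

    import os
    import zipfile

    def ensure_addons_present(addon_infos, local_addons_dir):
        # 'addon_infos' is the list of AddonInfo objects returned by the
        # server; 'name', 'version' and the download source are assumed fields.
        for info in addon_infos:
            target_dir = os.path.join(
                local_addons_dir, "{}_{}".format(info.name, info.version)
            )
            if os.path.isdir(target_dir):
                # This addon version is already distributed locally.
                continue
            # Hypothetical helper: fetch the zip from http/shared disk/git.
            zip_path = download_addon_zip(info, local_addons_dir)
            with zipfile.ZipFile(zip_path) as zip_stream:
                zip_stream.extractall(target_dir)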
+
+Each host and module will be a separate Addon in the future. Each v4 server can run a different set of Addons.
+
+As a first step, the client (running on an artist machine) asks the v4 server for the list of enabled addons.
+(It expects a list of JSON documents matching the `addon_distribution.py:AddonInfo` object.)
+Next it checks whether each enabled addon version is present in the local folder. If a version of
+an addon is missing, the client uses the information in the addon to download a zip file
+(from http/shared local disk/git) and unzip it.
+
+A required part of addon distribution will be the sharing of dependencies (Python libraries, utilities), which is not part of this folder.
+
+The location of this folder might change in the future, as a client will need to add this folder to sys.path reliably.
+
+This code needs to be as independent of OpenPype code as possible!
\ No newline at end of file

From 6914c626a1dfe5414dc3c1a2881801f2a069f55c Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 25 Aug 2022 15:28:23 +0200
Subject: [PATCH 0659/2550] implemented flame addon

---
 openpype/hosts/flame/__init__.py | 26 +++++++------------------
 openpype/hosts/flame/module.py   | 29 +++++++++++++++++++++++++++++
 2 files changed, 36 insertions(+), 19 deletions(-)
 create mode 100644 openpype/hosts/flame/module.py

diff --git a/openpype/hosts/flame/__init__.py b/openpype/hosts/flame/__init__.py
index f839357147..1ab0e6ff33 100644
--- a/openpype/hosts/flame/__init__.py
+++ b/openpype/hosts/flame/__init__.py
@@ -1,22 +1,10 @@
-import os
-
-HOST_DIR = os.path.dirname(
-    os.path.abspath(__file__)
+from .module import (
+    HOST_DIR,
+    FlameAddon,
 )
 
 
-def add_implementation_envs(env, _app):
-    # Add requirements to DL_PYTHON_HOOK_PATH
-    pype_root = os.environ["OPENPYPE_REPOS_ROOT"]
-
-    env["DL_PYTHON_HOOK_PATH"] = os.path.join(
-        pype_root, "openpype", "hosts", "flame", "startup")
-    env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
-
-    # Set default values if are not already set via settings
-    defaults = {
-        "LOGLEVEL": "DEBUG"
-    }
-    for key, value in defaults.items():
-        if not env.get(key):
-            env[key] = value
+__all__ = (
+    "HOST_DIR",
+    "FlameAddon",
+)
diff --git a/openpype/hosts/flame/module.py b/openpype/hosts/flame/module.py
new file mode 100644
index 0000000000..2960d7db0f
--- /dev/null
+++ b/openpype/hosts/flame/module.py
@@ -0,0 +1,29 @@
+import os
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostModule
+
+HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class FlameAddon(OpenPypeModule, IHostModule):
+    name = "flame"
+    host_name = "flame"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def add_implementation_envs(self, env, _app):
+        # Add requirements to DL_PYTHON_HOOK_PATH
+        env["DL_PYTHON_HOOK_PATH"] = os.path.join(HOST_DIR, "startup")
+        env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
+
+        # Set default values if are not already set via settings
+        defaults = {
+            "LOGLEVEL": "DEBUG"
+        }
+        for key, value in defaults.items():
+            if not env.get(key):
+                env[key] = value
+
+    def get_workfile_extensions(self):
+        return [".otoc"]

From c3f70ccb04e965c8ec9b4c3edb12e957496a4a23 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 25 Aug 2022 15:33:29 +0200
Subject: [PATCH 0660/2550] changed "module" to "addon"

---
 openpype/hosts/resolve/__init__.py             | 2 +-
 openpype/hosts/resolve/{module.py => addon.py} | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
 rename openpype/hosts/resolve/{module.py => addon.py} (91%)

diff --git a/openpype/hosts/resolve/__init__.py
b/openpype/hosts/resolve/__init__.py index a02e07794d..ebbfdc6a1e 100644 --- a/openpype/hosts/resolve/__init__.py +++ b/openpype/hosts/resolve/__init__.py @@ -1,4 +1,4 @@ -from .module import ResolveModule +from .addon import ResolveModule __all__ = ( diff --git a/openpype/hosts/resolve/module.py b/openpype/hosts/resolve/addon.py similarity index 91% rename from openpype/hosts/resolve/module.py rename to openpype/hosts/resolve/addon.py index 8f2824df75..af09448a43 100644 --- a/openpype/hosts/resolve/module.py +++ b/openpype/hosts/resolve/addon.py @@ -6,7 +6,7 @@ from openpype.modules.interfaces import IHostModule from .utils import RESOLVE_ROOT_DIR -class ResolveModule(OpenPypeModule, IHostModule): +class ResolveAddon(OpenPypeModule, IHostModule): name = "resolve" host_name = "resolve" From 92240f7237d2b820f0d32b058a6df9b333e8efa1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:33:46 +0200 Subject: [PATCH 0661/2550] fix import --- openpype/hosts/resolve/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/resolve/__init__.py b/openpype/hosts/resolve/__init__.py index ebbfdc6a1e..b4a994bbaa 100644 --- a/openpype/hosts/resolve/__init__.py +++ b/openpype/hosts/resolve/__init__.py @@ -1,6 +1,6 @@ -from .addon import ResolveModule +from .addon import ResolveAddon __all__ = ( - "ResolveModule", + "ResolveAddon", ) From d78d73f391ac8ca4daa3f3c07ad301167ff70bb7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:37:34 +0200 Subject: [PATCH 0662/2550] added missing definition of launch hooks --- openpype/hosts/flame/module.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/flame/module.py b/openpype/hosts/flame/module.py index 2960d7db0f..7e68378f4b 100644 --- a/openpype/hosts/flame/module.py +++ b/openpype/hosts/flame/module.py @@ -25,5 +25,12 @@ class FlameAddon(OpenPypeModule, IHostModule): if not env.get(key): env[key] = value + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(HOST_DIR, "hooks") + ] + def get_workfile_extensions(self): return [".otoc"] From 2c3604cee6bde2fff46cef817aac0c288eb94d4c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:37:54 +0200 Subject: [PATCH 0663/2550] renemad module.py to addon.py --- openpype/hosts/flame/__init__.py | 2 +- openpype/hosts/flame/{module.py => addon.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename openpype/hosts/flame/{module.py => addon.py} (100%) diff --git a/openpype/hosts/flame/__init__.py b/openpype/hosts/flame/__init__.py index 1ab0e6ff33..b45f107747 100644 --- a/openpype/hosts/flame/__init__.py +++ b/openpype/hosts/flame/__init__.py @@ -1,4 +1,4 @@ -from .module import ( +from .addon import ( HOST_DIR, FlameAddon, ) diff --git a/openpype/hosts/flame/module.py b/openpype/hosts/flame/addon.py similarity index 100% rename from openpype/hosts/flame/module.py rename to openpype/hosts/flame/addon.py From 67fd21edae5d7c3670ee704558ff10505c64a783 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:40:32 +0200 Subject: [PATCH 0664/2550] imlemented fusion addon --- openpype/hosts/fusion/__init__.py | 10 ++++++++++ openpype/hosts/fusion/addon.py | 23 +++++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 openpype/hosts/fusion/addon.py diff --git a/openpype/hosts/fusion/__init__.py b/openpype/hosts/fusion/__init__.py index e69de29bb2..ddae01890b 100644 --- a/openpype/hosts/fusion/__init__.py +++ 
b/openpype/hosts/fusion/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + FusionAddon, + FUSION_HOST_DIR, +) + + +__all__ = ( + "FusionAddon", + "FUSION_HOST_DIR", +) diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py new file mode 100644 index 0000000000..97fb262517 --- /dev/null +++ b/openpype/hosts/fusion/addon.py @@ -0,0 +1,23 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class FusionAddon(OpenPypeModule, IHostModule): + name = "fusion" + host_name = "fusion" + + def initialize(self, module_settings): + self.enabled = True + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(FUSION_HOST_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".comp"] From 3ea7510693ec23623ec7760f3479e10b363a2adf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:40:40 +0200 Subject: [PATCH 0665/2550] removed usage of HOST_WORKFILE_EXTENSIONS --- openpype/hosts/fusion/api/workio.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/workio.py b/openpype/hosts/fusion/api/workio.py index a1710c6e3a..89752d3e6d 100644 --- a/openpype/hosts/fusion/api/workio.py +++ b/openpype/hosts/fusion/api/workio.py @@ -2,13 +2,11 @@ import sys import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - from .pipeline import get_current_comp def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["fusion"] + return [".comp"] def has_unsaved_changes(): From cf50d1dd1f00fe178d8bfc28a039579222cab1d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:40:56 +0200 Subject: [PATCH 0666/2550] reuse 'FUSION_HOST_DIR' from fusion public api --- openpype/hosts/fusion/api/pipeline.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 54a6c94b60..987eae214b 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -18,12 +18,11 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) -import openpype.hosts.fusion +from openpype.hosts.fusion import FUSION_HOST_DIR log = Logger.get_logger(__name__) -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(FUSION_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") From c1268ec253a1c9b4ee7e3d8dc5acb76712a8a035 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:55:09 +0200 Subject: [PATCH 0667/2550] implemented hamrony addon --- openpype/hosts/harmony/__init__.py | 17 ++++++++--------- openpype/hosts/harmony/addon.py | 24 ++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 9 deletions(-) create mode 100644 openpype/hosts/harmony/addon.py diff --git a/openpype/hosts/harmony/__init__.py b/openpype/hosts/harmony/__init__.py index d2f710d83d..9177eaa285 100644 --- a/openpype/hosts/harmony/__init__.py +++ b/openpype/hosts/harmony/__init__.py @@ -1,11 +1,10 @@ -import os +from .addon import ( + HARMONY_HOST_DIR, + HarmonyAddon, +) -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - openharmony_path = os.path.join( - os.environ["OPENPYPE_REPOS_ROOT"], 
"openpype", "hosts", - "harmony", "vendor", "OpenHarmony" - ) - # TODO check if is already set? What to do if is already set? - env["LIB_OPENHARMONY_PATH"] = openharmony_path +__all__ = ( + "HARMONY_HOST_DIR", + "HarmonyAddon", +) diff --git a/openpype/hosts/harmony/addon.py b/openpype/hosts/harmony/addon.py new file mode 100644 index 0000000000..b051d68abb --- /dev/null +++ b/openpype/hosts/harmony/addon.py @@ -0,0 +1,24 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HarmonyAddon(OpenPypeModule, IHostModule): + name = "harmony" + host_name = "harmony" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + openharmony_path = os.path.join( + HARMONY_HOST_DIR, "vendor", "OpenHarmony" + ) + # TODO check if is already set? What to do if is already set? + env["LIB_OPENHARMONY_PATH"] = openharmony_path + + def get_workfile_extensions(self): + return [".zip"] From a5ce719e58eadaa9c936086e7fdbd37f4eaba7fb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:55:19 +0200 Subject: [PATCH 0668/2550] removed usage of HOST_WORKFILE_EXTENSIONS --- openpype/hosts/harmony/api/workio.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/harmony/api/workio.py b/openpype/hosts/harmony/api/workio.py index ab1cb9b1a9..8df5ede917 100644 --- a/openpype/hosts/harmony/api/workio.py +++ b/openpype/hosts/harmony/api/workio.py @@ -2,8 +2,6 @@ import os import shutil -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - from .lib import ( ProcessContext, get_local_harmony_path, @@ -16,7 +14,7 @@ save_disabled = False def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["harmony"] + return [".zip"] def has_unsaved_changes(): From bdedea41d67bd72ecca676c066d24a83120b3215 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:55:30 +0200 Subject: [PATCH 0669/2550] reuse 'HARMONY_HOST_DIR' from public api --- openpype/hosts/harmony/api/pipeline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index 4d71b9380d..4b9849c190 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -14,14 +14,14 @@ from openpype.pipeline import ( ) from openpype.pipeline.load import get_outdated_containers from openpype.pipeline.context_tools import get_current_project_asset -import openpype.hosts.harmony + +from openpype.hosts.harmony import HARMONY_HOST_DIR import openpype.hosts.harmony.api as harmony log = logging.getLogger("openpype.hosts.harmony") -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.harmony.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(HARMONY_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") From 4c8291442695a841be30b5402a3bb9e77fed7bdf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:02:49 +0200 Subject: [PATCH 0670/2550] implemented Houdini addon --- openpype/hosts/houdini/__init__.py | 44 +++++------------------- openpype/hosts/houdini/addon.py | 55 ++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 36 deletions(-) create mode 100644 
openpype/hosts/houdini/addon.py diff --git a/openpype/hosts/houdini/__init__.py b/openpype/hosts/houdini/__init__.py index a3ee38db8d..7fba9baddc 100644 --- a/openpype/hosts/houdini/__init__.py +++ b/openpype/hosts/houdini/__init__.py @@ -1,38 +1,10 @@ -import os +from .addon import ( + HoudiniAddon, + HOUDINI_HOST_DIR. +) -def add_implementation_envs(env, _app): - # Add requirements to HOUDINI_PATH and HOUDINI_MENU_PATH - pype_root = os.environ["OPENPYPE_REPOS_ROOT"] - - startup_path = os.path.join( - pype_root, "openpype", "hosts", "houdini", "startup" - ) - new_houdini_path = [startup_path] - new_houdini_menu_path = [startup_path] - - old_houdini_path = env.get("HOUDINI_PATH") or "" - old_houdini_menu_path = env.get("HOUDINI_MENU_PATH") or "" - - for path in old_houdini_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_houdini_path: - new_houdini_path.append(norm_path) - - for path in old_houdini_menu_path.split(os.pathsep): - if not path: - continue - - norm_path = os.path.normpath(path) - if norm_path not in new_houdini_menu_path: - new_houdini_menu_path.append(norm_path) - - # Add ampersand for unknown reason (Maybe is needed in Houdini?) - new_houdini_path.append("&") - new_houdini_menu_path.append("&") - - env["HOUDINI_PATH"] = os.pathsep.join(new_houdini_path) - env["HOUDINI_MENU_PATH"] = os.pathsep.join(new_houdini_menu_path) +__all__ = ( + "HoudiniAddon", + "HOUDINI_HOST_DIR". +) diff --git a/openpype/hosts/houdini/addon.py b/openpype/hosts/houdini/addon.py new file mode 100644 index 0000000000..255d6ed53f --- /dev/null +++ b/openpype/hosts/houdini/addon.py @@ -0,0 +1,55 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +HOUDINI_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class HoudiniAddon(OpenPypeModule, IHostModule): + name = "houdini" + host_name = "houdini" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Add requirements to HOUDINI_PATH and HOUDINI_MENU_PATH + startup_path = os.path.join(HOUDINI_HOST_DIR, "startup") + new_houdini_path = [startup_path] + new_houdini_menu_path = [startup_path] + + old_houdini_path = env.get("HOUDINI_PATH") or "" + old_houdini_menu_path = env.get("HOUDINI_MENU_PATH") or "" + + for path in old_houdini_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_houdini_path: + new_houdini_path.append(norm_path) + + for path in old_houdini_menu_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_houdini_menu_path: + new_houdini_menu_path.append(norm_path) + + # Add ampersand for unknown reason (Maybe is needed in Houdini?) 
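# Editor's note (not part of the patch): in Houdini a lone "&" entry in
# HOUDINI_PATH / HOUDINI_MENU_PATH expands to the default search path, so
# appending it here most likely keeps Houdini's built-in locations active
# after the OpenPype startup path is added.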
+ new_houdini_path.append("&") + new_houdini_menu_path.append("&") + + env["HOUDINI_PATH"] = os.pathsep.join(new_houdini_path) + env["HOUDINI_MENU_PATH"] = os.pathsep.join(new_houdini_menu_path) + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(HOUDINI_HOST_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] From 9b7b217faafefb5bc32873337b41a3cce415c124 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 25 Aug 2022 16:02:56 +0200 Subject: [PATCH 0671/2550] Nuke: adding sumbitted job ids to instance attribute for downstream --- .../deadline/plugins/publish/submit_nuke_deadline.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 336a56ec45..b09d2935ab 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -114,6 +114,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): instance.data["deadlineSubmissionJob"] = resp.json() instance.data["publishJobState"] = "Suspended" + # add to list of job Id + if not instance.data.get("bakingSubmissionJobs"): + instance.data["bakingSubmissionJobs"] = [] + + instance.data["bakingSubmissionJobs"].append( + resp.json()["_id"]) + # redefinition of families if "render.farm" in families: instance.data['family'] = 'write' From a2ea726be175448571ff1683338782721bed54bb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:02:58 +0200 Subject: [PATCH 0672/2550] removed usage of HOST_WORKFILE_EXTENSIONS --- openpype/hosts/houdini/api/workio.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py index e0213023fd..5f7efff333 100644 --- a/openpype/hosts/houdini/api/workio.py +++ b/openpype/hosts/houdini/api/workio.py @@ -2,11 +2,10 @@ import os import hou -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["houdini"] + return [".hip", ".hiplc", ".hipnc"] def has_unsaved_changes(): From 38ad727b0dca59f1ac9494ead42fd1d19e6f11d4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:03:12 +0200 Subject: [PATCH 0673/2550] reuse HOUDINI_HOST_DIR from public api --- openpype/hosts/houdini/api/pipeline.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b5f5459392..d396fc71c0 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -13,7 +13,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -import openpype.hosts.houdini +from openpype.hosts.houdini import HOUDINI_HOST_DIR from openpype.hosts.houdini.api import lib from openpype.lib import ( @@ -28,8 +28,7 @@ log = logging.getLogger("openpype.hosts.houdini") AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS" IS_HEADLESS = not hasattr(hou, "ui") -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.houdini.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ 
-66,7 +65,7 @@ def install(): self._has_been_setup = True # add houdini vendor packages - hou_pythonpath = os.path.join(os.path.dirname(HOST_DIR), "vendor") + hou_pythonpath = os.path.join(os.path.dirname(HOUDINI_HOST_DIR), "vendor") sys.path.append(hou_pythonpath) From 63aee53d4cc7aa5c1fe450a348116878fe6ca6c0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:03:23 +0200 Subject: [PATCH 0674/2550] fix init --- openpype/hosts/houdini/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/__init__.py b/openpype/hosts/houdini/__init__.py index 7fba9baddc..09ba66c834 100644 --- a/openpype/hosts/houdini/__init__.py +++ b/openpype/hosts/houdini/__init__.py @@ -6,5 +6,5 @@ from .addon import ( __all__ = ( "HoudiniAddon", - "HOUDINI_HOST_DIR". + "HOUDINI_HOST_DIR", ) From 81a2766598821067545bfc6daf3bd8f106de13fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:03:51 +0200 Subject: [PATCH 0675/2550] fix houdini vender path access --- openpype/hosts/houdini/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index d396fc71c0..2ae8a4dbf7 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -65,7 +65,7 @@ def install(): self._has_been_setup = True # add houdini vendor packages - hou_pythonpath = os.path.join(os.path.dirname(HOUDINI_HOST_DIR), "vendor") + hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") sys.path.append(hou_pythonpath) From ca1ef8a81d3c14fbf84938f2ffb26473da932316 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:05:38 +0200 Subject: [PATCH 0676/2550] fix init (again) --- openpype/hosts/houdini/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/__init__.py b/openpype/hosts/houdini/__init__.py index 09ba66c834..38bf1fcc2d 100644 --- a/openpype/hosts/houdini/__init__.py +++ b/openpype/hosts/houdini/__init__.py @@ -1,6 +1,6 @@ from .addon import ( HoudiniAddon, - HOUDINI_HOST_DIR. 
+ HOUDINI_HOST_DIR, ) From 33661b665cd60b6c4bc0fef13788f40cd906f0c9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 25 Aug 2022 16:10:17 +0200 Subject: [PATCH 0677/2550] global: submitting job is creating multiple job dependencies if multiple baking streams are submitted --- .../modules/deadline/plugins/publish/submit_publish_job.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 379953c9e4..2647dcf0cb 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -296,6 +296,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): for assembly_id in instance.data.get("assemblySubmissionJobs"): payload["JobInfo"]["JobDependency{}".format(job_index)] = assembly_id # noqa: E501 job_index += 1 + elif instance.data.get("bakingSubmissionJobs"): + self.log.info("Adding baking submission jobs as dependencies...") + job_index = 0 + for assembly_id in instance.data["bakingSubmissionJobs"]: + payload["JobInfo"]["JobDependency{}".format(job_index)] = assembly_id # noqa: E501 + job_index += 1 else: payload["JobInfo"]["JobDependency0"] = job["_id"] From b95e79e0d0374a3b95be5e5f21712975470dca93 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:13:02 +0200 Subject: [PATCH 0678/2550] change how cwd is set for houdini process --- openpype/hosts/houdini/hooks/set_paths.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/hooks/set_paths.py b/openpype/hosts/houdini/hooks/set_paths.py index cd2f98fb76..04a33b1643 100644 --- a/openpype/hosts/houdini/hooks/set_paths.py +++ b/openpype/hosts/houdini/hooks/set_paths.py @@ -1,5 +1,4 @@ from openpype.lib import PreLaunchHook -import os class SetPath(PreLaunchHook): @@ -15,4 +14,4 @@ class SetPath(PreLaunchHook): self.log.warning("BUG: Workdir is not filled.") return - os.chdir(workdir) + self.launch_context.kwargs["cwd"] = workdir From 1810d757856093726e37b9f4eee8eb50ebfb5934 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:23:53 +0200 Subject: [PATCH 0679/2550] implemented photoshop addon --- openpype/hosts/photoshop/__init__.py | 19 ++++++++++--------- openpype/hosts/photoshop/addon.py | 26 ++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 9 deletions(-) create mode 100644 openpype/hosts/photoshop/addon.py diff --git a/openpype/hosts/photoshop/__init__.py b/openpype/hosts/photoshop/__init__.py index a91e0a65ff..b3f66ea35c 100644 --- a/openpype/hosts/photoshop/__init__.py +++ b/openpype/hosts/photoshop/__init__.py @@ -1,9 +1,10 @@ -def add_implementation_envs(env, _app): - """Modify environments to contain all required for implementation.""" - defaults = { - "OPENPYPE_LOG_NO_COLORS": "True", - "WEBSOCKET_URL": "ws://localhost:8099/ws/" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value +from .module import ( + PhotoshopAddon, + PHOTOSHOP_HOST_DIR, +) + + +__all__ = ( + "PhotoshopAddon", + "PHOTOSHOP_HOST_DIR", +) diff --git a/openpype/hosts/photoshop/addon.py b/openpype/hosts/photoshop/addon.py new file mode 100644 index 0000000000..18899d4de8 --- /dev/null +++ b/openpype/hosts/photoshop/addon.py @@ -0,0 +1,26 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class 
PhotoshopAddon(OpenPypeModule, IHostModule): + name = "photoshop" + host_name = "photoshop" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + """Modify environments to contain all required for implementation.""" + defaults = { + "OPENPYPE_LOG_NO_COLORS": "True", + "WEBSOCKET_URL": "ws://localhost:8099/ws/" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".psd", ".psb"] From 355edb24f55cc9dc9ff9cc9c7d9c6aaf612efadb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:24:04 +0200 Subject: [PATCH 0680/2550] reuse PHOTOSHOP_HOST_DIR from public api --- openpype/hosts/photoshop/api/pipeline.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index ee150d1808..f660096630 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -14,14 +14,13 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -import openpype.hosts.photoshop +from openpype.hosts.photoshop import PHOTOSHOP_HOST_DIR from . import lib log = Logger.get_logger(__name__) -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.photoshop.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(PHOTOSHOP_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") From f0af027faaa4a1c015ee91de0cb98a5c22152e05 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:24:12 +0200 Subject: [PATCH 0681/2550] removed usage of HOST_WORKFILE_EXTENSIONS --- openpype/hosts/photoshop/api/workio.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/api/workio.py b/openpype/hosts/photoshop/api/workio.py index 951c5dbfff..35b44d6070 100644 --- a/openpype/hosts/photoshop/api/workio.py +++ b/openpype/hosts/photoshop/api/workio.py @@ -1,7 +1,6 @@ """Host API required Work Files tool""" import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from . 
import lib @@ -14,7 +13,7 @@ def _active_document(): def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["photoshop"] + return [".psd", ".psb"] def has_unsaved_changes(): From caf9e014bdb8150ac2abcc8dd23cbe5cb88ab09d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:32:46 +0200 Subject: [PATCH 0682/2550] implemented webpublish addon --- openpype/hosts/webpublisher/__init__.py | 10 ++++++++++ openpype/hosts/webpublisher/addon.py | 13 +++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 openpype/hosts/webpublisher/addon.py diff --git a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py index e69de29bb2..4e918c5d7d 100644 --- a/openpype/hosts/webpublisher/__init__.py +++ b/openpype/hosts/webpublisher/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + WebpublisherAddon, + WEBPUBLISHER_ROOT_DIR, +) + + +__all__ = ( + "WebpublisherAddon", + "WEBPUBLISHER_ROOT_DIR", +) diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py new file mode 100644 index 0000000000..3d76115df1 --- /dev/null +++ b/openpype/hosts/webpublisher/addon.py @@ -0,0 +1,13 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class WebpublisherAddon(OpenPypeModule, IHostModule): + name = "webpublisher" + host_name = "webpublisher" + + def initialize(self, module_settings): + self.enabled = True From b188afe97569a7141f6a3c3e14dc7966b1e3b853 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:39:19 +0200 Subject: [PATCH 0683/2550] reorganized imports in pype commands --- openpype/pype_commands.py | 43 ++++++++++++++++++++++++--------------- 1 file changed, 27 insertions(+), 16 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 67b0b8ad76..cb84fac3c7 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -5,19 +5,6 @@ import sys import json import time -from openpype.api import get_app_environments_for_context -from openpype.lib.plugin_tools import get_batch_asset_task_info -from openpype.lib.remote_publish import ( - get_webpublish_conn, - start_webpublish_log, - publish_and_log, - fail_batch, - find_variant_key, - get_task_data, - get_timeout, - IN_PROGRESS_STATUS -) - class PypeCommands: """Class implementing commands used by Pype. 
@@ -100,6 +87,7 @@ class PypeCommands: """ from openpype.lib import Logger + from openpype.lib.applications import get_app_environments_for_context from openpype.modules import ModulesManager from openpype.pipeline import install_openpype_plugins from openpype.tools.utils.host_tools import show_publish @@ -199,9 +187,23 @@ class PypeCommands: """ import pyblish.api - from openpype.lib import ApplicationManager from openpype.lib import Logger + from openpype.lib.applications import ( + ApplicationManager, + get_app_environments_for_context, + ) + from openpype.lib.plugin_tools import get_batch_asset_task_info + from openpype.lib.remote_publish import ( + get_webpublish_conn, + start_webpublish_log, + fail_batch, + find_variant_key, + get_task_data, + get_timeout, + IN_PROGRESS_STATUS + ) + log = Logger.get_logger("CLI-remotepublishfromapp") log.info("remotepublishphotoshop command") @@ -318,9 +320,16 @@ class PypeCommands: import pyblish.api import pyblish.util - from openpype.lib import Logger from openpype.pipeline import install_host from openpype.hosts.webpublisher import api as webpublisher + from openpype.lib import Logger + from openpype.lib.remote_publish import ( + get_webpublish_conn, + start_webpublish_log, + publish_and_log, + fail_batch, + get_task_data, + ) log = Logger.get_logger("remotepublish") @@ -366,8 +375,10 @@ class PypeCommands: Called by Deadline plugin to propagate environment into render jobs. """ + + from openpype.lib.applications import get_app_environments_for_context + if all((project, asset, task, app)): - from openpype.api import get_app_environments_for_context env = get_app_environments_for_context( project, asset, task, app, env_group ) From c2332507f49eb863aecd98540b9b71b421c2f1ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:41:56 +0200 Subject: [PATCH 0684/2550] implement webpublisher host with HostBase --- openpype/hosts/webpublisher/api/__init__.py | 36 ++++++++------------- openpype/pype_commands.py | 11 +++---- 2 files changed, 19 insertions(+), 28 deletions(-) diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 18e3a16cf5..afea838e2c 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -1,31 +1,23 @@ import os import logging -from pyblish import api as pyblish -import openpype.hosts.webpublisher -from openpype.pipeline import legacy_io +import pyblish.api + +from openpype.host import HostBase +from openpype.hosts.webpublisher import WEBPUBLISHER_ROOT_DIR log = logging.getLogger("openpype.hosts.webpublisher") -HOST_DIR = os.path.dirname(os.path.abspath( - openpype.hosts.webpublisher.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +class WebpublisherHost(HostBase): + name = "webpublisher" -def install(): - print("Installing Pype config...") + def install(self): + print("Installing Pype config...") + pyblish.api.register_host(self.name) - pyblish.register_plugin_path(PUBLISH_PATH) - log.info(PUBLISH_PATH) - - legacy_io.install() - - -def uninstall(): - pyblish.deregister_plugin_path(PUBLISH_PATH) - - -# to have required methods for interface -def ls(): - pass + publish_plugin_dir = os.path.join( + WEBPUBLISHER_ROOT_DIR, "plugins", "publish" + ) + pyblish.api.register_plugin_path(publish_plugin_dir) + self.log.info(publish_plugin_dir) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index cb84fac3c7..6a65b78dfc 100644 --- 
a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -321,7 +321,7 @@ class PypeCommands: import pyblish.util from openpype.pipeline import install_host - from openpype.hosts.webpublisher import api as webpublisher + from openpype.hosts.webpublisher.api import WebpublisherHost from openpype.lib import Logger from openpype.lib.remote_publish import ( get_webpublish_conn, @@ -335,22 +335,21 @@ class PypeCommands: log.info("remotepublish command") - host_name = "webpublisher" + webpublisher_host = WebpublisherHost() + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP"] = host_name + os.environ["AVALON_APP"] = webpublisher_host.name os.environ["USER_EMAIL"] = user_email os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib - pyblish.api.register_host(host_name) - if targets: if isinstance(targets, str): targets = [targets] for target in targets: pyblish.api.register_target(target) - install_host(webpublisher) + install_host(webpublisher_host) log.info("Running publish ...") From 61690d84774268b49904789f0ea5cd8e5171caf7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:54:42 +0200 Subject: [PATCH 0685/2550] omved cli functions into webpublisher --- openpype/hosts/webpublisher/addon.py | 50 +++++ openpype/hosts/webpublisher/cli_functions.py | 204 +++++++++++++++++++ openpype/pype_commands.py | 159 +-------------- 3 files changed, 259 insertions(+), 154 deletions(-) create mode 100644 openpype/hosts/webpublisher/cli_functions.py diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index 3d76115df1..1a4370c9a5 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -1,4 +1,7 @@ import os + +import click + from openpype.modules import OpenPypeModule from openpype.modules.interfaces import IHostModule @@ -11,3 +14,50 @@ class WebpublisherAddon(OpenPypeModule, IHostModule): def initialize(self, module_settings): self.enabled = True + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group( + WebpublisherAddon.name, + help="Webpublisher related commands.") +def cli_main(): + pass + + +@cli_main.command() +@click.argument("path") +@click.option("-u", "--user", help="User email address") +@click.option("-p", "--project", help="Project") +@click.option("-t", "--targets", help="Targets", default=None, + multiple=True) +def publish(project, path, user=None, targets=None): + """Start CLI publishing. + + Publish collects json from paths provided as an argument. + More than one path is allowed. + """ + + from .cli_functions import publish + + publish(project, path, user, targets) + + +@cli_main.command() +@click.argument("path") +@click.option("-h", "--host", help="Host") +@click.option("-u", "--user", help="User email address") +@click.option("-p", "--project", help="Project") +@click.option("-t", "--targets", help="Targets", default=None, + multiple=True) +def publishfromapp(project, path, user=None, targets=None): + """Start CLI publishing. + + Publish collects json from paths provided as an argument. + More than one path is allowed. 
+ """ + + from .cli_functions import publish_from_app + + publish_from_app(project, path, user, targets) diff --git a/openpype/hosts/webpublisher/cli_functions.py b/openpype/hosts/webpublisher/cli_functions.py new file mode 100644 index 0000000000..cb2e59fac2 --- /dev/null +++ b/openpype/hosts/webpublisher/cli_functions.py @@ -0,0 +1,204 @@ +import os +import time +import pyblish.api +import pyblish.util + +from openpype.lib import Logger +from openpype.lib.remote_publish import ( + get_webpublish_conn, + start_webpublish_log, + publish_and_log, + fail_batch, + find_variant_key, + get_task_data, + get_timeout, + IN_PROGRESS_STATUS +) +from openpype.lib.applications import ( + ApplicationManager, + get_app_environments_for_context, +) +from openpype.lib.plugin_tools import get_batch_asset_task_info +from openpype.pipeline import install_host +from openpype.hosts.webpublisher.api import WebpublisherHost + + +def publish(project_name, batch_path, user_email, targets): + """Start headless publishing. + + Used to publish rendered assets, workfiles etc via Webpublisher. + Eventually should be yanked out to Webpublisher cli. + + Publish use json from passed paths argument. + + Args: + project_name (str): project to publish (only single context is + expected per call of remotepublish + batch_path (str): Path batch folder. Contains subfolders with + resources (workfile, another subfolder 'renders' etc.) + user_email (string): email address for webpublisher - used to + find Ftrack user with same email + targets (list): Pyblish targets + (to choose validator for example) + + Raises: + RuntimeError: When there is no path to process. + """ + + if not batch_path: + raise RuntimeError("No publish paths specified") + + log = Logger.get_logger("remotepublish") + log.info("remotepublish command") + + # Register target and host + webpublisher_host = WebpublisherHost() + + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path + os.environ["AVALON_PROJECT"] = project_name + os.environ["AVALON_APP"] = webpublisher_host.name + os.environ["USER_EMAIL"] = user_email + os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib + + if targets: + if isinstance(targets, str): + targets = [targets] + for target in targets: + pyblish.api.register_target(target) + + install_host(webpublisher_host) + + log.info("Running publish ...") + + _, batch_id = os.path.split(batch_path) + dbcon = get_webpublish_conn() + _id = start_webpublish_log(dbcon, batch_id, user_email) + + task_data = get_task_data(batch_path) + if not task_data["context"]: + msg = "Batch manifest must contain context data" + msg += "Create new batch and set context properly." + fail_batch(_id, dbcon, msg) + + publish_and_log(dbcon, _id, log, batch_id=batch_id) + + log.info("Publish finished.") + + +def publish_from_app( + project_name, batch_path, host_name, user_email, targets +): + """Opens installed variant of 'host' and run remote publish there. + + Eventually should be yanked out to Webpublisher cli. + + Currently implemented and tested for Photoshop where customer + wants to process uploaded .psd file and publish collected layers + from there. Triggered by Webpublisher. + + Checks if no other batches are running (status =='in_progress). If + so, it sleeps for SLEEP (this is separate process), + waits for WAIT_FOR seconds altogether. + + Requires installed host application on the machine. + + Runs publish process as user would, in automatic fashion. 
+ + Args: + project_name (str): project to publish (only single context is + expected per call of remotepublish + batch_path (str): Path batch folder. Contains subfolders with + resources (workfile, another subfolder 'renders' etc.) + host_name (str): 'photoshop' + user_email (string): email address for webpublisher - used to + find Ftrack user with same email + targets (list): Pyblish targets + (to choose validator for example) + """ + + log = Logger.get_logger("RemotePublishFromApp") + + log.info("remotepublishphotoshop command") + + task_data = get_task_data(batch_path) + + workfile_path = os.path.join(batch_path, + task_data["task"], + task_data["files"][0]) + + print("workfile_path {}".format(workfile_path)) + + batch_id = task_data["batch"] + dbcon = get_webpublish_conn() + # safer to start logging here, launch might be broken altogether + _id = start_webpublish_log(dbcon, batch_id, user_email) + + batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) + if len(batches_in_progress) > 1: + running_batches = [str(batch["_id"]) + for batch in batches_in_progress + if batch["_id"] != _id] + msg = "There are still running batches {}\n". \ + format("\n".join(running_batches)) + msg += "Ask admin to check them and reprocess current batch" + fail_batch(_id, dbcon, msg) + + if not task_data["context"]: + msg = "Batch manifest must contain context data" + msg += "Create new batch and set context properly." + fail_batch(_id, dbcon, msg) + + asset_name, task_name, task_type = get_batch_asset_task_info( + task_data["context"]) + + application_manager = ApplicationManager() + found_variant_key = find_variant_key(application_manager, host_name) + app_name = "{}/{}".format(host_name, found_variant_key) + + # must have for proper launch of app + env = get_app_environments_for_context( + project_name, + asset_name, + task_name, + app_name + ) + print("env:: {}".format(env)) + os.environ.update(env) + + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path + # must pass identifier to update log lines for a batch + os.environ["BATCH_LOG_ID"] = str(_id) + os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib + os.environ["USER_EMAIL"] = user_email + + pyblish.api.register_host(host_name) + if targets: + if isinstance(targets, str): + targets = [targets] + current_targets = os.environ.get("PYBLISH_TARGETS", "").split( + os.pathsep) + for target in targets: + current_targets.append(target) + + os.environ["PYBLISH_TARGETS"] = os.pathsep.join( + set(current_targets)) + + data = { + "last_workfile_path": workfile_path, + "start_last_workfile": True, + "project_name": project_name, + "asset_name": asset_name, + "task_name": task_name + } + + launched_app = application_manager.launch(app_name, **data) + + timeout = get_timeout(project_name, host_name, task_type) + + time_start = time.time() + while launched_app.poll() is None: + time.sleep(0.5) + if time.time() - time_start > timeout: + launched_app.terminate() + msg = "Timeout reached" + fail_batch(_id, dbcon, msg) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 6a65b78dfc..1817724df1 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -186,110 +186,11 @@ class PypeCommands: (to choose validator for example) """ - import pyblish.api + from openpype.hosts.webpublisher.cli_functions import publish_from_app - from openpype.lib import Logger - from openpype.lib.applications import ( - ApplicationManager, - get_app_environments_for_context, + publish_from_app( + project_name, batch_path, host_name, 
user_email, targets ) - from openpype.lib.plugin_tools import get_batch_asset_task_info - from openpype.lib.remote_publish import ( - get_webpublish_conn, - start_webpublish_log, - fail_batch, - find_variant_key, - get_task_data, - get_timeout, - IN_PROGRESS_STATUS - ) - - log = Logger.get_logger("CLI-remotepublishfromapp") - - log.info("remotepublishphotoshop command") - - task_data = get_task_data(batch_path) - - workfile_path = os.path.join(batch_path, - task_data["task"], - task_data["files"][0]) - - print("workfile_path {}".format(workfile_path)) - - batch_id = task_data["batch"] - dbcon = get_webpublish_conn() - # safer to start logging here, launch might be broken altogether - _id = start_webpublish_log(dbcon, batch_id, user_email) - - batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) - if len(batches_in_progress) > 1: - running_batches = [str(batch["_id"]) - for batch in batches_in_progress - if batch["_id"] != _id] - msg = "There are still running batches {}\n". \ - format("\n".join(running_batches)) - msg += "Ask admin to check them and reprocess current batch" - fail_batch(_id, dbcon, msg) - - if not task_data["context"]: - msg = "Batch manifest must contain context data" - msg += "Create new batch and set context properly." - fail_batch(_id, dbcon, msg) - - asset_name, task_name, task_type = get_batch_asset_task_info( - task_data["context"]) - - application_manager = ApplicationManager() - found_variant_key = find_variant_key(application_manager, host_name) - app_name = "{}/{}".format(host_name, found_variant_key) - - # must have for proper launch of app - env = get_app_environments_for_context( - project_name, - asset_name, - task_name, - app_name - ) - print("env:: {}".format(env)) - os.environ.update(env) - - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path - # must pass identifier to update log lines for a batch - os.environ["BATCH_LOG_ID"] = str(_id) - os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib - os.environ["USER_EMAIL"] = user_email - - pyblish.api.register_host(host_name) - if targets: - if isinstance(targets, str): - targets = [targets] - current_targets = os.environ.get("PYBLISH_TARGETS", "").split( - os.pathsep) - for target in targets: - current_targets.append(target) - - os.environ["PYBLISH_TARGETS"] = os.pathsep.join( - set(current_targets)) - - data = { - "last_workfile_path": workfile_path, - "start_last_workfile": True, - "project_name": project_name, - "asset_name": asset_name, - "task_name": task_name - } - - launched_app = application_manager.launch(app_name, **data) - - timeout = get_timeout(project_name, host_name, task_type) - - time_start = time.time() - while launched_app.poll() is None: - time.sleep(0.5) - if time.time() - time_start > timeout: - launched_app.terminate() - msg = "Timeout reached" - fail_batch(_id, dbcon, msg) @staticmethod def remotepublish(project, batch_path, user_email, targets=None): @@ -313,59 +214,10 @@ class PypeCommands: Raises: RuntimeError: When there is no path to process. 
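For illustration, the refactored body below is equivalent to calling the new helper directly; at this point in the series it lives in cli_functions (it is renamed to publish_functions a few commits later), and every value in this sketch is a placeholder rather than data from a real batch:

    from openpype.hosts.webpublisher.cli_functions import publish

    publish(
        project_name="demo_project",            # placeholder
        batch_path="/path/to/uploaded/batch",   # placeholder, must exist on disk
        user_email="artist@example.com",        # placeholder
        targets=["tvpaint_worker"],             # pyblish targets (may be None)
    )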
""" - if not batch_path: - raise RuntimeError("No publish paths specified") - # Register target and host - import pyblish.api - import pyblish.util + from openpype.hosts.webpublisher.cli_functions import publish - from openpype.pipeline import install_host - from openpype.hosts.webpublisher.api import WebpublisherHost - from openpype.lib import Logger - from openpype.lib.remote_publish import ( - get_webpublish_conn, - start_webpublish_log, - publish_and_log, - fail_batch, - get_task_data, - ) - - log = Logger.get_logger("remotepublish") - - log.info("remotepublish command") - - webpublisher_host = WebpublisherHost() - - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path - os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP"] = webpublisher_host.name - os.environ["USER_EMAIL"] = user_email - os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib - - if targets: - if isinstance(targets, str): - targets = [targets] - for target in targets: - pyblish.api.register_target(target) - - install_host(webpublisher_host) - - log.info("Running publish ...") - - _, batch_id = os.path.split(batch_path) - dbcon = get_webpublish_conn() - _id = start_webpublish_log(dbcon, batch_id, user_email) - - task_data = get_task_data(batch_path) - if not task_data["context"]: - msg = "Batch manifest must contain context data" - msg += "Create new batch and set context properly." - fail_batch(_id, dbcon, msg) - - publish_and_log(dbcon, _id, log, batch_id=batch_id) - - log.info("Publish finished.") + publish(project, batch_path, user_email, targets) @staticmethod def extractenvironments(output_json_path, project, asset, task, app, @@ -479,7 +331,6 @@ class PypeCommands: sync_server_module.server_init() sync_server_module.server_start() - import time while True: time.sleep(1.0) From eed9789287adeb1ba000262544368344be353ff9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 16:59:20 +0200 Subject: [PATCH 0686/2550] changed function names --- openpype/hosts/webpublisher/addon.py | 8 ++++---- openpype/hosts/webpublisher/cli_functions.py | 4 ++-- openpype/pype_commands.py | 12 ++++++++---- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index 1a4370c9a5..9e63030fe2 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -39,9 +39,9 @@ def publish(project, path, user=None, targets=None): More than one path is allowed. """ - from .cli_functions import publish + from .cli_functions import cli_publish - publish(project, path, user, targets) + cli_publish(project, path, user, targets) @cli_main.command() @@ -58,6 +58,6 @@ def publishfromapp(project, path, user=None, targets=None): More than one path is allowed. """ - from .cli_functions import publish_from_app + from .cli_functions import cli_publish_from_app - publish_from_app(project, path, user, targets) + cli_publish_from_app(project, path, user, targets) diff --git a/openpype/hosts/webpublisher/cli_functions.py b/openpype/hosts/webpublisher/cli_functions.py index cb2e59fac2..ad3bb596fb 100644 --- a/openpype/hosts/webpublisher/cli_functions.py +++ b/openpype/hosts/webpublisher/cli_functions.py @@ -23,7 +23,7 @@ from openpype.pipeline import install_host from openpype.hosts.webpublisher.api import WebpublisherHost -def publish(project_name, batch_path, user_email, targets): +def cli_publish(project_name, batch_path, user_email, targets): """Start headless publishing. 
Used to publish rendered assets, workfiles etc via Webpublisher. @@ -85,7 +85,7 @@ def publish(project_name, batch_path, user_email, targets): log.info("Publish finished.") -def publish_from_app( +def cli_publish_from_app( project_name, batch_path, host_name, user_email, targets ): """Opens installed variant of 'host' and run remote publish there. diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 1817724df1..b6c1228ade 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -186,9 +186,11 @@ class PypeCommands: (to choose validator for example) """ - from openpype.hosts.webpublisher.cli_functions import publish_from_app + from openpype.hosts.webpublisher.cli_functions import ( + cli_publish_from_app + ) - publish_from_app( + cli_publish_from_app( project_name, batch_path, host_name, user_email, targets ) @@ -215,9 +217,11 @@ class PypeCommands: RuntimeError: When there is no path to process. """ - from openpype.hosts.webpublisher.cli_functions import publish + from openpype.hosts.webpublisher.cli_functions import ( + cli_publish + ) - publish(project, batch_path, user_email, targets) + cli_publish(project, batch_path, user_email, targets) @staticmethod def extractenvironments(output_json_path, project, asset, task, app, From 531682f316bc0ec3212ac31edc2881a988385056 Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 25 Aug 2022 17:17:56 +0200 Subject: [PATCH 0687/2550] Kitsu|Change: Drop 'entities root' setting. Closes #3738 --- .../modules/kitsu/utils/update_op_with_zou.py | 109 +++++------------- .../defaults/project_settings/kitsu.json | 4 - .../projects_schema/schema_project_kitsu.json | 17 --- 3 files changed, 28 insertions(+), 102 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index e03cf2b30e..7c97e126e5 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -166,50 +166,21 @@ def update_op_assets( # Substitute item type for general classification (assets or shots) if item_type in ["Asset", "AssetType"]: - substitute_item_type = "assets" + entity_root_asset_name = "Assets" elif item_type in ["Episode", "Sequence"]: - substitute_item_type = "shots" - else: - substitute_item_type = f"{item_type.lower()}s" - entity_parent_folders = [ - f - for f in project_module_settings["entities_root"] - .get(substitute_item_type) - .split("/") - if f - ] + entity_root_asset_name = "Shots" # Root parent folder if exist visual_parent_doc_id = ( asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None ) if visual_parent_doc_id is None: - # Find root folder docs - root_folder_docs = get_assets( + # Find root folder doc ("Assets" or "Shots") + root_folder_doc = get_asset_by_name( project_name, - asset_names=[entity_parent_folders[-1]], + asset_name=entity_root_asset_name, fields=["_id", "data.root_of"], ) - # NOTE: Not sure why it's checking for entity type? - # OP3 does not support multiple assets with same names so type - # filtering is irelevant. 
- # This way mimics previous implementation: - # ``` - # root_folder_doc = dbcon.find_one( - # { - # "type": "asset", - # "name": entity_parent_folders[-1], - # "data.root_of": substitute_item_type, - # }, - # ["_id"], - # ) - # ``` - root_folder_doc = None - for folder_doc in root_folder_docs: - root_of = folder_doc.get("data", {}).get("root_of") - if root_of == substitute_item_type: - root_folder_doc = folder_doc - break if root_folder_doc: visual_parent_doc_id = root_folder_doc["_id"] @@ -240,7 +211,7 @@ def update_op_assets( item_name = item["name"] # Set root folders parents - item_data["parents"] = entity_parent_folders + item_data["parents"] + item_data["parents"] = [entity_root_asset_name] + item_data["parents"] # Update 'data' different in zou DB updated_data = { @@ -396,54 +367,30 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): zou_ids_and_asset_docs[project["id"]] = project_doc # Create entities root folders - project_module_settings = get_project_settings(project_name)["kitsu"] - for entity_type, root in project_module_settings["entities_root"].items(): - parent_folders = root.split("/") - direct_parent_doc = None - for i, folder in enumerate(parent_folders, 1): - parent_doc = get_asset_by_name( - project_name, folder, fields=["_id", "data.root_of"] - ) - # NOTE: Not sure why it's checking for entity type? - # OP3 does not support multiple assets with same names so type - # filtering is irelevant. - # Also all of the entities could find be queried at once using - # 'get_assets'. - # This way mimics previous implementation: - # ``` - # parent_doc = dbcon.find_one( - # {"type": "asset", "name": folder, "data.root_of": entity_type} - # ) - # ``` - if ( - parent_doc - and parent_doc.get("data", {}).get("root_of") != entity_type - ): - parent_doc = None - - if not parent_doc: - direct_parent_doc = dbcon.insert_one( - { - "name": folder, - "type": "asset", - "schema": "openpype:asset-3.0", - "data": { - "root_of": entity_type, - "parents": parent_folders[:i], - "visualParent": direct_parent_doc.inserted_id - if direct_parent_doc - else None, - "tasks": {}, - }, - } - ) + to_insert = [ + { + "name": r, + "type": "asset", + "schema": "openpype:asset-3.0", + "data": { + "root_of": r, + "tasks": {}, + }, + } + for r in ["Assets", "Shots"] + if not get_asset_by_name( + project_name, r, fields=["_id", "data.root_of"] + ) + ] # Create - to_insert = [ - create_op_asset(item) - for item in all_entities - if item["id"] not in zou_ids_and_asset_docs.keys() - ] + to_insert.extend( + [ + create_op_asset(item) + for item in all_entities + if item["id"] not in zou_ids_and_asset_docs.keys() + ] + ) if to_insert: # Insert doc in DB dbcon.insert_many(to_insert) diff --git a/openpype/settings/defaults/project_settings/kitsu.json b/openpype/settings/defaults/project_settings/kitsu.json index ba02d8d259..3a9723b9c0 100644 --- a/openpype/settings/defaults/project_settings/kitsu.json +++ b/openpype/settings/defaults/project_settings/kitsu.json @@ -1,8 +1,4 @@ { - "entities_root": { - "assets": "Assets", - "shots": "Shots" - }, "entities_naming_pattern": { "episode": "E##", "sequence": "SQ##", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json b/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json index 014a1b7886..fb47670e74 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_kitsu.json @@ -5,23 +5,6 @@ 
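Going back to the 'parents' handling in update_op_with_zou.py above: with the hard-coded root the composed hierarchy is easy to trace. A tiny worked sketch, with the existing parents value invented:

    # for an Episode or Sequence item the root becomes "Shots"
    entity_root_asset_name = "Shots"
    item_data = {"parents": ["SQ01"]}   # hypothetical parents built from Kitsu
    item_data["parents"] = [entity_root_asset_name] + item_data["parents"]
    assert item_data["parents"] == ["Shots", "SQ01"]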
"collapsible": true, "is_file": true, "children": [ - { - "type": "dict", - "key": "entities_root", - "label": "Entities root folder", - "children": [ - { - "type": "text", - "key": "assets", - "label": "Assets:" - }, - { - "type": "text", - "key": "shots", - "label": "Shots (includes Episodes & Sequences if any):" - } - ] - }, { "type": "dict", "key": "entities_naming_pattern", From dec6335ece01b37e6f0396807526ab16b5db1a6e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 17:41:18 +0200 Subject: [PATCH 0688/2550] move remote_publish funtion into pipeline publish --- openpype/pipeline/publish/lib.py | 40 ++++++++++++++++++++++++++++++ openpype/scripts/remote_publish.py | 7 +++--- 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index d5494cd8a4..9060a0bf4b 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -273,3 +273,43 @@ def filter_pyblish_plugins(plugins): option, value, plugin.__name__)) setattr(plugin, option, value) + + +def find_close_plugin(close_plugin_name, log): + if close_plugin_name: + plugins = pyblish.api.discover() + for plugin in plugins: + if plugin.__name__ == close_plugin_name: + return plugin + + log.debug("Close plugin not found, app might not close.") + + +def remote_publish(log, close_plugin_name=None, raise_error=False): + """Loops through all plugins, logs to console. Used for tests. + + Args: + log (openpype.lib.Logger) + close_plugin_name (str): name of plugin with responsibility to + close host app + """ + # Error exit as soon as any error occurs. + error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" + + close_plugin = find_close_plugin(close_plugin_name, log) + + for result in pyblish.util.publish_iter(): + for record in result["records"]: + log.info("{}: {}".format( + result["plugin"].label, record.msg)) + + if result["error"]: + error_message = error_format.format(**result) + log.error(error_message) + if close_plugin: # close host app explicitly after error + context = pyblish.api.Context() + close_plugin().process(context) + if raise_error: + # Fatal Error is because of Deadline + error_message = "Fatal Error: " + error_format.format(**result) + raise RuntimeError(error_message) diff --git a/openpype/scripts/remote_publish.py b/openpype/scripts/remote_publish.py index d322f369d1..37df35e36c 100644 --- a/openpype/scripts/remote_publish.py +++ b/openpype/scripts/remote_publish.py @@ -1,11 +1,12 @@ try: - from openpype.api import Logger - import openpype.lib.remote_publish + from openpype.lib import Logger + from openpype.pipeline.publish.lib import remote_publish except ImportError as exc: # Ensure Deadline fails by output an error that contains "Fatal Error:" raise ImportError("Fatal Error: %s" % exc) + if __name__ == "__main__": # Perform remote publish with thorough error checking log = Logger.get_logger(__name__) - openpype.lib.remote_publish.publish(log, raise_error=True) + remote_publish(log, raise_error=True) From c1a7b9aff5024c4df92493169e2741f044558b2d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 17:49:24 +0200 Subject: [PATCH 0689/2550] moved webpublisher specific functions into webpublisher --- openpype/hosts/webpublisher/lib.py | 278 +++++++++++++++++++++++++++++ 1 file changed, 278 insertions(+) create mode 100644 openpype/hosts/webpublisher/lib.py diff --git a/openpype/hosts/webpublisher/lib.py b/openpype/hosts/webpublisher/lib.py new file mode 100644 index 
0000000000..dde875c934 --- /dev/null +++ b/openpype/hosts/webpublisher/lib.py @@ -0,0 +1,278 @@ +import os +from datetime import datetime +import collections +import json + +from bson.objectid import ObjectId + +import pyblish.util +import pyblish.api + +from openpype.client.mongo import OpenPypeMongoConnection +from openpype.settings import get_project_settings +from openpype.lib import Logger +from openpype.lib.profiles_filtering import filter_profiles + +ERROR_STATUS = "error" +IN_PROGRESS_STATUS = "in_progress" +REPROCESS_STATUS = "reprocess" +SENT_REPROCESSING_STATUS = "sent_for_reprocessing" +FINISHED_REPROCESS_STATUS = "republishing_finished" +FINISHED_OK_STATUS = "finished_ok" + +log = Logger.get_logger(__name__) + + +def parse_json(path): + """Parses json file at 'path' location + + Returns: + (dict) or None if unparsable + Raises: + AsssertionError if 'path' doesn't exist + """ + path = path.strip('\"') + assert os.path.isfile(path), ( + "Path to json file doesn't exist. \"{}\"".format(path) + ) + data = None + with open(path, "r") as json_file: + try: + data = json.load(json_file) + except Exception as exc: + log.error( + "Error loading json: {} - Exception: {}".format(path, exc) + ) + return data + + +def get_batch_asset_task_info(ctx): + """Parses context data from webpublisher's batch metadata + + Returns: + (tuple): asset, task_name (Optional), task_type + """ + task_type = "default_task_type" + task_name = None + asset = None + + if ctx["type"] == "task": + items = ctx["path"].split('/') + asset = items[-2] + task_name = ctx["name"] + task_type = ctx["attributes"]["type"] + else: + asset = ctx["name"] + + return asset, task_name, task_type + + +def get_webpublish_conn(): + """Get connection to OP 'webpublishes' collection.""" + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + return mongo_client[database_name]["webpublishes"] + + +def start_webpublish_log(dbcon, batch_id, user): + """Start new log record for 'batch_id' + + Args: + dbcon (OpenPypeMongoConnection) + batch_id (str) + user (str) + Returns + (ObjectId) from DB + """ + return dbcon.insert_one({ + "batch_id": batch_id, + "start_date": datetime.now(), + "user": user, + "status": IN_PROGRESS_STATUS, + "progress": 0 # integer 0-100, percentage + }).inserted_id + + +def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): + """Loops through all plugins, logs ok and fails into OP DB. + + Args: + dbcon (OpenPypeMongoConnection) + _id (str) - id of current job in DB + log (openpype.lib.Logger) + batch_id (str) - id sent from frontend + close_plugin_name (str): name of plugin with responsibility to + close host app + """ + # Error exit as soon as any error occurs. 
+ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n" + error_format += "-" * 80 + "\n" + + close_plugin = _get_close_plugin(close_plugin_name, log) + + if isinstance(_id, str): + _id = ObjectId(_id) + + log_lines = [] + processed = 0 + log_every = 5 + for result in pyblish.util.publish_iter(): + for record in result["records"]: + log_lines.append("{}: {}".format( + result["plugin"].label, record.msg)) + processed += 1 + + if result["error"]: + log.error(error_format.format(**result)) + log_lines = [error_format.format(**result)] + log_lines + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "finish_date": datetime.now(), + "status": ERROR_STATUS, + "log": os.linesep.join(log_lines) + + }} + ) + if close_plugin: # close host app explicitly after error + context = pyblish.api.Context() + close_plugin().process(context) + return + elif processed % log_every == 0: + # pyblish returns progress in 0.0 - 2.0 + progress = min(round(result["progress"] / 2 * 100), 99) + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "progress": progress, + "log": os.linesep.join(log_lines) + }} + ) + + # final update + if batch_id: + dbcon.update_many( + {"batch_id": batch_id, "status": SENT_REPROCESSING_STATUS}, + { + "$set": + { + "finish_date": datetime.now(), + "status": FINISHED_REPROCESS_STATUS, + } + } + ) + + dbcon.update_one( + {"_id": _id}, + { + "$set": + { + "finish_date": datetime.now(), + "status": FINISHED_OK_STATUS, + "progress": 100, + "log": os.linesep.join(log_lines) + } + } + ) + + +def fail_batch(_id, dbcon, msg): + """Set current batch as failed as there is some problem. + + Raises: + ValueError + """ + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "finish_date": datetime.now(), + "status": ERROR_STATUS, + "log": msg + + }} + ) + raise ValueError(msg) + + +def find_variant_key(application_manager, host): + """Searches for latest installed variant for 'host' + + Args: + application_manager (ApplicationManager) + host (str) + Returns + (string) (optional) + Raises: + (ValueError) if no variant found + """ + app_group = application_manager.app_groups.get(host) + if not app_group or not app_group.enabled: + raise ValueError("No application {} configured".format(host)) + + found_variant_key = None + # finds most up-to-date variant if any installed + sorted_variants = collections.OrderedDict( + sorted(app_group.variants.items())) + for variant_key, variant in sorted_variants.items(): + for executable in variant.executables: + if executable.exists(): + found_variant_key = variant_key + + if not found_variant_key: + raise ValueError("No executable for {} found".format(host)) + + return found_variant_key + + +def _get_close_plugin(close_plugin_name, log): + if close_plugin_name: + plugins = pyblish.api.discover() + for plugin in plugins: + if plugin.__name__ == close_plugin_name: + return plugin + + log.debug("Close plugin not found, app might not close.") + + +def get_task_data(batch_dir): + """Return parsed data from first task manifest.json + + Used for `remotepublishfromapp` command where batch contains only + single task with publishable workfile. 
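To make the batch layout that the 'get_task_data' helper above and the cli functions expect a bit more concrete, here is a purely illustrative sketch; only the keys actually read in this series ('tasks', 'batch', 'task', 'files', 'context') are meaningful, all values are invented:

    # <batch_dir>/manifest.json          -> {"tasks": ["task_abc"]}
    # <batch_dir>/task_abc/manifest.json -> returned as the task data
    task_data = {
        "batch": "batch_123",               # invented batch id
        "task": "task_abc",                 # subfolder holding the files
        "files": ["workfile.psd"],          # first file is used as the workfile
        "context": {
            "type": "task",
            "path": "demo_project/hero/modeling",  # items[-2] -> asset "hero"
            "name": "modeling",
            "attributes": {"type": "Modeling"},
        },
    }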
+ + Returns: + (dict) + Throws: + (ValueError) if batch or task manifest not found or broken + """ + batch_data = parse_json(os.path.join(batch_dir, "manifest.json")) + if not batch_data: + raise ValueError( + "Cannot parse batch meta in {} folder".format(batch_dir)) + task_dir_name = batch_data["tasks"][0] + task_data = parse_json(os.path.join(batch_dir, task_dir_name, + "manifest.json")) + if not task_data: + raise ValueError( + "Cannot parse batch meta in {} folder".format(task_data)) + + return task_data + + +def get_timeout(project_name, host_name, task_type): + """Returns timeout(seconds) from Setting profile.""" + filter_data = { + "task_types": task_type, + "hosts": host_name + } + timeout_profiles = (get_project_settings(project_name)["webpublisher"] + ["timeout_profiles"]) + matching_item = filter_profiles(timeout_profiles, filter_data) + timeout = 3600 + if matching_item: + timeout = matching_item["timeout"] + + return timeout From 6c330c48969bffba508a83aa0d98cf93d804142f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 17:50:54 +0200 Subject: [PATCH 0690/2550] use lib functions from webpublisher --- .../plugins/publish/collect_batch_data.py | 8 ++++---- openpype/hosts/webpublisher/cli_functions.py | 17 +++++++++-------- .../plugins/publish/collect_batch_data.py | 11 ++++++----- .../plugins/publish/collect_published_files.py | 6 ++---- .../publish/collect_tvpaint_workfile_data.py | 2 +- .../webserver_service/webpublish_routes.py | 12 +++++------- .../webserver_service/webserver_cli.py | 14 +++++++------- 7 files changed, 34 insertions(+), 36 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py index 2881ef0ea6..5d50a78914 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -17,11 +17,11 @@ import os import pyblish.api -from openpype.lib.plugin_tools import ( - parse_json, - get_batch_asset_task_info -) from openpype.pipeline import legacy_io +from openpype_modules.webpublisher.lib import ( + get_batch_asset_task_info, + parse_json +) class CollectBatchData(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/cli_functions.py b/openpype/hosts/webpublisher/cli_functions.py index ad3bb596fb..83f53ced68 100644 --- a/openpype/hosts/webpublisher/cli_functions.py +++ b/openpype/hosts/webpublisher/cli_functions.py @@ -4,7 +4,15 @@ import pyblish.api import pyblish.util from openpype.lib import Logger -from openpype.lib.remote_publish import ( +from openpype.lib.applications import ( + ApplicationManager, + get_app_environments_for_context, +) +from openpype.pipeline import install_host +from openpype.hosts.webpublisher.api import WebpublisherHost + +from .lib import ( + get_batch_asset_task_info, get_webpublish_conn, start_webpublish_log, publish_and_log, @@ -14,13 +22,6 @@ from openpype.lib.remote_publish import ( get_timeout, IN_PROGRESS_STATUS ) -from openpype.lib.applications import ( - ApplicationManager, - get_app_environments_for_context, -) -from openpype.lib.plugin_tools import get_batch_asset_task_info -from openpype.pipeline import install_host -from openpype.hosts.webpublisher.api import WebpublisherHost def cli_publish(project_name, batch_path, user_email, targets): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py index 9ff779636a..eb2737b276 100644 
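The 'get_timeout' helper above leans on the generic profile filtering in openpype.lib; a self-contained sketch of that lookup follows, where the profile values and the shape of the list are assumptions and only the filter keys and the 3600s fallback come from the code above:

    from openpype.lib.profiles_filtering import filter_profiles

    timeout_profiles = [                    # assumed shape of the setting
        {"hosts": ["photoshop"], "task_types": [], "timeout": 7200},
    ]
    filter_data = {"task_types": "Modeling", "hosts": "photoshop"}
    matching_item = filter_profiles(timeout_profiles, filter_data)
    timeout = matching_item["timeout"] if matching_item else 3600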
--- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -13,12 +13,13 @@ import os import pyblish.api -from openpype.lib.plugin_tools import ( - parse_json, - get_batch_asset_task_info -) -from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS from openpype.pipeline import legacy_io +from openpype_modules.webpublisher.lib import ( + parse_json, + get_batch_asset_task_info, + get_webpublish_conn, + IN_PROGRESS_STATUS +) class CollectBatchData(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 20e277d794..454f78ce9d 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -23,10 +23,8 @@ from openpype.lib import ( get_ffprobe_streams, convert_ffprobe_fps_value, ) -from openpype.lib.plugin_tools import ( - parse_json, - get_subset_name_with_asset_doc -) +from openpype.lib.plugin_tools import get_subset_name_with_asset_doc +from openpype_modules.webpublisher.lib import parse_json class CollectPublishedFiles(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index f0f29260a2..b5f8ed9c8f 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -16,11 +16,11 @@ import uuid import json import shutil import pyblish.api -from openpype.lib.plugin_tools import parse_json from openpype.hosts.tvpaint.worker import ( SenderTVPaintCommands, CollectSceneData ) +from openpype_modules.webpublisher.lib import parse_json class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 2e9d460a98..e3de555ace 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -10,16 +10,16 @@ from aiohttp.web_response import Response from openpype.client import ( get_projects, get_assets, - OpenPypeMongoConnection, ) from openpype.lib import Logger -from openpype.lib.remote_publish import ( +from openpype.settings import get_project_settings +from openpype_modules.webserver.base_routes import RestApiEndpoint +from openpype_modules.webpublisher.lib import ( + get_webpublish_conn, get_task_data, ERROR_STATUS, REPROCESS_STATUS ) -from openpype.settings import get_project_settings -from openpype_modules.webserver.base_routes import RestApiEndpoint log = Logger.get_logger("WebpublishRoutes") @@ -77,9 +77,7 @@ class WebpublishRestApiResource(JsonApiResource): """Resource carrying OP DB connection for storing batch info into DB.""" def __init__(self): - mongo_client = OpenPypeMongoConnection.get_mongo_client() - database_name = os.environ["OPENPYPE_DATABASE_NAME"] - self.dbcon = mongo_client[database_name]["webpublishes"] + self.dbcon = get_webpublish_conn() class ProjectsEndpoint(ResourceRestApiEndpoint): diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 
936bd9735f..47c792a575 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -7,8 +7,15 @@ import json import subprocess from openpype.client import OpenPypeMongoConnection +from openpype.modules import ModulesManager from openpype.lib import Logger +from openpype_modules.webpublisher.lib import ( + ERROR_STATUS, + REPROCESS_STATUS, + SENT_REPROCESSING_STATUS +) + from .webpublish_routes import ( RestApiResource, WebpublishRestApiResource, @@ -21,19 +28,12 @@ from .webpublish_routes import ( TaskPublishEndpoint, UserReportEndpoint ) -from openpype.lib.remote_publish import ( - ERROR_STATUS, - REPROCESS_STATUS, - SENT_REPROCESSING_STATUS -) - log = Logger.get_logger("webserver_gui") def run_webserver(*args, **kwargs): """Runs webserver in command line, adds routes.""" - from openpype.modules import ModulesManager manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] From 233d70bdd8d21062842aa88b15841ac1fb61f0a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 17:54:05 +0200 Subject: [PATCH 0691/2550] headless_publish is a method on webpublisher addon --- openpype/hosts/aftereffects/api/lib.py | 19 ++++++++++++++----- openpype/hosts/photoshop/api/lib.py | 9 +++++---- openpype/hosts/webpublisher/addon.py | 24 ++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py index ce4cbf09af..d5583ee862 100644 --- a/openpype/hosts/aftereffects/api/lib.py +++ b/openpype/hosts/aftereffects/api/lib.py @@ -3,11 +3,12 @@ import sys import contextlib import traceback import logging +from functools import partial from Qt import QtWidgets from openpype.pipeline import install_host -from openpype.lib.remote_publish import headless_publish +from openpype.modules import ModulesManager from openpype.tools.utils import host_tools from .launch_logic import ProcessLauncher, get_stub @@ -35,10 +36,18 @@ def main(*subprocess_args): launcher.start() if os.environ.get("HEADLESS_PUBLISH"): - launcher.execute_in_main_thread(lambda: headless_publish( - log, - "CloseAE", - os.environ.get("IS_TEST"))) + manager = ModulesManager() + webpublisher_addon = manager["webpublisher"] + + launcher.execute_in_main_thread( + partial( + webpublisher_addon.headless_publish, + log, + "CloseAE", + os.environ.get("IS_TEST") + ) + ) + elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True): save = False if os.getenv("WORKFILES_SAVE_AS"): diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 2f57d64464..73a546604f 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -5,11 +5,10 @@ import traceback from Qt import QtWidgets -from openpype.api import Logger +from openpype.lib import env_value_to_bool, Logger +from openpype.modules import ModulesManager from openpype.pipeline import install_host from openpype.tools.utils import host_tools -from openpype.lib.remote_publish import headless_publish -from openpype.lib import env_value_to_bool from .launch_logic import ProcessLauncher, stub @@ -35,8 +34,10 @@ def main(*subprocess_args): launcher.start() if env_value_to_bool("HEADLESS_PUBLISH"): + manager = ModulesManager() + webpublisher_addon = manager["webpublisher"] launcher.execute_in_main_thread( - headless_publish, + webpublisher_addon.headless_publish, log, "ClosePS", os.environ.get("IS_TEST") diff --git 
a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index 9e63030fe2..0bba8adc4b 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -15,6 +15,30 @@ class WebpublisherAddon(OpenPypeModule, IHostModule): def initialize(self, module_settings): self.enabled = True + def headless_publish(self, log, close_plugin_name=None, is_test=False): + """Runs publish in a opened host with a context. + + Close Python process at the end. + """ + + from openpype.pipeline.publish.lib import remote_publish + from .lib import get_webpublish_conn, publish_and_log + + if is_test: + remote_publish(log, close_plugin_name) + return + + dbcon = get_webpublish_conn() + _id = os.environ.get("BATCH_LOG_ID") + if not _id: + log.warning("Unable to store log records, " + "batch will be unfinished!") + return + + publish_and_log( + dbcon, _id, log, close_plugin_name=close_plugin_name + ) + def cli(self, click_group): click_group.add_command(cli_main) From a98c7953aaf4fecf465b7e9b95357de2056e3018 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 17:54:30 +0200 Subject: [PATCH 0692/2550] use 'find_close_plugin' --- openpype/hosts/webpublisher/lib.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/webpublisher/lib.py b/openpype/hosts/webpublisher/lib.py index dde875c934..4bc3f1db80 100644 --- a/openpype/hosts/webpublisher/lib.py +++ b/openpype/hosts/webpublisher/lib.py @@ -12,6 +12,7 @@ from openpype.client.mongo import OpenPypeMongoConnection from openpype.settings import get_project_settings from openpype.lib import Logger from openpype.lib.profiles_filtering import filter_profiles +from openpype.pipeline.publish.lib import find_close_plugin ERROR_STATUS = "error" IN_PROGRESS_STATUS = "in_progress" @@ -108,7 +109,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n" error_format += "-" * 80 + "\n" - close_plugin = _get_close_plugin(close_plugin_name, log) + close_plugin = find_close_plugin(close_plugin_name, log) if isinstance(_id, str): _id = ObjectId(_id) @@ -227,16 +228,6 @@ def find_variant_key(application_manager, host): return found_variant_key -def _get_close_plugin(close_plugin_name, log): - if close_plugin_name: - plugins = pyblish.api.discover() - for plugin in plugins: - if plugin.__name__ == close_plugin_name: - return plugin - - log.debug("Close plugin not found, app might not close.") - - def get_task_data(batch_dir): """Return parsed data from first task manifest.json From 9c3e37e3f4ab83e465b71d908bcec439df011385 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 17:55:04 +0200 Subject: [PATCH 0693/2550] removed unused functions from openpype lib --- openpype/lib/plugin_tools.py | 45 ------ openpype/lib/remote_publish.py | 277 --------------------------------- 2 files changed, 322 deletions(-) delete mode 100644 openpype/lib/remote_publish.py diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 060db94ae0..659210e6e3 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -427,48 +427,3 @@ def get_background_layers(file_url): layer.get("filename")). 
replace("\\", "/")) return layers - - -def parse_json(path): - """Parses json file at 'path' location - - Returns: - (dict) or None if unparsable - Raises: - AsssertionError if 'path' doesn't exist - """ - path = path.strip('\"') - assert os.path.isfile(path), ( - "Path to json file doesn't exist. \"{}\"".format(path) - ) - data = None - with open(path, "r") as json_file: - try: - data = json.load(json_file) - except Exception as exc: - log.error( - "Error loading json: " - "{} - Exception: {}".format(path, exc) - ) - return data - - -def get_batch_asset_task_info(ctx): - """Parses context data from webpublisher's batch metadata - - Returns: - (tuple): asset, task_name (Optional), task_type - """ - task_type = "default_task_type" - task_name = None - asset = None - - if ctx["type"] == "task": - items = ctx["path"].split('/') - asset = items[-2] - task_name = ctx["name"] - task_type = ctx["attributes"]["type"] - else: - asset = ctx["name"] - - return asset, task_name, task_type diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py deleted file mode 100644 index 2a901544cc..0000000000 --- a/openpype/lib/remote_publish.py +++ /dev/null @@ -1,277 +0,0 @@ -import os -from datetime import datetime -import collections - -from bson.objectid import ObjectId - -import pyblish.util -import pyblish.api - -from openpype.client.mongo import OpenPypeMongoConnection -from openpype.lib.plugin_tools import parse_json -from openpype.lib.profiles_filtering import filter_profiles -from openpype.api import get_project_settings - -ERROR_STATUS = "error" -IN_PROGRESS_STATUS = "in_progress" -REPROCESS_STATUS = "reprocess" -SENT_REPROCESSING_STATUS = "sent_for_reprocessing" -FINISHED_REPROCESS_STATUS = "republishing_finished" -FINISHED_OK_STATUS = "finished_ok" - - -def headless_publish(log, close_plugin_name=None, is_test=False): - """Runs publish in a opened host with a context and closes Python process. - """ - if not is_test: - dbcon = get_webpublish_conn() - _id = os.environ.get("BATCH_LOG_ID") - if not _id: - log.warning("Unable to store log records, " - "batch will be unfinished!") - return - - publish_and_log(dbcon, _id, log, close_plugin_name=close_plugin_name) - else: - publish(log, close_plugin_name) - - -def get_webpublish_conn(): - """Get connection to OP 'webpublishes' collection.""" - mongo_client = OpenPypeMongoConnection.get_mongo_client() - database_name = os.environ["OPENPYPE_DATABASE_NAME"] - return mongo_client[database_name]["webpublishes"] - - -def start_webpublish_log(dbcon, batch_id, user): - """Start new log record for 'batch_id' - - Args: - dbcon (OpenPypeMongoConnection) - batch_id (str) - user (str) - Returns - (ObjectId) from DB - """ - return dbcon.insert_one({ - "batch_id": batch_id, - "start_date": datetime.now(), - "user": user, - "status": IN_PROGRESS_STATUS, - "progress": 0 # integer 0-100, percentage - }).inserted_id - - -def publish(log, close_plugin_name=None, raise_error=False): - """Loops through all plugins, logs to console. Used for tests. - - Args: - log (openpype.lib.Logger) - close_plugin_name (str): name of plugin with responsibility to - close host app - """ - # Error exit as soon as any error occurs. 
- error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" - - close_plugin = _get_close_plugin(close_plugin_name, log) - - for result in pyblish.util.publish_iter(): - for record in result["records"]: - log.info("{}: {}".format( - result["plugin"].label, record.msg)) - - if result["error"]: - error_message = error_format.format(**result) - log.error(error_message) - if close_plugin: # close host app explicitly after error - context = pyblish.api.Context() - close_plugin().process(context) - if raise_error: - # Fatal Error is because of Deadline - error_message = "Fatal Error: " + error_format.format(**result) - raise RuntimeError(error_message) - - -def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): - """Loops through all plugins, logs ok and fails into OP DB. - - Args: - dbcon (OpenPypeMongoConnection) - _id (str) - id of current job in DB - log (openpype.lib.Logger) - batch_id (str) - id sent from frontend - close_plugin_name (str): name of plugin with responsibility to - close host app - """ - # Error exit as soon as any error occurs. - error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n" - error_format += "-" * 80 + "\n" - - close_plugin = _get_close_plugin(close_plugin_name, log) - - if isinstance(_id, str): - _id = ObjectId(_id) - - log_lines = [] - processed = 0 - log_every = 5 - for result in pyblish.util.publish_iter(): - for record in result["records"]: - log_lines.append("{}: {}".format( - result["plugin"].label, record.msg)) - processed += 1 - - if result["error"]: - log.error(error_format.format(**result)) - log_lines = [error_format.format(**result)] + log_lines - dbcon.update_one( - {"_id": _id}, - {"$set": - { - "finish_date": datetime.now(), - "status": ERROR_STATUS, - "log": os.linesep.join(log_lines) - - }} - ) - if close_plugin: # close host app explicitly after error - context = pyblish.api.Context() - close_plugin().process(context) - return - elif processed % log_every == 0: - # pyblish returns progress in 0.0 - 2.0 - progress = min(round(result["progress"] / 2 * 100), 99) - dbcon.update_one( - {"_id": _id}, - {"$set": - { - "progress": progress, - "log": os.linesep.join(log_lines) - }} - ) - - # final update - if batch_id: - dbcon.update_many( - {"batch_id": batch_id, "status": SENT_REPROCESSING_STATUS}, - { - "$set": - { - "finish_date": datetime.now(), - "status": FINISHED_REPROCESS_STATUS, - } - } - ) - - dbcon.update_one( - {"_id": _id}, - { - "$set": - { - "finish_date": datetime.now(), - "status": FINISHED_OK_STATUS, - "progress": 100, - "log": os.linesep.join(log_lines) - } - } - ) - - -def fail_batch(_id, dbcon, msg): - """Set current batch as failed as there is some problem. 
- - Raises: - ValueError - """ - dbcon.update_one( - {"_id": _id}, - {"$set": - { - "finish_date": datetime.now(), - "status": ERROR_STATUS, - "log": msg - - }} - ) - raise ValueError(msg) - - -def find_variant_key(application_manager, host): - """Searches for latest installed variant for 'host' - - Args: - application_manager (ApplicationManager) - host (str) - Returns - (string) (optional) - Raises: - (ValueError) if no variant found - """ - app_group = application_manager.app_groups.get(host) - if not app_group or not app_group.enabled: - raise ValueError("No application {} configured".format(host)) - - found_variant_key = None - # finds most up-to-date variant if any installed - sorted_variants = collections.OrderedDict( - sorted(app_group.variants.items())) - for variant_key, variant in sorted_variants.items(): - for executable in variant.executables: - if executable.exists(): - found_variant_key = variant_key - - if not found_variant_key: - raise ValueError("No executable for {} found".format(host)) - - return found_variant_key - - -def _get_close_plugin(close_plugin_name, log): - if close_plugin_name: - plugins = pyblish.api.discover() - for plugin in plugins: - if plugin.__name__ == close_plugin_name: - return plugin - - log.debug("Close plugin not found, app might not close.") - - -def get_task_data(batch_dir): - """Return parsed data from first task manifest.json - - Used for `remotepublishfromapp` command where batch contains only - single task with publishable workfile. - - Returns: - (dict) - Throws: - (ValueError) if batch or task manifest not found or broken - """ - batch_data = parse_json(os.path.join(batch_dir, "manifest.json")) - if not batch_data: - raise ValueError( - "Cannot parse batch meta in {} folder".format(batch_dir)) - task_dir_name = batch_data["tasks"][0] - task_data = parse_json(os.path.join(batch_dir, task_dir_name, - "manifest.json")) - if not task_data: - raise ValueError( - "Cannot parse batch meta in {} folder".format(task_data)) - - return task_data - - -def get_timeout(project_name, host_name, task_type): - """Returns timeout(seconds) from Setting profile.""" - filter_data = { - "task_types": task_type, - "hosts": host_name - } - timeout_profiles = (get_project_settings(project_name)["webpublisher"] - ["timeout_profiles"]) - matching_item = filter_profiles(timeout_profiles, filter_data) - timeout = 3600 - if matching_item: - timeout = matching_item["timeout"] - - return timeout From d5f6ad9fdc1727cd3c631698ae258ed8485ca479 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 17:58:09 +0200 Subject: [PATCH 0694/2550] renamed 'cli_functions' to 'publish_functions' --- openpype/hosts/webpublisher/addon.py | 4 ++-- .../webpublisher/{cli_functions.py => publish_functions.py} | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename openpype/hosts/webpublisher/{cli_functions.py => publish_functions.py} (100%) diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index 0bba8adc4b..cb639db3fa 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -63,7 +63,7 @@ def publish(project, path, user=None, targets=None): More than one path is allowed. """ - from .cli_functions import cli_publish + from .publish_functions import cli_publish cli_publish(project, path, user, targets) @@ -82,6 +82,6 @@ def publishfromapp(project, path, user=None, targets=None): More than one path is allowed. 
""" - from .cli_functions import cli_publish_from_app + from .publish_functions import cli_publish_from_app cli_publish_from_app(project, path, user, targets) diff --git a/openpype/hosts/webpublisher/cli_functions.py b/openpype/hosts/webpublisher/publish_functions.py similarity index 100% rename from openpype/hosts/webpublisher/cli_functions.py rename to openpype/hosts/webpublisher/publish_functions.py From 338d12e60cd7f2fe9a15efecac07ce7ae8449d57 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:03:09 +0200 Subject: [PATCH 0695/2550] added cli command for webserver --- openpype/hosts/webpublisher/addon.py | 19 +++++++++++++++++++ .../webserver_service/__init__.py | 6 ++++++ .../webserver_service/webserver_cli.py | 16 ++++++++++------ 3 files changed, 35 insertions(+), 6 deletions(-) create mode 100644 openpype/hosts/webpublisher/webserver_service/__init__.py diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index cb639db3fa..85e16de4a6 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -85,3 +85,22 @@ def publishfromapp(project, path, user=None, targets=None): from .publish_functions import cli_publish_from_app cli_publish_from_app(project, path, user, targets) + + +@cli_main.command() +@click.option("-h", "--host", help="Host", default=None) +@click.option("-p", "--port", help="Port", default=None) +@click.option("-e", "--executable", help="Executable") +@click.option("-u", "--upload_dir", help="Upload dir") +def webserver(executable, upload_dir, host=None, port=None): + """Starts webserver for communication with Webpublish FR via command line + + OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND + FTRACK_BOT_API_KEY provided with api key from Ftrack. + + Expect "pype.club" user created on Ftrack. 
+ """ + + from .webserver_service import run_webserver + + run_webserver(executable, upload_dir, host, port) diff --git a/openpype/hosts/webpublisher/webserver_service/__init__.py b/openpype/hosts/webpublisher/webserver_service/__init__.py new file mode 100644 index 0000000000..e43f3f063a --- /dev/null +++ b/openpype/hosts/webpublisher/webserver_service/__init__.py @@ -0,0 +1,6 @@ +from .webserver_cli import run_webserver + + +__all__ = ( + "run_webserver", +) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 47c792a575..093b53d9d3 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -32,21 +32,25 @@ from .webpublish_routes import ( log = Logger.get_logger("webserver_gui") -def run_webserver(*args, **kwargs): +def run_webserver(executable, upload_dir, host=None, port=None): """Runs webserver in command line, adds routes.""" + if not host: + host = "localhost" + if not port: + port = 8079 + manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] - host = kwargs.get("host") or "localhost" - port = kwargs.get("port") or 8079 + server_manager = webserver_module.create_new_server_manager(port, host) webserver_url = server_manager.url # queue for remotepublishfromapp tasks studio_task_queue = collections.deque() resource = RestApiResource(server_manager, - upload_dir=kwargs["upload_dir"], - executable=kwargs["executable"], + upload_dir=upload_dir, + executable=executable, studio_task_queue=studio_task_queue) projects_endpoint = ProjectsEndpoint(resource) server_manager.add_route( @@ -111,7 +115,7 @@ def run_webserver(*args, **kwargs): last_reprocessed = time.time() while True: if time.time() - last_reprocessed > 20: - reprocess_failed(kwargs["upload_dir"], webserver_url) + reprocess_failed(upload_dir, webserver_url) last_reprocessed = time.time() if studio_task_queue: args = studio_task_queue.popleft() From e2c83c142684ccf0f400ffd345ddcf530bf49ed7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:03:40 +0200 Subject: [PATCH 0696/2550] renamed webserver_cli.py into webserver.py --- openpype/hosts/webpublisher/webserver_service/__init__.py | 2 +- .../webserver_service/{webserver_cli.py => webserver.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename openpype/hosts/webpublisher/webserver_service/{webserver_cli.py => webserver.py} (100%) diff --git a/openpype/hosts/webpublisher/webserver_service/__init__.py b/openpype/hosts/webpublisher/webserver_service/__init__.py index e43f3f063a..73111d286e 100644 --- a/openpype/hosts/webpublisher/webserver_service/__init__.py +++ b/openpype/hosts/webpublisher/webserver_service/__init__.py @@ -1,4 +1,4 @@ -from .webserver_cli import run_webserver +from .webserver import run_webserver __all__ = ( diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver.py similarity index 100% rename from openpype/hosts/webpublisher/webserver_service/webserver_cli.py rename to openpype/hosts/webpublisher/webserver_service/webserver.py From 971ae6d1ed0ad07713524dd4e7066e100fd22b34 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:04:37 +0200 Subject: [PATCH 0697/2550] fix import in global commands --- openpype/pype_commands.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pype_commands.py 
b/openpype/pype_commands.py index b6c1228ade..fe46a4bc54 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -61,8 +61,8 @@ class PypeCommands: @staticmethod def launch_webpublisher_webservercli(*args, **kwargs): - from openpype.hosts.webpublisher.webserver_service.webserver_cli \ - import (run_webserver) + from openpype.hosts.webpublisher.webserver_service import run_webserver + return run_webserver(*args, **kwargs) @staticmethod From ae11ae16d5fd0b9f1cdb86263a25ace48bbf9b04 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:12:20 +0200 Subject: [PATCH 0698/2550] modify launch arguments --- .../webserver_service/webpublish_routes.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index e3de555ace..4039d2c8ec 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -14,6 +14,7 @@ from openpype.client import ( from openpype.lib import Logger from openpype.settings import get_project_settings from openpype_modules.webserver.base_routes import RestApiEndpoint +from openpype_modules.webpublisher import WebpublisherAddon from openpype_modules.webpublisher.lib import ( get_webpublish_conn, get_task_data, @@ -213,7 +214,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): # TVPaint filter { "extensions": [".tvpp"], - "command": "remotepublish", + "command": "publish", "arguments": { "targets": ["tvpaint_worker"] }, @@ -222,13 +223,13 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): # Photoshop filter { "extensions": [".psd", ".psb"], - "command": "remotepublishfromapp", + "command": "publishfromapp", "arguments": { - # Command 'remotepublishfromapp' requires --host argument + # Command 'publishfromapp' requires --host argument "host": "photoshop", # Make sure targets are set to None for cases that default # would change - # - targets argument is not used in 'remotepublishfromapp' + # - targets argument is not used in 'publishfromapp' "targets": ["remotepublish"] }, # does publish need to be handled by a queue, eg. 
only @@ -240,7 +241,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): batch_dir = os.path.join(self.resource.upload_dir, content["batch"]) # Default command and arguments - command = "remotepublish" + command = "publish" add_args = { # All commands need 'project' and 'user' "project": content["project_name"], @@ -271,6 +272,8 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): args = [ openpype_app, + "module", + WebpublisherAddon.name, command, batch_dir ] From 808d1a5dd121d2f771a75d8ea4d061522ca42306 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:36:11 +0200 Subject: [PATCH 0699/2550] abstrac provides has log attribute --- .../sync_server/providers/abstract_provider.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 9c808dc80e..e11a8ba71e 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -10,6 +10,8 @@ class AbstractProvider: CODE = '' LABEL = '' + _log = None + def __init__(self, project_name, site_name, tree=None, presets=None): self.presets = None self.active = False @@ -19,6 +21,12 @@ class AbstractProvider: super(AbstractProvider, self).__init__() + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @abc.abstractmethod def is_active(self): """ @@ -199,11 +207,11 @@ class AbstractProvider: path = anatomy.fill_root(path) except KeyError: msg = "Error in resolving local root from anatomy" - log.error(msg) + self.log.error(msg) raise ValueError(msg) except IndexError: msg = "Path {} contains unfillable placeholder" - log.error(msg) + self.log.error(msg) raise ValueError(msg) return path From 5631fb66a79fe64c38073217aebe32b1a0fa5c60 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:36:31 +0200 Subject: [PATCH 0700/2550] use log attribute in provides --- .../modules/sync_server/providers/dropbox.py | 17 +++--- .../modules/sync_server/providers/gdrive.py | 53 +++++++++++-------- .../modules/sync_server/providers/sftp.py | 15 +++--- 3 files changed, 45 insertions(+), 40 deletions(-) diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index 89d6990841..e026ae7ef6 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -2,12 +2,9 @@ import os import dropbox -from openpype.api import Logger from .abstract_provider import AbstractProvider from ..utils import EditableScopes -log = Logger().get_logger("SyncServer") - class DropboxHandler(AbstractProvider): CODE = 'dropbox' @@ -20,26 +17,26 @@ class DropboxHandler(AbstractProvider): self.dbx = None if not self.presets: - log.info( + self.log.info( "Sync Server: There are no presets for {}.".format(site_name) ) return if not self.presets["enabled"]: - log.debug("Sync Server: Site {} not enabled for {}.". + self.log.debug("Sync Server: Site {} not enabled for {}.". 
format(site_name, project_name)) return token = self.presets.get("token", "") if not token: msg = "Sync Server: No access token for dropbox provider" - log.info(msg) + self.log.info(msg) return team_folder_name = self.presets.get("team_folder_name", "") if not team_folder_name: msg = "Sync Server: No team folder name for dropbox provider" - log.info(msg) + self.log.info(msg) return acting_as_member = self.presets.get("acting_as_member", "") @@ -47,7 +44,7 @@ class DropboxHandler(AbstractProvider): msg = ( "Sync Server: No acting member for dropbox provider" ) - log.info(msg) + self.log.info(msg) return try: @@ -55,7 +52,7 @@ class DropboxHandler(AbstractProvider): token, acting_as_member, team_folder_name ) except Exception as e: - log.info("Could not establish dropbox object: {}".format(e)) + self.log.info("Could not establish dropbox object: {}".format(e)) return super(AbstractProvider, self).__init__() @@ -448,7 +445,7 @@ class DropboxHandler(AbstractProvider): path = anatomy.fill_root(path) except KeyError: msg = "Error in resolving local root from anatomy" - log.error(msg) + self.log.error(msg) raise ValueError(msg) return path diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index bef707788b..9a3ce89cf5 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -5,12 +5,12 @@ import sys import six import platform -from openpype.api import Logger -from openpype.api import get_system_settings +from openpype.lib import Logger +from openpype.settings import get_system_settings from .abstract_provider import AbstractProvider from ..utils import time_function, ResumableError -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("GDriveHandler") try: from googleapiclient.discovery import build @@ -69,13 +69,17 @@ class GDriveHandler(AbstractProvider): self.presets = presets if not self.presets: - log.info("Sync Server: There are no presets for {}.". - format(site_name)) + self.log.info( + "Sync Server: There are no presets for {}.".format(site_name) + ) return if not self.presets["enabled"]: - log.debug("Sync Server: Site {} not enabled for {}.". - format(site_name, project_name)) + self.log.debug( + "Sync Server: Site {} not enabled for {}.".format( + site_name, project_name + ) + ) return current_platform = platform.system().lower() @@ -85,20 +89,22 @@ class GDriveHandler(AbstractProvider): if not cred_path: msg = "Sync Server: Please, fill the credentials for gdrive "\ "provider for platform '{}' !".format(current_platform) - log.info(msg) + self.log.info(msg) return try: cred_path = cred_path.format(**os.environ) except KeyError as e: - log.info("Sync Server: The key(s) {} does not exist in the " - "environment variables".format(" ".join(e.args))) + self.log.info(( + "Sync Server: The key(s) {} does not exist in the " + "environment variables" + ).format(" ".join(e.args))) return if not os.path.exists(cred_path): msg = "Sync Server: No credentials for gdrive provider " + \ "for '{}' on path '{}'!".format(site_name, cred_path) - log.info(msg) + self.log.info(msg) return self.service = None @@ -318,7 +324,7 @@ class GDriveHandler(AbstractProvider): fields='id') media.stream() - log.debug("Start Upload! {}".format(source_path)) + self.log.debug("Start Upload! 
{}".format(source_path)) last_tick = status = response = None status_val = 0 while response is None: @@ -331,7 +337,7 @@ class GDriveHandler(AbstractProvider): if not last_tick or \ time.time() - last_tick >= server.LOG_PROGRESS_SEC: last_tick = time.time() - log.debug("Uploaded %d%%." % + self.log.debug("Uploaded %d%%." % int(status_val * 100)) server.update_db(project_name=project_name, new_file_id=None, @@ -350,8 +356,9 @@ class GDriveHandler(AbstractProvider): if 'has not granted' in ex._get_reason().strip(): raise PermissionError(ex._get_reason().strip()) - log.warning("Forbidden received, hit quota. " - "Injecting 60s delay.") + self.log.warning( + "Forbidden received, hit quota. Injecting 60s delay." + ) time.sleep(60) return False raise @@ -417,7 +424,7 @@ class GDriveHandler(AbstractProvider): if not last_tick or \ time.time() - last_tick >= server.LOG_PROGRESS_SEC: last_tick = time.time() - log.debug("Downloaded %d%%." % + self.log.debug("Downloaded %d%%." % int(status_val * 100)) server.update_db(project_name=project_name, new_file_id=None, @@ -629,9 +636,9 @@ class GDriveHandler(AbstractProvider): ["gdrive"] ) except KeyError: - log.info(("Sync Server: There are no presets for Gdrive " + - "provider."). - format(str(provider_presets))) + log.info(( + "Sync Server: There are no presets for Gdrive provider." + ).format(str(provider_presets))) return return provider_presets @@ -704,7 +711,7 @@ class GDriveHandler(AbstractProvider): roots[self.MY_DRIVE_STR] = self.service.files() \ .get(fileId='root').execute() except errors.HttpError: - log.warning("HttpError in sync loop, " + self.log.warning("HttpError in sync loop, " "trying next loop", exc_info=True) raise ResumableError @@ -727,7 +734,7 @@ class GDriveHandler(AbstractProvider): Returns: (dictionary) path as a key, folder id as a value """ - log.debug("build_tree len {}".format(len(folders))) + self.log.debug("build_tree len {}".format(len(folders))) if not self.root: # build only when necessary, could be expensive self.root = self._prepare_root_info() @@ -779,9 +786,9 @@ class GDriveHandler(AbstractProvider): loop_cnt += 1 if len(no_parents_yet) > 0: - log.debug("Some folders path are not resolved {}". + self.log.debug("Some folders path are not resolved {}". format(no_parents_yet)) - log.debug("Remove deleted folders from trash.") + self.log.debug("Remove deleted folders from trash.") return tree diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 302ffae3e6..40f11cb9dd 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -4,10 +4,10 @@ import time import threading import platform -from openpype.api import Logger -from openpype.api import get_system_settings +from openpype.lib import Logger +from openpype.settings import get_system_settings from .abstract_provider import AbstractProvider -log = Logger().get_logger("SyncServer") +log = Logger.get_logger("SyncServer-SFTPHandler") pysftp = None try: @@ -43,8 +43,9 @@ class SFTPHandler(AbstractProvider): self.presets = presets if not self.presets: - log.warning("Sync Server: There are no presets for {}.". 
- format(site_name)) + self.log.warning( + "Sync Server: There are no presets for {}.".format(site_name) + ) return # store to instance for reconnect @@ -423,7 +424,7 @@ class SFTPHandler(AbstractProvider): return pysftp.Connection(**conn_params) except (paramiko.ssh_exception.SSHException, pysftp.exceptions.ConnectionException): - log.warning("Couldn't connect", exc_info=True) + self.log.warning("Couldn't connect", exc_info=True) def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): @@ -445,7 +446,7 @@ class SFTPHandler(AbstractProvider): time.time() - last_tick >= server.LOG_PROGRESS_SEC: status_val = target_file_size / source_file_size last_tick = time.time() - log.debug(direction + "ed %d%%." % int(status_val * 100)) + self.log.debug(direction + "ed %d%%." % int(status_val * 100)) server.update_db(project_name=project_name, new_file_id=None, file=file, From 54b8719b76c98b30d30e81b828e2dfb9ce13d0a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:49:04 +0200 Subject: [PATCH 0701/2550] fix attr initialization --- openpype/modules/timers_manager/rest_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index 6686407350..4a2e9e6575 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -10,7 +10,7 @@ class TimersManagerModuleRestApi: happens in Workfile app. """ def __init__(self, user_module, server_manager): - self.log = None + self._log = None self.module = user_module self.server_manager = server_manager From 59f36cc7c8ef54e3ac54d547e5f772bc726f3f1b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 18:49:20 +0200 Subject: [PATCH 0702/2550] log traceback when webserver connection is not possible --- openpype/modules/webserver/webserver_module.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index 686bd27bfd..16861abd29 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -53,9 +53,12 @@ class WebServerModule(OpenPypeModule, ITrayService): try: module.webserver_initialization(self.server_manager) except Exception: - self.log.warning(( - "Failed to connect module \"{}\" to webserver." - ).format(module.name)) + self.log.warning( + ( + "Failed to connect module \"{}\" to webserver." 
+ ).format(module.name), + exc_info=True + ) def tray_init(self): self.create_server_manager() From 3ad9533fa82955301383c53e096d8fde2067c778 Mon Sep 17 00:00:00 2001 From: maxpareschi Date: Thu, 25 Aug 2022 20:10:27 +0200 Subject: [PATCH 0703/2550] workfile template also matches against os.environ --- openpype/pipeline/workfile/path_resolving.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index ed1d1d793e..4cd225a515 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -408,6 +408,9 @@ def get_custom_workfile_template( # add root dict anatomy_context_data["root"] = anatomy.roots + # extend anatomy context with os.environ + anatomy_context_data.update(os.environ) + # get task type for the task in context current_task_type = anatomy_context_data["task"]["type"] From bad5b9b194f498903900ee283ed5a4b14e25a198 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 10:04:17 +0200 Subject: [PATCH 0704/2550] fix import --- openpype/hosts/photoshop/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/__init__.py b/openpype/hosts/photoshop/__init__.py index b3f66ea35c..773f73d624 100644 --- a/openpype/hosts/photoshop/__init__.py +++ b/openpype/hosts/photoshop/__init__.py @@ -1,4 +1,4 @@ -from .module import ( +from .addon import ( PhotoshopAddon, PHOTOSHOP_HOST_DIR, ) From 45c112eb84ae741d4b102ea89ac5c64c01f591f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 10:12:35 +0200 Subject: [PATCH 0705/2550] fixed arguments --- openpype/hosts/webpublisher/addon.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index 85e16de4a6..7d26d5a7ff 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -57,7 +57,7 @@ def cli_main(): @click.option("-t", "--targets", help="Targets", default=None, multiple=True) def publish(project, path, user=None, targets=None): - """Start CLI publishing. + """Start publishing (Inner command). Publish collects json from paths provided as an argument. More than one path is allowed. @@ -70,13 +70,13 @@ def publish(project, path, user=None, targets=None): @cli_main.command() @click.argument("path") +@click.option("-p", "--project", help="Project") @click.option("-h", "--host", help="Host") @click.option("-u", "--user", help="User email address") -@click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets", default=None, multiple=True) -def publishfromapp(project, path, user=None, targets=None): - """Start CLI publishing. +def publishfromapp(project, path, host, user=None, targets=None): + """Start publishing through application (Inner command). Publish collects json from paths provided as an argument. More than one path is allowed. 
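# NOTE: illustrative sketch, not part of the patches above. Together with the
# batch-publish endpoint change earlier in this series, headless publishing
# now goes through the module CLI instead of a top-level "remotepublish"
# command. Assuming WebpublisherAddon.name resolves to "webpublisher" and
# `openpype_app` points at the OpenPype executable (both assumptions), the
# endpoint assembles a command roughly like this:
import subprocess

openpype_app = "openpype_console"            # assumed executable name
batch_dir = "/path/to/upload_dir/batch-id"   # hypothetical batch folder

args = [
    openpype_app,
    "module", "webpublisher", "publish",     # was: "remotepublish"
    batch_dir,
    "--project", "example_project",          # hypothetical 'project' value
    "--user", "artist@example.com",          # hypothetical 'user' value
]
subprocess.call(args)
# (end of illustrative sketch)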
@@ -84,16 +84,16 @@ def publishfromapp(project, path, user=None, targets=None): from .publish_functions import cli_publish_from_app - cli_publish_from_app(project, path, user, targets) + cli_publish_from_app(project, path, host, user, targets) @cli_main.command() -@click.option("-h", "--host", help="Host", default=None) -@click.option("-p", "--port", help="Port", default=None) @click.option("-e", "--executable", help="Executable") @click.option("-u", "--upload_dir", help="Upload dir") +@click.option("-h", "--host", help="Host", default=None) +@click.option("-p", "--port", help="Port", default=None) def webserver(executable, upload_dir, host=None, port=None): - """Starts webserver for communication with Webpublish FR via command line + """Start service for communication with Webpublish Front end. OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND FTRACK_BOT_API_KEY provided with api key from Ftrack. From 5908995349a6e416e3a7db975dc119e407747f85 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:00:34 +0200 Subject: [PATCH 0706/2550] moved helper functions 'get_errored_instances_from_context' and 'get_errored_plugins_from_context' into openpype.pipeline.publish --- openpype/pipeline/publish/__init__.py | 6 ++++ openpype/pipeline/publish/lib.py | 44 +++++++++++++++++++++++++++ 2 files changed, 50 insertions(+) diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index aa7fe0bdbf..dbd0b696ec 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -14,6 +14,9 @@ from .lib import ( publish_plugins_discover, load_help_content_from_plugin, load_help_content_from_filepath, + + get_errored_instances_from_context, + get_errored_plugins_from_context, ) from .abstract_expected_files import ExpectedFiles @@ -38,6 +41,9 @@ __all__ = ( "load_help_content_from_plugin", "load_help_content_from_filepath", + "get_errored_instances_from_context", + "get_errored_plugins_from_context", + "ExpectedFiles", "RenderInstance", diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 9060a0bf4b..83a1e3ff1a 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -313,3 +313,47 @@ def remote_publish(log, close_plugin_name=None, raise_error=False): # Fatal Error is because of Deadline error_message = "Fatal Error: " + error_format.format(**result) raise RuntimeError(error_message) + + +def get_errored_instances_from_context(context): + """Collect failed instances from pyblish context. + + Args: + context (pyblish.api.Context): Publish context where we're looking + for failed instances. + + Returns: + List[pyblish.lib.Instance]: Instances which failed during processing. + """ + + instances = list() + for result in context.data["results"]: + if result["instance"] is None: + # When instance is None we are on the "context" result + continue + + if result["error"]: + instances.append(result["instance"]) + + return instances + + +def get_errored_plugins_from_context(context): + """Collect failed plugins from pyblish context. + + Args: + context (pyblish.api.Context): Publish context where we're looking + for failed plugins. + + Returns: + List[pyblish.api.Plugin]: Plugins which failed during processing. 
+ """ + + plugins = list() + results = context.data.get("results", []) + for result in results: + if result["success"] is True: + continue + plugins.append(result["plugin"]) + + return plugins From 74a1847cfeaca882a66336c485ffd8cf47415a9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:01:05 +0200 Subject: [PATCH 0707/2550] removed unused ValidationException --- openpype/api.py | 3 --- openpype/plugin.py | 4 ---- 2 files changed, 7 deletions(-) diff --git a/openpype/api.py b/openpype/api.py index c2227c1a52..0466eb7f78 100644 --- a/openpype/api.py +++ b/openpype/api.py @@ -49,7 +49,6 @@ from .plugin import ( ValidateContentsOrder, ValidateSceneOrder, ValidateMeshOrder, - ValidationException ) # temporary fix, might @@ -94,8 +93,6 @@ __all__ = [ "RepairAction", "RepairContextAction", - "ValidationException", - # get contextual data "version_up", "get_asset", diff --git a/openpype/plugin.py b/openpype/plugin.py index bb9bc2ff85..9b194a13ba 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -79,7 +79,3 @@ def contextplugin_should_run(plugin, context): return True return False - - -class ValidationException(Exception): - pass From 08675dcdf5dc9b845696c60383caa90cd2a9760d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:02:33 +0200 Subject: [PATCH 0708/2550] moved default repair actions into publish plugins --- openpype/pipeline/publish/__init__.py | 6 ++ openpype/pipeline/publish/publish_plugins.py | 59 +++++++++++++++++++- 2 files changed, 64 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index dbd0b696ec..57d9da2167 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -7,6 +7,9 @@ from .publish_plugins import ( KnownPublishError, OpenPypePyblishPluginMixin, OptionalPyblishPluginMixin, + + RepairAction, + RepairContextAction, ) from .lib import ( @@ -36,6 +39,9 @@ __all__ = ( "OpenPypePyblishPluginMixin", "OptionalPyblishPluginMixin", + "RepairAction", + "RepairContextAction", + "DiscoverResult", "publish_plugins_discover", "load_help_content_from_plugin", diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 71a2c675b6..00ac2c94ac 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,7 +1,15 @@ from abc import ABCMeta + +import pyblish.api from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin + from openpype.lib import BoolDef -from .lib import load_help_content_from_plugin + +from .lib import ( + load_help_content_from_plugin, + get_errored_instances_from_context, + get_errored_plugins_from_data +) class AbstractMetaInstancePlugin(ABCMeta, MetaPlugin): @@ -184,3 +192,52 @@ class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): if active is None: active = getattr(self, "active", True) return active + + +class RepairAction(pyblish.api.Action): + """Repairs the action + + To process the repairing this requires a static `repair(instance)` method + is available on the plugin. 
+ """ + + label = "Repair" + on = "failed" # This action is only available on a failed plug-in + icon = "wrench" # Icon from Awesome Icon + + def process(self, context, plugin): + if not hasattr(plugin, "repair"): + raise RuntimeError("Plug-in does not have repair method.") + + # Get the errored instances + self.log.info("Finding failed instances..") + errored_instances = get_errored_instances_from_context(context) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(errored_instances, plugin) + for instance in instances: + plugin.repair(instance) + + +class RepairContextAction(pyblish.api.Action): + """Repairs the action + + To process the repairing this requires a static `repair(instance)` method + is available on the plugin. + """ + + label = "Repair" + on = "failed" # This action is only available on a failed plug-in + + def process(self, context, plugin): + if not hasattr(plugin, "repair"): + raise RuntimeError("Plug-in does not have repair method.") + + # Get the errored instances + self.log.info("Finding failed instances..") + errored_plugins = get_errored_plugins_from_data(context) + + # Apply pyblish.logic to get the instances for the plug-in + if plugin in errored_plugins: + self.log.info("Attempting fix ...") + plugin.repair(context) From e28603bf1dbde45d5080303ec6f0abbee5bdbd1b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:24:03 +0200 Subject: [PATCH 0709/2550] added 'context_plugin_should_run' to pipeline publish --- openpype/pipeline/publish/__init__.py | 4 ++ openpype/pipeline/publish/lib.py | 57 +++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index 57d9da2167..06323ebf7c 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -20,6 +20,8 @@ from .lib import ( get_errored_instances_from_context, get_errored_plugins_from_context, + + context_plugin_should_run, ) from .abstract_expected_files import ExpectedFiles @@ -50,6 +52,8 @@ __all__ = ( "get_errored_instances_from_context", "get_errored_plugins_from_context", + "context_plugin_should_run", + "ExpectedFiles", "RenderInstance", diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 83a1e3ff1a..b3aa6242cd 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -357,3 +357,60 @@ def get_errored_plugins_from_context(context): plugins.append(result["plugin"]) return plugins + + +def filter_instances_for_context_plugin(plugin, context): + """Filter instances on context by context plugin filters. + + This is for cases when context plugin need similar filtering like instance + plugin have, but for some reason must run on context. + + Args: + plugin (pyblish.api.Plugin): Plugin with filters. + context (pyblish.api.Context): Pyblish context with insances. + + Returns: + Iterator[pyblish.lib.Instance]: Iteration of valid instances. 
+ """ + + if not plugin.families: + return [] + + plugin_families = set(plugin.families) + for instance in context: + # Ignore inactive instances + if ( + not instance.data.get("publish", True) + or not instance.data.get("active", True) + ): + continue + + family = instance.data.get("family") + if family and family in plugin_families: + yield instance + + families = instance.data.get("families", []) + if any(f in plugin_families for f in families): + yield instance + + +def context_plugin_should_run(plugin, context): + """Return whether the ContextPlugin should run on the given context. + + This is a helper function to work around a bug pyblish-base#250 + Whenever a ContextPlugin sets specific families it will still trigger even + when no instances are present that have those families. + + This actually checks it correctly and returns whether it should run. + + Args: + plugin (pyblish.api.Plugin): Plugin with filters. + context (pyblish.api.Context): Pyblish context with insances. + + Returns: + bool: Context plugin should run based on valid instances. + """ + + for instance in filter_instances_for_context_plugin(plugin, context): + return True + return False From fa2234259e55a6a1f49681fc3f7210777a5e45b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:27:20 +0200 Subject: [PATCH 0710/2550] replaced usages of 'contextplugin_should_run' with 'context_plugin_should_run' --- .../publish/validate_current_renderlayer_renderable.py | 4 ++-- .../hosts/maya/plugins/publish/validate_muster_connection.py | 4 ++-- .../maya/plugins/publish/validate_vray_translator_settings.py | 4 ++-- openpype/pipeline/publish/lib.py | 3 ++- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py b/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py index 3c3ea68fc6..f072e5e323 100644 --- a/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py +++ b/openpype/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py @@ -1,7 +1,7 @@ import pyblish.api from maya import cmds -from openpype.plugin import contextplugin_should_run +from openpype.pipeline.publish import context_plugin_should_run class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin): @@ -24,7 +24,7 @@ class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin): def process(self, context): # Workaround bug pyblish-base#250 - if not contextplugin_should_run(self, context): + if not context_plugin_should_run(self, context): return layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) diff --git a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py index 6dc7bd3bc4..856f1d933d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py +++ b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py @@ -5,7 +5,7 @@ import appdirs import pyblish.api from openpype.lib import requests_get -from openpype.plugin import contextplugin_should_run +from openpype.pipeline.publish import context_plugin_should_run import openpype.hosts.maya.api.action @@ -26,7 +26,7 @@ class ValidateMusterConnection(pyblish.api.ContextPlugin): def process(self, context): # Workaround bug pyblish-base#250 - if not contextplugin_should_run(self, context): + if not context_plugin_should_run(self, context): return # test if we have environment set (redundant as this plugin 
shouldn' diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py index 1deabde4a2..d611777f43 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py @@ -2,7 +2,7 @@ """Validate VRay Translator settings.""" import pyblish.api import openpype.api -from openpype.plugin import contextplugin_should_run +from openpype.pipeline.publish import context_plugin_should_run from maya import cmds @@ -18,7 +18,7 @@ class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin): def process(self, context): """Plugin entry point.""" # Workaround bug pyblish-base#250 - if not contextplugin_should_run(self, context): + if not context_plugin_should_run(self, context): return invalid = self.get_invalid(context) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index b3aa6242cd..deecc262c3 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -363,7 +363,8 @@ def filter_instances_for_context_plugin(plugin, context): """Filter instances on context by context plugin filters. This is for cases when context plugin need similar filtering like instance - plugin have, but for some reason must run on context. + plugin have, but for some reason must run on context or should find out + if there is at least one instance with a family. Args: plugin (pyblish.api.Plugin): Plugin with filters. From 66bbb5497af0cb0f4a0c32d51e4b9e2713942e2f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:32:18 +0200 Subject: [PATCH 0711/2550] fix all families filter --- openpype/pipeline/publish/lib.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index deecc262c3..266e0bbf08 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -378,6 +378,7 @@ def filter_instances_for_context_plugin(plugin, context): return [] plugin_families = set(plugin.families) + all_families = "*" in plugin_families for instance in context: # Ignore inactive instances if ( @@ -387,11 +388,12 @@ def filter_instances_for_context_plugin(plugin, context): continue family = instance.data.get("family") - if family and family in plugin_families: - yield instance - - families = instance.data.get("families", []) - if any(f in plugin_families for f in families): + families = instance.data.get("families") or [] + if ( + all_families + or (family and family in plugin_families) + or any(f in plugin_families for f in families) + ): yield instance From 16bd7a3fef62c5469c9c13aa6881818a3cf07bac Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:50:53 +0200 Subject: [PATCH 0712/2550] fix imported function name --- openpype/pipeline/publish/publish_plugins.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 00ac2c94ac..5bb6b5aaff 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -8,7 +8,7 @@ from openpype.lib import BoolDef from .lib import ( load_help_content_from_plugin, get_errored_instances_from_context, - get_errored_plugins_from_data + get_errored_plugins_from_context ) @@ -235,7 +235,7 @@ class RepairContextAction(pyblish.api.Action): # Get the errored 
instances self.log.info("Finding failed instances..") - errored_plugins = get_errored_plugins_from_data(context) + errored_plugins = get_errored_plugins_from_context(context) # Apply pyblish.logic to get the instances for the plug-in if plugin in errored_plugins: From dc9971fd7290705adfd2f79bcd6c13ee84024fe3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:56:18 +0200 Subject: [PATCH 0713/2550] fix py2 compatibility --- openpype/pipeline/publish/lib.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 266e0bbf08..afa02ac54c 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -374,12 +374,15 @@ def filter_instances_for_context_plugin(plugin, context): Iterator[pyblish.lib.Instance]: Iteration of valid instances. """ - if not plugin.families: - return [] + instances = [] + plugin_families = set() + all_families = False + if plugin.families: + instances = context + plugin_families = set(plugin.families) + all_families = "*" in plugin_families - plugin_families = set(plugin.families) - all_families = "*" in plugin_families - for instance in context: + for instance in instances: # Ignore inactive instances if ( not instance.data.get("publish", True) From 5203814b0e205d30df86787fc1868f2cd4d66750 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 11:59:45 +0200 Subject: [PATCH 0714/2550] added helper function to get staging dir even outside 'Extractor' plugin --- openpype/pipeline/publish/__init__.py | 4 ++++ openpype/pipeline/publish/lib.py | 28 +++++++++++++++++++++++++++ openpype/plugin.py | 14 ++++---------- 3 files changed, 36 insertions(+), 10 deletions(-) diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index 06323ebf7c..9c6462740f 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -21,7 +21,9 @@ from .lib import ( get_errored_instances_from_context, get_errored_plugins_from_context, + filter_instances_for_context_plugin, context_plugin_should_run, + get_instance_staging_dir, ) from .abstract_expected_files import ExpectedFiles @@ -52,7 +54,9 @@ __all__ = ( "get_errored_instances_from_context", "get_errored_plugins_from_context", + "filter_instances_for_context_plugin", "context_plugin_should_run", + "get_instance_staging_dir", "ExpectedFiles", diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index afa02ac54c..4f06f0e6fd 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -2,6 +2,7 @@ import os import sys import types import inspect +import tempfile import xml.etree.ElementTree import six @@ -420,3 +421,30 @@ def context_plugin_should_run(plugin, context): for instance in filter_instances_for_context_plugin(plugin, context): return True return False + + +def get_instance_staging_dir(instance): + """Unified way how staging dir is stored and created on instances. + + First check if 'stagingDir' is already set in instance data. If there is + not create new in tempdir. + + Note: + Staging dir does not have to be necessarily in tempdir so be carefull + about it's usage. + + Args: + instance (pyblish.lib.Instance): Instance for which we want to get + staging dir. + + Returns: + str: Path to staging dir of instance. 
+ """ + + staging_dir = instance.data.get("stagingDir") + if not staging_dir: + instance.data["stagingDir"] = os.path.normpath( + tempfile.mkdtemp(prefix="pyblish_tmp_") + ) + + return staging_dir diff --git a/openpype/plugin.py b/openpype/plugin.py index 9b194a13ba..9a131f320c 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -1,5 +1,3 @@ -import tempfile -import os import pyblish.api ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 @@ -18,7 +16,8 @@ class InstancePlugin(pyblish.api.InstancePlugin): super(InstancePlugin, cls).process(cls, *args, **kwargs) -class Extractor(InstancePlugin): +# NOTE: This class is used on so many places I gave up moving it +class Extractor(pyblish.api.InstancePlugin): """Extractor base class. The extractor base class implements a "staging_dir" function used to @@ -36,15 +35,10 @@ class Extractor(InstancePlugin): Upon calling this method the staging directory is stored inside the instance.data['stagingDir'] """ - staging_dir = instance.data.get('stagingDir', None) - if not staging_dir: - staging_dir = os.path.normpath( - tempfile.mkdtemp(prefix="pyblish_tmp_") - ) - instance.data['stagingDir'] = staging_dir + from openpype.pipeline.publish import get_instance_staging_dir - return staging_dir + return get_instance_staging_dir(instance) def contextplugin_should_run(plugin, context): From 4e8b40e3a01ea3d6759af19be390f3673ec9a069 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 12:01:09 +0200 Subject: [PATCH 0715/2550] 'contextplugin_should_run' is marked as deprecated --- openpype/plugin.py | 73 +++++++++++++++++++++++++++++++++------------- 1 file changed, 53 insertions(+), 20 deletions(-) diff --git a/openpype/plugin.py b/openpype/plugin.py index 9a131f320c..5896cbd749 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -1,3 +1,6 @@ +import functools +import warnings + import pyblish.api ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 @@ -6,6 +9,53 @@ ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2 ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3 +class PluginDeprecatedWarning(DeprecationWarning): + pass + + +def _deprecation_warning(item_name, warning_message): + warnings.simplefilter("always", PluginDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(item_name, warning_message), + category=PluginDeprecatedWarning, + stacklevel=4 + ) + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." 
+ ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + _deprecation_warning(decorated_func.__name__, warning_message) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) class ContextPlugin(pyblish.api.ContextPlugin): def process(cls, *args, **kwargs): super(ContextPlugin, cls).process(cls, *args, **kwargs) @@ -41,6 +91,7 @@ class Extractor(pyblish.api.InstancePlugin): return get_instance_staging_dir(instance) +@deprecated("openpype.pipeline.publish.context_plugin_should_run") def contextplugin_should_run(plugin, context): """Return whether the ContextPlugin should run on the given context. @@ -51,25 +102,7 @@ def contextplugin_should_run(plugin, context): This actually checks it correctly and returns whether it should run. """ - required = set(plugin.families) - # When no filter always run - if "*" in required: - return True + from openpype.pipeline.publish import context_plugin_should_run - for instance in context: - - # Ignore inactive instances - if (not instance.data.get("publish", True) or - not instance.data.get("active", True)): - continue - - families = instance.data.get("families", []) - if any(f in required for f in families): - return True - - family = instance.data.get("family") - if family and family in required: - return True - - return False + return context_plugin_should_run(plugin, context) From 27517897ab6b78dbe65d1e1281dc21f757a30ddb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 12:07:26 +0200 Subject: [PATCH 0716/2550] use new imports of 'get_errored_plugins_from_context' and 'get_errored_instances_from_context' --- .../aftereffects/plugins/publish/increment_workfile.py | 4 ++-- openpype/hosts/blender/api/action.py | 2 +- .../plugins/publish/increment_current_file_deadline.py | 4 ++-- .../hosts/harmony/plugins/publish/increment_workfile.py | 4 ++-- .../hosts/houdini/plugins/publish/increment_current_file.py | 6 +++--- .../plugins/publish/increment_current_file_deadline.py | 6 +++--- openpype/hosts/maya/api/action.py | 2 +- .../maya/plugins/publish/increment_current_file_deadline.py | 5 ++--- openpype/hosts/nuke/api/actions.py | 2 +- openpype/hosts/nuke/plugins/publish/validate_write_nodes.py | 2 +- .../hosts/photoshop/plugins/publish/increment_workfile.py | 4 ++-- openpype/hosts/resolve/api/action.py | 2 +- 12 files changed, 21 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py b/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py index 0829355f3b..d8f6ef5d27 100644 --- a/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/increment_workfile.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype.action import get_errored_plugins_from_data from openpype.lib import version_up +from openpype.pipeline.publish import get_errored_plugins_from_context from openpype.hosts.aftereffects.api import get_stub @@ -18,7 +18,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin): optional = True def process(self, instance): - errored_plugins = get_errored_plugins_from_data(instance.context) + errored_plugins = get_errored_plugins_from_context(instance.context) if errored_plugins: raise RuntimeError( "Skipping incrementing current file because publishing failed." 
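# NOTE: illustrative summary, not part of the patch itself. PATCH 0716 applies
# the same one-line import migration across hosts; the old locations are only
# deprecated later in this series, but the relocated names are the ones to use:
#
#   old (deprecated):
#       from openpype.action import get_errored_plugins_from_data
#       from openpype.api import get_errored_instances_from_context
#
#   new:
from openpype.pipeline.publish import (
    get_errored_instances_from_context,
    get_errored_plugins_from_context,
)
# The plugin helper is also renamed along the way:
# get_errored_plugins_from_data -> get_errored_plugins_from_context.
# (end of illustrative summary)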
diff --git a/openpype/hosts/blender/api/action.py b/openpype/hosts/blender/api/action.py index 09ef76326e..fe0833e39f 100644 --- a/openpype/hosts/blender/api/action.py +++ b/openpype/hosts/blender/api/action.py @@ -2,7 +2,7 @@ import bpy import pyblish.api -from openpype.api import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context class SelectInvalidAction(pyblish.api.Action): diff --git a/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py index 6483454d96..5c595638e9 100644 --- a/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py +++ b/openpype/hosts/fusion/plugins/publish/increment_current_file_deadline.py @@ -17,9 +17,9 @@ class FusionIncrementCurrentFile(pyblish.api.ContextPlugin): def process(self, context): from openpype.lib import version_up - from openpype.action import get_errored_plugins_from_data + from openpype.pipeline.publish import get_errored_plugins_from_context - errored_plugins = get_errored_plugins_from_data(context) + errored_plugins = get_errored_plugins_from_context(context) if any(plugin.__name__ == "FusionSubmitDeadline" for plugin in errored_plugins): raise RuntimeError("Skipping incrementing current file because " diff --git a/openpype/hosts/harmony/plugins/publish/increment_workfile.py b/openpype/hosts/harmony/plugins/publish/increment_workfile.py index 417377fff8..1caf581567 100644 --- a/openpype/hosts/harmony/plugins/publish/increment_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/increment_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.action import get_errored_plugins_from_data +from openpype.pipeline.publish import get_errored_plugins_from_context from openpype.lib import version_up import openpype.hosts.harmony.api as harmony @@ -19,7 +19,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin): optional = True def process(self, instance): - errored_plugins = get_errored_plugins_from_data(instance.context) + errored_plugins = get_errored_plugins_from_context(instance.context) if errored_plugins: raise RuntimeError( "Skipping incrementing current file because publishing failed." 
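# NOTE: minimal sketch of the pattern shared by the increment plugins above,
# not part of the patch. The plugin names, the submit plugin name and the
# "currentFile" context key are illustrative assumptions.
import pyblish.api

from openpype.lib import version_up
from openpype.pipeline.publish import get_errored_plugins_from_context


class ExampleIncrementCurrentFile(pyblish.api.ContextPlugin):
    """Increment the workfile, unless the farm submission failed."""

    label = "Example Increment Current File"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["maya"]

    def process(self, context):
        errored_plugins = get_errored_plugins_from_context(context)
        if any(plugin.__name__ == "ExampleSubmitDeadline"
               for plugin in errored_plugins):
            raise RuntimeError("Skipping incrementing current file because "
                               "the farm submission failed.")

        # 'version_up' returns the path with its version number bumped
        current_file = context.data["currentFile"]
        self.log.info("New workfile path: %s", version_up(current_file))
# (end of illustrative sketch)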
diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index c5cacd1880..5cb14d732a 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -1,8 +1,8 @@ import pyblish.api -from openpype.api import version_up -from openpype.action import get_errored_plugins_from_data +from openpype.lib import version_up from openpype.pipeline import registered_host +from openpype.pipeline.publish import get_errored_plugins_from_context class IncrementCurrentFile(pyblish.api.InstancePlugin): @@ -30,7 +30,7 @@ class IncrementCurrentFile(pyblish.api.InstancePlugin): context.data[key] = True context = instance.context - errored_plugins = get_errored_plugins_from_data(context) + errored_plugins = get_errored_plugins_from_context(context) if any( plugin.__name__ == "HoudiniSubmitPublishDeadline" for plugin in errored_plugins diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py index faa015f739..cb0d7e3680 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py @@ -1,8 +1,8 @@ import pyblish.api import hou -from openpype.api import version_up -from openpype.action import get_errored_plugins_from_data +from openpype.lib import version_up +from openpype.pipeline.publish import get_errored_plugins_from_context class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): @@ -19,7 +19,7 @@ class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): def process(self, context): - errored_plugins = get_errored_plugins_from_data(context) + errored_plugins = get_errored_plugins_from_context(context) if any( plugin.__name__ == "HoudiniSubmitPublishDeadline" for plugin in errored_plugins diff --git a/openpype/hosts/maya/api/action.py b/openpype/hosts/maya/api/action.py index 90605734e7..065fdf3691 100644 --- a/openpype/hosts/maya/api/action.py +++ b/openpype/hosts/maya/api/action.py @@ -5,7 +5,7 @@ import pyblish.api from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io -from openpype.api import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context class GenerateUUIDsOnInvalidAction(pyblish.api.Action): diff --git a/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py index f9cfac3eb9..b5d5847e9f 100644 --- a/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py +++ b/openpype/hosts/maya/plugins/publish/increment_current_file_deadline.py @@ -16,12 +16,11 @@ class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): def process(self, context): - import os from maya import cmds from openpype.lib import version_up - from openpype.action import get_errored_plugins_from_data + from openpype.pipeline.publish import get_errored_plugins_from_context - errored_plugins = get_errored_plugins_from_data(context) + errored_plugins = get_errored_plugins_from_context(context) if any(plugin.__name__ == "MayaSubmitDeadline" for plugin in errored_plugins): raise RuntimeError("Skipping incrementing current file because " diff --git a/openpype/hosts/nuke/api/actions.py b/openpype/hosts/nuke/api/actions.py index c4a6f0fb84..92b83560da 100644 --- 
a/openpype/hosts/nuke/api/actions.py +++ b/openpype/hosts/nuke/api/actions.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype.api import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context from .lib import ( reset_selection, select_nodes diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 362ff31174..5a8bc2022e 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.api import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( get_write_node_template_attr, set_node_knobs_from_settings diff --git a/openpype/hosts/photoshop/plugins/publish/increment_workfile.py b/openpype/hosts/photoshop/plugins/publish/increment_workfile.py index 92132c393b..665dd67fc5 100644 --- a/openpype/hosts/photoshop/plugins/publish/increment_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/increment_workfile.py @@ -1,6 +1,6 @@ import os import pyblish.api -from openpype.action import get_errored_plugins_from_data +from openpype.pipeline.publish import get_errored_plugins_from_context from openpype.lib import version_up from openpype.hosts.photoshop import api as photoshop @@ -19,7 +19,7 @@ class IncrementWorkfile(pyblish.api.InstancePlugin): optional = True def process(self, instance): - errored_plugins = get_errored_plugins_from_data(instance.context) + errored_plugins = get_errored_plugins_from_context(instance.context) if errored_plugins: raise RuntimeError( "Skipping incrementing current file because publishing failed." 
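# NOTE: minimal sketch, not part of the patch above, of how the relocated
# RepairAction is wired into a validator: RepairAction.process() looks for a
# 'repair' method on the failed plug-in and calls it for every errored
# instance. The validator below and its 'exampleAttr' key are hypothetical.
import pyblish.api

from openpype.pipeline.publish import RepairAction


class ValidateExampleAttribute(pyblish.api.InstancePlugin):
    """Fail when an instance misses 'exampleAttr' and offer a repair."""

    order = pyblish.api.ValidatorOrder
    label = "Example Attribute"
    actions = [RepairAction]

    def process(self, instance):
        if not instance.data.get("exampleAttr"):
            raise ValueError("Instance is missing 'exampleAttr'")

    @classmethod
    def repair(cls, instance):
        # Called by RepairAction for each instance that failed this plug-in
        instance.data["exampleAttr"] = True
# (end of illustrative sketch)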
diff --git a/openpype/hosts/resolve/api/action.py b/openpype/hosts/resolve/api/action.py index d55a24a39a..ceedc2cc54 100644 --- a/openpype/hosts/resolve/api/action.py +++ b/openpype/hosts/resolve/api/action.py @@ -4,7 +4,7 @@ from __future__ import absolute_import import pyblish.api -from openpype.action import get_errored_instances_from_context +from openpype.pipeline.publish import get_errored_instances_from_context class SelectInvalidAction(pyblish.api.Action): From 94306ae8425af2250c766fa280a0b5a54982fc14 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 12:08:21 +0200 Subject: [PATCH 0717/2550] marked 'get_errored_plugins_from_data' and 'get_errored_instances_from_context' as deprecated in 'openpype.action' --- openpype/action.py | 83 +++++++++++++++++++++++++++++----------------- 1 file changed, 53 insertions(+), 30 deletions(-) diff --git a/openpype/action.py b/openpype/action.py index 50741875e4..8949858498 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -1,42 +1,65 @@ -# absolute_import is needed to counter the `module has no cmds error` in Maya -from __future__ import absolute_import - +import warnings +import functools import pyblish.api -def get_errored_instances_from_context(context): - - instances = list() - for result in context.data["results"]: - if result["instance"] is None: - # When instance is None we are on the "context" result - continue - - if result["error"]: - instances.append(result["instance"]) - - return instances +class ActionDeprecatedWarning(DeprecationWarning): + pass -def get_errored_plugins_from_data(context): - """Get all failed validation plugins - - Args: - context (object): - - Returns: - list of plugins which failed during validation +def deprecated(new_destination): + """Mark functions as deprecated. + It will result in a warning being emitted when the function is used. """ - plugins = list() - results = context.data.get("results", []) - for result in results: - if result["success"] is True: - continue - plugins.append(result["plugin"]) + func = None + if callable(new_destination): + func = new_destination + new_destination = None - return plugins + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." 
+ ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", ActionDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=ActionDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + +@deprecated("openpype.pipeline.publish.get_errored_instances_from_context") +def get_errored_instances_from_context(context): + from openpype.pipeline.publish import get_errored_instances_from_context + + return get_errored_instances_from_context(context) + + +@deprecated("openpype.pipeline.publish.get_errored_plugins_from_context") +def get_errored_plugins_from_data(context): + from openpype.pipeline.publish import get_errored_plugins_from_context + + return get_errored_plugins_from_context(context) class RepairAction(pyblish.api.Action): From 380965927ad4aa58672008588940c455f02d08cc Mon Sep 17 00:00:00 2001 From: maxpareschi Date: Fri, 26 Aug 2022 12:13:29 +0200 Subject: [PATCH 0718/2550] reversed dict merging, anatomy has precedence. --- openpype/pipeline/workfile/path_resolving.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 4cd225a515..97e00d807c 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -409,10 +409,11 @@ def get_custom_workfile_template( anatomy_context_data["root"] = anatomy.roots # extend anatomy context with os.environ - anatomy_context_data.update(os.environ) + full_context_data = os.environ + full_context_data.update(anatomy_context_data) # get task type for the task in context - current_task_type = anatomy_context_data["task"]["type"] + current_task_type = full_context_data["task"]["type"] # get path from matching profile matching_item = filter_profiles( @@ -424,7 +425,7 @@ def get_custom_workfile_template( if matching_item: template = matching_item["path"][platform.system().lower()] return StringTemplate.format_strict_template( - template, anatomy_context_data + template, full_context_data ).normalized() return None From 2e412b55c1d526373e8b2eb35250c2409a60a1ba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 12:25:02 +0200 Subject: [PATCH 0719/2550] use new import sources for 'RepairContextAction' and 'RepairAction' --- .../fusion/plugins/publish/validate_background_depth.py | 4 ++-- .../plugins/publish/validate_create_folder_checked.py | 4 ++-- .../houdini/plugins/publish/validate_remote_publish.py | 4 ++-- .../plugins/publish/validate_remote_publish_enabled.py | 4 ++-- .../validate_animation_out_set_related_node_ids.py | 3 ++- .../maya/plugins/publish/validate_ass_relative_paths.py | 3 ++- .../maya/plugins/publish/validate_assembly_transforms.py | 3 ++- .../hosts/maya/plugins/publish/validate_attributes.py | 3 ++- .../hosts/maya/plugins/publish/validate_color_sets.py | 3 ++- .../hosts/maya/plugins/publish/validate_frame_range.py | 3 ++- .../hosts/maya/plugins/publish/validate_loaded_plugin.py | 7 ++++--- .../plugins/publish/validate_look_id_reference_edits.py | 3 ++- .../maya/plugins/publish/validate_look_shading_group.py | 3 ++- .../hosts/maya/plugins/publish/validate_maya_units.py | 3 ++- 
.../plugins/publish/validate_mesh_arnold_attributes.py | 3 ++- .../plugins/publish/validate_mesh_normals_unlocked.py | 3 ++- .../plugins/publish/validate_mesh_shader_connections.py | 3 ++- .../maya/plugins/publish/validate_mesh_single_uv_set.py | 3 ++- .../maya/plugins/publish/validate_mesh_uv_set_map1.py | 3 ++- .../plugins/publish/validate_mesh_vertices_have_edges.py | 3 ++- .../maya/plugins/publish/validate_muster_connection.py | 8 +++++--- .../hosts/maya/plugins/publish/validate_no_namespace.py | 3 ++- .../maya/plugins/publish/validate_no_null_transforms.py | 3 ++- .../plugins/publish/validate_node_ids_deformed_shapes.py | 3 ++- .../maya/plugins/publish/validate_render_image_rule.py | 3 ++- .../maya/plugins/publish/validate_rendersettings.py | 3 ++- .../maya/plugins/publish/validate_rig_controllers.py | 4 ++-- .../validate_rig_controllers_arnold_attributes.py | 3 ++- .../maya/plugins/publish/validate_rig_joints_hidden.py | 3 ++- .../plugins/publish/validate_rig_out_set_node_ids.py | 3 ++- .../maya/plugins/publish/validate_rig_output_ids.py | 3 ++- .../maya/plugins/publish/validate_shape_default_names.py | 3 ++- .../maya/plugins/publish/validate_shape_render_stats.py | 3 ++- .../hosts/maya/plugins/publish/validate_shape_zero.py | 3 ++- .../maya/plugins/publish/validate_unreal_up_axis.py | 3 ++- .../publish/validate_vray_distributed_rendering.py | 3 ++- .../plugins/publish/validate_vray_referenced_aovs.py | 4 ++-- .../plugins/publish/validate_vray_translator_settings.py | 7 +++++-- .../plugins/publish/validate_yeti_rig_cache_state.py | 4 ++-- openpype/hosts/nuke/plugins/publish/validate_knobs.py | 9 ++++++--- .../nuke/plugins/publish/validate_output_resolution.py | 6 +++--- .../nuke/plugins/publish/validate_script_attributes.py | 4 ++-- .../hosts/nuke/plugins/publish/validate_write_legacy.py | 5 +++-- 43 files changed, 100 insertions(+), 61 deletions(-) diff --git a/openpype/hosts/fusion/plugins/publish/validate_background_depth.py b/openpype/hosts/fusion/plugins/publish/validate_background_depth.py index a0734d8278..4268fab528 100644 --- a/openpype/hosts/fusion/plugins/publish/validate_background_depth.py +++ b/openpype/hosts/fusion/plugins/publish/validate_background_depth.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype import action +from openpype.pipeline.publish import RepairAction class ValidateBackgroundDepth(pyblish.api.InstancePlugin): @@ -8,7 +8,7 @@ class ValidateBackgroundDepth(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder label = "Validate Background Depth 32 bit" - actions = [action.RepairAction] + actions = [RepairAction] hosts = ["fusion"] families = ["render"] optional = True diff --git a/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py b/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py index 45ed53f65c..f6beefefc1 100644 --- a/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py +++ b/openpype/hosts/fusion/plugins/publish/validate_create_folder_checked.py @@ -1,6 +1,6 @@ import pyblish.api -from openpype import action +from openpype.pipeline.publish import RepairAction class ValidateCreateFolderChecked(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidateCreateFolderChecked(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - actions = [action.RepairAction] + actions = [RepairAction] label = "Validate Create Folder Checked" families = ["render"] hosts = ["fusion"] diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py 
b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 95c66edff0..0ab182c584 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api from openpype.hosts.houdini.api import lib +from openpype.pipeline.publish import RepairContextAction import hou @@ -14,7 +14,7 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): hosts = ["houdini"] targets = ["deadline"] label = "Remote Publish ROP node" - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, context): diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index b681fd0ee1..afc8df7528 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api import hou +from openpype.pipeline.publish import RepairContextAction class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @@ -12,7 +12,7 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): hosts = ["houdini"] targets = ["deadline"] label = "Remote Publish ROP enabled" - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, context): diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 05d63f1d56..5f4d92bed9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import RepairAction class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): @@ -22,7 +23,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): label = 'Animation Out Set Related Node Ids' actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py b/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py index 5fb9bd98b1..3de8e55a41 100644 --- a/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py +++ b/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py @@ -6,6 +6,7 @@ import maya.cmds as cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateAssRelativePaths(pyblish.api.InstancePlugin): @@ -15,7 +16,7 @@ class ValidateAssRelativePaths(pyblish.api.InstancePlugin): hosts = ['maya'] families = ['ass'] label = "ASS has relative texture paths" - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, instance): # we cannot ask this until user open render settings as diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py index dca59b147b..f793846555 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py +++ 
b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py @@ -4,6 +4,7 @@ import openpype.api from maya import cmds import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): @@ -29,7 +30,7 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): label = "Assembly Model Transforms" families = ["assembly"] actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] prompt_message = ("You are about to reset the matrix to the default values." " This can alter the look of your scene. " diff --git a/openpype/hosts/maya/plugins/publish/validate_attributes.py b/openpype/hosts/maya/plugins/publish/validate_attributes.py index e2a22f80b6..e5b72ac3a3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_attributes.py @@ -2,6 +2,7 @@ import pymel.core as pm import pyblish.api import openpype.api +from openpype.pipeline.publish import RepairContextAction class ValidateAttributes(pyblish.api.ContextPlugin): @@ -19,7 +20,7 @@ class ValidateAttributes(pyblish.api.ContextPlugin): order = openpype.api.ValidateContentsOrder label = "Attributes" hosts = ["maya"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] optional = True attributes = None diff --git a/openpype/hosts/maya/plugins/publish/validate_color_sets.py b/openpype/hosts/maya/plugins/publish/validate_color_sets.py index 45224b0672..ecc5c78e43 100644 --- a/openpype/hosts/maya/plugins/publish/validate_color_sets.py +++ b/openpype/hosts/maya/plugins/publish/validate_color_sets.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateColorSets(pyblish.api.Validator): @@ -19,7 +20,7 @@ class ValidateColorSets(pyblish.api.Validator): category = 'geometry' label = 'Mesh ColorSets' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] optional = True @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index c51766379e..9094176abb 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -2,6 +2,7 @@ import pyblish.api import openpype.api from maya import cmds +from openpype.pipeline.publish import RepairAction class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -26,7 +27,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): "review", "yeticache"] optional = True - actions = [openpype.api.RepairAction] + actions = [RepairAction] exclude_families = [] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py b/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py index 9306d8ce15..624074aaf9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py +++ b/openpype/hosts/maya/plugins/publish/validate_loaded_plugin.py @@ -1,7 +1,8 @@ +import os import pyblish.api import maya.cmds as cmds -import openpype.api -import os + +from openpype.pipeline.publish import RepairContextAction class ValidateLoadedPlugin(pyblish.api.ContextPlugin): @@ -10,7 +11,7 @@ class ValidateLoadedPlugin(pyblish.api.ContextPlugin): label = "Loaded Plugin" order = 
pyblish.api.ValidatorOrder host = ["maya"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] @classmethod def get_invalid(cls, context): diff --git a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py index 9d074f927b..dc751b6b04 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py @@ -4,6 +4,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): @@ -21,7 +22,7 @@ class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): hosts = ['maya'] label = 'Look Id Reference Edits' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] def process(self, instance): invalid = self.get_invalid(instance) diff --git a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py index e8affac036..5e504cde9b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateShadingEngine(pyblish.api.InstancePlugin): @@ -16,7 +17,7 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin): hosts = ["maya"] label = "Look Shading Engine Naming" actions = [ - openpype.hosts.maya.api.action.SelectInvalidAction, openpype.api.RepairAction + openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction ] # The default connections to check diff --git a/openpype/hosts/maya/plugins/publish/validate_maya_units.py b/openpype/hosts/maya/plugins/publish/validate_maya_units.py index 5f67adec76..791030e932 100644 --- a/openpype/hosts/maya/plugins/publish/validate_maya_units.py +++ b/openpype/hosts/maya/plugins/publish/validate_maya_units.py @@ -5,6 +5,7 @@ import openpype.api import openpype.hosts.maya.api.lib as mayalib from openpype.pipeline.context_tools import get_current_project_asset from math import ceil +from openpype.pipeline.publish import RepairContextAction def float_round(num, places=0, direction=ceil): @@ -17,7 +18,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): order = openpype.api.ValidateSceneOrder label = "Maya Units" hosts = ['maya'] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] validate_linear_units = True linear_units = "cm" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py index 90eb01aa12..0924a40632 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline.publish import RepairAction class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin): @@ -20,7 +21,7 @@ class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin): label = "Mesh Arnold Attributes" actions = [ 
openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] optional = True if cmds.getAttr( diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py index 750932df54..b11d4a3b63 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py @@ -4,6 +4,7 @@ import maya.api.OpenMaya as om2 import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateMeshNormalsUnlocked(pyblish.api.Validator): @@ -21,7 +22,7 @@ class ValidateMeshNormalsUnlocked(pyblish.api.Validator): version = (0, 1, 0) label = 'Mesh Normals Unlocked' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] optional = True @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py index e0835000f0..90873369c9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction def pairs(iterable): @@ -91,7 +92,7 @@ class ValidateMeshShaderConnections(pyblish.api.InstancePlugin): families = ['model'] label = "Mesh Shader Connections" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] def process(self, instance): """Process all the nodes in the instance 'objectSet'""" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py index 9d2aeb7d99..70007ccd7b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import RepairAction class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): @@ -23,7 +24,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): version = (0, 1, 0) label = "Mesh Single UV Set" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index 52c45d3b0c..cb9180fd09 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin): @@ -21,7 +22,7 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin): optional = True label = "Mesh has map1 UV Set" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git 
a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py index 463c3c4c50..0c9cc81de7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py @@ -5,6 +5,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction def len_flattened(components): @@ -63,7 +64,7 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin): category = 'geometry' label = 'Mesh Vertices Have Edges' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @classmethod def repair(cls, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py index 856f1d933d..c31ccf405c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_muster_connection.py +++ b/openpype/hosts/maya/plugins/publish/validate_muster_connection.py @@ -5,8 +5,10 @@ import appdirs import pyblish.api from openpype.lib import requests_get -from openpype.pipeline.publish import context_plugin_should_run -import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + context_plugin_should_run, + RepairAction, +) class ValidateMusterConnection(pyblish.api.ContextPlugin): @@ -21,7 +23,7 @@ class ValidateMusterConnection(pyblish.api.ContextPlugin): token = None if not os.environ.get("MUSTER_REST_URL"): active = False - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, context): diff --git a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py index 5b3d6bc9c4..6b721135b8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py @@ -3,6 +3,7 @@ import maya.cmds as cmds import pyblish.api import openpype.api +from openpype.pipeline.publish import RepairAction import openpype.hosts.maya.api.action @@ -23,7 +24,7 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin): version = (0, 1, 0) label = 'No Namespaces' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py index 36d61b03e8..fea91b666d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py @@ -3,6 +3,7 @@ import maya.cmds as cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction def has_shape_children(node): @@ -43,7 +44,7 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin): category = 'cleanup' version = (0, 1, 0) label = 'No Empty/Null Transforms' - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py index 0324be9fc9..363d8882c0 100644 --- 
a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import RepairAction class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): @@ -22,7 +23,7 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): label = 'Deformed shape ids' actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 4d3796e429..965211e046 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -2,6 +2,7 @@ from maya import cmds import pyblish.api import openpype.api +from openpype.pipeline.publish import RepairAction class ValidateRenderImageRule(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): label = "Images File Rule (Workspace)" hosts = ["maya"] families = ["renderlayer"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index f19c0bff36..41cfbc19b5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -7,6 +7,7 @@ from maya import cmds, mel import pyblish.api import openpype.api +from openpype.pipeline.publish import RepairAction from openpype.hosts.maya.api import lib @@ -43,7 +44,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): label = "Render Settings" hosts = ["maya"] families = ["renderlayer"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] ImagePrefixes = { 'mentalray': 'defaultRenderGlobals.imageFilePrefix', diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py b/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py index d5a1fd3529..fc6cda7f23 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py @@ -2,7 +2,7 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import RepairAction import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import undo_chunk @@ -29,7 +29,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin): label = "Rig Controllers" hosts = ["maya"] families = ["rig"] - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] # Default controller values diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py index 1f1db9156b..23237d7ef6 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api +from openpype.pipeline.publish import RepairAction from 
openpype.hosts.maya.api import lib import openpype.hosts.maya.api.action @@ -30,7 +31,7 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): label = "Rig Controllers (Arnold Attributes)" hosts = ["maya"] families = ["rig"] - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] attributes = [ diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py index 5df754fff4..bad9c8b0d4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import RepairAction class ValidateRigJointsHidden(pyblish.api.InstancePlugin): @@ -23,7 +24,7 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin): version = (0, 1, 0) label = "Joints Hidden" actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def get_invalid(instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index cc3723a6e1..f170bc72b1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import RepairAction class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): @@ -22,7 +23,7 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): label = 'Rig Out Set Node Ids' actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] allow_history_only = False diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py index 7c5c540c60..d0ef8d9c04 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateRigOutputIds(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin): label = "Rig Output Ids" hosts = ["maya"] families = ["rig"] - actions = [openpype.api.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py index e08e06b50e..d6e7ed82c4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py @@ -5,6 +5,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction def short_name(node): @@ -39,7 +40,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin): version = (0, 1, 0) label = "Shape Default Naming" actions = 
[openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] @staticmethod def _define_default_name(shape): diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py index 714451bb98..c332e07505 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py @@ -4,6 +4,7 @@ import openpype.api from maya import cmds import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateShapeRenderStats(pyblish.api.Validator): @@ -14,7 +15,7 @@ class ValidateShapeRenderStats(pyblish.api.Validator): families = ['model'] label = 'Shape Default Render Stats' actions = [openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction] + RepairAction] defaults = {'castsShadows': 1, 'receiveShadows': 1, diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py index 343eaccb7d..515a8056f7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import RepairAction class ValidateShapeZero(pyblish.api.Validator): @@ -19,7 +20,7 @@ class ValidateShapeZero(pyblish.api.Validator): label = "Shape Zero (Freeze)" actions = [ openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction + RepairAction ] @staticmethod diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py b/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py index 5e1b04889f..3ed22c7cef 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api +from openpype.pipeline.publish import RepairAction class ValidateUnrealUpAxis(pyblish.api.ContextPlugin): @@ -14,7 +15,7 @@ class ValidateUnrealUpAxis(pyblish.api.ContextPlugin): hosts = ["maya"] families = ["staticMesh"] label = "Unreal Up-Axis check" - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, context): assert cmds.upAxis(q=True, axis=True) == "z", ( diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py b/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py index 5e35565383..376d49d583 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py @@ -1,6 +1,7 @@ import pyblish.api import openpype.api from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import RepairAction from maya import cmds @@ -18,7 +19,7 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin): order = openpype.api.ValidateContentsOrder label = "VRay Distributed Rendering" families = ["renderlayer"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] # V-Ray attribute names enabled_attr = "vraySettings.sys_distributed_rendering_on" diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py b/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py index 
7a48c29b7d..39c721e717 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py @@ -4,7 +4,7 @@ import pyblish.api import types from maya import cmds -import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairContextAction class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): @@ -20,7 +20,7 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): label = 'VRay Referenced AOVs' hosts = ['maya'] families = ['renderlayer'] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, instance): """Plugin main entry point.""" diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py index d611777f43..9b78699e14 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py @@ -2,7 +2,10 @@ """Validate VRay Translator settings.""" import pyblish.api import openpype.api -from openpype.pipeline.publish import context_plugin_should_run +from openpype.pipeline.publish import ( + context_plugin_should_run, + RepairContextAction, +) from maya import cmds @@ -13,7 +16,7 @@ class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin): order = openpype.api.ValidateContentsOrder label = "VRay Translator Settings" families = ["vrayscene_layer"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] def process(self, context): """Plugin entry point.""" diff --git a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py index 5610733577..4842134b12 100644 --- a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py +++ b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.action import maya.cmds as cmds import openpype.hosts.maya.api.action +from openpype.pipeline.publish import RepairAction class ValidateYetiRigCacheState(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateYetiRigCacheState(pyblish.api.InstancePlugin): label = "Yeti Rig Cache State" hosts = ["maya"] families = ["yetiRig"] - actions = [openpype.action.RepairAction, + actions = [RepairAction, openpype.hosts.maya.api.action.SelectInvalidAction] def process(self, instance): diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index e2b11892e5..d44f27791a 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -1,8 +1,11 @@ import nuke import six import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError + +from openpype.pipeline.publish import ( + RepairContextAction, + PublishXmlValidationError, +) class ValidateKnobs(pyblish.api.ContextPlugin): @@ -24,7 +27,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin): order = pyblish.api.ValidatorOrder label = "Validate Knobs" hosts = ["nuke"] - actions = [openpype.api.RepairContextAction] + actions = [RepairContextAction] optional = True def process(self, context): diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index fc07e9b83b..1e59880f90 100644 
--- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -1,8 +1,8 @@ - import pyblish.api -import openpype.api + from openpype.hosts.nuke.api import maintained_selection from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import RepairAction import nuke @@ -18,7 +18,7 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): families = ["render", "render.local", "render.farm"] label = "Write Resolution" hosts = ["nuke"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] missing_msg = "Missing Reformat node in render group node" resolution_msg = "Reformat is set to wrong format" diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index 106d7a2524..f0632f8080 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -1,8 +1,8 @@ from pprint import pformat import pyblish.api -import openpype.api from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import RepairAction from openpype.hosts.nuke.api.lib import ( get_avalon_knob_data, WorkfileSettings @@ -19,7 +19,7 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): label = "Validatte script attributes" hosts = ["nuke"] optional = True - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, instance): root = nuke.root() diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py index 9fb57c1698..699526ef57 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py @@ -3,8 +3,9 @@ import toml import nuke import pyblish.api -import openpype.api + from openpype.pipeline import discover_creator_plugins +from openpype.pipeline.publish import RepairAction from openpype.hosts.nuke.api.lib import get_avalon_knob_data @@ -16,7 +17,7 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin): families = ["write"] label = "Validate Write Legacy" hosts = ["nuke"] - actions = [openpype.api.RepairAction] + actions = [RepairAction] def process(self, instance): node = instance[0] From 9ead11791551a2ed5b7146a8b298e3c8d41fbee3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 12:27:55 +0200 Subject: [PATCH 0720/2550] added comments and deprecation warnings to classes which stayed where they are --- openpype/action.py | 6 ++++++ openpype/plugin.py | 24 ++++++++++++++++++++---- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/openpype/action.py b/openpype/action.py index 8949858498..70a3934723 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -62,6 +62,11 @@ def get_errored_plugins_from_data(context): return get_errored_plugins_from_context(context) +# 'RepairAction' and 'RepairContextAction' were moved to +# 'openpype.pipeline.publish' please change your imports. +# There is no "reasonable" way to mark these classes as deprecated to show +# warning of wrong import. 
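# --- Editor's sketch (not part of this patch): the import style these diffs
# --- converge on. Repair actions now come from `openpype.pipeline.publish`
# --- instead of `openpype.api`. The plugin name and its check are hypothetical.
import pyblish.api
from openpype.pipeline.publish import RepairAction


class ValidateSomethingSketch(pyblish.api.InstancePlugin):
    """Hypothetical validator wired to the relocated RepairAction."""

    order = pyblish.api.ValidatorOrder
    label = "Example Validator"
    actions = [RepairAction]

    def process(self, instance):
        if not instance.data.get("example_attribute"):
            raise RuntimeError("example_attribute is missing")

    @classmethod
    def repair(cls, instance):
        # RepairAction collects the errored instances and calls
        # `plugin.repair(instance)`, as `openpype/action.py` shows above.
        instance.data["example_attribute"] = True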
+# Deprecated since 3.14.* will be removed in 3.16.* class RepairAction(pyblish.api.Action): """Repairs the action @@ -88,6 +93,7 @@ class RepairAction(pyblish.api.Action): plugin.repair(instance) +# Deprecated since 3.14.* will be removed in 3.16.* class RepairContextAction(pyblish.api.Action): """Repairs the action diff --git a/openpype/plugin.py b/openpype/plugin.py index 5896cbd749..495cfaf233 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -56,14 +56,30 @@ def deprecated(new_destination): if func is None: return _decorator return _decorator(func) + + +# Classes just inheriting from pyblish classes +# - seems to be unused in code (not 100% sure) +# - they should be removed but because it is not clear if they're used +# we'll keep them and log deprecation warning +# Deprecated since 3.14.* will be removed in 3.16.* class ContextPlugin(pyblish.api.ContextPlugin): - def process(cls, *args, **kwargs): - super(ContextPlugin, cls).process(cls, *args, **kwargs) + def __init__(self, *args, **kwargs): + _deprecation_warning( + "openpype.plugin.ContextPlugin", + " Please replace your usage with 'pyblish.api.ContextPlugin'." + ) + super(ContextPlugin, self).__init__(*args, **kwargs) +# Deprecated since 3.14.* will be removed in 3.16.* class InstancePlugin(pyblish.api.InstancePlugin): - def process(cls, *args, **kwargs): - super(InstancePlugin, cls).process(cls, *args, **kwargs) + def __init__(self, *args, **kwargs): + _deprecation_warning( + "openpype.plugin.InstancePlugin", + " Please replace your usage with 'pyblish.api.InstancePlugin'." + ) + super(InstancePlugin, self).__init__(*args, **kwargs) # NOTE: This class is used on so many places I gave up moving it From 694953c1baf0085e8cc3d2f662432a0aa7ff6496 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 12:29:01 +0200 Subject: [PATCH 0721/2550] added deprecation into functions --- openpype/action.py | 10 ++++++++++ openpype/plugin.py | 2 ++ 2 files changed, 12 insertions(+) diff --git a/openpype/action.py b/openpype/action.py index 70a3934723..de9cdee010 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -50,6 +50,11 @@ def deprecated(new_destination): @deprecated("openpype.pipeline.publish.get_errored_instances_from_context") def get_errored_instances_from_context(context): + """ + Deprecated: + Since 3.14.* will be removed in 3.16.* or later. + """ + from openpype.pipeline.publish import get_errored_instances_from_context return get_errored_instances_from_context(context) @@ -57,6 +62,11 @@ def get_errored_instances_from_context(context): @deprecated("openpype.pipeline.publish.get_errored_plugins_from_context") def get_errored_plugins_from_data(context): + """ + Deprecated: + Since 3.14.* will be removed in 3.16.* or later. + """ + from openpype.pipeline.publish import get_errored_plugins_from_context return get_errored_plugins_from_context(context) diff --git a/openpype/plugin.py b/openpype/plugin.py index 495cfaf233..72dca31ec9 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -117,6 +117,8 @@ def contextplugin_should_run(plugin, context): This actually checks it correctly and returns whether it should run. + Deprecated: + Since 3.14.* will be removed in 3.16.* or later. 
""" from openpype.pipeline.publish import context_plugin_should_run From d570f882caade2e617c9ec7588f467fd00d2e816 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 12:34:32 +0200 Subject: [PATCH 0722/2550] added orders to publish constants --- openpype/pipeline/publish/__init__.py | 12 ++++++++++++ openpype/pipeline/publish/constants.py | 7 +++++++ 2 files changed, 19 insertions(+) create mode 100644 openpype/pipeline/publish/constants.py diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index 9c6462740f..8ba17b2516 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -1,3 +1,10 @@ +from .constants import ( + ValidatePipelineOrder, + ValidateContentsOrder, + ValidateSceneOrder, + ValidateMeshOrder, +) + from .publish_plugins import ( AbstractMetaInstancePlugin, AbstractMetaContextPlugin, @@ -34,6 +41,11 @@ from .abstract_collect_render import ( __all__ = ( + "ValidatePipelineOrder", + "ValidateContentsOrder", + "ValidateSceneOrder", + "ValidateMeshOrder", + "AbstractMetaInstancePlugin", "AbstractMetaContextPlugin", diff --git a/openpype/pipeline/publish/constants.py b/openpype/pipeline/publish/constants.py new file mode 100644 index 0000000000..dcd3445200 --- /dev/null +++ b/openpype/pipeline/publish/constants.py @@ -0,0 +1,7 @@ +import pyblish.api + + +ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 +ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1 +ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2 +ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3 From 2f0f9508d4d36a157d4a4a55ed63c7408ae3c7f8 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 26 Aug 2022 12:05:18 +0100 Subject: [PATCH 0723/2550] Implemented update --- .../plugins/load/load_alembic_staticmesh.py | 26 +++-- .../plugins/load/load_layout_existing.py | 103 ++++++++++-------- 2 files changed, 75 insertions(+), 54 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 50e498dbb0..a5b9cbd1fc 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -20,15 +20,11 @@ class StaticMeshAlembicLoader(plugin.Loader): icon = "cube" color = "orange" - def get_task(self, filename, asset_dir, asset_name, replace): + @staticmethod + def get_task(filename, asset_dir, asset_name, replace, default_conversion): task = unreal.AssetImportTask() options = unreal.AbcImportSettings() sm_settings = unreal.AbcStaticMeshSettings() - conversion_settings = unreal.AbcConversionSettings( - preset=unreal.AbcConversionPreset.CUSTOM, - flip_u=False, flip_v=False, - rotation=[0.0, 0.0, 0.0], - scale=[1.0, 1.0, 1.0]) task.set_editor_property('filename', filename) task.set_editor_property('destination_path', asset_dir) @@ -44,13 +40,20 @@ class StaticMeshAlembicLoader(plugin.Loader): sm_settings.set_editor_property('merge_meshes', True) + if not default_conversion: + conversion_settings = unreal.AbcConversionSettings( + preset=unreal.AbcConversionPreset.CUSTOM, + flip_u=False, flip_v=False, + rotation=[0.0, 0.0, 0.0], + scale=[1.0, 1.0, 1.0]) + options.conversion_settings = conversion_settings + options.static_mesh_settings = sm_settings - options.conversion_settings = conversion_settings task.options = options return task - def load(self, context, name, namespace, data): + def load(self, context, name, namespace, options): """Load and 
containerise representation into Content Browser. This is two step process. First, import FBX to temporary path and @@ -82,6 +85,10 @@ class StaticMeshAlembicLoader(plugin.Loader): asset_name = "{}".format(name) version = context.get('version').get('name') + default_conversion = False + if options.get("default_conversion"): + default_conversion = options.get("default_conversion") + tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( f"{root}/{asset}/{name}_v{version:03d}", suffix="") @@ -91,7 +98,8 @@ class StaticMeshAlembicLoader(plugin.Loader): if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = self.get_task(self.fname, asset_dir, asset_name, False) + task = self.get_task( + self.fname, asset_dir, asset_name, False, default_conversion) unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 diff --git a/openpype/hosts/unreal/plugins/load/load_layout_existing.py b/openpype/hosts/unreal/plugins/load/load_layout_existing.py index c20af950d9..8cd1950f7e 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout_existing.py +++ b/openpype/hosts/unreal/plugins/load/load_layout_existing.py @@ -11,6 +11,7 @@ from openpype.pipeline import ( discover_loader_plugins, loaders_from_representation, load_container, + get_representation_path, AVALON_CONTAINER_ID, legacy_io, ) @@ -132,6 +133,23 @@ class ExistingLayoutLoader(plugin.Loader): ) return transform + def _spawn_actor(self, obj, lasset): + actor = EditorLevelLibrary.spawn_actor_from_object( + obj, unreal.Vector(0.0, 0.0, 0.0) + ) + + actor.set_actor_label(lasset.get('instance_name')) + smc = actor.get_editor_property('static_mesh_component') + mesh = smc.get_editor_property('static_mesh') + import_data = mesh.get_editor_property('asset_import_data') + filename = import_data.get_first_filename() + path = Path(filename) + + transform = self._get_transform( + path.suffix, import_data, lasset) + + actor.set_actor_transform(transform, False, True) + @staticmethod def _get_fbx_loader(loaders, family): name = "" @@ -192,25 +210,29 @@ class ExistingLayoutLoader(plugin.Loader): if not loader: raise AssertionError(f"No valid loader found for {representation}") + # This option is necessary to avoid importing the assets with a + # different conversion compared to the other assets. For ABC files, + # it is in fact impossible to access the conversion settings. So, + # we must assume that the Maya conversion settings have been applied. + options = { + "default_conversion": True + } + assets = load_container( loader, representation, - namespace=instance_name + namespace=instance_name, + options=options ) return assets - def load(self, context, name, namespace, options): - print("Loading Layout and Match Assets") - + def _process(self, lib_path): ar = unreal.AssetRegistryHelpers.get_asset_registry() - asset = context.get('asset').get('name') - container_name = f"{asset}_{name}_CON" - actors = EditorLevelLibrary.get_all_level_actors() - with open(self.fname, "r") as fp: + with open(lib_path, "r") as fp: data = json.load(fp) layout_data = [] @@ -260,7 +282,6 @@ class ExistingLayoutLoader(plugin.Loader): if path.name not in repr_data.get('data').get('path'): continue - asset_name = path.with_suffix('').name mesh_path = Path(mesh.get_path_name()).parent.as_posix() # Create the container for the asset. 
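# --- Editor's sketch (not from the patch itself) of the matching idea the
# --- layout loader above relies on: a placed static mesh remembers the file it
# --- was imported from, and that filename is compared against the path stored
# --- on the published representation. `repr_doc` and the helper names are
# --- illustrative only; the `unreal` calls mirror the ones used in the diff.
import unreal
from pathlib import Path


def imported_source_file(actor):
    """Return the source file recorded on the actor's static mesh."""
    smc = actor.get_editor_property("static_mesh_component")
    mesh = smc.get_editor_property("static_mesh")
    import_data = mesh.get_editor_property("asset_import_data")
    return Path(import_data.get_first_filename())


def matches_representation(actor, repr_doc):
    """True if the actor's source file matches the published representation."""
    source = imported_source_file(actor)
    return source.name in repr_doc.get("data", {}).get("path", "")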
@@ -309,23 +330,7 @@ class ExistingLayoutLoader(plugin.Loader): for asset in assets: obj = asset.get_asset() - actor = EditorLevelLibrary.spawn_actor_from_object( - obj, unreal.Vector(0.0, 0.0, 0.0) - ) - - actor.set_actor_label(lasset.get('instance_name')) - smc = actor.get_editor_property( - 'static_mesh_component') - mesh = smc.get_editor_property('static_mesh') - import_data = mesh.get_editor_property( - 'asset_import_data') - filename = import_data.get_first_filename() - path = Path(filename) - - transform = self._get_transform( - path.suffix, import_data, lasset) - - actor.set_actor_transform(transform, False, True) + self._spawn_actor(obj, lasset) loaded = True break @@ -345,24 +350,7 @@ class ExistingLayoutLoader(plugin.Loader): obj = ar.get_asset_by_object_path(asset).get_asset() if not obj.get_class().get_name() == 'StaticMesh': continue - actor = EditorLevelLibrary.spawn_actor_from_object( - obj, unreal.Vector(0.0, 0.0, 0.0) - ) - - actor.set_actor_label(lasset.get('instance_name')) - smc = actor.get_editor_property('static_mesh_component') - mesh = smc.get_editor_property('static_mesh') - import_data = mesh.get_editor_property('asset_import_data') - filename = import_data.get_first_filename() - path = Path(filename) - - transform = self._transform_from_basis( - lasset.get('transform_matrix'), - lasset.get('basis'), - unreal.Matrix.IDENTITY.transform() - ) - - actor.set_actor_transform(transform, False, True) + self._spawn_actor(obj, lasset) break @@ -374,10 +362,21 @@ class ExistingLayoutLoader(plugin.Loader): if actor not in actors_matched: EditorLevelLibrary.destroy_actor(actor) + return containers + + def load(self, context, name, namespace, options): + print("Loading Layout and Match Assets") + + asset = context.get('asset').get('name') + asset_name = f"{asset}_{name}" if asset else name + container_name = f"{asset}_{name}_CON" + curr_level = self._get_current_level() if not curr_level: - return + raise AssertionError("Current level not saved") + + containers = self._process(self.fname) curr_level_path = Path( curr_level.get_outer().get_path_name()).parent.as_posix() @@ -402,3 +401,17 @@ class ExistingLayoutLoader(plugin.Loader): "loaded_assets": containers } upipeline.imprint(f"{curr_level_path}/{container_name}", data) + + def update(self, container, representation): + asset_dir = container.get('namespace') + + source_path = get_representation_path(representation) + containers = self._process(source_path) + + data = { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]), + "loaded_assets": containers + } + upipeline.imprint( + "{}/{}".format(asset_dir, container.get('container_name')), data) From 2d9f2a6e767f340589c0f1955904a2b6762e178a Mon Sep 17 00:00:00 2001 From: maxpareschi Date: Fri, 26 Aug 2022 14:46:03 +0200 Subject: [PATCH 0724/2550] os.environ is now a copy not an instance --- openpype/pipeline/workfile/path_resolving.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 97e00d807c..4ab4a4936c 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -409,7 +409,7 @@ def get_custom_workfile_template( anatomy_context_data["root"] = anatomy.roots # extend anatomy context with os.environ - full_context_data = os.environ + full_context_data = os.environ.copy() full_context_data.update(anatomy_context_data) # get task type for the task in context From 
10758ec144730ef33c70ea6b2e42a2f43f8a4d5d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 14:51:22 +0200 Subject: [PATCH 0725/2550] use new source of order variables --- .../publish/validate_instance_asset.py | 8 ++-- .../publish/validate_camera_zero_keyframe.py | 3 +- .../publish/validate_no_colons_in_name.py | 3 +- .../publish/validate_transform_zero.py | 3 +- .../plugins/publish/validate_instances.py | 9 ++-- .../plugins/publish/valiate_vdb_input_node.py | 4 +- .../validate_abc_primitive_to_detail.py | 5 +- .../publish/validate_alembic_face_sets.py | 5 +- .../publish/validate_alembic_input_node.py | 5 +- .../plugins/publish/validate_bypass.py | 4 +- .../plugins/publish/validate_camera_rop.py | 4 +- .../publish/validate_mkpaths_toggled.py | 4 +- .../plugins/publish/validate_no_errors.py | 4 +- .../validate_primitive_hierarchy_paths.py | 4 +- .../validate_usd_shade_model_exists.py | 4 +- .../publish/validate_usd_shade_workspace.py | 4 +- .../publish/validate_vdb_input_node.py | 4 +- .../publish/validate_vdb_output_node.py | 4 +- .../hosts/maya/plugins/create/create_ass0.py | 48 +++++++++++++++++++ .../publish/validate_animation_content.py | 3 +- ...date_animation_out_set_related_node_ids.py | 7 ++- .../publish/validate_ass_relative_paths.py | 9 ++-- .../plugins/publish/validate_attributes.py | 8 ++-- .../publish/validate_camera_attributes.py | 3 +- .../publish/validate_camera_contents.py | 3 +- .../plugins/publish/validate_color_sets.py | 7 ++- .../plugins/publish/validate_cycle_error.py | 3 +- .../plugins/publish/validate_frame_range.py | 8 ++-- .../publish/validate_instance_has_members.py | 3 +- .../publish/validate_instance_in_context.py | 4 +- .../publish/validate_instance_subset.py | 4 +- .../plugins/publish/validate_look_contents.py | 3 +- ...lidate_look_default_shaders_connections.py | 4 +- .../validate_look_id_reference_edits.py | 7 ++- .../publish/validate_look_members_unique.py | 3 +- .../validate_look_no_default_shaders.py | 3 +- .../plugins/publish/validate_look_sets.py | 8 ++-- .../publish/validate_look_shading_group.py | 7 ++- .../publish/validate_look_single_shader.py | 3 +- .../plugins/publish/validate_maya_units.py | 9 ++-- .../validate_mesh_arnold_attributes.py | 7 ++- .../plugins/publish/validate_mesh_has_uv.py | 3 +- .../publish/validate_mesh_lamina_faces.py | 3 +- .../plugins/publish/validate_mesh_ngons.py | 3 +- .../validate_mesh_no_negative_scale.py | 3 +- .../publish/validate_mesh_non_manifold.py | 3 +- .../publish/validate_mesh_non_zero_edge.py | 3 +- .../publish/validate_mesh_normals_unlocked.py | 7 ++- .../publish/validate_mesh_overlapping_uvs.py | 3 +- .../validate_mesh_shader_connections.py | 7 ++- .../publish/validate_mesh_single_uv_set.py | 7 ++- .../publish/validate_mesh_uv_set_map1.py | 7 ++- .../validate_mesh_vertices_have_edges.py | 7 ++- .../plugins/publish/validate_model_content.py | 3 +- .../plugins/publish/validate_model_name.py | 3 +- .../publish/validate_mvlook_contents.py | 5 +- .../plugins/publish/validate_no_animation.py | 3 +- .../publish/validate_no_default_camera.py | 3 +- .../plugins/publish/validate_no_namespace.py | 8 +++- .../publish/validate_no_null_transforms.py | 7 ++- .../publish/validate_no_unknown_nodes.py | 3 +- .../maya/plugins/publish/validate_node_ids.py | 4 +- .../validate_node_ids_deformed_shapes.py | 7 ++- .../publish/validate_node_ids_in_database.py | 3 +- .../publish/validate_node_ids_related.py | 3 +- .../publish/validate_node_ids_unique.py | 3 +- .../publish/validate_node_no_ghosting.py | 3 +- 
.../publish/validate_render_image_rule.py | 8 ++-- .../validate_render_no_default_cameras.py | 3 +- .../publish/validate_render_single_camera.py | 3 +- .../publish/validate_rendersettings.py | 8 ++-- .../plugins/publish/validate_resources.py | 4 +- .../validate_review_subset_uniqueness.py | 8 ++-- .../plugins/publish/validate_rig_contents.py | 4 +- .../publish/validate_rig_controllers.py | 7 ++- ...idate_rig_controllers_arnold_attributes.py | 7 ++- .../publish/validate_rig_joints_hidden.py | 7 ++- .../publish/validate_rig_out_set_node_ids.py | 7 ++- .../publish/validate_rig_output_ids.py | 7 ++- .../publish/validate_scene_set_workspace.py | 5 +- .../plugins/publish/validate_setdress_root.py | 5 +- .../plugins/publish/validate_shader_name.py | 5 +- .../publish/validate_shape_default_names.py | 7 ++- .../publish/validate_shape_render_stats.py | 7 ++- .../plugins/publish/validate_shape_zero.py | 7 ++- .../publish/validate_single_assembly.py | 4 +- .../validate_skeletalmesh_hierarchy.py | 9 ++-- .../validate_skinCluster_deformer_set.py | 3 +- .../plugins/publish/validate_step_size.py | 3 +- .../validate_transform_naming_suffix.py | 3 +- .../publish/validate_transform_zero.py | 3 +- .../validate_unreal_mesh_triangulated.py | 3 +- .../validate_unreal_staticmesh_naming.py | 5 +- .../publish/validate_unreal_up_axis.py | 9 ++-- .../plugins/publish/validate_visible_only.py | 3 +- .../validate_vray_distributed_rendering.py | 8 ++-- .../validate_vray_translator_settings.py | 4 +- .../validate_yeti_renderscript_callbacks.py | 4 +- .../validate_yeti_rig_input_in_instance.py | 3 +- .../plugins/publish/validate_asset_name.py | 9 ++-- .../publish/validate_instance_asset.py | 4 +- .../plugins/publish/validate_naming.py | 9 ++-- .../publish/validate_unique_subsets.py | 8 ++-- .../publish/validate_editorial_resources.py | 8 ++-- .../plugins/publish/validate_frame_ranges.py | 8 ++-- .../publish/validate_shot_duplicates.py | 9 ++-- .../validate_simple_unreal_texture_naming.py | 11 +++-- .../plugins/publish/validate_sources.py | 8 ++-- .../plugins/publish/validate_texture_batch.py | 8 ++-- .../publish/validate_texture_has_workfile.py | 8 ++-- .../plugins/publish/validate_texture_name.py | 8 ++-- .../publish/validate_texture_versions.py | 8 ++-- .../publish/validate_texture_workfiles.py | 8 ++-- .../plugins/publish/validate_frame_ranges.py | 8 ++-- .../validate_custom_ftrack_attributes.py | 4 +- .../plugins/publish/validate_shotgrid_user.py | 4 +- openpype/plugin.py | 3 ++ .../plugins/publish/validate_resources.py | 7 ++- .../plugins/publish/validate_unique_names.py | 3 +- 119 files changed, 444 insertions(+), 231 deletions(-) create mode 100644 openpype/hosts/maya/plugins/create/create_ass0.py diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py index 7a9356f020..6c36136b20 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -1,9 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import ( +from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ( + ValidateContentsOrder, PublishXmlValidationError, - legacy_io, ) from openpype.hosts.aftereffects.api import get_stub @@ -50,7 +50,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): label = "Validate Instance Asset" hosts = ["aftereffects"] actions = [ValidateInstanceAssetRepair] - order = 
openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): instance_asset = instance.data["asset"] diff --git a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py index 39b9b67511..f17800c6ed 100644 --- a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py +++ b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py @@ -4,6 +4,7 @@ import mathutils import pyblish.api import openpype.hosts.blender.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): @@ -14,7 +15,7 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin): in Unreal and Blender. """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["camera"] category = "geometry" diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py index 261ff864d5..5be08fd794 100644 --- a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py @@ -2,6 +2,7 @@ from typing import List import pyblish.api import openpype.hosts.blender.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNoColonsInName(pyblish.api.InstancePlugin): @@ -12,7 +13,7 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model", "rig"] version = (0, 1, 0) diff --git a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py index 7456dbc423..9bcb405579 100644 --- a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -4,6 +4,7 @@ import mathutils import pyblish.api import openpype.hosts.blender.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateTransformZero(pyblish.api.InstancePlugin): @@ -15,7 +16,7 @@ class ValidateTransformZero(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] category = "geometry" diff --git a/openpype/hosts/harmony/plugins/publish/validate_instances.py b/openpype/hosts/harmony/plugins/publish/validate_instances.py index 373ef94cc3..ac367082ef 100644 --- a/openpype/hosts/harmony/plugins/publish/validate_instances.py +++ b/openpype/hosts/harmony/plugins/publish/validate_instances.py @@ -1,9 +1,12 @@ import os import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError + import openpype.hosts.harmony.api as harmony +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateInstanceRepair(pyblish.api.Action): @@ -37,7 +40,7 @@ class ValidateInstance(pyblish.api.InstancePlugin): label = "Validate Instance" hosts = ["harmony"] actions = [ValidateInstanceRepair] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): instance_asset = instance.data["asset"] diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py 
b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py index 0ae1bc94eb..ac408bc842 100644 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 3e17d3e8de..ea800707fb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -1,8 +1,9 @@ import pyblish.api -import openpype.api from collections import defaultdict +from openpype.pipeline.publish import ValidateContentsOrder + class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """Validate Alembic ROP Primitive to Detail attribute is consistent. @@ -15,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index e9126ffef0..cbed3ea235 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,5 +1,6 @@ import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ValidateContentsOrder class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 8d7e3b611f..2625ae5f83 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,5 +1,6 @@ import pyblish.api -import colorbleed.api + +from openpype.pipeline.publish import ValidateContentsOrder class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -11,7 +12,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = colorbleed.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index fc4e18f701..7cf8da69d6 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder 
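# --- Editor's sketch (not part of this patch) of what the relocated order
# --- constants resolve to. `openpype/pipeline/publish/constants.py`, added in
# --- the commit above, defines them as offsets from pyblish's stock
# --- ValidatorOrder (which is 1), so ValidateContentsOrder evaluates to 1.1.
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder

assert ValidateContentsOrder == pyblish.api.ValidatorOrder + 0.1  # == 1.1


class ValidateOrderingSketch(pyblish.api.InstancePlugin):
    """Hypothetical validator; the real plugins in this commit do the same."""

    # Runs just after plain ValidatorOrder plugins; several Houdini validators
    # in these diffs nudge this further with `ValidateContentsOrder + 0.1`
    # or `ValidateContentsOrder - 0.1`.
    order = ValidateContentsOrder
    label = "Ordering sketch"

    def process(self, instance):
        pass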
class ValidateBypassed(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder - 0.1 + order = ValidateContentsOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index a0919e1323..d414920f8b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -1,11 +1,11 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["camera"] hosts = ["houdini"] label = "Camera ROP" diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index cd72877949..be6a798a95 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,11 +1,11 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Create Intermediate Directories Checked" diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index f58e5f8d7d..76635d4ed5 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -1,6 +1,6 @@ import pyblish.api -import openpype.api import hou +from openpype.pipeline.publish import ValidateContentsOrder def cook_in_range(node, start, end): @@ -28,7 +28,7 @@ def get_errors(node): class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["houdini"] label = "Validate no errors" diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 1eb36763bb..7a8cd04f15 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 + order = ValidateContentsOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Prims Hierarchy Path" diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index b979b87d84..f08c7c72c5 100644 --- 
a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
@@ -3,14 +3,14 @@ import re
 import pyblish.api
 
 from openpype.client import get_subset_by_name
-import openpype.api
 from openpype.pipeline import legacy_io
+from openpype.pipeline.publish import ValidateContentsOrder
 
 
 class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
     """Validate the Instance has no current cooking errors."""
 
-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["houdini"]
     families = ["usdShade"]
     label = "USD Shade model exists"
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py
index a77ca2f3cb..a4902b48a9 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py
@@ -1,5 +1,5 @@
 import pyblish.api
-import openpype.api
+from openpype.pipeline.publish import ValidateContentsOrder
 
 import hou
 
@@ -12,7 +12,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
 
     """
 
-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["houdini"]
     families = ["usdShade"]
     label = "USD Shade Workspace"
diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py
index 0ae1bc94eb..ac408bc842 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py
@@ -1,5 +1,5 @@
 import pyblish.api
-import openpype.api
+from openpype.pipeline.publish import ValidateContentsOrder
 
 
 class ValidateVDBInputNode(pyblish.api.InstancePlugin):
@@ -16,7 +16,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin):
 
     """
 
-    order = openpype.api.ValidateContentsOrder + 0.1
+    order = ValidateContentsOrder + 0.1
     families = ["vdbcache"]
     hosts = ["houdini"]
     label = "Validate Input Node (VDB)"
diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
index 1ba840b71d..55ed581d4c 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
@@ -1,6 +1,6 @@
 import pyblish.api
-import openpype.api
 import hou
+from openpype.pipeline.publish import ValidateContentsOrder
 
 
 class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
@@ -17,7 +17,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
 
     """
 
-    order = openpype.api.ValidateContentsOrder + 0.1
+    order = ValidateContentsOrder + 0.1
     families = ["vdbcache"]
     hosts = ["houdini"]
     label = "Validate Output Node (VDB)"
diff --git a/openpype/hosts/maya/plugins/create/create_ass0.py b/openpype/hosts/maya/plugins/create/create_ass0.py
new file mode 100644
index 0000000000..4523aa02c5
--- /dev/null
+++ b/openpype/hosts/maya/plugins/create/create_ass0.py
@@ -0,0 +1,48 @@
+from collections import OrderedDict
+
+from openpype.hosts.maya.api import (
+    lib,
+    plugin
+)
+
+from maya import cmds
+
+
+class CreateAss(plugin.Creator):
+    """Arnold Archive"""
+
+    name = "ass"
+    label = "Ass StandIn"
+    family = "ass"
+    icon = "cube"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateAss, self).__init__(*args, **kwargs)
+
+        # Add animation data
+        self.data.update(lib.collect_animation_data())
+
+        # Whether to export the Arnold stand-in as an animated sequence
+        self.data["exportSequence"] = False
+
+    def process(self):
+        instance = super(CreateAss, self).process()
+
+        # data = OrderedDict(**self.data)
+
+
+
+        nodes = list()
+
+        if (self.options or {}).get("useSelection"):
+            nodes = cmds.ls(selection=True)
+
+        cmds.sets(nodes, rm=instance)
+
+        assContent = cmds.sets(name="content_SET")
+        assProxy = cmds.sets(name="proxy_SET", empty=True)
+        cmds.sets([assContent, assProxy], forceElement=instance)
+
+        # self.log.info(data)
+        #
+        # self.data = data
diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_content.py b/openpype/hosts/maya/plugins/publish/validate_animation_content.py
index 7638c44b87..6f7a6b905a 100644
--- a/openpype/hosts/maya/plugins/publish/validate_animation_content.py
+++ b/openpype/hosts/maya/plugins/publish/validate_animation_content.py
@@ -1,6 +1,7 @@
 import pyblish.api
 import openpype.api
 import openpype.hosts.maya.api.action
+from openpype.pipeline.publish import ValidateContentsOrder
 
 
 class ValidateAnimationContent(pyblish.api.InstancePlugin):
@@ -11,7 +12,7 @@ class ValidateAnimationContent(pyblish.api.InstancePlugin):
 
     """
 
-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ["maya"]
     families = ["animation"]
     label = "Animation Content"
diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py
index 5f4d92bed9..aa27633402 100644
--- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py
+++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py
@@ -4,7 +4,10 @@ import pyblish.api
 import openpype.api
 import openpype.hosts.maya.api.action
 from openpype.hosts.maya.api import lib
-from openpype.pipeline.publish import RepairAction
+from openpype.pipeline.publish import (
+    RepairAction,
+    ValidateContentsOrder,
+)
 
 
 class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin):
@@ -17,7 +20,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin):
 
     """
 
-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     families = ['animation', "pointcache"]
     hosts = ['maya']
     label = 'Animation Out Set Related Node Ids'
diff --git a/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py b/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py
index 3de8e55a41..ac6ce4d22d 100644
--- a/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py
+++ b/openpype/hosts/maya/plugins/publish/validate_ass_relative_paths.py
@@ -4,15 +4,16 @@ import types
 import maya.cmds as cmds
 
 import pyblish.api
-import openpype.api
-import openpype.hosts.maya.api.action
-from openpype.pipeline.publish import RepairAction
+from openpype.pipeline.publish import (
+    RepairAction,
+    ValidateContentsOrder,
+)
 
 
 class ValidateAssRelativePaths(pyblish.api.InstancePlugin):
     """Ensure exporting ass file has set relative texture paths"""
 
-    order = openpype.api.ValidateContentsOrder
+    order = ValidateContentsOrder
     hosts = ['maya']
     families = ['ass']
     label = "ASS has relative texture paths"
diff --git a/openpype/hosts/maya/plugins/publish/validate_attributes.py b/openpype/hosts/maya/plugins/publish/validate_attributes.py
index e5b72ac3a3..136c38bc1d 100644
--- a/openpype/hosts/maya/plugins/publish/validate_attributes.py
+++ b/openpype/hosts/maya/plugins/publish/validate_attributes.py
@@ -1,8 +1,10 @@
 import pymel.core as pm
 
 import pyblish.api
-import openpype.api
-from 
openpype.pipeline.publish import RepairContextAction +from openpype.pipeline.publish import ( + RepairContextAction, + ValidateContentsOrder, +) class ValidateAttributes(pyblish.api.ContextPlugin): @@ -17,7 +19,7 @@ class ValidateAttributes(pyblish.api.ContextPlugin): } """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Attributes" hosts = ["maya"] actions = [RepairContextAction] diff --git a/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py b/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py index e019788aff..19c1179e52 100644 --- a/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraAttributes(pyblish.api.InstancePlugin): @@ -14,7 +15,7 @@ class ValidateCameraAttributes(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['camera'] hosts = ['maya'] label = 'Camera Attributes' diff --git a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py index 5f6faddbe7..f846319807 100644 --- a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCameraContents(pyblish.api.InstancePlugin): @@ -15,7 +16,7 @@ class ValidateCameraContents(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['camera'] hosts = ['maya'] label = 'Camera Contents' diff --git a/openpype/hosts/maya/plugins/publish/validate_color_sets.py b/openpype/hosts/maya/plugins/publish/validate_color_sets.py index ecc5c78e43..cab9d6ebab 100644 --- a/openpype/hosts/maya/plugins/publish/validate_color_sets.py +++ b/openpype/hosts/maya/plugins/publish/validate_color_sets.py @@ -3,7 +3,10 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateColorSets(pyblish.api.Validator): @@ -14,7 +17,7 @@ class ValidateColorSets(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_cycle_error.py b/openpype/hosts/maya/plugins/publish/validate_cycle_error.py index 4dfe0b8add..d3b8316d94 100644 --- a/openpype/hosts/maya/plugins/publish/validate_cycle_error.py +++ b/openpype/hosts/maya/plugins/publish/validate_cycle_error.py @@ -5,12 +5,13 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline.publish import ValidateContentsOrder class ValidateCycleError(pyblish.api.InstancePlugin): """Validate nodes produce no cycle errors.""" - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Cycle Errors" hosts = ["maya"] families = ["rig"] 
diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index 9094176abb..b467a7c232 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -1,8 +1,10 @@ import pyblish.api -import openpype.api from maya import cmds -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -19,7 +21,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): """ label = "Validate Frame Range" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["animation", "pointcache", "camera", diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index e04a26e4fd..bf92ac5099 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -1,12 +1,13 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateInstanceHasMembers(pyblish.api.InstancePlugin): """Validates instance objectSet has *any* members.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] label = 'Instance has members' actions = [openpype.hosts.maya.api.action.SelectInvalidAction] diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py b/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py index 7b8c335062..41bb414829 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_in_context.py @@ -3,7 +3,7 @@ from __future__ import absolute_import import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder from maya import cmds @@ -98,7 +98,7 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin): Action on this validator will select invalid instances in Outliner. 
""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Instance in same Context" optional = True hosts = ["maya"] diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_subset.py b/openpype/hosts/maya/plugins/publish/validate_instance_subset.py index 539f3f9d3c..bb3dde761c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_subset.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_subset.py @@ -1,8 +1,8 @@ import pyblish.api -import openpype.api import string import six +from openpype.pipeline.publish import ValidateContentsOrder # Allow only characters, numbers and underscore allowed = set(string.ascii_lowercase + @@ -18,7 +18,7 @@ def validate_name(subset): class ValidateSubsetName(pyblish.api.InstancePlugin): """Validates subset name has only valid characters""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["*"] label = "Subset Name" diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index b1e1d5416b..d9819b05d5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -1,6 +1,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookContents(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateLookContents(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Data Contents' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py b/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py index 262dd10b74..20f561a892 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookDefaultShadersConnections(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateLookDefaultShadersConnections(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Default Shader Connections' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py index dc751b6b04..f223c1a42b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py @@ -4,7 +4,10 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): @@ -17,7 +20,7 @@ class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Id Reference Edits' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py 
b/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py index 2367602d05..210fcb174d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py @@ -3,6 +3,7 @@ from collections import defaultdict import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidatePipelineOrder class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin): @@ -20,7 +21,7 @@ class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Look members unique' hosts = ['maya'] families = ['look'] diff --git a/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py b/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py index 8ba6cde988..95f8fa20d0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin): @@ -23,7 +24,7 @@ class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.01 + order = ValidateContentsOrder + 0.01 families = ['look'] hosts = ['maya'] label = 'Look No Default Shaders' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_sets.py b/openpype/hosts/maya/plugins/publish/validate_look_sets.py index 5e737ca876..3a60b771f4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_sets.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_sets.py @@ -1,8 +1,8 @@ -import openpype.hosts.maya.api.action -from openpype.hosts.maya.api import lib - import pyblish.api import openpype.api +import openpype.hosts.maya.api.action +from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ValidateContentsOrder class ValidateLookSets(pyblish.api.InstancePlugin): @@ -38,7 +38,7 @@ class ValidateLookSets(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Sets' diff --git a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py index 5e504cde9b..7d043eddb8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -3,7 +3,10 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateShadingEngine(pyblish.api.InstancePlugin): @@ -12,7 +15,7 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin): Shading engines should be named "{surface_shader}SG" """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["look"] hosts = ["maya"] label = "Look Shading Engine Naming" diff --git a/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py b/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py index 2b32ccf492..51e1232bb7 100644 --- 
a/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSingleShader(pyblish.api.InstancePlugin): @@ -12,7 +13,7 @@ class ValidateSingleShader(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Look Single Shader Per Shape' diff --git a/openpype/hosts/maya/plugins/publish/validate_maya_units.py b/openpype/hosts/maya/plugins/publish/validate_maya_units.py index 791030e932..5698d795ff 100644 --- a/openpype/hosts/maya/plugins/publish/validate_maya_units.py +++ b/openpype/hosts/maya/plugins/publish/validate_maya_units.py @@ -1,11 +1,14 @@ import maya.cmds as cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.lib as mayalib from openpype.pipeline.context_tools import get_current_project_asset from math import ceil -from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline.publish import ( + RepairContextAction, + ValidateSceneOrder, +) def float_round(num, places=0, direction=ceil): @@ -15,7 +18,7 @@ def float_round(num, places=0, direction=ceil): class ValidateMayaUnits(pyblish.api.ContextPlugin): """Check if the Maya units are set correct""" - order = openpype.api.ValidateSceneOrder + order = ValidateSceneOrder label = "Maya Units" hosts = ['maya'] actions = [RepairContextAction] diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py index 0924a40632..abfe1213a0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py @@ -4,7 +4,10 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import maintained_selection -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin): @@ -14,7 +17,7 @@ class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin): later published looks can discover non-default Arnold attributes. """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ["maya"] families = ["model"] category = "geometry" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py index 8f9b5d1c4e..4d2885d6e2 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py @@ -5,6 +5,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder def len_flattened(components): @@ -45,7 +46,7 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin): UVs for every face. 
""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py b/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py index 8fa1f3cf3b..e7a73c21b0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin): @@ -12,7 +13,7 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py b/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py index ab0beb2a9c..24d6188ec8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ValidateContentsOrder class ValidateMeshNgons(pyblish.api.Validator): @@ -16,7 +17,7 @@ class ValidateMeshNgons(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Mesh ngons" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py index 5ccfa7377a..18ceccaa28 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshNoNegativeScale(pyblish.api.Validator): @@ -17,7 +18,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = 'Mesh No Negative Scale' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py b/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py index 9bd584bbbf..e75a132d50 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshNonManifold(pyblish.api.Validator): @@ -13,7 +14,7 @@ class ValidateMeshNonManifold(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = 'Mesh Non-Manifold Vertices/Edges' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py index 5e6f24cf79..8c03b54971 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py 
@@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ValidateMeshOrder class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin): @@ -16,7 +17,7 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder families = ['model'] hosts = ['maya'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py index b11d4a3b63..7d88161058 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py @@ -4,7 +4,10 @@ import maya.api.OpenMaya as om2 import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshNormalsUnlocked(pyblish.api.Validator): @@ -15,7 +18,7 @@ class ValidateMeshNormalsUnlocked(pyblish.api.Validator): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py index bf95d8ba09..dde3e4fead 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py @@ -6,6 +6,7 @@ import maya.api.OpenMaya as om import pymel.core as pm from six.moves import xrange +from openpype.pipeline.publish import ValidateMeshOrder class GetOverlappingUVs(object): @@ -232,7 +233,7 @@ class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin): It is optional to warn publisher about it. 
""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py index 90873369c9..9621fd5aa8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py @@ -3,7 +3,10 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) def pairs(iterable): @@ -87,7 +90,7 @@ class ValidateMeshShaderConnections(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = "Mesh Shader Connections" diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py index 70007ccd7b..3fb09356d3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py @@ -4,7 +4,10 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): @@ -16,7 +19,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model', 'pointcache'] category = 'uv' diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index cb9180fd09..2711682f76 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -3,7 +3,10 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin): @@ -16,7 +19,7 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] optional = True diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py index 0c9cc81de7..350a5f4789 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py @@ -5,7 +5,10 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) def len_flattened(components): @@ -58,7 +61,7 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] category = 'geometry' diff --git 
a/openpype/hosts/maya/plugins/publish/validate_model_content.py b/openpype/hosts/maya/plugins/publish/validate_model_content.py index aee0ea52f0..0557858639 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_content.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_content.py @@ -4,6 +4,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib +from openpype.pipeline.publish import ValidateContentsOrder class ValidateModelContent(pyblish.api.InstancePlugin): @@ -14,7 +15,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Model Content" diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 02107d5732..99a4b2654e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -7,6 +7,7 @@ import pyblish.api import openpype.api from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api.shader_definition_editor import ( DEFINITION_FILENAME) @@ -23,7 +24,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): """ optional = True - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Model Name" diff --git a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py index bac2c030c8..62f360cd86 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py @@ -1,15 +1,16 @@ +import os import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder -import os COLOUR_SPACES = ['sRGB', 'linear', 'auto'] MIPMAP_EXTENSIONS = ['tdl'] class ValidateMvLookContents(pyblish.api.InstancePlugin): - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['mvLook'] hosts = ['maya'] label = 'Validate mvLook Data' diff --git a/openpype/hosts/maya/plugins/publish/validate_no_animation.py b/openpype/hosts/maya/plugins/publish/validate_no_animation.py index 6621e452f0..177de1468d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_animation.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_animation.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNoAnimation(pyblish.api.Validator): @@ -14,7 +15,7 @@ class ValidateNoAnimation(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "No Animation" hosts = ["maya"] families = ["model"] diff --git a/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py b/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py index c3f6f3c38e..d4ddb28070 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from 
openpype.pipeline.publish import ValidateContentsOrder class ValidateNoDefaultCameras(pyblish.api.InstancePlugin): @@ -13,7 +14,7 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin): settings when being loaded and sometimes being skipped. """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['camera'] version = (0, 1, 0) diff --git a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py index 6b721135b8..95caa1007f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py @@ -3,7 +3,11 @@ import maya.cmds as cmds import pyblish.api import openpype.api -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) + import openpype.hosts.maya.api.action @@ -17,7 +21,7 @@ def get_namespace(node_name): class ValidateNoNamespace(pyblish.api.InstancePlugin): """Ensure the nodes don't have a namespace""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' diff --git a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py index fea91b666d..f31fd09c95 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py @@ -3,7 +3,10 @@ import maya.cmds as cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) def has_shape_children(node): @@ -38,7 +41,7 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' diff --git a/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py b/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py index d140a1f24a..20fe34f2fd 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNoUnknownNodes(pyblish.api.InstancePlugin): @@ -16,7 +17,7 @@ class ValidateNoUnknownNodes(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model', 'rig'] optional = True diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_node_ids.py index d17d34117f..877ba0e781 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids.py @@ -1,7 +1,7 @@ import pyblish.api import openpype.api +from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action - from openpype.hosts.maya.api import lib @@ -14,7 +14,7 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Instance Nodes Have ID' hosts = ['maya'] families = 
["model", diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py index 363d8882c0..1fe4a34e07 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py @@ -4,7 +4,10 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): @@ -17,7 +20,7 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ['look'] hosts = ['maya'] label = 'Deformed shape ids' diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py index 632b531668..a5b1215f30 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -3,6 +3,7 @@ import pyblish.api import openpype.api from openpype.client import get_assets from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -18,7 +19,7 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Node Ids in Database' hosts = ['maya'] families = ["*"] diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py index c8bac6e569..a7595d7392 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,6 +1,7 @@ import pyblish.api import openpype.api +from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -10,7 +11,7 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Node Ids Related (ID)' hosts = ['maya'] families = ["model", diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py index ed9ef526d6..5ff18358e2 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py @@ -2,6 +2,7 @@ from collections import defaultdict import pyblish.api import openpype.api +from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -12,7 +13,7 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): Here we ensure that what has been added to the instance is unique """ - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder label = 'Non Duplicate Instance Members (ID)' hosts = ['maya'] families = ["model", diff --git a/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py b/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py index 38f3ab1e68..2f22d6da1e 100644 --- 
a/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateNodeNoGhosting(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateNodeNoGhosting(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model', 'rig'] label = "No Ghosting" diff --git a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py index 965211e046..78bb022785 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -1,8 +1,10 @@ from maya import cmds import pyblish.api -import openpype.api -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRenderImageRule(pyblish.api.InstancePlugin): @@ -14,7 +16,7 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Images File Rule (Workspace)" hosts = ["maya"] families = ["renderlayer"] diff --git a/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py b/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py index 044cc7c6a2..da35f42291 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py @@ -3,12 +3,13 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin): """Ensure no default (startup) cameras are to be rendered.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['renderlayer'] label = "No Default Cameras Renderable" diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index 35b87fd0ab..fc41b1cf5b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -6,6 +6,7 @@ from maya import cmds import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.render_settings import RenderSettings +from openpype.pipeline.publish import ValidateContentsOrder class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): @@ -15,7 +16,7 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin): prefix must contain token. 
""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Render Single Camera" hosts = ['maya'] families = ["renderlayer", diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 41cfbc19b5..08ecc0d149 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -6,8 +6,10 @@ from collections import OrderedDict from maya import cmds, mel import pyblish.api -import openpype.api -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) from openpype.hosts.maya.api import lib @@ -40,7 +42,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Render Settings" hosts = ["maya"] families = ["renderlayer"] diff --git a/openpype/hosts/maya/plugins/publish/validate_resources.py b/openpype/hosts/maya/plugins/publish/validate_resources.py index 08f0f5467c..b7bd47ad0a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_resources.py +++ b/openpype/hosts/maya/plugins/publish/validate_resources.py @@ -2,7 +2,7 @@ import os from collections import defaultdict import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateResources(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateResources(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Resources Unique" def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py index 04cc9ab5fb..361c594013 100644 --- a/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py +++ b/openpype/hosts/maya/plugins/publish/validate_review_subset_uniqueness.py @@ -1,14 +1,16 @@ # -*- coding: utf-8 -*- import collections import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateReviewSubsetUniqueness(pyblish.api.ContextPlugin): """Validates that review subset has unique name.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["review"] label = "Validate Review Subset Unique" diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_contents.py b/openpype/hosts/maya/plugins/publish/validate_rig_contents.py index 6fe51d7b51..1096c95486 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_contents.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateRigContents(pyblish.api.InstancePlugin): @@ -13,7 +13,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Rig Contents" hosts = ["maya"] families = ["rig"] diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py b/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py index fc6cda7f23..1e42abdcd9 100644 --- 
a/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_controllers.py @@ -2,7 +2,10 @@ from maya import cmds import pyblish.api -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import undo_chunk @@ -25,7 +28,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin): - Break all incoming connections to keyable attributes """ - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Rig Controllers" hosts = ["maya"] families = ["rig"] diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py index 23237d7ef6..3d486cf7a4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py @@ -3,7 +3,10 @@ from maya import cmds import pyblish.api import openpype.api -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) from openpype.hosts.maya.api import lib import openpype.hosts.maya.api.action @@ -27,7 +30,7 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): This validator will ensure they are hidden or unkeyable attributes. """ - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Rig Controllers (Arnold Attributes)" hosts = ["maya"] families = ["rig"] diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py index bad9c8b0d4..86967d7502 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py @@ -4,7 +4,10 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRigJointsHidden(pyblish.api.InstancePlugin): @@ -18,7 +21,7 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['rig'] version = (0, 1, 0) diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index f170bc72b1..70128ac493 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -4,7 +4,10 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): @@ -17,7 +20,7 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["rig"] hosts = ['maya'] label = 'Rig Out Set Node Ids' diff --git 
a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py index d0ef8d9c04..f075f42ff2 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py @@ -4,7 +4,10 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) class ValidateRigOutputIds(pyblish.api.InstancePlugin): @@ -14,7 +17,7 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin): to ensure the id from the model is preserved through animation. """ - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Rig Output Ids" hosts = ["maya"] families = ["rig"] diff --git a/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py b/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py index 174bc44a6f..ec2bea220d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py +++ b/openpype/hosts/maya/plugins/publish/validate_scene_set_workspace.py @@ -3,7 +3,8 @@ import os import maya.cmds as cmds import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ValidatePipelineOrder def is_subdir(path, root_dir): @@ -28,7 +29,7 @@ def is_subdir(path, root_dir): class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin): """Validate the scene is inside the currently set Maya workspace""" - order = openpype.api.ValidatePipelineOrder + order = ValidatePipelineOrder hosts = ['maya'] category = 'scene' version = (0, 1, 0) diff --git a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py index 8e23a7c04f..5fd971f8c4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_setdress_root.py +++ b/openpype/hosts/maya/plugins/publish/validate_setdress_root.py @@ -1,12 +1,11 @@ - import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSetdressRoot(pyblish.api.InstancePlugin): """Validate if set dress top root node is published.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "SetDress Root" hosts = ["maya"] families = ["setdress"] diff --git a/openpype/hosts/maya/plugins/publish/validate_shader_name.py b/openpype/hosts/maya/plugins/publish/validate_shader_name.py index 24111f0ad4..522b42fd00 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shader_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_shader_name.py @@ -1,9 +1,10 @@ +import re from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -import re +from openpype.pipeline.publish import ValidateContentsOrder class ValidateShaderName(pyblish.api.InstancePlugin): @@ -13,7 +14,7 @@ class ValidateShaderName(pyblish.api.InstancePlugin): """ optional = True - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["look"] hosts = ['maya'] label = 'Validate Shaders Name' diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py index d6e7ed82c4..25bd3442a3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py @@ -5,7 +5,10 @@ from 
maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) def short_name(node): @@ -32,7 +35,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py index c332e07505..0980d6b4b6 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py @@ -4,13 +4,16 @@ import openpype.api from maya import cmds import openpype.hosts.maya.api.action -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, +) class ValidateShapeRenderStats(pyblish.api.Validator): """Ensure all render stats are set to the default values.""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ['maya'] families = ['model'] label = 'Shape Default Render Stats' diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py index 515a8056f7..9e30735d40 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py @@ -4,7 +4,10 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) class ValidateShapeZero(pyblish.api.Validator): @@ -14,7 +17,7 @@ class ValidateShapeZero(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = "Shape Zero (Freeze)" diff --git a/openpype/hosts/maya/plugins/publish/validate_single_assembly.py b/openpype/hosts/maya/plugins/publish/validate_single_assembly.py index 9fb3a47e6d..8771ca58d1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_single_assembly.py +++ b/openpype/hosts/maya/plugins/publish/validate_single_assembly.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSingleAssembly(pyblish.api.InstancePlugin): @@ -17,7 +17,7 @@ class ValidateSingleAssembly(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['rig', 'animation'] label = 'Single Assembly' diff --git a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py index 54a86d27cf..8221c18b17 100644 --- a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py +++ b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py @@ -1,7 +1,10 @@ # -*- coding: utf-8 -*- import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError + +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) from maya import cmds @@ -9,7 +12,7 @@ from maya import cmds class 
ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin): """Validates that nodes has common root.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["skeletalMesh"] label = "Skeletal Mesh Top Node" diff --git a/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py b/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py index 8c804786f3..86ff914cb0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py +++ b/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin): @@ -14,7 +15,7 @@ class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['fbx'] label = "Skincluster Deformer Relationships" diff --git a/openpype/hosts/maya/plugins/publish/validate_step_size.py b/openpype/hosts/maya/plugins/publish/validate_step_size.py index 172ac5f26e..552a936966 100644 --- a/openpype/hosts/maya/plugins/publish/validate_step_size.py +++ b/openpype/hosts/maya/plugins/publish/validate_step_size.py @@ -1,6 +1,7 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateStepSize(pyblish.api.InstancePlugin): @@ -10,7 +11,7 @@ class ValidateStepSize(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = 'Step size' families = ['camera', 'pointcache', diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py index 6f5ff24b9c..64faf9ecb6 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py @@ -5,6 +5,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): @@ -27,7 +28,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ['maya'] families = ['model'] category = 'cleanup' diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_zero.py b/openpype/hosts/maya/plugins/publish/validate_transform_zero.py index fdd09658d1..9e232f6023 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_zero.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateTransformZero(pyblish.api.Validator): @@ -14,7 +15,7 @@ class ValidateTransformZero(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] category = "geometry" diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py index 
c05121a1b0..1ed3e5531c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py @@ -3,12 +3,13 @@ from maya import cmds import pyblish.api import openpype.api +from openpype.pipeline.publish import ValidateMeshOrder class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin): """Validate if mesh is made of triangles for Unreal Engine""" - order = openpype.api.ValidateMeshOrder + order = ValidateMeshOrder hosts = ["maya"] families = ["staticMesh"] category = "geometry" diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index 33788d1835..a4bb54f5af 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -6,7 +6,8 @@ import pyblish.api import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline import legacy_io -from openpype.api import get_project_settings +from openpype.settings import get_project_settings +from openpype.pipeline.publish import ValidateContentsOrder class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): @@ -50,7 +51,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): """ optional = True - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["staticMesh"] label = "Unreal Static Mesh Name" diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py b/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py index 3ed22c7cef..dd699735d9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_up_axis.py @@ -2,8 +2,11 @@ from maya import cmds import pyblish.api -import openpype.api -from openpype.pipeline.publish import RepairAction + +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) class ValidateUnrealUpAxis(pyblish.api.ContextPlugin): @@ -11,7 +14,7 @@ class ValidateUnrealUpAxis(pyblish.api.ContextPlugin): optional = True active = False - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["staticMesh"] label = "Unreal Up-Axis check" diff --git a/openpype/hosts/maya/plugins/publish/validate_visible_only.py b/openpype/hosts/maya/plugins/publish/validate_visible_only.py index 59a7f976ab..f326b91796 100644 --- a/openpype/hosts/maya/plugins/publish/validate_visible_only.py +++ b/openpype/hosts/maya/plugins/publish/validate_visible_only.py @@ -3,6 +3,7 @@ import pyblish.api import openpype.api from openpype.hosts.maya.api.lib import iter_visible_nodes_in_range import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): @@ -12,7 +13,7 @@ class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): on the instance - otherwise the validation is skipped. 
""" - order = openpype.api.ValidateContentsOrder + 0.05 + order = ValidateContentsOrder + 0.05 label = "Alembic Visible Only" hosts = ["maya"] families = ["pointcache", "animation"] diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py b/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py index 376d49d583..366f3bd10e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api from openpype.hosts.maya.api import lib -from openpype.pipeline.publish import RepairAction +from openpype.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, +) from maya import cmds @@ -16,7 +18,7 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "VRay Distributed Rendering" families = ["renderlayer"] actions = [RepairAction] diff --git a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py index 9b78699e14..f49811c2c0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py +++ b/openpype/hosts/maya/plugins/publish/validate_vray_translator_settings.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- """Validate VRay Translator settings.""" import pyblish.api -import openpype.api from openpype.pipeline.publish import ( context_plugin_should_run, RepairContextAction, + ValidateContentsOrder, ) from maya import cmds @@ -13,7 +13,7 @@ from maya import cmds class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin): """Validate VRay Translator settings for extracting vrscenes.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "VRay Translator Settings" families = ["vrayscene_layer"] actions = [RepairContextAction] diff --git a/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py b/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py index 79cd09315e..a864a18cee 100644 --- a/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py +++ b/openpype/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin): @@ -20,7 +20,7 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Yeti Render Script Callbacks" hosts = ["maya"] families = ["renderlayer"] diff --git a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py index 651c8da849..0fe89634f5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py +++ b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py @@ -3,12 +3,13 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator): """Validate if all input nodes are part of the instance's hierarchy""" - order = 
openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["yetiRig"] label = "Yeti Rig Input Shapes In Instance" diff --git a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 7647471f8a..52731140ff 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -4,10 +4,13 @@ from __future__ import absolute_import import nuke import pyblish.api -import openpype.api + import openpype.hosts.nuke.api.lib as nlib import openpype.hosts.nuke.api as nuke_api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class SelectInvalidInstances(pyblish.api.Action): @@ -97,7 +100,7 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin): Action on this validator will select invalid instances in Outliner. """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Validate correct asset name" hosts = ["nuke"] actions = [ diff --git a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py index b65f9d259f..2609f7a8cf 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py @@ -1,7 +1,7 @@ import pyblish.api -import openpype.api from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ValidateContentsOrder from openpype.hosts.photoshop import api as photoshop @@ -45,7 +45,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): label = "Validate Instance Asset" hosts = ["photoshop"] actions = [ValidateInstanceAssetRepair] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): instance_asset = instance.data["asset"] diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index 8106d6ff16..0665aff9d0 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -1,10 +1,13 @@ import re import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError + from openpype.hosts.photoshop import api as photoshop from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateNamingRepair(pyblish.api.Action): @@ -72,7 +75,7 @@ class ValidateNaming(pyblish.api.InstancePlugin): label = "Validate Naming" hosts = ["photoshop"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["image"] actions = [ValidateNamingRepair] diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py index 01f2323157..78e84729ce 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py @@ -1,7 +1,9 @@ import collections import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class 
ValidateSubsetUniqueness(pyblish.api.ContextPlugin): @@ -11,7 +13,7 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): label = "Validate Subset Uniqueness" hosts = ["photoshop"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["image"] def process(self, context): diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py index afb828474d..3d2b6d04ad 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_editorial_resources.py @@ -1,6 +1,8 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateEditorialResources(pyblish.api.InstancePlugin): @@ -13,7 +15,7 @@ class ValidateEditorialResources(pyblish.api.InstancePlugin): # make sure it is enabled only if at least both families are available match = pyblish.api.Subset - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, instance): self.log.debug( diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py index ff7f60354e..074c62ea0e 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_frame_ranges.py @@ -2,9 +2,11 @@ import re import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError from openpype.pipeline.context_tools import get_current_project_asset +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -13,7 +15,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): label = "Validate Frame Range" hosts = ["standalonepublisher"] families = ["render"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder optional = True # published data might be sequence (.mov, .mp4) in that counting files diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py index fe655f6b74..df04ae3b66 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_shot_duplicates.py @@ -1,14 +1,17 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) + class ValidateShotDuplicates(pyblish.api.ContextPlugin): """Validating no duplicate names are in context.""" label = "Validate Shot Duplicates" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, context): shot_names = [] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py index ef8da9f280..c123bef4f8 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py +++ 
b/openpype/hosts/standalonepublisher/plugins/publish/validate_simple_unreal_texture_naming.py @@ -1,16 +1,19 @@ # -*- coding: utf-8 -*- """Validator for correct file naming.""" -import pyblish.api -import openpype.api import re -from openpype.pipeline import PublishXmlValidationError +import pyblish.api + +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateSimpleUnrealTextureNaming(pyblish.api.InstancePlugin): label = "Validate Unreal Texture Names" hosts = ["standalonepublisher"] families = ["simpleUnrealTexture"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder regex = "^T_{asset}.*" def process(self, instance): diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py index 316f58988f..1782f53de2 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_sources.py @@ -2,8 +2,10 @@ import os import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateSources(pyblish.api.InstancePlugin): @@ -13,7 +15,7 @@ class ValidateSources(pyblish.api.InstancePlugin): got deleted between starting of SP and now. """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Check source files" optional = True # only for unforeseeable cases diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py index d66fb257bb..44f69e48f7 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_batch.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureBatch(pyblish.api.InstancePlugin): @@ -9,7 +11,7 @@ class ValidateTextureBatch(pyblish.api.InstancePlugin): label = "Validate Texture Presence" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["texture_batch_workfile"] optional = False diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py index 0e67464f59..f489d37f59 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_has_workfile.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureHasWorkfile(pyblish.api.InstancePlugin): @@ -12,7 +14,7 @@ class ValidateTextureHasWorkfile(pyblish.api.InstancePlugin): """ label = "Validate Texture Has Workfile" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["textures"] optional = True diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py 
b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py index 751ad917ca..22f4a0eafc 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py @@ -1,14 +1,16 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureBatchNaming(pyblish.api.InstancePlugin): """Validates that all instances had properly formatted name.""" label = "Validate Texture Batch Naming" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["texture_batch_workfile", "textures"] optional = False diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py index 84d9def895..dab160d537 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_versions.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureBatchVersions(pyblish.api.InstancePlugin): @@ -14,7 +16,7 @@ class ValidateTextureBatchVersions(pyblish.api.InstancePlugin): """ label = "Validate Texture Batch Versions" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["textures"] optional = False diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index fa492a80d8..56ea82f6b6 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -1,7 +1,9 @@ import pyblish.api -import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishXmlValidationError, +) class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): @@ -12,7 +14,7 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): label = "Validate Texture Workfile Has Resources" hosts = ["standalonepublisher"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["texture_batch_workfile"] optional = True diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index 947624100a..b962ea464a 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -2,10 +2,10 @@ import re import pyblish.api -import openpype.api -from openpype.pipeline import ( +from openpype.pipeline.publish import ( + ValidateContentsOrder, PublishXmlValidationError, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, ) @@ -16,7 +16,7 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, label = "Validate Frame Range" hosts = ["traypublisher"] families = ["render"] - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder 
optional = True # published data might be sequence (.mov, .mp4) in that counting files diff --git a/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py b/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py index dc80bf4eb3..489f291c0f 100644 --- a/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py +++ b/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateFtrackAttributes(pyblish.api.InstancePlugin): @@ -34,7 +34,7 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin): """ label = "Validate Custom Ftrack Attributes" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder families = ["ftrack"] optional = True # Ignore standalone host, because it does not have an Ftrack entity diff --git a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py index c14c980e2a..48b320e15e 100644 --- a/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py +++ b/openpype/modules/shotgrid/plugins/publish/validate_shotgrid_user.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateShotgridUser(pyblish.api.ContextPlugin): @@ -8,7 +8,7 @@ class ValidateShotgridUser(pyblish.api.ContextPlugin): """ label = "Validate Shotgrid User" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder def process(self, context): sg = context.data.get("shotgridSession") diff --git a/openpype/plugin.py b/openpype/plugin.py index 72dca31ec9..d3605fcb1e 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -3,6 +3,9 @@ import warnings import pyblish.api +# New location of orders: openpype.pipeline.publish.constants +# - can be imported as +# 'from openpype.pipeline.publish import ValidatePipelineOrder' ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1 ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2 diff --git a/openpype/plugins/publish/validate_resources.py b/openpype/plugins/publish/validate_resources.py index 644977ecd4..7911c70c2d 100644 --- a/openpype/plugins/publish/validate_resources.py +++ b/openpype/plugins/publish/validate_resources.py @@ -1,7 +1,6 @@ -import pyblish.api -import openpype.api - import os +import pyblish.api +from openpype.pipeline.publish import ValidateContentsOrder class ValidateResources(pyblish.api.InstancePlugin): @@ -17,7 +16,7 @@ class ValidateResources(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder label = "Resources" def process(self, instance): diff --git a/openpype/plugins/publish/validate_unique_names.py b/openpype/plugins/publish/validate_unique_names.py index 459c90e6c1..33a460f7cc 100644 --- a/openpype/plugins/publish/validate_unique_names.py +++ b/openpype/plugins/publish/validate_unique_names.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ValidateContentsOrder class ValidateUniqueNames(pyblish.api.Validator): @@ -12,7 +13,7 @@ class ValidateUniqueNames(pyblish.api.Validator): """ - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["maya"] families = ["model"] label = 
"Unique transform name" From 2bfa9eea445a37e830db4dda036f5f8f18168573 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:06:50 +0200 Subject: [PATCH 0726/2550] renamed 'IHostModule' to 'IHostAddon' --- openpype/hosts/aftereffects/module.py | 4 ++-- openpype/hosts/blender/module.py | 4 ++-- openpype/hosts/harmony/addon.py | 4 ++-- openpype/hosts/hiero/module.py | 4 ++-- openpype/hosts/maya/module.py | 4 ++-- openpype/hosts/nuke/module.py | 4 ++-- openpype/hosts/photoshop/addon.py | 4 ++-- .../standalonepublisher/standalonepublish_module.py | 4 ++-- openpype/hosts/traypublisher/module.py | 4 ++-- openpype/hosts/tvpaint/tvpaint_module.py | 4 ++-- openpype/hosts/unreal/module.py | 4 ++-- openpype/hosts/webpublisher/addon.py | 4 ++-- openpype/modules/base.py | 10 +++++----- openpype/modules/interfaces.py | 4 ++-- 14 files changed, 31 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/aftereffects/module.py b/openpype/hosts/aftereffects/module.py index 93d575c186..dff9634ecf 100644 --- a/openpype/hosts/aftereffects/module.py +++ b/openpype/hosts/aftereffects/module.py @@ -1,8 +1,8 @@ from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon -class AfterEffectsModule(OpenPypeModule, IHostModule): +class AfterEffectsModule(OpenPypeModule, IHostAddon): name = "aftereffects" host_name = "aftereffects" diff --git a/openpype/hosts/blender/module.py b/openpype/hosts/blender/module.py index d6ff3b111c..3db7973c17 100644 --- a/openpype/hosts/blender/module.py +++ b/openpype/hosts/blender/module.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class BlenderModule(OpenPypeModule, IHostModule): +class BlenderModule(OpenPypeModule, IHostAddon): name = "blender" host_name = "blender" diff --git a/openpype/hosts/harmony/addon.py b/openpype/hosts/harmony/addon.py index b051d68abb..872a7490b5 100644 --- a/openpype/hosts/harmony/addon.py +++ b/openpype/hosts/harmony/addon.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class HarmonyAddon(OpenPypeModule, IHostModule): +class HarmonyAddon(OpenPypeModule, IHostAddon): name = "harmony" host_name = "harmony" diff --git a/openpype/hosts/hiero/module.py b/openpype/hosts/hiero/module.py index 375486e034..7883d2255f 100644 --- a/openpype/hosts/hiero/module.py +++ b/openpype/hosts/hiero/module.py @@ -1,12 +1,12 @@ import os import platform from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon HIERO_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class HieroModule(OpenPypeModule, IHostModule): +class HieroModule(OpenPypeModule, IHostAddon): name = "hiero" host_name = "hiero" diff --git a/openpype/hosts/maya/module.py b/openpype/hosts/maya/module.py index 5a215be8d2..674b36b250 100644 --- a/openpype/hosts/maya/module.py +++ b/openpype/hosts/maya/module.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon MAYA_ROOT_DIR = 
os.path.dirname(os.path.abspath(__file__)) -class OpenPypeMaya(OpenPypeModule, IHostModule): +class OpenPypeMaya(OpenPypeModule, IHostAddon): name = "openpype_maya" host_name = "maya" diff --git a/openpype/hosts/nuke/module.py b/openpype/hosts/nuke/module.py index e4706a36cb..444aa75ff2 100644 --- a/openpype/hosts/nuke/module.py +++ b/openpype/hosts/nuke/module.py @@ -1,12 +1,12 @@ import os import platform from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class NukeModule(OpenPypeModule, IHostModule): +class NukeModule(OpenPypeModule, IHostAddon): name = "nuke" host_name = "nuke" diff --git a/openpype/hosts/photoshop/addon.py b/openpype/hosts/photoshop/addon.py index 18899d4de8..a41d91554b 100644 --- a/openpype/hosts/photoshop/addon.py +++ b/openpype/hosts/photoshop/addon.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class PhotoshopAddon(OpenPypeModule, IHostModule): +class PhotoshopAddon(OpenPypeModule, IHostAddon): name = "photoshop" host_name = "photoshop" diff --git a/openpype/hosts/standalonepublisher/standalonepublish_module.py b/openpype/hosts/standalonepublisher/standalonepublish_module.py index bf8e1d2c23..21b47beb54 100644 --- a/openpype/hosts/standalonepublisher/standalonepublish_module.py +++ b/openpype/hosts/standalonepublisher/standalonepublish_module.py @@ -5,12 +5,12 @@ import click from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import ITrayAction, IHostModule +from openpype.modules.interfaces import ITrayAction, IHostAddon STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostModule): +class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostAddon): label = "Publish" name = "standalonepublish_tool" host_name = "standalonepublisher" diff --git a/openpype/hosts/traypublisher/module.py b/openpype/hosts/traypublisher/module.py index 92a2312fec..c35ce2093a 100644 --- a/openpype/hosts/traypublisher/module.py +++ b/openpype/hosts/traypublisher/module.py @@ -5,12 +5,12 @@ import click from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import ITrayAction, IHostModule +from openpype.modules.interfaces import ITrayAction, IHostAddon TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction): +class TrayPublishModule(OpenPypeModule, IHostAddon, ITrayAction): label = "New Publish (beta)" name = "traypublish_tool" host_name = "traypublish" diff --git a/openpype/hosts/tvpaint/tvpaint_module.py b/openpype/hosts/tvpaint/tvpaint_module.py index a004359231..4b30ce667c 100644 --- a/openpype/hosts/tvpaint/tvpaint_module.py +++ b/openpype/hosts/tvpaint/tvpaint_module.py @@ -1,6 +1,6 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -13,7 +13,7 
@@ def get_launch_script_path(): ) -class TVPaintModule(OpenPypeModule, IHostModule): +class TVPaintModule(OpenPypeModule, IHostAddon): name = "tvpaint" host_name = "tvpaint" diff --git a/openpype/hosts/unreal/module.py b/openpype/hosts/unreal/module.py index aa08c8c130..99c8851e8e 100644 --- a/openpype/hosts/unreal/module.py +++ b/openpype/hosts/unreal/module.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class UnrealModule(OpenPypeModule, IHostModule): +class UnrealModule(OpenPypeModule, IHostAddon): name = "unreal" host_name = "unreal" diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index 7d26d5a7ff..a64d74e62b 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -3,12 +3,12 @@ import os import click from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class WebpublisherAddon(OpenPypeModule, IHostModule): +class WebpublisherAddon(OpenPypeModule, IHostAddon): name = "webpublisher" host_name = "webpublisher" diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 6db6ee9524..c96ca02ab7 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -35,7 +35,7 @@ from openpype.lib import ( from .interfaces import ( OpenPypeInterface, IPluginPaths, - IHostModule, + IHostAddon, ITrayModule, ITrayService ) @@ -811,13 +811,13 @@ class ModulesManager: Returns: OpenPypeModule: Found host module by name. - None: There was not found module inheriting IHostModule which has + None: There was not found module inheriting IHostAddon which has host name set to passed 'host_name'. """ for module in self.get_enabled_modules(): if ( - isinstance(module, IHostModule) + isinstance(module, IHostAddon) and module.host_name == host_name ): return module @@ -828,13 +828,13 @@ class ModulesManager: Returns: Iterable[str]: All available host names based on enabled modules - inheriting 'IHostModule'. + inheriting 'IHostAddon'. 
""" host_names = { module.host_name for module in self.get_enabled_modules() - if isinstance(module, IHostModule) + if isinstance(module, IHostAddon) } return host_names diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 13655773dd..f92ec6bf2d 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -385,8 +385,8 @@ class ISettingsChangeListener(OpenPypeInterface): pass -class IHostModule(OpenPypeInterface): - """Module which also contain a host implementation.""" +class IHostAddon(OpenPypeInterface): + """Addon which also contain a host implementation.""" @abstractproperty def host_name(self): From 2c81bb5788db784073eec6a61755c288f4dd41d6 Mon Sep 17 00:00:00 2001 From: maxpareschi Date: Fri, 26 Aug 2022 15:09:47 +0200 Subject: [PATCH 0727/2550] moved env logic inside matching check --- openpype/pipeline/workfile/path_resolving.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 4ab4a4936c..6d9e72dbd2 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -408,12 +408,8 @@ def get_custom_workfile_template( # add root dict anatomy_context_data["root"] = anatomy.roots - # extend anatomy context with os.environ - full_context_data = os.environ.copy() - full_context_data.update(anatomy_context_data) - # get task type for the task in context - current_task_type = full_context_data["task"]["type"] + current_task_type = anatomy_context_data["task"]["type"] # get path from matching profile matching_item = filter_profiles( @@ -423,6 +419,11 @@ def get_custom_workfile_template( # when path is available try to format it in case # there are some anatomy template strings if matching_item: + # extend anatomy context with os.environ to + # also allow formatting against env + full_context_data = os.environ.copy() + full_context_data.update(anatomy_context_data) + template = matching_item["path"][platform.system().lower()] return StringTemplate.format_strict_template( template, full_context_data From 0212f6fc06554945be11ecb02810f93c4103b620 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:11:14 +0200 Subject: [PATCH 0728/2550] renamed module.py to addon.py in unreal --- openpype/hosts/unreal/__init__.py | 4 ++-- openpype/hosts/unreal/{module.py => addon.py} | 4 ++-- openpype/hosts/unreal/lib.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) rename openpype/hosts/unreal/{module.py => addon.py} (92%) diff --git a/openpype/hosts/unreal/__init__.py b/openpype/hosts/unreal/__init__.py index 41222f4f94..42dd8f0ac4 100644 --- a/openpype/hosts/unreal/__init__.py +++ b/openpype/hosts/unreal/__init__.py @@ -1,6 +1,6 @@ -from .module import UnrealModule +from .addon import UnrealAddon __all__ = ( - "UnrealModule", + "UnrealAddon", ) diff --git a/openpype/hosts/unreal/module.py b/openpype/hosts/unreal/addon.py similarity index 92% rename from openpype/hosts/unreal/module.py rename to openpype/hosts/unreal/addon.py index 99c8851e8e..16736214c5 100644 --- a/openpype/hosts/unreal/module.py +++ b/openpype/hosts/unreal/addon.py @@ -5,14 +5,14 @@ from openpype.modules.interfaces import IHostAddon UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class UnrealModule(OpenPypeModule, IHostAddon): +class UnrealAddon(OpenPypeModule, IHostAddon): name = "unreal" host_name = "unreal" def initialize(self, module_settings): self.enabled = True - def 
add_implementation_envs(self, env, app) -> None: + def add_implementation_envs(self, env, app): """Modify environments to contain all required for implementation.""" # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation diff --git a/openpype/hosts/unreal/lib.py b/openpype/hosts/unreal/lib.py index 8c453b38b9..d02c6de357 100644 --- a/openpype/hosts/unreal/lib.py +++ b/openpype/hosts/unreal/lib.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """Unreal launching and project tools.""" -import sys + import os import platform import json @@ -9,7 +9,7 @@ import subprocess import re from pathlib import Path from collections import OrderedDict -from openpype.api import get_project_settings +from openpype.settings import get_project_settings def get_engine_versions(env=None): From f0ddc5b746cfb9be546860cc99d1121bf9f21b61 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:11:22 +0200 Subject: [PATCH 0729/2550] renamed 'tvpaint_module.py' to 'addon.py' --- openpype/hosts/tvpaint/__init__.py | 6 +++--- openpype/hosts/tvpaint/{tvpaint_module.py => addon.py} | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) rename openpype/hosts/tvpaint/{tvpaint_module.py => addon.py} (95%) diff --git a/openpype/hosts/tvpaint/__init__.py b/openpype/hosts/tvpaint/__init__.py index 0a84b575dc..b98680f204 100644 --- a/openpype/hosts/tvpaint/__init__.py +++ b/openpype/hosts/tvpaint/__init__.py @@ -1,12 +1,12 @@ -from .tvpaint_module import ( +from .addon import ( get_launch_script_path, - TVPaintModule, + TVPaintAddon, TVPAINT_ROOT_DIR, ) __all__ = ( "get_launch_script_path", - "TVPaintModule", + "TVPaintAddon", "TVPAINT_ROOT_DIR", ) diff --git a/openpype/hosts/tvpaint/tvpaint_module.py b/openpype/hosts/tvpaint/addon.py similarity index 95% rename from openpype/hosts/tvpaint/tvpaint_module.py rename to openpype/hosts/tvpaint/addon.py index 4b30ce667c..d710e63f93 100644 --- a/openpype/hosts/tvpaint/tvpaint_module.py +++ b/openpype/hosts/tvpaint/addon.py @@ -13,7 +13,7 @@ def get_launch_script_path(): ) -class TVPaintModule(OpenPypeModule, IHostAddon): +class TVPaintAddon(OpenPypeModule, IHostAddon): name = "tvpaint" host_name = "tvpaint" From 3e912a88f6367f7488f90648b223abcd501e8d86 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:12:19 +0200 Subject: [PATCH 0730/2550] renamed module.py to addon.py, changed addon name and host name --- openpype/hosts/traypublisher/__init__.py | 4 ++-- openpype/hosts/traypublisher/{module.py => addon.py} | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) rename openpype/hosts/traypublisher/{module.py => addon.py} (86%) diff --git a/openpype/hosts/traypublisher/__init__.py b/openpype/hosts/traypublisher/__init__.py index 4eb7bf3eef..77ba908ddd 100644 --- a/openpype/hosts/traypublisher/__init__.py +++ b/openpype/hosts/traypublisher/__init__.py @@ -1,6 +1,6 @@ -from .module import TrayPublishModule +from .addon import TrayPublishAddon __all__ = ( - "TrayPublishModule", + "TrayPublishAddon", ) diff --git a/openpype/hosts/traypublisher/module.py b/openpype/hosts/traypublisher/addon.py similarity index 86% rename from openpype/hosts/traypublisher/module.py rename to openpype/hosts/traypublisher/addon.py index c35ce2093a..c86c835ed9 100644 --- a/openpype/hosts/traypublisher/module.py +++ b/openpype/hosts/traypublisher/addon.py @@ -10,10 +10,10 @@ from openpype.modules.interfaces import ITrayAction, IHostAddon TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class TrayPublishModule(OpenPypeModule, IHostAddon, 
ITrayAction): +class TrayPublishAddon(OpenPypeModule, IHostAddon, ITrayAction): label = "New Publish (beta)" - name = "traypublish_tool" - host_name = "traypublish" + name = "traypublisher" + host_name = "traypublisher" def initialize(self, modules_settings): self.enabled = True @@ -28,7 +28,7 @@ class TrayPublishModule(OpenPypeModule, IHostAddon, ITrayAction): self._experimental_tools = ExperimentalTools() def tray_menu(self, *args, **kwargs): - super(TrayPublishModule, self).tray_menu(*args, **kwargs) + super(TrayPublishAddon, self).tray_menu(*args, **kwargs) traypublisher = self._experimental_tools.get("traypublisher") visible = False if traypublisher and traypublisher.enabled: @@ -53,7 +53,7 @@ class TrayPublishModule(OpenPypeModule, IHostAddon, ITrayAction): click_group.add_command(cli_main) -@click.group(TrayPublishModule.name, help="TrayPublisher related commands.") +@click.group(TrayPublishAddon.name, help="TrayPublisher related commands.") def cli_main(): pass From 4a35b4bea7610bedfd780882bde2a0216a3bde76 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:13:16 +0200 Subject: [PATCH 0731/2550] renamed standalonepublisher to addon --- openpype/hosts/standalonepublisher/__init__.py | 4 ++-- .../{standalonepublish_module.py => addon.py} | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) rename openpype/hosts/standalonepublisher/{standalonepublish_module.py => addon.py} (90%) diff --git a/openpype/hosts/standalonepublisher/__init__.py b/openpype/hosts/standalonepublisher/__init__.py index 394d5be397..f47fa6b573 100644 --- a/openpype/hosts/standalonepublisher/__init__.py +++ b/openpype/hosts/standalonepublisher/__init__.py @@ -1,6 +1,6 @@ -from .standalonepublish_module import StandAlonePublishModule +from .addon import StandAlonePublishAddon __all__ = ( - "StandAlonePublishModule", + "StandAlonePublishAddon", ) diff --git a/openpype/hosts/standalonepublisher/standalonepublish_module.py b/openpype/hosts/standalonepublisher/addon.py similarity index 90% rename from openpype/hosts/standalonepublisher/standalonepublish_module.py rename to openpype/hosts/standalonepublisher/addon.py index 21b47beb54..40a156ee70 100644 --- a/openpype/hosts/standalonepublisher/standalonepublish_module.py +++ b/openpype/hosts/standalonepublisher/addon.py @@ -10,9 +10,9 @@ from openpype.modules.interfaces import ITrayAction, IHostAddon STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostAddon): +class StandAlonePublishAddon(OpenPypeModule, ITrayAction, IHostAddon): label = "Publish" - name = "standalonepublish_tool" + name = "standalonepublisher" host_name = "standalonepublisher" def initialize(self, modules_settings): @@ -42,7 +42,7 @@ class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostAddon): @click.group( - StandAlonePublishModule.name, + StandAlonePublishAddon.name, help="StandalonePublisher related commands.") def cli_main(): pass From 7af8e8998465b33841440677a16bfe9ef870e9dc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:14:05 +0200 Subject: [PATCH 0732/2550] renamed maya to addon --- openpype/hosts/maya/__init__.py | 4 ++-- openpype/hosts/maya/{module.py => addon.py} | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) rename openpype/hosts/maya/{module.py => addon.py} (94%) diff --git a/openpype/hosts/maya/__init__.py b/openpype/hosts/maya/__init__.py index 72b4d5853c..860db766f3 100644 --- a/openpype/hosts/maya/__init__.py +++ 
b/openpype/hosts/maya/__init__.py @@ -1,6 +1,6 @@ -from .module import OpenPypeMaya +from .addon import MayaAddon __all__ = ( - "OpenPypeMaya", + "MayaAddon", ) diff --git a/openpype/hosts/maya/module.py b/openpype/hosts/maya/addon.py similarity index 94% rename from openpype/hosts/maya/module.py rename to openpype/hosts/maya/addon.py index 674b36b250..7b1f7bf754 100644 --- a/openpype/hosts/maya/module.py +++ b/openpype/hosts/maya/addon.py @@ -5,8 +5,8 @@ from openpype.modules.interfaces import IHostAddon MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class OpenPypeMaya(OpenPypeModule, IHostAddon): - name = "openpype_maya" +class MayaAddon(OpenPypeModule, IHostAddon): + name = "maya" host_name = "maya" def initialize(self, module_settings): From 621b2dbe882ea97c55d1910735a27e508fab303a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:14:33 +0200 Subject: [PATCH 0733/2550] renamed hiero to addon --- openpype/hosts/hiero/__init__.py | 6 +++--- openpype/hosts/hiero/{module.py => addon.py} | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) rename openpype/hosts/hiero/{module.py => addon.py} (97%) diff --git a/openpype/hosts/hiero/__init__.py b/openpype/hosts/hiero/__init__.py index a307e265d5..e6744d5aec 100644 --- a/openpype/hosts/hiero/__init__.py +++ b/openpype/hosts/hiero/__init__.py @@ -1,10 +1,10 @@ -from .module import ( +from .addon import ( HIERO_ROOT_DIR, - HieroModule, + HieroAddon, ) __all__ = ( "HIERO_ROOT_DIR", - "HieroModule", + "HieroAddon", ) diff --git a/openpype/hosts/hiero/module.py b/openpype/hosts/hiero/addon.py similarity index 97% rename from openpype/hosts/hiero/module.py rename to openpype/hosts/hiero/addon.py index 7883d2255f..3523e9aed7 100644 --- a/openpype/hosts/hiero/module.py +++ b/openpype/hosts/hiero/addon.py @@ -6,7 +6,7 @@ from openpype.modules.interfaces import IHostAddon HIERO_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class HieroModule(OpenPypeModule, IHostAddon): +class HieroAddon(OpenPypeModule, IHostAddon): name = "hiero" host_name = "hiero" From a991d4b6fe008db77ff4b83c18a14a7f9f95a07f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:15:14 +0200 Subject: [PATCH 0734/2550] renamed blender to addon --- openpype/hosts/blender/__init__.py | 4 ++-- openpype/hosts/blender/{module.py => addon.py} | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) rename openpype/hosts/blender/{module.py => addon.py} (98%) diff --git a/openpype/hosts/blender/__init__.py b/openpype/hosts/blender/__init__.py index 58d7ac656f..2a6603606a 100644 --- a/openpype/hosts/blender/__init__.py +++ b/openpype/hosts/blender/__init__.py @@ -1,6 +1,6 @@ -from .module import BlenderModule +from .addon import BlenderAddon __all__ = ( - "BlenderModule", + "BlenderAddon", ) diff --git a/openpype/hosts/blender/module.py b/openpype/hosts/blender/addon.py similarity index 98% rename from openpype/hosts/blender/module.py rename to openpype/hosts/blender/addon.py index 3db7973c17..3ee638a5bb 100644 --- a/openpype/hosts/blender/module.py +++ b/openpype/hosts/blender/addon.py @@ -5,7 +5,7 @@ from openpype.modules.interfaces import IHostAddon BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class BlenderModule(OpenPypeModule, IHostAddon): +class BlenderAddon(OpenPypeModule, IHostAddon): name = "blender" host_name = "blender" From 3d7b2179c855d466bebb580cb8082084bb2ab44b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:15:42 +0200 Subject: [PATCH 0735/2550] renamed aftereffects to addon --- 
openpype/hosts/aftereffects/__init__.py | 4 ++-- openpype/hosts/aftereffects/{module.py => addon.py} | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) rename openpype/hosts/aftereffects/{module.py => addon.py} (92%) diff --git a/openpype/hosts/aftereffects/__init__.py b/openpype/hosts/aftereffects/__init__.py index c9ad6aaeeb..ae750d05b6 100644 --- a/openpype/hosts/aftereffects/__init__.py +++ b/openpype/hosts/aftereffects/__init__.py @@ -1,6 +1,6 @@ -from .module import AfterEffectsModule +from .addon import AfterEffectsAddon __all__ = ( - "AfterEffectsModule", + "AfterEffectsAddon", ) diff --git a/openpype/hosts/aftereffects/module.py b/openpype/hosts/aftereffects/addon.py similarity index 92% rename from openpype/hosts/aftereffects/module.py rename to openpype/hosts/aftereffects/addon.py index dff9634ecf..94843e7dc5 100644 --- a/openpype/hosts/aftereffects/module.py +++ b/openpype/hosts/aftereffects/addon.py @@ -2,7 +2,7 @@ from openpype.modules import OpenPypeModule from openpype.modules.interfaces import IHostAddon -class AfterEffectsModule(OpenPypeModule, IHostAddon): +class AfterEffectsAddon(OpenPypeModule, IHostAddon): name = "aftereffects" host_name = "aftereffects" From 511bf71f61426a4794ca9bc9890b49d82ce7917e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:17:28 +0200 Subject: [PATCH 0736/2550] fix standalone publisher settings --- openpype/hosts/standalonepublisher/addon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/standalonepublisher/addon.py b/openpype/hosts/standalonepublisher/addon.py index 40a156ee70..98ec44d4e2 100644 --- a/openpype/hosts/standalonepublisher/addon.py +++ b/openpype/hosts/standalonepublisher/addon.py @@ -16,7 +16,7 @@ class StandAlonePublishAddon(OpenPypeModule, ITrayAction, IHostAddon): host_name = "standalonepublisher" def initialize(self, modules_settings): - self.enabled = modules_settings[self.name]["enabled"] + self.enabled = modules_settings["standalonepublish_tool"]["enabled"] self.publish_paths = [ os.path.join(STANDALONEPUBLISH_ROOT_DIR, "plugins", "publish") ] From 310bc6b70523e1e24b9060d5b3163678dbca3417 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 15:17:51 +0200 Subject: [PATCH 0737/2550] renamed nuke to addon --- openpype/hosts/nuke/__init__.py | 6 +++--- openpype/hosts/nuke/{module.py => addon.py} | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) rename openpype/hosts/nuke/{module.py => addon.py} (97%) diff --git a/openpype/hosts/nuke/__init__.py b/openpype/hosts/nuke/__init__.py index 718307583e..8ab565939b 100644 --- a/openpype/hosts/nuke/__init__.py +++ b/openpype/hosts/nuke/__init__.py @@ -1,10 +1,10 @@ -from .module import ( +from .addon import ( NUKE_ROOT_DIR, - NukeModule, + NukeAddon, ) __all__ = ( "NUKE_ROOT_DIR", - "NukeModule", + "NukeAddon", ) diff --git a/openpype/hosts/nuke/module.py b/openpype/hosts/nuke/addon.py similarity index 97% rename from openpype/hosts/nuke/module.py rename to openpype/hosts/nuke/addon.py index 444aa75ff2..54e4da5195 100644 --- a/openpype/hosts/nuke/module.py +++ b/openpype/hosts/nuke/addon.py @@ -6,7 +6,7 @@ from openpype.modules.interfaces import IHostAddon NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) -class NukeModule(OpenPypeModule, IHostAddon): +class NukeAddon(OpenPypeModule, IHostAddon): name = "nuke" host_name = "nuke" From 6801a719d33378e1729cc9022d6e924e7778b924 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 16:06:59 +0200 Subject: [PATCH 0738/2550] added method to get 
last available variant in application manager --- openpype/lib/applications.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index eaa4c1a0a8..e249ae4f1c 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -469,6 +469,19 @@ class ApplicationManager: for tool in group: self.tools[tool.full_name] = tool + def find_latest_available_variant_for_group(self, group_name): + group = self.app_groups.get(group_name) + if group is None or not group.enabled: + return None + + output = None + for _, variant in reversed(sorted(group.variants.items())): + executable = variant.find_executable() + if executable: + output = variant + break + return output + def launch(self, app_name, **data): """Launch procedure. From 5736fa2382e7d2cb63e3a007c3b04bc2805243bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 16:07:16 +0200 Subject: [PATCH 0739/2550] fix testing classes --- tests/lib/testing_classes.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 2b4d7deb48..64676f62f4 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -12,8 +12,6 @@ import platform from tests.lib.db_handler import DBHandler from tests.lib.file_handler import RemoteFileHandler -from openpype.lib.remote_publish import find_variant_key - class BaseTest: """Empty base test class""" @@ -210,7 +208,10 @@ class PublishTest(ModuleUnitTest): application_manager = ApplicationManager() if not app_variant: - app_variant = find_variant_key(application_manager, self.APP) + variant = ( + application_manager.find_latest_available_variant_for_group( + self.APP)) + app_variant = variant.name yield "{}/{}".format(self.APP, app_variant) @@ -342,4 +343,4 @@ class HostFixtures(PublishTest): @pytest.fixture(scope="module") def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" - raise NotImplementedError \ No newline at end of file + raise NotImplementedError From a0c3eb6f809e6aa6c7ee0c43f330a67e2466594b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 16:32:34 +0200 Subject: [PATCH 0740/2550] removed unnecessary lines --- openpype/modules/base.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c96ca02ab7..09aea50424 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -397,9 +397,6 @@ def _load_modules(): log.error(msg, exc_info=True) - - - @six.add_metaclass(ABCMeta) class OpenPypeModule: """Base class of pype module. 
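For reference, the 'find_latest_available_variant_for_group' helper added in PATCH 0738 above walks a group's variants from highest to lowest and returns the first one whose executable can actually be found, or None when the group is unknown, disabled, or has no launchable variant; PATCH 0739 then builds the test app identifier from the returned variant's name. A minimal usage sketch (not part of the patches), assuming 'ApplicationManager' is imported from 'openpype.lib.applications' where the method is defined, and using "maya" purely as an example group name:

from openpype.lib.applications import ApplicationManager

manager = ApplicationManager()

# "maya" is an illustrative group name, not taken from the patches above.
variant = manager.find_latest_available_variant_for_group("maya")
if variant is None:
    # Group is missing/disabled, or no variant has a discoverable executable.
    print("No launchable variant found")
else:
    # Variants are iterated in reverse-sorted order, so this is the highest
    # launchable one; tests above combine it as "<group>/<variant name>".
    print("{}/{}".format("maya", variant.name))

Returning the whole variant object (rather than only its name) is what lets the testing classes read 'variant.name' while keeping the rest of the variant data available to callers.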
From 4fda8d6ff2602421bcc2eacd74d4ff8b50525d4e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 26 Aug 2022 16:34:36 +0200 Subject: [PATCH 0741/2550] fix addon name --- openpype/tools/standalonepublish/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py index 3ceeb3ad48..081235c91c 100644 --- a/openpype/tools/standalonepublish/app.py +++ b/openpype/tools/standalonepublish/app.py @@ -236,7 +236,7 @@ def main(): signal.signal(signal.SIGTERM, signal_handler) modules_manager = ModulesManager() - module = modules_manager.modules_by_name["standalonepublish_tool"] + module = modules_manager.modules_by_name["standalonepublisher"] window = Window(module.publish_paths) window.show() From 55849039b554bfab52eefaa81906dba9a6d02d06 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 27 Aug 2022 04:12:30 +0000 Subject: [PATCH 0742/2550] [Automated] Bump version --- CHANGELOG.md | 55 ++++++++++++++++++++++++--------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 31 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a45f65b6f7..2a8e962085 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.14.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD) @@ -9,22 +9,49 @@ - Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) - Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) +**🆕 New features** + +- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) + **🚀 Enhancements** +- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) +- General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) +- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) - Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) +- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) **🐛 Bug fixes** +- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) +- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) +- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) +- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) - RoyalRender: handle host name that is not set 
[\#3695](https://github.com/pypeclub/OpenPype/pull/3695) - Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) +- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) **🔀 Refactored code** +- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) +- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) +- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) +- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) +- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) +- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) +- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) +- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) +- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713) - Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) - TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) - StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) @@ -33,6 +60,7 @@ **Merged pull requests:** +- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) - Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) - Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) @@ -40,10 +68,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) -**🆕 New features** - -- Maya: Build workfile by template [\#3578](https://github.com/pypeclub/OpenPype/pull/3578) - **🚀 Enhancements** - Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) @@ -69,7 +93,6 @@ - Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) - TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) - General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) -- General: Workfiles builder using query functions [\#3598](https://github.com/pypeclub/OpenPype/pull/3598) **Merged pull requests:** @@ -91,12 +114,6 @@ - Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) - Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) - Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) -- General: Add context to pyblish context [\#3594](https://github.com/pypeclub/OpenPype/pull/3594) -- Kitsu: Shot&Sequence name with prefix over appends 
[\#3593](https://github.com/pypeclub/OpenPype/pull/3593) -- Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) -- General: Python module appdirs from git [\#3589](https://github.com/pypeclub/OpenPype/pull/3589) -- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) -- General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) **🐛 Bug fixes** @@ -109,35 +126,21 @@ - AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) - Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) - TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) -- Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. [\#3590](https://github.com/pypeclub/OpenPype/pull/3590) -- Fix general settings environment variables resolution [\#3587](https://github.com/pypeclub/OpenPype/pull/3587) -- Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) -- General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) -- Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) -- Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) **🔀 Refactored code** - General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) - General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) -- General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) -- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) -- General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) **Merged pull requests:** - Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) - Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) -- Enable write color sets on animation publish automatically [\#3582](https://github.com/pypeclub/OpenPype/pull/3582) ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) -**🐛 Bug fixes** - -- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) diff --git a/openpype/version.py b/openpype/version.py index e738689c20..7894bb8bf4 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.1-nightly.2" +__version__ = "3.14.1-nightly.3" diff --git a/pyproject.toml b/pyproject.toml index bfc570f597..75e4721d7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.1-nightly.2" # OpenPype +version = "3.14.1-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From f29f45c3803210082b2fb8de52fac856658b34f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 27 Aug 2022 10:09:54 +0200 Subject: [PATCH 0743/2550] removed duplicated plugin --- .../hosts/maya/plugins/create/create_ass0.py | 48 ------------------- 1 file changed, 48 deletions(-) delete mode 100644 openpype/hosts/maya/plugins/create/create_ass0.py diff --git a/openpype/hosts/maya/plugins/create/create_ass0.py b/openpype/hosts/maya/plugins/create/create_ass0.py deleted file mode 100644 index 4523aa02c5..0000000000 --- a/openpype/hosts/maya/plugins/create/create_ass0.py +++ /dev/null @@ -1,48 +0,0 @@ -from collections import OrderedDicta - -from openpype.hosts.maya.api import ( - lib, - plugin -) - -from maya import cmds - - -class CreateAss(plugin.Creator): - """Arnold Archive""" - - name = "ass" - label = "Ass StandIn" - family = "ass" - icon = "cube" - - def __init__(self, *args, **kwargs): - super(CreateAss, self).__init__(*args, **kwargs) - - # Add animation data - self.data.update(lib.collect_animation_data()) - - # Vertex colors with the geometry - self.data["exportSequence"] = False - - def process(self): - instance = super(CreateAss, self).process() - - # data = OrderedDict(**self.data) - - - - nodes = list() - - if (self.options or {}).get("useSelection"): - nodes = cmds.ls(selection=True) - - cmds.sets(nodes, rm=instance) - - assContent = cmds.sets(name="content_SET") - assProxy = cmds.sets(name="proxy_SET", empty=True) - cmds.sets([assContent, assProxy], forceElement=instance) - - # self.log.info(data) - # - # self.data = data From 38932ba3012668e8ca2a239ffa2b68b002d979d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 27 Aug 2022 10:38:01 +0200 Subject: [PATCH 0744/2550] fixed interface name --- openpype/hosts/fusion/addon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py index 97fb262517..e257005061 100644 --- a/openpype/hosts/fusion/addon.py +++ b/openpype/hosts/fusion/addon.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class FusionAddon(OpenPypeModule, IHostModule): +class FusionAddon(OpenPypeModule, IHostAddon): name = "fusion" host_name = "fusion" From 981b56d1e4f38b45b5ab809760b8ede8dedb7d89 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 27 Aug 2022 10:39:12 +0200 Subject: [PATCH 0745/2550] fixed interface name --- openpype/hosts/houdini/addon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/addon.py b/openpype/hosts/houdini/addon.py index 255d6ed53f..8d88e83c56 100644 --- a/openpype/hosts/houdini/addon.py +++ b/openpype/hosts/houdini/addon.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon HOUDINI_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class HoudiniAddon(OpenPypeModule, IHostModule): +class HoudiniAddon(OpenPypeModule, IHostAddon): name = "houdini" host_name = "houdini" From b21ac83866b375320ec3463b040f753b2a52d9d5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 27 Aug 2022 10:40:24 +0200 Subject: [PATCH 0746/2550] fixed innterface name --- openpype/hosts/resolve/addon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) 
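Note: the interface fixes in this run of patches all converge on the same addon shape. Below is a minimal sketch of that shape for a hypothetical host called "example" (not one of the patched hosts); only the attributes visible in these diffs plus a stub initialize, in the style the standalone publisher addon uses, are shown. Real addons also add launch-hook and environment methods.

from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostAddon


class ExampleAddon(OpenPypeModule, IHostAddon):
    """Hypothetical host addon following the renamed IHostAddon interface."""

    name = "example"
    host_name = "example"

    def initialize(self, module_settings):
        # Enable unconditionally here; real addons usually read their own
        # settings block instead, as StandAlonePublishAddon does above.
        self.enabled = True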
diff --git a/openpype/hosts/resolve/addon.py b/openpype/hosts/resolve/addon.py index af09448a43..a31da52a6d 100644 --- a/openpype/hosts/resolve/addon.py +++ b/openpype/hosts/resolve/addon.py @@ -1,12 +1,12 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon from .utils import RESOLVE_ROOT_DIR -class ResolveAddon(OpenPypeModule, IHostModule): +class ResolveAddon(OpenPypeModule, IHostAddon): name = "resolve" host_name = "resolve" From 892af6ad9e6e49e74573710d5d4b3997bf9e3716 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 27 Aug 2022 10:41:13 +0200 Subject: [PATCH 0747/2550] fixed interface name --- openpype/hosts/flame/addon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/addon.py b/openpype/hosts/flame/addon.py index 7e68378f4b..5a34413bb0 100644 --- a/openpype/hosts/flame/addon.py +++ b/openpype/hosts/flame/addon.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class FlameAddon(OpenPypeModule, IHostModule): +class FlameAddon(OpenPypeModule, IHostAddon): name = "flame" host_name = "flame" From 2cfa25682f72d630d7efed9e7b1a27c43db86269 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 11:55:25 +0200 Subject: [PATCH 0748/2550] split host into host and interfaces --- openpype/host/__init__.py | 4 + openpype/host/host.py | 371 +----------------------------------- openpype/host/interfaces.py | 370 +++++++++++++++++++++++++++++++++++ 3 files changed, 375 insertions(+), 370 deletions(-) create mode 100644 openpype/host/interfaces.py diff --git a/openpype/host/__init__.py b/openpype/host/__init__.py index 84a2fa930a..77df655788 100644 --- a/openpype/host/__init__.py +++ b/openpype/host/__init__.py @@ -1,5 +1,8 @@ from .host import ( HostBase, +) + +from .interfaces import ( IWorkfileHost, ILoadHost, INewPublisher, @@ -7,6 +10,7 @@ from .host import ( __all__ = ( "HostBase", + "IWorkfileHost", "ILoadHost", "INewPublisher", diff --git a/openpype/host/host.py b/openpype/host/host.py index 9cdbb819e1..99f7868727 100644 --- a/openpype/host/host.py +++ b/openpype/host/host.py @@ -1,37 +1,12 @@ import logging import contextlib -from abc import ABCMeta, abstractproperty, abstractmethod +from abc import ABCMeta, abstractproperty import six # NOTE can't import 'typing' because of issues in Maya 2020 # - shiboken crashes on 'typing' module import -class MissingMethodsError(ValueError): - """Exception when host miss some required methods for specific workflow. - - Args: - host (HostBase): Host implementation where are missing methods. - missing_methods (list[str]): List of missing methods. - """ - - def __init__(self, host, missing_methods): - joined_missing = ", ".join( - ['"{}"'.format(item) for item in missing_methods] - ) - if isinstance(host, HostBase): - host_name = host.name - else: - try: - host_name = host.__file__.replace("\\", "/").split("/")[-3] - except Exception: - host_name = str(host) - message = ( - "Host \"{}\" miss methods {}".format(host_name, joined_missing) - ) - super(MissingMethodsError, self).__init__(message) - - @six.add_metaclass(ABCMeta) class HostBase(object): """Base of host implementation class. 
@@ -185,347 +160,3 @@ class HostBase(object): yield finally: pass - - -class ILoadHost: - """Implementation requirements to be able use reference of representations. - - The load plugins can do referencing even without implementation of methods - here, but switch and removement of containers would not be possible. - - Questions: - - Is list container dependency of host or load plugins? - - Should this be directly in HostBase? - - how to find out if referencing is available? - - do we need to know that? - """ - - @staticmethod - def get_missing_load_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - loading. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Object of host where to look for - required methods. - - Returns: - list[str]: Missing method implementations for loading workflow. - """ - - if isinstance(host, ILoadHost): - return [] - - required = ["ls"] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_load_methods(host): - """Validate implemented methods of "old type" host for load workflow. - - Args: - Union[ModuleType, HostBase]: Object of host to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - missing = ILoadHost.get_missing_load_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_containers(self): - """Retreive referenced containers from scene. - - This can be implemented in hosts where referencing can be used. - - Todo: - Rename function to something more self explanatory. - Suggestion: 'get_containers' - - Returns: - list[dict]: Information about loaded containers. - """ - - pass - - # --- Deprecated method names --- - def ls(self): - """Deprecated variant of 'get_containers'. - - Todo: - Remove when all usages are replaced. - """ - - return self.get_containers() - - -@six.add_metaclass(ABCMeta) -class IWorkfileHost: - """Implementation requirements to be able use workfile utils and tool.""" - - @staticmethod - def get_missing_workfile_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - workfiles. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Object of host where to look for - required methods. - - Returns: - list[str]: Missing method implementations for workfiles workflow. - """ - - if isinstance(host, IWorkfileHost): - return [] - - required = [ - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - ] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_workfile_methods(host): - """Validate methods of "old type" host for workfiles workflow. - - Args: - Union[ModuleType, HostBase]: Object of host to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - - missing = IWorkfileHost.get_missing_workfile_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_workfile_extensions(self): - """Extensions that can be used as save. - - Questions: - This could potentially use 'HostDefinition'. 
- """ - - return [] - - @abstractmethod - def save_workfile(self, dst_path=None): - """Save currently opened scene. - - Args: - dst_path (str): Where the current scene should be saved. Or use - current path if 'None' is passed. - """ - - pass - - @abstractmethod - def open_workfile(self, filepath): - """Open passed filepath in the host. - - Args: - filepath (str): Path to workfile. - """ - - pass - - @abstractmethod - def get_current_workfile(self): - """Retreive path to current opened file. - - Returns: - str: Path to file which is currently opened. - None: If nothing is opened. - """ - - return None - - def workfile_has_unsaved_changes(self): - """Currently opened scene is saved. - - Not all hosts can know if current scene is saved because the API of - DCC does not support it. - - Returns: - bool: True if scene is saved and False if has unsaved - modifications. - None: Can't tell if workfiles has modifications. - """ - - return None - - def work_root(self, session): - """Modify workdir per host. - - Default implementation keeps workdir untouched. - - Warnings: - We must handle this modification with more sofisticated way because - this can't be called out of DCC so opening of last workfile - (calculated before DCC is launched) is complicated. Also breaking - defined work template is not a good idea. - Only place where it's really used and can make sense is Maya. There - workspace.mel can modify subfolders where to look for maya files. - - Args: - session (dict): Session context data. - - Returns: - str: Path to new workdir. - """ - - return session["AVALON_WORKDIR"] - - # --- Deprecated method names --- - def file_extensions(self): - """Deprecated variant of 'get_workfile_extensions'. - - Todo: - Remove when all usages are replaced. - """ - return self.get_workfile_extensions() - - def save_file(self, dst_path=None): - """Deprecated variant of 'save_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - self.save_workfile() - - def open_file(self, filepath): - """Deprecated variant of 'open_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.open_workfile(filepath) - - def current_file(self): - """Deprecated variant of 'get_current_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.get_current_workfile() - - def has_unsaved_changes(self): - """Deprecated variant of 'workfile_has_unsaved_changes'. - - Todo: - Remove when all usages are replaced. - """ - - return self.workfile_has_unsaved_changes() - - -class INewPublisher: - """Functions related to new creation system in new publisher. - - New publisher is not storing information only about each created instance - but also some global data. At this moment are data related only to context - publish plugins but that can extend in future. - """ - - @staticmethod - def get_missing_publish_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - new publish creation. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Host module where to look for - required methods. - - Returns: - list[str]: Missing method implementations for new publsher - workflow. 
- """ - - if isinstance(host, INewPublisher): - return [] - - required = [ - "get_context_data", - "update_context_data", - ] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_publish_methods(host): - """Validate implemented methods of "old type" host. - - Args: - Union[ModuleType, HostBase]: Host module to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - missing = INewPublisher.get_missing_publish_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_context_data(self): - """Get global data related to creation-publishing from workfile. - - These data are not related to any created instance but to whole - publishing context. Not saving/returning them will cause that each - reset of publishing resets all values to default ones. - - Context data can contain information about enabled/disabled publish - plugins or other values that can be filled by artist. - - Returns: - dict: Context data stored using 'update_context_data'. - """ - - pass - - @abstractmethod - def update_context_data(self, data, changes): - """Store global context data to workfile. - - Called when some values in context data has changed. - - Without storing the values in a way that 'get_context_data' would - return them will each reset of publishing cause loose of filled values - by artist. Best practice is to store values into workfile, if possible. - - Args: - data (dict): New data as are. - changes (dict): Only data that has been changed. Each value has - tuple with '(, )' value. - """ - - pass diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py new file mode 100644 index 0000000000..cbf12b0d13 --- /dev/null +++ b/openpype/host/interfaces.py @@ -0,0 +1,370 @@ +from abc import ABCMeta, abstractmethod +import six + + +class MissingMethodsError(ValueError): + """Exception when host miss some required methods for specific workflow. + + Args: + host (HostBase): Host implementation where are missing methods. + missing_methods (list[str]): List of missing methods. + """ + + def __init__(self, host, missing_methods): + joined_missing = ", ".join( + ['"{}"'.format(item) for item in missing_methods] + ) + host_name = getattr(host, "name", None) + if not host_name: + try: + host_name = host.__file__.replace("\\", "/").split("/")[-3] + except Exception: + host_name = str(host) + message = ( + "Host \"{}\" miss methods {}".format(host_name, joined_missing) + ) + super(MissingMethodsError, self).__init__(message) + + +class ILoadHost: + """Implementation requirements to be able use reference of representations. + + The load plugins can do referencing even without implementation of methods + here, but switch and removement of containers would not be possible. + + Questions: + - Is list container dependency of host or load plugins? + - Should this be directly in HostBase? + - how to find out if referencing is available? + - do we need to know that? + """ + + @staticmethod + def get_missing_load_methods(host): + """Look for missing methods on "old type" host implementation. + + Method is used for validation of implemented functions related to + loading. Checks only existence of methods. + + Args: + Union[ModuleType, HostBase]: Object of host where to look for + required methods. + + Returns: + list[str]: Missing method implementations for loading workflow. 
+ """ + + if isinstance(host, ILoadHost): + return [] + + required = ["ls"] + missing = [] + for name in required: + if not hasattr(host, name): + missing.append(name) + return missing + + @staticmethod + def validate_load_methods(host): + """Validate implemented methods of "old type" host for load workflow. + + Args: + Union[ModuleType, HostBase]: Object of host to validate. + + Raises: + MissingMethodsError: If there are missing methods on host + implementation. + """ + missing = ILoadHost.get_missing_load_methods(host) + if missing: + raise MissingMethodsError(host, missing) + + @abstractmethod + def get_containers(self): + """Retreive referenced containers from scene. + + This can be implemented in hosts where referencing can be used. + + Todo: + Rename function to something more self explanatory. + Suggestion: 'get_containers' + + Returns: + list[dict]: Information about loaded containers. + """ + + pass + + # --- Deprecated method names --- + def ls(self): + """Deprecated variant of 'get_containers'. + + Todo: + Remove when all usages are replaced. + """ + + return self.get_containers() + + +@six.add_metaclass(ABCMeta) +class IWorkfileHost: + """Implementation requirements to be able use workfile utils and tool.""" + + @staticmethod + def get_missing_workfile_methods(host): + """Look for missing methods on "old type" host implementation. + + Method is used for validation of implemented functions related to + workfiles. Checks only existence of methods. + + Args: + Union[ModuleType, HostBase]: Object of host where to look for + required methods. + + Returns: + list[str]: Missing method implementations for workfiles workflow. + """ + + if isinstance(host, IWorkfileHost): + return [] + + required = [ + "open_file", + "save_file", + "current_file", + "has_unsaved_changes", + "file_extensions", + "work_root", + ] + missing = [] + for name in required: + if not hasattr(host, name): + missing.append(name) + return missing + + @staticmethod + def validate_workfile_methods(host): + """Validate methods of "old type" host for workfiles workflow. + + Args: + Union[ModuleType, HostBase]: Object of host to validate. + + Raises: + MissingMethodsError: If there are missing methods on host + implementation. + """ + + missing = IWorkfileHost.get_missing_workfile_methods(host) + if missing: + raise MissingMethodsError(host, missing) + + @abstractmethod + def get_workfile_extensions(self): + """Extensions that can be used as save. + + Questions: + This could potentially use 'HostDefinition'. + """ + + return [] + + @abstractmethod + def save_workfile(self, dst_path=None): + """Save currently opened scene. + + Args: + dst_path (str): Where the current scene should be saved. Or use + current path if 'None' is passed. + """ + + pass + + @abstractmethod + def open_workfile(self, filepath): + """Open passed filepath in the host. + + Args: + filepath (str): Path to workfile. + """ + + pass + + @abstractmethod + def get_current_workfile(self): + """Retreive path to current opened file. + + Returns: + str: Path to file which is currently opened. + None: If nothing is opened. + """ + + return None + + def workfile_has_unsaved_changes(self): + """Currently opened scene is saved. + + Not all hosts can know if current scene is saved because the API of + DCC does not support it. + + Returns: + bool: True if scene is saved and False if has unsaved + modifications. + None: Can't tell if workfiles has modifications. + """ + + return None + + def work_root(self, session): + """Modify workdir per host. 
+ + Default implementation keeps workdir untouched. + + Warnings: + We must handle this modification with more sofisticated way because + this can't be called out of DCC so opening of last workfile + (calculated before DCC is launched) is complicated. Also breaking + defined work template is not a good idea. + Only place where it's really used and can make sense is Maya. There + workspace.mel can modify subfolders where to look for maya files. + + Args: + session (dict): Session context data. + + Returns: + str: Path to new workdir. + """ + + return session["AVALON_WORKDIR"] + + # --- Deprecated method names --- + def file_extensions(self): + """Deprecated variant of 'get_workfile_extensions'. + + Todo: + Remove when all usages are replaced. + """ + return self.get_workfile_extensions() + + def save_file(self, dst_path=None): + """Deprecated variant of 'save_workfile'. + + Todo: + Remove when all usages are replaced. + """ + + self.save_workfile() + + def open_file(self, filepath): + """Deprecated variant of 'open_workfile'. + + Todo: + Remove when all usages are replaced. + """ + + return self.open_workfile(filepath) + + def current_file(self): + """Deprecated variant of 'get_current_workfile'. + + Todo: + Remove when all usages are replaced. + """ + + return self.get_current_workfile() + + def has_unsaved_changes(self): + """Deprecated variant of 'workfile_has_unsaved_changes'. + + Todo: + Remove when all usages are replaced. + """ + + return self.workfile_has_unsaved_changes() + + +class INewPublisher: + """Functions related to new creation system in new publisher. + + New publisher is not storing information only about each created instance + but also some global data. At this moment are data related only to context + publish plugins but that can extend in future. + """ + + @staticmethod + def get_missing_publish_methods(host): + """Look for missing methods on "old type" host implementation. + + Method is used for validation of implemented functions related to + new publish creation. Checks only existence of methods. + + Args: + Union[ModuleType, HostBase]: Host module where to look for + required methods. + + Returns: + list[str]: Missing method implementations for new publsher + workflow. + """ + + if isinstance(host, INewPublisher): + return [] + + required = [ + "get_context_data", + "update_context_data", + ] + missing = [] + for name in required: + if not hasattr(host, name): + missing.append(name) + return missing + + @staticmethod + def validate_publish_methods(host): + """Validate implemented methods of "old type" host. + + Args: + Union[ModuleType, HostBase]: Host module to validate. + + Raises: + MissingMethodsError: If there are missing methods on host + implementation. + """ + missing = INewPublisher.get_missing_publish_methods(host) + if missing: + raise MissingMethodsError(host, missing) + + @abstractmethod + def get_context_data(self): + """Get global data related to creation-publishing from workfile. + + These data are not related to any created instance but to whole + publishing context. Not saving/returning them will cause that each + reset of publishing resets all values to default ones. + + Context data can contain information about enabled/disabled publish + plugins or other values that can be filled by artist. + + Returns: + dict: Context data stored using 'update_context_data'. + """ + + pass + + @abstractmethod + def update_context_data(self, data, changes): + """Store global context data to workfile. + + Called when some values in context data has changed. 
+ + Without storing the values in a way that 'get_context_data' would + return them will each reset of publishing cause loose of filled values + by artist. Best practice is to store values into workfile, if possible. + + Args: + data (dict): New data as are. + changes (dict): Only data that has been changed. Each value has + tuple with '(, )' value. + """ + + pass From 5d12773b6d53e2bc5dda2d3a890ce347941c8075 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 11:55:46 +0200 Subject: [PATCH 0749/2550] moved HostDirnap into openpype.host --- openpype/host/__init__.py | 5 + openpype/host/dirmap.py | 197 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 202 insertions(+) create mode 100644 openpype/host/dirmap.py diff --git a/openpype/host/__init__.py b/openpype/host/__init__.py index 77df655788..519888fce3 100644 --- a/openpype/host/__init__.py +++ b/openpype/host/__init__.py @@ -8,10 +8,15 @@ from .interfaces import ( INewPublisher, ) +from .dirmap import HostDirmap + + __all__ = ( "HostBase", "IWorkfileHost", "ILoadHost", "INewPublisher", + + "HostDirmap", ) diff --git a/openpype/host/dirmap.py b/openpype/host/dirmap.py new file mode 100644 index 0000000000..901bbb185c --- /dev/null +++ b/openpype/host/dirmap.py @@ -0,0 +1,197 @@ +import os +from abc import ABCMeta, abstractmethod + +import six + +from openpype.lib import Logger +from openpype.modules import ModulesManager +from openpype.settings import get_project_settings +from openpype.settings.lib import get_site_local_overrides + + +@six.add_metaclass(ABCMeta) +class HostDirmap(object): + """Abstract class for running dirmap on a workfile in a host. + + Dirmap is used to translate paths inside of host workfile from one + OS to another. (Eg. arstist created workfile on Win, different artists + opens same file on Linux.) + + Expects methods to be implemented inside of host: + on_dirmap_enabled: run host code for enabling dirmap + do_dirmap: run host code to do actual remapping + """ + + def __init__( + self, host_name, project_name, project_settings=None, sync_module=None + ): + self.host_name = host_name + self.project_name = project_name + self._project_settings = project_settings + self._sync_module = sync_module # to limit reinit of Modules + self._log = None + self._mapping = None # cache mapping + + @property + def sync_module(self): + if self._sync_module is None: + manager = ModulesManager() + self._sync_module = manager["sync_server"] + return self._sync_module + + @property + def project_settings(self): + if self._project_settings is None: + self._project_settings = get_project_settings(self.project_name) + return self._project_settings + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + @abstractmethod + def on_enable_dirmap(self): + """Run host dependent operation for enabling dirmap if necessary.""" + pass + + @abstractmethod + def dirmap_routine(self, source_path, destination_path): + """Run host dependent remapping from source_path to destination_path""" + pass + + def process_dirmap(self): + # type: (dict) -> None + """Go through all paths in Settings and set them using `dirmap`. + + If artists has Site Sync enabled, take dirmap mapping directly from + Local Settings when artist is syncing workfile locally. + + Args: + project_settings (dict): Settings for current project. 
+ """ + + if not self._mapping: + self._mapping = self.get_mappings(self.project_settings) + if not self._mapping: + return + + self.log.info("Processing directory mapping ...") + self.on_enable_dirmap() + self.log.info("mapping:: {}".format(self._mapping)) + + for k, sp in enumerate(self._mapping["source-path"]): + dst = self._mapping["destination-path"][k] + try: + print("{} -> {}".format(sp, dst)) + self.dirmap_routine(sp, dst) + except IndexError: + # missing corresponding destination path + self.log.error(( + "invalid dirmap mapping, missing corresponding" + " destination directory." + )) + break + except RuntimeError: + self.log.error( + "invalid path {} -> {}, mapping not registered".format( + sp, dst + ) + ) + continue + + def get_mappings(self, project_settings): + """Get translation from source-path to destination-path. + + It checks if Site Sync is enabled and user chose to use local + site, in that case configuration in Local Settings takes precedence + """ + + local_mapping = self._get_local_sync_dirmap(project_settings) + dirmap_label = "{}-dirmap".format(self.host_name) + if ( + not self.project_settings[self.host_name].get(dirmap_label) + and not local_mapping + ): + return {} + mapping_settings = self.project_settings[self.host_name][dirmap_label] + mapping_enabled = mapping_settings["enabled"] or bool(local_mapping) + if not mapping_enabled: + return {} + + mapping = ( + local_mapping + or mapping_settings["paths"] + or {} + ) + + if ( + not mapping + or not mapping.get("destination-path") + or not mapping.get("source-path") + ): + return {} + return mapping + + def _get_local_sync_dirmap(self, project_settings): + """ + Returns dirmap if synch to local project is enabled. + + Only valid mapping is from roots of remote site to local site set + in Local Settings. 
+ + Args: + project_settings (dict) + Returns: + dict : { "source-path": [XXX], "destination-path": [YYYY]} + """ + + mapping = {} + + if not project_settings["global"]["sync_server"]["enabled"]: + return mapping + + project_name = os.getenv("AVALON_PROJECT") + + active_site = self.sync_module.get_local_normalized_site( + self.sync_module.get_active_site(project_name)) + remote_site = self.sync_module.get_local_normalized_site( + self.sync_module.get_remote_site(project_name)) + self.log.debug( + "active {} - remote {}".format(active_site, remote_site) + ) + + if ( + active_site == "local" + and project_name in self.sync_module.get_enabled_projects() + and active_site != remote_site + ): + sync_settings = self.sync_module.get_sync_project_setting( + project_name, + exclude_locals=False, + cached=False) + + active_overrides = get_site_local_overrides( + project_name, active_site) + remote_overrides = get_site_local_overrides( + project_name, remote_site) + + self.log.debug("local overrides {}".format(active_overrides)) + self.log.debug("remote overrides {}".format(remote_overrides)) + for root_name, active_site_dir in active_overrides.items(): + remote_site_dir = ( + remote_overrides.get(root_name) + or sync_settings["sites"][remote_site]["root"][root_name] + ) + if os.path.isdir(active_site_dir): + if "destination-path" not in mapping: + mapping["destination-path"] = [] + mapping["destination-path"].append(active_site_dir) + + if "source-path" not in mapping: + mapping["source-path"] = [] + mapping["source-path"].append(remote_site_dir) + + self.log.debug("local sync mapping:: {}".format(mapping)) + return mapping From 761e52b2dca2d0288a1aa39c2bb95c993e42b1a1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 12:00:59 +0200 Subject: [PATCH 0750/2550] use new HostDirmap location in maya --- openpype/hosts/maya/api/pipeline.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f565f6a308..c9f22fe119 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -9,14 +9,18 @@ import maya.api.OpenMaya as om import pyblish.api from openpype.settings import get_project_settings -from openpype.host import HostBase, IWorkfileHost, ILoadHost import openpype.hosts.maya +from openpype.host import ( + HostBase, + IWorkfileHost, + ILoadHost, + HostDirmap, +) from openpype.tools.utils import host_tools from openpype.lib import ( register_event_callback, emit_event ) -from openpype.lib.path_tools import HostDirmap from openpype.pipeline import ( legacy_io, register_loader_plugin_path, @@ -59,9 +63,10 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): self._op_events = {} def install(self): - project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + project_name = os.getenv("AVALON_PROJECT") + project_settings = get_project_settings(project_name) # process path mapping - dirmap_processor = MayaDirmap("maya", project_settings) + dirmap_processor = MayaDirmap("maya", project_name, project_settings) dirmap_processor.process_dirmap() pyblish.api.register_plugin_path(PUBLISH_PATH) From fcb047770ad41364bbb9aa50ab40765fd43132cf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 12:07:51 +0200 Subject: [PATCH 0751/2550] fix import in collect ftrack api --- .../modules/ftrack/plugins/publish/collect_ftrack_family.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py index 5758068f86..576a7d36c4 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -8,7 +8,7 @@ Provides: import pyblish.api from openpype.pipeline import legacy_io -from openpype.lib.plugin_tools import filter_profiles +from openpype.lib import filter_profiles class CollectFtrackFamily(pyblish.api.InstancePlugin): From 5ad2de372a3507ed38321afe881e7414e6738051 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 12:11:30 +0200 Subject: [PATCH 0752/2550] use new 'get_subset_name' in creator plugins --- openpype/pipeline/create/creator_plugins.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 9e1530a6a7..bf2fdd2c5f 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -9,7 +9,7 @@ from abc import ( import six from openpype.settings import get_system_settings, get_project_settings -from openpype.lib import get_subset_name_with_asset_doc +from .subset_name import get_subset_name from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -75,6 +75,7 @@ class BaseCreator: ): # Reference to CreateContext self.create_context = create_context + self.project_settings = project_settings # Creator is running in headless mode (without UI elemets) # - we may use UI inside processing this attribute should be checked @@ -276,14 +277,15 @@ class BaseCreator: variant, task_name, asset_doc, project_name, host_name ) - return get_subset_name_with_asset_doc( + return get_subset_name( self.family, variant, task_name, asset_doc, project_name, host_name, - dynamic_data=dynamic_data + dynamic_data=dynamic_data, + project_settings=self.project_settings ) def get_instance_attr_defs(self): From d611fdb88c9c0a4d264dd898ae693f3b49a3679c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 12:20:35 +0200 Subject: [PATCH 0753/2550] added docstring to dirmap --- openpype/host/dirmap.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/host/dirmap.py b/openpype/host/dirmap.py index 901bbb185c..88d68f27bf 100644 --- a/openpype/host/dirmap.py +++ b/openpype/host/dirmap.py @@ -1,3 +1,11 @@ +"""Dirmap functionality used in host integrations inside DCCs. + +Idea for current dirmap implementation was used from Maya where is possible to +enter source and destination roots and maya will try each found source +in referenced file replace with each destionation paths. First path which +exists is used. 
+""" + import os from abc import ABCMeta, abstractmethod From 8fae41c9704cee79959d9c851120a44ab089b790 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 12:20:49 +0200 Subject: [PATCH 0754/2550] use new HostDirmap in nuke --- openpype/hosts/nuke/api/lib.py | 54 +++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index b14f1a1eb1..69512c37f8 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -19,17 +19,19 @@ from openpype.client import ( get_last_versions, get_representations, ) -from openpype.api import ( + +from openpype.host import HostDirmap +from openpype.tools.utils import host_tools +from openpype.lib import ( + env_value_to_bool, Logger, get_version_from_path, - get_current_project_settings, -) -from openpype.tools.utils import host_tools -from openpype.lib import env_value_to_bool -from openpype.lib.path_tools import HostDirmap + ) + from openpype.settings import ( get_project_settings, get_anatomy_settings, + get_current_project_settings, ) from openpype.modules import ModulesManager from openpype.pipeline.template_data import get_template_data_with_names @@ -2651,20 +2653,16 @@ def add_scripts_gizmo(): class NukeDirmap(HostDirmap): - def __init__(self, host_name, project_settings, sync_module, file_name): + def __init__(self, file_name, *args, **kwargs): """ - Args: - host_name (str): Nuke - project_settings (dict): settings of current project - sync_module (SyncServerModule): to limit reinitialization - file_name (str): full path of referenced file from workfiles + Args: + file_name (str): full path of referenced file from workfiles + *args (tuple): Positional arguments for 'HostDirmap' class + **kwargs (dict): Keyword arguments for 'HostDirmap' class """ - self.host_name = host_name - self.project_settings = project_settings - self.file_name = file_name - self.sync_module = sync_module - self._mapping = None # cache mapping + self.file_name = file_name + super(NukeDirmap, self).__init__(*args, **kwargs) def on_enable_dirmap(self): pass @@ -2684,14 +2682,20 @@ class NukeDirmap(HostDirmap): class DirmapCache: """Caching class to get settings and sync_module easily and only once.""" + _project_name = None _project_settings = None _sync_module = None + @classmethod + def project_name(cls): + if cls._project_name is None: + cls._project_name = os.getenv("AVALON_PROJECT") + return cls._project_name + @classmethod def project_settings(cls): if cls._project_settings is None: - cls._project_settings = get_project_settings( - os.getenv("AVALON_PROJECT")) + cls._project_settings = get_project_settings(cls.project_name()) return cls._project_settings @classmethod @@ -2757,10 +2761,14 @@ def dirmap_file_name_filter(file_name): Checks project settings for potential mapping from source to dest. 
""" - dirmap_processor = NukeDirmap("nuke", - DirmapCache.project_settings(), - DirmapCache.sync_module(), - file_name) + + dirmap_processor = NukeDirmap( + file_name, + "nuke", + DirmapCache.project_name(), + DirmapCache.project_settings(), + DirmapCache.sync_module(), + ) dirmap_processor.process_dirmap() if os.path.exists(dirmap_processor.file_name): return dirmap_processor.file_name From 5133d9d22e32221313dacc9bf358c93a38e0fd50 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:16:45 +0200 Subject: [PATCH 0755/2550] removed 'HostDirmap' from lib --- openpype/lib/path_tools.py | 156 ------------------------------------- 1 file changed, 156 deletions(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 4f28be3302..01d947af1a 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -322,159 +322,3 @@ def create_workdir_extra_folders( fullpath = os.path.join(workdir, subfolder) if not os.path.exists(fullpath): os.makedirs(fullpath) - - -@six.add_metaclass(abc.ABCMeta) -class HostDirmap: - """ - Abstract class for running dirmap on a workfile in a host. - - Dirmap is used to translate paths inside of host workfile from one - OS to another. (Eg. arstist created workfile on Win, different artists - opens same file on Linux.) - - Expects methods to be implemented inside of host: - on_dirmap_enabled: run host code for enabling dirmap - do_dirmap: run host code to do actual remapping - """ - - def __init__(self, host_name, project_settings, sync_module=None): - self.host_name = host_name - self.project_settings = project_settings - self.sync_module = sync_module # to limit reinit of Modules - - self._mapping = None # cache mapping - - @abc.abstractmethod - def on_enable_dirmap(self): - """ - Run host dependent operation for enabling dirmap if necessary. - """ - - @abc.abstractmethod - def dirmap_routine(self, source_path, destination_path): - """ - Run host dependent remapping from source_path to destination_path - """ - - def process_dirmap(self): - # type: (dict) -> None - """Go through all paths in Settings and set them using `dirmap`. - - If artists has Site Sync enabled, take dirmap mapping directly from - Local Settings when artist is syncing workfile locally. - - Args: - project_settings (dict): Settings for current project. - - """ - if not self._mapping: - self._mapping = self.get_mappings(self.project_settings) - if not self._mapping: - return - - log.info("Processing directory mapping ...") - self.on_enable_dirmap() - log.info("mapping:: {}".format(self._mapping)) - - for k, sp in enumerate(self._mapping["source-path"]): - try: - print("{} -> {}".format(sp, - self._mapping["destination-path"][k])) - self.dirmap_routine(sp, - self._mapping["destination-path"][k]) - except IndexError: - # missing corresponding destination path - log.error(("invalid dirmap mapping, missing corresponding" - " destination directory.")) - break - except RuntimeError: - log.error("invalid path {} -> {}, mapping not registered".format( # noqa: E501 - sp, self._mapping["destination-path"][k] - )) - continue - - def get_mappings(self, project_settings): - """Get translation from source-path to destination-path. 
- - It checks if Site Sync is enabled and user chose to use local - site, in that case configuration in Local Settings takes precedence - """ - local_mapping = self._get_local_sync_dirmap(project_settings) - dirmap_label = "{}-dirmap".format(self.host_name) - if not self.project_settings[self.host_name].get(dirmap_label) and \ - not local_mapping: - return [] - mapping = local_mapping or \ - self.project_settings[self.host_name][dirmap_label]["paths"] or {} - enbled = self.project_settings[self.host_name][dirmap_label]["enabled"] - mapping_enabled = enbled or bool(local_mapping) - - if not mapping or not mapping_enabled or \ - not mapping.get("destination-path") or \ - not mapping.get("source-path"): - return [] - return mapping - - def _get_local_sync_dirmap(self, project_settings): - """ - Returns dirmap if synch to local project is enabled. - - Only valid mapping is from roots of remote site to local site set - in Local Settings. - - Args: - project_settings (dict) - Returns: - dict : { "source-path": [XXX], "destination-path": [YYYY]} - """ - import json - mapping = {} - - if not project_settings["global"]["sync_server"]["enabled"]: - return mapping - - from openpype.settings.lib import get_site_local_overrides - - if not self.sync_module: - from openpype.modules import ModulesManager - manager = ModulesManager() - self.sync_module = manager.modules_by_name["sync_server"] - - project_name = os.getenv("AVALON_PROJECT") - - active_site = self.sync_module.get_local_normalized_site( - self.sync_module.get_active_site(project_name)) - remote_site = self.sync_module.get_local_normalized_site( - self.sync_module.get_remote_site(project_name)) - log.debug("active {} - remote {}".format(active_site, remote_site)) - - if active_site == "local" \ - and project_name in self.sync_module.get_enabled_projects()\ - and active_site != remote_site: - - sync_settings = self.sync_module.get_sync_project_setting( - os.getenv("AVALON_PROJECT"), exclude_locals=False, - cached=False) - - active_overrides = get_site_local_overrides( - os.getenv("AVALON_PROJECT"), active_site) - remote_overrides = get_site_local_overrides( - os.getenv("AVALON_PROJECT"), remote_site) - - log.debug("local overrides".format(active_overrides)) - log.debug("remote overrides".format(remote_overrides)) - for root_name, active_site_dir in active_overrides.items(): - remote_site_dir = remote_overrides.get(root_name) or\ - sync_settings["sites"][remote_site]["root"][root_name] - if os.path.isdir(active_site_dir): - if not mapping.get("destination-path"): - mapping["destination-path"] = [] - mapping["destination-path"].append(active_site_dir) - - if not mapping.get("source-path"): - mapping["source-path"] = [] - mapping["source-path"].append(remote_site_dir) - - log.debug("local sync mapping:: {}".format(mapping)) - return mapping From ffbc3cc25e3875d812f7981a270853d5a92c004b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:23:52 +0200 Subject: [PATCH 0756/2550] hound fix --- openpype/hosts/nuke/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 69512c37f8..8721117dba 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -26,7 +26,7 @@ from openpype.lib import ( env_value_to_bool, Logger, get_version_from_path, - ) +) from openpype.settings import ( get_project_settings, From fb675ca01cbcbd7482967f56451e4375e50072b0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:24:42 +0200 
Subject: [PATCH 0757/2550] reuse 'MAYA_ROOT_DIR' from public --- openpype/hosts/maya/__init__.py | 6 +++++- openpype/hosts/maya/api/pipeline.py | 6 +++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/__init__.py b/openpype/hosts/maya/__init__.py index 860db766f3..bb940a881b 100644 --- a/openpype/hosts/maya/__init__.py +++ b/openpype/hosts/maya/__init__.py @@ -1,6 +1,10 @@ -from .addon import MayaAddon +from .addon import ( + MayaAddon, + MAYA_ROOT_DIR, +) __all__ = ( "MayaAddon", + "MAYA_ROOT_DIR", ) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index c9f22fe119..2401cc6aba 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -9,7 +9,6 @@ import maya.api.OpenMaya as om import pyblish.api from openpype.settings import get_project_settings -import openpype.hosts.maya from openpype.host import ( HostBase, IWorkfileHost, @@ -32,7 +31,9 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers +from openpype.hosts.maya import MAYA_ROOT_DIR from openpype.hosts.maya.lib import copy_workspace_mel + from . import menu, lib from .workio import ( open_file, @@ -45,8 +46,7 @@ from .workio import ( log = logging.getLogger("openpype.hosts.maya") -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.maya.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(MAYA_ROOT_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") From 6f4f87418eabdf1248dbd4db29cff77ff018b0ab Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:31:28 +0200 Subject: [PATCH 0758/2550] integrate thumbnail does not require 'AVALON_THUMBNAIL_ROOT' to be set if template does not use it --- .../plugins/publish/integrate_thumbnail.py | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index 8ae0dd2d60..445c563d27 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -6,10 +6,9 @@ import copy import six import pyblish.api -from bson.objectid import ObjectId from openpype.client import get_version_by_id -from openpype.pipeline import legacy_io +from openpype.client.operations import OperationsSession, new_thumbnail_doc class IntegrateThumbnails(pyblish.api.InstancePlugin): @@ -24,13 +23,9 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ] def process(self, instance): - - if not os.environ.get("AVALON_THUMBNAIL_ROOT"): - self.log.warning( - "AVALON_THUMBNAIL_ROOT is not set." - " Skipping thumbnail integration." - ) - return + env_key = "AVALON_THUMBNAIL_ROOT" + thumbnail_root_format_key = "{thumbnail_root}" + thumbnail_root = os.environ.get(env_key) or "" published_repres = instance.data.get("published_representations") if not published_repres: @@ -51,6 +46,16 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ).format(project_name)) return + thumbnail_template = anatomy.templates["publish"]["thumbnail"] + if ( + not thumbnail_root + and thumbnail_root_format_key in thumbnail_template + ): + self.log.warning(( + "{} is not set. Skipping thumbnail integration." 
+ ).format(env_key)) + return + thumb_repre = None thumb_repre_anatomy_data = None for repre_info in published_repres.values(): From 503d64ec11be7c9af86992df3ffbe6a14534d97f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:31:57 +0200 Subject: [PATCH 0759/2550] thumbnail resolver does not need to have 'AVALON_THUMBNAIL_ROOT' set if thumbnail template does not need it --- openpype/pipeline/thumbnail.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py index eb383b16d9..5530d29614 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -73,19 +73,20 @@ class ThumbnailResolver(object): class TemplateResolver(ThumbnailResolver): - priority = 90 def process(self, thumbnail_entity, thumbnail_type): - - if not os.environ.get("AVALON_THUMBNAIL_ROOT"): - return - template = thumbnail_entity["data"].get("template") if not template: self.log.debug("Thumbnail entity does not have set template") return + thumbnail_root_format_key = "{thumbnail_root}" + thumbnail_root = os.environ.get("AVALON_THUMBNAIL_ROOT") or "" + # Check if template require thumbnail root and if is avaiable + if thumbnail_root_format_key in template and not thumbnail_root: + return + project_name = self.dbcon.active_project() project = get_project(project_name, fields=["name", "data.code"]) @@ -95,7 +96,7 @@ class TemplateResolver(ThumbnailResolver): template_data.update({ "_id": str(thumbnail_entity["_id"]), "thumbnail_type": thumbnail_type, - "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), + "thumbnail_root": thumbnail_root, "project": { "name": project["name"], "code": project["data"].get("code") From 88d914811647427e52d86b5b99a0eb1afd8f1b6c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:32:12 +0200 Subject: [PATCH 0760/2550] added creation of new thumbnail document into operations --- openpype/client/operations.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index c0716ee109..9daaa3e116 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -24,6 +24,7 @@ CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" CURRENT_VERSION_SCHEMA = "openpype:version-3.0" CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" +CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0" def _create_or_convert_to_mongo_id(mongo_id): @@ -195,6 +196,29 @@ def new_representation_doc( } +def new_thumbnail_doc(data=None, entity_id=None): + """Create skeleton data of thumbnail document. + + Args: + data (Dict[str, Any]): Thumbnail document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of thumbnail document. 
+ """ + + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "thumbnail", + "schema": CURRENT_THUMBNAIL_SCHEMA, + "data": data + } + + def new_workfile_info_doc( filename, asset_id, task_name, files, data=None, entity_id=None ): From 46553deec9cdb85937b298ba7aa6e1482b5aa673 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:32:23 +0200 Subject: [PATCH 0761/2550] use perations in integrate thumbnail --- .../plugins/publish/integrate_thumbnail.py | 55 ++++++++++--------- 1 file changed, 29 insertions(+), 26 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index 445c563d27..d86cec10ad 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -71,10 +71,6 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - legacy_io.install() - - thumbnail_template = anatomy.templates["publish"]["thumbnail"] - version = get_version_by_id(project_name, thumb_repre["parent"]) if not version: raise AssertionError( @@ -93,14 +89,15 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): filename, file_extension = os.path.splitext(src_full_path) # Create id for mongo entity now to fill anatomy template - thumbnail_id = ObjectId() + thumbnail_doc = new_thumbnail_doc() + thumbnail_id = thumbnail_doc["_id"] # Prepare anatomy template fill data template_data = copy.deepcopy(thumb_repre_anatomy_data) template_data.update({ "_id": str(thumbnail_id), - "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), "ext": file_extension[1:], + "thumbnail_root": thumbnail_root, "thumbnail_type": "thumbnail" }) @@ -122,8 +119,8 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): shutil.copy(src_full_path, dst_full_path) # Clean template data from keys that are dynamic - template_data.pop("_id") - template_data.pop("thumbnail_root") + for key in ("_id", "thumbnail_root"): + template_data.pop(key, None) repre_context = template_filled.used_values for key in self.required_context_keys: @@ -132,34 +129,40 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): continue repre_context[key] = template_data[key] - thumbnail_entity = { - "_id": thumbnail_id, - "type": "thumbnail", - "schema": "openpype:thumbnail-1.0", - "data": { - "template": thumbnail_template, - "template_data": repre_context - } + op_session = OperationsSession() + + thumbnail_doc["data"] = { + "template": thumbnail_template, + "template_data": repre_context } - # Create thumbnail entity - legacy_io.insert_one(thumbnail_entity) - self.log.debug( - "Creating entity in database {}".format(str(thumbnail_entity)) + op_session.create_entity( + project_name, thumbnail_doc["type"], thumbnail_doc ) + # Create thumbnail entity + self.log.debug( + "Creating entity in database {}".format(str(thumbnail_doc)) + ) + # Set thumbnail id for version - legacy_io.update_many( - {"_id": version["_id"]}, - {"$set": {"data.thumbnail_id": thumbnail_id}} + op_session.update_entity( + project_name, + version["type"], + version["_id"], + {"data.thumbnail_id": thumbnail_id} ) self.log.debug("Setting thumbnail for version \"{}\" <{}>".format( version["name"], str(version["_id"]) )) asset_entity = instance.data["assetEntity"] - legacy_io.update_many( - {"_id": asset_entity["_id"]}, - {"$set": {"data.thumbnail_id": thumbnail_id}} + op_session.update_entity( + project_name, + asset_entity["type"], + asset_entity["_id"], + {"data.thumbnail_id": thumbnail_id} ) 
self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format( asset_entity["name"], str(version["_id"]) )) + + op_session.commit() From f56658737a9abc52ede2f971a136d44e749c4771 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:35:50 +0200 Subject: [PATCH 0762/2550] use also anatomy roots --- openpype/pipeline/thumbnail.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py index 5530d29614..d95f5e35c9 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -4,6 +4,7 @@ import logging from openpype.client import get_project from . import legacy_io +from .anatomy import Anatomy from .plugin_discover import ( discover, register_plugin, @@ -89,6 +90,7 @@ class TemplateResolver(ThumbnailResolver): project_name = self.dbcon.active_project() project = get_project(project_name, fields=["name", "data.code"]) + anatomy = Anatomy(project_name) template_data = copy.deepcopy( thumbnail_entity["data"].get("template_data") or {} @@ -100,7 +102,8 @@ class TemplateResolver(ThumbnailResolver): "project": { "name": project["name"], "code": project["data"].get("code") - } + }, + "root": anatomy.roots }) try: From f99d9d3d77149b084b75ebf3a5621bf49c4eb9b6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 13:36:58 +0200 Subject: [PATCH 0763/2550] use project anatomy if needed --- openpype/pipeline/thumbnail.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py index d95f5e35c9..39f3e17893 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -90,7 +90,6 @@ class TemplateResolver(ThumbnailResolver): project_name = self.dbcon.active_project() project = get_project(project_name, fields=["name", "data.code"]) - anatomy = Anatomy(project_name) template_data = copy.deepcopy( thumbnail_entity["data"].get("template_data") or {} @@ -103,8 +102,11 @@ class TemplateResolver(ThumbnailResolver): "name": project["name"], "code": project["data"].get("code") }, - "root": anatomy.roots }) + # Add anatomy roots if is in template + if "{root" in template: + anatomy = Anatomy(project_name) + template_data["root"] = anatomy.roots try: filepath = os.path.normpath(template.format(**template_data)) From 3618e8f856859106714bc9c550af7ac8aac9f8c6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:10:23 +0200 Subject: [PATCH 0764/2550] create formatting function for file sizes 'format_file_size' --- openpype/lib/path_tools.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 4f28be3302..f807917f5b 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -14,6 +14,27 @@ from .profiles_filtering import filter_profiles log = logging.getLogger(__name__) +def format_file_size(file_size, suffix=None): + """Returns formatted string with size in appropriate unit. + + Args: + file_size (int): Size of file in bytes. + suffix (str): Suffix for formatted size. Default is 'B' (as bytes). + + Returns: + str: Formatted size using proper unit and passed suffix (e.g. 7 MiB). 
+ """ + + if suffix is None: + suffix = "B" + + for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]: + if abs(file_size) < 1024.0: + return "%3.1f%s%s" % (file_size, unit, suffix) + file_size /= 1024.0 + return "%.1f%s%s" % (file_size, "Yi", suffix) + + def create_hard_link(src_path, dst_path): """Create hardlink of file. From 6398f021092d2de440218acec0d8a024aa55d75d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:10:48 +0200 Subject: [PATCH 0765/2550] copied function to collect frames 'collect_frames' --- openpype/lib/path_tools.py | 39 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index f807917f5b..45aa54d6cb 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -6,6 +6,8 @@ import logging import six import platform +import clique + from openpype.client import get_project from openpype.settings import get_project_settings @@ -71,6 +73,43 @@ def create_hard_link(src_path, dst_path): ) +def collect_frames(files): + """Returns dict of source path and its frame, if from sequence + + Uses clique as most precise solution, used when anatomy template that + created files is not known. + + Assumption is that frames are separated by '.', negative frames are not + allowed. + + Args: + files(list) or (set with single value): list of source paths + + Returns: + (dict): {'/asset/subset_v001.0001.png': '0001', ....} + """ + + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble( + files, minimum_items=1, patterns=patterns) + + sources_and_frames = {} + if collections: + for collection in collections: + src_head = collection.head + src_tail = collection.tail + + for index in collection.indexes: + src_frame = collection.format("{padding}") % index + src_file_name = "{}{}{}".format( + src_head, src_frame, src_tail) + sources_and_frames[src_file_name] = src_frame + else: + sources_and_frames[remainder.pop()] = None + + return sources_and_frames + + def _rreplace(s, a, b, n=1): """Replace a with b in string s from right side n times.""" return b.join(s.rsplit(a, n)) From c26119cc9f6a9fe4c330842f9dddbf7865a63425 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:13:08 +0200 Subject: [PATCH 0766/2550] use new functions in code --- openpype/lib/__init__.py | 4 ++++ .../publish/submit_aftereffects_deadline.py | 6 ++++-- .../validate_expected_and_rendered_files.py | 2 +- .../action_delete_old_versions.py | 17 +++++++---------- openpype/plugins/load/delete_old_versions.py | 11 ++--------- openpype/plugins/load/delivery.py | 11 +++++++---- 6 files changed, 25 insertions(+), 26 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index adb857a056..17aafc3e8b 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -192,6 +192,8 @@ from .plugin_tools import ( ) from .path_tools import ( + format_file_size, + collect_frames, create_hard_link, version_up, get_version_from_path, @@ -353,6 +355,8 @@ __all__ = [ "set_plugin_attributes_from_settings", "source_hash", + "format_file_size", + "collect_frames", "create_hard_link", "version_up", "get_version_from_path", diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index c55f85c8da..1d68793d53 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ 
b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -3,8 +3,10 @@ import attr import getpass import pyblish.api -from openpype.lib import env_value_to_bool -from openpype.lib.delivery import collect_frames +from openpype.lib import ( + env_value_to_bool, + collect_frames, +) from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo diff --git a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py index c2426e0d78..f0a3ddd246 100644 --- a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py +++ b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py @@ -3,7 +3,7 @@ import requests import pyblish.api -from openpype.lib.delivery import collect_frames +from openpype.lib import collect_frames from openpype_modules.deadline.abstract_submit_deadline import requests_get diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py index 79d04a7854..c543dc8834 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -11,7 +11,11 @@ from openpype.client import ( get_versions, get_representations ) -from openpype.lib import StringTemplate, TemplateUnsolved +from openpype.lib import ( + StringTemplate, + TemplateUnsolved, + format_file_size, +) from openpype.pipeline import AvalonMongoDB, Anatomy from openpype_modules.ftrack.lib import BaseAction, statics_icon @@ -134,13 +138,6 @@ class DeleteOldVersions(BaseAction): "title": self.inteface_title } - def sizeof_fmt(self, num, suffix='B'): - for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: - if abs(num) < 1024.0: - return "%3.1f%s%s" % (num, unit, suffix) - num /= 1024.0 - return "%.1f%s%s" % (num, 'Yi', suffix) - def launch(self, session, entities, event): values = event["data"].get("values") if not values: @@ -359,7 +356,7 @@ class DeleteOldVersions(BaseAction): dir_paths, file_paths_by_dir, delete=False ) - msg = "Total size of files: " + self.sizeof_fmt(size) + msg = "Total size of files: {}".format(format_file_size(size)) self.log.warning(msg) @@ -430,7 +427,7 @@ class DeleteOldVersions(BaseAction): "message": msg } - msg = "Total size of files deleted: " + self.sizeof_fmt(size) + msg = "Total size of files deleted: {}".format(format_file_size(size)) self.log.warning(msg) diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index 6e0b464cc1..ce6f204c64 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -10,7 +10,7 @@ from Qt import QtWidgets, QtCore from openpype.client import get_versions, get_representations from openpype import style from openpype.pipeline import load, AvalonMongoDB, Anatomy -from openpype.lib import StringTemplate +from openpype.lib import StringTemplate, format_file_size from openpype.modules import ModulesManager @@ -38,13 +38,6 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): ) ] - def sizeof_fmt(self, num, suffix='B'): - for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: - if abs(num) < 1024.0: - return "%3.1f%s%s" % (num, unit, suffix) - num /= 1024.0 - return 
"%.1f%s%s" % (num, 'Yi', suffix) - def delete_whole_dir_paths(self, dir_paths, delete=True): size = 0 @@ -456,7 +449,7 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): size += self.main(project_name, data, remove_publish_folder) print("Progressing {}/{}".format(count + 1, len(contexts))) - msg = "Total size of files: " + self.sizeof_fmt(size) + msg = "Total size of files: {}".format(format_file_size(size)) self.log.info(msg) self.message(msg) diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index f6e1d4f06b..2a9f25e0fb 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -7,15 +7,17 @@ from openpype.client import get_representations from openpype.pipeline import load, Anatomy from openpype import resources, style +from openpype.lib import ( + format_file_size, + collect_frames, +) from openpype.lib.dateutils import get_datetime_data from openpype.lib.delivery import ( - sizeof_fmt, path_from_representation, get_format_dict, check_destination_path, process_single_file, process_sequence, - collect_frames ) @@ -263,8 +265,9 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): def _prepare_label(self): """Provides text with no of selected files and their size.""" - label = "{} files, size {}".format(self.files_selected, - sizeof_fmt(self.size_selected)) + label = "{} files, size {}".format( + self.files_selected, + format_file_size(self.size_selected)) return label def _get_selected_repres(self): From aeb30b3101c31f8965e80cf40287f5e0d4e4dfe9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:13:21 +0200 Subject: [PATCH 0767/2550] marked functions in delivery as deprecated --- openpype/lib/delivery.py | 78 ++++++++++++++++++++++++++++------------ 1 file changed, 55 insertions(+), 23 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index ffcfe9fa4d..5244187354 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -4,6 +4,8 @@ import shutil import glob import clique import collections +import functools +import warnings from .path_templates import ( StringTemplate, @@ -11,6 +13,52 @@ from .path_templates import ( ) +class DeliveryDeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." 
+ ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeliveryDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeliveryDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + +@deprecated("openpype.lib.path_tools.collect_frames") def collect_frames(files): """ Returns dict of source path and its frame, if from sequence @@ -26,34 +74,18 @@ def collect_frames(files): Returns: (dict): {'/asset/subset_v001.0001.png': '0001', ....} """ - patterns = [clique.PATTERNS["frames"]] - collections, remainder = clique.assemble(files, minimum_items=1, - patterns=patterns) - sources_and_frames = {} - if collections: - for collection in collections: - src_head = collection.head - src_tail = collection.tail + from .path_tools import collect_frames - for index in collection.indexes: - src_frame = collection.format("{padding}") % index - src_file_name = "{}{}{}".format(src_head, src_frame, - src_tail) - sources_and_frames[src_file_name] = src_frame - else: - sources_and_frames[remainder.pop()] = None - - return sources_and_frames + return collect_frames(files) -def sizeof_fmt(num, suffix='B'): +@deprecated("openpype.lib.path_tools.format_file_size") +def sizeof_fmt(num, suffix=None): """Returns formatted string with size in appropriate unit""" - for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: - if abs(num) < 1024.0: - return "%3.1f%s%s" % (num, unit, suffix) - num /= 1024.0 - return "%.1f%s%s" % (num, 'Yi', suffix) + + from .path_tools import format_file_size + return format_file_size(num, suffix) def path_from_representation(representation, anatomy): From d58ea894159cb1190fb5bcad5cdf4e949adf39f7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:26:23 +0200 Subject: [PATCH 0768/2550] implemented 'get_representation_path_with_anatomy'. 
--- openpype/pipeline/load/__init__.py | 5 +++ openpype/pipeline/load/utils.py | 62 +++++++++++++++++++++++++++--- 2 files changed, 62 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index b6bdd13d50..4fc8ad1d16 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -1,6 +1,8 @@ from .utils import ( HeroVersionType, + IncompatibleLoaderError, + InvalidRepresentationContext, get_repres_contexts, get_subset_contexts, @@ -20,6 +22,7 @@ from .utils import ( get_representation_path_from_context, get_representation_path, + get_representation_path_with_anatomy, is_compatible_loader, @@ -46,7 +49,9 @@ from .plugins import ( __all__ = ( # utils.py "HeroVersionType", + "IncompatibleLoaderError", + "InvalidRepresentationContext", "get_repres_contexts", "get_subset_contexts", diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 99d6876d4b..d4a5c2be5a 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -23,10 +23,16 @@ from openpype.client import ( get_representation_by_name, get_representation_parents ) +from openpype.lib import ( + StringTemplate, + TemplateUnsolved, +) from openpype.pipeline import ( schema, legacy_io, Anatomy, + registered_root, + registered_host, ) log = logging.getLogger(__name__) @@ -61,6 +67,11 @@ class IncompatibleLoaderError(ValueError): pass +class InvalidRepresentationContext(ValueError): + """Representation path can't be received using representation document.""" + pass + + def get_repres_contexts(representation_ids, dbcon=None): """Return parenthood context for representation. @@ -515,6 +526,52 @@ def get_representation_path_from_context(context): return get_representation_path(representation, root) +def get_representation_path_with_anatomy(repre_doc, anatomy): + """Receive representation path using representation document and anatomy. + + Anatomy is used to replace 'root' key in representation file. Ideally + should be used instead of 'get_representation_path' which is based on + "current context". + + Future notes: + We want also be able store resources into representation and I can + imagine the result should also contain paths to possible resources. + + Args: + repre_doc (Dict[str, Any]): Representation document. + anatomy (Anatomy): Project anatomy object. + + Returns: + Union[None, TemplateResult]: None if path can't be received + + Raises: + InvalidRepresentationContext: When representation data are probably + invalid or not available. + """ + + try: + template = repre_doc["data"]["template"] + + except KeyError: + raise InvalidRepresentationContext(( + "Representation document does not" + " contain template in data ('data.template')" + )) + + try: + context = repre_doc["context"] + context["root"] = anatomy.roots + path = StringTemplate.format_strict_template(template, context) + + except TemplateUnsolved as exc: + raise InvalidRepresentationContext(( + "Couldn't resolve representation template with available data." 
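
Illustrative usage of the new helper (a minimal sketch, not part of the
change itself; the 'project_name'/'repre_id' values and the
'get_representation_by_id' query are assumed placeholders used only to
show the call):

    from openpype.client import get_representation_by_id
    from openpype.pipeline import Anatomy
    from openpype.pipeline.load import (
        get_representation_path_with_anatomy,
        InvalidRepresentationContext,
    )

    anatomy = Anatomy(project_name)
    repre_doc = get_representation_by_id(project_name, repre_id)
    try:
        # Resolves 'data.template' using the representation 'context'
        # plus the project anatomy roots, instead of the registered
        # root of the current session.
        path = get_representation_path_with_anatomy(repre_doc, anatomy)
    except InvalidRepresentationContext:
        # Representation document is missing template or context data.
        path = None
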
+ " Reason: {}".format(str(exc)) + )) + + return path.normalized() + + def get_representation_path(representation, root=None, dbcon=None): """Get filename from representation document @@ -533,14 +590,10 @@ def get_representation_path(representation, root=None, dbcon=None): """ - from openpype.lib import StringTemplate, TemplateUnsolved - if dbcon is None: dbcon = legacy_io if root is None: - from openpype.pipeline import registered_root - root = registered_root() def path_from_represenation(): @@ -736,7 +789,6 @@ def get_outdated_containers(host=None, project_name=None): """ if host is None: - from openpype.pipeline import registered_host host = registered_host() if project_name is None: From 315cf40d8baa47bb3a4f6864e49551bdfc6d196b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:29:43 +0200 Subject: [PATCH 0769/2550] fixed import in load utils --- openpype/pipeline/load/utils.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index d4a5c2be5a..83b904e4a7 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -31,8 +31,6 @@ from openpype.pipeline import ( schema, legacy_io, Anatomy, - registered_root, - registered_host, ) log = logging.getLogger(__name__) @@ -594,6 +592,8 @@ def get_representation_path(representation, root=None, dbcon=None): dbcon = legacy_io if root is None: + from openpype.pipeline import registered_root + root = registered_root() def path_from_represenation(): @@ -789,6 +789,8 @@ def get_outdated_containers(host=None, project_name=None): """ if host is None: + from openpype.pipeline import registered_host + host = registered_host() if project_name is None: From f2a191861b9264383da3c0b63ed8f4feac629a1c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:41:28 +0200 Subject: [PATCH 0770/2550] marked 'path_from_representation' as deprecated and replaced it's usage with 'get_representation_path_with_anatomy' --- openpype/lib/delivery.py | 29 +++++-------- .../event_handlers_user/action_delivery.py | 6 +-- openpype/pipeline/load/__init__.py | 1 + openpype/plugins/load/delete_old_versions.py | 41 +++++++++++-------- openpype/plugins/load/delivery.py | 8 ++-- 5 files changed, 43 insertions(+), 42 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 5244187354..ea757932c9 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -7,11 +7,6 @@ import collections import functools import warnings -from .path_templates import ( - StringTemplate, - TemplateUnsolved, -) - class DeliveryDeprecatedWarning(DeprecationWarning): pass @@ -88,24 +83,22 @@ def sizeof_fmt(num, suffix=None): return format_file_size(num, suffix) +@deprecated("openpype.pipeline.load.get_representation_path_with_anatomy") def path_from_representation(representation, anatomy): - try: - template = representation["data"]["template"] + """Get representation path using representation document and anatomy. - except KeyError: - return None + Args: + representation (Dict[str, Any]): Representation document. + anatomy (Anatomy): Project anatomy. - try: - context = representation["context"] - context["root"] = anatomy.roots - path = StringTemplate.format_strict_template(template, context) - return os.path.normpath(path) + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. 
+ """ - except TemplateUnsolved: - # Template references unavailable data - return None + from openpype.pipeline.load import get_representation_path_with_anatomy - return path + return get_representation_path_with_anatomy(representation, anatomy) def copy_file(src_path, dst_path): diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index eec245070c..59a34b3f85 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -10,15 +10,15 @@ from openpype.client import ( get_versions, get_representations ) -from openpype.pipeline import Anatomy from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from openpype_modules.ftrack.lib.custom_attributes import ( query_custom_attributes ) from openpype.lib.dateutils import get_datetime_data +from openpype.pipeline import Anatomy +from openpype.pipeline.load import get_representation_path_with_anatomy from openpype.lib.delivery import ( - path_from_representation, get_format_dict, check_destination_path, process_single_file, @@ -580,7 +580,7 @@ class Delivery(BaseAction): if frame: repre["context"]["frame"] = len(str(frame)) * "#" - repre_path = path_from_representation(repre, anatomy) + repre_path = get_representation_path_with_anatomy(repre, anatomy) # TODO add backup solution where root of path from component # is replaced with root args = ( diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index 4fc8ad1d16..bf38a0b3c8 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -71,6 +71,7 @@ __all__ = ( "get_representation_path_from_context", "get_representation_path", + "get_representation_path_with_anatomy", "is_compatible_loader", diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index ce6f204c64..8c8546d9c8 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -7,11 +7,15 @@ from pymongo import UpdateOne import qargparse from Qt import QtWidgets, QtCore -from openpype.client import get_versions, get_representations from openpype import style -from openpype.pipeline import load, AvalonMongoDB, Anatomy -from openpype.lib import StringTemplate, format_file_size +from openpype.client import get_versions, get_representations from openpype.modules import ModulesManager +from openpype.lib import StringTemplate, format_file_size +from openpype.pipeline import load, AvalonMongoDB, Anatomy +from openpype.pipeline.load import ( + get_representation_path_with_anatomy, + InvalidRepresentationContext, +) class DeleteOldVersions(load.SubsetLoaderPlugin): @@ -73,27 +77,28 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): def path_from_representation(self, representation, anatomy): try: - template = representation["data"]["template"] - + context = representation["context"] except KeyError: return (None, None) + try: + path = get_representation_path_with_anatomy( + representation, anatomy + ) + except InvalidRepresentationContext: + return (None, None) + sequence_path = None - try: - context = representation["context"] - context["root"] = anatomy.roots - path = str(StringTemplate.format_template(template, context)) - if "frame" in context: - context["frame"] = self.sequence_splitter - sequence_path = os.path.normpath(str( - 
StringTemplate.format_template(template, context) - )) + if "frame" in context: + context["frame"] = self.sequence_splitter + sequence_path = get_representation_path_with_anatomy( + representation, anatomy + ) - except KeyError: - # Template references unavailable data - return (None, None) + if sequence_path: + sequence_path = sequence_path.normalized() - return (os.path.normpath(path), sequence_path) + return (path.normalized(), sequence_path) def delete_only_repre_files(self, dir_paths, file_paths, delete=True): size = 0 diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 2a9f25e0fb..4651efd4a3 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -10,10 +10,10 @@ from openpype import resources, style from openpype.lib import ( format_file_size, collect_frames, + get_datetime_data, ) -from openpype.lib.dateutils import get_datetime_data +from openpype.pipeline.load import get_representation_path_with_anatomy from openpype.lib.delivery import ( - path_from_representation, get_format_dict, check_destination_path, process_single_file, @@ -169,7 +169,9 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): if repre["name"] not in selected_repres: continue - repre_path = path_from_representation(repre, self.anatomy) + repre_path = get_representation_path_with_anatomy( + repre, self.anatomy + ) anatomy_data = copy.deepcopy(repre["context"]) new_report_items = check_destination_path(str(repre["_id"]), From ea241ca807837896ac1f8299ec0c6c6bbb1020ad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:41:44 +0200 Subject: [PATCH 0771/2550] added some docstrings to deprecated functions --- openpype/lib/delivery.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index ea757932c9..e09188d3bb 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -55,19 +55,23 @@ def deprecated(new_destination): @deprecated("openpype.lib.path_tools.collect_frames") def collect_frames(files): - """ - Returns dict of source path and its frame, if from sequence + """Returns dict of source path and its frame, if from sequence - Uses clique as most precise solution, used when anatomy template that - created files is not known. + Uses clique as most precise solution, used when anatomy template that + created files is not known. - Assumption is that frames are separated by '.', negative frames are not - allowed. + Assumption is that frames are separated by '.', negative frames are not + allowed. - Args: - files(list) or (set with single value): list of source paths - Returns: - (dict): {'/asset/subset_v001.0001.png': '0001', ....} + Args: + files(list) or (set with single value): list of source paths + + Returns: + (dict): {'/asset/subset_v001.0001.png': '0001', ....} + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ from .path_tools import collect_frames @@ -77,7 +81,12 @@ def collect_frames(files): @deprecated("openpype.lib.path_tools.format_file_size") def sizeof_fmt(num, suffix=None): - """Returns formatted string with size in appropriate unit""" + """Returns formatted string with size in appropriate unit + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. 
+ """ from .path_tools import format_file_size return format_file_size(num, suffix) From 14dc209ab0a42d799cfa37eebd08d090666b537f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 14:53:52 +0200 Subject: [PATCH 0772/2550] 'get_project_template_data' can access project name --- openpype/pipeline/template_data.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/template_data.py b/openpype/pipeline/template_data.py index 824a25127c..bab46a627d 100644 --- a/openpype/pipeline/template_data.py +++ b/openpype/pipeline/template_data.py @@ -28,27 +28,37 @@ def get_general_template_data(system_settings=None): } -def get_project_template_data(project_doc): +def get_project_template_data(project_doc=None, project_name=None): """Extract data from project document that are used in templates. Project document must have 'name' and (at this moment) optional key 'data.code'. + One of 'project_name' or 'project_doc' must be passed. With prepared + project document is function much faster because don't have to query. + Output contains formatting keys: - 'project[name]' - Project name - 'project[code]' - Project code Args: project_doc (Dict[str, Any]): Queried project document. + project_name (str): Name of project. Returns: Dict[str, Dict[str, str]]: Template data based on project document. """ + if not project_name: + project_name = project_doc["name"] + + if not project_doc: + project_code = get_project(project_name, fields=["data.code"]) + project_code = project_doc.get("data", {}).get("code") return { "project": { - "name": project_doc["name"], + "name": project_name, "code": project_code } } From e2060b9d65e4b7224fff916b4f492de0d015e9bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 15:00:08 +0200 Subject: [PATCH 0773/2550] marked 'get_format_dict' as deprecated and moved it to pipeline delivery --- openpype/lib/delivery.py | 33 +++++++++---------- .../event_handlers_user/action_delivery.py | 4 ++- openpype/pipeline/delivery.py | 26 +++++++++++++++ openpype/plugins/load/delivery.py | 4 ++- 4 files changed, 47 insertions(+), 20 deletions(-) create mode 100644 openpype/pipeline/delivery.py diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index e09188d3bb..1e364c45d7 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -125,28 +125,25 @@ def copy_file(src_path, dst_path): shutil.copyfile(src_path, dst_path) +@deprecated("openpype.pipeline.delivery.get_format_dict") def get_format_dict(anatomy, location_path): """Returns replaced root values from user provider value. - Args: - anatomy (Anatomy) - location_path (str): user provided value - Returns: - (dict): prepared for formatting of a template + Args: + anatomy (Anatomy) + location_path (str): user provided value + + Returns: + (dict): prepared for formatting of a template + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. 
""" - format_dict = {} - if location_path: - location_path = location_path.replace("\\", "/") - root_names = anatomy.root_names_from_templates( - anatomy.templates["delivery"] - ) - if root_names is None: - format_dict["root"] = location_path - else: - format_dict["root"] = {} - for name in root_names: - format_dict["root"][name] = location_path - return format_dict + + from openpype.pipeline.delivery import get_format_dict + + return get_format_dict(anatomy, location_path) def check_destination_path(repre_id, diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index 59a34b3f85..08d6e53078 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -18,8 +18,10 @@ from openpype_modules.ftrack.lib.custom_attributes import ( from openpype.lib.dateutils import get_datetime_data from openpype.pipeline import Anatomy from openpype.pipeline.load import get_representation_path_with_anatomy -from openpype.lib.delivery import ( +from openpype.pipeline.delivery import ( get_format_dict, +) +from openpype.lib.delivery import ( check_destination_path, process_single_file, process_sequence diff --git a/openpype/pipeline/delivery.py b/openpype/pipeline/delivery.py new file mode 100644 index 0000000000..03319f7ddc --- /dev/null +++ b/openpype/pipeline/delivery.py @@ -0,0 +1,26 @@ +"""Functions useful for delivery of published representations.""" + + +def get_format_dict(anatomy, location_path): + """Returns replaced root values from user provider value. + + Args: + anatomy (Anatomy): Project anatomy. + location_path (str): User provided value. + + Returns: + (dict): Prepared data for formatting of a template. + """ + + format_dict = {} + if not location_path: + return format_dict + + location_path = location_path.replace("\\", "/") + root_names = anatomy.root_names_from_templates( + anatomy.templates["delivery"] + ) + format_dict["root"] = {} + for name in root_names: + format_dict["root"][name] = location_path + return format_dict diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 4651efd4a3..0ea62510a4 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -13,8 +13,10 @@ from openpype.lib import ( get_datetime_data, ) from openpype.pipeline.load import get_representation_path_with_anatomy -from openpype.lib.delivery import ( +from openpype.pipeline.delivery import ( get_format_dict, +) +from openpype.lib.delivery import ( check_destination_path, process_single_file, process_sequence, From fe566f4a4b1f5695e73731927601a814a330d8ae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 15:00:57 +0200 Subject: [PATCH 0774/2550] copied 'copy_file' to 'pipeline.delivery' and renamed to '_copy_file' --- openpype/pipeline/delivery.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/openpype/pipeline/delivery.py b/openpype/pipeline/delivery.py index 03319f7ddc..5906892d59 100644 --- a/openpype/pipeline/delivery.py +++ b/openpype/pipeline/delivery.py @@ -1,4 +1,26 @@ """Functions useful for delivery of published representations.""" +import os +import shutil + +from openpype.lib import create_hard_link + + +def _copy_file(src_path, dst_path): + """Hardlink file if possible(to save space), copy if not. + + Because of using hardlinks should not be function used in other parts + of pipeline. 
+ """ + + if os.path.exists(dst_path): + return + try: + create_hard_link( + src_path, + dst_path + ) + except OSError: + shutil.copyfile(src_path, dst_path) def get_format_dict(anatomy, location_path): From dc77d4a60908729f3b7ce343216b9af9852f1912 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 15:04:03 +0200 Subject: [PATCH 0775/2550] marked 'check_destination_path' as deprecated and moved to pipeline.delivery --- openpype/lib/delivery.py | 65 +++++++------------ .../event_handlers_user/action_delivery.py | 2 +- openpype/pipeline/delivery.py | 61 +++++++++++++++++ openpype/plugins/load/delivery.py | 2 +- 4 files changed, 86 insertions(+), 44 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 1e364c45d7..543c3d12e5 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -3,7 +3,6 @@ import os import shutil import glob import clique -import collections import functools import warnings @@ -146,56 +145,38 @@ def get_format_dict(anatomy, location_path): return get_format_dict(anatomy, location_path) +@deprecated("openpype.pipeline.delivery.check_destination_path") def check_destination_path(repre_id, anatomy, anatomy_data, datetime_data, template_name): """ Try to create destination path based on 'template_name'. - In the case that path cannot be filled, template contains unmatched - keys, provide error message to filter out repre later. + In the case that path cannot be filled, template contains unmatched + keys, provide error message to filter out repre later. - Args: - anatomy (Anatomy) - anatomy_data (dict): context to fill anatomy - datetime_data (dict): values with actual date - template_name (str): to pick correct delivery template - Returns: - (collections.defauldict): {"TYPE_OF_ERROR":"ERROR_DETAIL"} + Args: + anatomy (Anatomy) + anatomy_data (dict): context to fill anatomy + datetime_data (dict): values with actual date + template_name (str): to pick correct delivery template + + Returns: + (collections.defauldict): {"TYPE_OF_ERROR":"ERROR_DETAIL"} + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - anatomy_data.update(datetime_data) - anatomy_filled = anatomy.format_all(anatomy_data) - dest_path = anatomy_filled["delivery"][template_name] - report_items = collections.defaultdict(list) - if not dest_path.solved: - msg = ( - "Missing keys in Representation's context" - " for anatomy template \"{}\"." - ).format(template_name) + from openpype.pipeline.delivery import check_destination_path - sub_msg = ( - "Representation: {}
    " - ).format(repre_id) - - if dest_path.missing_keys: - keys = ", ".join(dest_path.missing_keys) - sub_msg += ( - "- Missing keys: \"{}\"
    " - ).format(keys) - - if dest_path.invalid_types: - items = [] - for key, value in dest_path.invalid_types.items(): - items.append("\"{}\" {}".format(key, str(value))) - - keys = ", ".join(items) - sub_msg += ( - "- Invalid value DataType: \"{}\"
    " - ).format(keys) - - report_items[msg].append(sub_msg) - - return report_items + return check_destination_path( + repre_id, + anatomy, + anatomy_data, + datetime_data, + template_name + ) def process_single_file( diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index 08d6e53078..8b314d8f1d 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -20,9 +20,9 @@ from openpype.pipeline import Anatomy from openpype.pipeline.load import get_representation_path_with_anatomy from openpype.pipeline.delivery import ( get_format_dict, + check_destination_path, ) from openpype.lib.delivery import ( - check_destination_path, process_single_file, process_sequence ) diff --git a/openpype/pipeline/delivery.py b/openpype/pipeline/delivery.py index 5906892d59..79667161a6 100644 --- a/openpype/pipeline/delivery.py +++ b/openpype/pipeline/delivery.py @@ -1,6 +1,7 @@ """Functions useful for delivery of published representations.""" import os import shutil +import collections from openpype.lib import create_hard_link @@ -46,3 +47,63 @@ def get_format_dict(anatomy, location_path): for name in root_names: format_dict["root"][name] = location_path return format_dict + + +def check_destination_path( + repre_id, + anatomy, + anatomy_data, + datetime_data, + template_name +): + """ Try to create destination path based on 'template_name'. + + In the case that path cannot be filled, template contains unmatched + keys, provide error message to filter out repre later. + + Args: + repre_id (str): Representation id. + anatomy (Anatomy): Project anatomy. + anatomy_data (dict): Template data to fill anatomy templates. + datetime_data (dict): Values with actual date. + template_name (str): Name of template which should be used from anatomy + templates. + Returns: + Dict[str, List[str]]: Report of happened errors. Key is message title + value is detailed information. + """ + + anatomy_data.update(datetime_data) + anatomy_filled = anatomy.format_all(anatomy_data) + dest_path = anatomy_filled["delivery"][template_name] + report_items = collections.defaultdict(list) + + if not dest_path.solved: + msg = ( + "Missing keys in Representation's context" + " for anatomy template \"{}\"." + ).format(template_name) + + sub_msg = ( + "Representation: {}
    " + ).format(repre_id) + + if dest_path.missing_keys: + keys = ", ".join(dest_path.missing_keys) + sub_msg += ( + "- Missing keys: \"{}\"
    " + ).format(keys) + + if dest_path.invalid_types: + items = [] + for key, value in dest_path.invalid_types.items(): + items.append("\"{}\" {}".format(key, str(value))) + + keys = ", ".join(items) + sub_msg += ( + "- Invalid value DataType: \"{}\"
    " + ).format(keys) + + report_items[msg].append(sub_msg) + + return report_items diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 0ea62510a4..1161636cb7 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -15,9 +15,9 @@ from openpype.lib import ( from openpype.pipeline.load import get_representation_path_with_anatomy from openpype.pipeline.delivery import ( get_format_dict, + check_destination_path, ) from openpype.lib.delivery import ( - check_destination_path, process_single_file, process_sequence, ) From eaff50b23e29dcb142557e3068c3031e1f1e268a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 15:07:45 +0200 Subject: [PATCH 0776/2550] Marked 'process_single_file' as deprecated and moved to pipeline delivery as 'deliver_single_file' --- openpype/lib/delivery.py | 61 +++++++------------ .../event_handlers_user/action_delivery.py | 4 +- openpype/pipeline/delivery.py | 59 ++++++++++++++++++ openpype/plugins/load/delivery.py | 6 +- 4 files changed, 87 insertions(+), 43 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 543c3d12e5..455401d0fd 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -179,53 +179,38 @@ def check_destination_path(repre_id, ) +@deprecated("openpype.pipeline.delivery.deliver_single_file") def process_single_file( src_path, repre, anatomy, template_name, anatomy_data, format_dict, report_items, log ): """Copy single file to calculated path based on template - Args: - src_path(str): path of source representation file - _repre (dict): full repre, used only in process_sequence, here only - as to share same signature - anatomy (Anatomy) - template_name (string): user selected delivery template name - anatomy_data (dict): data from repre to fill anatomy with - format_dict (dict): root dictionary with names and values - report_items (collections.defaultdict): to return error messages - log (Logger): for log printing - Returns: - (collections.defaultdict , int) + Args: + src_path(str): path of source representation file + _repre (dict): full repre, used only in process_sequence, here only + as to share same signature + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (Logger): for log printing + + Returns: + (collections.defaultdict , int) + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. 
""" - # Make sure path is valid for all platforms - src_path = os.path.normpath(src_path.replace("\\", "/")) - if not os.path.exists(src_path): - msg = "{} doesn't exist for {}".format(src_path, repre["_id"]) - report_items["Source file was not found"].append(msg) - return report_items, 0 + from openpype.pipeline.delivery import deliver_single_file - anatomy_filled = anatomy.format(anatomy_data) - if format_dict: - template_result = anatomy_filled["delivery"][template_name] - delivery_path = template_result.rootless.format(**format_dict) - else: - delivery_path = anatomy_filled["delivery"][template_name] - - # Backwards compatibility when extension contained `.` - delivery_path = delivery_path.replace("..", ".") - # Make sure path is valid for all platforms - delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) - - delivery_folder = os.path.dirname(delivery_path) - if not os.path.exists(delivery_folder): - os.makedirs(delivery_folder) - - log.debug("Copying single: {} -> {}".format(src_path, delivery_path)) - copy_file(src_path, delivery_path) - - return report_items, 1 + return deliver_single_file( + src_path, repre, anatomy, template_name, anatomy_data, format_dict, + report_items, log + ) def process_sequence( diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index 8b314d8f1d..fe91670c3d 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -21,9 +21,9 @@ from openpype.pipeline.load import get_representation_path_with_anatomy from openpype.pipeline.delivery import ( get_format_dict, check_destination_path, + deliver_single_file, ) from openpype.lib.delivery import ( - process_single_file, process_sequence ) @@ -596,7 +596,7 @@ class Delivery(BaseAction): self.log ) if not frame: - process_single_file(*args) + deliver_single_file(*args) else: process_sequence(*args) diff --git a/openpype/pipeline/delivery.py b/openpype/pipeline/delivery.py index 79667161a6..7c5121aa53 100644 --- a/openpype/pipeline/delivery.py +++ b/openpype/pipeline/delivery.py @@ -1,6 +1,8 @@ """Functions useful for delivery of published representations.""" import os import shutil +import glob +import clique import collections from openpype.lib import create_hard_link @@ -107,3 +109,60 @@ def check_destination_path( report_items[msg].append(sub_msg) return report_items + + +def deliver_single_file( + src_path, + repre, + anatomy, + template_name, + anatomy_data, + format_dict, + report_items, + log +): + """Copy single file to calculated path based on template + + Args: + src_path(str): path of source representation file + repre (dict): full repre, used only in process_sequence, here only + as to share same signature + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (logging.Logger): for log printing + + Returns: + (collections.defaultdict, int) + """ + + # Make sure path is valid for all platforms + src_path = os.path.normpath(src_path.replace("\\", "/")) + + if not os.path.exists(src_path): + msg = "{} doesn't exist for {}".format(src_path, repre["_id"]) + report_items["Source file was not found"].append(msg) + return report_items, 0 + + anatomy_filled = anatomy.format(anatomy_data) + if format_dict: 
+ template_result = anatomy_filled["delivery"][template_name] + delivery_path = template_result.rootless.format(**format_dict) + else: + delivery_path = anatomy_filled["delivery"][template_name] + + # Backwards compatibility when extension contained `.` + delivery_path = delivery_path.replace("..", ".") + # Make sure path is valid for all platforms + delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) + + delivery_folder = os.path.dirname(delivery_path) + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + log.debug("Copying single: {} -> {}".format(src_path, delivery_path)) + _copy_file(src_path, delivery_path) + + return report_items, 1 diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 1161636cb7..a028ac0a87 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -16,9 +16,9 @@ from openpype.pipeline.load import get_representation_path_with_anatomy from openpype.pipeline.delivery import ( get_format_dict, check_destination_path, + deliver_single_file, ) from openpype.lib.delivery import ( - process_single_file, process_sequence, ) @@ -208,7 +208,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): args[0] = src_path if frame: anatomy_data["frame"] = frame - new_report_items, uploaded = process_single_file(*args) + new_report_items, uploaded = deliver_single_file(*args) report_items.update(new_report_items) self._update_progress(uploaded) else: # fallback for Pype2 and representations without files @@ -217,7 +217,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): repre["context"]["frame"] = len(str(frame)) * "#" if not frame: - new_report_items, uploaded = process_single_file(*args) + new_report_items, uploaded = deliver_single_file(*args) else: new_report_items, uploaded = process_sequence(*args) report_items.update(new_report_items) From d3a7637d1561a411a40a7b74494d692e292f5c4b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 15:10:44 +0200 Subject: [PATCH 0777/2550] Marked 'process_sequence' as deprecated and moved to pipeline delivery as 'deliver_sequence' --- openpype/lib/delivery.py | 144 +++--------------- .../event_handlers_user/action_delivery.py | 6 +- openpype/pipeline/delivery.py | 144 +++++++++++++++++- openpype/plugins/load/delivery.py | 6 +- 4 files changed, 172 insertions(+), 128 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 455401d0fd..d44a4edb3f 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -213,6 +213,7 @@ def process_single_file( ) +@deprecated("openpype.pipeline.delivery.deliver_sequence") def process_sequence( src_path, repre, anatomy, template_name, anatomy_data, format_dict, report_items, log @@ -220,128 +221,33 @@ def process_sequence( """ For Pype2(mainly - works in 3 too) where representation might not contain files. - Uses listing physical files (not 'files' on repre as a)might not be - present, b)might not be reliable for representation and copying them. + Uses listing physical files (not 'files' on repre as a)might not be + present, b)might not be reliable for representation and copying them. - TODO Should be refactored when files are sufficient to drive all - representations. + TODO Should be refactored when files are sufficient to drive all + representations. 
- Args: - src_path(str): path of source representation file - repre (dict): full representation - anatomy (Anatomy) - template_name (string): user selected delivery template name - anatomy_data (dict): data from repre to fill anatomy with - format_dict (dict): root dictionary with names and values - report_items (collections.defaultdict): to return error messages - log (Logger): for log printing - Returns: - (collections.defaultdict , int) + Args: + src_path(str): path of source representation file + repre (dict): full representation + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (Logger): for log printing + + Returns: + (collections.defaultdict , int) + + Deprecated: + Function was moved to different location and will be removed + after 3.16.* release. """ - src_path = os.path.normpath(src_path.replace("\\", "/")) - def hash_path_exist(myPath): - res = myPath.replace('#', '*') - glob_search_results = glob.glob(res) - if len(glob_search_results) > 0: - return True - return False + from openpype.pipeline.delivery import deliver_sequence - if not hash_path_exist(src_path): - msg = "{} doesn't exist for {}".format(src_path, - repre["_id"]) - report_items["Source file was not found"].append(msg) - return report_items, 0 - - delivery_templates = anatomy.templates.get("delivery") or {} - delivery_template = delivery_templates.get(template_name) - if delivery_template is None: - msg = ( - "Delivery template \"{}\" in anatomy of project \"{}\"" - " was not found" - ).format(template_name, anatomy.project_name) - report_items[""].append(msg) - return report_items, 0 - - # Check if 'frame' key is available in template which is required - # for sequence delivery - if "{frame" not in delivery_template: - msg = ( - "Delivery template \"{}\" in anatomy of project \"{}\"" - "does not contain '{{frame}}' key to fill. Delivery of sequence" - " can't be processed." - ).format(template_name, anatomy.project_name) - report_items[""].append(msg) - return report_items, 0 - - dir_path, file_name = os.path.split(str(src_path)) - - context = repre["context"] - ext = context.get("ext", context.get("representation")) - - if not ext: - msg = "Source extension not found, cannot find collection" - report_items[msg].append(src_path) - log.warning("{} <{}>".format(msg, context)) - return report_items, 0 - - ext = "." 
+ ext - # context.representation could be .psd - ext = ext.replace("..", ".") - - src_collections, remainder = clique.assemble(os.listdir(dir_path)) - src_collection = None - for col in src_collections: - if col.tail != ext: - continue - - src_collection = col - break - - if src_collection is None: - msg = "Source collection of files was not found" - report_items[msg].append(src_path) - log.warning("{} <{}>".format(msg, src_path)) - return report_items, 0 - - frame_indicator = "@####@" - - anatomy_data["frame"] = frame_indicator - anatomy_filled = anatomy.format(anatomy_data) - - if format_dict: - template_result = anatomy_filled["delivery"][template_name] - delivery_path = template_result.rootless.format(**format_dict) - else: - delivery_path = anatomy_filled["delivery"][template_name] - - delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) - delivery_folder = os.path.dirname(delivery_path) - dst_head, dst_tail = delivery_path.split(frame_indicator) - dst_padding = src_collection.padding - dst_collection = clique.Collection( - head=dst_head, - tail=dst_tail, - padding=dst_padding + return deliver_sequence( + src_path, repre, anatomy, template_name, anatomy_data, format_dict, + report_items, log ) - - if not os.path.exists(delivery_folder): - os.makedirs(delivery_folder) - - src_head = src_collection.head - src_tail = src_collection.tail - uploaded = 0 - for index in src_collection.indexes: - src_padding = src_collection.format("{padding}") % index - src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) - src = os.path.normpath( - os.path.join(dir_path, src_file_name) - ) - - dst_padding = dst_collection.format("{padding}") % index - dst = "{}{}{}".format(dst_head, dst_padding, dst_tail) - log.debug("Copying single: {} -> {}".format(src, dst)) - copy_file(src, dst) - uploaded += 1 - - return report_items, uploaded diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index fe91670c3d..a400c8f5f0 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -22,9 +22,7 @@ from openpype.pipeline.delivery import ( get_format_dict, check_destination_path, deliver_single_file, -) -from openpype.lib.delivery import ( - process_sequence + deliver_sequence, ) @@ -598,7 +596,7 @@ class Delivery(BaseAction): if not frame: deliver_single_file(*args) else: - process_sequence(*args) + deliver_sequence(*args) return self.report(report_items) diff --git a/openpype/pipeline/delivery.py b/openpype/pipeline/delivery.py index 7c5121aa53..8cf9a43aac 100644 --- a/openpype/pipeline/delivery.py +++ b/openpype/pipeline/delivery.py @@ -125,7 +125,7 @@ def deliver_single_file( Args: src_path(str): path of source representation file - repre (dict): full repre, used only in process_sequence, here only + repre (dict): full repre, used only in deliver_sequence, here only as to share same signature anatomy (Anatomy) template_name (string): user selected delivery template name @@ -166,3 +166,145 @@ def deliver_single_file( _copy_file(src_path, delivery_path) return report_items, 1 + + +def deliver_sequence( + src_path, + repre, + anatomy, + template_name, + anatomy_data, + format_dict, + report_items, + log +): + """ For Pype2(mainly - works in 3 too) where representation might not + contain files. 
+ + Uses listing physical files (not 'files' on repre as a)might not be + present, b)might not be reliable for representation and copying them. + + TODO Should be refactored when files are sufficient to drive all + representations. + + Args: + src_path(str): path of source representation file + repre (dict): full representation + anatomy (Anatomy) + template_name (string): user selected delivery template name + anatomy_data (dict): data from repre to fill anatomy with + format_dict (dict): root dictionary with names and values + report_items (collections.defaultdict): to return error messages + log (logging.Logger): for log printing + + Returns: + (collections.defaultdict, int) + """ + + src_path = os.path.normpath(src_path.replace("\\", "/")) + + def hash_path_exist(myPath): + res = myPath.replace('#', '*') + glob_search_results = glob.glob(res) + if len(glob_search_results) > 0: + return True + return False + + if not hash_path_exist(src_path): + msg = "{} doesn't exist for {}".format( + src_path, repre["_id"]) + report_items["Source file was not found"].append(msg) + return report_items, 0 + + delivery_templates = anatomy.templates.get("delivery") or {} + delivery_template = delivery_templates.get(template_name) + if delivery_template is None: + msg = ( + "Delivery template \"{}\" in anatomy of project \"{}\"" + " was not found" + ).format(template_name, anatomy.project_name) + report_items[""].append(msg) + return report_items, 0 + + # Check if 'frame' key is available in template which is required + # for sequence delivery + if "{frame" not in delivery_template: + msg = ( + "Delivery template \"{}\" in anatomy of project \"{}\"" + "does not contain '{{frame}}' key to fill. Delivery of sequence" + " can't be processed." + ).format(template_name, anatomy.project_name) + report_items[""].append(msg) + return report_items, 0 + + dir_path, file_name = os.path.split(str(src_path)) + + context = repre["context"] + ext = context.get("ext", context.get("representation")) + + if not ext: + msg = "Source extension not found, cannot find collection" + report_items[msg].append(src_path) + log.warning("{} <{}>".format(msg, context)) + return report_items, 0 + + ext = "." 
+ ext + # context.representation could be .psd + ext = ext.replace("..", ".") + + src_collections, remainder = clique.assemble(os.listdir(dir_path)) + src_collection = None + for col in src_collections: + if col.tail != ext: + continue + + src_collection = col + break + + if src_collection is None: + msg = "Source collection of files was not found" + report_items[msg].append(src_path) + log.warning("{} <{}>".format(msg, src_path)) + return report_items, 0 + + frame_indicator = "@####@" + + anatomy_data["frame"] = frame_indicator + anatomy_filled = anatomy.format(anatomy_data) + + if format_dict: + template_result = anatomy_filled["delivery"][template_name] + delivery_path = template_result.rootless.format(**format_dict) + else: + delivery_path = anatomy_filled["delivery"][template_name] + + delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) + delivery_folder = os.path.dirname(delivery_path) + dst_head, dst_tail = delivery_path.split(frame_indicator) + dst_padding = src_collection.padding + dst_collection = clique.Collection( + head=dst_head, + tail=dst_tail, + padding=dst_padding + ) + + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + src_head = src_collection.head + src_tail = src_collection.tail + uploaded = 0 + for index in src_collection.indexes: + src_padding = src_collection.format("{padding}") % index + src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + src = os.path.normpath( + os.path.join(dir_path, src_file_name) + ) + + dst_padding = dst_collection.format("{padding}") % index + dst = "{}{}{}".format(dst_head, dst_padding, dst_tail) + log.debug("Copying single: {} -> {}".format(src, dst)) + _copy_file(src, dst) + uploaded += 1 + + return report_items, uploaded diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index a028ac0a87..89c24f2402 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -17,9 +17,7 @@ from openpype.pipeline.delivery import ( get_format_dict, check_destination_path, deliver_single_file, -) -from openpype.lib.delivery import ( - process_sequence, + deliver_sequence, ) @@ -219,7 +217,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): if not frame: new_report_items, uploaded = deliver_single_file(*args) else: - new_report_items, uploaded = process_sequence(*args) + new_report_items, uploaded = deliver_sequence(*args) report_items.update(new_report_items) self._update_progress(uploaded) From 19c7d2b8a150d4d052db9a9b4a813bd2922b272f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 15:20:21 +0200 Subject: [PATCH 0778/2550] marked 'copy_file' as deprecated --- openpype/lib/delivery.py | 3 +-- openpype/lib/path_tools.py | 11 ++++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index d44a4edb3f..efb542de75 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -1,8 +1,6 @@ """Functions useful for delivery action or loader""" import os import shutil -import glob -import clique import functools import warnings @@ -109,6 +107,7 @@ def path_from_representation(representation, anatomy): return get_representation_path_with_anatomy(representation, anatomy) +@deprecated def copy_file(src_path, dst_path): """Hardlink file if possible(to save space), copy if not""" from openpype.lib import create_hard_link # safer importing diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 45aa54d6cb..1835c71644 100644 --- 
a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -179,12 +179,12 @@ def get_version_from_path(file): """Find version number in file path string. Args: - file (string): file path + file (str): file path Returns: - v: version number in string ('001') - + str: version number in string ('001') """ + pattern = re.compile(r"[\._]v([0-9]+)", re.IGNORECASE) try: return pattern.findall(file)[-1] @@ -200,16 +200,17 @@ def get_last_version_from_path(path_dir, filter): """Find last version of given directory content. Args: - path_dir (string): directory path + path_dir (str): directory path filter (list): list of strings used as file name filter Returns: - string: file name with last version + str: file name with last version Example: last_version_file = get_last_version_from_path( "/project/shots/shot01/work", ["shot01", "compositing", "nk"]) """ + assert os.path.isdir(path_dir), "`path_dir` argument needs to be directory" assert isinstance(filter, list) and ( len(filter) != 0), "`filter` argument needs to be list and not empty" From cd09f23b968ee0162441c388172ec0027e825a5e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 16:44:51 +0200 Subject: [PATCH 0779/2550] removed unused import --- openpype/plugins/load/delete_old_versions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index 8c8546d9c8..b7ac015268 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -10,7 +10,7 @@ from Qt import QtWidgets, QtCore from openpype import style from openpype.client import get_versions, get_representations from openpype.modules import ModulesManager -from openpype.lib import StringTemplate, format_file_size +from openpype.lib import format_file_size from openpype.pipeline import load, AvalonMongoDB, Anatomy from openpype.pipeline.load import ( get_representation_path_with_anatomy, From 0d8cf12618cee76a5e144429a3459074b14e4adf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 17:14:22 +0200 Subject: [PATCH 0780/2550] define new source where publish templates are not defined in integrate plubin --- .../defaults/project_settings/global.json | 3 + .../defaults/project_settings/maya.json | 2 +- .../schemas/schema_global_publish.json | 4 ++ .../schemas/schema_global_tools.json | 57 +++++++++++++++++++ 4 files changed, 65 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 0ff9363ba7..3e00cd725e 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -414,6 +414,9 @@ "filter_families": [] } ] + }, + "publish": { + "template_name_profiles": [] } }, "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 28f6d23e4d..38063bc2c1 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -980,4 +980,4 @@ "ValidateNoAnimation": false } } -} +} \ No newline at end of file diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index e1aa230b49..c24c88d04a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -642,6 +642,10 @@ ] } }, + { + "type": "label", + "label": "NOTE: Publish template profiles settings were moved to Tools/Publish/Template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index f8c9482e5f..7dc44c2842 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -284,6 +284,63 @@ } } ] + }, + { + "type": "dict", + "key": "publish", + "label": "Publish", + "children": [ + { + "type": "label", + "label": "NOTE: For backwards compatibility can be value empty and in that case are used values from IntegrateAssetNew. This will change in future so please move all values here as soon as possible." + }, + { + "type": "list", + "key": "template_name_profiles", + "label": "Template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "label", + "label": "" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + } + ] } ] } From b14bb4b91e2bf7053a3e5d057f6b54d479535072 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 17:23:22 +0200 Subject: [PATCH 0781/2550] Fix typo for Maya argument `with_focus` -> `withFocus` --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 2 +- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 54ef09e060..871adda0c3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -128,7 +128,7 @@ class ExtractPlayblast(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: - panel = cmds.getPanel(with_focus=True) + panel = cmds.getPanel(withFocus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) cmds.setFocus(panel) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 01980578cf..9380da5128 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -100,9 +100,9 @@ class ExtractThumbnail(openpype.api.Extractor): # camera. 
if preset.pop("isolate_view", False) and instance.data.get("isolate"): preset["isolate"] = instance.data["setMembers"] - + # Show or Hide Image Plane - image_plane = instance.data.get("imagePlane", True) + image_plane = instance.data.get("imagePlane", True) if "viewport_options" in preset: preset["viewport_options"]["imagePlane"] = image_plane else: @@ -117,7 +117,7 @@ class ExtractThumbnail(openpype.api.Extractor): # Update preset with current panel setting # if override_viewport_options is turned off if not override_viewport_options: - panel = cmds.getPanel(with_focus=True) + panel = cmds.getPanel(withFocus=True) panel_preset = capture.parse_active_view() preset.update(panel_preset) cmds.setFocus(panel) From 2b6c4659237259b6c691dd2b5dc1db927b47fcd3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:03:30 +0200 Subject: [PATCH 0782/2550] added helper functions to get template name --- openpype/pipeline/publish/__init__.py | 4 ++ openpype/pipeline/publish/contants.py | 1 + openpype/pipeline/publish/lib.py | 97 ++++++++++++++++++++++++++- 3 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 openpype/pipeline/publish/contants.py diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index aa7fe0bdbf..a2aa61c4d5 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -10,6 +10,8 @@ from .publish_plugins import ( ) from .lib import ( + get_publish_template_name, + DiscoverResult, publish_plugins_discover, load_help_content_from_plugin, @@ -33,6 +35,8 @@ __all__ = ( "OpenPypePyblishPluginMixin", "OptionalPyblishPluginMixin", + "get_publish_template_name", + "DiscoverResult", "publish_plugins_discover", "load_help_content_from_plugin", diff --git a/openpype/pipeline/publish/contants.py b/openpype/pipeline/publish/contants.py new file mode 100644 index 0000000000..958675ecc1 --- /dev/null +++ b/openpype/pipeline/publish/contants.py @@ -0,0 +1 @@ +DEFAULT_PUBLISH_TEMPLATE = "publish" diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 9060a0bf4b..7c3ea22c06 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -8,8 +8,101 @@ import six import pyblish.plugin import pyblish.api -from openpype.lib import Logger -from openpype.settings import get_project_settings, get_system_settings +from openpype.lib import Logger, filter_profiles +from openpype.settings import ( + get_project_settings, + get_system_settings, +) + +from .contants import DEFAULT_PUBLISH_TEMPLATE + + +def get_template_name_profiles(project_name=None, project_settings=None): + """Receive profiles for publish template keys. + + At least one of arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings(Dic[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." 
+ )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["template_name_profiles"] + ) + if profiles: + return profiles + + # Use legacy approach for cases new settings are not filled yet for the + # project + return ( + project_settings + ["global"] + ["publish"] + ["IntegrateAssetNew"] + ["template_name_profiles"] + ) + + +def get_publish_template_name( + project_name, + host_name, + family, + task_name, + task_type, + project_settings=None, + logger=None +): + """Get template name which should be used for passed context. + + Publish templates are filtered by host name, family, task name and + task type. + + Default template which is used at if profiles are not available or profile + has empty value is defined by 'DEFAULT_PUBLISH_TEMPLATE' constant. + + Args: + project_name (str): Name of project where to look for settings. + host_name (str): Name of host integration. + family (str): Family for which should be found template. + task_name (str): Task name on which is intance working. + task_type (str): Task type on which is intance working. + project_setting (Dict[str, Any]): Prepared project settings. + logger (logging.Logger): Custom logger used for 'filter_profiles' + function. + + Returns: + str: Template name which should be used for integration. + """ + + template = None + filter_criteria = { + "hosts": host_name, + "families": family, + "tasks": task_name, + "task_types": task_type, + } + profiles = get_template_name_profiles(project_name, project_settings) + profile = filter_profiles(profiles, filter_criteria, logger=logger) + if profile: + template = profile["template_name"] + return template or DEFAULT_PUBLISH_TEMPLATE class DiscoverResult: From 96138a0b73ba6a3f9757283853da9cd1aa85c023 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:22:49 +0200 Subject: [PATCH 0783/2550] use new functions in integrators --- openpype/plugins/publish/integrate.py | 61 ++++++-------------- openpype/plugins/publish/integrate_legacy.py | 21 +++---- 2 files changed, 27 insertions(+), 55 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index f99c718f8a..56d2621015 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -26,7 +26,10 @@ from openpype.lib import source_hash from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io -from openpype.pipeline.publish import KnownPublishError +from openpype.pipeline.publish import ( + KnownPublishError, + get_publish_template_name, +) log = logging.getLogger(__name__) @@ -792,52 +795,26 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def get_template_name(self, instance): """Return anatomy template name to use for integration""" - # Define publish template name from profiles - filter_criteria = self.get_profile_filter_criteria(instance) - template_name_profiles = self._get_template_name_profiles(instance) - profile = filter_profiles( - template_name_profiles, - filter_criteria, - logger=self.log - ) - - if profile: - return profile["template_name"] - return self.default_template_name - - def _get_template_name_profiles(self, instance): - """Receive profiles for publish template keys. - - Reuse template name profiles from legacy integrator. 
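A short usage sketch of the helper introduced above; all literal context values are made up for illustration, and project settings are read internally when not passed in. When no profile matches, the function falls back to the "publish" template defined by DEFAULT_PUBLISH_TEMPLATE:

    from openpype.pipeline.publish import get_publish_template_name

    # Hypothetical context values
    template_name = get_publish_template_name(
        project_name="demo_project",
        host_name="maya",
        family="render",
        task_name="lighting",
        task_type="Lighting",
    )
    # -> template name from the first matching profile, or "publish"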
Goal is to move - the profile settings out of plugin settings but until that happens we - want to be able set it at one place and don't break backwards - compatibility (more then once). - """ - - return ( - instance.context.data["project_settings"] - ["global"] - ["publish"] - ["IntegrateAssetNew"] - ["template_name_profiles"] - ) - - def get_profile_filter_criteria(self, instance): - """Return filter criteria for `filter_profiles`""" # Anatomy data is pre-filled by Collectors - anatomy_data = instance.data["anatomyData"] + + project_name = legacy_io.active_project() # Task can be optional in anatomy data - task = anatomy_data.get("task", {}) + host_name = instance.context.data["hostName"] + anatomy_data = instance.data["anatomyData"] + family = anatomy_data["family"] + task_info = anatomy_data.get("task") or {} - # Return filter criteria - return { - "families": anatomy_data["family"], - "tasks": task.get("name"), - "task_types": task.get("type"), - "hosts": instance.context.data["hostName"], - } + return get_publish_template_name( + project_name, + host_name, + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], + logger=self.log + ) def get_rootless_path(self, anatomy, path): """Returns, if possible, path without absolute portion from root diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index b90b61f587..fedaae794a 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -33,6 +33,7 @@ from openpype.lib import ( TemplateUnsolved ) from openpype.pipeline import legacy_io +from openpype.pipeline.publish import get_publish_template_name # this is needed until speedcopy for linux is fixed if sys.platform == "win32": @@ -388,22 +389,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "tasks": task_name, - "hosts": instance.context.data["hostName"], - "task_types": task_type - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + template_name = get_publish_template_name( + project_name, + instance.context.data["hostName"], + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], logger=self.log ) - template_name = "publish" - if profile: - template_name = profile["template_name"] - published_representations = {} for idx, repre in enumerate(repres): published_files = [] From c7108ac7fbad46fe2aafe669498cb3755d9c7730 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:23:37 +0200 Subject: [PATCH 0784/2550] modified imports in integrators --- openpype/plugins/publish/integrate.py | 6 +++--- openpype/plugins/publish/integrate_legacy.py | 11 ++++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 56d2621015..8b60ea3b51 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -5,6 +5,9 @@ import copy import clique import six +from bson.objectid import ObjectId +import pyblish.api + from openpype.client.operations import ( OperationsSession, new_subset_document, @@ -14,8 +17,6 @@ from openpype.client.operations import ( prepare_version_update_data, prepare_representation_update_data, ) -from bson.objectid import ObjectId -import pyblish.api 
from openpype.client import ( get_representations, @@ -23,7 +24,6 @@ from openpype.client import ( get_version_by_name, ) from openpype.lib import source_hash -from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io from openpype.pipeline.publish import ( diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index fedaae794a..0e157c9d1f 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -15,7 +15,6 @@ from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne import pyblish.api -import openpype.api from openpype.client import ( get_asset_by_name, get_subset_by_id, @@ -25,12 +24,14 @@ from openpype.client import ( get_representations, get_archived_representations, ) -from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, create_hard_link, StringTemplate, - TemplateUnsolved + TemplateUnsolved, + source_hash, + filter_profiles, + get_local_site_id, ) from openpype.pipeline import legacy_io from openpype.pipeline.publish import get_publish_template_name @@ -1053,7 +1054,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for _src, dest in resources: path = self.get_rootless_path(anatomy, dest) dest = self.get_dest_temp_url(dest) - file_hash = openpype.api.source_hash(dest) + file_hash = source_hash(dest) if self.TMP_FILE_EXT and \ ',{}'.format(self.TMP_FILE_EXT) in file_hash: file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), @@ -1163,7 +1164,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def _get_sites(self, sync_project_presets): """Returns tuple (local_site, remote_site)""" - local_site_id = openpype.api.get_local_site_id() + local_site_id = get_local_site_id() local_site = sync_project_presets["config"]. 
\ get("active_site", "studio").strip() From c76a1a1dbbe8e705b06ebf02f37237cf7dda98fd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:52:43 +0200 Subject: [PATCH 0785/2550] added settings for hero templates and changed 'tasks' to 'task_names' --- .../defaults/project_settings/global.json | 3 +- .../schemas/schema_global_tools.json | 47 +++++++++++++++++-- 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 3e00cd725e..8692f95a04 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -416,7 +416,8 @@ ] }, "publish": { - "template_name_profiles": [] + "template_name_profiles": [], + "hero_template_name_profiles": [] } }, "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index 7dc44c2842..c919cd73c5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -303,9 +303,47 @@ "type": "dict", "children": [ { - "type": "label", - "label": "" + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + }, + { + "type": "list", + "key": "hero_template_name_profiles", + "label": "Hero template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ { "key": "families", "label": "Families", @@ -324,7 +362,7 @@ "type": "task-types-enum" }, { - "key": "tasks", + "key": "task_names", "label": "Task names", "type": "list", "object_type": "text" @@ -335,7 +373,8 @@ { "type": "text", "key": "template_name", - "label": "Template name" + "label": "Template name", + "tooltip": "Name of template from Anatomy templates" } ] } From 9d4416719b4a99d50b0d411b5548a8afa8072240 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:53:33 +0200 Subject: [PATCH 0786/2550] convert legacy to new settings by replacing 'tasks' with 'task_names' --- openpype/pipeline/publish/lib.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 7c3ea22c06..03dfbadfcc 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -2,6 +2,7 @@ import os import sys import types import inspect +import copy import xml.etree.ElementTree import six @@ -47,17 +48,23 @@ def get_template_name_profiles(project_name=None, project_settings=None): ["template_name_profiles"] ) if profiles: - return profiles + return copy.deepcopy(profiles) # Use legacy approach for cases new settings are not filled yet for the # 
project - return ( + legacy_profiles = ( project_settings ["global"] ["publish"] ["IntegrateAssetNew"] ["template_name_profiles"] ) + # Replace "tasks" key with "task_names" + profiles = [] + for profile in copy.deepcopy(legacy_profiles): + profile["task_names"] = profile.pop("tasks", []) + profiles.append(profile) + return profiles def get_publish_template_name( @@ -95,7 +102,7 @@ def get_publish_template_name( filter_criteria = { "hosts": host_name, "families": family, - "tasks": task_name, + "task_names": task_name, "task_types": task_type, } profiles = get_template_name_profiles(project_name, project_settings) From 63f5b5f2ab40a94c7496b8f08fa19204a5687b5a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:53:49 +0200 Subject: [PATCH 0787/2550] added ability to get hero version template name --- openpype/pipeline/publish/contants.py | 1 + openpype/pipeline/publish/lib.py | 64 +++++++++++++++++++++++++-- 2 files changed, 62 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/contants.py b/openpype/pipeline/publish/contants.py index 958675ecc1..169eca2e5c 100644 --- a/openpype/pipeline/publish/contants.py +++ b/openpype/pipeline/publish/contants.py @@ -1 +1,2 @@ DEFAULT_PUBLISH_TEMPLATE = "publish" +DEFAULT_HERO_PUBLISH_TEMPLATE = "hero" diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 03dfbadfcc..85a64da721 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -15,7 +15,10 @@ from openpype.settings import ( get_system_settings, ) -from .contants import DEFAULT_PUBLISH_TEMPLATE +from .contants import ( + DEFAULT_PUBLISH_TEMPLATE, + DEFAULT_HERO_PUBLISH_TEMPLATE, +) def get_template_name_profiles(project_name=None, project_settings=None): @@ -67,6 +70,49 @@ def get_template_name_profiles(project_name=None, project_settings=None): return profiles +def get_hero_template_name_profiles(project_name=None, project_settings=None): + """Receive profiles for hero publish template keys. + + At least one of arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings(Dic[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." + )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["hero_template_name_profiles"] + ) + if profiles: + return copy.deepcopy(profiles) + + # Use legacy approach for cases new settings are not filled yet for the + # project + return copy.deepcopy( + project_settings + ["global"] + ["publish"] + ["IntegrateHeroVersion"] + ["template_name_profiles"] + ) + + def get_publish_template_name( project_name, host_name, @@ -74,6 +120,7 @@ def get_publish_template_name( task_name, task_type, project_settings=None, + hero=False, logger=None ): """Get template name which should be used for passed context. 
@@ -105,11 +152,22 @@ def get_publish_template_name( "task_names": task_name, "task_types": task_type, } - profiles = get_template_name_profiles(project_name, project_settings) + if hero: + default_template = DEFAULT_HERO_PUBLISH_TEMPLATE + profiles = get_hero_template_name_profiles( + project_name, project_settings + ) + + else: + profiles = get_template_name_profiles( + project_name, project_settings + ) + default_template = DEFAULT_PUBLISH_TEMPLATE + profile = filter_profiles(profiles, filter_criteria, logger=logger) if profile: template = profile["template_name"] - return template or DEFAULT_PUBLISH_TEMPLATE + return template or default_template class DiscoverResult: From 1698aefcfbc887ba6f29fc59dbdfbc2595d5c6a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:57:00 +0200 Subject: [PATCH 0788/2550] use 'get_publish_template_name' in hero integration --- .../plugins/publish/integrate_hero_version.py | 41 ++++++++----------- 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 7d698ff98d..2938c61f8e 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -14,14 +14,12 @@ from openpype.client import ( get_archived_representations, get_representations, ) -from openpype.lib import ( - create_hard_link, - filter_profiles -) +from openpype.lib import create_hard_link from openpype.pipeline import ( schema, legacy_io, ) +from openpype.pipeline.publish import get_publish_template_name class IntegrateHeroVersion(pyblish.api.InstancePlugin): @@ -68,10 +66,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) return - template_key = self._get_template_key(instance) - anatomy = instance.context.data["anatomy"] project_name = anatomy.project_name + + template_key = self._get_template_key(project_name, instance) + if template_key not in anatomy.templates: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" @@ -527,30 +526,24 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): return publish_folder - def _get_template_key(self, instance): + def _get_template_key(self, project_name, instance): anatomy_data = instance.data["anatomyData"] - task_data = anatomy_data.get("task") or {} - task_name = task_data.get("name") - task_type = task_data.get("type") + task_info = anatomy_data.get("task") or {} host_name = instance.context.data["hostName"] + # TODO raise error if Hero not set? 
family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "task_names": task_name, - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + + return get_publish_template_name( + project_name, + host_name, + family, + task_info.get("name"), + task_info.get("type"), + project_settings=instance.context.data["project_settings"], + hero=True, logger=self.log ) - if profile: - template_name = profile["template_name"] - else: - template_name = self._default_template_name - return template_name def main_family_from_instance(self, instance): """Returns main family of entered instance.""" From 9b7384e1ae96b0f348911e9e163a23857dd2ca7f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:57:27 +0200 Subject: [PATCH 0789/2550] removed unused attribute --- openpype/plugins/publish/integrate_legacy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 0e157c9d1f..bbf30c9ab7 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -140,7 +140,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): integrated_file_sizes = {} # Attributes set by settings - template_name_profiles = None subset_grouping_profiles = None def process(self, instance): From c6a6e3b21a4aaa6c98450de918c78907fed91f5e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 19:07:34 +0200 Subject: [PATCH 0790/2550] added warning for access to legacy settings --- openpype/pipeline/publish/lib.py | 35 +++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 85a64da721..29c745ed15 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -21,7 +21,9 @@ from .contants import ( ) -def get_template_name_profiles(project_name=None, project_settings=None): +def get_template_name_profiles( + project_name, project_settings=None, logger=None +): """Receive profiles for publish template keys. At least one of arguments must be passed. @@ -62,6 +64,16 @@ def get_template_name_profiles(project_name=None, project_settings=None): ["IntegrateAssetNew"] ["template_name_profiles"] ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/template_name_profiles'." + ).format(project_name)) + # Replace "tasks" key with "task_names" profiles = [] for profile in copy.deepcopy(legacy_profiles): @@ -70,7 +82,9 @@ def get_template_name_profiles(project_name=None, project_settings=None): return profiles -def get_hero_template_name_profiles(project_name=None, project_settings=None): +def get_hero_template_name_profiles( + project_name, project_settings=None, logger=None +): """Receive profiles for hero publish template keys. At least one of arguments must be passed. 
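Both the regular and the hero profile lists use the entry shape defined by the settings schema above (families, hosts, task_types, task_names, template_name) and are matched with filter_profiles against the instance context. A minimal sketch with made-up values, assuming the usual filter_profiles behaviour where empty profile fields match anything:

    from openpype.lib import filter_profiles

    # Example profile as it could be filled in
    # 'project_settings/global/tools/publish/template_name_profiles'
    profiles = [
        {
            "families": ["render"],
            "hosts": ["maya"],
            "task_types": [],      # empty -> not filtered by task type
            "task_names": [],      # empty -> not filtered by task name
            "template_name": "maya_render",   # made-up template name
        },
    ]
    profile = filter_profiles(
        profiles,
        {
            "families": "render",
            "hosts": "maya",
            "task_names": "lighting",
            "task_types": "Lighting",
        },
    )
    template_name = profile["template_name"] if profile else "publish"

The hero profiles under 'hero_template_name_profiles' use the same entry shape; only the fallback differs ("hero" instead of "publish").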
@@ -104,13 +118,24 @@ def get_hero_template_name_profiles(project_name=None, project_settings=None): # Use legacy approach for cases new settings are not filled yet for the # project - return copy.deepcopy( + legacy_profiles = copy.deepcopy( project_settings ["global"] ["publish"] ["IntegrateHeroVersion"] ["template_name_profiles"] ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_hero_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to hero publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/" + "hero_template_name_profiles'." + ).format(project_name)) + return legacy_profiles def get_publish_template_name( @@ -155,12 +180,12 @@ def get_publish_template_name( if hero: default_template = DEFAULT_HERO_PUBLISH_TEMPLATE profiles = get_hero_template_name_profiles( - project_name, project_settings + project_name, project_settings, logger ) else: profiles = get_template_name_profiles( - project_name, project_settings + project_name, project_settings, logger ) default_template = DEFAULT_PUBLISH_TEMPLATE From dc4c32b6fc6aaeaedb6bb9d76c7b72d9f5c45c45 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:14:58 +0200 Subject: [PATCH 0791/2550] Fix representation data for workfile --- openpype/modules/deadline/abstract_submit_deadline.py | 6 +++--- .../deadline/plugins/publish/submit_maya_deadline.py | 3 --- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 577378335e..d198542370 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -554,9 +554,9 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): "Workfile (scene) must be published along") # determine published path from Anatomy. 
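A condensed sketch of what the fixed lookup resolves to: the workfile instance published along with the render provides the anatomy data, and the representation name and ext are filled separately (the replacement lines just below), since the two are not guaranteed to be the same value. Error reporting and the expected-files renaming of the real method are left out here:

    import os


    def get_published_workfile_path(context, anatomy):
        # Find the workfile instance that is published along with the render
        workfile_instance = None
        for instance in context:
            families = instance.data.get("families", [])
            if (
                "workfile" in families
                or instance.data["family"] == "workfile"
            ):
                workfile_instance = instance
                break
        if workfile_instance is None:
            return None

        # Fill the 'publish' template with the workfile representation data
        template_data = workfile_instance.data["anatomyData"]
        rep = workfile_instance.data["representations"][0]
        template_data["representation"] = rep["name"]
        template_data["ext"] = rep["ext"]
        template_data["comment"] = None

        anatomy_filled = anatomy.format(template_data)
        return os.path.normpath(anatomy_filled["publish"]["path"])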
template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("ext") - template_data["representation"] = rep - template_data["ext"] = rep + rep = i.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") template_data["comment"] = None anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled["publish"]["path"] diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 5a7d0b98c6..68e8eaaa73 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -258,10 +258,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) self._patch_workfile(filepath, patches) - # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") # todo: on self.use_published replace path for publishRenderMetadataFolder - # rep = i.data.get("representations")[0].get("name") - # if instance.data.get("publishRenderMetadataFolder"): # instance.data["publishRenderMetadataFolder"] = \ # instance.data["publishRenderMetadataFolder"].replace( From 67b8664be07fbb8a0061b7c8a62bf4073ef79307 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:15:20 +0200 Subject: [PATCH 0792/2550] Remove comment for patched file code refactor since it's already implemented --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 68e8eaaa73..07ed237c94 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -266,10 +266,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # self.log.info("Scene name was switched {} -> {}".format( # orig_scene, new_scene # )) - # # patch workfile is needed - # if filepath not in patched_files: - # patched_file = self._patch_workfile(filepath, patches) - # patched_files.append(patched_file) # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] From 2da8f036dee501be62da07d07fa9efafc9e8839f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:19:37 +0200 Subject: [PATCH 0793/2550] Refactor logic for less indentation --- .../deadline/abstract_submit_deadline.py | 103 +++++++++--------- 1 file changed, 52 insertions(+), 51 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index d198542370..55e16d8d21 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -546,65 +546,66 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): anatomy = self._instance.context.data['anatomy'] file_path = None for i in self._instance.context: - if "workfile" in i.data["families"] \ - or i.data["family"] == "workfile": - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - # determine published path from Anatomy. 
- template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0] - template_data["representation"] = rep.get("name") - template_data["ext"] = rep.get("ext") - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - file_path = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - file_path)) + is_workfile = + if not is_workfile: + continue - if not os.path.exists(file_path): - self.log.error("published scene does not exist!") - raise + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + # determine published path from Anatomy. + template_data = i.data.get("anatomyData") + rep = i.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") + template_data["comment"] = None + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + file_path = os.path.normpath(template_filled) - if not replace_in_path: - return file_path + self.log.info("Using published scene for render {}".format( + file_path)) - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext( - os.path.basename(file_path))[0] - orig_scene = os.path.splitext( - os.path.basename( - self._instance.context.data["currentFile"]))[0] - exp = self._instance.data.get("expectedFiles") + if not os.path.exists(file_path): + self.log.error("published scene does not exist!") + raise - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - str(f).replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - # [] might be too much here, TODO - self._instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( + if not replace_in_path: + return file_path + + # now we need to switch scene in expected files + # because token will now point to published + # scene file and that might differ from current one + new_scene = os.path.splitext(os.path.basename(file_path))[0] + orig_scene = os.path.splitext(os.path.basename( + self._instance.context.data["currentFile"]))[0] + exp = self._instance.data.get("expectedFiles") + + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + new_exp = {} + for aov, files in exp[0].items(): + replaced_files = [] + for f in files: + replaced_files.append( str(f).replace(orig_scene, new_scene) ) - self._instance.data["expectedFiles"] = new_exp + new_exp[aov] = replaced_files + # [] might be too much here, TODO + self._instance.data["expectedFiles"] = [new_exp] + else: + new_exp = [] + for f in exp: + new_exp.append( + str(f).replace(orig_scene, new_scene) + ) + self._instance.data["expectedFiles"] = new_exp - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) + self.log.info("Scene name was switched {} -> {}".format( + orig_scene, new_scene + )) return file_path From 21a319b10c46dec6efc2aaf6fe6ac1fe09bfc512 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 19:20:33 +0200 Subject: [PATCH 0794/2550] added 'deprecated' to integrator labels and added new location for hero templates as note --- 
.../projects_schema/schemas/schema_global_publish.json | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index c24c88d04a..2cb0cebf95 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -649,7 +649,7 @@ { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", @@ -754,10 +754,14 @@ "type": "list", "object_type": "text" }, + { + "type": "label", + "label": "NOTE: Hero publish template profiles settings were moved to Tools/Publish/Hero template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", From e81e3a7a1021db4e442aa3147ed03ccf8d92d8c6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:22:06 +0200 Subject: [PATCH 0795/2550] Fix missing line --- openpype/modules/deadline/abstract_submit_deadline.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 55e16d8d21..86eebc0d35 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -547,7 +547,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): file_path = None for i in self._instance.context: - is_workfile = + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) if not is_workfile: continue From 2933b37ef7711aac4e04284120b6c9b0ce2c9612 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:34:27 +0200 Subject: [PATCH 0796/2550] Refactor code for readability --- .../deadline/abstract_submit_deadline.py | 124 ++++++++++-------- 1 file changed, 68 insertions(+), 56 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 86eebc0d35..46baa9ee57 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -543,72 +543,84 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): published. """ - anatomy = self._instance.context.data['anatomy'] - file_path = None - for i in self._instance.context: - is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" - ) - if not is_workfile: - continue + def _get_workfile_instance(context): + """Find workfile instance in context""" + for i in context: - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - # determine published path from Anatomy. 
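The renaming of the expected output files further down in this refactor only swaps the scene-name token, because the frame paths were collected against the work scene while rendering will run on the published scene. A small worked example with made-up file names (the flat-list case; the AOV dictionary case applies the same replacement per AOV):

    import os


    def _clean_name(path):
        return os.path.splitext(os.path.basename(path))[0]


    # Made-up work scene vs. published scene names
    orig_scene = _clean_name("C:/work/sh010_lighting_v014.ma")
    new_scene = _clean_name("P:/publish/sh010_lighting_v014_publish.ma")

    expected_files = [
        "renders/sh010_lighting_v014/beauty/beauty.1001.exr",
        "renders/sh010_lighting_v014/beauty/beauty.1002.exr",
    ]
    expected_files = [
        path.replace(orig_scene, new_scene) for path in expected_files
    ]
    # -> "renders/sh010_lighting_v014_publish/beauty/beauty.1001.exr", ...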
- template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0] - template_data["representation"] = rep.get("name") - template_data["ext"] = rep.get("ext") - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - file_path = os.path.normpath(template_filled) + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) + if not is_workfile: + continue - self.log.info("Using published scene for render {}".format( - file_path)) + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") - if not os.path.exists(file_path): - self.log.error("published scene does not exist!") - raise + return i - if not replace_in_path: - return file_path + instance = self._instance + workfile_instance = _get_workfile_instance(instance.context) + if not workfile_instance: + return - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext(os.path.basename(file_path))[0] - orig_scene = os.path.splitext(os.path.basename( - self._instance.context.data["currentFile"]))[0] - exp = self._instance.data.get("expectedFiles") + # determine published path from Anatomy. + template_data = workfile_instance.data.get("anatomyData") + rep = workfile_instance.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") + template_data["comment"] = None - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - str(f).replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - # [] might be too much here, TODO - self._instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( + anatomy = instance.context.data['anatomy'] + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + file_path = os.path.normpath(template_filled) + + self.log.info("Using published scene for render {}".format(file_path)) + + if not os.path.exists(file_path): + self.log.error("published scene does not exist!") + raise + + if not replace_in_path: + return file_path + + # now we need to switch scene in expected files + # because token will now point to published + # scene file and that might differ from current one + def _clean_name(path): + return os.path.splitext(os.path.basename(path))[0] + + new_scene = _clean_name(file_path) + orig_scene = _clean_name(instance.context.data["currentFile"]) + expected_files = instance.data.get("expectedFiles") + + if isinstance(expected_files[0], dict): + # we have aovs and we need to iterate over them + new_exp = {} + for aov, files in expected_files[0].items(): + replaced_files = [] + for f in files: + replaced_files.append( str(f).replace(orig_scene, new_scene) ) - self._instance.data["expectedFiles"] = new_exp + new_exp[aov] = replaced_files + # [] might be too much here, TODO + instance.data["expectedFiles"] = [new_exp] + else: + new_exp = [] + for f in expected_files: + new_exp.append( + str(f).replace(orig_scene, new_scene) + ) + instance.data["expectedFiles"] = new_exp - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) + 
self.log.info("Scene name was switched {} -> {}".format( + orig_scene, new_scene + )) return file_path From c725ff5b42c2f3f248a6af8f835020c9efb23182 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:35:05 +0200 Subject: [PATCH 0797/2550] Move replacing in `publishRenderMetadataFolder` to abstract base class --- openpype/modules/deadline/abstract_submit_deadline.py | 6 ++++++ .../deadline/plugins/publish/submit_maya_deadline.py | 9 --------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 46baa9ee57..f56cf49f6d 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -618,6 +618,12 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): ) instance.data["expectedFiles"] = new_exp + metadata_folder = instance.data.get("publishRenderMetadataFolder") + if metadata_folder: + metadata_folder = metadata_folder.replace(orig_scene, + new_scene) + instance.data["publishRenderMetadataFolder"] = metadata_folder + self.log.info("Scene name was switched {} -> {}".format( orig_scene, new_scene )) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 07ed237c94..26c26a124c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -258,15 +258,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) self._patch_workfile(filepath, patches) - # todo: on self.use_published replace path for publishRenderMetadataFolder - # if instance.data.get("publishRenderMetadataFolder"): - # instance.data["publishRenderMetadataFolder"] = \ - # instance.data["publishRenderMetadataFolder"].replace( - # orig_scene, new_scene) - # self.log.info("Scene name was switched {} -> {}".format( - # orig_scene, new_scene - # )) - # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] default_render_file = instance.context.data.get('project_settings')\ From 23e652a51f41a6c65d6feba6edb0268f85feccb8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:54:36 +0200 Subject: [PATCH 0798/2550] Patch plug-in payload with settings --- .../maya/plugins/publish/collect_render.py | 1 + .../plugins/publish/submit_maya_deadline.py | 17 +++++++++-------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index ebda5e190d..768a53329f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -293,6 +293,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "source": filepath, "expectedFiles": full_exp_files, "publishRenderMetadataFolder": common_publish_meta_path, + "renderProducts": layer_render_products, "resolutionWidth": lib.get_attr_in_layer( "defaultResolution.width", layer=layer_name ), diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 26c26a124c..854a66eaa5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -215,16 +215,21 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderlayer = instance.data['setMembers'] # rs_beauty - self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa - # Output driver to render plugin_info = DeadlinePluginInfo( SceneFile=context.data["currentFile"], Version=cmds.about(version=True), + RenderLayer=renderlayer, + RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights") # noqa ) - return attr.asdict(plugin_info) + plugin_payload = attr.asdict(plugin_info) + + # Patching with pluginInfo from settings + for key, value in self.pluginInfo.items(): + plugin_payload[key] = value + + return plugin_payload def process_submission(self): @@ -338,10 +343,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) - # add jobInfo and pluginInfo variables from Settings - payload["JobInfo"].update(self.jobInfo) - payload["PluginInfo"].update(self.pluginInfo) - if instance.data.get("tileRendering"): # Prepare tiles data self._tile_render(instance, payload) From 4abddd027de9d4a1814ddff1b971bb9a99c47008 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:07 +0200 Subject: [PATCH 0799/2550] Use collected render products for image prefix --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 854a66eaa5..bb7ae380b6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -307,7 +307,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "jobname": jobname, "comment": comment, "output_filename_0": output_filename_0, - "render_variables": render_variables, "renderlayer": renderlayer, "workspace": workspace, "dirname": dirname, @@ -564,6 +563,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderer = self._instance.data["renderer"] + # Get layer prefix + render_products = self._instance.data["renderProducts"] + layer_metadata = render_products.layer_data + layer_prefix = layer_metadata.filePrefix + # This hack is here because of how Deadline handles Renderman version. 
# it considers everything with `renderman` set as version older than # Renderman 22, and so if we are using renderman > 21 we need to set @@ -583,7 +587,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "SceneFile": data["filepath"], # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), - "OutputFilePrefix": data["render_variables"]["filename_prefix"], # noqa: E501 + "OutputFilePrefix": layer_prefix, # Only render layers are considered renderable in this pipeline "UsingRenderLayers": True, From 6f5fcecfae7ec92bf2df80fb673bfff3e1049231 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:24 +0200 Subject: [PATCH 0800/2550] Use existing variable `renderer` --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index bb7ae380b6..c7f91905ea 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -573,7 +573,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Renderman 22, and so if we are using renderman > 21 we need to set # renderer string on the job to `renderman22`. We will have to change # this when Deadline releases new version handling this. - if self._instance.data["renderer"] == "renderman": + if renderer == "renderman": try: from rfm2.config import cfg # noqa except ImportError: From ae250c4a100dcc4474512913dbc858326ced3c8a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:40 +0200 Subject: [PATCH 0801/2550] Remove unused `comment` key-value --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index c7f91905ea..b2b877ab0e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -305,7 +305,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "filename": filename, "filepath": filepath, "jobname": jobname, - "comment": comment, "output_filename_0": output_filename_0, "renderlayer": renderlayer, "workspace": workspace, From 7af7f71edacea21a00c370e4ad1e92b2fe576b66 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:58:04 +0200 Subject: [PATCH 0802/2550] Remove logging of plugin name --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index b2b877ab0e..db796f25a9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -335,9 +335,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if export_job: payload["JobInfo"]["JobDependency0"] = export_job - plugin = payload["JobInfo"]["Plugin"] - self.log.info("using render plugin : {}".format(plugin)) - # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) From 
f91e33c0385762a7a73cca192f4d36716377ee1e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:21:30 +0200 Subject: [PATCH 0803/2550] More refactoring/cleanup (WIP) --- .../plugins/publish/submit_maya_deadline.py | 304 +++++++----------- 1 file changed, 110 insertions(+), 194 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index db796f25a9..8f12a9518f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -70,7 +70,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): tile_assembler_plugin = "OpenPypeTileAssembler" priority = 50 tile_priority = 50 - limit_groups = [] + limit = [] # limit groups jobInfo = {} pluginInfo = {} group = "none" @@ -112,23 +112,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.ChunkSize = instance.data.get("chunkSize", 10) job_info.Comment = context.data.get("comment") job_info.Priority = instance.data.get("priority", self.priority) + job_info.FramesPerTask = instance.data.get("framesPerTask", 1) if self.group != "none" and self.group: job_info.Group = self.group - if self.limit_groups: - job_info.LimitGroups = ",".join(self.limit_groups) + if self.limit: + job_info.LimitGroups = ",".join(self.limit) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ - os.path.dirname(output_filename_0).replace("\\", "/") - self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ - output_filename_0.replace("\\", "/") - - # Add options from RenderGlobals------------------------------------- + # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - self.payload_skeleton["JobInfo"].update(render_globals) + for key, value in render_globals: + setattr(job_info, key, value) keys = [ "FTRACK_API_KEY", @@ -140,7 +135,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS", "OPENPYPE_VERSION" ] # Add mongo url if it's enabled @@ -150,10 +144,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO: Taken from old publish class - test whether still needed environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) # to recognize job from PYPE for turning Event On/Off environment["OPENPYPE_RENDER_JOB"] = "1" @@ -166,7 +158,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) # to recognize job from PYPE for turning Event On/Off job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + job_info.EnvironmentKeyValue = "OPENPYPE_LOG_NO_COLORS=1" + # Optional, enable double-click to preview rendered + # frames from Deadline Monitor for i, filepath in enumerate(instance.data["files"]): dirname = os.path.dirname(filepath) fname = os.path.basename(filepath) @@ -213,14 +208,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - renderlayer = instance.data['setMembers'] # rs_beauty - - # Output driver to render plugin_info = DeadlinePluginInfo( - SceneFile=context.data["currentFile"], + 
SceneFile=self.scene_path, Version=cmds.about(version=True), - RenderLayer=renderlayer, - RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights") # noqa + RenderLayer=instance.data['setMembers'], + RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights"), # noqa + ProjectPath=context.data["workspaceDir"], + UsingRenderLayers=True, ) plugin_payload = attr.asdict(plugin_info) @@ -236,12 +230,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - # Generated by AbstractSubmitDeadline. The `job_info`, `plugin_info` - # and `aux_files` are the skeleton payloads that are the basis for - # all the maya submissions - job_info = self.job_info - plugin_info = self.plugin_info - aux_files = self.aux_files filepath = self.scene_path # publish if `use_publish` else workfile # TODO: Avoid the need for this logic here, needed for submit publish @@ -250,18 +238,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data["outputDir"] = output_dir instance.data["toBeRenderedOn"] = "deadline" - self.limit_groups = self.limit - # Patch workfile (only when use_published is enabled) if self.use_published: - patches = ( - context.data["project_settings"].get( - "deadline", {}).get( - "publish", {}).get( - "MayaSubmitDeadline", {}).get( - "scene_patches", {}) - ) - self._patch_workfile(filepath, patches) + self._patch_workfile() # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] @@ -271,22 +250,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): .get('default_render_image_folder') filename = os.path.basename(filepath) dirname = os.path.join(workspace, default_render_file) - renderlayer = instance.data['setMembers'] # rs_beauty - - # Get the variables depending on the renderer - # TODO: Find replacement logic for `get_renderer_variables` through - # what is collected for the render or is implemented in maya - # api `lib_renderproducts` - render_variables = get_renderer_variables(renderlayer, dirname) - filename_0 = render_variables["filename_0"] - if self.use_published: - new_scene = os.path.splitext(filename)[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - filename_0 = render_variables["filename_0"].replace( - orig_scene, new_scene) - - output_filename_0 = filename_0 # this is needed because renderman handles directory and file # prefixes separately @@ -301,16 +264,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pass # Fill in common data to payload ------------------------------------ + # TODO: Replace these with collected data from CollectRender payload_data = { "filename": filename, "filepath": filepath, - "jobname": jobname, "output_filename_0": output_filename_0, "renderlayer": renderlayer, - "workspace": workspace, "dirname": dirname, } + # Store output dir for unified publisher (filesequence) + instance.data["outputDir"] = os.path.dirname(output_filename_0) + # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in instance.data["families"] @@ -333,17 +298,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add export job as dependency -------------------------------------- if export_job: - payload["JobInfo"]["JobDependency0"] = export_job - - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] 
= os.path.dirname(output_filename_0) + job_info, _ = payload + job_info.JobDependency = export_job if instance.data.get("tileRendering"): # Prepare tiles data self._tile_render(instance, payload) else: # Submit main render job - self.submit(payload) + job_info, plugin_info = payload + self.submit(self.assemble_payload(job_info, plugin_info)) def _tile_render(self, instance, payload): @@ -546,18 +510,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data["jobBatchName"])) def _get_maya_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) - if not self.asset_dependencies: - job_info_ext = {} + job_info = copy.deepcopy(self.job_info) - else: - job_info_ext = { - # Asset dependency to wait for at least the scene file to sync. - "AssetDependency0": data["filepath"], - } - - renderer = self._instance.data["renderer"] + if self.asset_dependencies: + # Asset dependency to wait for at least the scene file to sync. + job_info.AssetDependency = self.scene_path # Get layer prefix render_products = self._instance.data["renderProducts"] @@ -569,6 +527,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Renderman 22, and so if we are using renderman > 21 we need to set # renderer string on the job to `renderman22`. We will have to change # this when Deadline releases new version handling this. + renderer = self._instance.data["renderer"] if renderer == "renderman": try: from rfm2.config import cfg # noqa @@ -580,29 +539,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderer = "renderman22" plugin_info = { - "SceneFile": data["filepath"], # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), "OutputFilePrefix": layer_prefix, - - # Only render layers are considered renderable in this pipeline - "UsingRenderLayers": True, - - # Render only this layer - "RenderLayer": data["renderlayer"], - - # Determine which renderer to use from the file itself - "Renderer": renderer, - - # Resolve relative references - "ProjectPath": data["workspace"], } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + + return job_info, plugin_info def _get_vray_export_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + job_info = copy.deepcopy(self.job_info) + + job_info.Name = self._job_info_label("Export") + + # Get V-Ray settings info to compute output path vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) @@ -610,34 +560,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): first_file = self.format_vray_output_filename(scene, template) first_file = "{}/{}".format(data["workspace"], first_file) output = os.path.dirname(first_file) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - "Plugin": self._instance.data.get( - "mayaRenderPlugin", "MayaPype"), - "FramesPerTask": self._instance.data.get("framesPerTask", 1) - } - - plugin_info_ext = { - # Renderer + plugin_info = { "Renderer": "vray", - # Input - "SceneFile": data["filepath"], "SkipExistingFrames": True, - "UsingRenderLayers": True, "UseLegacyRenderLayers": True, - "RenderLayer": data["renderlayer"], - "ProjectPath": data["workspace"], "OutputFilePath": output } - 
payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) - return payload + return job_info, plugin_info def _get_arnold_export_payload(self, data): @@ -653,76 +584,55 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): script = os.path.normpath(module_path) - payload = copy.deepcopy(self.payload_skeleton) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), + job_info = copy.deepcopy(self.job_info) + plugin_info = copy.deepcopy(self.plugin_info) - "Plugin": "Python", - "FramesPerTask": self._instance.data.get("framesPerTask", 1), - "Frames": 1 + job_info.Name = self._job_info_label("Export") + + # Force a single frame Python job + job_info.Plugin = "Python" + job_info.Frames = 1 + + # add required env vars for the export script + envs = { + "AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"), + "OPENPYPE_ASS_EXPORT_RENDER_LAYER": data["renderlayer"], + "OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path, + "OPENPYPE_ASS_EXPORT_OUTPUT": payload['JobInfo']['OutputFilename0'], # noqa + "OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa + "OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa + "OPENPYPE_ASS_EXPORT_STEP": 1 } + for key, value in envs.items(): + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, + value=value) - plugin_info_ext = { + plugin_info.update({ "Version": "3.6", "ScriptFile": script, "Arguments": "", "SingleFrameOnly": "True", - } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) + }) - envs = [ - v - for k, v in payload["JobInfo"].items() - if k.startswith("EnvironmentKeyValue") - ] - - # add app name to environment - envs.append( - "AVALON_APP_NAME={}".format(os.environ.get("AVALON_APP_NAME"))) - envs.append( - "OPENPYPE_ASS_EXPORT_RENDER_LAYER={}".format(data["renderlayer"])) - envs.append( - "OPENPYPE_ASS_EXPORT_SCENE_FILE={}".format(data["filepath"])) - envs.append( - "OPENPYPE_ASS_EXPORT_OUTPUT={}".format( - payload['JobInfo']['OutputFilename0'])) - envs.append( - "OPENPYPE_ASS_EXPORT_START={}".format( - int(self._instance.data["frameStartHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_END={}".format( - int(self._instance.data["frameEndHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_STEP={}".format(1)) - - for i, e in enumerate(envs): - payload["JobInfo"]["EnvironmentKeyValue{}".format(i)] = e - return payload + return job_info, plugin_info def _get_vray_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + # Job Info + job_info = copy.deepcopy(self.job_info) + job_info.Name = self._job_info_label("Render") + job_info.Plugin = "Vray" + job_info.OverrideTaskExtraInfoNames = False + + # Plugin Info vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) # "vrayscene//_/" - scene, _ = os.path.splitext(data["filename"]) + scene, _ = os.path.splitext(self.scene_path) first_file = self.format_vray_output_filename(scene, template) first_file = "{}/{}".format(data["workspace"], first_file) - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Vray", - "OverrideTaskExtraInfoNames": False, - } plugin_info = { "InputFilename": 
first_file, @@ -731,35 +641,28 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Width": self._instance.data["resolutionWidth"], "Height": self._instance.data["resolutionHeight"], - "OutputFilePath": payload["JobInfo"]["OutputDirectory0"], - "OutputFileName": payload["JobInfo"]["OutputFilename0"] + "OutputFilePath": job_info.OutputDirectory[0], + "OutputFileName": job_info.OutputFilename[0] } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + return job_info, plugin_info def _get_arnold_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + # Job Info + job_info = copy.deepcopy(self.job_info) + job_info.Name = self._job_info_label("Render") + job_info.Plugin = "Arnold" + job_info.OverrideTaskExtraInfoNames = False + + # Plugin Info ass_file, _ = os.path.splitext(data["output_filename_0"]) first_file = ass_file + ".ass" - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Arnold", - "OverrideTaskExtraInfoNames": False, - } - plugin_info = { "ArnoldFile": first_file, } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + return job_info, plugin_info def format_vray_output_filename(self, filename, template, dir=False): """Format the expected output file of the Export job. @@ -804,7 +707,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return result - def _patch_workfile(self, file, patches): + def _patch_workfile(self): # type: (str, dict) -> [str, None] """Patch Maya scene. @@ -818,19 +721,25 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "line": "line to insert" } - Args: - file (str): File to patch. - patches (dict): Dictionary defining patches. 
- - Returns: - str: Patched file path or None - """ - if not patches or os.path.splitext(file)[1].lower() != ".ma": + project_settings = self._instance.context.data["project_settings"] + patches = ( + project_settings.get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "scene_patches", {}) + ) + if not patches: + return + + if not os.path.splitext(self.scene_path)[1].lower() != ".ma": + self.log.debug("Skipping workfile patch since workfile is not " + ".ma file") return compiled_regex = [re.compile(p["regex"]) for p in patches] - with open(file, "r+") as pf: + with open(self.scene_path, "r+") as pf: scene_data = pf.readlines() for ln, line in enumerate(scene_data): for i, r in enumerate(compiled_regex): @@ -839,10 +748,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pf.seek(0) pf.writelines(scene_data) pf.truncate() - self.log.info( - "Applied {} patch to scene.".format( - patches[i]["name"])) - return file + self.log.info("Applied {} patch to scene.".format( + patches[i]["name"] + )) + + def _job_info_label(self, label): + return "{label} {job.Name} [{start}-{end}]".format( + label=label, + job=self.job_info, + start=int(self._instance.data["frameStartHandle"]), + end=int(self._instance.data["frameEndHandle"]), + ) def _format_tiles( From f9bbda244bee373dd3bfb025528923d061808525 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:22:19 +0200 Subject: [PATCH 0804/2550] More explicit PluginInfo name --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 8f12a9518f..87ef4e6db9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -38,7 +38,7 @@ from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s -class DeadlinePluginInfo(): +class MayaPluginInfo: SceneFile = attr.ib(default=None) # Input OutputFilePath = attr.ib(default=None) # Output directory and filename OutputFilePrefix = attr.ib(default=None) @@ -208,7 +208,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - plugin_info = DeadlinePluginInfo( + plugin_info = MayaPluginInfo( SceneFile=self.scene_path, Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], From ecf2a89081f19e14c65b0fd7b1992fe80519e983 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:39:11 +0200 Subject: [PATCH 0805/2550] More temp restructuring --- .../plugins/publish/submit_maya_deadline.py | 52 +++++++------------ 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 87ef4e6db9..a77ccd73d4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -273,9 +273,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "dirname": dirname, } - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_filename_0) - # Submit preceding export jobs ------------------------------------- export_job = None assert not 
all(x in instance.data["families"] @@ -326,26 +323,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True - assembly_payload = { - "AuxFiles": [], - "JobInfo": { - "BatchName": payload["JobInfo"]["BatchName"], - "Frames": 1, - "Name": "{} - Tile Assembly Job".format( - payload["JobInfo"]["Name"]), - "OutputDirectory0": - payload["JobInfo"]["OutputDirectory0"].replace( - "\\", "/"), - "Plugin": self.tile_assembler_plugin, - "MachineLimit": 1 - }, - "PluginInfo": { + assembly_job_info = copy.deepcopy(job_info) + assembly_job_info.Plugin = self.tile_assembler_plugin + assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( + job=job_info) + assembly_job_info.Frames = 1 + assembly_job_info.MachineLimit = 1 + assembly_job_info.Priority = instance.data.get("tile_priority", + self.tile_priority) + + assembly_plugin_info = { "CleanupTiles": 1, "ErrorOnMissing": True - } } - assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( - "tile_priority", self.tile_priority) frame_payloads = [] assembly_payloads = [] @@ -414,6 +404,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): file_index = 1 for file in assembly_files: frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_assembly_payload = copy.deepcopy(assembly_payload) new_assembly_payload["JobInfo"]["Name"] = \ "{} (Frame {})".format( @@ -434,7 +425,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - url = "{}/api/jobs".format(self.deadline_url) tiles_count = instance.data.get("tilesX") * instance.data.get( "tilesY") # noqa: E501 @@ -444,9 +434,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_id = response.json()["_id"] hash = response.json()["Props"]["Ex0"] + # Add assembly job dependencies for assembly_job in assembly_payloads: - if assembly_job["JobInfo"]["ExtraInfo0"] == hash: - assembly_job["JobInfo"]["JobDependency0"] = job_id + assembly_job_info = assembly_job["JobInfo"] + if assembly_job_info.ExtraInfo[0] == hash: + assembly_job.JobDependency = job_id for assembly_job in assembly_payloads: file = assembly_job["JobInfo"]["ExtraInfo1"] @@ -461,14 +453,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) ) + config_file_dir = os.path.dirname(config_file) try: - if not os.path.isdir(os.path.dirname(config_file)): - os.makedirs(os.path.dirname(config_file)) + if not os.path.isdir(config_file_dir): + os.makedirs(config_file_dir) except OSError: # directory is not available - self.log.warning( - "Path is unreachable: `{}`".format( - os.path.dirname(config_file))) + self.log.warning("Path is unreachable: " + "`{}`".format(config_file_dir)) # add config file as job auxFile assembly_job["AuxFiles"] = [config_file] @@ -505,10 +497,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): response.json()["_id"]) job_idx += 1 - instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] - self.log.info("Setting batch name on instance: {}".format( - instance.data["jobBatchName"])) - def _get_maya_payload(self, data): job_info = copy.deepcopy(self.job_info) From 6abafd0aca1ca06204f5e5bc11907a0a6a855900 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 00:41:30 +0200 Subject: [PATCH 0806/2550] Refactor tile logic --- 
.../plugins/publish/submit_maya_deadline.py | 120 ++++++++---------- 1 file changed, 55 insertions(+), 65 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index a77ccd73d4..920adf7e4a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -314,11 +314,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # if we have sequence of files, we need to create tile job for # every frame - job_info.TileJob = True job_info.TileJobTilesInX = instance.data.get("tilesX") job_info.TileJobTilesInY = instance.data.get("tilesY") + tiles_count = job_info.TileJobTilesInX * job_info.TileJobTilesInY + plugin_info["ImageHeight"] = instance.data.get("resolutionHeight") plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True @@ -334,7 +335,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): assembly_plugin_info = { "CleanupTiles": 1, - "ErrorOnMissing": True + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] } frame_payloads = [] @@ -367,81 +369,69 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): file_index = 1 for file in files: frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_payload = copy.deepcopy(payload) - new_payload["JobInfo"]["Name"] = \ - "{} (Frame {} - {} tiles)".format( + + new_job_info = copy.deepcopy(job_info) + new_job_info.Name = "{} (Frame {} - {} tiles)".format( payload["JobInfo"]["Name"], frame, instance.data.get("tilesX") * instance.data.get("tilesY") - # noqa: E501 - ) - self.log.info( - "... 
preparing job {}".format( - new_payload["JobInfo"]["Name"])) - new_payload["JobInfo"]["TileJobFrame"] = frame + ) + new_job_info.TileJobFrame = frame - tiles_data = _format_tiles( + new_plugin_info = copy.deepcopy(plugin_info) + + # Add tile data into job info and plugin info + tiles_out, _ = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), payload["PluginInfo"]["OutputFilePrefix"] - )[0] - new_payload["JobInfo"].update(tiles_data["JobInfo"]) - new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + ) + new_job_info.update(tiles_out["JobInfo"]) + new_plugin_info.update(tiles_out["PluginInfo"]) self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( ("{}_{}".format(file_index, file)).encode("utf-8")) frame_jobs[frame] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo1"] = file - frame_payloads.append(new_payload) - file_index += 1 + new_job_info.ExtraInfo[0] = job_hash.hexdigest() + new_job_info.ExtraInfo[1] = file - file_index = 1 - for file in assembly_files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - - new_assembly_payload = copy.deepcopy(assembly_payload) - new_assembly_payload["JobInfo"]["Name"] = \ - "{} (Frame {})".format( - assembly_payload["JobInfo"]["Name"], - frame) - new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( - REPL_FRAME_NUMBER, - "\\1{}\\3".format("#" * len(frame)), file) - - new_assembly_payload["PluginInfo"]["Renderer"] = \ - self._instance.data["renderer"] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[ - frame] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo1"] = file - assembly_payloads.append(new_assembly_payload) + frame_payloads.append(self.assemble_payload( + job_info=new_job_info, + plugin_info=new_plugin_info + )) file_index += 1 self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - tiles_count = instance.data.get("tilesX") * instance.data.get( - "tilesY") # noqa: E501 - - for tile_job in frame_payloads: - response = self.submit(tile_job) - + frame_tile_job_id = {} + for tile_job_payload in frame_payloads: + response = self.submit(tile_job_payload) job_id = response.json()["_id"] - hash = response.json()["Props"]["Ex0"] + frame_tile_job_id[frame] = job_id - # Add assembly job dependencies - for assembly_job in assembly_payloads: - assembly_job_info = assembly_job["JobInfo"] - if assembly_job_info.ExtraInfo[0] == hash: - assembly_job.JobDependency = job_id + assembly_jobs = [] + for i, file in enumerate(assembly_files): + frame = re.search(R_FRAME_NUMBER, file).group("frame") + + frame_assembly_job_info = copy.deepcopy(assembly_job_info) + frame_assembly_job_info.Name += " (Frame {})".format(frame) + frame_assembly_job_info.OutputFilename[0] = re.sub( + REPL_FRAME_NUMBER, + "\\1{}\\3".format("#" * len(frame)), file) + + hash = frame_jobs[frame] + tile_job_id = frame_tile_job_id[frame] + + frame_assembly_job_info.ExtraInfo[0] = hash + frame_assembly_job_info.ExtraInfo[1] = file + frame_assembly_job_info.JobDependency = tile_job_id - for assembly_job in assembly_payloads: - file = assembly_job["JobInfo"]["ExtraInfo1"] # write assembly job config files now = datetime.now() @@ -462,9 +452,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.warning("Path is unreachable: " "`{}`".format(config_file_dir)) - # add config file as job 
auxFile - assembly_job["AuxFiles"] = [config_file] - with open(config_file, "w") as cf: print("TileCount={}".format(tiles_count), file=cf) print("ImageFileName={}".format(file), file=cf) @@ -485,17 +472,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for k, v in tiles.items(): print("{}={}".format(k, v), file=cf) - job_idx = 1 - instance.data["assemblySubmissionJobs"] = [] - for ass_job in assembly_payloads: - self.log.info("submitting assembly job {} of {}".format( - job_idx, len(assembly_payloads) - )) - response = self.submit(ass_job) + payload = self.assemble_payload( + job_info=frame_assembly_job_info, + plugin_info=assembly_plugin_info.copy(), + # add config file as job auxFile + aux_files=[config_file] + ) - instance.data["assemblySubmissionJobs"].append( - response.json()["_id"]) - job_idx += 1 + self.log.info("submitting assembly job {} of {}".format( + i+1, len(assembly_payloads) + )) + response = self.submit(payload) + assembly_jobs.append(response.json()["_id"]) + + instance.data["assemblySubmissionJobs"] = assembly_jobs def _get_maya_payload(self, data): From a6002de641e1ad500192be433f620f17680ea056 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 00:58:23 +0200 Subject: [PATCH 0807/2550] Refactor _format_tiles for readability --- .../plugins/publish/submit_maya_deadline.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 920adf7e4a..00d8eb7859 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -800,23 +800,23 @@ def _format_tiles( tiles_x, tiles_y ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) + + # Job Info new_filename = "{}/{}{}".format( os.path.dirname(filename), tile_prefix, os.path.basename(filename) ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(tile)] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + out["JobInfo"]["OutputFilename{}Tile{}".format(index, tile)] = new_filename # noqa + # Plugin Info + out["PluginInfo"]["RegionPrefix{}".format(tile)] = "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) # noqa: E501 out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename @@ -828,4 +828,5 @@ def _format_tiles( cfg["Tile{}Height".format(tile)] = h_space tile += 1 + return out, cfg From a9fe806fec1a5e5ecf98327cfa4845b8b6d3edc0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:02:24 +0200 Subject: [PATCH 0808/2550] Calculate once --- .../plugins/publish/submit_maya_deadline.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 00d8eb7859..d0348119dc 100644 --- 
a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -800,6 +800,10 @@ def _format_tiles( tiles_x, tiles_y ) + top = int(height) - (tile_y * h_space) + bottom = int(height) - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 # Job Info new_filename = "{}/{}{}".format( @@ -811,19 +815,17 @@ def _format_tiles( # Plugin Info out["PluginInfo"]["RegionPrefix{}".format(tile)] = "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) # noqa: E501 - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + out["PluginInfo"]["RegionTop{}".format(tile)] = top + out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom + out["PluginInfo"]["RegionLeft{}".format(tile)] = left + out["PluginInfo"]["RegionRight{}".format(tile)] = right # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) - + cfg["Tile{}X".format(tile)] = left + cfg["Tile{}Y".format(tile)] = top cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From d7c72f97b30f85aca15b4a8148c140595f0b2a3a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:10:19 +0200 Subject: [PATCH 0809/2550] Batch submit assembly jobs --- .../plugins/publish/submit_maya_deadline.py | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index d0348119dc..265c0f79ec 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -339,7 +339,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Renderer": self._instance.data["renderer"] } - frame_payloads = [] assembly_payloads = [] R_FRAME_NUMBER = re.compile( @@ -358,14 +357,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): itertools.chain.from_iterable( [f for _, f in exp[0].items()])) if not files: - # if beauty doesn't exists, use first aov we found + # if beauty doesn't exist, use first aov we found files = exp[0].get(list(exp[0].keys())[0]) else: files = exp assembly_files = files + # Define frame tile jobs frame_jobs = {} - + frame_payloads = {} file_index = 1 for file in files: frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -400,22 +400,24 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): new_job_info.ExtraInfo[0] = job_hash.hexdigest() new_job_info.ExtraInfo[1] = file - frame_payloads.append(self.assemble_payload( + frame_payloads[frame] = self.assemble_payload( job_info=new_job_info, plugin_info=new_plugin_info - )) + ) file_index += 1 self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) + # Submit frame tile jobs frame_tile_job_id = {} - for tile_job_payload in frame_payloads: + for frame, tile_job_payload in 
frame_payloads.items(): response = self.submit(tile_job_payload) job_id = response.json()["_id"] frame_tile_job_id[frame] = job_id - assembly_jobs = [] + # Define assembly payloads + assembly_payloads = [] for i, file in enumerate(assembly_files): frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -478,14 +480,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # add config file as job auxFile aux_files=[config_file] ) + assembly_payloads.append(payload) + # Submit assembly jobs + assembly_job_ids = [] + for i, payload in enumerate(assembly_payloads): self.log.info("submitting assembly job {} of {}".format( i+1, len(assembly_payloads) )) response = self.submit(payload) - assembly_jobs.append(response.json()["_id"]) + assembly_job_ids.append(response.json()["_id"]) - instance.data["assemblySubmissionJobs"] = assembly_jobs + instance.data["assemblySubmissionJobs"] = assembly_job_ids def _get_maya_payload(self, data): From 965522585b98e441907480caee57af5dad92c2d2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:11:12 +0200 Subject: [PATCH 0810/2550] Remove redundant docstring --- .../deadline/plugins/publish/submit_maya_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 265c0f79ec..cd9f426977 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -51,16 +51,6 @@ class MayaPluginInfo: class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): - """Submit available render layers to Deadline. - - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". - - Attributes: - use_published (bool): Use published scene to render instead of the - one in work area. 
- - """ label = "Submit Render to Deadline" hosts = ["maya"] From 8af88e115723e6abc73fff279773043e4a520326 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:08:44 +0200 Subject: [PATCH 0811/2550] More cleanup --- .../plugins/publish/submit_maya_deadline.py | 98 +++++++------------ 1 file changed, 38 insertions(+), 60 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index cd9f426977..95140a082f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -86,8 +86,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.Name = "%s - %s" % (src_filename, instance.name) job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") - job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) + job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # Deadline requires integers in frame range frames = "{start}-{end}x{step}".format( @@ -134,25 +133,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO: Taken from old publish class - test whether still needed - environment["OPENPYPE_LOG_NO_COLORS"] = "1" # to recognize job from PYPE for turning Event On/Off environment["OPENPYPE_RENDER_JOB"] = "1" + environment["OPENPYPE_LOG_NO_COLORS"] = "1" - for key in keys: - val = environment.get(key) - if val: - job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val - ) - # to recognize job from PYPE for turning Event On/Off - job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" - job_info.EnvironmentKeyValue = "OPENPYPE_LOG_NO_COLORS=1" + for key, value in environment.items(): + if not value: + continue + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, + value=value) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - for i, filepath in enumerate(instance.data["files"]): + # Enable double-click to preview rendered frames from Deadline Monitor + for filepath in instance.data["files"]: dirname = os.path.dirname(filepath) fname = os.path.basename(filepath) job_info.OutputDirectory = dirname.replace("\\", "/") @@ -241,25 +233,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): filename = os.path.basename(filepath) dirname = os.path.join(workspace, default_render_file) - # this is needed because renderman handles directory and file - # prefixes separately - if self._instance.data["renderer"] == "renderman": - dirname = os.path.dirname(output_filename_0) - - # Create render folder ---------------------------------------------- - try: - # Ensure render folder exists - os.makedirs(dirname) - except OSError: - pass - # Fill in common data to payload ------------------------------------ # TODO: Replace these with collected data from CollectRender payload_data = { "filename": filename, - "filepath": filepath, - "output_filename_0": output_filename_0, - "renderlayer": renderlayer, "dirname": dirname, } @@ -299,8 +276,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): def _tile_render(self, instance, payload): # As collected by super process() - job_info = self.job_info - plugin_info = self.pluginInfo + job_info = 
copy.deepcopy(self.job_info) + plugin_info = copy.deepcopy(self.plugin_info) # if we have sequence of files, we need to create tile job for # every frame @@ -314,23 +291,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True - assembly_job_info = copy.deepcopy(job_info) - assembly_job_info.Plugin = self.tile_assembler_plugin - assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( - job=job_info) - assembly_job_info.Frames = 1 - assembly_job_info.MachineLimit = 1 - assembly_job_info.Priority = instance.data.get("tile_priority", - self.tile_priority) - - assembly_plugin_info = { - "CleanupTiles": 1, - "ErrorOnMissing": True, - "Renderer": self._instance.data["renderer"] - } - - assembly_payloads = [] - R_FRAME_NUMBER = re.compile( r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 REPL_FRAME_NUMBER = re.compile( @@ -407,7 +367,23 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): frame_tile_job_id[frame] = job_id # Define assembly payloads + assembly_job_info = copy.deepcopy(job_info) + assembly_job_info.Plugin = self.tile_assembler_plugin + assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( + job=job_info) + assembly_job_info.Frames = 1 + assembly_job_info.MachineLimit = 1 + assembly_job_info.Priority = instance.data.get("tile_priority", + self.tile_priority) + + assembly_plugin_info = { + "CleanupTiles": 1, + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] + } + assembly_payloads = [] + output_dir = self.job_info.OutputDirectory[0] for i, file in enumerate(assembly_files): frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -427,22 +403,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # write assembly job config files now = datetime.now() - config_file = os.path.join( - os.path.dirname(output_filename_0), + config_file = os.path.join(output_dir, "{}_config_{}.txt".format( os.path.splitext(file)[0], now.strftime("%Y_%m_%d_%H_%M_%S") ) ) - - config_file_dir = os.path.dirname(config_file) try: - if not os.path.isdir(config_file_dir): - os.makedirs(config_file_dir) + if not os.path.isdir(output_dir): + os.makedirs(output_dir) except OSError: # directory is not available self.log.warning("Path is unreachable: " - "`{}`".format(config_file_dir)) + "`{}`".format(output_dir)) with open(config_file, "w") as cf: print("TileCount={}".format(tiles_count), file=cf) @@ -567,17 +540,22 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.Plugin = "Python" job_info.Frames = 1 + renderlayer = self._instance.data["setMembers"] + # add required env vars for the export script envs = { "AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"), - "OPENPYPE_ASS_EXPORT_RENDER_LAYER": data["renderlayer"], + "OPENPYPE_ASS_EXPORT_RENDER_LAYER": renderlayer, "OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path, - "OPENPYPE_ASS_EXPORT_OUTPUT": payload['JobInfo']['OutputFilename0'], # noqa + "OPENPYPE_ASS_EXPORT_OUTPUT": job_info.OutputFilename[0], "OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa "OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa "OPENPYPE_ASS_EXPORT_STEP": 1 } for key, value in envs.items(): + if not value: + continue + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, value=value) From e8aa926cb7d338427ce7ba558f8ffa1609fde8ef Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: 
Tue, 30 Aug 2022 09:28:42 +0200 Subject: [PATCH 0812/2550] Move single use of in-line function to the class for readability --- .../deadline/abstract_submit_deadline.py | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index f56cf49f6d..a3db3feac9 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -544,26 +544,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): """ - def _get_workfile_instance(context): - """Find workfile instance in context""" - for i in context: - - is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" - ) - if not is_workfile: - continue - - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - - return i - instance = self._instance - workfile_instance = _get_workfile_instance(instance.context) + workfile_instance = self._get_workfile_instance(instance.context) if not workfile_instance: return @@ -689,3 +671,22 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self._instance.data["deadlineSubmissionJob"] = result return result["_id"] + + @staticmethod + def _get_workfile_instance(context): + """Find workfile instance in context""" + for i in context: + + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) + if not is_workfile: + continue + + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + + return i From c6a0a199e1810e5c63c484d5871d34d14594e5be Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:40:08 +0200 Subject: [PATCH 0813/2550] Cosmetics --- .../deadline/plugins/publish/submit_maya_deadline.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 95140a082f..873005e051 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -267,15 +267,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if instance.data.get("tileRendering"): # Prepare tiles data - self._tile_render(instance, payload) + self._tile_render(payload) else: # Submit main render job job_info, plugin_info = payload self.submit(self.assemble_payload(job_info, plugin_info)) - def _tile_render(self, instance, payload): + def _tile_render(self, payload): # As collected by super process() + instance = self._instance job_info = copy.deepcopy(self.job_info) plugin_info = copy.deepcopy(self.plugin_info) @@ -321,11 +322,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): frame = re.search(R_FRAME_NUMBER, file).group("frame") new_job_info = copy.deepcopy(job_info) - new_job_info.Name = "{} (Frame {} - {} tiles)".format( - payload["JobInfo"]["Name"], - frame, - instance.data.get("tilesX") * instance.data.get("tilesY") - ) + new_job_info.Name += " (Frame {} - {} tiles)".format(frame, + tiles_count) new_job_info.TileJobFrame = frame new_plugin_info = copy.deepcopy(plugin_info) From e429e2ec41d23c097b301c31485a38aea634d6a3 Mon Sep 17 00:00:00 2001 
From: Roy Nieterau Date: Tue, 30 Aug 2022 10:19:38 +0200 Subject: [PATCH 0814/2550] Remove json dump since `renderProducts` are not serializable --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 768a53329f..14aac2f206 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -360,7 +360,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): instance.data["label"] = label instance.data["farm"] = True instance.data.update(data) - self.log.debug("data: {}".format(json.dumps(data, indent=4))) def parse_options(self, render_globals): """Get all overrides with a value, skip those without. From 40a4262916043b364dbd697b30713a1154692da3 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Tue, 30 Aug 2022 08:20:59 +0000 Subject: [PATCH 0815/2550] [Automated] Bump version --- CHANGELOG.md | 18 +++++++----------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 9 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a8e962085..0a7d93711a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.14.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.1-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD) @@ -12,33 +12,37 @@ **🆕 New features** - Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) +- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662) **🚀 Enhancements** +- General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) - Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) - Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) -- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) - Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) **🐛 Bug fixes** +- Maya: Fix typo in getPanel argument `with\_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753) +- General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) - General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) +- Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) - Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) - Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) - Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) - Settings: Fix project 
overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) - Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) - Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) **🔀 Refactored code** +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) @@ -105,10 +109,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) -**🆕 New features** - -- Support for mutliple installed versions - 3.13 [\#3605](https://github.com/pypeclub/OpenPype/pull/3605) - **🚀 Enhancements** - Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) @@ -123,14 +123,10 @@ - General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) - Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) - Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) -- AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) -- Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) -- TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) **🔀 Refactored code** - General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) -- General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) **Merged pull requests:** diff --git a/openpype/version.py b/openpype/version.py index 7894bb8bf4..a6ebacb910 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.1-nightly.3" +__version__ = "3.14.1-nightly.4" diff --git a/pyproject.toml b/pyproject.toml index 75e4721d7f..a2954f9c9c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.1-nightly.3" # OpenPype +version = "3.14.1-nightly.4" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 3a425fcf386ae96ead5a8b98744deb2aa90287ab Mon Sep 17 00:00:00 2001 From: OpenPype Date: Tue, 30 Aug 2022 08:33:14 +0000 Subject: [PATCH 0816/2550] [Automated] Release --- CHANGELOG.md | 4 ++-- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a7d93711a..cee0183273 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.14.1-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...3.14.1) ### 📖 Documentation diff --git a/openpype/version.py b/openpype/version.py index a6ebacb910..963f9171e2 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.1-nightly.4" +__version__ = "3.14.1" diff --git a/pyproject.toml b/pyproject.toml index a2954f9c9c..2fe2573baf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.1-nightly.4" # OpenPype +version = "3.14.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 9472cbe271af4be7c0328bf05200f44c521c58ae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 11:23:14 +0200 Subject: [PATCH 0817/2550] Fix submission --- .../collect_deadline_server_from_instance.py | 2 +- .../plugins/publish/submit_maya_deadline.py | 71 +++++++++---------- 2 files changed, 33 insertions(+), 40 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index a7035cd99f..9981bead3e 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -13,7 +13,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.415 label = "Deadline Webservice from the Instance" - families = ["rendering"] + families = ["rendering", "renderlayer"] def process(self, instance): instance.data["deadlineUrl"] = self._collect_deadline_url(instance) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 873005e051..2afa1883c4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -27,7 +27,6 @@ import itertools from collections import OrderedDict import attr -import clique from maya import cmds @@ -111,7 +110,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - for key, value in render_globals: + for key, value in render_globals.items(): setattr(job_info, key, value) keys = [ @@ -143,13 +142,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, value=value) - # Enable double-click to preview rendered frames from Deadline Monitor - for filepath in 
instance.data["files"]: - dirname = os.path.dirname(filepath) - fname = os.path.basename(filepath) - job_info.OutputDirectory = dirname.replace("\\", "/") - job_info.OutputFilename = fname - # Adding file dependencies. if self.asset_dependencies: dependencies = instance.context.data["fileDependencies"] @@ -160,28 +152,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add list of expected files to job # --------------------------------- exp = instance.data.get("expectedFiles") - - def _get_output_filename(files): - col, rem = clique.assemble(files) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ( - "Found multiple non related files " - "to render, don't know what to do " - "with them.") - return rem[0] - else: - return col[0].format('{head}{padding}{tail}') - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - for _aov, files in exp[0].items(): - output_file = _get_output_filename(files) - job_info.OutputFilename = output_file - else: - output_file = _get_output_filename(exp) - job_info.OutputFilename = output_file + for filepath in self._iter_expected_files(exp): + job_info.OutputDirectory = os.path.dirname(filepath) + job_info.OutputFilename = os.path.basename(filepath) return job_info @@ -194,6 +167,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): SceneFile=self.scene_path, Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], + Renderer=instance.data["renderer"], RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights"), # noqa ProjectPath=context.data["workspaceDir"], UsingRenderLayers=True, @@ -216,7 +190,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # TODO: Avoid the need for this logic here, needed for submit publish # Store output dir for unified publisher (filesequence) - output_dir = os.path.dirname(instance.data["files"][0]) + expected_files = instance.data["expectedFiles"] + first_file = next(self._iter_expected_files(expected_files)) + output_dir = os.path.dirname(first_file) instance.data["outputDir"] = output_dir instance.data["toBeRenderedOn"] = "deadline" @@ -247,17 +223,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Vray Scene and Ass Scene options are mutually exclusive") if "vrayscene" in instance.data["families"]: + self.log.debug("Submitting V-Ray scene render..") vray_export_payload = self._get_vray_export_payload(payload_data) export_job = self.submit(vray_export_payload) payload = self._get_vray_render_payload(payload_data) elif "assscene" in instance.data["families"]: + self.log.debug("Submitting Arnold .ass standalone render..") ass_export_payload = self._get_arnold_export_payload(payload_data) export_job = self.submit(ass_export_payload) payload = self._get_arnold_render_payload(payload_data) else: + self.log.debug("Submitting MayaBatch render..") payload = self._get_maya_payload(payload_data) # Add export job as dependency -------------------------------------- @@ -274,6 +253,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.submit(self.assemble_payload(job_info, plugin_info)) def _tile_render(self, payload): + """Submit as tile render per frame with dependent assembly jobs.""" # As collected by super process() instance = self._instance @@ -315,7 +295,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): assembly_files = files # Define frame 
tile jobs - frame_jobs = {} + frame_file_hash = {} frame_payloads = {} file_index = 1 for file in files: @@ -343,9 +323,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( ("{}_{}".format(file_index, file)).encode("utf-8")) - frame_jobs[frame] = job_hash.hexdigest() - new_job_info.ExtraInfo[0] = job_hash.hexdigest() + file_hash = job_hash.hexdigest() + frame_file_hash[frame] = file_hash + + new_job_info.ExtraInfo[0] = file_hash new_job_info.ExtraInfo[1] = file frame_payloads[frame] = self.assemble_payload( @@ -391,10 +373,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): REPL_FRAME_NUMBER, "\\1{}\\3".format("#" * len(frame)), file) - hash = frame_jobs[frame] + file_hash = frame_file_hash[frame] tile_job_id = frame_tile_job_id[frame] - frame_assembly_job_info.ExtraInfo[0] = hash + frame_assembly_job_info.ExtraInfo[0] = file_hash frame_assembly_job_info.ExtraInfo[1] = file frame_assembly_job_info.JobDependency = tile_job_id @@ -483,11 +465,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if int(rman_version.split(".")[0]) > 22: renderer = "renderman22" - plugin_info = { + plugin_info = copy.deepcopy(self.plugin_info) + plugin_info.update({ # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), "OutputFilePrefix": layer_prefix, - } + }) return job_info, plugin_info @@ -710,6 +693,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): end=int(self._instance.data["frameEndHandle"]), ) + @staticmethod + def _iter_expected_files(exp): + if isinstance(exp[0], dict): + for _aov, files in exp[0].items(): + for file in files: + yield file + else: + for file in exp: + yield file + def _format_tiles( filename, index, tiles_x, tiles_y, From fb0ec16bf3ff959a6f5471d03b788d2669ca9864 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 30 Aug 2022 11:54:50 +0200 Subject: [PATCH 0818/2550] fix return value of 'get_instance_staging_dir' --- openpype/pipeline/publish/lib.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 4f06f0e6fd..bb0bd7bd4b 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -443,8 +443,9 @@ def get_instance_staging_dir(instance): staging_dir = instance.data.get("stagingDir") if not staging_dir: - instance.data["stagingDir"] = os.path.normpath( + staging_dir = os.path.normpath( tempfile.mkdtemp(prefix="pyblish_tmp_") ) + instance.data["stagingDir"] = staging_dir return staging_dir From 4cdd23021b4b251c13ae6139b06dac951d9ba6e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 13:17:18 +0200 Subject: [PATCH 0819/2550] :bug: fix version resolution --- igniter/bootstrap_repos.py | 2 +- tools/create_zip.ps1 | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index c5003b062e..ccc9d4ac52 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -63,7 +63,7 @@ class OpenPypeVersion(semver.VersionInfo): """ staging = False path = None - _VERSION_REGEX = re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") # noqa: E501 + _VERSION_REGEX = 
re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?") # noqa: E501 _installed_version = None def __init__(self, *args, **kwargs): diff --git a/tools/create_zip.ps1 b/tools/create_zip.ps1 index 7b852b7c54..e5ebc7678b 100644 --- a/tools/create_zip.ps1 +++ b/tools/create_zip.ps1 @@ -96,6 +96,7 @@ Write-Color -Text ">>> ", "Cleaning cache files ... " -Color Green, Gray -NoNewl Get-ChildItem $openpype_root -Filter "__pycache__" -Force -Recurse| Where-Object {( $_.FullName -inotmatch '\\build\\' ) -and ( $_.FullName -inotmatch '\\.venv' )} | Remove-Item -Force -Recurse Get-ChildItem $openpype_root -Filter "*.pyc" -Force -Recurse | Where-Object {( $_.FullName -inotmatch '\\build\\' ) -and ( $_.FullName -inotmatch '\\.venv' )} | Remove-Item -Force Get-ChildItem $openpype_root -Filter "*.pyo" -Force -Recurse | Where-Object {( $_.FullName -inotmatch '\\build\\' ) -and ( $_.FullName -inotmatch '\\.venv' )} | Remove-Item -Force +Write-Color -Text "OK" -Color Green Write-Color -Text ">>> ", "Generating zip from current sources ..." -Color Green, Gray $env:PYTHONPATH="$($openpype_root);$($env:PYTHONPATH)" From e23ed382f8149648c404f15e18ea567bdceebfb0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 30 Aug 2022 13:23:07 +0200 Subject: [PATCH 0820/2550] added python 3.9 startup script for houdini --- .../hosts/houdini/startup/python3.9libs/pythonrc.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 openpype/hosts/houdini/startup/python3.9libs/pythonrc.py diff --git a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py new file mode 100644 index 0000000000..afadbffd3e --- /dev/null +++ b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py @@ -0,0 +1,10 @@ +from openpype.pipeline import install_host +from openpype.hosts.houdini import api + + +def main(): + print("Installing OpenPype ...") + install_host(api) + + +main() From 90b474b365385804cd25ad3d0a1db8e281552292 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 30 Aug 2022 13:39:44 +0200 Subject: [PATCH 0821/2550] removed OpenColorIO submodule --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 57d55a4091392b9da815850a5e72c84680b81bf4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 30 Aug 2022 13:49:24 +0200 Subject: [PATCH 0822/2550] remove unused variable --- openpype/pipeline/publish/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index bb0bd7bd4b..e7a3ef73fe 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -418,7 +418,7 @@ def context_plugin_should_run(plugin, context): bool: Context plugin should run based on valid instances. 
""" - for instance in filter_instances_for_context_plugin(plugin, context): + for _ in filter_instances_for_context_plugin(plugin, context): return True return False From 227b8405479e3a87b507e63a9b59fe473d7c9276 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:12:54 +0200 Subject: [PATCH 0823/2550] Refactor AbstractSubmitDeadline vars to allow easier access to indices --- .../deadline/abstract_submit_deadline.py | 251 +++++++----------- .../plugins/publish/submit_maya_deadline.py | 35 ++- 2 files changed, 115 insertions(+), 171 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index a3db3feac9..427faec115 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -9,6 +9,7 @@ import os from abc import abstractmethod import platform import getpass +from functools import partial from collections import OrderedDict import six @@ -66,6 +67,58 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) +class DeadlineIndexedVar(dict): + """ + + Allows to set and query values by integer indices: + Query: var[1] or var.get(1) + Set: var[1] = "my_value" + Append: var += "value" + + Note: Iterating the instance is not guarantueed to be the order of the + indices. To do so iterate with `sorted()` + + """ + def __init__(self, key): + self.__key = key + + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): value for index, value in sorted(self.items()) + } + + def update(self, data): + # Force the integer key check + for key, value in data.items(): + self.__setitem__(key, value) + + def __iadd__(self, other): + index = self.next_available_index() + self[index] = other + return self + + def __setitem__(self, key, value): + if not isinstance(key, int): + raise TypeError("Key must be an integer: {}".format(key)) + + if key < 0: + raise ValueError("Negative index can't be set: {}".format(key)) + dict.__setitem__(self, key, value) + + @attr.s class DeadlineJobInfo(object): """Mapping of all Deadline *JobInfo* attributes. @@ -218,24 +271,8 @@ class DeadlineJobInfo(object): # Environment # ---------------------------------------------- - _environmentKeyValue = attr.ib(factory=list) - - @property - def EnvironmentKeyValue(self): # noqa: N802 - """Return all environment key values formatted for Deadline. - - Returns: - dict: as `{'EnvironmentKeyValue0', 'key=value'}` - - """ - out = {} - for index, v in enumerate(self._environmentKeyValue): - out["EnvironmentKeyValue{}".format(index)] = v - return out - - @EnvironmentKeyValue.setter - def EnvironmentKeyValue(self, val): # noqa: N802 - self._environmentKeyValue.append(val) + EnvironmentKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + "EnvironmentKeyValue")) IncludeEnvironment = attr.ib(default=None) # Default: false UseJobEnvironmentOnly = attr.ib(default=None) # Default: false @@ -243,142 +280,29 @@ class DeadlineJobInfo(object): # Job Extra Info # ---------------------------------------------- - _extraInfos = attr.ib(factory=list) - _extraInfoKeyValues = attr.ib(factory=list) - - @property - def ExtraInfo(self): # noqa: N802 - """Return all ExtraInfo values formatted for Deadline. 
- - Returns: - dict: as `{'ExtraInfo0': 'value'}` - - """ - out = {} - for index, v in enumerate(self._extraInfos): - out["ExtraInfo{}".format(index)] = v - return out - - @ExtraInfo.setter - def ExtraInfo(self, val): # noqa: N802 - self._extraInfos.append(val) - - @property - def ExtraInfoKeyValue(self): # noqa: N802 - """Return all ExtraInfoKeyValue values formatted for Deadline. - - Returns: - dict: as {'ExtraInfoKeyValue0': 'key=value'}` - - """ - out = {} - for index, v in enumerate(self._extraInfoKeyValues): - out["ExtraInfoKeyValue{}".format(index)] = v - return out - - @ExtraInfoKeyValue.setter - def ExtraInfoKeyValue(self, val): # noqa: N802 - self._extraInfoKeyValues.append(val) + ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) + ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + "ExtraInfoKeyValue")) # Task Extra Info Names # ---------------------------------------------- OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false - _taskExtraInfos = attr.ib(factory=list) - - @property - def TaskExtraInfoName(self): # noqa: N802 - """Return all TaskExtraInfoName values formatted for Deadline. - - Returns: - dict: as `{'TaskExtraInfoName0': 'value'}` - - """ - out = {} - for index, v in enumerate(self._taskExtraInfos): - out["TaskExtraInfoName{}".format(index)] = v - return out - - @TaskExtraInfoName.setter - def TaskExtraInfoName(self, val): # noqa: N802 - self._taskExtraInfos.append(val) + TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar, + "TaskExtraInfoName")) # Output # ---------------------------------------------- - _outputFilename = attr.ib(factory=list) - _outputFilenameTile = attr.ib(factory=list) - _outputDirectory = attr.ib(factory=list) - - @property - def OutputFilename(self): # noqa: N802 - """Return all OutputFilename values formatted for Deadline. - - Returns: - dict: as `{'OutputFilename0': 'filename'}` - - """ - out = {} - for index, v in enumerate(self._outputFilename): - out["OutputFilename{}".format(index)] = v - return out - - @OutputFilename.setter - def OutputFilename(self, val): # noqa: N802 - self._outputFilename.append(val) - - @property - def OutputFilenameTile(self): # noqa: N802 - """Return all OutputFilename#Tile values formatted for Deadline. - - Returns: - dict: as `{'OutputFilenme#Tile': 'tile'}` - - """ - out = {} - for index, v in enumerate(self._outputFilenameTile): - out["OutputFilename{}Tile".format(index)] = v - return out - - @OutputFilenameTile.setter - def OutputFilenameTile(self, val): # noqa: N802 - self._outputFilenameTile.append(val) - - @property - def OutputDirectory(self): # noqa: N802 - """Return all OutputDirectory values formatted for Deadline. - - Returns: - dict: as `{'OutputDirectory0': 'dir'}` - - """ - out = {} - for index, v in enumerate(self._outputDirectory): - out["OutputDirectory{}".format(index)] = v - return out - - @OutputDirectory.setter - def OutputDirectory(self, val): # noqa: N802 - self._outputDirectory.append(val) + OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename")) + OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename{}Tile")) + OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputDirectory")) # Asset Dependency # ---------------------------------------------- - _assetDependency = attr.ib(factory=list) - - @property - def AssetDependency(self): # noqa: N802 - """Return all OutputDirectory values formatted for Deadline. 
- - Returns: - dict: as `{'OutputDirectory0': 'dir'}` - - """ - out = {} - for index, v in enumerate(self._assetDependency): - out["AssetDependency{}".format(index)] = v - return out - - @OutputDirectory.setter - def AssetDependency(self, val): # noqa: N802 - self._assetDependency.append(val) + AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar, + "AssetDependency")) # Tile Job # ---------------------------------------------- @@ -402,7 +326,7 @@ class DeadlineJobInfo(object): """ def filter_data(a, v): - if a.name.startswith("_"): + if isinstance(v, DeadlineIndexedVar): return False if v is None: return False @@ -410,16 +334,37 @@ class DeadlineJobInfo(object): serialized = attr.asdict( self, dict_factory=OrderedDict, filter=filter_data) - serialized.update(self.EnvironmentKeyValue) - serialized.update(self.ExtraInfo) - serialized.update(self.ExtraInfoKeyValue) - serialized.update(self.TaskExtraInfoName) - serialized.update(self.OutputFilename) - serialized.update(self.OutputFilenameTile) - serialized.update(self.OutputDirectory) - serialized.update(self.AssetDependency) + + # Custom serialize these attributes + for attribute in [ + self.EnvironmentKeyValue, + self.ExtraInfo, + self.ExtraInfoKeyValue, + self.TaskExtraInfoName, + self.OutputFilename, + self.OutputFilenameTile, + self.OutputDirectory, + self.AssetDependency + ]: + serialized.update(attribute.serialize()) + return serialized + def update(self, data): + """Update instance with data dict""" + for key, value in data.items(): + setattr(self, key, value) + + def __setattr__(self, key, value): + # Backwards compatibility: Allow appending to index vars by setting + # it on Job Info directly like: JobInfo.OutputFilename = filename + existing = getattr(self, key, None) + if isinstance(existing, DeadlineIndexedVar): + existing += value + return + + object.__setattr__(self, key, value) + @six.add_metaclass(AbstractMetaInstancePlugin) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 2afa1883c4..d979c92814 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -110,8 +110,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - for key, value in render_globals.items(): - setattr(job_info, key, value) + job_info.update(render_globals) keys = [ "FTRACK_API_KEY", @@ -257,8 +256,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # As collected by super process() instance = self._instance - job_info = copy.deepcopy(self.job_info) - plugin_info = copy.deepcopy(self.plugin_info) + + payload_job_info, payload_plugin_info = payload + job_info = copy.deepcopy(payload_job_info) + plugin_info = copy.deepcopy(payload_plugin_info) # if we have sequence of files, we need to create tile job for # every frame @@ -309,16 +310,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): new_plugin_info = copy.deepcopy(plugin_info) # Add tile data into job info and plugin info - tiles_out, _ = _format_tiles( + tiles_data = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - ) - 
new_job_info.update(tiles_out["JobInfo"]) - new_plugin_info.update(tiles_out["PluginInfo"]) + payload_plugin_info["OutputFilePrefix"] + )[0] + + new_job_info.update(tiles_data["JobInfo"]) + new_plugin_info.update(tiles_data["PluginInfo"]) self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( @@ -342,15 +344,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Submit frame tile jobs frame_tile_job_id = {} for frame, tile_job_payload in frame_payloads.items(): - response = self.submit(tile_job_payload) - job_id = response.json()["_id"] + job_id = self.submit(tile_job_payload) frame_tile_job_id[frame] = job_id # Define assembly payloads assembly_job_info = copy.deepcopy(job_info) assembly_job_info.Plugin = self.tile_assembler_plugin - assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( - job=job_info) + assembly_job_info.Name += " - Tile Assembly Job" assembly_job_info.Frames = 1 assembly_job_info.MachineLimit = 1 assembly_job_info.Priority = instance.data.get("tile_priority", @@ -411,10 +411,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] + payload_plugin_info["OutputFilePrefix"] )[1] - sorted(tiles) - for k, v in tiles.items(): + for k, v in sorted(tiles.items()): print("{}={}".format(k, v), file=cf) payload = self.assemble_payload( @@ -431,8 +430,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.info("submitting assembly job {} of {}".format( i+1, len(assembly_payloads) )) - response = self.submit(payload) - assembly_job_ids.append(response.json()["_id"]) + assembly_job_id = self.submit(payload) + assembly_job_ids.append(assembly_job_id) instance.data["assemblySubmissionJobs"] = assembly_job_ids From a7293f2a4f6a30297eea15297f5b25100e11e9f1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:13:14 +0200 Subject: [PATCH 0824/2550] Fix indentation --- openpype/modules/deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 427faec115..e1bdcb10d9 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -295,7 +295,7 @@ class DeadlineJobInfo(object): OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, "OutputFilename")) OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename{}Tile")) + "OutputFilename{}Tile")) OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, "OutputDirectory")) From 91a3d8494bf8b65fe37560a02018edf59433caa6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:13:39 +0200 Subject: [PATCH 0825/2550] Disable aux files for now since it's not supported by Deadline Webservice --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index d979c92814..7694e80e9a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -419,8 +419,9 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): payload = self.assemble_payload( job_info=frame_assembly_job_info, plugin_info=assembly_plugin_info.copy(), + # todo: aux file transfers don't work with deadline webservice # add config file as job auxFile - aux_files=[config_file] + # aux_files=[config_file] ) assembly_payloads.append(payload) From 39d216797dacad00a3f42102e146c2806b9f8244 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:17:40 +0200 Subject: [PATCH 0826/2550] Force integer pixel values --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7694e80e9a..3fbff0153b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -765,10 +765,10 @@ def _format_tiles( tiles_x, tiles_y ) - top = int(height) - (tile_y * h_space) - bottom = int(height) - ((tile_y - 1) * h_space) - 1 - left = (tile_x - 1) * w_space - right = (tile_x * w_space) - 1 + top = int(height - (tile_y * h_space)) + bottom = int(height - ((tile_y - 1) * h_space) - 1) + left = int((tile_x - 1) * w_space) + right = int((tile_x * w_space) - 1) # Job Info new_filename = "{}/{}{}".format( From a7190a51ad74823c069c73dbe8f7ec0f4c6daba6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:19:52 +0200 Subject: [PATCH 0827/2550] Force integer pixel values --- .../plugins/publish/submit_maya_deadline.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7966861358..3ac9df07d6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -141,17 +141,21 @@ def _format_tiles( out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + top = int(height) - (tile_y * h_space) + bottom = int(height) - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 + + out["PluginInfo"]["RegionTop{}".format(tile)] = int(top) + out["PluginInfo"]["RegionBottom{}".format(tile)] = int(bottom) + out["PluginInfo"]["RegionLeft{}".format(tile)] = int(left) + out["PluginInfo"]["RegionRight{}".format(tile)] = int(right) cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) + cfg["Tile{}X".format(tile)] = int(left) + cfg["Tile{}Y".format(tile)] = int(top) cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From 2fb7cabca49bae51162e2a82dfdc7d225094ed36 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:39:28 +0200 
Subject: [PATCH 0828/2550] Shush hound --- .../deadline/abstract_submit_deadline.py | 4 ++-- .../plugins/publish/submit_maya_deadline.py | 21 +++++++++++-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index e1bdcb10d9..35b114da95 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -623,8 +623,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): for i in context: is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" ) if not is_workfile: continue diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3fbff0153b..1b69f8b4e9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -357,14 +357,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.tile_priority) assembly_plugin_info = { - "CleanupTiles": 1, - "ErrorOnMissing": True, - "Renderer": self._instance.data["renderer"] + "CleanupTiles": 1, + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] } assembly_payloads = [] output_dir = self.job_info.OutputDirectory[0] - for i, file in enumerate(assembly_files): + for file in assembly_files: frame = re.search(R_FRAME_NUMBER, file).group("frame") frame_assembly_job_info = copy.deepcopy(assembly_job_info) @@ -383,7 +383,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # write assembly job config files now = datetime.now() - config_file = os.path.join(output_dir, + config_file = os.path.join( + output_dir, "{}_config_{}.txt".format( os.path.splitext(file)[0], now.strftime("%Y_%m_%d_%H_%M_%S") @@ -427,10 +428,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Submit assembly jobs assembly_job_ids = [] + num_assemblies = len(assembly_payloads) for i, payload in enumerate(assembly_payloads): - self.log.info("submitting assembly job {} of {}".format( - i+1, len(assembly_payloads) - )) + self.log.info( + "submitting assembly job {} of {}".format(i + 1, + num_assemblies) + ) assembly_job_id = self.submit(payload) assembly_job_ids.append(assembly_job_id) @@ -682,7 +685,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pf.writelines(scene_data) pf.truncate() self.log.info("Applied {} patch to scene.".format( - patches[i]["name"] + patches[i]["name"] )) def _job_info_label(self, label): From e9e01e3163079bc9f6c48fe633aed92592928328 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:43:18 +0200 Subject: [PATCH 0829/2550] Use update method --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 1b69f8b4e9..9692b136e9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -69,8 +69,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # todo: test whether this works for existing 
production cases # where custom jobInfo was stored in the project settings - for key, value in self.jobInfo.items(): - setattr(job_info, key, value) + job_info.update(self.jobInfo) instance = self._instance context = instance.context From 6067b1effcca66198836b3519c1a2f9b6cd73872 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 16:02:57 +0200 Subject: [PATCH 0830/2550] :minus: delete avalon-core submodule --- repos/avalon-core | 1 - 1 file changed, 1 deletion(-) delete mode 160000 repos/avalon-core diff --git a/repos/avalon-core b/repos/avalon-core deleted file mode 160000 index 2fa14cea6f..0000000000 --- a/repos/avalon-core +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb From f9c214e435a53ecbf8b5f0aba57292511a1e0873 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 17:02:23 +0200 Subject: [PATCH 0831/2550] Only apply `RenderSetupIncludeLights` when value is not None --- .../deadline/plugins/publish/submit_maya_deadline.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3ac9df07d6..92c50c3e80 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -509,7 +509,15 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.payload_skeleton["JobInfo"]["Comment"] = comment self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa + # Only set RenderSetupIncludeLights when not None + rs_include_lights = instance.data.get("renderSetupIncludeLights") + if rs_include_lights is not None: + self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = ( + rs_include_lights + ) + else: + self.payload_skeleton["PluginInfo"].pop("RenderSetupIncludeLights") + # Adding file dependencies. 
dependencies = instance.context.data["fileDependencies"] dependencies.append(filepath) From 37b2d85aa762ed4908fe29e3a57a11aef8e459cf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 17:02:55 +0200 Subject: [PATCH 0832/2550] Fix indentation (shush hound) --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 92c50c3e80..0a18506bd4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -513,7 +513,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): rs_include_lights = instance.data.get("renderSetupIncludeLights") if rs_include_lights is not None: self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = ( - rs_include_lights + rs_include_lights ) else: self.payload_skeleton["PluginInfo"].pop("RenderSetupIncludeLights") From e07fdf2e9157aa96f8f28bc4945df462eeafd43d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 30 Aug 2022 17:48:46 +0200 Subject: [PATCH 0833/2550] added settings for nuke --- openpype/settings/defaults/project_settings/maya.json | 2 +- openpype/settings/defaults/project_settings/nuke.json | 3 +++ .../entities/schemas/projects_schema/schema_project_nuke.json | 4 ++++ .../schemas/schema_templated_workfile_build.json | 2 +- 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 28f6d23e4d..38063bc2c1 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -980,4 +980,4 @@ "ValidateNoAnimation": false } } -} +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index f40ec1fe9e..c3eda2cbb4 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -325,5 +325,8 @@ } ] }, + "templated_workfile_build": { + "profiles": [] + }, "filters": {} } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 03d67a57ba..7cf82b9e69 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -308,6 +308,10 @@ "type": "schema_template", "name": "template_workfile_options" }, + { + "type": "schema", + "name": "schema_templated_workfile_build" + }, { "type": "schema", "name": "schema_publish_gui_filter" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json index a591facf98..99a29beb27 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -17,7 +17,7 @@ "type": "task-types-enum" }, { - "key": "tasks", + "key": "task_names", "label": "Task names", "type": "list", "object_type": "text" From c6b8b9182587448e9024f2f1f31ec7f16b72135f 
Mon Sep 17 00:00:00 2001 From: murphy Date: Tue, 30 Aug 2022 18:06:19 +0200 Subject: [PATCH 0834/2550] Ftrack status fix typo prgoress -> progress there is a typo in status name in settings prgoress -> progress --- openpype/settings/defaults/project_settings/ftrack.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 41bed7751b..09b194e21c 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -56,7 +56,7 @@ "Not Ready" ], "__ignore__": [ - "in prgoress", + "in progress", "omitted", "on hold" ] From f2a1a11bec47855f1409b6620c618fa3bd89c550 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:41:57 +0200 Subject: [PATCH 0835/2550] :lipstick: add new publisher menu item --- .../hosts/houdini/startup/MainMenuCommon.xml | 10 ++--- openpype/tools/utils/host_tools.py | 37 +++++++++++++++++++ 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml index abfa3f136e..c08114b71b 100644 --- a/openpype/hosts/houdini/startup/MainMenuCommon.xml +++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml @@ -1,10 +1,10 @@ - + - + - + - + Date: Tue, 30 Aug 2022 18:42:44 +0200 Subject: [PATCH 0836/2550] :fire: remove workio workio integrated into host addon --- openpype/hosts/houdini/api/workio.py | 57 ---------------------------- 1 file changed, 57 deletions(-) delete mode 100644 openpype/hosts/houdini/api/workio.py diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py deleted file mode 100644 index 5f7efff333..0000000000 --- a/openpype/hosts/houdini/api/workio.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Host API required Work Files tool""" -import os - -import hou - - -def file_extensions(): - return [".hip", ".hiplc", ".hipnc"] - - -def has_unsaved_changes(): - return hou.hipFile.hasUnsavedChanges() - - -def save_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.save(file_name=filepath, - save_to_recent_files=True) - - return filepath - - -def open_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.load(filepath, - suppress_save_prompt=True, - ignore_load_warnings=False) - - return filepath - - -def current_file(): - - current_filepath = hou.hipFile.path() - if (os.path.basename(current_filepath) == "untitled.hip" and - not os.path.exists(current_filepath)): - # By default a new scene in houdini is saved in the current - # working directory as "untitled.hip" so we need to capture - # that and consider it 'not saved' when it's in that state. 
- return None - - return current_filepath - - -def work_root(session): - work_dir = session["AVALON_WORKDIR"] - scene_dir = session.get("AVALON_SCENEDIR") - if scene_dir: - return os.path.join(work_dir, scene_dir) - else: - return work_dir From 2f6a6cfc9a2676d3361e4fc11e0e182de2a4057d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:44:15 +0200 Subject: [PATCH 0837/2550] :alien: implement creator methods --- openpype/hosts/houdini/api/plugin.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 64abfe9ef9..fc36284a72 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -3,17 +3,17 @@ import sys import six from abc import ( - ABCMeta, - abstractmethod, - abstractproperty + ABCMeta ) import six import hou from openpype.pipeline import ( CreatorError, LegacyCreator, - Creator as NewCreator + Creator as NewCreator, + CreatedInstance ) +from openpype.hosts.houdini.api import list_instances, remove_instance from .lib import imprint @@ -97,10 +97,17 @@ class HoudiniCreator(NewCreator): _nodes = [] def collect_instances(self): - pass + for instance_data in list_instances(): + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) def update_instances(self, update_list): - pass + for created_inst, _changes in update_list: + imprint(created_inst.get("instance_id"), created_inst.data_to_store()) def remove_instances(self, instances): - pass \ No newline at end of file + for instance in instances: + remove_instance(instance) + self._remove_instance_from_context(instance) From 20e25e111bdd41b31415142d3f3fd74460ebbaaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:44:48 +0200 Subject: [PATCH 0838/2550] :alien: change houdini to host addon --- openpype/hosts/houdini/api/__init__.py | 32 +--- openpype/hosts/houdini/api/lib.py | 52 ++++-- openpype/hosts/houdini/api/pipeline.py | 167 +++++++++++------- .../houdini/startup/python2.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.9libs/pythonrc.py | 6 +- 6 files changed, 158 insertions(+), 111 deletions(-) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index fddf7ab98d..f29df021e1 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,24 +1,15 @@ from .pipeline import ( - install, - uninstall, - + HoudiniHost, ls, containerise, + list_instances, + remove_instance ) from .plugin import ( Creator, ) -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root -) - from .lib import ( lsattr, lsattrs, @@ -29,22 +20,15 @@ from .lib import ( __all__ = [ - "install", - "uninstall", + "HoudiniHost", "ls", "containerise", + "list_instances", + "remove_instance", "Creator", - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - # Utility functions "lsattr", "lsattrs", @@ -52,7 +36,3 @@ __all__ = [ "maintained_selection" ] - -# Backwards API compatibility -open = open_file -save = save_file diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index ab33fdc3f6..675f3afcb5 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,6 
+1,9 @@ +# -*- coding: utf-8 -*- +import sys import uuid import logging from contextlib import contextmanager +import json import six @@ -8,9 +11,11 @@ from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.context_tools import get_current_project_asset - import hou + +self = sys.modules[__name__] +self._parent = None log = logging.getLogger(__name__) @@ -29,23 +34,18 @@ def set_id(node, unique_id, overwrite=False): def get_id(node): - """ - Get the `cbId` attribute of the given node + """Get the `cbId` attribute of the given node. + Args: node (hou.Node): the name of the node to retrieve the attribute from Returns: - str + str: cbId attribute of the node. """ - if node is None: - return - - id = node.parm("id") - if node is None: - return - return id + if node is not None: + return node.parm("id") def generate_ids(nodes, asset_id=None): @@ -325,6 +325,11 @@ def imprint(node, data): label=key, num_components=1, default_value=(value,)) + elif isinstance(value, dict): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(json.dumps(value),)) else: raise TypeError("Unsupported type: %r" % type(value)) @@ -397,8 +402,20 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects - return {parameter.name(): parameter.eval() for - parameter in node.spareParms()} + data = {} + for parameter in node.spareParms(): + value = parameter.eval() + # test if value is json encoded dict + if isinstance(value, six.string_types) and \ + len(value) > 0 and value[0] == "{": + try: + value = json.loads(value) + except json.JSONDecodeError: + # not a json + pass + data[parameter.name()] = value + + return data @contextmanager @@ -477,4 +494,11 @@ def load_creator_code_to_asset( definition = definitions[0] # Store the source code into the PythonCook section of the asset. 
- definition.addSection("PythonCook", source) \ No newline at end of file + definition.addSection("PythonCook", source) + + +def get_main_window(): + """Acquire Houdini's main window""" + if self._parent is None: + self._parent = hou.ui.mainQtWindow() + return self._parent diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 2ae8a4dbf7..b8479a7b25 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -3,7 +3,10 @@ import sys import logging import contextlib -import hou +import hou # noqa + +from openpype.host import HostBase, IWorkfileHost, ILoadHost +from openpype.tools.utils import host_tools import pyblish.api @@ -35,70 +38,96 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -self = sys.modules[__name__] -self._has_been_setup = False -self._parent = None -self._events = dict() +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): + name = "houdini" + def __init__(self): + super(HoudiniHost, self).__init__() + self._op_events = {} + self._has_been_setup = False -def install(): - _register_callbacks() + def install(self): + pyblish.api.register_host("houdini") + pyblish.api.register_host("hython") + pyblish.api.register_host("hpython") - pyblish.api.register_host("houdini") - pyblish.api.register_host("hython") - pyblish.api.register_host("hpython") + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) + log.info("Installing callbacks ... ") + # register_event_callback("init", on_init) + self._register_callbacks() + register_event_callback("before.save", before_save) + register_event_callback("save", on_save) + register_event_callback("open", on_open) + register_event_callback("new", on_new) - log.info("Installing callbacks ... ") - # register_event_callback("init", on_init) - register_event_callback("before.save", before_save) - register_event_callback("save", on_save) - register_event_callback("open", on_open) - register_event_callback("new", on_new) + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled + ) - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled - ) + self._has_been_setup = True + # add houdini vendor packages + hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") - self._has_been_setup = True - # add houdini vendor packages - hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") + sys.path.append(hou_pythonpath) - sys.path.append(hou_pythonpath) + # Set asset settings for the empty scene directly after launch of Houdini + # so it initializes into the correct scene FPS, Frame Range, etc. + # todo: make sure this doesn't trigger when opening with last workfile + _set_context_settings() - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. - # todo: make sure this doesn't trigger when opening with last workfile - _set_context_settings() + def has_unsaved_changes(self): + return hou.hipFile.hasUnsavedChanges() + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] -def uninstall(): - """Uninstall Houdini-specific functionality of avalon-core. 
+ def save_workfile(self, dst_path=None): + # Force forwards slashes to avoid segfault + filepath = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=filepath, + save_to_recent_files=True) + return filepath - This function is called automatically on calling `api.uninstall()`. - """ + def open_workfile(self, filepath): + # Force forwards slashes to avoid segfault + filepath = filepath.replace("\\", "/") - pyblish.api.deregister_host("hython") - pyblish.api.deregister_host("hpython") - pyblish.api.deregister_host("houdini") + hou.hipFile.load(filepath, + suppress_save_prompt=True, + ignore_load_warnings=False) + return filepath -def _register_callbacks(): - for event in self._events.copy().values(): - if event is None: - continue + def get_current_workfile(self): + current_filepath = hou.hipFile.path() + if (os.path.basename(current_filepath) == "untitled.hip" and + not os.path.exists(current_filepath)): + # By default a new scene in houdini is saved in the current + # working directory as "untitled.hip" so we need to capture + # that and consider it 'not saved' when it's in that state. + return None - try: - hou.hipFile.removeEventCallback(event) - except RuntimeError as e: - log.info(e) + return current_filepath - self._events[on_file_event_callback] = hou.hipFile.addEventCallback( - on_file_event_callback - ) + def get_containers(self): + return ls() + + def _register_callbacks(self): + for event in self._op_events.copy().values(): + if event is None: + continue + + try: + hou.hipFile.removeEventCallback(event) + except RuntimeError as e: + log.info(e) + + self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback( + on_file_event_callback + ) def on_file_event_callback(event): @@ -112,22 +141,6 @@ def on_file_event_callback(event): emit_event("new") -def get_main_window(): - """Acquire Houdini's main window""" - if self._parent is None: - self._parent = hou.ui.mainQtWindow() - return self._parent - - -def teardown(): - """Remove integration""" - if not self._has_been_setup: - return - - self._has_been_setup = False - print("pyblish: Integration torn down successfully") - - def containerise(name, namespace, nodes, @@ -250,7 +263,7 @@ def on_open(): log.warning("Scene has outdated content.") # Get main window - parent = get_main_window() + parent = lib.get_main_window() if parent is None: log.info("Skipping outdated content pop-up " "because Houdini window can't be found.") @@ -370,3 +383,27 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): instance_node.bypass(not new_value) except hou.PermissionError as exc: log.warning("%s - %s", instance_node.path(), exc) + + +def list_instances(): + """List all publish instances in the scene.""" + return lib.lsattr("id", "pyblish.avalon.instance") + + +def remove_instance(instance): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer instance, + because it might contain valuable data for artist. 
+ + """ + nodes = instance[:] + if not nodes: + return + + # Assume instance node is first node + instance_node = nodes[0] + for parameter in instance_node.spareParms(): + if parameter.name() == "id" and \ + parameter.eval() == "pyblish.avalon.instance": + instance_node.removeSpareParmTuple(parameter) diff --git a/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py b/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() diff --git a/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() diff --git a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() From 8ce7d45dd9ff120c959e302636134ca29c8a7bb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:46:00 +0200 Subject: [PATCH 0839/2550] :construction: change to new creator style --- .../houdini/plugins/create/create_pointcache.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 27112260ad..052580b56f 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,14 +1,23 @@ +# -*- coding: utf-8 -*- from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api import list_instances +from openpype.pipeline import CreatedInstance class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - - name = "pointcache" + identifier = "pointcache" label = "Point Cache" family = "pointcache" icon = "gears" + def collect_instances(self): + for instance_data in list_instances(): + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + def create(self, subset_name, instance_data, pre_create_data): pass From 767f20ef1ba0fdc68084e3280b5518b654b3b424 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 20:02:01 +0200 Subject: [PATCH 0840/2550] Open main file 
of representation For image sequences this is the same first file as uses before --- openpype/plugins/load/open_file.py | 36 ++++++------------------------ 1 file changed, 7 insertions(+), 29 deletions(-) diff --git a/openpype/plugins/load/open_file.py b/openpype/plugins/load/open_file.py index f21cd07c7f..00b2ecd7c5 100644 --- a/openpype/plugins/load/open_file.py +++ b/openpype/plugins/load/open_file.py @@ -15,8 +15,8 @@ def open(filepath): subprocess.call(('xdg-open', filepath)) -class Openfile(load.LoaderPlugin): - """Open Image Sequence with system default""" +class OpenFile(load.LoaderPlugin): + """Open Image Sequence or Video with system default""" families = ["render2d"] representations = ["*"] @@ -27,32 +27,10 @@ class Openfile(load.LoaderPlugin): color = "orange" def load(self, context, name, namespace, data): - import clique - directory = os.path.dirname(self.fname) - pattern = clique.PATTERNS["frames"] + path = self.fname + if not os.path.exists(path): + raise RuntimeError("File not found: {}".format(path)) - files = os.listdir(directory) - representation = context["representation"] - - ext = representation["name"] - path = representation["data"]["path"] - - if ext in ["#"]: - collections, remainder = clique.assemble(files, - patterns=[pattern], - minimum_items=1) - - seqeunce = collections[0] - - first_image = list(seqeunce)[0] - filepath = os.path.normpath(os.path.join(directory, first_image)) - else: - file = [f for f in files - if ext in f - if "#" not in f][0] - filepath = os.path.normpath(os.path.join(directory, file)) - - self.log.info("Opening : {}".format(filepath)) - - open(filepath) + self.log.info("Opening : {}".format(path)) + open(path) From 1f8c7e8ea527adfd16bf209781e2997d03f3e189 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 20:48:23 +0200 Subject: [PATCH 0841/2550] Force integer division --- .../plugins/publish/submit_maya_deadline.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 0a18506bd4..ac9d5a3d79 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -117,8 +117,8 @@ def _format_tiles( tile = 0 out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y + w_space = width // tiles_x + h_space = height // tiles_y cfg["TilesCropped"] = "False" @@ -146,16 +146,16 @@ def _format_tiles( left = (tile_x - 1) * w_space right = (tile_x * w_space) - 1 - out["PluginInfo"]["RegionTop{}".format(tile)] = int(top) - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(bottom) - out["PluginInfo"]["RegionLeft{}".format(tile)] = int(left) - out["PluginInfo"]["RegionRight{}".format(tile)] = int(right) + out["PluginInfo"]["RegionTop{}".format(tile)] = top + out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom + out["PluginInfo"]["RegionLeft{}".format(tile)] = left + out["PluginInfo"]["RegionRight{}".format(tile)] = right cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = int(left) - cfg["Tile{}Y".format(tile)] = int(top) + cfg["Tile{}X".format(tile)] = left + cfg["Tile{}Y".format(tile)] = top cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From 
3c62ff63ad51f0b370fde3fb431c1579b2196268 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 31 Aug 2022 04:27:37 +0000 Subject: [PATCH 0842/2550] [Automated] Bump version --- CHANGELOG.md | 33 ++++++++++++++++++++------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cee0183273..7c8834dd49 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,27 @@ # Changelog +## [3.14.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) + +**🆕 New features** + +- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) + +**🐛 Bug fixes** + +- Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) + +**🔀 Refactored code** + +- General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) +- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) + ## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...3.14.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.1-nightly.4...3.14.1) ### 📖 Documentation @@ -44,7 +63,6 @@ - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) -- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) @@ -75,7 +93,6 @@ **🚀 Enhancements** - Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) -- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) - Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) - Maya: Disable rendering of all lights for render instances submitted through Deadline. 
[\#3661](https://github.com/pypeclub/OpenPype/pull/3661) - General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) @@ -112,27 +129,17 @@ **🚀 Enhancements** - Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) -- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) -- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) **🐛 Bug fixes** - Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) - Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) - Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) -- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) -- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) -- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) **🔀 Refactored code** - General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) -**Merged pull requests:** - -- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) -- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) - ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) diff --git a/openpype/version.py b/openpype/version.py index 963f9171e2..0c114b6060 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.1" +__version__ = "3.14.2-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 2fe2573baf..9ed1872eac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.1" # OpenPype +version = "3.14.2-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 67ee5b5710548a0d390d9aa34b13563da7bda30d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 31 Aug 2022 10:35:52 +0200 Subject: [PATCH 0843/2550] Format with signed numbers (include + or -) This allows negative offsets to be pasted --- .../plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 9fca1b5391..05899de5e1 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -453,7 +453,7 @@ class OpenPypeTileAssembler(DeadlinePlugin): # Swap to have input as foreground args.append("--swap") # Paste foreground to background - args.append("--paste +{}+{}".format(pos_x, pos_y)) + args.append("--paste {x:+d}{y:+d}".format(x=pos_x, y=pos_y)) args.append("-o") args.append(output_path) From 2397577dbdaf386c55d58ba5167aba70d08c24cb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 10:48:40 +0200 Subject: [PATCH 0844/2550] node temp file can create more then one tempfile at once --- openpype/hosts/nuke/api/lib.py | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index b14f1a1eb1..e51f1f7586 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2708,26 +2708,19 @@ def _duplicate_node_temp(): This is to avoid using clipboard for node duplication. """ - duplicate_node_temp_path = os.path.join( - tempfile.gettempdir(), - "openpype_nuke_duplicate_temp_{}".format(os.getpid()) + tmp_file = tempfile.NamedTemporaryFile( + mode="w", prefix="openpype_nuke_temp_", suffix=".nk", delete=False ) - - # This can happen only if 'duplicate_node' would be - if os.path.exists(duplicate_node_temp_path): - log.warning(( - "Temp file for node duplication already exists." - " Trying to remove {}" - ).format(duplicate_node_temp_path)) - os.remove(duplicate_node_temp_path) + tmp_file.close() + node_tempfile_path = tmp_file.name try: # Yield the path where node can be copied - yield duplicate_node_temp_path + yield node_tempfile_path finally: # Remove the file at the end - os.remove(duplicate_node_temp_path) + os.remove(node_tempfile_path) def duplicate_node(node): From c2fdf6d1d0ec9764717a962c291353a3c3634539 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 10:49:04 +0200 Subject: [PATCH 0845/2550] _duplicate_node_temp is public and renamed to node_tempfile --- openpype/hosts/nuke/api/lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e51f1f7586..6675e3ca5e 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2702,7 +2702,7 @@ class DirmapCache: @contextlib.contextmanager -def _duplicate_node_temp(): +def node_tempfile(): """Create a temp file where node is pasted during duplication. This is to avoid using clipboard for node duplication. 
@@ -2729,7 +2729,7 @@ def duplicate_node(node): # select required node for duplication node.setSelected(True) - with _duplicate_node_temp() as filepath: + with node_tempfile() as filepath: # copy selected to temp filepath nuke.nodeCopy(filepath) From 4cbc82778ea0aaa9fbeb333d4047b59544d68577 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 10:49:32 +0200 Subject: [PATCH 0846/2550] moved 'get_main_window' to nuke lib --- openpype/hosts/nuke/api/lib.py | 17 +++++++++++++++++ openpype/hosts/nuke/api/pipeline.py | 20 ++------------------ 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 6675e3ca5e..9f28194a93 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -76,6 +76,23 @@ class Context: _project_doc = None +def get_main_window(): + """Acquire Nuke's main window""" + if Context.main_window is None: + from Qt import QtWidgets + + top_widgets = QtWidgets.QApplication.topLevelWidgets() + name = "Foundry::UI::DockMainWindow" + for widget in top_widgets: + if ( + widget.inherits("QMainWindow") + and widget.metaObject().className() == name + ): + Context.main_window = widget + break + return Context.main_window + + class Knobby(object): """For creating knob which it's type isn't mapped in `create_knobs` diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c1cd8f771a..c680cd9119 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -26,6 +26,8 @@ from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop from .lib import ( + Context, + get_main_window, add_publish_knob, WorkfileSettings, process_workfile_builder, @@ -33,7 +35,6 @@ from .lib import ( check_inventory_versions, set_avalon_knob_data, read_avalon_data, - Context ) log = Logger.get_logger(__name__) @@ -53,23 +54,6 @@ if os.getenv("PYBLISH_GUI", None): pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) -def get_main_window(): - """Acquire Nuke's main window""" - if Context.main_window is None: - from Qt import QtWidgets - - top_widgets = QtWidgets.QApplication.topLevelWidgets() - name = "Foundry::UI::DockMainWindow" - for widget in top_widgets: - if ( - widget.inherits("QMainWindow") - and widget.metaObject().className() == name - ): - Context.main_window = widget - break - return Context.main_window - - def reload_config(): """Attempt to reload pipeline at run-time. 
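Taken together, the three commits above route Nuke node duplication through a
temporary .nk file instead of the clipboard and expose get_main_window() from
openpype.hosts.nuke.api.lib. A minimal usage sketch, assuming a running Nuke
session with the updated lib module importable (the selection handling around
it is illustrative only, not part of the patches):

    import nuke

    from openpype.hosts.nuke.api.lib import (
        duplicate_node,
        get_main_window,
    )

    # Duplicate the first selected node. Internally the node is copied to a
    # uniquely named temporary .nk file and pasted back, so the user's
    # clipboard is left untouched and concurrent duplications cannot collide.
    selected = nuke.selectedNodes()
    if selected:
        new_node = duplicate_node(selected[0])
        print("Duplicated {} -> {}".format(
            selected[0].name(), new_node.name()))

    # Acquire (and cache) Nuke's main window so custom Qt dialogs can be
    # parented to it instead of floating as orphan top-level windows.
    main_window = get_main_window()
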
From d31d004f67c62918ea48a66e982a1489d0ca49cb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 10:50:04 +0200 Subject: [PATCH 0847/2550] added few helper functions to lib --- openpype/hosts/nuke/api/lib.py | 97 ++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 9f28194a93..b8fbecd874 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2817,3 +2817,100 @@ def ls_img_sequence(path): } return False + + +def get_group_io_nodes(nodes): + """Get the input and the output of a group of nodes.""" + + if not nodes: + raise ValueError("there is no nodes in the list") + + input_node = None + output_node = None + + if len(nodes) == 1: + input_node = output_node = nodes[0] + + else: + for node in nodes: + if "Input" in node.name(): + input_node = node + + if "Output" in node.name(): + output_node = node + + if input_node is not None and output_node is not None: + break + + if input_node is None: + raise ValueError("No Input found") + + if output_node is None: + raise ValueError("No Output found") + return input_node, output_node + + +def get_extreme_positions(nodes): + """Get the 4 numbers that represent the box of a group of nodes.""" + + if not nodes: + raise ValueError("there is no nodes in the list") + + nodes_xpos = [n.xpos() for n in nodes] + \ + [n.xpos() + n.screenWidth() for n in nodes] + + nodes_ypos = [n.ypos() for n in nodes] + \ + [n.ypos() + n.screenHeight() for n in nodes] + + min_x, min_y = (min(nodes_xpos), min(nodes_ypos)) + max_x, max_y = (max(nodes_xpos), max(nodes_ypos)) + return min_x, min_y, max_x, max_y + + +def refresh_node(node): + """Correct a bug caused by the multi-threading of nuke. + + Refresh the node to make sure that it takes the desired attributes. + """ + + x = node.xpos() + y = node.ypos() + nuke.autoplaceSnap(node) + node.setXYpos(x, y) + + +def refresh_nodes(nodes): + for node in nodes: + refresh_node(node) + + +def get_names_from_nodes(nodes): + """Get list of nodes names. + + Args: + nodes(List[nuke.Node]): List of nodes to convert into names. + + Returns: + List[str]: Name of passed nodes. + """ + + return [ + node.name() + for node in nodes + ] + + +def get_nodes_by_names(names): + """Get list of nuke nodes based on their names. + + Args: + names (List[str]): List of node names to be found. + + Returns: + List[nuke.Node]: List of nodes found by name. + """ + + return [ + nuke.toNode(name) + for name in names + ] From 1da0f46930dacb69ff13cb0984ebfb2b341c6ceb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 31 Aug 2022 10:58:41 +0200 Subject: [PATCH 0848/2550] Fix docstring typos --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index ac9d5a3d79..e77c86ec43 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -103,13 +103,13 @@ def _format_tiles( filename (str): Filename to process as tiles. index (int): Index of that file if it is sequence. tiles_x (int): Number of tiles in X. - tiles_y (int): Number if tikes in Y. + tiles_y (int): Number of tiles in Y. width (int): Width resolution of final image. height (int): Height resolution of final image. prefix (str): Image prefix. 
Returns: - (dict, dict): Tuple of two dictionaires - first can be used to + (dict, dict): Tuple of two dictionaries - first can be used to extend JobInfo, second has tiles x, y, width and height used for assembler configuration. From f339eba67036260c819c364b844552ee32df220f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:39:22 +0200 Subject: [PATCH 0849/2550] copied template build logic from https://github.com/pypeclub/OpenPype/pull/3681 --- .../hosts/nuke/api/lib_template_builder.py | 206 +++++++ openpype/hosts/nuke/api/template_loader.py | 534 ++++++++++++++++++ 2 files changed, 740 insertions(+) create mode 100644 openpype/hosts/nuke/api/lib_template_builder.py create mode 100644 openpype/hosts/nuke/api/template_loader.py diff --git a/openpype/hosts/nuke/api/lib_template_builder.py b/openpype/hosts/nuke/api/lib_template_builder.py new file mode 100644 index 0000000000..b95a6edf7b --- /dev/null +++ b/openpype/hosts/nuke/api/lib_template_builder.py @@ -0,0 +1,206 @@ +from collections import OrderedDict + +from openpype.vendor.python.common import qargparse +from openpype.tools.utils.widgets import OptionDialog +from openpype.hosts.nuke.api.lib import imprint +import nuke + + +# To change as enum +build_types = ["context_asset", "linked_asset", "all_assets"] + + +def get_placeholder_attributes(node, enumerate=False): + list_atts = ['builder_type', 'family', 'representation', 'loader', + 'loader_args', 'order', 'asset', 'subset', + 'hierarchy', 'siblings', 'last_loaded'] + attributes = {} + for attr in node.knobs().keys(): + if attr in list_atts: + if enumerate: + try: + attributes[attr] = node.knob(attr).values() + except AttributeError: + attributes[attr] = node.knob(attr).getValue() + else: + attributes[attr] = node.knob(attr).getValue() + + return attributes + + +def delete_placeholder_attributes(node): + ''' + function to delete all extra placeholder attributes + ''' + extra_attributes = get_placeholder_attributes(node) + for attribute in extra_attributes.keys(): + try: + node.removeKnob(node.knob(attribute)) + except ValueError: + continue + + +def hide_placeholder_attributes(node): + ''' + function to hide all extra placeholder attributes + ''' + extra_attributes = get_placeholder_attributes(node) + for attribute in extra_attributes.keys(): + try: + node.knob(attribute).setVisible(False) + except ValueError: + continue + + +def create_placeholder(): + + args = placeholder_window() + + if not args: + return # operation canceled, no locator created + + placeholder = nuke.nodes.NoOp() + placeholder.setName('PLACEHOLDER') + placeholder.knob('tile_color').setValue(4278190335) + + # custom arg parse to force empty data query + # and still imprint them on placeholder + # and getting items when arg is of type Enumerator + options = OrderedDict() + for arg in args: + if not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + imprint(placeholder, options) + imprint(placeholder, {'is_placeholder': True}) + placeholder.knob('is_placeholder').setVisible(False) + + +def update_placeholder(): + placeholder = nuke.selectedNodes() + if not placeholder: + raise ValueError("No node selected") + if len(placeholder) > 1: + raise ValueError("Too many selected nodes") + placeholder = placeholder[0] + + args = placeholder_window(get_placeholder_attributes(placeholder)) + if not args: + return # operation canceled + # delete placeholder attributes + delete_placeholder_attributes(placeholder) + + options = OrderedDict() + for arg in args: + if 
not type(arg) == qargparse.Separator: + options[str(arg)] = arg._data.get("items") or arg.read() + imprint(placeholder, options) + + +def imprint_enum(placeholder, args): + """ + Imprint method doesn't act properly with enums. + Replacing the functionnality with this for now + """ + enum_values = {str(arg): arg.read() + for arg in args if arg._data.get("items")} + string_to_value_enum_table = { + build: i for i, build + in enumerate(build_types)} + attrs = {} + for key, value in enum_values.items(): + attrs[key] = string_to_value_enum_table[value] + + +def placeholder_window(options=None): + from openpype.hosts.nuke.api.pipeline import get_main_window + options = options or dict() + dialog = OptionDialog(parent=get_main_window()) + dialog.setWindowTitle("Create Placeholder") + + args = [ + qargparse.Separator("Main attributes"), + qargparse.Enum( + "builder_type", + label="Asset Builder Type", + default=options.get("builder_type", 0), + items=build_types, + help="""Asset Builder Type +Builder type describe what template loader will look for. + +context_asset : Template loader will look for subsets of +current context asset (Asset bob will find asset) + +linked_asset : Template loader will look for assets linked +to current context asset. +Linked asset are looked in avalon database under field "inputLinks" +""" + ), + qargparse.String( + "family", + default=options.get("family", ""), + label="OpenPype Family", + placeholder="ex: model, look ..."), + qargparse.String( + "representation", + default=options.get("representation", ""), + label="OpenPype Representation", + placeholder="ex: ma, abc ..."), + qargparse.String( + "loader", + default=options.get("loader", ""), + label="Loader", + placeholder="ex: ReferenceLoader, LightLoader ...", + help="""Loader + +Defines what openpype loader will be used to load assets. +Useable loader depends on current host's loader list. +Field is case sensitive. +"""), + qargparse.String( + "loader_args", + default=options.get("loader_args", ""), + label="Loader Arguments", + placeholder='ex: {"camera":"persp", "lights":True}', + help="""Loader + +Defines a dictionnary of arguments used to load assets. +Useable arguments depend on current placeholder Loader. +Field should be a valid python dict. Anything else will be ignored. +"""), + qargparse.Integer( + "order", + default=options.get("order", 0), + min=0, + max=999, + label="Order", + placeholder="ex: 0, 100 ... 
(smallest order loaded first)", + help="""Order + +Order defines asset loading priority (0 to 999) +Priority rule is : "lowest is first to load"."""), + qargparse.Separator( + "Optional attributes "), + qargparse.String( + "asset", + default=options.get("asset", ""), + label="Asset filter", + placeholder="regex filtering by asset name", + help="Filtering assets by matching field regex to asset's name"), + qargparse.String( + "subset", + default=options.get("subset", ""), + label="Subset filter", + placeholder="regex filtering by subset name", + help="Filtering assets by matching field regex to subset's name"), + qargparse.String( + "hierarchy", + default=options.get("hierarchy", ""), + label="Hierarchy filter", + placeholder="regex filtering by asset's hierarchy", + help="Filtering assets by matching field asset's hierarchy") + ] + dialog.create(args) + if not dialog.exec_(): + return None + + return args diff --git a/openpype/hosts/nuke/api/template_loader.py b/openpype/hosts/nuke/api/template_loader.py new file mode 100644 index 0000000000..861498d2e5 --- /dev/null +++ b/openpype/hosts/nuke/api/template_loader.py @@ -0,0 +1,534 @@ +from openpype.hosts.nuke.api.lib_template_builder import ( + delete_placeholder_attributes, get_placeholder_attributes, + hide_placeholder_attributes) +from openpype.lib.abstract_template_loader import ( + AbstractPlaceholder, + AbstractTemplateLoader) +import nuke +from collections import defaultdict +from openpype.hosts.nuke.api.lib import ( + find_free_space_to_paste_nodes, get_extremes, get_io, imprint, + refresh_node, refresh_nodes, reset_selection, + get_names_from_nodes, get_nodes_from_names, select_nodes) +PLACEHOLDER_SET = 'PLACEHOLDERS_SET' + + +class NukeTemplateLoader(AbstractTemplateLoader): + """Concrete implementation of AbstractTemplateLoader for Nuke + + """ + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded. 
+ + Args: + path (str): A path to current template (usually given by + get_template_path implementation) + + Returns: + bool: Wether the template was succesfully imported or not + """ + + # TODO check if the template is already imported + + nuke.nodePaste(path) + reset_selection() + + return True + + def preload(self, placeholder, loaders_by_name, last_representation): + placeholder.data["nodes_init"] = nuke.allNodes() + placeholder.data["_id"] = last_representation['_id'] + + def populate_template(self, ignored_ids=None): + place_holders = self.get_template_nodes() + while len(place_holders) > 0: + super().populate_template(ignored_ids) + place_holders = self.get_template_nodes() + + @staticmethod + def get_template_nodes(): + placeholders = [] + allGroups = [nuke.thisGroup()] + while len(allGroups) > 0: + group = allGroups.pop(0) + for node in group.nodes(): + if "builder_type" in node.knobs().keys() and ( + 'is_placeholder' in node.knobs().keys() + and node.knob('is_placeholder').value()): + if 'empty' in node.knobs().keys()\ + and node.knob('empty').value(): + continue + placeholders += [node] + if isinstance(node, nuke.Group): + allGroups.append(node) + + return placeholders + + def update_missing_containers(self): + nodes_byId = {} + nodes_byId = defaultdict(lambda: [], nodes_byId) + + for n in nuke.allNodes(): + if 'id_rep' in n.knobs().keys(): + nodes_byId[n.knob('id_rep').getValue()] += [n.name()] + if 'empty' in n.knobs().keys(): + n.removeKnob(n.knob('empty')) + imprint(n, {"empty": False}) + for s in nodes_byId.values(): + n = None + for name in s: + n = nuke.toNode(name) + if 'builder_type' in n.knobs().keys(): + break + if n is not None and 'builder_type' in n.knobs().keys(): + + placeholder = nuke.nodes.NoOp() + placeholder.setName('PLACEHOLDER') + placeholder.knob('tile_color').setValue(4278190335) + attributes = get_placeholder_attributes(n, enumerate=True) + imprint(placeholder, attributes) + x = int(n.knob('x').getValue()) + y = int(n.knob('y').getValue()) + placeholder.setXYpos(x, y) + imprint(placeholder, {'nb_children': 1}) + refresh_node(placeholder) + + self.populate_template(self.get_loaded_containers_by_id()) + + def get_loaded_containers_by_id(self): + ids = [] + for n in nuke.allNodes(): + if 'id_rep' in n.knobs(): + ids.append(n.knob('id_rep').getValue()) + + # Removes duplicates in the list + ids = list(set(ids)) + return ids + + def get_placeholders(self): + placeholders = super().get_placeholders() + return placeholders + + def delete_placeholder(self, placeholder): + node = placeholder.data['node'] + lastLoaded = placeholder.data['last_loaded'] + if not placeholder.data['delete']: + if 'empty' in node.knobs().keys(): + node.removeKnob(node.knob('empty')) + imprint(node, {"empty": True}) + else: + if lastLoaded: + if 'last_loaded' in node.knobs().keys(): + for s in node.knob('last_loaded').values(): + n = nuke.toNode(s) + try: + delete_placeholder_attributes(n) + except Exception: + pass + + lastLoaded_names = [] + for loadedNode in lastLoaded: + lastLoaded_names.append(loadedNode.name()) + imprint(node, {'last_loaded': lastLoaded_names}) + + for n in lastLoaded: + refresh_node(n) + refresh_node(node) + if 'builder_type' not in n.knobs().keys(): + attributes = get_placeholder_attributes(node, True) + imprint(n, attributes) + imprint(n, {'is_placeholder': False}) + hide_placeholder_attributes(n) + n.knob('is_placeholder').setVisible(False) + imprint(n, {'x': node.xpos(), 'y': node.ypos()}) + n.knob('x').setVisible(False) + n.knob('y').setVisible(False) + 
nuke.delete(node) + + +class NukePlaceholder(AbstractPlaceholder): + """Concrete implementation of AbstractPlaceholder for Nuke + + """ + + optional_attributes = {'asset', 'subset', 'hierarchy'} + + def get_data(self, node): + user_data = dict() + dictKnobs = node.knobs() + for attr in self.attributes.union(self.optional_attributes): + if attr in dictKnobs.keys(): + user_data[attr] = dictKnobs[attr].getValue() + user_data['node'] = node + + if 'nb_children' in dictKnobs.keys(): + user_data['nb_children'] = int(dictKnobs['nb_children'].getValue()) + else: + user_data['nb_children'] = 0 + if 'siblings' in dictKnobs.keys(): + user_data['siblings'] = dictKnobs['siblings'].values() + else: + user_data['siblings'] = [] + + fullName = node.fullName() + user_data['group_name'] = fullName.rpartition('.')[0] + user_data['last_loaded'] = [] + user_data['delete'] = False + self.data = user_data + + def parent_in_hierarchy(self, containers): + return + + def create_sib_copies(self): + """ creating copies of the palce_holder siblings (the ones who were + loaded with it) for the new nodes added + + Returns : + copies (dict) : with copied nodes names and their copies + """ + + copies = {} + siblings = get_nodes_from_names(self.data['siblings']) + for n in siblings: + reset_selection() + n.setSelected(True) + nuke.nodeCopy("%clipboard%") + reset_selection() + nuke.nodePaste("%clipboard%") + new_node = nuke.selectedNodes()[0] + x_init = int(new_node.knob('x_init').getValue()) + y_init = int(new_node.knob('y_init').getValue()) + new_node.setXYpos(x_init, y_init) + if isinstance(new_node, nuke.BackdropNode): + w_init = new_node.knob('w_init').getValue() + h_init = new_node.knob('h_init').getValue() + new_node.knob('bdwidth').setValue(w_init) + new_node.knob('bdheight').setValue(h_init) + refresh_node(n) + + if 'id_rep' in n.knobs().keys(): + n.removeKnob(n.knob('id_rep')) + copies[n.name()] = new_node + return copies + + def fix_z_order(self): + """ + fix the problem of z_order when a backdrop is loaded + """ + orders_bd = [] + nodes_loaded = self.data['last_loaded'] + for n in nodes_loaded: + if isinstance(n, nuke.BackdropNode): + orders_bd.append(n.knob("z_order").getValue()) + + if orders_bd: + + min_order = min(orders_bd) + siblings = self.data["siblings"] + + orders_sib = [] + for s in siblings: + n = nuke.toNode(s) + if isinstance(n, nuke.BackdropNode): + orders_sib.append(n.knob("z_order").getValue()) + if orders_sib: + max_order = max(orders_sib) + for n in nodes_loaded: + if isinstance(n, nuke.BackdropNode): + z_order = n.knob("z_order").getValue() + n.knob("z_order").setValue( + z_order + max_order - min_order + 1) + + def update_nodes(self, nodes, considered_nodes, offset_y=None): + """ Adjust backdrop nodes dimensions and positions considering some nodes + sizes + + Arguments: + nodes (list): list of nodes to update + considered_nodes (list) : list of nodes to consider while updating + positions and dimensions + offset (int) : distance between copies + """ + node = self.data['node'] + + min_x, min_y, max_x, max_y = get_extremes(considered_nodes) + + diff_x = diff_y = 0 + contained_nodes = [] # for backdrops + + if offset_y is None: + width_ph = node.screenWidth() + height_ph = node.screenHeight() + diff_y = max_y - min_y - height_ph + diff_x = max_x - min_x - width_ph + contained_nodes = [node] + min_x = node.xpos() + min_y = node.ypos() + else: + siblings = get_nodes_from_names(self.data['siblings']) + minX, _, maxX, _ = get_extremes(siblings) + diff_y = max_y - min_y + 20 + diff_x = abs(max_x - 
min_x - maxX + minX) + contained_nodes = considered_nodes + + if diff_y > 0 or diff_x > 0: + for n in nodes: + refresh_node(n) + if n != node and n not in considered_nodes: + + if not isinstance(n, nuke.BackdropNode)\ + or isinstance(n, nuke.BackdropNode)\ + and not set(contained_nodes) <= set(n.getNodes()): + if offset_y is None and n.xpos() >= min_x: + n.setXpos(n.xpos() + diff_x) + + if n.ypos() >= min_y: + n.setYpos(n.ypos() + diff_y) + + else: + width = n.screenWidth() + height = n.screenHeight() + n.knob("bdwidth").setValue(width + diff_x) + n.knob("bdheight").setValue(height + diff_y) + + refresh_node(n) + + def imprint_inits(self): + """ + add initial positions and dimensions to the attributes + """ + for n in nuke.allNodes(): + refresh_node(n) + imprint(n, {'x_init': n.xpos(), 'y_init': n.ypos()}) + n.knob('x_init').setVisible(False) + n.knob('y_init').setVisible(False) + width = n.screenWidth() + height = n.screenHeight() + if 'bdwidth' in n.knobs().keys(): + imprint(n, {'w_init': width, 'h_init': height}) + n.knob('w_init').setVisible(False) + n.knob('h_init').setVisible(False) + refresh_node(n) + + def imprint_siblings(self): + """ + - add siblings names to placeholder attributes (nodes loaded with it) + - add Id to the attributes of all the other nodes + """ + + nodes_loaded = self.data['last_loaded'] + d = {"id_rep": str(self.data['_id'])} + + for n in nodes_loaded: + if "builder_type" in n.knobs().keys()\ + and ('is_placeholder' not in n.knobs().keys() + or 'is_placeholder' in n.knobs().keys() + and n.knob('is_placeholder').value()): + + siblings = list(set(nodes_loaded) - set([n])) + siblings_name = get_names_from_nodes(siblings) + siblings = {"siblings": siblings_name} + imprint(n, siblings) + + elif 'builder_type' not in n.knobs().keys(): + # save the id of representation for all imported nodes + imprint(n, d) + n.knob('id_rep').setVisible(False) + refresh_node(n) + + def set_loaded_connections(self): + """ + set inputs and outputs of loaded nodes""" + + node = self.data['node'] + input, output = get_io(self.data['last_loaded']) + for n in node.dependent(): + for i in range(n.inputs()): + if n.input(i) == node: + n.setInput(i, output) + + for n in node.dependencies(): + for i in range(node.inputs()): + if node.input(i) == n: + input.setInput(0, n) + + def set_copies_connections(self, copies): + """ + set inputs and outputs of the copies + + Arguments : + copies (dict) : with copied nodes names and their copies + """ + input, output = get_io(self.data['last_loaded']) + siblings = get_nodes_from_names(self.data['siblings']) + inp, out = get_io(siblings) + inp_copy, out_copy = (copies[inp.name()], copies[out.name()]) + + for node_init in siblings: + if node_init != out: + node_copy = copies[node_init.name()] + for n in node_init.dependent(): + for i in range(n.inputs()): + if n.input(i) == node_init: + if n in siblings: + copies[n.name()].setInput(i, node_copy) + else: + input.setInput(0, node_copy) + + for n in node_init.dependencies(): + for i in range(node_init.inputs()): + if node_init.input(i) == n: + if node_init == inp: + inp_copy.setInput(i, n) + elif n in siblings: + node_copy.setInput(i, copies[n.name()]) + else: + node_copy.setInput(i, output) + + inp.setInput(0, out_copy) + + def move_to_placeholder_group(self, nodes_loaded): + """ + opening the placeholder's group and copying loaded nodes in it. 
+ + Returns : + nodes_loaded (list): the new list of pasted nodes + """ + + groups_name = self.data['group_name'] + reset_selection() + select_nodes(nodes_loaded) + if groups_name: + nuke.nodeCopy("%clipboard%") + for n in nuke.selectedNodes(): + nuke.delete(n) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste("%clipboard%") + nodes_loaded = nuke.selectedNodes() + return nodes_loaded + + def clean(self): + print("cleaaaaar") + # deselect all selected nodes + node = self.data['node'] + + # getting the latest nodes added + nodes_init = self.data["nodes_init"] + nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) + print(nodes_loaded) + if nodes_loaded: + self.data['delete'] = True + else: + return + nodes_loaded = self.move_to_placeholder_group(nodes_loaded) + self.data['last_loaded'] = nodes_loaded + refresh_nodes(nodes_loaded) + + # positioning of the loaded nodes + min_x, min_y, _, _ = get_extremes(nodes_loaded) + for n in nodes_loaded: + xpos = (n.xpos() - min_x) + node.xpos() + ypos = (n.ypos() - min_y) + node.ypos() + n.setXYpos(xpos, ypos) + refresh_nodes(nodes_loaded) + + self.fix_z_order() # fix the problem of z_order for backdrops + self.imprint_siblings() + + if self.data['nb_children'] == 0: + # save initial nodes postions and dimensions, update them + # and set inputs and outputs of loaded nodes + + self.imprint_inits() + self.update_nodes(nuke.allNodes(), nodes_loaded) + self.set_loaded_connections() + + elif self.data['siblings']: + # create copies of placeholder siblings for the new loaded nodes, + # set their inputs and outpus and update all nodes positions and + # dimensions and siblings names + + siblings = get_nodes_from_names(self.data['siblings']) + refresh_nodes(siblings) + copies = self.create_sib_copies() + new_nodes = list(copies.values()) # copies nodes + self.update_nodes(new_nodes, nodes_loaded) + node.removeKnob(node.knob('siblings')) + new_nodes_name = get_names_from_nodes(new_nodes) + imprint(node, {'siblings': new_nodes_name}) + self.set_copies_connections(copies) + + self.update_nodes(nuke.allNodes(), + new_nodes + nodes_loaded, 20) + + new_siblings = get_names_from_nodes(new_nodes) + self.data['siblings'] = new_siblings + + else: + # if the placeholder doesn't have siblings, the loaded + # nodes will be placed in a free space + + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes_loaded, direction="bottom", offset=200 + ) + n = nuke.createNode("NoOp") + reset_selection() + nuke.delete(n) + for n in nodes_loaded: + xpos = (n.xpos() - min_x) + xpointer + ypos = (n.ypos() - min_y) + ypointer + n.setXYpos(xpos, ypos) + + self.data['nb_children'] += 1 + reset_selection() + # go back to root group + nuke.root().begin() + + def convert_to_db_filters(self, current_asset, linked_asset): + if self.data['builder_type'] == "context_asset": + return [{ + "type": "representation", + "context.asset": { + "$eq": current_asset, "$regex": self.data['asset']}, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + }] + + elif self.data['builder_type'] == "linked_asset": + return [{ + "type": "representation", + "context.asset": { + "$eq": asset_name, "$regex": self.data['asset']}, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], 
+ } for asset_name in linked_asset] + + else: + return [{ + "type": "representation", + "context.asset": {"$regex": self.data['asset']}, + "context.subset": {"$regex": self.data['subset']}, + "context.hierarchy": {"$regex": self.data['hierarchy']}, + "context.representation": self.data['representation'], + "context.family": self.data['family'], + }] + + def err_message(self): + return ( + "Error while trying to load a representation.\n" + "Either the subset wasn't published or the template is malformed." + "\n\n" + "Builder was looking for:\n{attributes}".format( + attributes="\n".join([ + "{}: {}".format(key.title(), value) + for key, value in self.data.items()] + ) + ) + ) From cf2adc4e0172e1478cc64dda27a75ce31f225f2a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:40:41 +0200 Subject: [PATCH 0850/2550] small tweaks of template builder lib --- .../hosts/nuke/api/lib_template_builder.py | 62 ++++++++++++------- 1 file changed, 38 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/nuke/api/lib_template_builder.py b/openpype/hosts/nuke/api/lib_template_builder.py index b95a6edf7b..555d942cea 100644 --- a/openpype/hosts/nuke/api/lib_template_builder.py +++ b/openpype/hosts/nuke/api/lib_template_builder.py @@ -1,19 +1,32 @@ from collections import OrderedDict -from openpype.vendor.python.common import qargparse -from openpype.tools.utils.widgets import OptionDialog -from openpype.hosts.nuke.api.lib import imprint +import qargparse + import nuke +from openpype.tools.utils.widgets import OptionDialog + +from .lib import imprint, get_main_window + # To change as enum build_types = ["context_asset", "linked_asset", "all_assets"] def get_placeholder_attributes(node, enumerate=False): - list_atts = ['builder_type', 'family', 'representation', 'loader', - 'loader_args', 'order', 'asset', 'subset', - 'hierarchy', 'siblings', 'last_loaded'] + list_atts = { + "builder_type", + "family", + "representation", + "loader", + "loader_args", + "order", + "asset", + "subset", + "hierarchy", + "siblings", + "last_loaded" + } attributes = {} for attr in node.knobs().keys(): if attr in list_atts: @@ -29,9 +42,8 @@ def get_placeholder_attributes(node, enumerate=False): def delete_placeholder_attributes(node): - ''' - function to delete all extra placeholder attributes - ''' + """Delete all extra placeholder attributes.""" + extra_attributes = get_placeholder_attributes(node) for attribute in extra_attributes.keys(): try: @@ -41,9 +53,8 @@ def delete_placeholder_attributes(node): def hide_placeholder_attributes(node): - ''' - function to hide all extra placeholder attributes - ''' + """Hide all extra placeholder attributes.""" + extra_attributes = get_placeholder_attributes(node) for attribute in extra_attributes.keys(): try: @@ -53,15 +64,14 @@ def hide_placeholder_attributes(node): def create_placeholder(): - args = placeholder_window() - if not args: - return # operation canceled, no locator created + # operation canceled, no locator created + return placeholder = nuke.nodes.NoOp() - placeholder.setName('PLACEHOLDER') - placeholder.knob('tile_color').setValue(4278190335) + placeholder.setName("PLACEHOLDER") + placeholder.knob("tile_color").setValue(4278190335) # custom arg parse to force empty data query # and still imprint them on placeholder @@ -71,8 +81,8 @@ def create_placeholder(): if not type(arg) == qargparse.Separator: options[str(arg)] = arg._data.get("items") or arg.read() imprint(placeholder, options) - imprint(placeholder, {'is_placeholder': True}) - 
placeholder.knob('is_placeholder').setVisible(False) + imprint(placeholder, {"is_placeholder": True}) + placeholder.knob("is_placeholder").setVisible(False) def update_placeholder(): @@ -101,18 +111,22 @@ def imprint_enum(placeholder, args): Imprint method doesn't act properly with enums. Replacing the functionnality with this for now """ - enum_values = {str(arg): arg.read() - for arg in args if arg._data.get("items")} + + enum_values = { + str(arg): arg.read() + for arg in args + if arg._data.get("items") + } string_to_value_enum_table = { - build: i for i, build - in enumerate(build_types)} + build: idx + for idx, build in enumerate(build_types) + } attrs = {} for key, value in enum_values.items(): attrs[key] = string_to_value_enum_table[value] def placeholder_window(options=None): - from openpype.hosts.nuke.api.pipeline import get_main_window options = options or dict() dialog = OptionDialog(parent=get_main_window()) dialog.setWindowTitle("Create Placeholder") From aae4854e12774c94a77ab6b6c6beb92dcee300bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:42:23 +0200 Subject: [PATCH 0851/2550] safer template population --- openpype/hosts/nuke/api/template_loader.py | 31 +++++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/nuke/api/template_loader.py b/openpype/hosts/nuke/api/template_loader.py index 861498d2e5..dbdeb92134 100644 --- a/openpype/hosts/nuke/api/template_loader.py +++ b/openpype/hosts/nuke/api/template_loader.py @@ -42,10 +42,33 @@ class NukeTemplateLoader(AbstractTemplateLoader): placeholder.data["_id"] = last_representation['_id'] def populate_template(self, ignored_ids=None): - place_holders = self.get_template_nodes() - while len(place_holders) > 0: - super().populate_template(ignored_ids) - place_holders = self.get_template_nodes() + processed_key = "_node_processed" + + processed_nodes = [] + nodes = self.get_template_nodes() + while nodes: + # Mark nodes as processed so they're not re-executed + # - that can happen if processing of placeholder node fails + for node in nodes: + imprint(node, {processed_key: True}) + processed_nodes.append(node) + + super(NukeTemplateLoader, self).populate_template(ignored_ids) + + # Recollect nodes to repopulate + nodes = [] + for node in self.get_template_nodes(): + # Skip already processed nodes + if ( + processed_key in node.knobs() + and node.knob(processed_key).value() + ): + continue + nodes.append(node) + + for node in processed_nodes: + if processed_key in node.knobs(): + nuke.removeKnob(node, processed_key) @staticmethod def get_template_nodes(): From d912431833d3f6799583658014b70e621019dd3d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:44:18 +0200 Subject: [PATCH 0852/2550] tweak template loader and use new abstractions --- openpype/hosts/nuke/api/template_loader.py | 711 ++++++++++++--------- 1 file changed, 396 insertions(+), 315 deletions(-) diff --git a/openpype/hosts/nuke/api/template_loader.py b/openpype/hosts/nuke/api/template_loader.py index dbdeb92134..d4a8560c4a 100644 --- a/openpype/hosts/nuke/api/template_loader.py +++ b/openpype/hosts/nuke/api/template_loader.py @@ -1,16 +1,37 @@ -from openpype.hosts.nuke.api.lib_template_builder import ( - delete_placeholder_attributes, get_placeholder_attributes, - hide_placeholder_attributes) -from openpype.lib.abstract_template_loader import ( - AbstractPlaceholder, - AbstractTemplateLoader) +import re +import collections + import nuke -from collections import defaultdict -from 
openpype.hosts.nuke.api.lib import ( - find_free_space_to_paste_nodes, get_extremes, get_io, imprint, - refresh_node, refresh_nodes, reset_selection, - get_names_from_nodes, get_nodes_from_names, select_nodes) -PLACEHOLDER_SET = 'PLACEHOLDERS_SET' + +from openpype.client import get_representations +from openpype.pipeline import legacy_io +from openpype.pipeline.workfile.abstract_template_loader import ( + AbstractPlaceholder, + AbstractTemplateLoader, +) + +from .lib import ( + find_free_space_to_paste_nodes, + get_extreme_positions, + get_group_io_nodes, + imprint, + refresh_node, + refresh_nodes, + reset_selection, + get_names_from_nodes, + get_nodes_by_names, + select_nodes, + duplicate_node, + node_tempfile, +) + +from .lib_template_builder import ( + delete_placeholder_attributes, + get_placeholder_attributes, + hide_placeholder_attributes +) + +PLACEHOLDER_SET = "PLACEHOLDERS_SET" class NukeTemplateLoader(AbstractTemplateLoader): @@ -39,7 +60,7 @@ class NukeTemplateLoader(AbstractTemplateLoader): def preload(self, placeholder, loaders_by_name, last_representation): placeholder.data["nodes_init"] = nuke.allNodes() - placeholder.data["_id"] = last_representation['_id'] + placeholder.data["last_repre_id"] = str(last_representation["_id"]) def populate_template(self, ignored_ids=None): processed_key = "_node_processed" @@ -73,132 +94,150 @@ class NukeTemplateLoader(AbstractTemplateLoader): @staticmethod def get_template_nodes(): placeholders = [] - allGroups = [nuke.thisGroup()] - while len(allGroups) > 0: - group = allGroups.pop(0) + all_groups = collections.deque() + all_groups.append(nuke.thisGroup()) + while all_groups: + group = all_groups.popleft() for node in group.nodes(): - if "builder_type" in node.knobs().keys() and ( - 'is_placeholder' in node.knobs().keys() - and node.knob('is_placeholder').value()): - if 'empty' in node.knobs().keys()\ - and node.knob('empty').value(): - continue - placeholders += [node] if isinstance(node, nuke.Group): - allGroups.append(node) + all_groups.append(node) + + node_knobs = node.knobs() + if ( + "builder_type" not in node_knobs + or "is_placeholder" not in node_knobs + or not node.knob("is_placeholder").value() + ): + continue + + if "empty" in node_knobs and node.knob("empty").value(): + continue + + placeholders.append(node) return placeholders def update_missing_containers(self): - nodes_byId = {} - nodes_byId = defaultdict(lambda: [], nodes_byId) + nodes_by_id = collections.defaultdict(list) - for n in nuke.allNodes(): - if 'id_rep' in n.knobs().keys(): - nodes_byId[n.knob('id_rep').getValue()] += [n.name()] - if 'empty' in n.knobs().keys(): - n.removeKnob(n.knob('empty')) - imprint(n, {"empty": False}) - for s in nodes_byId.values(): - n = None - for name in s: - n = nuke.toNode(name) - if 'builder_type' in n.knobs().keys(): + for node in nuke.allNodes(): + node_knobs = node.knobs().keys() + if "repre_id" in node_knobs: + repre_id = node.knob("repre_id").getValue() + nodes_by_id[repre_id].append(node.name()) + + if "empty" in node_knobs: + node.removeKnob(node.knob("empty")) + imprint(node, {"empty": False}) + + for node_names in nodes_by_id.values(): + node = None + for node_name in node_names: + node_by_name = nuke.toNode(node_name) + if "builder_type" in node_by_name.knobs().keys(): + node = node_by_name break - if n is not None and 'builder_type' in n.knobs().keys(): - placeholder = nuke.nodes.NoOp() - placeholder.setName('PLACEHOLDER') - placeholder.knob('tile_color').setValue(4278190335) - attributes = get_placeholder_attributes(n, 
enumerate=True) - imprint(placeholder, attributes) - x = int(n.knob('x').getValue()) - y = int(n.knob('y').getValue()) - placeholder.setXYpos(x, y) - imprint(placeholder, {'nb_children': 1}) - refresh_node(placeholder) + if node is None: + continue + + placeholder = nuke.nodes.NoOp() + placeholder.setName("PLACEHOLDER") + placeholder.knob("tile_color").setValue(4278190335) + attributes = get_placeholder_attributes(node, enumerate=True) + imprint(placeholder, attributes) + pos_x = int(node.knob("x").getValue()) + pos_y = int(node.knob("y").getValue()) + placeholder.setXYpos(pos_x, pos_y) + imprint(placeholder, {"nb_children": 1}) + refresh_node(placeholder) self.populate_template(self.get_loaded_containers_by_id()) def get_loaded_containers_by_id(self): - ids = [] - for n in nuke.allNodes(): - if 'id_rep' in n.knobs(): - ids.append(n.knob('id_rep').getValue()) + repre_ids = set() + for node in nuke.allNodes(): + if "repre_id" in node.knobs(): + repre_ids.add(node.knob("repre_id").getValue()) # Removes duplicates in the list - ids = list(set(ids)) - return ids - - def get_placeholders(self): - placeholders = super().get_placeholders() - return placeholders + return list(repre_ids) def delete_placeholder(self, placeholder): - node = placeholder.data['node'] - lastLoaded = placeholder.data['last_loaded'] - if not placeholder.data['delete']: - if 'empty' in node.knobs().keys(): - node.removeKnob(node.knob('empty')) - imprint(node, {"empty": True}) - else: - if lastLoaded: - if 'last_loaded' in node.knobs().keys(): - for s in node.knob('last_loaded').values(): - n = nuke.toNode(s) - try: - delete_placeholder_attributes(n) - except Exception: - pass + placeholder_node = placeholder.data["node"] + last_loaded = placeholder.data["last_loaded"] + if not placeholder.data["delete"]: + if "empty" in placeholder_node.knobs().keys(): + placeholder_node.removeKnob(placeholder_node.knob("empty")) + imprint(placeholder_node, {"empty": True}) + return - lastLoaded_names = [] - for loadedNode in lastLoaded: - lastLoaded_names.append(loadedNode.name()) - imprint(node, {'last_loaded': lastLoaded_names}) + if not last_loaded: + nuke.delete(placeholder_node) + return - for n in lastLoaded: - refresh_node(n) - refresh_node(node) - if 'builder_type' not in n.knobs().keys(): - attributes = get_placeholder_attributes(node, True) - imprint(n, attributes) - imprint(n, {'is_placeholder': False}) - hide_placeholder_attributes(n) - n.knob('is_placeholder').setVisible(False) - imprint(n, {'x': node.xpos(), 'y': node.ypos()}) - n.knob('x').setVisible(False) - n.knob('y').setVisible(False) - nuke.delete(node) + if "last_loaded" in placeholder_node.knobs().keys(): + for node_name in placeholder_node.knob("last_loaded").values(): + node = nuke.toNode(node_name) + try: + delete_placeholder_attributes(node) + except Exception: + pass + + last_loaded_names = [ + loaded_node.name() + for loaded_node in last_loaded + ] + imprint(placeholder_node, {"last_loaded": last_loaded_names}) + + for node in last_loaded: + refresh_node(node) + refresh_node(placeholder_node) + if "builder_type" not in node.knobs().keys(): + attributes = get_placeholder_attributes(placeholder_node, True) + imprint(node, attributes) + imprint(node, {"is_placeholder": False}) + hide_placeholder_attributes(node) + node.knob("is_placeholder").setVisible(False) + imprint( + node, + { + "x": placeholder_node.xpos(), + "y": placeholder_node.ypos() + } + ) + node.knob("x").setVisible(False) + node.knob("y").setVisible(False) + nuke.delete(placeholder_node) class 
NukePlaceholder(AbstractPlaceholder): - """Concrete implementation of AbstractPlaceholder for Nuke + """Concrete implementation of AbstractPlaceholder for Nuke""" - """ - - optional_attributes = {'asset', 'subset', 'hierarchy'} + optional_keys = {"asset", "subset", "hierarchy"} def get_data(self, node): user_data = dict() - dictKnobs = node.knobs() - for attr in self.attributes.union(self.optional_attributes): - if attr in dictKnobs.keys(): - user_data[attr] = dictKnobs[attr].getValue() - user_data['node'] = node + node_knobs = node.knobs() + for attr in self.required_keys.union(self.optional_keys): + if attr in node_knobs: + user_data[attr] = node_knobs[attr].getValue() + user_data["node"] = node - if 'nb_children' in dictKnobs.keys(): - user_data['nb_children'] = int(dictKnobs['nb_children'].getValue()) - else: - user_data['nb_children'] = 0 - if 'siblings' in dictKnobs.keys(): - user_data['siblings'] = dictKnobs['siblings'].values() - else: - user_data['siblings'] = [] + nb_children = 0 + if "nb_children" in node_knobs: + nb_children = int(node_knobs["nb_children"].getValue()) + user_data["nb_children"] = nb_children - fullName = node.fullName() - user_data['group_name'] = fullName.rpartition('.')[0] - user_data['last_loaded'] = [] - user_data['delete'] = False + siblings = [] + if "siblings" in node_knobs: + siblings = node_knobs["siblings"].values() + user_data["siblings"] = siblings + + node_full_name = node.fullName() + user_data["group_name"] = node_full_name.rpartition(".")[0] + user_data["last_loaded"] = [] + user_data["delete"] = False self.data = user_data def parent_in_hierarchy(self, containers): @@ -213,127 +252,137 @@ class NukePlaceholder(AbstractPlaceholder): """ copies = {} - siblings = get_nodes_from_names(self.data['siblings']) - for n in siblings: - reset_selection() - n.setSelected(True) - nuke.nodeCopy("%clipboard%") - reset_selection() - nuke.nodePaste("%clipboard%") - new_node = nuke.selectedNodes()[0] - x_init = int(new_node.knob('x_init').getValue()) - y_init = int(new_node.knob('y_init').getValue()) + siblings = get_nodes_by_names(self.data["siblings"]) + for node in siblings: + new_node = duplicate_node(node) + + x_init = int(new_node.knob("x_init").getValue()) + y_init = int(new_node.knob("y_init").getValue()) new_node.setXYpos(x_init, y_init) if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob('w_init').getValue() - h_init = new_node.knob('h_init').getValue() - new_node.knob('bdwidth').setValue(w_init) - new_node.knob('bdheight').setValue(h_init) - refresh_node(n) + w_init = new_node.knob("w_init").getValue() + h_init = new_node.knob("h_init").getValue() + new_node.knob("bdwidth").setValue(w_init) + new_node.knob("bdheight").setValue(h_init) + refresh_node(node) - if 'id_rep' in n.knobs().keys(): - n.removeKnob(n.knob('id_rep')) - copies[n.name()] = new_node + if "repre_id" in node.knobs().keys(): + node.removeKnob(node.knob("repre_id")) + copies[node.name()] = new_node return copies def fix_z_order(self): - """ - fix the problem of z_order when a backdrop is loaded - """ - orders_bd = [] - nodes_loaded = self.data['last_loaded'] - for n in nodes_loaded: - if isinstance(n, nuke.BackdropNode): - orders_bd.append(n.knob("z_order").getValue()) + """Fix the problem of z_order when a backdrop is loaded.""" - if orders_bd: + nodes_loaded = self.data["last_loaded"] + loaded_backdrops = [] + bd_orders = set() + for node in nodes_loaded: + if isinstance(node, nuke.BackdropNode): + loaded_backdrops.append(node) + 
bd_orders.add(node.knob("z_order").getValue()) - min_order = min(orders_bd) - siblings = self.data["siblings"] + if not bd_orders: + return - orders_sib = [] - for s in siblings: - n = nuke.toNode(s) - if isinstance(n, nuke.BackdropNode): - orders_sib.append(n.knob("z_order").getValue()) - if orders_sib: - max_order = max(orders_sib) - for n in nodes_loaded: - if isinstance(n, nuke.BackdropNode): - z_order = n.knob("z_order").getValue() - n.knob("z_order").setValue( - z_order + max_order - min_order + 1) + sib_orders = set() + for node_name in self.data["siblings"]: + node = nuke.toNode(node_name) + if isinstance(node, nuke.BackdropNode): + sib_orders.add(node.knob("z_order").getValue()) + + if not sib_orders: + return + + min_order = min(bd_orders) + max_order = max(sib_orders) + for backdrop_node in loaded_backdrops: + z_order = backdrop_node.knob("z_order").getValue() + backdrop_node.knob("z_order").setValue( + z_order + max_order - min_order + 1) def update_nodes(self, nodes, considered_nodes, offset_y=None): - """ Adjust backdrop nodes dimensions and positions considering some nodes - sizes + """Adjust backdrop nodes dimensions and positions. - Arguments: + Considering some nodes sizes. + + Args: nodes (list): list of nodes to update - considered_nodes (list) : list of nodes to consider while updating - positions and dimensions - offset (int) : distance between copies + considered_nodes (list): list of nodes to consider while updating + positions and dimensions + offset (int): distance between copies """ - node = self.data['node'] - min_x, min_y, max_x, max_y = get_extremes(considered_nodes) + placeholder_node = self.data["node"] + + min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) diff_x = diff_y = 0 contained_nodes = [] # for backdrops if offset_y is None: - width_ph = node.screenWidth() - height_ph = node.screenHeight() + width_ph = placeholder_node.screenWidth() + height_ph = placeholder_node.screenHeight() diff_y = max_y - min_y - height_ph diff_x = max_x - min_x - width_ph - contained_nodes = [node] - min_x = node.xpos() - min_y = node.ypos() + contained_nodes = [placeholder_node] + min_x = placeholder_node.xpos() + min_y = placeholder_node.ypos() else: - siblings = get_nodes_from_names(self.data['siblings']) - minX, _, maxX, _ = get_extremes(siblings) + siblings = get_nodes_by_names(self.data["siblings"]) + minX, _, maxX, _ = get_extreme_positions(siblings) diff_y = max_y - min_y + 20 diff_x = abs(max_x - min_x - maxX + minX) contained_nodes = considered_nodes - if diff_y > 0 or diff_x > 0: - for n in nodes: - refresh_node(n) - if n != node and n not in considered_nodes: + if diff_y <= 0 and diff_x <= 0: + return - if not isinstance(n, nuke.BackdropNode)\ - or isinstance(n, nuke.BackdropNode)\ - and not set(contained_nodes) <= set(n.getNodes()): - if offset_y is None and n.xpos() >= min_x: - n.setXpos(n.xpos() + diff_x) + for node in nodes: + refresh_node(node) - if n.ypos() >= min_y: - n.setYpos(n.ypos() + diff_y) + if ( + node == placeholder_node + or node in considered_nodes + ): + continue - else: - width = n.screenWidth() - height = n.screenHeight() - n.knob("bdwidth").setValue(width + diff_x) - n.knob("bdheight").setValue(height + diff_y) + if ( + not isinstance(node, nuke.BackdropNode) + or ( + isinstance(node, nuke.BackdropNode) + and not set(contained_nodes) <= set(node.getNodes()) + ) + ): + if offset_y is None and node.xpos() >= min_x: + node.setXpos(node.xpos() + diff_x) - refresh_node(n) + if node.ypos() >= min_y: + node.setYpos(node.ypos() + 
diff_y) + + else: + width = node.screenWidth() + height = node.screenHeight() + node.knob("bdwidth").setValue(width + diff_x) + node.knob("bdheight").setValue(height + diff_y) + + refresh_node(node) def imprint_inits(self): - """ - add initial positions and dimensions to the attributes - """ - for n in nuke.allNodes(): - refresh_node(n) - imprint(n, {'x_init': n.xpos(), 'y_init': n.ypos()}) - n.knob('x_init').setVisible(False) - n.knob('y_init').setVisible(False) - width = n.screenWidth() - height = n.screenHeight() - if 'bdwidth' in n.knobs().keys(): - imprint(n, {'w_init': width, 'h_init': height}) - n.knob('w_init').setVisible(False) - n.knob('h_init').setVisible(False) - refresh_node(n) + """Add initial positions and dimensions to the attributes""" + + for node in nuke.allNodes(): + refresh_node(node) + imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) + node.knob("x_init").setVisible(False) + node.knob("y_init").setVisible(False) + width = node.screenWidth() + height = node.screenHeight() + if "bdwidth" in node.knobs(): + imprint(node, {"w_init": width, "h_init": height}) + node.knob("w_init").setVisible(False) + node.knob("h_init").setVisible(False) + refresh_node(node) def imprint_siblings(self): """ @@ -341,76 +390,88 @@ class NukePlaceholder(AbstractPlaceholder): - add Id to the attributes of all the other nodes """ - nodes_loaded = self.data['last_loaded'] - d = {"id_rep": str(self.data['_id'])} + loaded_nodes = self.data["last_loaded"] + loaded_nodes_set = set(loaded_nodes) + data = {"repre_id": str(self.data["last_repre_id"])} - for n in nodes_loaded: - if "builder_type" in n.knobs().keys()\ - and ('is_placeholder' not in n.knobs().keys() - or 'is_placeholder' in n.knobs().keys() - and n.knob('is_placeholder').value()): + for node in loaded_nodes: + node_knobs = node.knobs() + if "builder_type" not in node_knobs: + # save the id of representation for all imported nodes + imprint(node, data) + node.knob("repre_id").setVisible(False) + refresh_node(node) + continue - siblings = list(set(nodes_loaded) - set([n])) + if ( + "is_placeholder" not in node_knobs + or ( + "is_placeholder" in node_knobs + and node.knob("is_placeholder").value() + ) + ): + siblings = list(loaded_nodes_set - {node}) siblings_name = get_names_from_nodes(siblings) siblings = {"siblings": siblings_name} - imprint(n, siblings) - - elif 'builder_type' not in n.knobs().keys(): - # save the id of representation for all imported nodes - imprint(n, d) - n.knob('id_rep').setVisible(False) - refresh_node(n) + imprint(node, siblings) def set_loaded_connections(self): """ set inputs and outputs of loaded nodes""" - node = self.data['node'] - input, output = get_io(self.data['last_loaded']) - for n in node.dependent(): - for i in range(n.inputs()): - if n.input(i) == node: - n.setInput(i, output) + placeholder_node = self.data["node"] + input_node, output_node = get_group_io_nodes(self.data["last_loaded"]) + for node in placeholder_node.dependent(): + for idx in range(node.inputs()): + if node.input(idx) == placeholder_node: + node.setInput(idx, output_node) - for n in node.dependencies(): - for i in range(node.inputs()): - if node.input(i) == n: - input.setInput(0, n) + for node in placeholder_node.dependencies(): + for idx in range(placeholder_node.inputs()): + if placeholder_node.input(idx) == node: + input_node.setInput(0, node) def set_copies_connections(self, copies): - """ - set inputs and outputs of the copies + """Set inputs and outputs of the copies. 
- Arguments : - copies (dict) : with copied nodes names and their copies + Args: + copies (dict): Copied nodes by their names. """ - input, output = get_io(self.data['last_loaded']) - siblings = get_nodes_from_names(self.data['siblings']) - inp, out = get_io(siblings) - inp_copy, out_copy = (copies[inp.name()], copies[out.name()]) + + last_input, last_output = get_group_io_nodes(self.data["last_loaded"]) + siblings = get_nodes_by_names(self.data["siblings"]) + siblings_input, siblings_output = get_group_io_nodes(siblings) + copy_input = copies[siblings_input.name()] + copy_output = copies[siblings_output.name()] for node_init in siblings: - if node_init != out: - node_copy = copies[node_init.name()] - for n in node_init.dependent(): - for i in range(n.inputs()): - if n.input(i) == node_init: - if n in siblings: - copies[n.name()].setInput(i, node_copy) - else: - input.setInput(0, node_copy) + if node_init == siblings_output: + continue - for n in node_init.dependencies(): - for i in range(node_init.inputs()): - if node_init.input(i) == n: - if node_init == inp: - inp_copy.setInput(i, n) - elif n in siblings: - node_copy.setInput(i, copies[n.name()]) - else: - node_copy.setInput(i, output) + node_copy = copies[node_init.name()] + for node in node_init.dependent(): + for idx in range(node.inputs()): + if node.input(idx) != node_init: + continue - inp.setInput(0, out_copy) + if node in siblings: + copies[node.name()].setInput(idx, node_copy) + else: + last_input.setInput(0, node_copy) + + for node in node_init.dependencies(): + for idx in range(node_init.inputs()): + if node_init.input(idx) != node: + continue + + if node_init == siblings_input: + copy_input.setInput(idx, node) + elif node in siblings: + node_copy.setInput(idx, copies[node.name()]) + else: + node_copy.setInput(idx, last_output) + + siblings_input.setInput(0, copy_output) def move_to_placeholder_group(self, nodes_loaded): """ @@ -420,48 +481,49 @@ class NukePlaceholder(AbstractPlaceholder): nodes_loaded (list): the new list of pasted nodes """ - groups_name = self.data['group_name'] + groups_name = self.data["group_name"] reset_selection() select_nodes(nodes_loaded) if groups_name: - nuke.nodeCopy("%clipboard%") - for n in nuke.selectedNodes(): - nuke.delete(n) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste("%clipboard%") - nodes_loaded = nuke.selectedNodes() + with node_tempfile() as filepath: + nuke.nodeCopy(filepath) + for node in nuke.selectedNodes(): + nuke.delete(node) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste(filepath) + nodes_loaded = nuke.selectedNodes() return nodes_loaded def clean(self): - print("cleaaaaar") # deselect all selected nodes - node = self.data['node'] + placeholder_node = self.data["node"] # getting the latest nodes added nodes_init = self.data["nodes_init"] nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) - print(nodes_loaded) - if nodes_loaded: - self.data['delete'] = True - else: + self.log.debug("Loaded nodes: {}".format(nodes_loaded)) + if not nodes_loaded: return + + self.data["delete"] = True + nodes_loaded = self.move_to_placeholder_group(nodes_loaded) - self.data['last_loaded'] = nodes_loaded + self.data["last_loaded"] = nodes_loaded refresh_nodes(nodes_loaded) # positioning of the loaded nodes - min_x, min_y, _, _ = get_extremes(nodes_loaded) - for n in nodes_loaded: - xpos = (n.xpos() - min_x) + node.xpos() - ypos = (n.ypos() - min_y) + node.ypos() - n.setXYpos(xpos, ypos) + min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) + 
for node in nodes_loaded: + xpos = (node.xpos() - min_x) + placeholder_node.xpos() + ypos = (node.ypos() - min_y) + placeholder_node.ypos() + node.setXYpos(xpos, ypos) refresh_nodes(nodes_loaded) self.fix_z_order() # fix the problem of z_order for backdrops self.imprint_siblings() - if self.data['nb_children'] == 0: + if self.data["nb_children"] == 0: # save initial nodes postions and dimensions, update them # and set inputs and outputs of loaded nodes @@ -469,26 +531,29 @@ class NukePlaceholder(AbstractPlaceholder): self.update_nodes(nuke.allNodes(), nodes_loaded) self.set_loaded_connections() - elif self.data['siblings']: + elif self.data["siblings"]: # create copies of placeholder siblings for the new loaded nodes, # set their inputs and outpus and update all nodes positions and # dimensions and siblings names - siblings = get_nodes_from_names(self.data['siblings']) + siblings = get_nodes_by_names(self.data["siblings"]) refresh_nodes(siblings) copies = self.create_sib_copies() new_nodes = list(copies.values()) # copies nodes self.update_nodes(new_nodes, nodes_loaded) - node.removeKnob(node.knob('siblings')) + placeholder_node.removeKnob(placeholder_node.knob("siblings")) new_nodes_name = get_names_from_nodes(new_nodes) - imprint(node, {'siblings': new_nodes_name}) + imprint(placeholder_node, {"siblings": new_nodes_name}) self.set_copies_connections(copies) - self.update_nodes(nuke.allNodes(), - new_nodes + nodes_loaded, 20) + self.update_nodes( + nuke.allNodes(), + new_nodes + nodes_loaded, + 20 + ) new_siblings = get_names_from_nodes(new_nodes) - self.data['siblings'] = new_siblings + self.data["siblings"] = new_siblings else: # if the placeholder doesn't have siblings, the loaded @@ -497,51 +562,67 @@ class NukePlaceholder(AbstractPlaceholder): xpointer, ypointer = find_free_space_to_paste_nodes( nodes_loaded, direction="bottom", offset=200 ) - n = nuke.createNode("NoOp") + node = nuke.createNode("NoOp") reset_selection() - nuke.delete(n) - for n in nodes_loaded: - xpos = (n.xpos() - min_x) + xpointer - ypos = (n.ypos() - min_y) + ypointer - n.setXYpos(xpos, ypos) + nuke.delete(node) + for node in nodes_loaded: + xpos = (node.xpos() - min_x) + xpointer + ypos = (node.ypos() - min_y) + ypointer + node.setXYpos(xpos, ypos) - self.data['nb_children'] += 1 + self.data["nb_children"] += 1 reset_selection() # go back to root group nuke.root().begin() - def convert_to_db_filters(self, current_asset, linked_asset): - if self.data['builder_type'] == "context_asset": - return [{ - "type": "representation", - "context.asset": { - "$eq": current_asset, "$regex": self.data['asset']}, - "context.subset": {"$regex": self.data['subset']}, - "context.hierarchy": {"$regex": self.data['hierarchy']}, - "context.representation": self.data['representation'], - "context.family": self.data['family'], - }] + def get_representations(self, current_asset_doc, linked_asset_docs): + project_name = legacy_io.active_project() - elif self.data['builder_type'] == "linked_asset": - return [{ - "type": "representation", - "context.asset": { - "$eq": asset_name, "$regex": self.data['asset']}, - "context.subset": {"$regex": self.data['subset']}, - "context.hierarchy": {"$regex": self.data['hierarchy']}, - "context.representation": self.data['representation'], - "context.family": self.data['family'], - } for asset_name in linked_asset] + builder_type = self.data["builder_type"] + if builder_type == "context_asset": + context_filters = { + "asset": [re.compile(self.data["asset"])], + "subset": 
[re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representations": [self.data["representation"]], + "family": [self.data["family"]] + } + + elif builder_type != "linked_asset": + context_filters = { + "asset": [ + current_asset_doc["name"], + re.compile(self.data["asset"]) + ], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]] + } else: - return [{ - "type": "representation", - "context.asset": {"$regex": self.data['asset']}, - "context.subset": {"$regex": self.data['subset']}, - "context.hierarchy": {"$regex": self.data['hierarchy']}, - "context.representation": self.data['representation'], - "context.family": self.data['family'], - }] + asset_regex = re.compile(self.data["asset"]) + linked_asset_names = [] + for asset_doc in linked_asset_docs: + asset_name = asset_doc["name"] + if asset_regex.match(asset_name): + linked_asset_names.append(asset_name) + + if not linked_asset_names: + return [] + + context_filters = { + "asset": linked_asset_names, + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]], + } + + return list(get_representations( + project_name, + context_filters=context_filters + )) def err_message(self): return ( From ebcd34a29b84986d660c99aae0ad463b27cf7a54 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:44:26 +0200 Subject: [PATCH 0853/2550] add menu options --- openpype/hosts/nuke/api/pipeline.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c680cd9119..bac42128cc 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -22,6 +22,10 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.workfile import BuildWorkfile +from openpype.pipeline.workfile.build_template import ( + build_workfile_template, + update_workfile_template +) from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop @@ -36,6 +40,9 @@ from .lib import ( set_avalon_knob_data, read_avalon_data, ) +from .lib_template_builder import ( + create_placeholder, update_placeholder +) log = Logger.get_logger(__name__) @@ -203,6 +210,24 @@ def _install_menu(): lambda: BuildWorkfile().process() ) + menu_template = menu.addMenu("Template Builder") # creating template menu + menu_template.addCommand( + "Build Workfile from template", + lambda: build_workfile_template() + ) + menu_template.addCommand( + "Update Workfile", + lambda: update_workfile_template() + ) + menu_template.addSeparator() + menu_template.addCommand( + "Create Place Holder", + lambda: create_placeholder() + ) + menu_template.addCommand( + "Update Place Holder", + lambda: update_placeholder() + ) menu.addSeparator() menu.addCommand( "Experimental tools...", From d5f5fedfda0c40e27c4d43591a31d7b94a5b4c79 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:44:53 +0200 Subject: [PATCH 0854/2550] fix host name getter --- openpype/pipeline/workfile/build_template.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py index e6396578c5..3328dfbc9e 100644 --- 
a/openpype/pipeline/workfile/build_template.py +++ b/openpype/pipeline/workfile/build_template.py @@ -1,3 +1,4 @@ +import os from importlib import import_module from openpype.lib import classes_from_module from openpype.host import HostBase @@ -30,7 +31,7 @@ def build_workfile_template(*args): template_loader.populate_template() -def update_workfile_template(args): +def update_workfile_template(*args): template_loader = build_template_loader() template_loader.update_missing_containers() @@ -42,7 +43,10 @@ def build_template_loader(): if isinstance(host, HostBase): host_name = host.name else: - host_name = host.__name__.partition('.')[2] + host_name = os.environ.get("AVALON_APP") + if not host_name: + host_name = host.__name__.split(".")[-2] + module_path = _module_path_format.format(host=host_name) module = import_module(module_path) if not module: From 4ac4c672bfec28c3f886afb84dd10b55001a02dd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:45:09 +0200 Subject: [PATCH 0855/2550] use 'task_names' instead of 'tasks' --- openpype/pipeline/workfile/abstract_template_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 05a98a1ddc..370dcef20a 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -177,7 +177,7 @@ class AbstractTemplateLoader: build_info["profiles"], { "task_types": task_type, - "tasks": task_name + "task_names": task_name } ) From 400ff1d3aedf656818fb0e92e110d560e5c17333 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 11:53:26 +0200 Subject: [PATCH 0856/2550] added build documentation from https://github.com/pypeclub/OpenPype/pull/3544 --- website/docs/artist_hosts_hiero.md | 64 ++++++++++++++++++ website/docs/assets/nuke_addProfile.png | Bin 0 -> 24604 bytes .../assets/nuke_buildWorfileFromTemplate.png | Bin 0 -> 29798 bytes website/docs/assets/nuke_buildworkfile.png | Bin 0 -> 36149 bytes .../docs/assets/nuke_createPlaceHolder.png | Bin 0 -> 30703 bytes .../assets/nuke_fillingExtraAttributes.png | Bin 0 -> 30953 bytes website/docs/assets/nuke_placeHolderNode.png | Bin 0 -> 4010 bytes website/docs/assets/nuke_placeholder.png | Bin 0 -> 12169 bytes .../docs/assets/nuke_publishedinstance.png | Bin 0 -> 20541 bytes .../docs/assets/nuke_updatePlaceHolder.png | Bin 0 -> 30608 bytes website/docs/assets/nuke_updateWorkfile.png | Bin 0 -> 30968 bytes 11 files changed, 64 insertions(+) create mode 100644 website/docs/assets/nuke_addProfile.png create mode 100644 website/docs/assets/nuke_buildWorfileFromTemplate.png create mode 100644 website/docs/assets/nuke_buildworkfile.png create mode 100644 website/docs/assets/nuke_createPlaceHolder.png create mode 100644 website/docs/assets/nuke_fillingExtraAttributes.png create mode 100644 website/docs/assets/nuke_placeHolderNode.png create mode 100644 website/docs/assets/nuke_placeholder.png create mode 100644 website/docs/assets/nuke_publishedinstance.png create mode 100644 website/docs/assets/nuke_updatePlaceHolder.png create mode 100644 website/docs/assets/nuke_updateWorkfile.png diff --git a/website/docs/artist_hosts_hiero.md b/website/docs/artist_hosts_hiero.md index dc6f1696e7..d14dcd1c01 100644 --- a/website/docs/artist_hosts_hiero.md +++ b/website/docs/artist_hosts_hiero.md @@ -202,3 +202,67 @@ This video shows a way to publish shot look as effect from Hiero to Nuke. 
### Assembling edit from published shot versions + + +# Nuke Build Workfile +This tool initialises the Node Graph from a pre-created template. + +### Add a profile +The path to the template used for the initialisation must be added as a profile in the Project Settings. + +![Add profile](assets/nuke_addProfile.png) + +### Create Place Holder + +![Create Place Holder menu](assets/nuke_createPlaceHolder.png) + +This tool creates a Place Holder, a node that will be replaced by published instances. + +![Place Holder node](assets/nuke_placeHolderNode.png) +#### Result +- Creates a red node called `PLACEHOLDER` which can be manipulated like any other node in the Node Graph. + +![Place Holder in the Node Graph](assets/nuke_placeholder.png) + +:::note +All published instances that will replace the place holder must contain unique input and output nodes unless they are imported as a single node. +::: + +![Published instance](assets/nuke_publishedinstance.png) + + +The information about these objects is provided by the user by filling in the extra attributes of the Place Holder. + +![Filling extra attributes](assets/nuke_fillingExtraAttributes.png) + + + +### Update Place Holder +This tool allows the user to change the information provided in the extra attributes of the selected Place Holder. + +![Update Place Holder menu](assets/nuke_updatePlaceHolder.png) + + + +### Build Workfile from template +This tool imports the template and replaces the existing Place Holders with the corresponding published objects (which can contain Place Holders too). If no published items match the given description, the place holder remains in the node graph. + +![Build Workfile from template menu](assets/nuke_buildWorfileFromTemplate.png) + +#### Result +- Replaces the `PLACEHOLDER` node in the template with the published instance corresponding to the information provided in the extra attributes of the Place Holder. + +![Built workfile](assets/nuke_buildworkfile.png) + +:::note +If the instance that replaces Place Holder **A** contains another Place Holder **B** that points to several published elements, all the nodes imported with **A** except **B** are duplicated for each element that replaces **B**. +::: + +### Update Workfile +This tool checks whether new instances were published after the last build and imports them. + +![Update Workfile menu](assets/nuke_updateWorkfile.png) + +:::note +Imported instances must not be deleted because they contain extra attributes that are used to update the workfile after the place holder has been deleted.
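The menu entries added to `pipeline.py` earlier in this series call `build_workfile_template` and `update_workfile_template` from `openpype.pipeline.workfile.build_template` with no arguments, so the same workflow the documentation describes can also be driven from Nuke's Script Editor. A minimal sketch, assuming the OpenPype Nuke integration is already installed in the running session and a workfile template profile is configured for the current project and task:

```python
# Minimal sketch: drive the template builder without going through the
# OpenPype menu. Assumes the OpenPype Nuke integration is installed in
# this Nuke session and a template profile exists for the current task.
from openpype.pipeline.workfile.build_template import (
    build_workfile_template,
    update_workfile_template,
)

# Build the Node Graph from the configured template, resolving every
# Place Holder that has a matching published representation.
build_workfile_template()

# Later, after additional versions have been published, resolve any
# Place Holders that could not be filled during the initial build.
update_workfile_template()
```

Both functions accept `*args` only so they can be plugged straight into the menu as callbacks, which is why the sketch calls them bare.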
+::: \ No newline at end of file diff --git a/website/docs/assets/nuke_addProfile.png b/website/docs/assets/nuke_addProfile.png new file mode 100644 index 0000000000000000000000000000000000000000..37578df7f52ee4b61e3ba0725d999fc4940288ea GIT binary patch literal 24604 zcmd43bzD?$-!6)Uii$xgDLQmXH>l*$-Jx_N-JmEUAfN*S44nha(A^;*($Wpm3@I^y zL!F!7?|t`vpZz}P?DyGwpYu8X0pnV0?t88K{>JsauJ0m5MM>rk0XYF49^M_OtfU$q z-sKQHyi3V9@WGLzjf52N>#EC3sKyQO&-aGeCp^5zcu>g~8lH(8(_XFyD}Kv=vPtoR zUcP2ZC1AO9`SGPMI|Ve`-{VO2va$UR|tImQ3w@lBJb#oDh6U$Wt$Y|ij*64c;bW4}zhdn_n3 z(pJCiH+wkqP0;(qXPYgK*>=seMy5@wntO7(UgFSO2;6N<(Hlm{`f1|W$jn~R%4Iyf zw{)#1ISk;;+pOR(H^A>JFQ0&)w{L(4#d}s&NTxkAJN~S+zVh{oM8a)wjJo~}qa@vZ z@JafEB>4I8-h)ecc(0zzUIk-ZqP;kQ|1X?qhTRF_(+_Mo{*K03o^R;OIgimn4rcIxG1<|JZ&o)e4|{|H z4DHLA(OQ^kKk>oezdx<<7izzrb5>MPTJ1*Mvaze3E~?u6Tm{A>B9i|8 z`rW>=TUX!TeZ~)Vvwe^ZHy_%!643Nkz~07Lj*P=d%fX?yyJ0e|Fw-nZ3l3)-+>CeY zZE8wA*z_^(mzC77w1e@~-`;W+g2%>YESd#t#Sk++2)VGUQoq;m*5J)fxP_)>OqYXl zG$j0wuT9NB9t>uZg`{6!D9C<4{LVW zl%om@&s7Zx!M4LJP}&b+eU{q9BI#-hRuTQcE`Zxva5mCjHnLz9bPZ z>4ML$o71)W;j!@=S|w^x>gqzii*Y!dh_e8!%um05s^E5b#Gn10;sxLdL#2HiKY#zo z8BmKdubjm8D*K}fea+D^OG~n__ym2o`9(yqi~7aZ^~3z=`@e4dyybJiC^+uaw}Q;d zg2Ei8R$*;+F+DMl6WCsmhG{x@eB`sw84S7=F63PHf@I0Ov(K0w%7svHHaA+`qs&5k+QI`s9Ws0 zo>fe45?qroKH>$_i73ePL2!O&$3_kfryFU)gZU8$6gbQ~iHXyx0$ux==|gozMQI3h zF63qw5+VCcb7LU1(@gR`uOW5`N8qc|s*LklV=jhmZ`(RNINJvRn^D7UbT$rrF z&FUm52oMb#F^kTnv)i@XZyBL8?_PLa$1iseegSTHlO?{rp=~B>o9pT?nz^zB)t6k6%-bW}!71hy3bRVVPoYa_`EA=Ruw(Mgu-hRZ_}<;O zD24d_p1Y4d^wKD;P@~|_8~pXN=wq1p4t-&eb8sB?iEQ?q$&n?oIs zk3o9Ha=!Kp=VY!2N#v9?YWl{(Gv0gc%DRr!z#GJTW)`G!yY}~3 z4u4R`3Y2m%2+9j~=%UUlZTC+-0#c*46Lmi40#|4CifnT(W#flp)|c;oI;fJAcz#lr zStVJ2=2F;M&NcXkm5nxiki%HBMSe8eJS7@pPOpU7@6y&>A)InEI(L%KKI@8z2r~YW zUH3actG zt&_0%2iO4)U;CMbo#i-xFYK?tmHlT1Z)o|NM|Ln|GkbQr`{yKO zQmom$aeXInvF=*9esrntjH!$dmABKMoQ42i38H>s+UyCEpXY}|il=K|FDLCxV9BtC zoyE#)`q#rOlFn_HjQna{WgC4xtMz1v_9uD$CJ-x80iEgRD=_)vl^guYuM!WYW&0Z_ zm^v$z*TOX=a85Gu4aFup);ss9)|5s9rUr5=im$RMCx4kF?e`GVDR6|}UN0YbK^JH0 zeQ1Gd@ZeN0j@!?Ny{^~e%2}++nZd|vc0=IEkD7W*{f#-lQD^yFOJfXJA2&ag++7s^ zy=-y8ge;f$?kxgj_(a%SZ4wh$hmh|KY`C-hP|P|FX>ZL(^iw{#a0|2CO|&2D3;oj5 z8XmKRn@B{6I&M(WkQtivbL@lexuq%6e*%#{Di;i9r}Z0%%{27#Px?KEtz~fGz0ZqT z)lR_rYln_hUy&=4+~4k7T+=az%{3@U^~hB!TAR!1$aquX>eZ`5+cIMbWcJtLM%Zf$ zLwqr*@LbxKm1}rSQ4Ub82Xq~OM}5|ubb(QM=9HMPs|G*g3NrV-BI}2fkvq^*|195E zFZ`xS!x8%iXL0`4!>MP#6(S05PwvZgd`UK19&G$$h5)NFygiKVK-{ag6(_^r=qy)e zY<|E^=~Hf|6A}|QVkG#Up>p1KkI`d7u!=Td6}L4Z&)h~+1s+mrDZzHUTKdQz(!?L?Fq(|a z@@KCbYusLCpt@x|prH6^)x-91gsFo;*TVTjPxl?wn%E zpZ+IN+1lDCMVQ7q;c5?Q_J!@dY0VU$oXQ_4D|$@{U89a19ieQri^=Im#u2(xT&W+1 zP#r?7)wQ0_s3dxS&tKbb@1f*?-dsmZJ=NQaQ>mk)ujNIxDvP3zGWfDjqAX6)yhiy- zw=nIM2LV&tKZBYj>u2HVvjtmG-2R=<4(ky3mkCjlPV@#lY(TTCegKx#)gx$5J)M+ zhsv!_U)p4P2#0be?F(GEx%)~a1`cl_zpXng7V~UH=em&kVu{qnl)*Ky6}zWUGW^ZZ zR)q=p%3ymvv^1g(VammXghhXGCwmy{A5ic7yirXoHM%N?{m!)4A1ljbW1m|x*z$B$ zEdIQOSa9ENZ}J0f5AI4i36+o=0b@^i_RSI_#k!zM@WB zKU`w`&MR>K$V_E6;4pZX{nxGXfT8*5L-lu=wjxTv8?hP%%Em0@{pVIGrzfU7q=RFb zx&hu|@{`Ud7pdf|xGnd5CI)GY{=Md-U!wu1j(5rH-2dXAjnwvvfS&dvC{H8x`|i#3 zEYT)_P*hQ2ag5cT6u!7&DHHmneF$c|z}!kELb=g!kVD2-qdyKRZr7ob9JTVbeucReKqti}unE?7I}zCde!891>U$ z1X`SGhmrYReBCE)@OL}3<%csr^~Jpt0Dee0Gc&Cuh+^392|jWEZJQ;5OF@gE0?^&} z)FS=Mb|c`hnHu_#aRm!b=6>yH|36Qq;$ zGRH+`c472ooGhu?&ZoFWavG7mX>@h+3xk|lz4d;EQ+I@!S~yD%jfby)5Gg<)J*FfN ziakdxu3{5xMZ6oir5r2RH&{N#gq93_F=8fHh1d-2oIy@kSRO@s%KuafZ}(B}VFwIwaB5jKI}YP>`Qltgas z?wH?SKVIJ&H7Q1+oZ-F3Rr~iWtoJv#h0RLoR+Car_8WR6ySrsLv7*?$?F)?q0 zB*W>_Wxe_eIX5?3EPuf973hF%;aPe?W!c)%&LA@jTYrt)FgXp)D5z@_dwJ?l55(Yw 
z>`^^2sSyzg-UkS(Q0a^kI(lYNQAK`7*R|hY@evC2qUH5Q*Zy?zkc6zN4U!lNH1bFQ zIO@q0v%wt_VHRvORw~oc5#b}oKaF69u5Isid)4{xNqf~_SzB9E{bzI%506N?%L;|t zuhY9`6>*x{wbIS6WRs>vLnV>>y~5kM)me%TmGTLE`ReWAN=bq)FU-wZG#hGMdyB*^Pf5tQ~S%F2b#rKRTO zI%T$_>Zm*ewy?Imp&_#+GxJ<;d${}NSi$0=8QlaCub^P=t7n-y8oPwInIGO@W)PN# zx$X`L8amXVz^yDTE%geHSWa)OsRlI4*l$t+y3kzz7tB*n55-D~X6G}~a&ZY0a&+&T zba%B=02kP7uYQ4yM?K&b5<;pl=B@E+I#(O+5)!go8Q|glUetkg^}rSn|7`nIkFV@O zVFm@RcWyttqVo8e*!l{XkV?eUb9n_+pwoS#`3XsexYdkE?0&&!$>gR>gn5aA_PGLb zEI%NDI9z&#YJHG(8S~kJZ@P1Bc^uw4mW|^!ub!lNI-5AxU^bcB z6bOP|davhulv~&bjcCAG=*H9Tiirq0(f~i`D1}I*|674>w4I87J-FQ8FnoSto;XC3 zB;{VV%L@4&dmo?UG6%!_-CbuMzGZc-3jLKnW7xPIY}iEA!J(q0gnNl1A-QX@^#d?s zIk`|?p?P&>j}#4-d}D2NKWECofZFiXR7t8ADJkg>;b*BOCGg$jc9PpyEvXt$T{O+B zH$o~9Zv?nRP_mHp-B2#wd@Z)FKFwzj^?sj1<>l3p!V>xURL-^BTeeVe&muD*Q5snN zs7bghE|QMJ-9hD}M}~$nK^dP?I=$4T4PShdQ&6C%h1T@*I|YP-P(YfhrY1?rB+;;M zzNVnyU{|NTJ(XT@iH`mn|JqPK-7*n_l5Nj~9755;uy!P!MLU6vr~Tc#7sFbG@HU(N z{*07|5tSZO8XcV`nxh)I=u&-AQ9oGbZ%G{nr9@4`X}$3WcH~r#)O}x`1a617nP?eq zfFf{~h0z$S@dgR^3`a*EHdhGakY z;0N;y*eH*?A|DW!K!Py5i1YAPhPKPb7RiH1#r2`#;ipjwoxQ!@jKkU>giqcGF;;to zQG&x`7)irXwP=}@UPIp@br9Yo@5dwn66Tq_0I|17Bf|w#&Wy8Kqja<{vpkLKj%_oDF!`#6t=+=+NzJ$2oJ zjYI|P0oK;p*?GnVh3YInTuB~m3yUQQYqL10HGG6yX5r{sr}_dN7&SgaiJVf0M_#|} z=UcNm7Pp}&7|HH`!P#uYluCs>^fu|q74deo0|zN?Qw|M8P&P!1_fQyD^9nX;RE@itu08%XD&U0 zN|@d8ys4Wrmc_7LIT6-oqpH{wFWA@D>%0!~S$H_EEXzMbfqpP>nUsd6uqR>tLQX)k z^mO+1B~{u3>X6s40VN-vnozMlf5ILJ?_Dwptx+VztpS7hD==M88?F0aG1Ldd5(}_Bp%RrtVaW3=K?w~brACM z@j+qV=87NyJalAMvwR@ka~0!Ha9DEP+i)+|H%(BW2e}?F&zAnl4yg#vWo|*v({v?P zp+QXI-r28~jJ*R84U!e)xgek#yH5CNz?IJa)hqWb)`vGxkHwW+B%U(s>e>ldPS@B3 zU4Bj1%3$^CmFG;u;J`pMrdP$*Hh605vyTxe2P^9rk5|t@N?$sRWO*2Oiagv*K{I!m zE0^_rX!$y)D)r<(TsykMrrbu?zaX&rXJEVEPp|-Q8Cot#+rvL3&u%%bb$i)QsAOX) zY}__&!q(PCdMR%Oy>v!qG1UtgQt>;W%Ci(Pm|nr-Vx4Xj+K@#*T@ff&?P7vn%6kui z$Yqb!)6FBL62*ngdHS>~!t+?Q+!Wz8Lbbz#gA7mtCp;nS%Z|R4IZPi)zPYajDNpL(WY&-~MH?BqwX7`D{G42zqmAJXHgX^fh zPW`PL1kTJijAcA5Ax9~hg#~|qtQ#L6g}=HtgrGo^h-J4?efvot;W&z0tE~3o@TPFY=*guX6+ft-&u>t zGLoB5;@SDpm6hkJ$`@LMbY7(QWf!OaCI}e=VUqSBA=6%;pJi|p7BE$4+OC8uoPFv8 zGpu!YP_N-OB$_KyN*2o=8F3%CyE#$rQk#gewsEW#L>Gp34uhP)9@dr);x7Q%pA8;i zCTcw@#g2Y`s&E+3d@4iXT3V_PTXZ*HZ6iz3UdUo$Hq_1AX{z#fUoARm*wH-|YxIfj zwkSQ=JVy)+4q^vJ4_1GrwbXc(%IjCysi?kAtEu&gPTWvwY)3D&E8Di<$axLxk%dK|yk`yCblg1@r$vLW&H9 z&I3#=rwHwDzr#RGplr1Q(4S5>0e)bOd%4tgLP}V>a4N2c0s?fSrOwXIq3Y7`k{;U+ zp8wtp@O`>t@c&k&0(mW}pt8c&(`@Kju$c^u(Ep93d##~c`S7h|A0vl|PC5CZfr)~& zwAa-JkM=6q78ykR_OXuh3&B9AN;a}Oeo8GeX8^WIo zLRqwl6%V7L=%5$+7Djj)f!uoj9OjDa76d?yYlRZjJ?O$hW6}CkXY+2FeX*`3xmZY& zkejr+`h9plGoo^Hbu4aTLU_>?i9#af4l*UZMBl>{fTg5{k#Tg1i-~Rj0p9z!(iL1Y zWyggu<4X{$mw*8jUGDD}zTsZ$zdt*xW!*twKUJlK9NQPH!LpjzMFc;X{T~&u@9kIi zdRuG|>@#}Z%aiIy8w%!Jij~E5t$iEe2!}oi?!Dx`#nwgF$w@^MdhGuGTKo2fy1jjQ zh21z~R7W0*c45yXz!RP&Y#m=#509zko_GI{)pk-%gXH!ts46R`gofSX6%fhOG^Ybw zPX6doRGZyb1+|*NT#`yu5RB)UKn|+Y834T>OG+u@vmWboUp3L4Lo*|^AX8HD3zP9= zhlXmFeAk61#AZ-X@!x5`(wrJY#;OkLCOLVn?hP8shcL+SNWs^>744FRnV7k~y*;oT zZgb!0mMkngz9zaVuUy3GE|@QUadq`ln#MhOc`d8KHO@52s#vxBB>b|!lg@yf90S4+(>M%&5he|uS5~H}d zc5O<49>^Rkc|5)5-TVc2*4IHr83Dj)e3qWELRiXl?r$uc5)EYL= z0=BLpK-rM(^IEygcCtcoV((;-oRWM1PLKKogj4?uY%O)_96O&rh1lGhNo%9TeXTil z-+On3CI@}oJj?8VDm*)s)urva-2Er`nJ@sd!r#(<4j4H2=k@%doL`YwR8)1^amF5a zN6*Q{4%_rMHT`Ny0@mHc_Mw#{M281S(k7)>TOmg@chk_WE{}qr^nelqPS=}MKLQ3; zdLat_nZM}Xp=SG+6}k6P=cT^(o1$T&U`V{J`*2C{>mQn3n$yLjj?Q{NA~x&q2$T5$ zhsX1c_&?el^Nx<2;ruSCOVenmkY&YrTIuSoM++3QNAZch8q;;-Y*8JVqm6rtfL$4$ z@t1(@Z#w!JMnGyW#Q&+4^}_n}qYnbW=6fcLzocY~`Xp;AV#%+)f3Z1MtVtRM?4hY{ zr8^5W5^$WK7!8l)scJ6S0BzR#)f?I7S5`AFYU(g7Hnx#jxlv|e;hk6eWVvnaNJ0M4 
zFnW~CxBj9~B|*erR6>C6kyCP?I{cIDUueJ{3+Vt6Vc14hOA85M8lU^pW1${!-XxJ= ztH4PmIFY(lV8mtgaL@cXSCwobCuZm0xQ?iqn-iz1aPy{wD}2;wZFPYq#xVfgltEC}-thQ%#@ZUe>2I1dYouf8c;0m1rW#CC6->%3F0O1*s;j*1Ef;Y9p1=IH zUwC|h(crINKl22amd$MUmhS+%VR&@1utkP~m@w#$%l77`_A}z4_;PrBs3Zj1w>?7# zM)W%lK<@9m*6){#3VE&;U0%49kIoi0MO0BqsaKE&J)BM@T+y+BU2A$LAqQ1d#ZI6EC;hQ) zE7(qceC>k+HO`8UDg%ol_PMh;RRZW4;Wx7Wjm98E zB?@Y3R_kfmCKedkR~KH8MGK}N+tLjE{{4lXfnmwH7SKu~9Z}ill?=?>gZYcZ!6K(d zP&Ie=m;xR{bz9roC+g}GC~vZ)o;fS#U*mSjek5%{h~>b9@1IS;OttTM-<|V`XBZhQ+RzJ4qzX9Alb&8qUhf zG;~?DKql2SoEGiu=#AbRaWl8H>@efQR9kvI$pRjf`1Vy^E-pUD6h$E5_RpDicDtfWOt66~ zTq0a36~;^$j6}{E!o<>*e>>Zyx1uX%$p*Wo2Kb<1U@8!%d$%vB&5Pkmk@GI?6pyGi zArDjmhZGt{05>jDfDwna4NZ<_2L_tUxga|+3Ytbny@7=n7*p_XeZ?#FEu&&%oC#;b z@HWCokD8KFjg=mhEO!VAzX5iZvb)9#kZSk}QmzqLA_4?Ogvo zd@m6rMTPFujRpI;kRqi-k*BdN5}^6VEn}f~l#t3+APrzg5GxfYQ&8F_>Gm&1L4;3c z{VT5-zWVRYAA-ON@JJt160r6B#oYfdB%6O|y8OH5XwuMCr|}tz0%JD~y!@--@*fDE zSB4vxX8*l4)8y`5kUsnF69(n{r8|83ml>shqEr-_Eiaxon-Cu9sjhS%h`W;d(VEs( zc~n+7OlJa$hx=Rr26NrB7$xJH`472f^1Df~&1Sqa7VASgRXB)1ItHw-larH=05Tqu zo0+M@;kS7W)C-1DQR*lo{ai95Pc14ceiDKpjfNVHi~hCv0t0n3=8{w=&^YF|2ajFC z5?TQlU)QnkAs5~2*48j8mV4S_dQm+9)@YoFXhqbLJ%Qixr%T_KilQ5wxl_Ai(>=Gg zR_3d$>x_$50poy6-{t_Er3Vnt`Ss$5iR?yoc{;3o=xOvU?%H_N2B6dtqq&);H1iHKLLk9(<3M?89XM%S2 zCZAPyEnTP--_m|x={|@6NO9K_f9U5MHEQKXc3#8&)wqjR(8RVK z>$(Zx$+!*;_No}V%r%QLVo~?_#6KlIfsA^$rY4(34!F^7WF$JP+z=L>5S8DPz|Yw6 zTHDyMk&wn-MSytZVvoDD3vzREo;#7Edkw8Lz>I)?9-16PzYmW10smS+X6$Pe9$S#( zYF_;P$^vdYt!4N4yJdLNe?^h0@zum8jG~L)RMXQl07!VPysWIl9esIOL0R2rQn95R z^biu?4!WohmwS}VEoyi`+Rsy{EHJQ+>4K;E_ql%Q?&ug;R$Pa}=2ZgD-xAUYBA-9b z88!BiN=X#RLUj@VY7JeVZ;A{61*@^&ju#f|#Um(~29ypEV}Hb0se=>>Dlc-%hg=_i z{-=U9ZJsP#62OP7=Z{m%ZJ8~;%~CY(+b0Rn=D4h|qe@KRd=3WLjpxk?%%DiSvu-;) zHP2Vip(s!xgEJoRASBwety)|(~oeS+7+E9#X0%q5}KMzxMP|+ zrb=N6ES4hLtkTxiRb!jxM0UZS83y!3+YZeKKrPVFQ%j$iARitX$tlZ}tgqkWSYVF^ zs5*jba&gDo?N<|RRwgpX+kUuv2iT2}_aagF^~)E?->?4Erp{CM+O_ZZIke!{*=H}*WQ{&RQ3(LK~jUu%{jhv{IJ-@IpJT-vK+fZoJ&6ksx zhqo`ld3#Yw!WAqJ2`~$hkzI%w6dRVp0+%yer)sp_lt-rrNCnH3nY1R)=Q0t#>40P; z36XR=^-dt9IrE(HS$DUz8vH0^SoJdpkPI5CM#z&B^N;Q~E3W#j2p6sfktJXFHxMf7 zkAwLl>!v6{F_%+bZg}O&djR+D*z4&{M#shF#2WjnvS^da$DeyNE$rC14rQPD7hb?5 ztQYQ1%BUsSz-lh8{Zffwx+o3xQfXM3^`MY$4=F&mMu6Sr-W=7?WF(COK;s2(Wf@Bo zj2G`!h`uvz|H_{2Mc>)+$?ojL!5?bhkUosUg?2dqZ3$?HNnpL1rV&Ceb){%|kkLU; zB~XEeU6>;&`XM6qECAO%Qd4&@qVzfM@Jj*y%*@ z`PGMqgI@Ga75(-*z<0X$`)8cgWg_@;va>xbj6n<@7)-(eb`xliJX}J|h)kfM&CT_D zZQCE@Khx0G&d9EC{0hku9=yE?T3E4L4OfIf52&@ZH7dtI)G@_l!8wbCGz`<)QCjn6 zr{rW|;UgbVcP1Lo{jOc1ynzpSdwNP6qQGYCX0A!M&Z} z1;qz}!wLir$Gtx%cX=NZf{KKgAQyD~!ziJ^ELd7Fbu0eN%v!l+u0hUw&Y-i`cR2@Y zQ3}-xld7&B@5BvpVgH->2*U5PLsjlHkk3;k0g_OjR z>b-CPJ6}3f7Rw2xH0Zyx9Y%4{(T$h;{^0@y^P>p_n*|9O397<9f8#*%&YipRu)uez z1iO1024x0xl`$W;E#AD61yOo^-A2QxL9U~NP$B^oJT~B+5UoOXO*J)`uh^_%z;%$7 z;L8e{fRGTRgnsO^r8OyzyI3_a3@9r#wcJlygWSV zo0IE6F(GwTuA6yr0e(+T3^x7Gtl{wd#8LusfT_W%pwfNedV0b`o|sSwW(ct-SHfU@ z`f=h~ebCJGGP3_tzaLk6tZrTK{qYGP><_4Y}X>@p-DO>)H1Et9Y++Zw9}V-$S(0ZO(*6 zdIsj@GRO9b7k~^tPa;K5PEGsxGG0UUjgTe@;*>9`Y_=?B3c2Dmtd7mq3l*vvIB#1iAok$Cn>6c$LIwiBGN#Ylr#avSSvnrJQea zr5Me`qJKr}Znb`r{zke4QS>_tQ7$^F5?ETUS==GC^=vJah3w{JgJ4PxJ30~oY8O0q zeWa=B)5*dMsPVTio|76HLj9KN4g=8yuZdo%b}Z9e1T35D;lUqdS3 z7JA+@Dy#EV;1E?7b_v^1G$3>9fcsrYM|ui1MsAhxUEMKELuIrmP)#J`{(H)jGl^ z9lAiE93;ngPS4QsuTNLHtcA7}f3Bn3_j^h>{eD_&+q~46_2kgDh%)1CC&lW~&}mDf z>WnTw0jhuP@7BzZ`0b0GvcpZkR%1CJ77%G{RQ)u85@KJ=wj{!x0rl8Rj^9@+X|)mx zLruT-o3hsyuz^9SYOF_Vy1%d!LVO+0*ST6cYs*g(UxlpYjUp2oJlhZ%xG3DEQ$uxB zyv%$<)hz=P7|f2Gn)9PIAkL0%2Ghg8er5K1Br(5f`9R#3$9Kh?0L_Gq6Vu$}W~&Q` zGQownAa<2|{dbOI@0#1nE2#t+Y!4e7Dhvi29{405$9`=ynS0dA@kQ84u2G#oH9J;4 
z_Zl8+azLGh_QER|p*tP(@7!)~l8_(X>(_=k3}p>I4W5SJO6{pn8^f*RL!q_1S8$*K z=30_Zr|ICR{0WXjk%=q8L@qp%nV6G_oEM|xm=2paL3Q00<)poDoEK2(pmG)x7kcjf zx)H=qUp))=1`)rcL*JRot`=PQ#0x)jgkqx?k4rCKq`7R}TbDL%|H96G%Zm4s+-JsV(*C0lx^p?uzYKb$u82eZ{4Vy2 z_g;;JWqpNUq+>S$rjGyhay}r9lg&Oz`da%QH|-zlpN(r3&eSF6sh_$8*B@i2Lsd^l z+8u%zE?ytI_h2XMu+f21Lsw6S&sJepIHe$CI%Fb_WT3HtLJeXA<2UPf^mSe7VN0Mx zHqHzeHVUjKdZl4jLQhcc!Ppe{{dJXW!#=jVtNf8QLz|K6b#zD%4Xfe%q3&JLpw}kSyyP{^1jUp>`>y*FGD?Mg+R5dXR?c(ZO+!zGU zgBv+%^p`%UyuYLoich9Z`nu6yTlGUZyQ1A&g7i;SjIvH{BtPA>uP&{4s6Lmx`Q>@e zO}aGsn_sRDJa)6-S~_fxgMRDIeDxNu@A$6KV$4di&vrF;U!qTM{mUD)@j}oCZFd}d zx1Qf7y~gI(T{kWqndy@J*-pQ;JE@uS$6%`FFl7Q(DMdt!5}VICe;OPmCURf1l{=(| zk&mf3{gn#ZSxkKOMd7n6!p^Qb_5?!sh$;@Iz$a0#%K6=gQ>f2a#7x~({oCw-n;Dre zo6~Fi2I*RLY#)8N_wo}`3BTYIMZDT;Q`c^~FCpeOYwI z&PBTLm+x)a(+>^KSKJ1V*&6;FCg~*M!&vgcBAL`^oc!B{=(`9^h-JO!Zmb2*Az!JT zN`)ydh>L{t9lhR>#t=DwwnBIt@^N>ULZ!-;$x_ivD%!Os(V?iRZM)p~e(_*V+{&|$ zh1Si=2qTBC$KYwEZa|>K{AJIRg6f=oc(+FDx;aIkn`Zq?H90d9O}6=cP*nM>Ky;~R zR{e?>+Cwa~>w9)|0?7yOc&dPN&(5%7&e$J6M%7*$C-I%pf2-T*x^1T`?cETQ$i+Kj z!;%!JhHFd`#coYLKZNcL}Z7{852J z4qYGXyeFuPU&%c8)m_Tht+kKyG-1J?bv58!dA6@ko%q|7Ue8jo16B^YV1XDxwbf>{ zD8Z-Sd@fbbTfq18_&nY883$`tGBn3?ikkOAdL9nX!6vzG(0;h5-4#ZZ;vwv~b<&27 zc3J{4+?FtN+;A?Ggb4t%~J1KbXL6b@7#B<{R|0tX&_@xB4;E^ zP(Rc5*Ny)WWd{%P^2VGbO!9yHfP?Ulf3Xae_|OLlsZ~dwu0qOcu&8GbZ}To=T!rT6k4>IMN5K+(6-{q~%jQRDD-#mp zUfh%r@S4|t5HRyAdg#)_+xXy9Q-^VYxyrXMwxoX#ND2o00tO`9kbla+Qrm0pQc@~d z#2;RDbcCk<`x0t!$tS#e>ZIOMBdB3k;)WfV{qjBz{cl6iW1_|+T{lOPq8A~td$<|D zyNr*SIQy-Q&PhQDj(t5G*PuHlPJnbsQ+x4u0SpF^S)A7y%k60tgX137)z=s4u;S%tFw4nz2_$}P zE`EBwuu}{yjFk`sYWKv~*U#{PqO5Q4QqZ?0k@wASd%H?!iftB{N3g$Rv2%DQqGGY| zcHeQgvk|$20!dRsm748$*ZZx?Hx#GecUYk~l{1Ea(p6A=wVx>R?^%F$@x- zC~4lM?B=wyb9EJ4W+8brm zbGvRVA1_SvMZt@}R>xrajhuxAv|u_j!<4 z`rBe#7maj&YBrCvA_;&HUsnopEI$FB!d5p z6*=Tj>a<9Ued~W3+)?{w-MOK|%EVHRpqZBTHpQPCv-t)yjK?4hrenTBigVF+FEoBt z@D?{0JE*w0UpFaYw8EG!_sn=flJ?BZCZAYr-Clm4jGy1BW%|bhw^%%%;~0MR{$m|W z3z>)rFKch=&#dO)vSlQ`bEZm{>3U1i1abFPl+38g)7rlE8zYEuSeB&&Dq_@Ud62s- zF(g64x{#l<+Lm^*L=-Vr{zaMl@>l|TX(#6iwY9NiF|=*DG?`U&HB!sG#mf3*VfN=G zIrw@N&gsqUmAfPHkF{cB6V`QP^oWD*%`ZcsCPZmkf++;;WPg69rK%J;xSck9whecC zuf}ZmIik9Cvw>vHjsUD6p1;6SOSh31tN2=1KmgW>^<2{R{*uKtTV=1>&y_2c`eT-1 znyo?OrsabaUS)oqS_$EVID&RQyaU4`hIrj98#(6ai1T%~gEQ}GiSV-D6_vkiG_}-R z-13B%dcK`iA9;=)t$pHy$f@pmRO{pOT~fY4Nv7+c2Fzx2c6ZPigj=H|HJzuo$cA>AJ=mD;2J zJzbQ8SAY%QVP4)IUdY9Fyz7Hsc@WJXpnPXBf94OirmH~ZZOZqT{?+99ozi_4cPNfZ z*8?MhCu8sqw3O>d0vkumRUk!@Of1?W0($H`-UCLEK2h&m9Wx zuzHz2I8vVq?vEm3%E93Y@L>=)iAbrpR)w`wD34wPIM$uXtr(5H`y;05ia-Nxn_l>D`(7X*j_{NmGE9)QZ zdSi3Pk_U@o#=Ly6-5c=sq8}!CeAismv7WYeB7?ES$mI^VLn922$JUG#rch6|t$IVEbVq9+ehcax z{mr}28+v$~a&dnlb(hyySY3^r7;R~gScg;wU*m$$o^sXbexlI8UXsHuIlLVJiGcG_ z_|;&6!#P(50m#*;e#V>W07GN~6gQ6_14~rMW;a}iGinj4IP(v9oyHHBfxlj06k_s5 z_Qs}=>zB%WfGQOgKw@@yLot%S6fu%tZTahQ>|NDSjO!Hln|mJM-GLRntZglJhIDgz z@9%AC{Wgiqwq@ktY;~4C!uV z0o?~ry+RMC3pBMzNb30ow%K-4kJF2df~bC4|6%c-SEA3E(YTGK0ZUh@Kz@1=nu z)qA$j8?Vgzo@TFBoVZtZK`$`T{+k4O%5Lr56a4MJkz3`beGw8&N3sDI^FvbIZVv`u zLw1H9QUH1aYD+&o(yuS@;JlbN@GL29$p9X-RjAR6dhTeq~>;K^>D`wtU48bJQP2|%h&QztZ*xj zI1HcHnhl^Omjl&?)NU92HhBLt;Dq*aKt5btnzYuCZ|*>C>v!QB;d5tlcsRiP2U0vO?gr0}t-=!ye7dxV+w`{Qsn@gK*A3}(Z zPK=Fipw`4VAE#yLw>}AXXlyVTu3RG~ zxb{>gVzHD?UZ=+O(VqCpm(-?Iv>pAcZFbRK!P1Wa< zctm*I9{5ft=^sgMj&BGRW|cQSf`9uSKR7c*q$>TP5^;B6->&Px%rn76=o;epsNPL1 zPw7?}|GC(PwrPqm{v*Ud23}B)bn=UyOg362b1}V~IRJ)^tn-M;H5H~w6`)e5{!!LE zdUEn|0o%Eknp#@k-@hx~ojhGjK{vCls;3pGm5t`imk4HTFgnAM#10O73ndXI^n7f7 zbzu)7QS{Dh`yE22q9P_&U6$*@I?QZT9<8ArXB!d)U1CZt`-G5EQc^CfKhw}9qT0-n 
z{7uo?*>6t9oRWP_c(l-Pi=KGHQs@7xjO&bQYF*kO0)if5p|{xR9i*2iB1%UBGpclz%SAAXR3UBq%JewdszRaYj;?cs-)KR-_(5@d17waVf0e)A3?-~555MwWyF zjfS5^$R)CM;dWd44+&odSs<`jxP>h^|it_NzR6 zTwU)|uiw)R3s!FwS;G*bGjrJ2Bbx%@6myUNiX{nfz6GiSBF+Y88Ze!6ytb;n^}Mq# z>V=IfBzJ!?s_bsQhKKrRjRCMlMxtTKQgz;<8ZCQ}d}Cs%jLba%gleVdHt{w$PyXb@VcLK`)|=VJK7LJz*x`lm9S#N!wv|QS8oe zP(tgOW$D2o0f&DeA|euZx}#^?*%|V@5?f{75MAdolAI|1x9?+l*y+LQ&Q2TcL+z&_ zgT{b2vMB|M`{755od(l|&090k+_2Qb!n^aZBdayI92oXC|1; zbEz*zKIBllZPNmJQ02UjG=UxoK$Y`Qgm#1cI*j zvrmH<6cEPi0&v#m4Q}BGgc!J$OgS|*^$Lk8`8N%QDOHC;_BnyV2zS1z^+R4Sm7b%> zLDZ+ki&`e<5+v?)Q?_Ui}E_V!D)3P5P_|6uR5-$)F}lCi6X~WL;hxU1-lmN zWm1M;0w_2~CmeMM4q0W;P~;?5X%uJe;v-xqYZ5a|!zA-;7<;oAY1Zx^d@rFh5qTv~dIi3{lJFuZ#g47d`-X@q?+i85Q( zt83wuW^ZX_z*LwS2&hkjgK3^T$%u@JSsrGSn(Tl>78ehDzraozl;rjkS}#cMqPKdN z3hleIr5$iyaMaHCcR<>tkW2IH{uott=1d&{;#w<+_(1HW0)28oGXPXY<^!L*n#DKM zH2X#!p1N0=m|euizNav)Hja*tepui=l;C<}k+HO}FwCy!qBoXJiqnSQYdAM*)Yb9( z!lP@fa&M~#26VTYqM4Yv)sfc9K_|r-qplBxEx$wpq;R^Yz|1p9;>aOEAih=&8oAW+ z0%unxzB{{zp}IaJl725D;d`+bOQYwEx~=VX7Ew#v{#dp@0RWFd;E8Ck%lda(vjYO^ zG7WX1aXFi!vo^NNm_KvB`)ZX{T|bf9=sXbBOpuA^Qr>)aT}a@CCa-qxg0CV4Z&8w2 zo%1Nl%SJtpz0VJ$5;N@VFpyjvdO9O1k}d19`Li4difb4IX57(G}b>gYFCkM~Tq3*wDA^4~<4Qz>^)?)9Dq>1r10 zS%QHmp#wm;$u@V;#hW*lt*$!(uNq9B^D_bu^wUk+x$eB~%>L1xMUiYO?~SEmcKnGi zC5hs2X9mrJfRD5t&1-chUIrqs8)9RL1eX@#ihQASpt`P6SzqVPOR=Q^S%(Ef2|5fs$vT0NcJWInm+q;JAOQDpR% zPC`x@()2|ytWA)9|X)_xQ8kn!Q1qGvlg@Qf`QYnC2tE#F-#>Y4EB|d-t zv82RKG7AVPA3gL69MG&3wY9at`_OSOJp(0+)P<#iAh|n@@HrBALTbN@mTkB5I)!~< zB`LzH#zFh3dR%aBfHQ`--Ak$cO*@bR7Yhy-!dZC7DqaW)3R)id0b=B}g+~)ULW9;O zxo_W6Qx(!qX%-fM-*-Sl|Dw8W$HA-x-${ zefh|mGTPf0!$L>TQ0P8k+J1h>(d?xm7st9;e9?n#gTB$VtWF} z-L02f#)H+lE(`Dixsn%rs6kbW2sWj?XbBf=C^XAY>8m&i;5euxLZY#9lW zEK6&H;%k2UGO);jMEPZft{l{pr)SbCm-Rd~IYlD7nn_+9{{Za>5Kmx`=?0VWNI`c6 zL4a|g^mC$3je2G6XkAtpG)rtf@Jsc%OuT{JV`Q}rTl>ixM{-K?m!tU*uCeHt3Uel` zCKDDiEd=SE)jhJ99{?zP1XC2HPvZsOoSM9AnD!S=uh^<-B+>lF!Y_J$3K(WeI?*Fr z&wSf9HDC{%VSWLvqGBOdnwpyWA!S_er78?7XK}WzNf=jy5$Ci22&Q-hn=O|8qPB>uTyP{M4IULZ-NJEYW~gfqpst*`vq_{d7En^ z1;q(jm)@WEETPBd&x&C;Vs=z)kaA4M&y=?M$Q;Ri$~T;bbjJ4d9qDk_29^fawT7lw z&$i~P*i0P1Z2iU_^~E{CqTGo?m54i{H5HkMIQQ4z%xAfFH| zziP>s7_MCT=f$A0WB$l%#W`2;CGf(IkKSIPU#@zh#FHZ)46)uKLYYcyucZ|!o>QF( zd_|$=57$S>2*EWaTuQGKt=oc^@$TT?A1AiB(2C+^71DQj6|!SvU!!pwv<%*G)l0x| zWT#t0a$edKOG_{V=QBP&K3X^&g3i!9vkGWZW}yH?+x7!4TC5Peu;oUxgXO%eEDa4G ziF@j?-b(3T2L~&=W_*hM@QMHwUw^IeYde$=Pz!lmWQ4*(C31$VY&!CP{xo{p(X5HZ zXuvoX{bRZ3LMX1YPY?OT)Yg#F-`sxx=sit=(NsdXH4eF(m6N=7jY!%9b5+@Upb`ENN{7W1!V~Nx!HWQuDiE3yd{iZop3Sa zh24De-pqrZCUoM*%V7Vk>>KWRVu?j8%B&>VRWuGep)-ta+D0CYk7V0i)AOu4XPMZ% zHD9Ij9NMxpH#ZXHct8*xC~H19@i5r+Ztz9$?=BRtWOulla3O=8M)@`_LX&@mp8npI zgmjb>;XD9<7OVH1Ne_=gn%|i}&SHcpVAkcj8H{6UE1p z;$2`*OHvw?;qA#Des43?gi8V?^*RGJ`pA-MVI%?i;uCNPG4h9TREryA<( ze+4!beQ6=j#hU_7Xm4MbBVs)DX_1e&?e`_X{`jFRsg;!#3g`r?SBE3YgKJ=^4{Z0@ zbh0)`3J~W)$5C9sLS8do6X?pW98f~#wajk3ZUv^C4W~?MEH{jBqA2@5`uTGXJPe4p8;M%2AOb2>7gxcVDCt?a6CR8SN7JUl`Vp@yj`E!sKEEe|LCRxN zJX(~AIM{E)4h`{YmB>qgT+jmO-Sy{}IGu7MNw14g^yjAOnsH*g<*Mu1stpYK0%d4J zK0bEfjRj+^hh*akBMccET!kH8>KzMzO~MeQOJ2M=($XrtuA{B4PIEK|hqMJB z)tG2Kc?=jhDDb$4Kb8I|uCsC#AW}I|pGY{EEKu^ss|1cpNC=-)`J~tu8)$ex^G?yb zPWkK0GJ|fO5_GsRKsp{}0aYTcyY22B_1J&bH#g9%G(LQ1!JD#Cu?=^0yzf9JnV@kQ zqE_}JEju!sGVFtjbuJUfr{_(6Tq2=yHFWu8_hzyz8z2K{XvUh~L^d$dnYCt@-BZ?k zan!B)z~^5QhCgz3ki$Qw`SYy*jA!fd53P90t6CYtDCJ#;2Hp$faks(MRosjFw2uay#f<6{X|Gc zG=XcSIO^HYio$Mf>-i_DJ^PF+EOrl!Z2k9CwEsONu-+U592EBctjWkIx~cm@8KC6L z3^iWL^AxXMraigAxRPMbgW13Jr!&K>?m+#)_m)eY9~a$SKytx;Tdd>CZp?pK{VP6y lJwyCI4a5Ib4J%|SD}SD%OSglS0ihR##uHt&8dck{{{U|c@|^$x literal 0 HcmV?d00001 diff --git 
a/website/docs/assets/nuke_buildWorfileFromTemplate.png b/website/docs/assets/nuke_buildWorfileFromTemplate.png new file mode 100644 index 0000000000000000000000000000000000000000..77d2de2ff85841cf7910dc714c60e531002b2344 GIT binary patch literal 29798 zcmbrlWl&vPx3!DAySr;}cXtngAi*KHyE_CA?(XjH!5sp@-QAr#v-f%HoKyAH{chD= zRm`IJvDRE;_TKu_+8Bf@D@q~4;lY7`fFR09i>rcwfYF11fJVW90e{gqC_)7RAw#wh z6H~S@G6DfHU$0CvQ^uTVKKC>|K)~nZv0_cnaGd0kLqg-^F@vC>56g?s4~rL978b^U z6c>>YfxuLUz{F(v27xKTbnzjeaWG%$DYRPFUR3mwTHK~tee`_S3?)p#!TN~~)D)D2 zj+bS9WHs#kd`o7C0i59*I7Sb2!7DhrVPRgsYVQXJCo$+@wvXa{w$A1UBaFy+kT3|z zY1KK>k0_BS4+#CfbE;uha`M!=D%m>ND^k$#D}M(A zfB%nsQ=eFn3_qSvV0C`5WUj>fRSuA{?bcvn(4-_F6zBK;!bm}9P5zr7jufnF=pRQ> zMuv0S{i34iN3w7cWvotfYjsDH;Vd)qZN_kHB%};+r4q;r@;?^hlVgw~O`%xCwo}K0 zKunG0BXqvW|Low*#N8vtDwX&+2t|KSHNW)58;Z?u4+^22W{iqDs=Z?AHE16@^^pvL zWX(rU5xFfmBr)Y*%eEnyx>Z7uGo6hx%F8p%Lq7}&55F=1l?gXsnW>x1eZP|LAFhF+ z($!M*@1o}Aq!P4ut()l9I*k9oNUQm3vFU39`^purBkvXV>dTl&LL5t#zRBF3E-V<2 zyG@LXgFaw9{8o?sVfWH~lY&xFaZ3zOh6n1Wz5M5i3JRnM!SBq7kFPu3#idEY?lG1rUT-5Xfl| za61rW7(bZd_Tq5Jn)56=7h!M$KX^NEqMmloJ_tXO9B@(+FmZnz6zF(A&;q|(COHRT zBAQdxLIia`I%Q})VRUm)>m2x7KfzB+ds8?mAalBCjDD0gV4OWzOQ4>cEpo+dcA%bq z_;%1lJ?L)W#GCN1ejiM!{v9keeXfFmNKMG*F?vsLEi`{yRAsKmFm9VatR$N+6wch}iuk?aN!}E$@b0fEjARj_{=MW?KMQ)Y)dr1$n;*{1Rn65b_d-A2f0k_Y2rmFZ?B12UvA4{v}q0zw0LWBaEQ`)F%8R zR9B$FCHO1E*G;Y^DBoTr2$4@gLCPpleWGq*AaNwD;{IPq=?0KyNEE~1xBcBneWX$H z#QI`kD4@c{;!(ikLySpzq(N1Kev)EJBhLDIjhTo86v2A_xF5Gl^ZDnaB0&kW|xbyVh>hrbg6pC)m_ zZ39CT%b&(^0#Ea^5lNWVHOEWyH_e5fCN~H50Am!Dn}#=s^gyf>wVd`mB)^4y2J;T& z%*md{x`lp*2geDC`vmo?7a|2h?Hk=Vl4Fm~C**Nt_&(7Z7zQZ0KGhlo259=e(Hgo6 zP|IH48X_wg{QjaENGr&dp8Fc|Q-t=u^BU??@R#0qI}`#0;2GOt5Wo-h%h_Gs8K>L7FVDcG^C!e#qU^&M>5Jc6wvBln42YPew#1rBX} zy`&v{C8|U$Lzta34b?gfNgO*5-yW33 zuO+A@07v8p=`Yk~R9Dmol=`sHFvtP50oHBGZH{fmZ2?2VE6iH|$MDCf$H<^K8VW2K zVbxf53dE7%T`@Sa;5b8RC{-xh7aisamjshWU|OO%3)Pw$T}#k82)hnC@V1+k||Qp*tcPkUCdpWYhDL1 zp70FGbtyq{CKBwpsW|#!ja|N7-fMaX@F)1zC@>ldxJa@b8X8Uv=m=;^KQzhkTpSv$ zpJcL-h5oZ5xH&&m_Gv7C3XH<;MO=$N<$)>pmV8pjphHRwk`*(_b5MRN5uG=fXHn-V z!_$c24gW6Ln9r+npn|Q8ts+)}I`2HMFi$uyFi-HE=nkXthqg#%-g>@ZZdblf-g^!x zUih!PaC0nnzBKrWP%P2NTwHUl1~P5T%JAP}__-6)pUwFi*p?B`g73wk^TcN24!<~& z@gio1Pl{mYAm_?W7ar0%5w@YO{jia8&GVdoJCr*#xW(}R^M>|Dego&k-q+qGTk@$fp37;Rs^6SetE0$;+%^%dikkP|n z2gMDxKaHd~$zxFws|E1{&K%fvorc(|7tU?h8RuP@y4Dt=AXQ3hvK%^f4N}-wp3-etm(jVEhex5_0$J*(k6U zb?J*0nhP-}e0mIHU(&a+nuR5(Q(7zhCiIm+!X6|8soI?-nNxvN@Kd8x)_Y7B#P;a7 zkhk!)U!-gRdG2q8i>Z{*s zy#jMn;Rk1q(d@O{y1OcJqwfG;$GixB7=qp*zq)$F@S+k#)(>OuP2RD*jC#cKg6_f> z^6L)h4(Sfh?wi`ezLa~^d~|(OdPID+1rzT<34@^Xk0cRBp_E45g)M`y4j}E#_$FsZ z<%aSKBNAjM%0QYLD>6)pfhG&H7o^=MBTAo}FfE5efq-sDdd znGS=>B90apHLOs8J{Zm_PMHU#5@RmgK*fu;7A81AV#v*slp+s29Ms4#sqn-Bk8N5* zzAOIPY;JJ^@^vyrbYiq@G<}o}w8e1zfenhVAj&uyb~I+0FbbJ?^3wPQ8A%HGcmWIK zk#r1UIP$^Rcp2ORepS>XoYMGt;qO$=u^!UcGW`YivxKt@M=Tbpw;{Lww^0oeA%N7U z%9G#32ah1s@oPm@$Z?W>jUya6Hx_dE#1o$(ye>6KiXAUEjJfM^ZQ%gj8pGZR=06 zQ5IQ_!nj%4qr6*{5E`SOGNaz3U?!>8A=hzrB2WDR-+JqnwOX_e8PFK68NM^LF;FpZ zGE6dLF@Q1{Fw`=XGH5YCf06w{_XY1un$~2U&bQWa)?W_kCLt#NCaETgCYdG)Ch3@i zvc@K6dnT9$iG~`63&tGAjo+NVIUC&>AsR%PDws|fGkz23)1K6xS|0aEx&u2wIl(!h zIAJ(3`i)&-S&^;fs|t!KSE$TW8lVnaMxYT@CdHAztn#4jT_XP-gCjgmW}=v=C`s+4 zbYfodJMkUd9n>Au9q1jcbMSN4bJTM>Vz1;kBcmM?bb|y_HPbm$HiLQtM}r$vc*96z zIpeW!V`hBgpC^C!{Or>fUx z*k_myThM!;U46ICftgxb58>ARNRUDC|!UxO?n>=BMR{=Lhb94WSGnM$<-15sVSo z@^sl**^_aHXPjodW^89%XW(UoWPHw;%vjHmpERGun6#QCn^c|jn8ccN%)sW6;<0s{ zbP(M08hnbofyUnKvsY<#YZY(ZZJBRHXklp0Xz6H8(V5Yq)#1?zZ5eGjZq;s`bVPKB zvR~Ws=xH2i9KB1mjKuF%*xld6+BLPKx3_dmcUZA+cYJY_vZHpmI1n86&S+0+Pi>EU 
literal 0
HcmV?d00001

diff --git a/website/docs/assets/nuke_buildworkfile.png b/website/docs/assets/nuke_buildworkfile.png
new file mode 100644
index 0000000000000000000000000000000000000000..e3d8d07f7c959324c1224f88079084813ad7c101
Binary files /dev/null and b/website/docs/assets/nuke_buildworkfile.png differ
diff --git a/website/docs/assets/nuke_createPlaceHolder.png b/website/docs/assets/nuke_createPlaceHolder.png
new file mode 100644
index 0000000000000000000000000000000000000000..93fb4de9d01adc65bff170cdaaeaa576074a956f
Binary files /dev/null and b/website/docs/assets/nuke_createPlaceHolder.png differ
diff --git a/website/docs/assets/nuke_fillingExtraAttributes.png b/website/docs/assets/nuke_fillingExtraAttributes.png
new file mode 100644
index 0000000000000000000000000000000000000000..146c4d9db61284233c486a83708a8473969f7e44
GIT binary patch
literal 30953
z@Ny$oVohe&t~gkTOQwzt`^wfwXW0ctWs=7Q*78|u28tPdzWZILXa-YQHiWlkjNtsX z4@n~PV$433;*FYZ-LMA*1-R~9`b7;mI$VcuWkpylm(6CQlBb`{;;HOTB`^`w;@*0N z)&0Dztc*5bMVVdXrrfxAPLYZZgD&&o?xD)m%2K~XwO0)H)F`rf_G0%Z(YYQU3-;;64xrABzU7nLg!U=b1^3Uk-4a6u+!jY7sia* zy~fc#R;rkq3@L+Sb<2|F)2$vyC+WOP57uB_-*YVIWo4CPaS-S*H!3LcgZ;eA!pKMS*&_ zP~9<}d_824Q9_ ztz_&~-OT;{va~Df^1JlqiV}a zP#8{R6t-m`XPP9V85_~4?3w@JP+gBN5x;RC+bsLsxapK`OT*M+*tCqUodK%0YZc?1 zG$F?I*xEk`;WZOuC+Yml{zH3O9InO6ZoINGch|Cd+&py!g%`B=Gt9RWGgJx`Q4-?$ z6K`f*hNv}=1uizbcr!{kU-3w4d3u#Lpz-8znuc``H9R81>Ful#>CuK4%EH+`GbIg` zyyFTMVoE!-&&Z^Fy?ViM!c!_JKm3~5+4|%TRnD-l+~DOw2G#eZ=t5OHp3~M?0^)`k zF-w2lzK>YbI0NBvIy!5&7mt)EQ0hHD<^bzN3D2`T@6C-xf99C+7njR@Ejeid$uf zs9ql$gNtOr_;AXnCm#`2I@{zr8x98>m+#|Rn0IrYrlt<4s|-_M4z;MR+U@R|TFS}2 z{w1v|wiEb1I<^0?Yl9`3W&Z25JWO(ldl8movX_C(LK!(k)=C~HTPvMGsP<{;e`bZH4uC)2D4)+KkqlqeADQdc@Q3ev0x#0Hy!d z0c9g&wcv~Vpla9gsIKcDe&Dve)_(HMX8OE=dYRcH8J1AX-T2nM-Yf;0-QBy}u{X(t zqx2OC559yR_NVcbiU0PvDfyvHi&*BCM7SJI+FKMl#cCzegiAi=F2b%26_556#K~L# zof(GA?|1{)a)|Z_%I%Y@&GEmIbii@w{7Ok)JQH&Me-?;UwSSpXd`Me?KXNP1|F9j!o=Z8k2&C9N5 zpY;AXzX)s}yiz$~AtgHRk$A_^xXm(gZ`|A;gn6i@V$Gb5HZf{IPn~I z8-qcK^3Ma!ny?S*2INXQK*0!0oOlT#Xs1*f5rNp5n`yCr+ChJBP1l$2He0;YD2CSO) zhj$OSO-rx%$%%^yr6p~Pp(MC2an>Jia&IWM4)|Au#SO&s^Mpw!PSwRYpPIuW2Ul!L z>zAnA`Y42hLmE(B_eesV|D0s|lKsk>{;j&R{#bFPGDnn;cL|8gy1j1L+n@D3@H$#A z>Df8G7Chu4(Qv!$JObf%-}jJpBc-0>u9clUDH1?-Q+`(*Y1w5V!*93;G7 zWUgQ@k$HsJ=s0fQI;>df_@T$fy1!X-ziaW6ETMsijvamU=AKLqf;1n#*|1|4x#S(&o>lyC|Ze_EsKqB za=BXZG-pqBln^%0(@Hxf1pLJ=`zNWxxz~;minlTAm0!G#>0~}>mY?B}+Hg9P zCXvo-h*B*{TspW_vD4fs_LLx)n1SJ2`Vg7*TX%B&nWj+wbld1|9=l6TwRdNi=enG+ z6=wR|i`N{N+FB{v9h5motIfG%C<+!&a{6QJ4&CG{v`;TkO+1`?hWTlTF>c5s;+J#C zdhpci^?JX5w0HlrN?mQY+I%_1@zEt?;zxx`z2O1tUdoWV+-v+Ik6Gb^zI16yDu=~- zZkp8L$LmLHrWeQE4dN1Or1Cv2=O??{*A7jWiLbC(j0x)uGj$C;P*m?_Jc-FMf7^jA z(Rk8d#K7+$<-&!9@w;A=dt=AHe7kX(_}UM0vA_UA&1K>CJ2$*&i#z8U7JKtdRqhgV zd|%+scqDPJpy32PD=UlPLj7)qY{G}udBG5y_CmhL4~uHX%3L&uTi-l+(~F2X$hmE_ zeDYP!>1I>BOc+O{o?VN4?jInRLxdT|G1VA`ihwj`xNi@s(_M*nzmr1zJLgN zmU!};rupQ&`*-C;&{5SChmZ3sGtpwtoB2W?EH2jWjG1O;O4ZxaZHy(J=_@Si>#t9b z|Jl#O`?Dc-dUS*vy}IvMxiMsYHk}%3GxPllwRF|}&;kqLF*BB(pQZ)jU%$3bWF*e! 
zMbgKT2nQTl>zkW10xhULRORcDt@d=+d{Y036V-lgA+WP$`;i-qRRPd0JK6YVw8;sz8;e08??@boKaJJ^qt<8 zCo)MtzBN-YK$?^N(#ihQFn{#X-XBFav*Rz1GLFJ($m8g7;_oVcT8Pq?%wb>YBa-Ps zYv5(tEd8vIn4*xme|qe3`TC`^t`E-586=u$|1cl*CyxBzD<} z1mU>6jLTcDyl_;P?c*_K=25tet?DG$%Cf#)gpv`~OMneA{T z=h8j)I4o~l|30(q8Xn2;<_6J=$80X^4C-conpIv=6>ZUKOt-dmt@3WS?ci`~IJ$*J zFI@K$?5sv9xUBI`)#&^)UhrL`ES@`?HqPs*`#8FqoMNNWqRCrt*FFz#=rqL)EHCyt*)0!Dlw=^$gprD|5lrT@m zjwdD~BU6G9>koInF37uTLc*Au%>jpI`Z*u~7&{@Ak2)v_*~8tk6qINqMDx z?=|9QMp!p)+`zzp;yE@p=BH0UKyY+^AxbukCu7jsDzB>QB((EPC6pX(8MnnCG+sY2 zIL`$KBklk_ziNqa%Hz@v^)B(R5EAZ{fvo!tM=i`8b}Lv)N=i65I99`&@4~{uer(S+ zRjs51j?}y>h!Re4-Id@wU1wEPqN1Vkee>iOv&XSye$6J=gavy`ON%^$gx{U#@?^Nd zFEq3jv2U|J@)YG~5W$PBsk+t48g>;eC#qYwZv9yslys@JU6#>uoV@{;;^if&nUoO} z6ci?ApRl@y!aiwte^|?f5&1>%@N}OM#;{wGlDTfb$AxEf7crD)D!0`g&n@xt7(k(19 z?S9G#4{F7Z>mvq1V-%zM%1~bH5S@CN$pb1XD%eXbtgI5UvNY`McoN|#RyC7q(QN$u ziO)@ku!x8xm6dVFD=ftdhQ$Vk%S=^(=5fE0 zi|Od-2nS4yUVnM=_G%CRB@aB==9ZSHrb7(H#lY2_-L zn%45b{aK5K>(b)PAqpA{dXoe(h&euumKd_SpE;U4si_fqq23XS(hm})h>VPccXb-r zdKEitO$EzjG%fGFWsBVb)D}J;siM`k)D?TnM`(St#P`;H>Nh2$gm$xe$~jSyF{h2UxA%2w*@Vs?UN_-;VFsj>W||3N$%L|o6JIGe8*QWr8Xk;DKTa}4rojg^x8f>*RXDR zARg{5lcBrPo9uct>&r-rl?9@WLCNzfnaT%`CGHh7%paaogqv5k5Cl_7eE$6TQ+zy0 z*b!79HTB1g1m}Qn-zedO@VOlrN(_4}tTq*~yoYRmhU-d5+%g@?HSR?Z@b^#7&JHLp zW?6UOZu)rVh5Tpgn{U==lIHifrW1evenslC{?xoKf!Aes#tXC9{oENIH)*KvmoNC{ zvGZgV-xB`(u|sa4x%qk6+OuiVTA+8#?>qa4hSK0;*zC06iJ2)hB%GbOa@nsndtSNr ztEXppuCu2H9w=_Xu%wn2sgsivtSpv9_@nyk*9DC<4H-DEy22ZeTiqb))#-X65Q*_@+E$v=Lzkip3X>Xc+ z%_M9-cr+Fj>ZSy3#Zu&0z2WN6?uv?v#KVO}L~g*w=x@(#VCY=5NghTJX^^QZDfzVM z2xj!d&MC7nIdauIZ-qL>-WLv}aP5Y2kfOei>U9OBVLB9GoIDPW$dCCQBYhHW%P08F z^E*k|blKTZjmkNZ0^|bH(!t6VKi(MqIm;kN%ZS!{R$EzZ9yz=T45WU>w$Z6BdWPs^`3*CmkhI+teQ)*HdQ5@a&CXGb6Vuoh%-&nE_sDVKi0 z@(~XbGKJ}Avkf=-4z{ z?46AMTLOZDS0tktI9H2(6l@nGRf_EqLkgsF9GZ)9Ov`FK%i*xgZ{8skhL>GK#dx#& zJ1vjxb0Q2Fm#wK*`%%1q)O>I9i?*(= zG|d`^jAw15t8~kDoWpaR(Q#o59D2X7rrb}>^Xm@OV9&#Nv(#MVB8i}ue)RC+dpTNR zsK!7%W{W>go}WMCSk&%_LV+dA`N0qZo>E0xGcA|n5~)JT3{D1OkSMW^V^ z6a38l{Gp|c1XjblnEYpjHFBC!WK1BuwolH5)Y z>{fc;2NSWIv^8Woj#V4A6}cR42H`&ys%c(s!sP#!l;jDW2{TtdkQb zipSYJ*)gD$)sZ5KjqwV@y``?RW&X>zN87WYAZoswgpKyLCE$VG`bZeGD-6_Y+1a~o zG>KNA)QnYF_B8w7SFN^t47(45gzGa5;yswc!i*V3RlmeSNu^suNW%09GWy$KVKb!1 z*h~g!xg6HTX>l4CZHjbS0z?yU!n{=X?s-C|7&SuS)P1p0`CxFN{nsmz4DC+NI$R^6r;-tXyb3L4Vw?0wD zsw3La-u`k9W^e1h`g@BCZXLE4v0XpmMq{|&G5PrT=ouK8F!Gd)P8h|_j@%{V>s%Ym zxrORAh7r^?TI<9r%d>O6Bbr$(obuycl6HVow2vS6-7|txr&rGXZpIc7AD>6{eQD0P zBV_t!$!MoE`(!i!#SUh<`8cX)n_i0PaDEU0>qk6o*D%Zas!i78s;FQ20wwZ7^A-zCQDZT8XT!<ZU^(~ zXbFvIYFKVAgDW2yZs3iE=^j69iWI)-3|TT37M7Hs&X0BmmK|!4w+$=zL#N27XLw-X z%{?Ae4;daJ_L3rfA8Wy`yjV<*<(Ew{HVn=Gp#!4W53naesQmSOIKB_+w7T zZE}a2c&J?@w(-Pmsrrjb_>xzZc}X#A95#M0Es?UB;rH;JGN5`1j!%yAcO%_!eP99K4NN~IJC1OG!yEs3U zkdZNfhrpm(> zsvo`c<0hGrWq4tfR_@y2DLkQH#l(G!77}r@n`izf3lXxF0BN(bvm0lvnSrb0p*z8&@Wtwt*|drtC9nHe)mGXxdBmUB<`c-`a63eP0DLS4v7sIA;GQ zePa(9w?okS`nn%8d&9IWpge<;7%)BM6%@RN5mcZSszyWL4ve|}4jmYELs&^;BR zzjTE`cJVnXYBzTJ?Bh)`*H6Jst+o@Q%*iVb6r=$H3-M&QnV(Gg)_V8Cb?7 z<`Bq`iN}BP1_h>DY?mm&XNMZ-_1lg-j4#8B8Tt#f`Cq<#39u8cP^-nfv$KPu0Vu31 zDl9P(ZYo@Pnkri3UQuoqN0Ur?i?W7BQ2(kPl!Dnr<=-2Wl9j!gq}v+u?c29c(b2=} z)}{zWrQoB(`B47faO1BO6y%j2NQ6VPJ3E>Wwc8kr%-5_*$)=0ea(;Z**X!$7LPk#F z%S*Xv;ySH@sAnE$J7Rz-tIl>ipysGKIb(nl{mxNghVD%v8kAI8`q^o3>8XA@jyboI z5=Fc5dLs0l?Ch7O#~B$Jsly1s`vCjwxJZC>nT-@)QH|nsSjWc=q*~j9{wMu&EZp(y z3GDmhxj<%MM2u<;jK+gex|OFHfGJ;sl>9GD`BF*><1ZGBE@ZQuk}@`ajAWHZ)0gMd zs6FFmY`Dx?onM~n`UM9MR?h_DG74q%7g^7X0hfM7A)NgWzdK$s#mFj zTKrB@Ck3qVR2P*kHW;Riyt4AgNejzC$L{iZe!J(-pP%g|Ug}L$T92J>o*zyx!|tnv 
zsWxFhtofzO0EoBa`Tj6;BkbkTlrmxH3GJ{>FjZ?WP7n;*^>IN#7-MCozA_o9-@bXX zPdR#(mOg_e1d?`Fs~{SPlDGBz&mz0kS2!ZZax$e?J^`v@sz1^(===hES5Za9w7}NY zl?V9F^))^s=wK;uoj|5)Kj5*+KlThts`_4c8Q2ESacWE4@}C^cpG15e&hD`tqqnNk zSlK^8y>=JYZLDqLq|4gt2aJ?QSZKm-q-)iDq@iM`Y}p?Efy0PFt(k2<%hyL=e>{no z@omxR8v@2R=J%$OijeeER&FjOPz+!d7k4YT^i@Tp^lP;W%B2;o%NN2E-#ihwx3_O5 z$NmoO$(n;WSFP+Nk>C4j`E7{^>bFnuVIQc`Q@R${0S?Iqc673jz_X8Ooll0kgPuSs zVYfL!5ZLko;O(rHQ7f{Yjy(k|LFRVyr*G>1>ERYQn_8gGtEv*nWZahs7nhOw3H-&1 zqm-$t&*+pRnIe z5Tj;nTmX?)_GWdl8Hcu?no00vG#yTR6zunQ>xcJEoV3YvxB-7}xIEvouF~jC&8w4R)Eo_ zUdwyCyU1bEBP4A2bE?i|T*}7mlqRm5b@ocm&p28Cr@49YE56kV63h)}EHD%AM71~t6o*nN!2G;`$5BUn$(~XKd29#!uw#(gVNl5}V>5>`! z@#W*s8UMvV%s&sU+UYza9PixM{m_eBYGM-vC==or(9gV}IDnx%c<^9hX(>D=rVYL< z=s*h#3x#i+P@&FcAvH7tR2!6{>*Q2aK5!%05pBY>I67TlpM&~?nHeib$Ic!NJ^nY? z!fa+E-{HqcI)!R^9K?5Fz5s6pI2euu+e@*QCLJZk#g1n?tvkoAG$fMG*kT!V3g7H5 zv;odaqD6<|WYDa}sTe0IV!n3sZnGW>vh@H2M1s6el>RV7{O9I$P|{@+yMZSVThH!2 zAJ%vQ1CMaBV6WxFjoYsq+<8H$Lo$F+z7szHrKqWWzkrzi08NWxh_}9lx`Ty%+LaZE zC=`Dn&@a(oLZ3b(VI`%Ze9aDkKC!vCXBuxaQ?jz&SBz3C3S zoPt7XIECQZ=K6_u2B0ESj9CG%{Rnb@q4mC-LQE^-iu_|ebb zMn@NylA;9tCmP%roI;%zbl7~OqoZoSV>qlI-Ns{reetETlEa|utBA64lmpQ*2q{1W zka7j69A?w?Bz0P3*kaT5ZqUq;NN?0IF)Tj5Yj?3DjM5)QX0dMTU6|{o#xk;OQ74VI zdLY2JN79ehxv=Lb=OMc?v_b;|YG5R=)F!PhvsRxXBU@nBHPD?R{rK=vPgZgD^$lK-QmCVJ%d? zdE?%{^(7XwktYdm2dWsTUQq%v)KT*hNYX*ez%W3n&VY-Q)|R5QfQA*1`gkE<_4ce1&m+aqRIy_;KAmk3!SgH(R*d-5V!!6V2_N9_zfxbB=A0_ zqx(&b?^|taGkZO|f3>t(?bRzG2L}h?LnvBCB!gkB^1g3fnSXNYWjLjdjt*=!B{Tv%2rjEDKWWwD`}quK|hG2SFS@n^FS6`F2Ia`G)rz z`sL)8lcIsknyQlorzHZQCtWx2G`?rbr+(r0xNx#>Uyw2#lyrj69I3Xa2hM=#K;uk{w`ioJUE>Q_exl8D%?4fum-143}&sM)u)w6_)G6y@gG`amK4gM(Wv zDoh#Mp`XOb(A=k%dIKf+tJrl+#~2Vl2B4D9k&$o9)Cho&1*AMX!Wx{8oCg4yz#85DkW;Mza zqh500`8$q0IJmgsU%q6zB@c(DW}P%5jJ;dt5k%lCuH_(^@6eq<}&YwUu9C6wR%zkfZ%dMbKrqR52J%34Q~GT0q|y zsr1Y|l6aX6s!#KSvOxmP8=q3BZi4o*<#Fk*;gn#Bc-gubvPAV~Rh^%3-eYVn z7bzu{ZH$qE|M}br^LzOh{N!Wgn-`mdX|98|sfw1mu4%5cD zG_nU-(DR!ioxuso~6qQX(Zi z{o_7jcn@TaGLt%4{_|%)-h2W|4)9~L+8(B2of0BQnf*K9U`unRg>vvDz3vdPJ>wQJ zE45FAhGA|~RHFam!wsI3l~l$6Z7elj&JdVSpjhmT=!SyJC=p7^i)2@T-6m_^RhUfv zOU?bHRp*R+#m+{>^xVF)dRg~8VzV#xCQ_b2nr0ggiRHWcs(kZgyTG8PKrqCOpXYX; z^tp}qN=T|zw9G|cnbS70-6M|O=5=T{46;c-h0=I4nPjECK&uqx-xrF}QPjuyJk80KF3=VUXOAZ4GX-5PtN-huL}8 z_gcYlAm}*l^abAl1=b!f#g6TQ5{6WkUHPUZTJM3%lLm4Pux}VAgM)*=iYM0jo-2ER zBnwvo@gfXn2;argylBU7Fd{*i1Ir7tLRaA_$>I2{_GhHhq67b(D<8L@ zc=+%NbQ*ekT@b}9CpR+@GKqZgpyYf3tO1A(=|=^m%-B@Du6N~xNy+Fg9b5e7$5o`B zDHZl+ zjrrQoHTyFF+J#;YTIiSOrnmr_#;zY8A0OwcmV6u;c?~=rP&v1A9p9r6U@icMA3uIP zThhZv0XE%YM>K`RJ&2p>g9Hqt*XZyD&*dV z)*r-e84C-G0edB-mJJ8w;-^c+Gy>=V-^>v79i$grWxH&W*BMotE)C4p^-rG|n5jFx zFEB+Y#k_uehtiaR?uT@htz#OGGn67yDJ3$q0~k7f*)$a;o=lH$JFNHe5d(rx0eO9_ zCpcXuJ|Kk_*b59V2-(|Ip^N}Lpe#V^OCqoJy_39994W z!T>=1!a?2LfjpVc6a3NzHFd|Abv}_C{8K0#`qIG7h&1?)QL zQ`^Bf?!nwP?Jh2=DMO13ZjW^7b^cvrYE+Z(hB! z(S$G4JX;M0ve?_V59OjWb^jR#z0Mm_2v|JYvzo0gsGLLJtXd|8SeEDbhG_UVgTu#a z=d;Y=V3Tbq?{!7)4=Binef}&A8XQ13eYk7J?1n5|!l*A_e!*mE>+Vj6Jqd~n*!JoJ zB1%dRMC2c>VS+r*Czf+5Jm8`2gFxp$1g6tlVX0kiF-cgIKu%ns#Jrf5nfV45egY&$ zc*IGYFP?*0VPk7c!AA@L9|cK&KylvP-Q}>FK>;g9=ey?>m;@jh7#kapY#jKR$8!C! 
z{7)2=7y5sqAkO+t&{%iD!+8Q0DzpOFOkGj;^Da&rPsbHiLBobE3!m`1%p`K?B~PU8 z8VIJ>3Gd&30rvq!4n(<+cZfcw(5l_bYp`&GciU}E#RK0$L29FYsbWM@;e%>kY|wtv z3JX6$3&l(A8=(LCd%k6O8kbQey?-LwX!^Ksgw2D?nIM zb8|5OMbEVa60jOzfC)<*AOw2?`V-fpY9|PJe;o@zD5cGlYDGYNb~E_T{b8icJJiI2 z@pBS7tf;D4Z8saGZgFJ|mHgk*A6x9kw?kdPgJ8s9W1y;5T0Ok$`=W-Yb=6L^bMckO zU+eBk1>Y#qm8uxq#(7+AT>Q5RY@s~QewQos^ap$# zB5>AX&2n9a304#^EaMu>?1b&YFKm!(dmQk)!h@Dr_r^^??v3rjW^6H%qE_%yCVs}7 zHoxPaa^XummTL#P<@Dvn;lF_5kkRm(o=(=*QMJBmh+kfn4dLH!mYusz{^}Wxi&SVR zz1nb9kv$?BlwBecJ>zV5K@Nq#C@H1PPc1Mz8!#q?L^#`5=W-qYC9~n_C{Vq|AE|}V z%t}#TlL-G>dvj(~M#`|pT@dTv_~_oh@R6HWho`r#g0ytkV{LsK>auw88xPSzActy& zfBp(kj3k{rDt<`t5>6>nM+XUy$Q9+=5Knm+o{Efhfq3+P{34p;!&XpS{7TBc7uDMX zF|E9W_4fn+0UMbwi9Y(b{$#ZKZnv@OMZvIwmX>>YlhIQoAYyhckQC&I2itqBHmWm~ z5sl_$(0~8wW5FYzq5QMd+Nhk;D@dB>#gd9(Ank1f{ zDza5V#wKyhB?D}c39F_%z$%aqiD-uc&JD0$EKim>3{h;~NHx?U7>_Os@+e@PFd> zZ-Xl%(Y+m{qtwU>h{xmu{eeFpOnU!Yu}YdGXJJ7Pgru~MEs)bk8$9^A;zPzQYLJ}I zsKmS;+zmz->>fcwD$X%8wtt-Tkn#8jp+=`-94VO~rM<#JIhVJ|n?yGM2zG?`d@;!k z_SXg>1!2b(|7GCZa4v*jeuAeu*8zM&WBwUT)Owq__B+JR;7DK& zVQJ@P?Vb#TCy=f>MBZGXrHvK;Y!CMLp9PD20q{2{izx7rr&LhA-h-JDpOW$jyzt=^ z4&dTPhYgo^JO@qm;hTYUDEI1BMo^)Co)0YU?!5Wh3udJW*o zKRLIzJ>j+~I&5M+`n<(q82m6}o8NIJI%p7k}Ue``MeSya}LOcOih29paO zAE6 znH4jL+1&@8^=~-D0=eZS<@~%nbA<|HJQ*qQiz-jna**>6fH3$5(byECMjeO-?XO%n zHxEybA*4{bUfgj6PG2lz#pC}W?7YWqkylqI+TobkGnP6(tVyB0GsF+V0eB|M(G6$% zfgR=yq7vcA(AS2gM_~0Nat15B)SdG9&FWz|}4kkrERa(|u6J98YD zU!NWl^E!V9Dg#PYCWNB^1LO?0!)`9c zhX<<+UM$p`@iICBGxJ}HjNhmk>P$)?lSwI$Y7|g_P=|}WEE`a8S0Hb}>-?v;Jnxcw z3ryNqP+xV2Q-OU(Jjoo5*6;qC!ej2ygo;UuApp)8@*QeL`+3?83ECIC(PIVvU+F5c z@R8T+O-8CM{kSyeU(QooKKfB{1f^0RNFJSx(%A$K}9G%j4XB zv*FS`FK`z&9Eobw@vJY||H#ahaSCZL;X!m`=3T=n35+$M-lmJylNSHwUW&-$1Ox;i zlRvRvcceJ1!O*%v&deOTyKA#qcc=y8ffZN$81W2KbRaMsTPQ#U$c^qy-Kqi116C)n z2f)YReOQ)Hz!GS=Y&?e^?TlD&Ml}*~vVnG2kcMz1+OnKRqEEHO^f%ik?)(21`P-lES)uolb zRHS7IZd?pBEjR%YP6;SMxxKwz?LsI)_nUX~rpfT81WG2KENf08G352Kv$6BofN?`Y zul@>%Ns#x*Tzc&F;OguOu;Z9mQ<$O@8Z>qH7k!XrDZQ^ z?*VFHq&kWH(EG9L%hP!?-|Z4Ace+RarFZ6a?;b&r8PN!J3M;eO$)6J z6i6D^J?z;|QD4XSarGhg zu@B{Nx~1J4$TIpRG&Ckg@1}_$aYv~Dx#y$>|M*)pMrdk4z2V2wFMln*`kFHGI2j~< z(KNyrks0kUhp>sUc~QC=TPg9uvB>R2=xMD)u1aZn1b`wrRP6o`X? ziu>#fcO}($q9o7)C@?rPofrvQ-C=V=!rlD>c>`mOr++jYhEVmu#BBv1K_2HubVmy~ zB##*xZ~TMFbVuM{#0>cN-&`Au!mC%6;Cz%lW*N>^P0fplh-iXoE&k3Va4_xBUm;qf=n=Y@=k0#$fzM76772kbBzR zZo(8ZyZn9))5v3aX^F*ZMgW8Z*n?>RWd0zAF(twsSJR?t!7%OPTU=WD4LNOOo)(zY zNSP@fsJoC51@|i?g%;^ys9V-W{tunTuI}^lXE0|#IM16@v88yGUA zK)$7?18YF~LY{2w&I%<*K88PXnR(vUA>~!2B#TECx0xyhh-i@}HyBudwx-E)1_S=) z@PNRky1fqu-5+4pd-!0OAdr?y26792!xc}kER7T2|2%lA(f$6~O|SqJ6&0dzFL}o3jxqq)WY>s&#^<&gyz(g~US>Rko1oQOZ%42XEpDKDciY5a9z# z3-enljw1}PC^TGfu+k4UjM!rT(Kb8yK>DA;49L;=!J>@m;700{jE+><(tr~QSA^U+ ziFJ(lIgEVE9&qn|!bpaUFdB?;81=}V@BbSaG-h(l9yTM ziro+^Lrz7f`CxXq$F5BU0FaQf2HiahG`Dig>F)m~`^YIp0yOzw>X5l}Jq`7INsePzD#52b(+7dm6m9zOKO2*O3Xiuy3+ zj*^L|&Y*I^n;d(m@q8Oo(-s?qC#^UQb0cHRLB3YBin{vF9Lg9|jbf<@#&!4$CEkC! 
z?(N+ets-optmnT!|mIDZp?J|Q7@QQ!p-Pl{+8$dE=Vz2LeEkbeiS2B}aw)M%a#tk<0{ zv_(K-4B|rdNc;+6;`;LqOUz?@q}m0atf@%?YB96}{G7p;kOKg3Z0zE0xh%k2;ez4* z(NTZU(h&O})AH`@lY@0wiO8?X%*|Z@ODK9_gBgTG3@dTg#xkHKwcwf zWmUM1!OPD}Pe+H(>-?<5psUfM0&1}=L8a&Wy;aGr{tVfS{QQtedKDYMZaa|c_(YTF z8V;+S-y;&-djRtrCZUn75ER<*5O5MGR$viVzOb3AHP82!FxHJf7@M}8qybPul5uq8 zsHl`yTN?+B$+WIl()=PbE$v2Ni_puL0p+=d=sutf_4V}$0>@U!kQwAt6hhqjb8eQ= zPrxQjnt6vURytM&DjRGvt%8i497%3C)bTr|IJ7Qavd)X-_R8juk3-O42L=>3cpyhl zNkw%>tw;QwymQFq#nvV1fkY(`HKZ3`51AtHxs5F?mysYcI~#cxY7K!i=?gg3~xD00C()0!)bK1K9GUD?L z!IVb+jTjIaf!o+0P0z~x+=1z2T z1}LfuRtTIdi06J+Y&Oc0blRn+p&RdAmI}W4wNOdPFCaJlyF)H!!D5(dQLBs z1xWfcho{I`Akv6|WEWUSGBVwswjmF_NDAbL;KjMY*C8D(Woyd{z!&x*oNQSIfw8mLR1e7;6Ht^EwCHsK;lwl8wr#Hti%UIPaGHO!7wmU4aTI`{Kom&Its^Z*i;IAXrXwJ~4TmT;-ezR~Kx}&KcTnZH60SPINs^ zEU{g0@1F|!NqX5?dt*JidMo5nIswMV!kX*NzZPw(Q0qE$DE;H3(6Qf7T4dzqZRC>i zJ!0a62+WQoK1JkpG3Eh}j#TTstI0WaLMnbSk$r12n2s(BO?b!Teb4wri)C;2J$m& z@tZ2+(4_;M^5by6E1eIiz zoEmu8MtUHjBh5m{+RJHabpuu^WpI+oP>|FiV|kr&I;HR5(PNnC_7hB@=EM5Kx><{M zsvN_fK0OxX{9^3-1?T`u=1n3Bfn*9Haese5w1SlSdhp+|qpFxGD6S>;WN6j_g2gGQ zKgnkEErsNhJBWC;bIrG+7*tbuH6dDy`E{kn05K}j-BZ306RMm_Yuroq}h+(FA<&)C^v@-#MFAR0{SD!~Mf zV$y5}@Ag#VHzvsl1bV^KDSN|?9P{ye5_Zhql~z~Zdmi+F*EcmKBPxmtxeh2X@FALp z{GZD=sGUQ244I;WvxFbAvtjcffh3%@2z9@&U~gxKOar;Y`Ir8Q2`L4I2nVYkn!Y-$ zxyDsDdRe#;$es$EHqhh@5-O1 z7aZ_P)hbwql;IMb`O$-OVLZFPC_3TT#;G}6Gy>7QY3r!MD7@p`(3f@wh&yy z9P6HHz*HUvV2`TFAD%ow;mzS9@1Zj3FmC=WBV+FuMQ9;-QSPVX(>5K96%cu6|F#hr z5C9(U*cLL80P$k%`vKatd3kx*9X&lg8F0)J=9F;{_Ig2loyXQ=oKEL0NlD2L)e8G0 zXK$=!8N+wY1!b+R?=F`b3VWI)#l>fk8Y>9yoNx^|n!!Vg2}7vBeLn{onXFrqt-v#D6W;|nOmumSWg&W_io8xoP$E%;8JaJm_eR{Q{MsspLBAv1*{ zB0dL>2aM|FFbAs+#!QdbR2l;MR|j5+O+iu|=fMN(N&DfQG-fydCSOd`D@zbQSO{jH z5-1oxYc80B4V0!K7qnm2VjxfU=8>okQ7gwP5|>*2^?EXx_{=7ieFtHrl&nR*ef51muk zNe{@+ihrTOpn|{Edd+W>CVe zyXPd!_sbfJla2bUS6CP({QbPanQp~vuSO{D8GM_$K!Y=t@tox%T0x3kN@SLOhV2Hv zJ2^XJPXupDOLYg?<0oLQ#};9|ZXLDUfMYM%TT@9e{Dt)c6!PL=e? zJllz%B|@<`Tr}A-*cLd}nrrh@YD2X>#78yI(UmaU38vcm8@0;N?eOA@=4l;S?&oit z{wAhh`MG|b7Zv9iHwKJ4YM9J06r zUY`lJvt*X`s94}Lpjdz2%Nj}tB!mqRh5hEu;SwP1^0@l*kc^}mNT_wWEh1&-&!?3; z_{0>rof|%<|8vZenCb8eRXN@0qS8o0GW5ocDa&F_9|#e*zt=}y(IFrz18ifjxM+6R ze3+*DA?H{8{`vc+D@v}$xG9l)W`3WdGgexX<;@G`%)zKis^j@BkL3KT0Tn(rvn!JMkHo-qI;$6|sjCy^+VT%2F3X#4Lv8@%K&MYy?XrgjuJELytx zwuDo2mEpgj@j-GoM&t#Oks(8Ahw_u(WV2xdR^$D_!I=cv+vBgMI`?0THjMsnj!~md zdP7m--j0UUaL#4?Ix>l#4X*8()mjj(x8Ciwmnb&M^!N8af8oMiyKDWo-1NR~y4UY! 
z-ipKxuZl`OtLsM%B+WfRTFNb@GLeb7k2MaT7FT@ICP~P}wFg?WRu(gP*j74C5kbSo zJZzHN<0@T8%E-t_OZjSln3kB30Q1Y_=uwCTdT6T~r(?K#p;qmBJG=Y}eK)zCIi9{} z29h*hwyeRm)6*+G0bDAeF8DUF-qmrv%`D*qU(3{2T4Gmlyj02K69*e6uo*v`TNMgb`#vy`GRp+8H&DmMKq%F&ZGgbtC znL~yR8)lriVW_y$Ae=<1g=x-4Av5OM%ZDV9k+_A{2Y7ym-qb|$Z-7+yBnVr<#9c2B zT5(Km-S5Uj(QpUW?yOI@z$?f(0PJNFH`MTq^dcYoc3BvoO_%-0kMDkB{a5{kU|tUN zlNBc{kV|Ro3gkvn_5r1-7gum7JUkn|s!Mxu*7^ihug~r^H8l)#-?9?31;tXt>^N`E zoZg&^9Px{cj52VmPu}`|6ciB|0j!tzA3q)qw<8)Fa&yJ5dbP2+sKTJ*+0_@k5-KJ{ zJ%oZn4-jZ>^>8OIw}+1(ce5aHhxg=3Uf$^=o3smyidL6}Ezz_0`t#^#VI zoA+>B|44Cxd0fMx&_$;1jG=CKYg=4naYiu#&9bsmpz(u~UAw(X9!h|b|MaO-U0E1k zdEC9*6CJ073Neqo88s*(tO$|gQAo;7AA=x@HvB+LNvkZuOC}>c?sG>BkiJx()59~{ zBOtxp0S6~0FiVCUj5N6PI*LmS*n^PLh`1A7>P=xMqNcq6X$j=HD+gDwEE*3LSUNYZ^aVk3cV3||b>gPM>A{Zdf;z+X2L{lGgs24;b$X7P7 z$2L+2xbUV_IWt7VK9Tu4Mw^Owy_Lxuqhix)>Qe#_c_;yd6&A zVdm&Tj2IsOa_;OUJEInymDkaciz~=Po!=X&2zQ6*yahL3XLv3qW({CzRTpJ^iU}qvTRL|AzS79(Xi$SmfK-IJ2e!Y+`1ts2*PWpgJ8gRu zSNc9)%k$J<6F*^5agK{z4>yldk5d0UaZdI!?7(7HI}wv4RLhUlcHBR-Q2PO&iOy8CjCI!#~`gsMRnj7un7#FNn}S;--BA2IFzL?-P&*5 z_H$?XKQUk9KYyusw!*}OpJco^>DJp?v8ao%r@}zNKfK5E*mSv>>^4rc_)o`|ALP|u_W2#8 z@CkTKR8&$LJ)mdEV$Fpilcz(uesd^)Sn;7v+S;o_tZl$5J0!p2Cr1Z|b8&H%U`d9B zwOJt#804#JY6#jK=V=~m@3-0OaD2d~wB}96CAw-B54pL6Pfx15Zg|A36)S>Ut6w`L z#g}ujA9~n z%TCEej_b@Xc=2LJhdw8AOxKc$Rm<*VhK7V;>4sfsSlsSz>ZAc{>w^| zelL6Ur2oDdGjrRzXW;X(zH7v4rm#b{HNyS;(E853r%M%A>uy>;PDSPH&71DmgEu`f zwd-)xelsZOpBWy@6KNLLPgV9*8LfPD+?o3Ke*Ykj<|p@97+}ivedgALkzBrNdWOpP z`#zn~Jm&8D{{jf!tWWUU%3D0BGrBwbiZJ`5sbvdyMER}bI0D2RyFb?W6V{|CuKB1ZlR+gzrYYWi^?d4t$%lvhGOThN7@^7c;Dq@B z?hNeAOGxX3{e8r!2l2cO)}QNFgWH47s4fjRoBHw4;n`0hz3+pf?zIob%n0@rO$upT z%%)sqLtEQV%|684H)v(sLnE;e#Dc{><5zg-(P)Nh{Q&eD&(^Eq2aX?~4Obv!*>Lyw zy|lvvA@nv?hPoU)a)j8~-~q*kx_>n|hHxSYzmBkyiI5|#@X)8u0KlY^)uR}F+m-AY zK!v6LqIt?NtQTkI2gip@((;l$CpEAr%(dqHs*WOU%&(ORop1?Rs{^$L=YQrF5cYPk zfo4^!$W})lG;cK(u5Cg@>uqe71B#IqM{=uTCu6~6fL`z9>lApe)g71@h8_23j13mR zeDDmH6(}LS+Ns}IpY@7I-uQ?{ACXObcLL%nyfSy8PB)S1K`~gj`5MS($dE}E zLnRHD{ub>ub0l}F@a*7A?o!IihTya6U!F{pfc7wGjyL8q-3TJyAL+XI*AySlojx6kN!4+nDaz=7#>rod6}sfZZ- zrSkmQurhnE4&nbLx!(xm&sC1m6VylKUl}n zTxYNf81)~p?s{D8V*w6BfbP0ce`{xO)s(|lZNyw-X|9zvjk2)>o^tS=+KIkSwK@F33#PdJAK!< zQ|{BY3|Y5;tgNgqAF3_4&_>hYQ75AA{F7>JyLoYubK8Z+=50!((vy3#`tNE^%Rcl+_0@A+}umiFqi==4&E zZ5PdwgG6Z#@JE>a;M%86knVc>-yo$JBi6!Q-(dn$!FS|5gs&Ij`uV}?E$3C1h=sgm z{>R;GB_yQX#did>e3OC!|ojl|V!>QcO!s}*Ydd9sU&(8ll? zwGp7p4Gmf_wdm9nebA=59QGFsxy`s_yj53QLzJbe#58{Uh=D z1~GqQvguMgPDx2`X`Z&i6~EX;4Jl}1haRZCnbq{Aw1qzvaNdT2e+<(l!cI$M^ z>!vmX1A~*Xv9TpLgd_pPL&%)w5?4=cA)lpaURe+}bvC1k1Q#xP)rOpQY)^z3x%NMH zhg88{IQe27_aFYhjTFPE!j5`21($et%V6t^i<^*|wc68{FFluKWLGxzt(Fdp6ZX-! zukHn_+m?m8yzI5Y3b%wmV-!7?P#$R-Q(sk88vf;zLb6MPfM^zGz;UOJN6sM?GSr`b{e62HMNo1X>WHG^kvvmvxxAH+^b3))E93FGn(*KAS;TD z_1FHXT-_WQ?}p?C?R`Y()TjK!D-&0W*b}r2%nO}zgnUz2p0?S=(q~Pb1f3oNH11T? z#w;60@KHsATWk7~%J)&OTa}r3#x6c%z1^0KF8l8Hl`8`?%kNNZ*6S8wjeM&P!O)Nt!dCYJ}NTO@^%1Gu7akaQBbPu2y-3-q*BqBW2*0#Za{3*Qbm&#!}lFRyt zwbAcJfBESgC%6FEMKE}R!TM>E-6*1HNc8X7*)umw%eI8QzT?wJgZnOD-|o>DF386=2?rr)3spx83FEWB zP=PCKR(1^I7n9%+%^?gx-7lVvirOiNDu&4KQp^`ZMFAgnv=#zO9>Ors1@6tCKi{Mw zquilo#Y(1U(SgZ2R_x8@g57F8pUex-zJkB-#EBCh>s?a)88Q+QmIfEd?G^(aa~|Q_ zQgRvV!V3`Fjgy^i|8l!J$K=oJ7VX#THJ|JZt?y$_`N8dShjn**U^Kaqm(zKw-?bw8 zA9G!|MCh2J+M+6arKO0>tx_9CyTGdI$$Ryq)Z;3f+R#NYMcWz-X!nLZFPw=V5*(Xw z?b_*q2teE`g-$xlkH`Olu+ zXW&T~_mo8Js#kY>0@x=@mug@?5k6!N$}K78C*4fSU^zR3w#EtuYG>4dC!^ASO_cg! 
zcyMqqwU?~BwFX1<2@9zDXnBq*8xJ%Qr;Xf8DGw>uvlJzn-iMpcArGf0xDSBbr@3=6Q^rAt39X zNND9HQY42hJ+)zEvH0g@tpFGtQke znA^`uUHQZJEv5n9tAl7YV=&30$SwfNk87<=>p0oX$|auGavkH0LXuadju1hhCaisE zEW!bWA9e~L1Ah^&Bi-@OjMf;YB2^n zG(DcQTJ<cKS+XmhGn%O; zdw!aXw2dW5+>l~tDlGWRbs3KPyr(C4zN(wbW^%gV1!n}Go!RlC#2NDpcmLx~4aV0?}S@A@;@|aZQYmslrW0F%AD81fB>>s0YWCsLv|Q z2rE}Mi5E2o1ebu#C^m~$tkA6b+!}P~kS>K=tTN7#n1IZCdcF3EQ%Upyg}zI%|Bhi3 zszelPgb|SuxSUr{v`oz9CJnV%6>ehLCMIK&8Tuo&MKCm-c@R$4mlHgk)3_Ox@HrlTs^ zwFY2#M#}l^v$VdGUfjmU$s+~vke10}YVy6AeOIg7z&?C*8G&*=ZloTe3f4%}Gy?b0 zg#gJ-Y=a2=CRAH+prZW=T{*RST9HQR0&3nKGGGw17h^V1TDy)gE~(t@@C%q1^6lLM zWi<7f>|k6W6Qqn?s_rIMYU`Id@MSy~IV;h3O_{wQ5IgJbM==m$%jKJbM0p?}+{C0T zGmTF=wLLEmkDYgG2l2>+nH3}dL6EU@;s2Z3K)g;)P7=kskgW+PgN_~*F0trnX0BPB zudrF%tX7>s0{T<#Vw2kOcx|rs0p00Ua$i37h>nI=7T%cm>*OYH-1avuLs+i8K4o}W zKe&@jDB~27#|&|WLJhXGh^dxqU-|y88buz1 zb#vW57ea56CAUgR``xwmnvBzzI+RDkvA?9WZLjgY)iL>5t^BIbzvTxUlV4_Nn8raM zT$Yr*B^NKiCfRS7{P>ODJC;{1PQ842AjJd_(`ZbaJW@upc?=+ zoKsl%5O#MfQWOQ5mR#9g4)L`L4xu?z&w2nps z5KvA9zB;lg7DlTzLd&?ga{bB9y`!9t;)NWet!n_Y2s5)#^{KMf$>e7Vbm`HcjN^5>6=HE0a=wk z=SyrX&suFcXh*M#tWf_ni&B15q5EnOA+I`Q>efM&1hGeH^&4VlW(LuJh>vh_Nko0% z!JuE2k7+A^zs=&T<}XLYt=jj3n~Kf4bx)KxBA0R|{GEZz^}Wn)WS#pSsr#y`Q9KzP z&Cl3M4_nt6RxZhj!i68c*)6de{pBPsHC%-KrMDFo6@fSkJps&>*+|QpPe2Ktsb4O( zF8}1kl@lPN*&1@KpxUW$$vo~=(NqzS5bLhLt&Ojxx;-Mz&%#hv@~sFUp5S1(ug+IJKJ5%Drvy^Bhd zmwaYh_gR}Z-TQsx;oULQ75Ds{-RFlgRW(bYgryt&ELgOiX&Yx}4>X@+s%$i`*k_=o z(vLrX$6|(~bnYJ}o%9vcL`?sNX-xP3bjqh{{(r?Z|3TgRPd2gt!;c$2UQwKv=aXBF z@H+GAuz`0O5Z5OlNXXw(uG}wu-=Z?R4GR+TEW!d^1aKZtF3w3G!Ft0o0dDIId*RS; z_h+f{Pa1NDGRyGMYMj?gh2h05CfVA%eE6Wwp%2>}wId51yP}+S-I-=>ByV}xT<7Ch z#mfCv*^9Em9hEiLkNzU3asFQ0K}Q3r9)E*e*@gjI_|>oADgXX2@5cW~`3%|?EE8#; zAt|3Zyto+A$%drVzj8PqqRaQ6(OO6|6qYuu5U@UN89JZ0mo%iFvlUF8FaXXrGAhuE znG`frYqI&AAM{qyKaE|R(Ebb?8=EC@L3*loFD5TpT8{CV5z61>+GHJUA&8mp$;C6? 
ztmI@DW&qYUa=;IIN$2X?P$gM&)0dV%)gATJacUoK8g|Bekw$IsXmAtF?rcm6t{r?A zxPPf&z;aebJI&CIZNlxs^@#N!LaC8)|5a0)qN1V&MQ3c5I`tr;cHbjnu^+o|fStt; zK!BrP<;WyOWgkaH?oIK91){$KX&fm4GX-MftFI|Bcf2YJqepimEM{w4jyL9ZowH=) z)$q;$dilb6O^m>UZd~ov% zd{Kgm`~x;df*?C0&wMiYqQe%L%X!{J)ax>pmUvL&oo8=+8!mNW*L};&6Ch#){)E^i z);v~xZvwDY@Zo3yjkw{6=V3A0p?>zSlJ}grxHsFL1bTYY)kbcb>K4J^z~4d>s;;`z zRd}Ls>};t^DBzmobqpFb2+y8OMbyt(%c|^U9w3h-ArdcI`Xdj5H*SCPU3UUCT}sKS zip(&x*F8(zP1|{2i&w797LBxP*N)-!aEjU4DFci_@2IS&`aajx)GRO3*BzymG4h1E z?J$Tf%vV2PzK)KDA2Z)t{v+`x2}*%$?{#DJR91M3`X<7j1dWd;nt%2EQ!Ky&D-4N@ zRHU^7GuFP!^5)%MW8YeVR7eSk>bMR>!iRWOLV9K+T3cJ7@$muJdT_i5p|zvjNj6le*FIggXNKJ8#vajAu~DJ{hmmEGF#dn9=#twvYr2#~c@;=nNAL$Ns+0#l`HJVtLhGxKGPYxtp9XwE3STU2i}EU zdF#80E@H^3_u%(^BC_4qRCyKK9_Iwtu&C9QRK}ZUMXVb=htJDCwNo8ud3TXW(QLSR zh$)N!z*bHr<_;*i01>Sc0Z2G%QH%3xs#@82QHy7~wfu;v-QY1JG>hCg&xKqDX#P($ zm!8wy+}w-<<8+9%h_*s9;C+*e``zZ(-NQ*BMzwit4jwPB12=(~KNAk@lRowh4a6n~ z%e-}3nRAEclD4NuZ#hlR9IlKJOXb$b9c%$#C3_niVVvSuaq=NfMny$Q>KW1=w;D=I zQaLUz?rchmS@8G0=bv&*F&7JwH%s#{OWxIW3aJ*pgh2N8>5bDX=)S3SkyAeH*z<}CfuvT>`^%@M zE<2+_bS+0nhU!$#**!5cz+T>>aH1S8QMJjo}U7j zYCpsN`?SLsCb#JMz4U#5yPL+YnO>1?xA&bhKlA(2BOqGSW`%uz&xG@zRkY1$?mu6m zQpJs?faJ&PXG--so&Ppa{OvpW+vTIvo>;5tk6sgb-Tl7CDBrmoN;=1S4=@Cu{I8w- z|9}(zKisWVY-hPCp|Yd1roN~f2>mm?o3+ilkt5El_zd`N+qRNG?afP66Z-DGZWjB> zE^xq%0BKh}Lj##37nRt!1HZ^sC2Y^H>vTL-;xnLZgW{g24Gjl8W#4`=$owq#yZyA4 VDq4MW`0iqf*?J3R#?G+V@jnmA5kmj~ literal 0 HcmV?d00001 diff --git a/website/docs/assets/nuke_placeHolderNode.png b/website/docs/assets/nuke_placeHolderNode.png new file mode 100644 index 0000000000000000000000000000000000000000..ac9e83b9d67c5c976c99bb9016a47a9836eadb39 GIT binary patch literal 4010 zcmcInc{r3^8y~yuB5Q{5mh7P!h3rCEl6`Dr%Q9ncEHh(?#vbZL621r_%h)wUma$Ay z#LO7$JC=&cmW+^Zx~}i9@1O6l@42q$IrlmD@49~HJm=iM``qca);D>%B)9+o0I!9) zi5;V*Fy5D(?2Iq)0AkN*SR;)r>^T_^iqkg@0N{sOm>Al}Jg3dWv*gwdIkr#fe#h;a z8}3^RAwU13=%|w2A}`QPzg{+4kqK%z!+YVC;;NN>t)i`|!GpxX)IJU_VFv|iE(J@r zAlX^|aYIQir}A6Il`mT#{)iiOca2C>bSfbRFP3`FZ0YzQ=}o0WrBaRex_@S&_f9R8 zXmMpby1y=4qW*^oOY-!yOpmGtM17<4%nCy`YJd%0FVj$U6b(jes zl$3V9ze(`g;2t!%%{9a+Nw+288?T}`T;S*IbY}vuL|l7mcUd8W8DM_# zi8BNe+2t5jx93Z!s{Q8i5g=1MUhZ1|##wHc*#-lLLYc}H5PAjH)$X;tu@5f|umhBo zl-v_Ux!C@;{Hrj?*kg&@G(-(IfBbo$gvVoUwOvr#hLHeXv9VnnTS09ZFwlk_{EOrI z*Cx}7JMw4K8-^c`zP#1;0#Z^vz`m)E2nH2zT2GqC+58Qftq^g)dws49-Am(1lW;0>noIh$3KTz9lU-XIdKZDC#7z$a*eQL$_6bhci ziM3y_sI)71@Um7B0?zRU@8$}dlB3%(h zkeCfxE{#CM+67%*UHfNeT_;Xk{PsjJ@Ik(3$Nr@5htNoa#?ep)8~4DPKuedlgM+v> zKa_OdS44Z^;yWENQ0mvD;*t`!J}O9Uc^{_was3~YJH8~{6li2Ru+!t<=}SnC5|pv$ zU!&p{3;U7hw#qkotfth#;M6ef&9PtZXCR>Bq4YQ@eXt%4|82CwL#=QJKO^GkxvL1dQD`YIgnMm%q^fF}#iEWRq@4H^D~F+n)ZskN%>>EC8sg3>3Mg9V0J|!|$;lkT z&AwxxjPWcfdiP+oev$^e)j|{zW*tP`P-ai=R6396`0Gry4;JmDAnlKfnmov)z7PNw zm*S~YQ^=!-;#O0D<$s;6xQeLA`{fgF*r(Ro$Q2ME(NL%O*0cT}@3su3Hi_R}&=+;x zJJCuT34usSqoacaMd9ru+WO`yH(9xqGHb-$XQ`exg%$I@A>BNFQFS^frrr%-C?|M~ zKKgmTq;XZy<@;fG^J1^l%=e8ziLpRH>HKUzH*I5OL6VR9Hb9sc5K0W9d#k2K9_s$4 zw^PDqY%l3-fRJU+0Au497aW9-BVQyIa7@d*A4F%K1RtjVhCD zmni&t5ZC%hqh!@%#D50`$~-gxBw$-pp5&PmBh*AQ>9?swUH9G!IPOI&sL?1jgE zJYoe9&#_I+btY{&D$;2i#GQw2R|hFXHK0H4Y5kU6TgIZr#hrX3rd|!rz%3=n=&xu{Bv{X(l;UB zlbf@uMMOR_Z!~A`Gl>g~eYUtI?Q~Rxb6u!)@B8fQWr_7ezsdIi|V+>VqtFDsp);6&=knUp9 zk3|H}Zrl`RB{cfw5*ts&AfTR7UaNtL+g8h8nD%v*2+myWdG^BHcWQXt7UTVxmV_cB z0f|l-LwO;h=-zFq?dKt>aOFR=9Eq9JiVZUx0)RO|uA;YR98qYbWDv4CI8dU1!touqJ4 z<;^bOSA))Ng+^cH_-gi32`MZ5K!I}vBw=2sxWR(L?{TPG{`P^7`9Y!%g%LgD zcqEw1N5`N%NTReWoTcx3(sottKbH0EynRAD+9D6$p_5r0Im|zZW(qV>DklqLoeOrX zdmbgIi0l;8Y<%3?dwec2GeMHoksJrlFiyxh(_mGBzUu4IirfS-;tRD3Vq`-PoKIsB 
z_^z7_3S{Wh2Oea^~s=tJO~>Go%h8>eHXQ|GJf-^4>GL?Sn60?$CWOQw5UKD zV?ifahC&_x1OW^H0(*yHHu4WznzK^nYwDhdlat+QrgIs8TkWi2|JEX8?fqwAMlAO~ z^cIvZD>3~S)xh=Cq*Vv}s*v{xMgNBNK@-R>YgA&xvRTEa`?JL*ao==2?vd}b!un46 z*S_4>dyn`!rxwMWxTqJwnlrMSbbkj%awWmBS=SfSB5&qAQ>AQVgOE4mPO{_7AHNw( zWo4@Nxzg>SN^Oe}=eM?^orGQ#=(}oa=<0C5H(1{U=`bt^44Cz}hHed=GSbtmebaG! zKpu8718;*FPuT`^bY3ar=N;fknx{>6d47hd95;CkueY~vcR(OdxmmYw415W`WK-5~ z-xfScN2(#l3Lz?}YC9%?ZX^}&5k2^2)mu=1TCiPcSzWbloeun4p1ZBt6O1DSNcsJU zxFjps?NYbQ&_7L%&v#kEYS5?-V0u=oEL1C_T2yq$ zUL*aUhPW>Bh{k#Je)weWhG!rQm*qFPL&KEV^K* zsOEC_(+MghpDRB89>-0jGWTZu?6=VNd7{RMM#;Q+PwDBMnx*s-sn`3j)_V&r8Anls zQh~*Xyv)B3xlH-2g}PkG0yzHh`Ln?Ww)ri^R8ab{-!TN6)+oY7uPhRuT4!V~JLxyz zW+ab)^|8{BEMQ5frNw#c)A)5Kg(bHeIow0C@%3o^-{QT}(#1vZ3L~yDVnoR1!vxjC zIJ%N;iL$D}IKK4vlamIFu=Ts1ljMIAkCF!tXEL-J#z3klFYygr4rnYS(2vK91pp|G zJkoqDhjVmqb8&6PdW*|(bk8G(Naons@S~o?{fa!ZhIfK$QX1i*)YtNc8eL54xIFe` zgq$SccNMy{qKKoqD%alOHEXgqknaHXUGwf;`Yaim46ME}WoF zOa(HMoVH>H=OIb~R7@|6lUjA;-jJGrw}tRqOnT7yr?OcQH-m!fMSlAsC2gwh=;^kc zO66$>*(hN`}7}L-*e1%ENfDvXEtJ59Fxlfb5S?u#QCU6&OCpzblK9$ zH-wXR7L_J4@_hr0<2>-u)I{#1sy&8t2!8Y?ow?Ij%2Yv@g%Z5F^1ZgVy;pQHy@p3; zer=--1O!A_-?K4m%URG3v@(#gf}DCD#HJ2Hs-O6xf=Kua=fPC@Gc6dQ*7@&dT-DYk z5xUz&x7iQ=wxs(#v5_vn6WirH2u?OsR#U?@|HkV0%SoIIm2SBLTahVkFkKPo$RXoH z5>o=~btT(9s)GfyGw$V(8F!=V*bYA*@ZXu}#(RPxMarUI76fkY|6Y$GM%{{B31hYBqn==c zM^A?3k*pKW8!I+LB&NE_Nw9-dd;)6Fde5f_lL1#?KIV_*2YI61+GRyD4V%L$>=7aZ zpepvj7o^&3Fa~~`44-FM`WSc?Ik4tGmdl?pD|QV0y-OhWKh}QmpD{iRoca9c2&_Z# zc^(Sum+3bQOPPm~q8|K?;bq)-jl>|}_Mj)J&|;Yq6>&)0D0OzI!q@ZO2Hp|As}2*w xsLZZ-lP>Z=T1SU`{;Nz(`CoP9{~55GVD{ehhl{oA5~KJASeROyG#Gh3_%|x^qs{;T literal 0 HcmV?d00001 diff --git a/website/docs/assets/nuke_placeholder.png b/website/docs/assets/nuke_placeholder.png new file mode 100644 index 0000000000000000000000000000000000000000..d899ff742ce32a76317645bcf60fdf256f98b0c8 GIT binary patch literal 12169 zcmdsdWmHsg_bwB{hP?P%_leF!X>( z*FC(k?t4GHAMU5S{+G38&8)Ky=bYbf@AEwS+53rlqozbiKtq6qg+&N{C9jEvg^dq< z#PM)}Bf~T9UBDNPn=JS(9`NUnXBCNs^#ltn|MIPO)-KA&{Vg(k@c=WflSp!h+wP^J zET;wOXS%yxPrlJulFpLwPVw=@rZyI%275NYqhrRwVs6WS-EEq)rO1o3g>Yh~y&UM` zr%BU#D{K<t4i8%O-VJ!G$vUtj5%wZ%c%&+hmxxn!xvU-V67{jWWU&`W{ zga1dJSQIa#o5$JYd&JS%*%=Ij2lOvF$0sI|ZM<^_lQCaze9<*o!rWYA?ixcbG;gl2 z?izo3t{f5uVlQ$Q%WgfIG=HY9V^kgzCu|=>Q?!v=?(#QnxsMcLKTg3{QyrMC7|>=h z?Dl4%q{gDjLMDifs>tP_z~aIa5|u<@A7JJc1#kKmZJYaP-x+f60R=c}|MV>+<6Un! 
zgr+-!6L+n1Q;X)}gfqwcj&JXKhvzy*Z3PFvI6q7v?|*YIRaC3DaA!0s9bvnc=X0d{ zYHLR)mUnvj6<#qNUGhq=U9A|YKj+xj{`_o1cl95MMqL_DFUu*I#UBJ4wP{R_)wnQ+1f z=AFa$<{TS4$F*wl;@bQFTyegNsXN%1GBIioNQ{dO>X|YRU}uZ88aPm;Ypu>j7ZVn# z8}MT=KDl{$HaTg3vA7NHy(fY(vT~;!6#BVd{t1*4)gMfbxZrd3X5*ALmQffIw0Uej zF1hz;K+0HA<%EPGGrsSrSyPB%x^F+2J%;)>guXa>TDv*Zws-6RL`?x<+&?XLx=C{(Y4jm+hV4Uw>26Ph>7rtqnkMetVsSLcQsU6tT=@LFKq^}R+ z&UT&R4mYeC163`@e@1H*K56JEkE-+%wy-oq`L?wj%+c9ZAobd1`OL6lxJKAIN~UUz zExY6u-q9rQOp5pox3nar;b?)7jrxUC)twuT5646T3~GX;B@V(At4yMYGMBcrF?))h zu?5cG;cJ}H%NA7PYE1mbF8d_@*G=#W{b)xG8qQSCE{FXI0Z0pU4_=)gJ@&1-O5XBy z!c$QV%UMqaA5jTuUE((n9&e%`E-RGCa&!*jA7uz$3s^!7Zzf$eR(`M&olPadaO$OuY zZl+Nmc^Ta5#9D_1>q!2J`NVE>Q^t5kl?UdyjSeD;noI=Y5B?+*pZh(#qv%Q&n4FN% z8A-~7N5qW|ZTozb)zdRSQD-g-ZnlOcC9Wym$#kes$b0*z5#Gzee^F`)YP zvDstTH{h}3y*uX?!U#2L6&p`l3`So*^h#*KuYzZIcf8u6&Dt1B#dBM zc3PRNVt-%?%@E zuqlMi!xM=evLtC0^%@}$&>#G(nPXbKK7>U~Pt}aW?}IjwqS@wLXFuSbazYOEypBf5 zfwI;zQ@_hJ6P@>&5MDzn+R5qDQ^a`__t1Hhty%=B%{L7bSgzl8J)xl<6${Jx!ALxB zNa#`S$u42{Bgewylt4#?PbP8W#Yf2TF^8l*#gAaG?zpk>ai5L+2)qYXi+9IK?2Km> zCOw64#00|bwLEezUc%*vTKRYh8_DJUlv++O51`gIa~B>4neBb=++%#hU6RhqDk<{~ zL9!7w03?C_;7Xx5)g8#u!(g@a_`bSkO(_RqoaWEAAdsw$8trFK?(;v4Bcw3KkuOlj zk(NEiky3JBt#Q{J!U!Zkvv_dVsN~d1ujii&G=I>w|E42O1|{?Pym+f^?=bU~jqM}$ zU4ht)2pLQBT?0rsZf}<88`G*9iy{jd7$RmMD*C$s1l2-`Ms)8?(etFMi5qI@m1t<>pRSTq39B(N$+;|I zWl5A02P-PrSLo&A5q{c~;(s1NsFj@3Wnu}p)+~Te?nW!G(YT>Gl0!}wkgt@rRMHlG zc;GR?S#$_LKFe^6jgg1&7;2{5R8I*5HATo^C)?J>Pj;@dV!&W{b)KyHBaSsx8E=qF zX7t#=ab@TQ-Xh0OEoci3^v_&(20V=Qkfh{A?ly<+p(t{EYO}f>dZxYibEqykOWq8u z))hB4Hg-Df)h;LhmgO^qaijpuK2i6j=J=b3ueASLfe4;G^X<*b!8DObbGcCc>i5_U z+vPqpf;hfrqEUQ84D^|c{ovJ;HS-gajh8t=u6U&Zncf|%0U_U?<)kH-sSikf56_yQ zfi$r1?k+_s_+^k|OUZqQJ7v%CwTe4$97K89H-Mcs?t|6o$N8A& z8^bUFKrSsF0{|lH=}G(fm}E+HB} zW#Xm(XOry2&!v2mZaWUlHVU1^7 zK|aH}M6t#~iCz<(>wUWzn$kvZ{CcOuesI=!+EvBV7j)}9Bd=86QmB5)$fThaVhbmZ zXnjdjtj2kaL3%IaEBNzF)#?}iDO)bmr4yKpsJ1G7Dt@0p>HS+H4S&rFKF6%5O!P!_ zVDv(&CRQ12!LJ5u%B|{OaFNXZoL24AIiRMTF0hJZQ7XQ zAGS@qaHx%uT${Pd)D7*lrRpp`S`ypKp@2~@EUZgJUc5zaglRM*n0ee}JGYH2!@5wo ze314=Ervbe-nxxJEhl?0PRc6tWr=#O)G8W1-i9XhQnT3Vq1d8(tncTY?dS71|8vW< zr*CFL65b0}GGXuLzBJZ@Qdva#kb@09OX=tu- zhaZD0S?itzpW^x4gT>JOcQX<_WJeWjy}XTS34}C`wkN;pGTRZN1errB9)X86OX#f7DY~$ ze5;~M&GIiwh^~oaVLM-1k@6F9!@}w}8XDNS{uZmuF9e9_-egJOf3vlhS7g_Ljo=Y# ztK<@S?b%hBRQ4ev%)13%D}S-gktcJQg#v`-eeEOJ$Zb>t)a+<8qYhZdV@o)jtLx&1 z282n8g5O5Q*OyMKSnW%e{Kr?yU2zi1y^!{X%`}X+;>GB_6|`D=fOz!Zb$GB3EcA?^ zDm=8bXlWkrrv$R|ZHjDqvE%o08P*SZo>f1~bN1X+kK~eo>)h^>rluy9m^&L^jUd-N zx-9_J3{?Bf;t5J(`YriRrsxdT(ZGvlrSPLQbL0%!{96uve!A!Q$NmA2O3D_NxfYiK*r(3py9bDH)=i$tGseoQ&DF#>G5Lq zRDi1c9H#?U-NjtQvM;-(9jyNpMwe}$r+m0qNO9LyFqT1=PP78eY+*sMjocQE5=F3Y z(VF+VLa^^r++z}aZldY1fJ@H$?rRZ;XX+18CRS2X7++BEQ$EsY=(~xzzYeCU!gy-g z$2%$X3(A%QHfwbOsM;bLebMNy(Kjv%D@hgs4LdRJkh>8StdQO9f@bO#>j-wJB@&z*|5Qe$*UxGMWZT!CrSI8Rux7|*L z-@@#t@BcbJyiAe<3wlBc3Zht_6_3f@D@TcI9j$uz0y0WlWwcw-t9vT_-QD;%k zF%c+aBP(B1AY35x7g|)Jj`vZ#Pip?0R-5YGvh2z(SLu#TpC4L5h;yeye+MmkdsCS0 zwxfEJ2__v6ryqQ(5f~`twm!I2sOtGl{f|J+(H<_HB$W7~DO?5#3k@G`rd>NH+2{e& z*a!Hx(5~B0!-ssN!j=|B`)GIgakj=$b|u6%{hH$>8`c0=SyED`)7&TW^%B;pn7|{{ zQt|Q-7NfqSsxnrb5M=LF7|J+uddi#QvrgR){&$I0#=GmdJCiqXnlLB<)$VJy2CBNL)o$T?d@E05-(68m>iVamwKnQzrD0!`3B8Mi9T`Wrj)ic z;9%%|ZfS7LpUp*S>S0H@**~h{&zzzO9O@y}r+7$g`TJTs%M;BkF<$%mS<#zW_t+z) zNk7#AHFXtRRI^875*~>?u<;i~%TS%*giv1f;Nu*k4+cGJ;_qk6`9?YipK1BZe`u`F%kd8hc*!~oBiz>Lp41@d#iSEhD zm*@1YG2F9?lrllNyW18P=2r#~-@{`MsJORrbIH{Dd^?dRW>Un<Ph1Yu#@AW6zg%pD7um+qik)X?>BkqRomZbEWTgkC4s(qf|E^_X9MgnS4`(VAx+O3#yKT4qa*5jgYEljaVLm902N+l0AV1p2 z7prVoi0n3$LgpU-mLsY6A2!%13G#vKhfd79=V?v~Wpk={Nb^HFVhP$ynkGEeq)~`p 
zFi_3d{rHG2arJk6XPUgTFi8+*s^xG_r3e{I{RonbW@RNUtO;x@XlGa-9K}3uxA@e4 zblrESx9D@4D;dB#=;1RiN_9<@06ND>@A|64Ic=j{*3t6E4)YZ*M`g=jbm=CL5!LRc zQo81U?I4`G6ut0ckIhZzjd(a3_|@)3iF6~QIXjEampdxjW?_jgcpBh$vpY+mAY;WsLEwGV|hh-+i>05nlrYkzACd6kI;Vkx31~GY1oJxAH#l6QODFL5ElbZ zY&V>3F?b5A+?n}MgJ~yGKg6D4<=PKaJ4Bcmp0{i-y1@|jekPVr_8vv?d78h|JWW|k zpV)l&GR?FcsFGEIDwzTVQU^McU{$A;(8OLvX(LmP_UnDIlev#_)+`Q^)`Lk@j|M}s zA2j}U4lORWB4zlH!#XP_u(s7#cmK?e-xsvc&gD6cy&5|(*Cpau4I~k{)+FHcDcVLcOY}v z(U#CXMTZJ_b6xZ~uD4^XqGg-K89{)-ao-)FHzmBlsv0I`Ps7qeX={vv;OX%$Or@9M zmAw~`n#(&KcBa~C4^16KMMex6s-Pg@Bq`4|tZe1X^Wkd*3J0t-mOt5F@GmZ6LlEko zbErl?rJ5O_p+#G!;j2^AfD;`pkVr;3TQBF+1V3~;(uY2P?7b~=2 zV}#6ovQ$0W8SWNgK~*$_@lAk;H&A3Vd#YOvq;QX+{)zNTUi*Ixp6{NcY{$V`=LmQD zdAA(MPyddcS@mg8NxK~EHSH3#1k%k8PEsnazKPm;19dgRS4OU~0~&c-)R>U|@H*Y( z@hP)Q5ew-*`Uiu9DmlJe?6LI9`}5o_G6~{8_$jB`437;;oJN4~7u1+6+$#&(OWjGi zig_wAmX$P80@HAq+h1$3?zpVohAo@68y;8a(`aZ!QwH@rP8$mT;$hFpEk_KUmbF+5 z3ok4dOJ9}#Z`5Wx44R{@`)HdslEmW-5==4mdt}=bE-hH7k*`9YDdCpe_V|s@$qDKN zuh(DKWk{0!mmk|Ce`o*E5vHA1bbZKX5bYcO)_90uNW_tW;mCk_?>A0D%0GpJ@9_>; zO^s-}>E$_zU1MkgJ|Ogs&32bO;BTz}PA#qUcS_{r_Cw!s6rNw)WS22brj`y(fsA&rf2P4K&kgh8qgE$ zNS@m3ln`Q!+V-%W0$|+oqM6MzXRO#dDCieSZLOg`z!w!{M@L7dEip(FKgj8+ zH&$0ySDL8Xv+3#SAP=AH=^D%XYQ;(TbrMQUZbjU9W@9A*G$`Lc0wS&bGbj4d}uyaFB{8IfOGoA@3HJ6QhAQ(#X6 zJjlevTPzwH8erVRWmq3mTl)eCd%rSIo4lPJ8>?ED+4f|mNVXaoIeEIIU;V`aw_$x= z^3&(fW2e8_w&a#Wq2bb3o9egh^%V;B)C)&BqIVk>pr##x6B84>?`0n5qBwbZI-iM#}Z@g@C^ zzbeHsFtf6*xQq4o_2t#qPi{9UEdDbNufSmJnINhxHCAEAxy5wb9P{=1MLc6+ z`7|>#Lp0#M-+8~u=kVF>xC01*J94G6VZNZU5)vMcYZ7pzeS1Tv`LDn=k)}b})z!T7 zK4^NNC)PJKw05y@BbAVl0IcWWcBr~KIVu2YSP0Dg`jy!$ zJK%=rjye$Sri{sTY5Ht(T3Y2?sem`H6H-zXRaFUJvpb@ids{H=4`klI=cJ~lHfnSZ zzP`E`Z}am?)|8i*9~l{u2?&rpL?h5==v-EIcKjj=G$I#M{SfK)jG5WnLwMeMg%Io4 zz<_?O6~0sJ0j{;R^({#6?>l3K6Vk#zJ$Srpa9LKqL&*kFyLs@xw6a;d_ zak1R{d3AlA?9n6KjgzM?ZF{lT|9V^lGJJe|qc-0!_4Ps_A+5+M(QGwYS=rWhFg?+2 zX7B7c*bHZFdR#AcN6bPpfaK}w?%tgK#tPh?nVGo}gei#urbeEKO)=0DLBTKU>o#81 ze+)`&E(4Eu^oMh$5pjWknSd+mRhwaZwzQ$PHC~faP^>oHMAm$tYml%9&xu=6mVlbB z4_E;q5g-2ps+wu{;Y07HyQYcBaDNhOTgI$f@%U9fSX~`Gn9l!JR~JYidk2TF&CMAA z2)M2fQadiRL&i71!eB(j<6viJUg^_LViDI>ya-|teAC_2AROa%ejT{>i_*Z*Fe*Cw zDK|G{Y)lh)Pk zbz!Wq@bDLI>nb6kp;1v$<~vh0t%t+nY(;$o15BKppMhu*(Tnu}VXc{V`1cyVmx5lbX#P)|*#CF?!Phl3x?DzA76Fu=;p?zsD;QW2)tCy7-{=z3!wErf-HF^VV1DIp{-w@S&FG!o$!U}_;oHfXyKk6W zcoLtC_IgaL#lymRlm5@2XP%;8%NhLv2ZV_+(dIimQ@|CWn=02#)1~_=kMqeL#N6$F z?X$d;#qv#E+2Y10DUie`yHlN`$I>i>PZsEYw51KCBVaBd($>D$j5|q3bf2A_Js75# z$_%ip2|mGcF+TpSErI<64w2E#AGl2+q9QvNvAf2EjMq;A>g~&?@MPgU?7dO|vZpQ> z2j68O;h!fZJYn49|JND);K+*@9DM0_cdJr*lh9|>V>Y$`R=&QE-f}7tK2GTNL9z#HYOoRa_Y7_dw8;Qf0a zK`Dj}39kcmix)K=X=g5GevNX)=wq(7&KT0hQdJR`F#}!-efDeDlL7Am6P{jf`lfbl zx&PtecWk0@OX>0oD>H8qX~dvB_KokA*p2^{nA0RJK=i|S~S0N^p5nsdOo{r5_fT3>Lw;SHq|H1 z%mVbDj89gIhK7=-cU!4IapR3b;u2X)sAl~X^hIO$sikf-|Im<8?W_+_7w8n(X+?vM zwt92UePMO=foraoa}XB--d>)!aj}8fZMzA2RS(1Zmv}v zxU>oiXC>!1++T=_WjrcQa#Kui7fntcc{AMboy6K*R4ntvJh0Wvj!jjK6$xNDKGEKH zDcE+_k77c?n8=u9AC?m=AX8UoQOI^p}Xx+FX_Jn}r45r~t5Q&OetHJNx>oE+vR0n0Wj1^O-+|*M6^0q2}Wg z`CY~=Qe0IjWp*ced@@PcY2lX-op4BLFy*VQY>>4=(bLlsk+xMA% zC~3oPK8)PEvY_kgB5V&Oikhnk8c{)`mE^07VX+m}ye6@6w)!8qgS^gOiM&8)` z>@gEQKVARx=jl^##^*59VS{M4uX307&RV0$BjjXr)zIh2#fzGny%qZ3-%6B-vwpM= zq`I&vSiK>!EGS5GD7(-tRX1|x4ejcl_C0wXM{`C%O68gN zVH?5C?VPm#4$nXN>jR(x0)TQK)E`K;9C(XTG|3i**$( z1ibcact}V6@zFp0VOUegpF15cdKYDM<>o@Ia&G-(-~ZD^_HmV29?i^pYb4|T51$#% z6H)w8(SGo))lB(63})D`)MVY&TyM>2V~PJ^LBmQ+BFRq5`hR-)&HN!TQ)#?6iEZ!diMvz*ea6S0 zr?SSyKZOEyR(@tg`laJUJ$DEt#mfMlK0lHR4DWuVWVudU0{X)*^$qVy!TW?g#m74O z@6kO)xVVRmMrR{e|K>eCeit4<=RQVi{DaO^LgXJe}A1cHxE&x|H6E! 
zF^+gu&m-Ig!*~Fy8)>1_bpn4R*#$UK=4T_;5@4mYuEqZUud;v$d=y~^6!dGAN~ zBD!C?y1sb%GEwRQ1%Nk`6B9j#PHhSr8XC7_ovH8krDSUi2{rB3c;DVJC7u3oO@ zo!HZZ-FmwGxx%C^qq@3UMz>TsF*XMzC?u4aoee=bMx@LJ!-m_c)`UD;vVnS?hL-lO zaptY@FBd4+p(^RJIy#SHp$5LFY03=_)FJTwsjxH!@Wo|hWJV3;OA;LC<@_4PNe zn@UTgqyo;dNanbjBCCEKnG9tL$3dY`EF|=O;*$gJva&MQ`_XW?;)@qAs%Lj`8s^Vu ztfkN4xo4+8tgC(ob}R{0wHwO@XmMVx!_H?A2ik4>v&PH_qplwy*fo5B$g%Ac%8?at zM>Nx6KLzfpqpjV$HjtVw^nu33#pU~SX5V+r)yLu!9?wzWxI<&-{r$0c1pJfeDM z$i?(Rv^_?1@on_uXD2Hw3lhhb5=zhjo)5>*fKoP1=tD*hf;5ojWfw7^e89@e-Hr|9 zGv`Bt4p~yoJcd#&vfohHKi0Nzf0b{s(Phb7y3ryoeIqXBj{JD|{$PauWZ%^0fG!W! zGIoisKMa9yKUnjph|rjT0CKDr^|uEVE+3STsMc|no5wnQn47>~6S*JqYoQ5Uo!}n# zo8!G{(iwwLV13-&ss`lP067o?>Oc|56(BT|lao$g2Xi#cDIUIZ_7c_EDPX(|Y?2fjzHtpGx_m*F5+t)UXj&0?wT2U4rl8Ltw zEy`NcR|PgmX9h~9-(T*yI(txASvj@qnV6JhZ)bP<=RuF~>viyH=X{%=51@fJ0cqG! z-6{OHXnV_51vH*YDL!y|)vB>OJRPu1Ln*xFa~b4-n>W2@reZ=S_ib6kGe7(ssuL0_ zKya8S@>fBSg|+Ii!^F2_d>iZhnF8OFO0@9d)AkUkiOdh&>G;;wU_lS}b(6{%5422qK z_bO4Pdy+&P930G#NzDMO6+a-ny}d<*W*da%4sKYBii&Cjj6Z^vyAUBEAplO);x2K% w2D(7*l+61O#bCx}>{7QbYu#1f--(x?57Z>(bpF&+_>` z&v}05{Qmg;_49a-_q}&_c6N4l<~6hP_Oq(8JT?Xy1_T1ZeyIRchd>ab!6z3T6}0@! zEe!zwAb*m6sfiB$c%i=!gFt8?FJV%eZYjI-?(g1wSV2A9C=FyqEU$sEF9qmHt$+QQ z{Y$nupEprgRY_Or?Lg)0lRmSY58u0fys*k?AsZdxt*ZPtY(d!mJmFmn${Ymm%Ny02 zovS;C1ED$Wxqz_2jg&VkdGnW(^}?QIMa5|S%B5^a*2I#TL5>d}Y#%FHq1HAY%mmY@raP`NCB zt%6{!g7^ovhoW)G0xnNe9 zDg9~9uF?EH7e3r&;}5?`w~Ni%wB9S%D_e?)V96<_TvZrWERdgty9|g83!UpxSI3{`@ozrIFS4~Cd~tU#ym5CPY zzHhFUa;MKG$~HS#lk=^wY3r=o;(En8Ay`-L{`X$Iu}qZBq2pKq;h!#@e6Z2%Zg2m!&(kGmKf%flr}ud@MbNXs2h^-|BW zIt+Gg3fio%bzN_#V!2sg-VslusUtuFGd))h4k%g+Z%k=64@SQqNXb|9sXRbvT+TKg z(*O0YS=bGw#i+UMiUk~7Z(c_d}I#U4pH zNe*OOV`WK5rAUpShMeYk-OR*X*gince7j%ugj65{2ZrZnX+g})LWYO8g;u4=B9V}N zV+eNWW7<@dUs{^E_1ID$TLlf`=D&7IpL;5%7x~@)+CS8=yud&~8ON2yJ9AbPJzer$ z+TRb@nT)@@`Tc^FaGro*gDW#k>+yLM0fP530S7{d`90vUfmVCyfbi> z$j4ZhDxu%ER#p!wl{!(0I9;R`VUCSG4+?ePaMQCQOPy%=_x8dT^Ly@d-6-Oc2O$LA zsIZrCiSDnJc-I2_aFwwUM(ZfK$y**)eo~HG7uKdl{jlG^QLX2MF%`2>H5iM!D?Nc( z;3Z>Qxco$0sXO0Dtw`;k`cR8rq}7q2XO>ZUin=~97vLRAY_j3Y-qtsYoo<)n9}qxh zJ^c+wjqvDhUs_X(A4rWSL(}pLuRGFy+5&ZbQ+-gctE|aYiCtVDqqoY&AbF$``QXZC zEcflv0P^ilU^)b{AijTTGKy7GTWhqqVA61Nj(2+(_`+FgfT1O;E|oHa&8Kk2B$zCeH&T%Q>fUeS|`?o%4w-l3^FWK9zYl_H@BsB#y0*?oJpP)ji{P zIUYpy?R41Rzo=K(E-Sx$NF_3Gs9w!}#*t_umkC8Ny$rJyQmE9$V`rBh9a#0;X^MVI z5eAectP=-Y-u+6+?WRTw*Kad1?egf${>9l$RdQaD8Uzy3iJC>zyme1jgiay_7S?LG zn1ltn?7X!Rbg9I;qrHWM*}J;7_D=^6`%3z$oeZxlZQwp#i^QlGPl``&eZRH%>*ZgG79^rc+aHt?E|nCF(T433 zT^DlRjfzdTNgFF=oBnpbf2`RE$EUlm2-3}~FWeVaZtnFN zA5~Op?K-|%Twt|C+i|S;yDu)uHDdVsc;dsT^Mc8tC4cE_OQ}C-x$;~Ug$3x4yS$h?%m-~ zKfU=_-H-8xw}x3|YS`4&u7&rzRF|jr0vOSU3u5+GUQ90DuG8$o-5*?zE%dsME15-+ zM?Z`|+_ZP_2g@m_hNU*v9*sT>{cF6KRms?`5Iy$YMOs|a?K;IKRF;40fJh zE4t)E0fUV(|G$7naH|vyR@AmcLz}5}HNA9ceDDSfU7zkDiS*))_P4(NS{^=9+&a|0 zgd`PTS7Or;-_J)`0@HP*sq4D#NT@-@QrB${Gpkp23##OI>^SK9fmUnS{4X$L>3EX6 z|D}lJo_99#WDRG9jwgJf9L|Ck$J2&q8BsHZe7 z@uKDDAS>pyUqLb?O1C4#8Aazl`JY+LgHt8qN0rH*jVgJgpNvhnW@LyS9)EVkziVo< z9JcY?ERVKh3)tncrNOrudYaoGuGYB&1M{M$y8jIX2f3N9ZW?)AUAdcAt+yU_e!Q}G zCg9u}6yNh<7jPw+-gVOdZk;kz?-7Z%YVs-Gj_fm*nwh%tYzoUkd}pmSEX*Pwvm*zc!4)t zgQ*D(NYwXqlPirHRFPfdECQt$H~3j_AeJ(lz7KPJRn*ebjcf+>J~jwGwX41gVT+N^ z?k0>^e{yj8b}3uWI`;0)tzQeyoHU~O4Hz1<_%_u>3M^2Cw)~7P8NiJ#gBCb^Gidz$ zcC< zAJ15tg~H0qQ6tkyLr%LxpShb6vg_AG=Th}`-5jJ?o(sR4ITmp``-|OhJ@TwSNdouf z=w8J^aL=@pj>?5U_21InEj+jTHN9sxJa>TZW#`G{(eViy=YF)kSEU(uLu0bRc5MY;qUR8_;dIx>D?zMcpaj0T zY^8Iy|KfCS5=pPQo}{(W$y2ZSP?3VlbJO?oz_s&o=^*#8U_%}O0;Oi{{xSZM%gtQH z>uk^I^w1d%!s=_x`>0&omMCqWOZOZ_xrgxi)zO&ItO!Uf@fxSM?8sZyQ_=GNu7KsnKXGQwu~h^9^l&I73z^>DJN5 
zG~YY;!h|^Vz>Fs5QDfRkZd&L8rL}AZ$`W*O-g>+k`TZdj&F{wSLVie6g+gMcjAg^1 z(Glrnf7U-Mv}AejVxJQe8*;fD_er&&Qs?{>otNKSRX_X7Lr{<#or9y@YI;%U+AC)p zA4>9xE%6MAvt6dE0peljwx(lU(Rc5Vy~_Sk3?f0&uFjZ*cp)u)eMHSZ(_Wq< zf104Jvf8K`2x4OW(J$4hB13euJ9EzJxYI;50@I_lj24hUTxRLR@tKe;6MalyJ7Yzc zZo8O;OH2qKKuWZW1U}EA{E2($hx-#=$MRQ4>x*5j5=1^Rm=|z)2*eFCKu~<)a*ZV#Az)-WdSK>c%Gd;uZ{@ObSbKj>q z&%FYHD7$M!+U#eiRD(j^?q_|ObMa)#L_;&Wn?V#k@38l3VxSIy^sCr3C1F66H?-iX z64dg8P3mKlNf)Ev^c$yF&O&rg8iyqGBAOd~uwLNEnI3;@(vm>Kt1=8lk^hu_IXCx? zHG0CLppRrnRimh7cuF#of^u%xeS91PQs<7wHX!!->B#8j&O}3W-Aggi-f+K-cl$*@oQ<5rq^-jqCU?c}dA~JhIp=!-t*OOS&=)0r z6#K>jg0?oo%|-l5`oWsC#Yp$)*9otK1diEIgJ%Yp^9Obn*Y~h7E^F@OA=!rv^&);> zjeq8_u|?I^+$4csV1$gO4ka6|X4Gy?l!+&7e^?$xO)VN;FpRNuU^-L>JH1=RzP#F5 z*er=BpVXyayMEiN{`U5EK4UyQ@3%Ft&gT?Q^3VBAmPm-H9}cN&FTtlq;sfsebEOQ@ZBv-QO+BYh(5VTvHBH%s;a=Eeu^wPnQygy+$sF+`yA|` zNo;k{c5VQ`w&pucJ54zmq3KkfugYJ`IcHX=U%U(LLZ$s+%-f~RX-7<-d0zI? z9*b&%U}3>_s;YwYL$;FL^_esC?R`9TQC;`S=`%NY3EW=%LHN-)smI~!e&a{B-DlUS zxGwAz=mX|`4@#7O_t4m!3VZV*%j;bh-qCe|{Lk@l1+GoH8VOketE<%J6wV*j|EBU< z4Hrs9bF-^T;GvD$Y|x#e;xRt=r_q1QSF^R@heIlibf4z+4-f5aj^5=z?W=q@TWuhB z{-DUJKVrO~E3~kZ{`LX~^0OeRsmf7w{UM-$64#kGnfHymd##Du4DRjD-8=flDr}A1 zvp`)olf^WAEOM%`l}&0H9M*9E+yoY%y8;T<#9D$Ke#Q*Lp#1?>4|~YM^{imSq>`J4zH#)7!* zBVZDA%|Sgfl`cKq%1P=s?FwFL4Ml}ls0=Reci4Rq<-rJ`G1@qnM!qM8M~7=wud_82(F$v7 zmpxXwW(7llz}kQtwLj=*^13vML?6iIGzhf(%v9VaL^mnF@h4hPfBCv>P;QF;ub@7sR&A`n-E7Mq`Z_(bL|E;};iymq)>y7EwcHF5dbfm%wp@AhFvRE|a^?C&C;Z z=692u%<<;^S@+Q68k%_qGXVyka&@u|C63Hv;hocj1Dt20k3}1LQPGMg!i}Ep#U*<` zW6#D!E5EX}W_enV1rZZ5{_INo@v#HxrUk!TdJkT09mcIb!!~3t<}r2n|3vi0_PBUz z1)PFmVR>&z|Gxu(|9`tXR5&A0v?e!8U`R-~>XEdwkB{dcjy!aNXaY7COiH1Jz9)n> zB=OKt6k(ijf0*0*haC~mnF}?P2&5ad3OZx6MOVwB>wYwZE3Y zpq8K~1Ygq9pdiDv7J{8uuNG6I2amnCCxc0~d>=?w(|;3OS;>nY;Fl`mx}xa7h1%Ic zBPNEO$wVXK$#Hyg68rr-R0_Au_d&{fv58VjO6s^VhFT)JP``fFYA{783JB`*#_DLP zEp#~VR8qx!UES!{-jdnb*;(b~($K8w3bSX= zxDY0-cD^JfA*lH(o0=kVbMw$Z62I~iZCPxo{*FIxBp|?#P5X{oq&2%Hfk|5~<`S(Z zg%O>D1VNSzf=D5sKEzVv8ee7)J82R+ZG>OnpkJGg_~+>Hrqb!t5zt5n(O`6aAcT=p zb2V#m3Q1`@%?lmB4Y-158r^zj-lt(fGxtBY8lzM!4fA;U+6go0-p*{=%%4&kT6 z*NJ`^GNDqqu=nrZ(~65jv*Z^sz$Uq$5Np19L&xtz+7&~MrbY-v`#CXDNQ*Q4lFyXWv)Zgh4s$9C6H1b0!RL98TZhwEqY*S0(3e60o9x%@c`q z7a2_niTku6|H{sl!bK@uETuX-I}3}5k@xsf{K;!)eIT#!x3q_@FgQdLgRF!^B+nQa zEo~`&beG(O9QkXPm4;OT~6?EB~dA@kDyOlF|6pWc5nQb^6fvgh=I)tMSc&yCsm|15%OdZ0-dB;@j{`q~J35fZ#wU7HcP+NI zRC#%OC^t2*g}Zqog}X7h z7dOB5$`N~(BTFzq&BGS>^QL`?W;<=F+c@~*o-mv-%?)vbBSl`y$A8V8qTU4oqE3?2 zHan_!k1Sz#?}dDMm62JFuh+bQ5jG+kZE*bwc`m_=L%O|fIM2MJt_mnj{>!ki@Jh}Y znLwLaZZ-9l@FxTeMpLC3j*CHnVi~E3ZNnT6KCmVoFQ3S@2V}cb45t$i5(WuecE-9fV{?%{yqdPV(aMIA1^1zoj!J7i69O+GAGsgQ{(R`bo)5N3WkTrju zCUJ~n_2|#~9kE=Do(Frykp}Vo_kCIgB)5U+q=j}j$&z4YXq;z7m(-Y8Sxh@dh&P2R zP}9&ef{Tfg#bGb%P@fe8tqEm9A|g6r2GlJJ3qKm8Ap?DCdEL5V3DXCuIB}>{N15sM z6Tj=5G$C)Yq@<+7lSz~1w$N2^sKtA&QDn*aNlXom<*mKo@)1Y(5D#4sVP+QQQehc^ zX}j+}h(C*CvbHE>Wdgkx^IyJHPV7^p_#2<=AY!P!C->PeAMy&pZsy4jZdR_6}5&U#o-7V|1y zbKW00O&{U}K5R8EGLgL2{f!kU`=x|nb;mCUuQtSPLN11UuA~-=TXe`NrzU%X+aj!J z%Zlq}hF%ngH*C&Jler(73fH=vrS@1U>!y-<`}W1Ll5n?}=$u1sDSG`W0fZzcr){*4 zJn4Gy=;~y>m4gQIhKOj&gcE^NuqMGv+40yL9TO)wGXu}+i~y3WtR$_e2^+Oh8d zV}&F~kj*v?zkL`Tq-@nWqU0o(~mq)ao zQm7pdkNY^_an2D_LdvThAc-jFxKV?SN!@++@NEz!l5vF3RU>Q0FPPo6Jpy~S2kSf8 zzf_aqe{7RluP0f{#zM?g@3;yTpZL)zQm&y+K2^pa6L2rf56|;7*{FGRT0Q+^MbQQ6 zbJbFdLR8cwF`;L1H@}Qax3;$Y(0BW1KCfh&%p*(Qh1Yss?8^E`H8tSxw<2ASI|K%< zdQwZcP}G-~JH??sTRPnu1rBgn1>%LS7>qVZMA%dHz4FwFP@f&vwT&e?#^JrP7Cgdz zw_TH2jMSLxe+Wp+|c$0J~rgXjBX#f5RrkL7EooQZD}`8cUse`eU1AD z_Y*warO@zj#Xwo;$1H7oTyAb{BwPnWM{eo%_Tj1FJi}F0$&s-a>*PY6u=a7;QEO`X 
z3IYiBwV;WK$(Ju*j5JTSNdDoB?x&%u5ymDZv@wJVfHUopBTf#oq4($pOcl~eW-D^HZWih2n^t~8l-S{p39PtCbgkVj*AO~__C>! zuulw72#}Jcq@_joC193WCA5|r-Sr>s%~m4}+hFRwvEXK{(dOK?+&Nt9;#m7c7=^9W z9uSv9%gl|Tn2m2WQHTJcC&*PO_&>D(3=Fgk3}g!ng$`4Jmt4QW! zqG7VlSY^&!6!?Md7+tAwR3$<6-1175;NdZP`oB?P0r$+Wc+jvzYT9bt6>NRDlcw1O zC|iGO>-+gUKm)mDEbqCkb$O_%SjDo8lVGFWO6RC}vhzD1G~&GvaSmI4E=#Pm_#=fy zm{vsaMN^4|OSSt3`AYr*lYg1g{17~8c;1mI8}U<}*_i-Rah3{?AZcwMpiGE(%>eQ6 z>`R>)L2gkDq=bAN`VsuopmCKi>(>)M^^F>ck)k?&CV(bdQifP>Q%Ha$jKr`0bdHkbcp&C(YF`S&1GgVaZ(M#h6e1Y< z%uv7j&RPbxxGgu`C;aLa`!nAD*$LS)PwU^7S?lK%ld+bN>nr~OQIm|Dxwc&2Q8+5W zDJ!e^hlOmLD}xYivY{8e?{+$tblr1Xi5WNpQsL+Hy#3^XArD~$jJBO$p2=A21222N z`xcAro=&=5XV#nr;NyM<$hPbLZ0Y^0vB}A2;_OhI9qqdL24|Mv%q(lYJ|fLG&`w#| zu+P!o8E-#)+&RDguGKf+>?`rdZqbYr0awsxHxYQ9{ifWZ6e)o0Z;@cEul2FA+b}mU zj?0932x$E(PkDj0`D(>9(nOQ}AzO}RQ%h5;XQTgz7-k$kzM7sk%jB1l(UIYw=HD)a z?S4S=zL&gGcr8iB2$@M;rVA|nFpqb7u-M#wp#S#mXA(m%JZkS#wb1#BKYxhjW2ib) zT^C+Z>(x2EVArn;8XMDO)ve5HPwN|wrIY6r;mgz5pV#LUEHu)Bf6*r7)!#-!rQTA~ z#teFUCb{2)iv>+geSe#69N%j6-a2A~FTVBH{qRW7yQ~s63PKcfS=3Om&ta#s@B5#n zhzQ`Tsi{$V*k!9fQ6tPyWfI;^TU%S3YV;Ifc%4W?eY0oh8$s=JiGYobO?mz9YhMBf z9rrpeEiIbUWn(?Tf1!xND-{_ZA8~;3A5rl?lN!&OjJ#3>gG`y~vC!(<7rqM(?&vi) zwwjuWtY@)bn9}1?QX-GwU5{4yUNimMw;&Lv+VG|g^?2kl!%K&A08e;8%xylLA)3&ozas0D&cUy{(zRB zpBOAWEGDKKI2VVlk>`c)YVqmhA|;iSuuV)&V*$@R9|vgwbrrEQLdeo*#ym}Q0FazrLHFY(KCEwX~ zH_>n-A1v}Y+U*CVbp*RF`AtSoNDWv?BqhG~x$UR#8zUTtacmVI=W$wa$^HA)W@5ZW z(4MV9Z~ePm0xz}+Jw`YzGgBuhIM{j0JDV&NtVS_AKrXBG>+@GM;k0Sq=WpknC-qhH zHC34i9ntPEIU4+q;N5*2Eb)qvF9$ppBlO?Cee3b36)XN{9yxfx^x?yYN19J%>47F&L!FYUOCgVVcZD9vAy; z)HqL`NU5pe@sdS(MK524RFTR&3eA!tS2?vU5~a|_2^lEZ4}i|WM;4Eqe^a|VDkeds zU0^4)^=~R%#5J=j!cKYQpH7+(*(Z!aBo|IU2iMc|~P(`Xjn z$7hO9>lu<@bhXUQ&d$i|`T0qdk7!}RfQfHPm|skcl8A`tm=9zD+J1rfU}BZiabdoSigONUjN$go`?uN9>=-~W0?pz!)N%*ly&ZjQH$ z01}Fg0z-J31in?!6SVnUYhFk|BiECS>H}NChqb{j*+$b#li_6NU?G&0{uliWVKy;;oPAJnpR~ zVmmcVoN&bIGoQjS(b2#&GBD-#dR~_W5I}Ve*R!efHK9l{zTdCXN@~S1^+a6vDfW|x zX5{2e>~*9tpzXyDSr+&KTUQ~5q-j%5uX()+le$qbaRg>xn;j;OHN1Bj{w1m|qy$&6 zJ-4)F4!=i44fT$oCJ4B@@sYN%A#7+A@wJFrS~BWSmuPO6m#LDMkwFUZd_3_UUnVLGG%ZLqoEyw<7Ki!ZDg5}3EVZbO z|6c&T-jh`yO(BG6XlVG0Rl%PrOz_h+mZPKNqhY5IKVn1;rPN^Jbu+(5ar5wSx&9QN zkU+=FTREDZ%NG(^*iyj}8Iqe@03cW?C=ifyq(*x!Pm*+D6ownAbz;|B*>~SHhX@za% zEYUPSbJ674oe6-eX*+pkXR_X(G*xCQA0YyfCMc_>!rJ(RS zh$eh0E;S1AG2EhAYPrAb>FcYgK{C?yAVXGb+2ZN)rohN%#5f)@K3K4Z)ZIt4^~v^( z96%C({J`L_v-_6t-DK89pE`7n65^Rr{D;`kb`Bj~^16M?+}wKj!a<=ufbOT}?|v6Z zgJU)_<6z-?jW+@UrrRzl!fL@dS}JX)?|s@_`FG*wSsPT%{!e+#rs_ytw#I5*pHj?E zffX%fy>VA~OBz+J63wfP zgB-SQzvO3s;{2Fx1-WIpECA)hcDqJGFMj@!`<$TNVS1^spEEfRP*@8Ynp)hM6=tca z=Q9TSpihKv1Ui8OWeR6vXYcyXrnmY$T|SB|cxp=L{f7^|OB4WVYs3%rm_4{7@6hQ~+1r`4~lJ?=Su+u6BiykdNmx+Tz9vEe5 zY3WsgzRgYZM?U!Y_;f1Fut9#){${V*^ww=Yx723#PsXob$d*-Paf~W**4Bky4K+2S z#l^)n@MsZSQc|X7u7kN+0uVy+nDv*s-HjJ$m-*azz+f;MPEOqFc^7iXd~#zYV?byq z8ZQ|(zwO+kr(%j%q-SI_ZSg|@5ax0Qg?Dwy{rdHbhMvA-x5;(EopGYXFos;f z7SK^$XvK!y-rb4NbM_uJl&zoxI}=;ZN%jJ>_Ri9^YpGNk*vhu{_JIYaDqS|)xf=h+ zSS6FmWbJP}sfY{fV5&&y^|goSaSt8H=eS+WyK*^gs-}o}5`!hX?Ur?~dOofn$Zcn` zL>{DazkdB{zux~eV)B(p&Oj0m8bnoFJM{S2(d+i8J1;+93S@yj&bEWWoj2HmN3u0O zcf#V=n{=WVbD!*;on;@P?z?xYs;YkT^{%F^0Vrk{0|NuW$;sqYK9|Pt>RqrU?oQ*5 ziH^G5w+iZkB`qVZ1x$ptOCSBVq$Otvyx4lQJ(@^{Y+4h zlu;!O=I$;8mWu+Zv0p~?^74Z47ERMPCWVBA(29yueA*a{kHHM^D=p=u5OKjW?TU;& zC>poqvR{$}d_s06iewZO6)PsmQA4tu8ygx_4Gm)eR!kB1CVNzm?VX+F{r$&#j!_kf zM?jg#XGP3z2<~+XS!+u=4#itrTbWo{0ic%!_zL9MNx`{~=)1@Q2PK9*V(oYp*{{`@ zNLyPQ9553w+cEjxQfuJj21B|c?kuWwVe<0umCgvAMi0KZxw)miy{Otx8??g0WGE;o z0Co5Q&FqSNN@us@4z#|#vr`s|^J+5U8U+=5aWgxI`NcbRb#;KG0vj4cy^ebsfyO3s 
znW9$evSnswmVW3X9vdHjI^TENBol%qWne%BGIozO`)=;;y)i|G%~YM8oj|sa=3@TB zq1N)_A7Du)O4wS!1+SaCEV%5FaS5;j-n}e1yXIt21YG~sYrd`aw|XO#zf+|c8T}Rq zGUe$8l~;4tcvtW{YKm26{@)1i{8w5Ek5@VWSv6JHlYhTOSKlX?csSCBE_{6o%o*bNQWUOzVIze$p5Yvp z0_#A<1!7N9ie}F>qKJ)#<9cG-;kLkLtF1bAr|q#9k)&L06P+~Vo(@+&k0x3^q0>=7 z!jc&1Z&XxN&*0!6XD;D^&Il4U9i1>>GBSZ`71g=)D7;~e53eHw+*JXdFK>59^++-_ z^h>biR}$4zBz&n3?ybyOF0_|&)j!%dYV}SpzvoLxmk3Tt_4NzAHG9_Ru6l6{YW8Zm z2;N4uNP4NU1u{eGaBTZwMV(7DNxc7Fk+rJ~s>~$50A5bKXLVuuJtZpI(<`H@ z{eeIwt@(ED!0BZmwFwjZFR{yX&UBXH7&~RB%VK}nq0X5CS`@AM1aE*Z8OW%9td!)Y zV@qel9-`l9k4nlDBc*sByUbLO+2%o>5F!i~qVCBl*hAZ{lDT_e&ps0B58TElQwB={ zR5ZR&voE6 z8es|npKUL@LcxHPU3F;V^JF7?)!d~nl~nm{8E%R%l4J3t6k`@OM;aSO&8X94U;s84 z;5L`|c^3gZ5P=85k^b8!PnJQGb$F{(YBae-Rr<27k##1mo+%&(=G*%?RnnSK{oV=- zx{W_~nZNhzP8mpV8WR=G`Kj&gn8zbIyuiRZLIEF3%B`D9W>b8?Zar8FJK(+WV&(Ww z&^a?4_<(&*!Mr{Pcy2nQx1C%h#>%i>xd;}t{0X4a04%{El(D{Ri6$AWU;y6r`uzde zU!J+k-jqtF)zei4a0*-^TBY~_(I1as517bLn{3l!sA$tb4kKK_V(9o z;?~$_W-tco-zw#jA!2vi5tY>Q4`{Tl(_~aS)&-Q&5gGI)ZJKCt3hIUHX{(N0L!;7vnF-~`g!ZU`q5DqY84C@u)G&|&J1}rp*>&pjGdWKzovHm7 zeaCgs_fA2bSPq;LM+HDzZxF~{+2zH8jzbVweg>CdPqEN!m&)!W&Bqon!TM`*iI=Z> z9=kh%?w*6CM+880AHn3IOFiTgPi^}vnd2(jdyl}#XM)Fo!_jj9;CeHvS9wV(Ryiqv zl3BnKc^eXYo&xqV1V_45Y8jnBse%m>B1c6NqL82dkHHFH5ZWDr0E1CURQkkyKonbG zz_aO-E|rnLO~7Ofg6)U)Q3?2PQ+V`EFRvB?`lY*Ivq;jWV`j*n1HN*>=7gH5kV`n2 zGHkPh-w$_<;f(Fk*EoVA;DR|@pyCrobW-WcQ2a#C)3115tl=w*K`xK_5l>qGSxaFS*p<7HCn&+ z+9s!u#O_|bT%dIHrIiQL&RubyM``+(CaB|u?Tun5`k)ELx0{Uw1$8b8C*!fyCCjmB z&E3H#lVzY1n1SJ3?!APe`IRreGu^E-WA2IuJRn(0caFb6V85w|n!iYbL*kFG;Z1U~r_zf;y71leJVB_}NQGQ8CJ%Vs81@BpMvU z_pqZmCi!gik`28mKWXnu@2*7k)T?4IDo(o0C-=mHt+8;MtozBX8OX?U0bfXPU0H74 zmD>I$1%}Q$p7+E|_D9Dj@GWhs5ZS9$?@Yj;!2Bw==gy#D`+)q}5wU2k7ybE0JbYo6 zZfTnqGThi(U6n3lne!FvGkQz4-D{csjgwMy7cAS;kw5jfu;>6V96m$CH)a5^l=sox zt7@YQ|F?;^BQEBZN_RNEo;$hx^EWLxO8y$U^M1xLcCa&CHm@AF$1%LcyYQd;z#uBK z(2kIhSF3c#^~wM3o|$>~K}X{=NB{&fZWv2TyVdt8tcC%#8T&O8N=Rj6QLNki2eS47 z8{)^95Yy4MH!9MQ0*el6m6~t+#4#TLD(Go=`|HQE3ZS>D;h@f{aQ<5ms1(kQQ&dpTUl9&^f=?+Beg9kR~0x}KZt1?8k(QzEPdtd z%-nG5>+pvi6>mR1&B`T`4_cl??bCqPzLH+)%(bho*$Dz`NnBi< zW`A)gqa@3Ii)0!}m7dVhzVf#L_6K+}c2`h*{#e})+oL?rwJMSWRi!F5H6F`mcW>U6 z49crcvDK)#%8GB?=KE$9uj9jtFz~o4h6^ri@|t~~euOroHUBhsa}T9)Jfuhwzhxi$ zoqV{B7ruQ=K$?7dVOD|$aaEDfuSn9eIT~%!Drg%Z&$hq78kS&R*)7BQ=XOHIYl(sy z?S?BB7@D!P%6N!F7BJid&lvxG2;cwca5L5nm)`%8&gMGyr4}8d9z~nOH-_py_V^Qr z;h&fcDVI>-CFsJt_@H>S(l^oa7Rwhk2Ks)RcoAxvrP#Vr_;1#_Gpq|+^Nu*Afy_43 z67UFxF*4;)qU8J&V{wom#rnPe1<3*-U2HP&0mpA7dsastpS3i}P*}`PF z(rB(3;)4gQKPbBR$xb4{fXt$;)RFo*>X^|hN((`4Ec0WD4&l0;e?bAsoTwMVW8g07 zJs>+~$N^hec13^$M38?A*EO$XzPvn(J@}MWOe%Iu_wygq(*9G3wre_ZiL3Jo;?JEt zlH_~`b6&-;Of8?ndyi*cNY^r1YCRW|=48uj1c)C@`3SM&Ydo$zu_)5Qx69#L@p>0x zs?uZ42E75n|G+y7->RVHEWqEZXBaOWZ`Qft{KgwxX58-Al2~g0KP>TxfA~pcjO~0m_@Z<8-#9k%43#4iX7bBP^AB5LRECy}^O%If8rl!q+F5Y-seITBa3EKV+zwgi++b~WKO#uI`m)T{&JQtQ z?NRE0S6Ui3bmmtvG#B0R!cUrM+wSqRvCU!9Shg2zNsx^SN@D#baR z^a%?Y^U~IQpV`pj*E-?&q6o<`R`7d&Mmp;3ZtW)(t>vZ{o?M;_kgQmPbGpA1C^VMD zBtpMzw?M%$GNk`OwleCK^r{$30;3BpipwsFKh7(rEQfp}F zR1G~0lyFGR4?d=+hDVWJU{1f59U}%Rr6sN#j|OpDc{=!)!IA~+CW|@`krg@2rhDn9 z?ITe@8~*P@8Mq}bb#@4&AO^@X>Oy-vWQkqTG*?g!#3XroJs|M~BALWAiqByR`@iMK zr~qZ+1z&mXS-0c2F5v{=zA7-qV8567F=mvO77U5ODu1N*h)2?RzlM90!zdpe)}BWZ z{>~&Z4e(o|adbBN6tuaIP~kkyhkl%3E+js$9k}9r8y)SIHQ6v>QZRbrx(Wrn^62P} zmk9-UDEGP2rKwF|o+&_RFJAUV{9=t5?qeS z`vm&t+exySz(V72sD6Sndt-L~#y2kN4~aIgq|Ad1ja5O_lq-T?SdtMR-AYHNKvjTGgAlPL?>`vh~|6iO?(c%#-wm%!7fc{|R&EF|0?SB{!~~!LRBbn;vOxj7rSUj*H^2)53m*$-$1VSJlou&XBP+)?VaKM%j6=)gz-#h<^9Lw`B7KM7j zu2NuuE^@jC@1>Y!0{_E?X1|n=7{P&BRRwbqJZ`502rv79uK#^l50aqmUNhT4RJ|zk 
[... binary PNG patch data omitted ...]

literal 0
HcmV?d00001

diff --git a/website/docs/assets/nuke_updateWorkfile.png b/website/docs/assets/nuke_updateWorkfile.png
new file mode 100644
index 0000000000000000000000000000000000000000..aeeebb123f0971938f79f7b086eb291eb99158fa
GIT binary patch
literal 30968
[... binary PNG patch data omitted ...]
literal 0
HcmV?d00001

From eebfd22e275bea13b7aa81e5fa0bf0ad89881326 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Wed, 31 Aug 2022 11:56:24 +0200
Subject: [PATCH 0857/2550] added nuke specific placeholders

---
 openpype/hosts/nuke/api/lib_template_builder.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/openpype/hosts/nuke/api/lib_template_builder.py b/openpype/hosts/nuke/api/lib_template_builder.py
index 555d942cea..61baa23928 100644
--- a/openpype/hosts/nuke/api/lib_template_builder.py
+++ b/openpype/hosts/nuke/api/lib_template_builder.py
@@ -146,24 +146,24 @@ current context asset (Asset bob will find asset)
 linked_asset : Template loader will look for assets linked to
 current context asset.
-Linked asset are looked in avalon database under field "inputLinks"
+Linked asset are looked in OpenPype database under field "inputLinks"
 """
         ),
         qargparse.String(
             "family",
             default=options.get("family", ""),
             label="OpenPype Family",
-            placeholder="ex: model, look ..."),
+            placeholder="ex: image, plate ..."),
         qargparse.String(
             "representation",
             default=options.get("representation", ""),
             label="OpenPype Representation",
-            placeholder="ex: ma, abc ..."),
+            placeholder="ex: mov, png ..."),
         qargparse.String(
             "loader",
             default=options.get("loader", ""),
             label="Loader",
-            placeholder="ex: ReferenceLoader, LightLoader ...",
+            placeholder="ex: LoadClip, LoadImage ...",
             help="""Loader
 Defines what openpype loader will be used to load assets.
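The hunk above only swaps the example strings shown in the Nuke template-builder dialog; the qargparse option pattern itself is unchanged. A rough sketch of that pattern with the new Nuke-oriented placeholder text follows, where the plain `import qargparse` and the `options` dict are assumptions for illustration, since only the call sites appear in the hunk:

    import qargparse

    # Values would normally come from the placeholder node being configured;
    # an empty dict stands in for that here (assumption for illustration).
    options = {}

    representation_option = qargparse.String(
        "representation",
        default=options.get("representation", ""),
        label="OpenPype Representation",
        placeholder="ex: mov, png ...",  # the new example text introduced by this patch
    )
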
From 5d6b672c9b6ec4a0c9bc0f7069041b7d7c06bfa5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 12:08:09 +0200 Subject: [PATCH 0858/2550] fix knob removement --- openpype/hosts/nuke/api/template_loader.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/template_loader.py b/openpype/hosts/nuke/api/template_loader.py index d4a8560c4a..5ff4b8fc41 100644 --- a/openpype/hosts/nuke/api/template_loader.py +++ b/openpype/hosts/nuke/api/template_loader.py @@ -88,8 +88,9 @@ class NukeTemplateLoader(AbstractTemplateLoader): nodes.append(node) for node in processed_nodes: - if processed_key in node.knobs(): - nuke.removeKnob(node, processed_key) + knob = node.knob(processed_key) + if knob is not None: + node.removeKnob(knob) @staticmethod def get_template_nodes(): From 1ff3b31d4087e92b705cf80ad939332e00a35f5b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 14:43:10 +0200 Subject: [PATCH 0859/2550] added cli commands for sync server --- .../modules/sync_server/sync_server_module.py | 50 ++++++++++++++++++- openpype/pype_commands.py | 9 +++- 2 files changed, 56 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 634b68c55f..a478faa9ef 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1,11 +1,16 @@ import os -from bson.objectid import ObjectId +import sys +import time from datetime import datetime import threading import platform import copy +import signal from collections import deque, defaultdict +import click +from bson.objectid import ObjectId + from openpype.client import get_projects from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule @@ -2080,3 +2085,46 @@ class SyncServerModule(OpenPypeModule, ITrayModule): settings ('presets') """ return presets[project_name]['sites'][site_name]['root'] + + def cli(self, click_group): + click_group.add_command(cli_main) + + +@click.group(SyncServerModule.name, help="SyncServer module related commands.") +def cli_main(): + pass + + +@cli_main.command() +@click.option( + "-a", + "--active_site", + required=True, + help="Name of active stie") +def syncservice(active_site): + """Launch sync server under entered site. + + This should be ideally used by system service (such us systemd or upstart + on linux and window service). + """ + + from openpype.modules import ModulesManager + + os.environ["OPENPYPE_LOCAL_ID"] = active_site + + def signal_handler(sig, frame): + print("You pressed Ctrl+C. Process ended.") + sync_server_module.server_exit() + sys.exit(0) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + manager = ModulesManager() + sync_server_module = manager.modules_by_name["sync_server"] + + sync_server_module.server_init() + sync_server_module.server_start() + + while True: + time.sleep(1.0) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index fe46a4bc54..85561495fd 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -4,6 +4,7 @@ import os import sys import json import time +import signal class PypeCommands: @@ -315,8 +316,12 @@ class PypeCommands: pytest.main(args) def syncserver(self, active_site): - """Start running sync_server in background.""" - import signal + """Start running sync_server in background. + + This functionality is available in directly in module cli commands. 
+ `~/openpype_console module sync_server syncservice` + """ + os.environ["OPENPYPE_LOCAL_ID"] = active_site def signal_handler(sig, frame): From 519f8088674d6e48ffa6d03df4e5e38c2866f6d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 15:02:55 +0200 Subject: [PATCH 0860/2550] moved 'create_project' to client operations --- openpype/client/__init__.py | 7 +++ openpype/client/operations.py | 87 +++++++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+) diff --git a/openpype/client/__init__.py b/openpype/client/__init__.py index 64a82334d9..4b2d56168f 100644 --- a/openpype/client/__init__.py +++ b/openpype/client/__init__.py @@ -45,6 +45,11 @@ from .entities import ( get_workfile_info, ) +from .operations import ( + create_project, +) + + __all__ = ( "OpenPypeMongoConnection", @@ -88,4 +93,6 @@ __all__ = ( "get_thumbnail_id_from_source", "get_workfile_info", + + "create_project", ) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 9daaa3e116..48e8645726 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -9,6 +9,7 @@ from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne, UpdateOne from .mongo import get_project_connection +from .entities import get_project REMOVED_VALUE = object() @@ -662,3 +663,89 @@ class OperationsSession(object): operation = DeleteOperation(project_name, entity_type, entity_id) self.add(operation) return operation + + +def create_project(project_name, project_code, library_project=False): + """Create project using OpenPype settings. + + This project creation function is not validating project document on + creation. It is because project document is created blindly with only + minimum required information about project which is it's name, code, type + and schema. + + Entered project name must be unique and project must not exist yet. + + Note: + This function is here to be OP v4 ready but in v3 has more logic + to do. That's why inner imports are in the body. + + Args: + project_name(str): New project name. Should be unique. + project_code(str): Project's code should be unique too. + library_project(bool): Project is library project. + + Raises: + ValueError: When project name already exists in MongoDB. + + Returns: + dict: Created project document. 
+ """ + + from openpype.settings import ProjectSettings, SaveWarningExc + from openpype.pipeline.schema import validate + + if get_project(project_name, fields=["name"]): + raise ValueError("Project with name \"{}\" already exists".format( + project_name + )) + + if not PROJECT_NAME_REGEX.match(project_name): + raise ValueError(( + "Project name \"{}\" contain invalid characters" + ).format(project_name)) + + project_doc = { + "type": "project", + "name": project_name, + "data": { + "code": project_code, + "library_project": library_project + }, + "schema": CURRENT_PROJECT_SCHEMA + } + + op_session = OperationsSession() + # Insert document with basic data + create_op = op_session.create_entity( + project_name, project_doc["type"], project_doc + ) + op_session.commit() + + # Load ProjectSettings for the project and save it to store all attributes + # and Anatomy + try: + project_settings_entity = ProjectSettings(project_name) + project_settings_entity.save() + except SaveWarningExc as exc: + print(str(exc)) + except Exception: + op_session.delete_entity( + project_name, project_doc["type"], create_op.entity_id + ) + op_session.commit() + raise + + project_doc = get_project(project_name) + + try: + # Validate created project document + validate(project_doc) + except Exception: + # Remove project if is not valid + op_session.delete_entity( + project_name, project_doc["type"], create_op.entity_id + ) + op_session.commit() + raise + + return project_doc From c62e12a11bf69f98d3c7d6d0c9c062c1086393c0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 15:03:19 +0200 Subject: [PATCH 0861/2550] marked 'create_project' in lib as deprecated --- openpype/lib/avalon_context.py | 53 ++-------------------------------- 1 file changed, 3 insertions(+), 50 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 7d56d039d4..d2a0665bf5 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -84,6 +84,7 @@ def deprecated(new_destination): return _decorator(func) +@deprecated("openpype.client.operations.create_project") def create_project( project_name, project_code, library_project=False, dbcon=None ): @@ -109,57 +110,9 @@ def create_project( dict: Created project document. 
""" - from openpype.settings import ProjectSettings, SaveWarningExc - from openpype.pipeline import AvalonMongoDB - from openpype.pipeline.schema import validate + from openpype.client.operations import create_project - if get_project(project_name, fields=["name"]): - raise ValueError("Project with name \"{}\" already exists".format( - project_name - )) - - if dbcon is None: - dbcon = AvalonMongoDB() - - if not PROJECT_NAME_REGEX.match(project_name): - raise ValueError(( - "Project name \"{}\" contain invalid characters" - ).format(project_name)) - - database = dbcon.database - project_doc = { - "type": "project", - "name": project_name, - "data": { - "code": project_code, - "library_project": library_project - }, - "schema": CURRENT_DOC_SCHEMAS["project"] - } - # Insert document with basic data - database[project_name].insert_one(project_doc) - # Load ProjectSettings for the project and save it to store all attributes - # and Anatomy - try: - project_settings_entity = ProjectSettings(project_name) - project_settings_entity.save() - except SaveWarningExc as exc: - print(str(exc)) - except Exception: - database[project_name].delete_one({"type": "project"}) - raise - - project_doc = get_project(project_name) - - try: - # Validate created project document - validate(project_doc) - except Exception: - # Remove project if is not valid - database[project_name].delete_one({"type": "project"}) - raise - - return project_doc + return create_project(project_name, project_code, library_project) def with_pipeline_io(func): From a1b20c22683b8c26e5158e5cdd601b6c3c01d40e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 15:03:49 +0200 Subject: [PATCH 0862/2550] use new location of 'create_project' function --- .../ftrack/event_handlers_server/action_prepare_project.py | 6 ++---- .../ftrack/event_handlers_user/action_prepare_project.py | 6 ++---- openpype/modules/kitsu/utils/update_op_with_zou.py | 6 +++--- openpype/tools/project_manager/project_manager/widgets.py | 7 +++---- 4 files changed, 10 insertions(+), 15 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py index 713a4d9aba..332648cd02 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py @@ -1,10 +1,8 @@ import json import copy -from openpype.client import get_project -from openpype.api import ProjectSettings -from openpype.lib import create_project -from openpype.settings import SaveWarningExc +from openpype.client import get_project, create_project +from openpype.settings import ProjectSettings, SaveWarningExc from openpype_modules.ftrack.lib import ( ServerAction, diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py index e89595109e..e825198180 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py @@ -1,10 +1,8 @@ import json import copy -from openpype.client import get_project -from openpype.api import ProjectSettings -from openpype.lib import create_project -from openpype.settings import SaveWarningExc +from openpype.client import get_project, create_project +from openpype.settings import ProjectSettings, SaveWarningExc from openpype_modules.ftrack.lib import ( BaseAction, diff --git 
a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index e03cf2b30e..8d65591c0b 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -15,10 +15,10 @@ from openpype.client import ( get_assets, get_asset_by_id, get_asset_by_name, + create_project, ) from openpype.pipeline import AvalonMongoDB -from openpype.api import get_project_settings -from openpype.lib import create_project +from openpype.settings import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials @@ -278,7 +278,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_doc = get_project(project_name) if not project_doc: print(f"Creating project '{project_name}'") - project_doc = create_project(project_name, project_name, dbcon=dbcon) + project_doc = create_project(project_name, project_name) # Project data and tasks project_data = project_doc["data"] or {} diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index d0715f204d..4bc968347a 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -1,14 +1,13 @@ import re -from openpype.client import get_projects +from openpype.client import get_projects, create_project from .constants import ( NAME_ALLOWED_SYMBOLS, NAME_REGEX ) -from openpype.lib import create_project from openpype.client.operations import ( PROJECT_NAME_ALLOWED_SYMBOLS, - PROJECT_NAME_REGEX + PROJECT_NAME_REGEX, ) from openpype.style import load_stylesheet from openpype.pipeline import AvalonMongoDB @@ -266,7 +265,7 @@ class CreateProjectDialog(QtWidgets.QDialog): project_name = self.project_name_input.text() project_code = self.project_code_input.text() library_project = self.library_project_input.isChecked() - create_project(project_name, project_code, library_project, self.dbcon) + create_project(project_name, project_code, library_project) self.done(1) From ac273094ba6286df53ff5b07cc3697575865aac1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 15:10:58 +0200 Subject: [PATCH 0863/2550] added removement version --- openpype/lib/avalon_context.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index d2a0665bf5..08c221cb81 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -108,6 +108,9 @@ def create_project( Returns: dict: Created project document. 
+ + Deprecated: + Function will be removed after release version 3.16.* """ from openpype.client.operations import create_project From acf3d67f242625f8a91317b602ffa6467ff7cc6b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 15:59:20 +0200 Subject: [PATCH 0864/2550] moved 'create_workdir_extra_folders' to 'openpype.pipeline.workfile' --- .../hooks/pre_create_extra_workdir_folders.py | 6 +- openpype/lib/path_tools.py | 89 ++++++++++++------- openpype/lib/plugin_tools.py | 1 - openpype/pipeline/workfile/__init__.py | 4 + openpype/pipeline/workfile/path_resolving.py | 57 ++++++++++++ openpype/tools/workfiles/files_widget.py | 10 +-- 6 files changed, 126 insertions(+), 41 deletions(-) diff --git a/openpype/hooks/pre_create_extra_workdir_folders.py b/openpype/hooks/pre_create_extra_workdir_folders.py index d79c5831ee..c5af620c87 100644 --- a/openpype/hooks/pre_create_extra_workdir_folders.py +++ b/openpype/hooks/pre_create_extra_workdir_folders.py @@ -1,8 +1,6 @@ import os -from openpype.lib import ( - PreLaunchHook, - create_workdir_extra_folders -) +from openpype.lib import PreLaunchHook +from openpype.pipeline.workfile import create_workdir_extra_folders class AddLastWorkfileToLaunchArgs(PreLaunchHook): diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index f60b2fa722..671591bca4 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -16,6 +16,51 @@ from .profiles_filtering import filter_profiles log = logging.getLogger(__name__) +class PathToolsDeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", PathToolsDeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=PathToolsDeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + def format_file_size(file_size, suffix=None): """Returns formatted string with size in appropriate unit. @@ -333,6 +378,7 @@ def get_project_basic_paths(project_name): return _list_path_items(folder_structure) +@deprecated("openpype.pipeline.workfile.create_workdir_extra_folders") def create_workdir_extra_folders( workdir, host_name, task_type, task_name, project_name, project_settings=None @@ -349,37 +395,18 @@ def create_workdir_extra_folders( project_name (str): Name of project on which task is. project_settings (dict): Prepared project settings. Are loaded if not passed. 
+ + Deprecated: + Function will be removed after release version 3.16.* """ - # Load project settings if not set - if not project_settings: - project_settings = get_project_settings(project_name) - # Load extra folders profiles - extra_folders_profiles = ( - project_settings["global"]["tools"]["Workfiles"]["extra_folders"] + from openpype.pipeline.project_folders import create_workdir_extra_folders + + return create_workdir_extra_folders( + workdir, + host_name, + task_type, + task_name, + project_name, + project_settings ) - # Skip if are empty - if not extra_folders_profiles: - return - - # Prepare profiles filters - filter_data = { - "task_types": task_type, - "task_names": task_name, - "hosts": host_name - } - profile = filter_profiles(extra_folders_profiles, filter_data) - if profile is None: - return - - for subfolder in profile["folders"]: - # Make sure backslashes are converted to forwards slashes - # and does not start with slash - subfolder = subfolder.replace("\\", "/").lstrip("/") - # Skip empty strings - if not subfolder: - continue - - fullpath = os.path.join(workdir, subfolder) - if not os.path.exists(fullpath): - os.makedirs(fullpath) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 81d268ea1c..1e157dfbfd 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -3,7 +3,6 @@ import os import logging import re -import json import warnings import functools diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py index 0aad29b6f9..94ecc81bd6 100644 --- a/openpype/pipeline/workfile/__init__.py +++ b/openpype/pipeline/workfile/__init__.py @@ -9,6 +9,8 @@ from .path_resolving import ( get_custom_workfile_template, get_custom_workfile_template_by_string_context, + + create_workdir_extra_folders, ) from .build_workfile import BuildWorkfile @@ -26,5 +28,7 @@ __all__ = ( "get_custom_workfile_template", "get_custom_workfile_template_by_string_context", + "create_workdir_extra_folders", + "BuildWorkfile", ) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index 6d9e72dbd2..1243e84148 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -467,3 +467,60 @@ def get_custom_workfile_template_by_string_context( return get_custom_workfile_template( project_doc, asset_doc, task_name, host_name, anatomy, project_settings ) + + +def create_workdir_extra_folders( + workdir, + host_name, + task_type, + task_name, + project_name, + project_settings=None +): + """Create extra folders in work directory based on context. + + Args: + workdir (str): Path to workdir where workfiles is stored. + host_name (str): Name of host implementation. + task_type (str): Type of task for which extra folders should be + created. + task_name (str): Name of task for which extra folders should be + created. + project_name (str): Name of project on which task is. + project_settings (dict): Prepared project settings. Are loaded if not + passed. 
+ """ + + # Load project settings if not set + if not project_settings: + project_settings = get_project_settings(project_name) + + # Load extra folders profiles + extra_folders_profiles = ( + project_settings["global"]["tools"]["Workfiles"]["extra_folders"] + ) + # Skip if are empty + if not extra_folders_profiles: + return + + # Prepare profiles filters + filter_data = { + "task_types": task_type, + "task_names": task_name, + "hosts": host_name + } + profile = filter_profiles(extra_folders_profiles, filter_data) + if profile is None: + return + + for subfolder in profile["folders"]: + # Make sure backslashes are converted to forwards slashes + # and does not start with slash + subfolder = subfolder.replace("\\", "/").lstrip("/") + # Skip empty strings + if not subfolder: + continue + + fullpath = os.path.join(workdir, subfolder) + if not os.path.exists(fullpath): + os.makedirs(fullpath) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index a5d5b14bb6..b4f5e422bc 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -10,10 +10,7 @@ from openpype.host import IWorkfileHost from openpype.client import get_asset_by_id from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate -from openpype.lib import ( - emit_event, - create_workdir_extra_folders, -) +from openpype.lib import emit_event from openpype.pipeline import ( registered_host, legacy_io, @@ -23,7 +20,10 @@ from openpype.pipeline.context_tools import ( compute_session_changes, change_current_context ) -from openpype.pipeline.workfile import get_workfile_template_key +from openpype.pipeline.workfile import ( + get_workfile_template_key, + create_workdir_extra_folders, +) from .model import ( WorkAreaFilesModel, From d3a9610c5a92f17bd319843eab4a99f6d68acce4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 16:09:15 +0200 Subject: [PATCH 0865/2550] moved helper function to pipeline.project_folders --- openpype/lib/path_tools.py | 64 +++++++++++----------------- openpype/pipeline/project_folders.py | 49 +++++++++++++++++++++ 2 files changed, 73 insertions(+), 40 deletions(-) create mode 100644 openpype/pipeline/project_folders.py diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 671591bca4..736eb0effc 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -277,58 +277,42 @@ def get_last_version_from_path(path_dir, filter): return None +@deprecated("openpype.pipeline.project_folders.concatenate_splitted_paths") def concatenate_splitted_paths(split_paths, anatomy): - pattern_array = re.compile(r"\[.*\]") - output = [] - for path_items in split_paths: - clean_items = [] - if isinstance(path_items, str): - path_items = [path_items] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - for path_item in path_items: - if not re.match(r"{.+}", path_item): - path_item = re.sub(pattern_array, "", path_item) - clean_items.append(path_item) + from openpype.pipeline.project_folders import concatenate_splitted_paths - # backward compatibility - if "__project_root__" in path_items: - for root, root_path in anatomy.roots.items(): - if not os.path.exists(str(root_path)): - log.debug("Root {} path path {} not exist on \ - computer!".format(root, root_path)) - continue - clean_items = ["{{root[{}]}}".format(root), - r"{project[name]}"] + clean_items[1:] - 
output.append(os.path.normpath(os.path.sep.join(clean_items))) - continue - - output.append(os.path.normpath(os.path.sep.join(clean_items))) - - return output + return concatenate_splitted_paths(split_paths, anatomy) +@deprecated def get_format_data(anatomy): - project_doc = get_project(anatomy.project_name, fields=["data.code"]) - project_code = project_doc["data"]["code"] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - return { - "root": anatomy.roots, - "project": { - "name": anatomy.project_name, - "code": project_code - }, - } + from openpype.pipeline.template_data import get_project_template_data + + data = get_project_template_data(project_name=anatomy.project_name) + data["root"] = anatomy.roots + return data +@deprecated("openpype.pipeline.project_folders.fill_paths") def fill_paths(path_list, anatomy): - format_data = get_format_data(anatomy) - filled_paths = [] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - for path in path_list: - new_path = path.format(**format_data) - filled_paths.append(new_path) + from openpype.pipeline.project_folders import fill_paths - return filled_paths + return fill_paths(path_list, anatomy) def create_project_folders(basic_paths, project_name): diff --git a/openpype/pipeline/project_folders.py b/openpype/pipeline/project_folders.py new file mode 100644 index 0000000000..256c4e73d8 --- /dev/null +++ b/openpype/pipeline/project_folders.py @@ -0,0 +1,49 @@ +import os +import re + +from openpype.lib import Logger + +from .template_data import get_project_template_data + + +def concatenate_splitted_paths(split_paths, anatomy): + log = Logger.get_logger("concatenate_splitted_paths") + pattern_array = re.compile(r"\[.*\]") + output = [] + for path_items in split_paths: + clean_items = [] + if isinstance(path_items, str): + path_items = [path_items] + + for path_item in path_items: + if not re.match(r"{.+}", path_item): + path_item = re.sub(pattern_array, "", path_item) + clean_items.append(path_item) + + # backward compatibility + if "__project_root__" in path_items: + for root, root_path in anatomy.roots.items(): + if not os.path.exists(str(root_path)): + log.debug("Root {} path path {} not exist on \ + computer!".format(root, root_path)) + continue + clean_items = ["{{root[{}]}}".format(root), + r"{project[name]}"] + clean_items[1:] + output.append(os.path.normpath(os.path.sep.join(clean_items))) + continue + + output.append(os.path.normpath(os.path.sep.join(clean_items))) + + return output + + +def fill_paths(path_list, anatomy): + format_data = get_project_template_data(project_name=anatomy.project_name) + format_data["root"] = anatomy.roots + filled_paths = [] + + for path in path_list: + new_path = path.format(**format_data) + filled_paths.append(new_path) + + return filled_paths From c7ffda124e36813d00757d5096475bcb902c60f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 16:10:54 +0200 Subject: [PATCH 0866/2550] moved 'create_project_folders' and 'get_project_basic_paths' --- openpype/lib/path_tools.py | 65 ++++++------------- .../action_create_project_structure.py | 7 +- openpype/pipeline/project_folders.py | 56 ++++++++++++++++ .../project_manager/project_manager/window.py | 24 +++---- 4 files changed, 88 insertions(+), 64 deletions(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 736eb0effc..5800498b07 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -1,21 +1,17 @@ import os import re -import abc 
-import json import logging -import six import platform +import functools +import warnings import clique -from openpype.client import get_project -from openpype.settings import get_project_settings - -from .profiles_filtering import filter_profiles - log = logging.getLogger(__name__) + + class PathToolsDeprecatedWarning(DeprecationWarning): pass @@ -315,51 +311,28 @@ def fill_paths(path_list, anatomy): return fill_paths(path_list, anatomy) +@deprecated("openpype.pipeline.project_folders.create_project_folders") def create_project_folders(basic_paths, project_name): - from openpype.pipeline import Anatomy - anatomy = Anatomy(project_name) + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - concat_paths = concatenate_splitted_paths(basic_paths, anatomy) - filled_paths = fill_paths(concat_paths, anatomy) + from openpype.pipeline.project_folders import create_project_folders - # Create folders - for path in filled_paths: - if os.path.exists(path): - log.debug("Folder already exists: {}".format(path)) - else: - log.debug("Creating folder: {}".format(path)) - os.makedirs(path) - - -def _list_path_items(folder_structure): - output = [] - for key, value in folder_structure.items(): - if not value: - output.append(key) - else: - paths = _list_path_items(value) - for path in paths: - if not isinstance(path, (list, tuple)): - path = [path] - - item = [key] - item.extend(path) - output.append(item) - - return output + return create_project_folders(project_name, basic_paths) +@deprecated("openpype.pipeline.project_folders.get_project_basic_paths") def get_project_basic_paths(project_name): - project_settings = get_project_settings(project_name) - folder_structure = ( - project_settings["global"]["project_folder_structure"] - ) - if not folder_structure: - return [] + """ + Deprecated: + Function will be removed after release version 3.16.* + """ - if isinstance(folder_structure, str): - folder_structure = json.loads(folder_structure) - return _list_path_items(folder_structure) + from openpype.pipeline.project_folders import get_project_basic_paths + + return get_project_basic_paths(project_name) @deprecated("openpype.pipeline.workfile.create_workdir_extra_folders") diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py index df914de854..7c896570b1 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -1,7 +1,10 @@ import re +from openpype.pipeline.project_folders import ( + get_project_basic_paths, + create_project_folders, +) from openpype_modules.ftrack.lib import BaseAction, statics_icon -from openpype.api import get_project_basic_paths, create_project_folders class CreateProjectFolders(BaseAction): @@ -81,7 +84,7 @@ class CreateProjectFolders(BaseAction): } # Invoking OpenPype API to create the project folders - create_project_folders(basic_paths, project_name) + create_project_folders(project_name, basic_paths) self.create_ftrack_entities(basic_paths, project_entity) self.trigger_event( diff --git a/openpype/pipeline/project_folders.py b/openpype/pipeline/project_folders.py index 256c4e73d8..811b9aa648 100644 --- a/openpype/pipeline/project_folders.py +++ b/openpype/pipeline/project_folders.py @@ -1,8 +1,13 @@ import os import re +import json +import six + +from openpype.settings import get_project_settings from openpype.lib 
import Logger +from .anatomy import Anatomy from .template_data import get_project_template_data @@ -47,3 +52,54 @@ def fill_paths(path_list, anatomy): filled_paths.append(new_path) return filled_paths + + +def create_project_folders(project_name, basic_paths=None): + log = Logger.get_logger("create_project_folders") + anatomy = Anatomy(project_name) + if basic_paths is None: + basic_paths = get_project_basic_paths(project_name) + + concat_paths = concatenate_splitted_paths(basic_paths, anatomy) + filled_paths = fill_paths(concat_paths, anatomy) + + # Create folders + for path in filled_paths: + if os.path.exists(path): + log.debug("Folder already exists: {}".format(path)) + else: + log.debug("Creating folder: {}".format(path)) + os.makedirs(path) + return filled_paths + + +def _list_path_items(folder_structure): + output = [] + for key, value in folder_structure.items(): + if not value: + output.append(key) + continue + + paths = _list_path_items(value) + for path in paths: + if not isinstance(path, (list, tuple)): + path = [path] + + item = [key] + item.extend(path) + output.append(item) + + return output + + +def get_project_basic_paths(project_name): + project_settings = get_project_settings(project_name) + folder_structure = ( + project_settings["global"]["project_folder_structure"] + ) + if not folder_structure: + return [] + + if isinstance(folder_structure, six.string_types): + folder_structure = json.loads(folder_structure) + return _list_path_items(folder_structure) diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py index c6ae0ff352..3b2dea8ca3 100644 --- a/openpype/tools/project_manager/project_manager/window.py +++ b/openpype/tools/project_manager/project_manager/window.py @@ -1,5 +1,12 @@ from Qt import QtWidgets, QtCore, QtGui +from openpype import resources +from openpype.style import load_stylesheet +from openpype.widgets import PasswordDialog +from openpype.lib import is_admin_password_required, Logger +from openpype.pipeline import AvalonMongoDB +from openpype.pipeline.project_folders import create_project_folders + from . import ( ProjectModel, ProjectProxyFilter, @@ -13,17 +20,6 @@ from . 
import ( ) from .widgets import ConfirmProjectDeletion from .style import ResourceCache -from openpype.style import load_stylesheet -from openpype.lib import is_admin_password_required -from openpype.widgets import PasswordDialog -from openpype.pipeline import AvalonMongoDB - -from openpype import resources -from openpype.api import ( - get_project_basic_paths, - create_project_folders, - Logger -) class ProjectManagerWindow(QtWidgets.QWidget): @@ -259,12 +255,8 @@ class ProjectManagerWindow(QtWidgets.QWidget): qm.Yes | qm.No) if ans == qm.Yes: try: - # Get paths based on presets - basic_paths = get_project_basic_paths(project_name) - if not basic_paths: - pass # Invoking OpenPype API to create the project folders - create_project_folders(basic_paths, project_name) + create_project_folders(project_name) except Exception as exc: self.log.warning( "Cannot create starting folders: {}".format(exc), From 27e74ee0095d33c22ee2059f12efb508662d066f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 16:15:28 +0200 Subject: [PATCH 0867/2550] skip folders creation if are not set --- openpype/pipeline/project_folders.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/project_folders.py b/openpype/pipeline/project_folders.py index 811b9aa648..1bcba5c320 100644 --- a/openpype/pipeline/project_folders.py +++ b/openpype/pipeline/project_folders.py @@ -60,6 +60,9 @@ def create_project_folders(project_name, basic_paths=None): if basic_paths is None: basic_paths = get_project_basic_paths(project_name) + if not basic_paths: + return + concat_paths = concatenate_splitted_paths(basic_paths, anatomy) filled_paths = fill_paths(concat_paths, anatomy) @@ -70,7 +73,6 @@ def create_project_folders(project_name, basic_paths=None): else: log.debug("Creating folder: {}".format(path)) os.makedirs(path) - return filled_paths def _list_path_items(folder_structure): From 111a0be1a858f7568a169196c0be32e5d9da4a88 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 16:21:23 +0200 Subject: [PATCH 0868/2550] removed not needed lines --- openpype/lib/path_tools.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 5800498b07..0b6d0a3391 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -10,8 +10,6 @@ import clique log = logging.getLogger(__name__) - - class PathToolsDeprecatedWarning(DeprecationWarning): pass From 137e4098897942a0baa15e01ab6d1950996900ce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 18:08:16 +0200 Subject: [PATCH 0869/2550] convert functions to be usable in other files of client --- openpype/client/entities.py | 82 +++++++++++++++++++++++-------------- 1 file changed, 51 insertions(+), 31 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 3d2730a17c..a4bd07838a 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -32,17 +32,37 @@ def _prepare_fields(fields, required_fields=None): return output -def _convert_id(in_id): +def convert_id(in_id): + """Helper function for conversion of id from string to ObjectId. + + Args: + in_id (Union[str, ObjectId, Any]): Entity id that should be converted + to right type for queries. + + Returns: + Union[ObjectId, Any]: Converted ids to ObjectId or in type. 
+ """ + if isinstance(in_id, six.string_types): return ObjectId(in_id) return in_id -def _convert_ids(in_ids): +def convert_ids(in_ids): + """Helper function for conversion of ids from string to ObjectId. + + Args: + in_ids (Iterable[Union[str, ObjectId, Any]]): List of entity ids that + should be converted to right type for queries. + + Returns: + List[ObjectId]: Converted ids to ObjectId. + """ + _output = set() for in_id in in_ids: if in_id is not None: - _output.add(_convert_id(in_id)) + _output.add(convert_id(in_id)) return list(_output) @@ -115,7 +135,7 @@ def get_asset_by_id(project_name, asset_id, fields=None): None: Asset was not found by id. """ - asset_id = _convert_id(asset_id) + asset_id = convert_id(asset_id) if not asset_id: return None @@ -196,7 +216,7 @@ def _get_assets( query_filter = {"type": {"$in": asset_types}} if asset_ids is not None: - asset_ids = _convert_ids(asset_ids) + asset_ids = convert_ids(asset_ids) if not asset_ids: return [] query_filter["_id"] = {"$in": asset_ids} @@ -207,7 +227,7 @@ def _get_assets( query_filter["name"] = {"$in": list(asset_names)} if parent_ids is not None: - parent_ids = _convert_ids(parent_ids) + parent_ids = convert_ids(parent_ids) if not parent_ids: return [] query_filter["data.visualParent"] = {"$in": parent_ids} @@ -307,7 +327,7 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None): "type": "subset" } if asset_ids is not None: - asset_ids = _convert_ids(asset_ids) + asset_ids = convert_ids(asset_ids) if not asset_ids: return [] subset_query["parent"] = {"$in": asset_ids} @@ -347,7 +367,7 @@ def get_subset_by_id(project_name, subset_id, fields=None): Dict: Subset document which can be reduced to specified 'fields'. """ - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -374,7 +394,7 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): if not subset_name: return None - asset_id = _convert_id(asset_id) + asset_id = convert_id(asset_id) if not asset_id: return None @@ -428,13 +448,13 @@ def get_subsets( query_filter = {"type": {"$in": subset_types}} if asset_ids is not None: - asset_ids = _convert_ids(asset_ids) + asset_ids = convert_ids(asset_ids) if not asset_ids: return [] query_filter["parent"] = {"$in": asset_ids} if subset_ids is not None: - subset_ids = _convert_ids(subset_ids) + subset_ids = convert_ids(subset_ids) if not subset_ids: return [] query_filter["_id"] = {"$in": subset_ids} @@ -449,7 +469,7 @@ def get_subsets( for asset_id, names in names_by_asset_ids.items(): if asset_id and names: or_query.append({ - "parent": _convert_id(asset_id), + "parent": convert_id(asset_id), "name": {"$in": list(names)} }) if not or_query: @@ -510,7 +530,7 @@ def get_version_by_id(project_name, version_id, fields=None): Dict: Version document which can be reduced to specified 'fields'. """ - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return None @@ -537,7 +557,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None): Dict: Version document which can be reduced to specified 'fields'. """ - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -567,7 +587,7 @@ def version_is_latest(project_name, version_id): bool: True if is latest version from subset else False. 
""" - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return False version_doc = get_version_by_id( @@ -610,13 +630,13 @@ def _get_versions( query_filter = {"type": {"$in": version_types}} if subset_ids is not None: - subset_ids = _convert_ids(subset_ids) + subset_ids = convert_ids(subset_ids) if not subset_ids: return [] query_filter["parent"] = {"$in": subset_ids} if version_ids is not None: - version_ids = _convert_ids(version_ids) + version_ids = convert_ids(version_ids) if not version_ids: return [] query_filter["_id"] = {"$in": version_ids} @@ -690,7 +710,7 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None): Dict: Hero version entity data. """ - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -720,7 +740,7 @@ def get_hero_version_by_id(project_name, version_id, fields=None): Dict: Hero version entity data. """ - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return None @@ -786,7 +806,7 @@ def get_output_link_versions(project_name, version_id, fields=None): links for passed version. """ - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id: return [] @@ -812,7 +832,7 @@ def get_last_versions(project_name, subset_ids, fields=None): dict[ObjectId, int]: Key is subset id and value is last version name. """ - subset_ids = _convert_ids(subset_ids) + subset_ids = convert_ids(subset_ids) if not subset_ids: return {} @@ -898,7 +918,7 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None): Dict: Version document which can be reduced to specified 'fields'. """ - subset_id = _convert_id(subset_id) + subset_id = convert_id(subset_id) if not subset_id: return None @@ -971,7 +991,7 @@ def get_representation_by_id(project_name, representation_id, fields=None): "type": {"$in": repre_types} } if representation_id is not None: - query_filter["_id"] = _convert_id(representation_id) + query_filter["_id"] = convert_id(representation_id) conn = get_project_connection(project_name) @@ -996,7 +1016,7 @@ def get_representation_by_name( to specified 'fields'. 
""" - version_id = _convert_id(version_id) + version_id = convert_id(version_id) if not version_id or not representation_name: return None repre_types = ["representation", "archived_representations"] @@ -1089,7 +1109,7 @@ def _get_representations( query_filter = {"type": {"$in": repre_types}} if representation_ids is not None: - representation_ids = _convert_ids(representation_ids) + representation_ids = convert_ids(representation_ids) if not representation_ids: return default_output query_filter["_id"] = {"$in": representation_ids} @@ -1100,7 +1120,7 @@ def _get_representations( query_filter["name"] = {"$in": list(representation_names)} if version_ids is not None: - version_ids = _convert_ids(version_ids) + version_ids = convert_ids(version_ids) if not version_ids: return default_output query_filter["parent"] = {"$in": version_ids} @@ -1111,7 +1131,7 @@ def _get_representations( for version_id, names in names_by_version_ids.items(): if version_id and names: or_query.append({ - "parent": _convert_id(version_id), + "parent": convert_id(version_id), "name": {"$in": list(names)} }) if not or_query: @@ -1361,7 +1381,7 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id): if not src_type or not src_id: return None - query_filter = {"_id": _convert_id(src_id)} + query_filter = {"_id": convert_id(src_id)} conn = get_project_connection(project_name) src_doc = conn.find_one(query_filter, {"data.thumbnail_id"}) @@ -1388,7 +1408,7 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None): """ if thumbnail_ids: - thumbnail_ids = _convert_ids(thumbnail_ids) + thumbnail_ids = convert_ids(thumbnail_ids) if not thumbnail_ids: return [] @@ -1416,7 +1436,7 @@ def get_thumbnail(project_name, thumbnail_id, fields=None): if not thumbnail_id: return None - query_filter = {"type": "thumbnail", "_id": _convert_id(thumbnail_id)} + query_filter = {"type": "thumbnail", "_id": convert_id(thumbnail_id)} conn = get_project_connection(project_name) return conn.find_one(query_filter, _prepare_fields(fields)) @@ -1444,7 +1464,7 @@ def get_workfile_info( query_filter = { "type": "workfile", - "parent": _convert_id(asset_id), + "parent": convert_id(asset_id), "task_name": task_name, "filename": filename } From a0b33deda33c1777f9cd2dd62c6dac6a535167fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 18:18:33 +0200 Subject: [PATCH 0870/2550] added functions to get linked ids --- openpype/client/__init__.py | 11 ++ openpype/client/entity_links.py | 232 ++++++++++++++++++++++++++++++++ 2 files changed, 243 insertions(+) create mode 100644 openpype/client/entity_links.py diff --git a/openpype/client/__init__.py b/openpype/client/__init__.py index 64a82334d9..d080425e3c 100644 --- a/openpype/client/__init__.py +++ b/openpype/client/__init__.py @@ -45,6 +45,13 @@ from .entities import ( get_workfile_info, ) +from .entity_links import ( + get_linked_asset_ids, + get_linked_assets, + get_linked_representation_ids, +) + + __all__ = ( "OpenPypeMongoConnection", @@ -88,4 +95,8 @@ __all__ = ( "get_thumbnail_id_from_source", "get_workfile_info", + + "get_linked_asset_ids", + "get_linked_assets", + "get_linked_representation_ids", ) diff --git a/openpype/client/entity_links.py b/openpype/client/entity_links.py new file mode 100644 index 0000000000..66214f469c --- /dev/null +++ b/openpype/client/entity_links.py @@ -0,0 +1,232 @@ +from .mongo import get_project_connection +from .entities import ( + get_assets, + get_asset_by_id, + get_representation_by_id, + convert_id, +) + + +def 
get_linked_asset_ids(project_name, asset_doc=None, asset_id=None): + """Extract linked asset ids from asset document. + + One of asset document or asset id must be passed. + + Note: + Asset links now works only from asset to assets. + + Args: + asset_doc (dict): Asset document from DB. + + Returns: + List[Union[ObjectId, str]]: Asset ids of input links. + """ + + output = [] + if not asset_doc and not asset_id: + return output + + if not asset_doc: + asset_doc = get_asset_by_id( + project_name, asset_id, fields=["data.inputLinks"] + ) + + input_links = asset_doc["data"].get("inputLinks") + if not input_links: + return output + + for item in input_links: + # Backwards compatibility for "_id" key which was replaced with + # "id" + if "_id" in item: + link_id = item["_id"] + else: + link_id = item["id"] + output.append(link_id) + return output + + +def get_linked_assets( + project_name, asset_doc=None, asset_id=None, fields=None +): + """Return linked assets based on passed asset document. + + One of asset document or asset id must be passed. + + Args: + project_name (str): Name of project where to look for queried entities. + asset_doc (Dict[str, Any]): Asset document from database. + asset_id (Union[ObjectId, str]): Asset id. Can be used instead of + asset document. + fields (Iterable[str]): Fields that should be returned. All fields are + returned if 'None' is passed. + + Returns: + List[Dict[str, Any]]: Asset documents of input links for passed + asset doc. + """ + + if not asset_doc: + if not asset_id: + return [] + asset_doc = get_asset_by_id( + project_name, + asset_id, + fields=["data.inputLinks"] + ) + if not asset_doc: + return [] + + link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc) + if not link_ids: + return [] + + return list(get_assets(project_name, asset_ids=link_ids, fields=fields)) + + +def get_linked_representation_id( + project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None +): + """Returns list of linked ids of particular type (if provided). + + One of representation document or representation id must be passed. + Note: + Representation links now works only from representation through version + back to representations. + + Args: + project_name (str): Name of project where look for links. + repre_doc (Dict[str, Any]): Representation document. + repre_id (Union[ObjectId, str]): Representation id. + link_type (str): Type of link (e.g. 'reference', ...). + max_depth (int): Limit recursion level. Default: 0 + + Returns: + List[ObjectId] Linked representation ids. + """ + + if repre_doc: + repre_id = repre_doc["_id"] + + if repre_id: + repre_id = convert_id(repre_id) + + if not repre_id and not repre_doc: + return [] + + version_id = None + if repre_doc: + version_id = repre_doc.get("parent") + + if not version_id: + repre_doc = get_representation_by_id( + project_name, repre_id, fields=["parent"] + ) + version_id = repre_doc["parent"] + + if not version_id: + return [] + + if max_depth is None: + max_depth = 0 + + match = { + "_id": version_id, + "type": {"$in": ["version", "hero_version"]} + } + + graph_lookup = { + "from": project_name, + "startWith": "$data.inputLinks.id", + "connectFromField": "data.inputLinks.id", + "connectToField": "_id", + "as": "outputs_recursive", + "depthField": "depth" + } + if max_depth != 0: + # We offset by -1 since 0 basically means no recursion + # but the recursion only happens after the initial lookup + # for outputs. 
+ graph_lookup["maxDepth"] = max_depth - 1 + + query_pipeline = [ + # Match + {"$match": match}, + # Recursive graph lookup for inputs + {"$graphLookup": graph_lookup} + ] + + conn = get_project_connection(project_name) + result = conn.aggregate(query_pipeline) + referenced_version_ids = _process_referenced_pipeline_result( + result, link_type + ) + if not referenced_version_ids: + return [] + + ref_ids = conn.distinct( + "_id", + filter={ + "parent": {"$in": list(referenced_version_ids)}, + "type": "representation" + } + ) + + return list(ref_ids) + + +def _process_referenced_pipeline_result(result, link_type): + """Filters result from pipeline for particular link_type. + + Pipeline cannot use link_type directly in a query. + + Returns: + (list) + """ + + referenced_version_ids = set() + correctly_linked_ids = set() + for item in result: + input_links = item["data"].get("inputLinks") + if not input_links: + continue + + _filter_input_links( + input_links, + link_type, + correctly_linked_ids + ) + + # outputs_recursive in random order, sort by depth + outputs_recursive = item.get("outputs_recursive") + if not outputs_recursive: + continue + + for output in sorted(outputs_recursive, key=lambda o: o["depth"]): + output_links = output["data"].get("inputLinks") + if not output_links: + continue + + # Leaf + if output["_id"] not in correctly_linked_ids: + continue + + _filter_input_links( + output_links, + link_type, + correctly_linked_ids + ) + + referenced_version_ids.add(output["_id"]) + + return referenced_version_ids + + +def _filter_input_links(input_links, link_type, correctly_linked_ids): + for input_link in input_links: + if link_type and input_link["type"] != link_type: + continue + + link_id = input_link.get("id") or input_link.get("_id") + if link_id is not None: + correctly_linked_ids.add(link_id) From 17cfd18d1aa16720292b8f953db77039776ec47d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 18:19:51 +0200 Subject: [PATCH 0871/2550] use new location of 'get_linked_assets' --- openpype/lib/avalon_context.py | 36 +++++++++---------- .../workfile/abstract_template_loader.py | 6 ++-- openpype/pipeline/workfile/build_workfile.py | 2 +- 3 files changed, 22 insertions(+), 22 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 7d56d039d4..140ea887ff 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -236,6 +236,7 @@ def get_system_general_anatomy_data(system_settings=None): return get_general_template_data(system_settings) +@deprecated("openpype.client.get_linked_asset_ids") def get_linked_asset_ids(asset_doc): """Return linked asset ids for `asset_doc` from DB @@ -244,26 +245,20 @@ def get_linked_asset_ids(asset_doc): Returns: (list): MongoDB ids of input links. 
+ + Deprecated: + Function will be removed after release version 3.16.* """ - output = [] - if not asset_doc: - return output - input_links = asset_doc["data"].get("inputLinks") or [] - if input_links: - for item in input_links: - # Backwards compatibility for "_id" key which was replaced with - # "id" - if "_id" in item: - link_id = item["_id"] - else: - link_id = item["id"] - output.append(link_id) + from openpype.client import get_linked_asset_ids + from openpype.pipeline import legacy_io - return output + project_name = legacy_io.active_project() + + return get_linked_asset_ids(project_name, asset_doc=asset_doc) -@with_pipeline_io +@deprecated("openpype.client.get_linked_assets") def get_linked_assets(asset_doc): """Return linked assets for `asset_doc` from DB @@ -272,14 +267,17 @@ def get_linked_assets(asset_doc): Returns: (list) Asset documents of input links for passed asset doc. + + Deprecated: + Function will be removed after release version 3.15.* """ - link_ids = get_linked_asset_ids(asset_doc) - if not link_ids: - return [] + from openpype.pipeline import legacy_io + from openpype.client import get_linked_assets project_name = legacy_io.active_project() - return list(get_assets(project_name, link_ids)) + + return get_linked_assets(project_name, asset_doc=asset_doc) @deprecated("openpype.client.get_last_version_by_subset_name") diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py index 05a98a1ddc..82a0fd33e9 100644 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ b/openpype/pipeline/workfile/abstract_template_loader.py @@ -5,13 +5,15 @@ import six import logging from functools import reduce -from openpype.client import get_asset_by_name +from openpype.client import ( + get_asset_by_name, + get_linked_assets, +) from openpype.settings import get_project_settings from openpype.lib import ( StringTemplate, Logger, filter_profiles, - get_linked_assets, ) from openpype.pipeline import legacy_io, Anatomy from openpype.pipeline.load import ( diff --git a/openpype/pipeline/workfile/build_workfile.py b/openpype/pipeline/workfile/build_workfile.py index bb6fcb4189..0b8a444436 100644 --- a/openpype/pipeline/workfile/build_workfile.py +++ b/openpype/pipeline/workfile/build_workfile.py @@ -8,10 +8,10 @@ from openpype.client import ( get_subsets, get_last_versions, get_representations, + get_linked_assets, ) from openpype.settings import get_project_settings from openpype.lib import ( - get_linked_assets, filter_profiles, Logger, ) From c8ef54e9ce840c7ad09c3ca2f52d28a8c9f1c622 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 31 Aug 2022 18:22:04 +0200 Subject: [PATCH 0872/2550] use new source of 'get_linked_ids_for_representations' --- openpype/lib/avalon_context.py | 116 +++++------------------------- openpype/plugins/load/add_site.py | 10 +-- 2 files changed, 25 insertions(+), 101 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 140ea887ff..1fff45f262 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1083,9 +1083,10 @@ def get_last_workfile( ) -@with_pipeline_io -def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, - link_type=None, max_depth=0): +@deprecated("openpype.client.get_linked_ids_for_representations") +def get_linked_ids_for_representations( + project_name, repre_ids, dbcon=None, link_type=None, max_depth=0 +): """Returns list of linked ids of particular type (if provided). 
Goes from representations to version, back to representations @@ -1096,104 +1097,25 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, with Session. link_type (str): ['reference', '..] max_depth (int): limit how many levels of recursion + Returns: (list) of ObjectId - linked representations + + Deprecated: + Function will be removed after release version 3.16.* """ - # Create new dbcon if not passed and use passed project name - if not dbcon: - from openpype.pipeline import AvalonMongoDB - dbcon = AvalonMongoDB() - dbcon.Session["AVALON_PROJECT"] = project_name - # Validate that passed dbcon has same project - elif dbcon.Session["AVALON_PROJECT"] != project_name: - raise ValueError("Passed connection does not have right project") + + from openpype.client import get_linked_representation_ids if not isinstance(repre_ids, list): repre_ids = [repre_ids] - version_ids = dbcon.distinct("parent", { - "_id": {"$in": repre_ids}, - "type": "representation" - }) - - match = { - "_id": {"$in": version_ids}, - "type": "version" - } - - graph_lookup = { - "from": project_name, - "startWith": "$data.inputLinks.id", - "connectFromField": "data.inputLinks.id", - "connectToField": "_id", - "as": "outputs_recursive", - "depthField": "depth" - } - if max_depth != 0: - # We offset by -1 since 0 basically means no recursion - # but the recursion only happens after the initial lookup - # for outputs. - graph_lookup["maxDepth"] = max_depth - 1 - - pipeline_ = [ - # Match - {"$match": match}, - # Recursive graph lookup for inputs - {"$graphLookup": graph_lookup} - ] - - result = dbcon.aggregate(pipeline_) - referenced_version_ids = _process_referenced_pipeline_result(result, - link_type) - - ref_ids = dbcon.distinct( - "_id", - filter={ - "parent": {"$in": list(referenced_version_ids)}, - "type": "representation" - } - ) - - return list(ref_ids) - - -def _process_referenced_pipeline_result(result, link_type): - """Filters result from pipeline for particular link_type. - - Pipeline cannot use link_type directly in a query. 
- Returns: - (list) - """ - referenced_version_ids = set() - correctly_linked_ids = set() - for item in result: - input_links = item["data"].get("inputLinks", []) - correctly_linked_ids = _filter_input_links(input_links, - link_type, - correctly_linked_ids) - - # outputs_recursive in random order, sort by depth - outputs_recursive = sorted(item.get("outputs_recursive", []), - key=lambda d: d["depth"]) - - for output in outputs_recursive: - if output["_id"] not in correctly_linked_ids: # leaf - continue - - correctly_linked_ids = _filter_input_links( - output["data"].get("inputLinks", []), - link_type, - correctly_linked_ids) - - referenced_version_ids.add(output["_id"]) - - return referenced_version_ids - - -def _filter_input_links(input_links, link_type, correctly_linked_ids): - for input_link in input_links: - if not link_type or input_link["type"] == link_type: - correctly_linked_ids.add(input_link.get("id") or - input_link.get("_id")) # legacy - - return correctly_linked_ids + output = [] + for repre_id in repre_ids: + output.extend(get_linked_representation_ids( + project_name, + repre_id=repre_id, + link_type=link_type, + max_depth=max_depth + )) + return output diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 55fda55d17..388a871e9d 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -1,6 +1,6 @@ +from openpype.client import get_linked_ids_for_representations from openpype.modules import ModulesManager from openpype.pipeline import load -from openpype.lib.avalon_context import get_linked_ids_for_representations from openpype.modules.sync_server.utils import SiteAlreadyPresentError @@ -45,9 +45,11 @@ class AddSyncSite(load.LoaderPlugin): force=True) if family == "workfile": - links = get_linked_ids_for_representations(project_name, - [repre_id], - link_type="reference") + links = get_linked_ids_for_representations( + project_name, + repre_id=repre_id, + link_type="reference" + ) for link_repre_id in links: try: self.sync_server.add_site(project_name, link_repre_id, From 0399c311db079c74c74fb652cbadca48c83ffcbe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 10:42:17 +0200 Subject: [PATCH 0873/2550] copied 'Extractor' to publish pipeline --- openpype/pipeline/publish/__init__.py | 4 ++++ openpype/pipeline/publish/publish_plugins.py | 25 +++++++++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index 8ba17b2516..8ccfc32a6b 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -17,6 +17,8 @@ from .publish_plugins import ( RepairAction, RepairContextAction, + + Extractor, ) from .lib import ( @@ -58,6 +60,8 @@ __all__ = ( "RepairAction", "RepairContextAction", + "Extractor", + "DiscoverResult", "publish_plugins_discover", "load_help_content_from_plugin", diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 5bb6b5aaff..6e2be1ce2c 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -8,7 +8,8 @@ from openpype.lib import BoolDef from .lib import ( load_help_content_from_plugin, get_errored_instances_from_context, - get_errored_plugins_from_context + get_errored_plugins_from_context, + get_instance_staging_dir, ) @@ -241,3 +242,25 @@ class RepairContextAction(pyblish.api.Action): if plugin in errored_plugins: self.log.info("Attempting fix 
...") plugin.repair(context) + + +class Extractor(pyblish.api.InstancePlugin): + """Extractor base class. + + The extractor base class implements a "staging_dir" function used to + generate a temporary directory for an instance to extract to. + + This temporary directory is generated through `tempfile.mkdtemp()` + + """ + + order = 2.0 + + def staging_dir(self, instance): + """Provide a temporary directory in which to store extracted files + + Upon calling this method the staging directory is stored inside + the instance.data['stagingDir'] + """ + + return get_instance_staging_dir(instance) From 20d9345c48a2bfe7f229dbfd83a18ccb2c613be5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 11:38:29 +0200 Subject: [PATCH 0874/2550] removed comment --- openpype/plugin.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugin.py b/openpype/plugin.py index d3605fcb1e..7e906b4451 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -85,7 +85,6 @@ class InstancePlugin(pyblish.api.InstancePlugin): super(InstancePlugin, self).__init__(*args, **kwargs) -# NOTE: This class is used on so many places I gave up moving it class Extractor(pyblish.api.InstancePlugin): """Extractor base class. From ec782caa00935af6841110c5b153b6e71ff46171 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 11:40:33 +0200 Subject: [PATCH 0875/2550] use new Extractor in global plugins --- openpype/plugins/publish/extract_burnin.py | 12 ++++++------ openpype/plugins/publish/extract_otio_file.py | 5 +++-- .../plugins/publish/extract_otio_review.py | 13 +++++++++---- .../publish/extract_otio_trimming_video.py | 18 ++++++++++++------ .../plugins/publish/extract_review_slate.py | 17 ++++++++++------- .../publish/extract_trim_video_audio.py | 10 ++++++---- 6 files changed, 46 insertions(+), 29 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 88093fb92f..8ddee162e3 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -8,10 +8,10 @@ import shutil import clique import six -import pyblish +import pyblish.api -import openpype -import openpype.api +from openpype import resources, PACKAGE_DIR +from openpype.pipeline import publish from openpype.lib import ( run_openpype_process, @@ -23,7 +23,7 @@ from openpype.lib import ( ) -class ExtractBurnin(openpype.api.Extractor): +class ExtractBurnin(publish.Extractor): """ Extractor to create video with pre-defined burnins from existing extracted video representation. 
@@ -400,7 +400,7 @@ class ExtractBurnin(openpype.api.Extractor): # Use OpenPype default font if not font_filepath: - font_filepath = openpype.api.resources.get_liberation_font_path() + font_filepath = resources.get_liberation_font_path() burnin_options["font"] = font_filepath @@ -981,7 +981,7 @@ class ExtractBurnin(openpype.api.Extractor): """Return path to python script for burnin processing.""" scriptpath = os.path.normpath( os.path.join( - openpype.PACKAGE_DIR, + PACKAGE_DIR, "scripts", "otio_burnin.py" ) diff --git a/openpype/plugins/publish/extract_otio_file.py b/openpype/plugins/publish/extract_otio_file.py index 4d310ce109..c692205d81 100644 --- a/openpype/plugins/publish/extract_otio_file.py +++ b/openpype/plugins/publish/extract_otio_file.py @@ -1,10 +1,11 @@ import os import pyblish.api -import openpype.api import opentimelineio as otio +from openpype.pipeline import publish -class ExtractOTIOFile(openpype.api.Extractor): + +class ExtractOTIOFile(publish.Extractor): """ Extractor export OTIO file """ diff --git a/openpype/plugins/publish/extract_otio_review.py b/openpype/plugins/publish/extract_otio_review.py index 2ce5323468..169ff9e136 100644 --- a/openpype/plugins/publish/extract_otio_review.py +++ b/openpype/plugins/publish/extract_otio_review.py @@ -18,7 +18,12 @@ import os import clique import opentimelineio as otio from pyblish import api -import openpype + +from openpype.lib import ( + get_ffmpeg_tool_path, + run_subprocess, +) +from openpype.pipeline import publish from openpype.pipeline.editorial import ( otio_range_to_frame_range, trim_media_range, @@ -28,7 +33,7 @@ from openpype.pipeline.editorial import ( ) -class ExtractOTIOReview(openpype.api.Extractor): +class ExtractOTIOReview(publish.Extractor): """ Extract OTIO timeline into one concuted image sequence file. 
@@ -334,7 +339,7 @@ class ExtractOTIOReview(openpype.api.Extractor): otio.time.TimeRange: trimmed available range """ # get rendering app path - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") # create path and frame start to destination output_path, out_frame_start = self._get_ffmpeg_output() @@ -397,7 +402,7 @@ class ExtractOTIOReview(openpype.api.Extractor): ]) # execute self.log.debug("Executing: {}".format(" ".join(command))) - output = openpype.api.run_subprocess( + output = run_subprocess( command, logger=self.log ) self.log.debug("Output: {}".format(output)) diff --git a/openpype/plugins/publish/extract_otio_trimming_video.py b/openpype/plugins/publish/extract_otio_trimming_video.py index 19625fa568..70726338aa 100644 --- a/openpype/plugins/publish/extract_otio_trimming_video.py +++ b/openpype/plugins/publish/extract_otio_trimming_video.py @@ -6,18 +6,24 @@ Requires: """ import os -from pyblish import api -import openpype from copy import deepcopy + +import pyblish.api + +from openpype.lib import ( + get_ffmpeg_tool_path, + run_subprocess, +) +from openpype.pipeline import publish from openpype.pipeline.editorial import frames_to_seconds -class ExtractOTIOTrimmingVideo(openpype.api.Extractor): +class ExtractOTIOTrimmingVideo(publish.Extractor): """ Trimming video file longer then required lenght """ - order = api.ExtractorOrder + order = pyblish.api.ExtractorOrder label = "Extract OTIO trim longer video" families = ["trim"] hosts = ["resolve", "hiero", "flame"] @@ -70,7 +76,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor): """ # get rendering app path - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") # create path to destination output_path = self._get_ffmpeg_output(input_file_path) @@ -96,7 +102,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor): # execute self.log.debug("Executing: {}".format(" ".join(command))) - output = openpype.api.run_subprocess( + output = run_subprocess( command, logger=self.log ) self.log.debug("Output: {}".format(output)) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 69043ee261..239d89538c 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -1,19 +1,22 @@ import os -from pprint import pformat import re -import openpype.api -import pyblish +from pprint import pformat + +import pyblish.api + from openpype.lib import ( path_to_subprocess_arg, + run_subprocess, get_ffmpeg_tool_path, get_ffprobe_data, get_ffprobe_streams, get_ffmpeg_codec_args, get_ffmpeg_format_args, ) +from openpype.pipeline import publish -class ExtractReviewSlate(openpype.api.Extractor): +class ExtractReviewSlate(publish.Extractor): """ Will add slate frame at the start of the video files """ @@ -267,7 +270,7 @@ class ExtractReviewSlate(openpype.api.Extractor): self.log.debug( "Slate Executing: {}".format(slate_subprocess_cmd) ) - openpype.api.run_subprocess( + run_subprocess( slate_subprocess_cmd, shell=True, logger=self.log ) @@ -348,7 +351,7 @@ class ExtractReviewSlate(openpype.api.Extractor): "Executing concat filter: {}".format (" ".join(concat_args)) ) - openpype.api.run_subprocess( + run_subprocess( concat_args, logger=self.log ) @@ -533,7 +536,7 @@ class ExtractReviewSlate(openpype.api.Extractor): self.log.debug("Silent Slate Executing: {}".format( " ".join(slate_silent_args) )) - openpype.api.run_subprocess( + 
run_subprocess( slate_silent_args, logger=self.log ) diff --git a/openpype/plugins/publish/extract_trim_video_audio.py b/openpype/plugins/publish/extract_trim_video_audio.py index 06817c4b5a..b951136391 100644 --- a/openpype/plugins/publish/extract_trim_video_audio.py +++ b/openpype/plugins/publish/extract_trim_video_audio.py @@ -1,14 +1,16 @@ import os +from pprint import pformat + import pyblish.api -import openpype.api from openpype.lib import ( get_ffmpeg_tool_path, + run_subprocess, ) -from pprint import pformat +from openpype.pipeline import publish -class ExtractTrimVideoAudio(openpype.api.Extractor): +class ExtractTrimVideoAudio(publish.Extractor): """Trim with ffmpeg "mov" and "wav" files.""" # must be before `ExtractThumbnailSP` @@ -98,7 +100,7 @@ class ExtractTrimVideoAudio(openpype.api.Extractor): joined_args = " ".join(ffmpeg_args) self.log.info(f"Processing: {joined_args}") - openpype.api.run_subprocess( + run_subprocess( ffmpeg_args, logger=self.log ) From e4085f768805e66c1ca58766101891922c0e963f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 11:42:20 +0200 Subject: [PATCH 0876/2550] fix 'path_to_subprocess_arg' usage --- openpype/plugins/publish/extract_review_slate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py index 239d89538c..fca3d96ca6 100644 --- a/openpype/plugins/publish/extract_review_slate.py +++ b/openpype/plugins/publish/extract_review_slate.py @@ -161,7 +161,7 @@ class ExtractReviewSlate(publish.Extractor): input_args.extend([ "-loop", "1", - "-i", openpype.lib.path_to_subprocess_arg(slate_path), + "-i", path_to_subprocess_arg(slate_path), "-r", str(input_frame_rate), "-frames:v", "1", ]) From e5c662d941aa1e09716de2b1ce8bdaf56f8d0ea5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 11:59:48 +0200 Subject: [PATCH 0877/2550] use Extractor from openpype.pipeline --- .../plugins/publish/extract_local_render.py | 14 +++++++++----- .../plugins/publish/extract_save_scene.py | 4 ++-- .../plugins/publish/remove_publish_highlight.py | 6 +++--- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py index 7323a0b125..dc65cee61d 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py @@ -2,14 +2,18 @@ import os import sys import six -import openpype.api +from openpype.lib import ( + get_ffmpeg_tool_path, + run_subprocess, +) +from openpype.pipeline import publish from openpype.hosts.aftereffects.api import get_stub -class ExtractLocalRender(openpype.api.Extractor): +class ExtractLocalRender(publish.Extractor): """Render RenderQueue locally.""" - order = openpype.api.Extractor.order - 0.47 + order = publish.Extractor.order - 0.47 label = "Extract Local Render" hosts = ["aftereffects"] families = ["renderLocal", "render.local"] @@ -53,7 +57,7 @@ class ExtractLocalRender(openpype.api.Extractor): instance.data["representations"] = [repre_data] - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") # Generate thumbnail. 
thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg") @@ -66,7 +70,7 @@ class ExtractLocalRender(openpype.api.Extractor): ] self.log.debug("Thumbnail args:: {}".format(args)) try: - output = openpype.lib.run_subprocess(args) + output = run_subprocess(args) except TypeError: self.log.warning("Error in creating thumbnail") six.reraise(*sys.exc_info()) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py index eb2977309f..343838eb49 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py @@ -1,13 +1,13 @@ import pyblish.api -import openpype.api +from openpype.pipeline import publish from openpype.hosts.aftereffects.api import get_stub class ExtractSaveScene(pyblish.api.ContextPlugin): """Save scene before extraction.""" - order = openpype.api.Extractor.order - 0.48 + order = publish.Extractor.order - 0.48 label = "Extract Save Scene" hosts = ["aftereffects"] diff --git a/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py b/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py index 5f3fcc3089..370f916f04 100644 --- a/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py +++ b/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py @@ -1,8 +1,8 @@ -import openpype.api +from openpype.pipeline import publish from openpype.hosts.aftereffects.api import get_stub -class RemovePublishHighlight(openpype.api.Extractor): +class RemovePublishHighlight(publish.Extractor): """Clean utf characters which are not working in DL Published compositions are marked with unicode icon which causes @@ -10,7 +10,7 @@ class RemovePublishHighlight(openpype.api.Extractor): rendering, add it later back to avoid confusion. 
""" - order = openpype.api.Extractor.order - 0.49 # just before save + order = publish.Extractor.order - 0.49 # just before save label = "Clean render comp" hosts = ["aftereffects"] families = ["render.farm"] From cb8cccc9e8fdeba54d2d32b1bbf947ac40a4b92a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:01:36 +0200 Subject: [PATCH 0878/2550] Use new import source of Extractor --- openpype/hosts/blender/plugins/publish/extract_abc.py | 4 ++-- openpype/hosts/blender/plugins/publish/extract_blend.py | 4 ++-- .../hosts/blender/plugins/publish/extract_blend_animation.py | 4 ++-- openpype/hosts/blender/plugins/publish/extract_camera.py | 4 ++-- openpype/hosts/blender/plugins/publish/extract_fbx.py | 4 ++-- .../hosts/blender/plugins/publish/extract_fbx_animation.py | 4 ++-- openpype/hosts/blender/plugins/publish/extract_layout.py | 4 ++-- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py index a26a92f7e4..1cab9d225b 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc.py @@ -2,12 +2,12 @@ import os import bpy -from openpype import api +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -class ExtractABC(api.Extractor): +class ExtractABC(publish.Extractor): """Extract as ABC.""" label = "Extract ABC" diff --git a/openpype/hosts/blender/plugins/publish/extract_blend.py b/openpype/hosts/blender/plugins/publish/extract_blend.py index 9add633f05..6a001b6f65 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend.py @@ -2,10 +2,10 @@ import os import bpy -import openpype.api +from openpype.pipeline import publish -class ExtractBlend(openpype.api.Extractor): +class ExtractBlend(publish.Extractor): """Extract a blend file.""" label = "Extract Blend" diff --git a/openpype/hosts/blender/plugins/publish/extract_blend_animation.py b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py index 4917223331..477411b73d 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py @@ -2,10 +2,10 @@ import os import bpy -import openpype.api +from openpype.pipeline import publish -class ExtractBlendAnimation(openpype.api.Extractor): +class ExtractBlendAnimation(publish.Extractor): """Extract a blend file.""" label = "Extract Blend" diff --git a/openpype/hosts/blender/plugins/publish/extract_camera.py b/openpype/hosts/blender/plugins/publish/extract_camera.py index b2c7611b58..9fd181825c 100644 --- a/openpype/hosts/blender/plugins/publish/extract_camera.py +++ b/openpype/hosts/blender/plugins/publish/extract_camera.py @@ -2,11 +2,11 @@ import os import bpy -from openpype import api +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin -class ExtractCamera(api.Extractor): +class ExtractCamera(publish.Extractor): """Extract as the camera as FBX.""" label = "Extract Camera" diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx.py b/openpype/hosts/blender/plugins/publish/extract_fbx.py index 3ac66f33a4..0ad797c226 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx.py @@ -2,12 +2,12 @@ import os import bpy -from openpype import api 
+from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -class ExtractFBX(api.Extractor): +class ExtractFBX(publish.Extractor): """Extract as FBX.""" label = "Extract FBX" diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index 4b4a92932a..062b42e99d 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -5,12 +5,12 @@ import bpy import bpy_extras import bpy_extras.anim_utils -from openpype import api +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -class ExtractAnimationFBX(api.Extractor): +class ExtractAnimationFBX(publish.Extractor): """Extract as animation.""" label = "Extract FBX" diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index 8502c6fbd4..f2d04f1178 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -6,12 +6,12 @@ import bpy_extras import bpy_extras.anim_utils from openpype.client import get_representation_by_name +from openpype.pipeline import publish from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -import openpype.api -class ExtractLayout(openpype.api.Extractor): +class ExtractLayout(publish.Extractor): """Extract a layout.""" label = "Extract Layout" From 4e1856eaf677407803d525d8c06f32a947d9a6a3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:02:24 +0200 Subject: [PATCH 0879/2550] Use new import source of Extractor --- openpype/hosts/flame/plugins/publish/extract_otio_file.py | 4 ++-- .../hosts/flame/plugins/publish/extract_subset_resources.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_otio_file.py b/openpype/hosts/flame/plugins/publish/extract_otio_file.py index 7dd75974fc..e5bfa42ce6 100644 --- a/openpype/hosts/flame/plugins/publish/extract_otio_file.py +++ b/openpype/hosts/flame/plugins/publish/extract_otio_file.py @@ -1,10 +1,10 @@ import os import pyblish.api -import openpype.api import opentimelineio as otio +from openpype.pipeline import publish -class ExtractOTIOFile(openpype.api.Extractor): +class ExtractOTIOFile(publish.Extractor): """ Extractor export OTIO file """ diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 3e1e8db986..61b3cd0ab9 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,11 +1,11 @@ import os import re import tempfile -from pprint import pformat from copy import deepcopy import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.flame import api as opfapi from openpype.hosts.flame.api import MediaInfoFile from openpype.pipeline.editorial import ( @@ -15,7 +15,7 @@ from openpype.pipeline.editorial import ( import flame -class ExtractSubsetResources(openpype.api.Extractor): +class ExtractSubsetResources(publish.Extractor): """ Extractor for transcoding files from Flame clip """ From 
47908465197226814aee76e51e74de7fedc8b01a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:03:27 +0200 Subject: [PATCH 0880/2550] Use new import source of Extractor --- openpype/hosts/harmony/plugins/publish/extract_palette.py | 4 ++-- openpype/hosts/harmony/plugins/publish/extract_template.py | 7 +++---- openpype/hosts/harmony/plugins/publish/extract_workfile.py | 4 ++-- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/harmony/plugins/publish/extract_palette.py b/openpype/hosts/harmony/plugins/publish/extract_palette.py index fae778f6b0..69c6e098ff 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_palette.py +++ b/openpype/hosts/harmony/plugins/publish/extract_palette.py @@ -6,10 +6,10 @@ import csv from PIL import Image, ImageDraw, ImageFont import openpype.hosts.harmony.api as harmony -import openpype.api +from openpype.pipeline import publish -class ExtractPalette(openpype.api.Extractor): +class ExtractPalette(publish.Extractor): """Extract palette.""" label = "Extract Palette" diff --git a/openpype/hosts/harmony/plugins/publish/extract_template.py b/openpype/hosts/harmony/plugins/publish/extract_template.py index d25b07bba3..458bf25a3c 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_template.py +++ b/openpype/hosts/harmony/plugins/publish/extract_template.py @@ -3,12 +3,11 @@ import os import shutil -import openpype.api +from openpype.pipeline import publish import openpype.hosts.harmony.api as harmony -import openpype.hosts.harmony -class ExtractTemplate(openpype.api.Extractor): +class ExtractTemplate(publish.Extractor): """Extract the connected nodes to the composite instance.""" label = "Extract Template" @@ -50,7 +49,7 @@ class ExtractTemplate(openpype.api.Extractor): dependencies.remove(instance.data["setMembers"][0]) # Export template. 
- openpype.hosts.harmony.api.export_template( + harmony.export_template( unique_backdrops, dependencies, filepath ) diff --git a/openpype/hosts/harmony/plugins/publish/extract_workfile.py b/openpype/hosts/harmony/plugins/publish/extract_workfile.py index 7f25ec8150..9bb3090558 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_workfile.py +++ b/openpype/hosts/harmony/plugins/publish/extract_workfile.py @@ -4,10 +4,10 @@ import os import shutil from zipfile import ZipFile -import openpype.api +from openpype.pipeline import publish -class ExtractWorkfile(openpype.api.Extractor): +class ExtractWorkfile(publish.Extractor): """Extract and zip complete workfile folder into zip.""" label = "Extract Workfile" From b939556394fa1456355fe952493cbc13e1a2735d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:04:40 +0200 Subject: [PATCH 0881/2550] Use new import source of Extractor --- .../hiero/plugins/publish/extract_clip_effects.py | 5 +++-- .../hosts/hiero/plugins/publish/extract_frames.py | 13 +++++++++---- .../hiero/plugins/publish/extract_thumbnail.py | 5 +++-- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py b/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py index 5b0aa270a7..7fb381ff7e 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py @@ -2,10 +2,11 @@ import os import json import pyblish.api -import openpype + +from openpype.pipeline import publish -class ExtractClipEffects(openpype.api.Extractor): +class ExtractClipEffects(publish.Extractor): """Extract clip effects instances.""" order = pyblish.api.ExtractorOrder diff --git a/openpype/hosts/hiero/plugins/publish/extract_frames.py b/openpype/hosts/hiero/plugins/publish/extract_frames.py index aa3eda2e9f..f865d2fb39 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_frames.py +++ b/openpype/hosts/hiero/plugins/publish/extract_frames.py @@ -1,9 +1,14 @@ import os import pyblish.api -import openpype + +from openpype.lib import ( + get_oiio_tools_path, + run_subprocess, +) +from openpype.pipeline import publish -class ExtractFrames(openpype.api.Extractor): +class ExtractFrames(publish.Extractor): """Extracts frames""" order = pyblish.api.ExtractorOrder @@ -13,7 +18,7 @@ class ExtractFrames(openpype.api.Extractor): movie_extensions = ["mov", "mp4"] def process(self, instance): - oiio_tool_path = openpype.lib.get_oiio_tools_path() + oiio_tool_path = get_oiio_tools_path() staging_dir = self.staging_dir(instance) output_template = os.path.join(staging_dir, instance.data["name"]) sequence = instance.context.data["activeTimeline"] @@ -43,7 +48,7 @@ class ExtractFrames(openpype.api.Extractor): args.extend(["--powc", "0.45,0.45,0.45,1.0"]) args.extend([input_path, "-o", output_path]) - output = openpype.api.run_subprocess(args) + output = run_subprocess(args) failed_output = "oiiotool produced no output." 
if failed_output in output: diff --git a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py index d12e7665bf..e64aa89b26 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py @@ -1,9 +1,10 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish -class ExtractThumnail(openpype.api.Extractor): +class ExtractThumnail(publish.Extractor): """ Extractor for track item's tumnails """ From 2fbfe59d966a4a22dc3534be3f5bb97da08a65c6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:09:18 +0200 Subject: [PATCH 0882/2550] Use new import source of Extractor --- openpype/hosts/houdini/plugins/publish/extract_alembic.py | 5 +++-- openpype/hosts/houdini/plugins/publish/extract_ass.py | 5 +++-- openpype/hosts/houdini/plugins/publish/extract_composite.py | 4 ++-- openpype/hosts/houdini/plugins/publish/extract_hda.py | 5 +++-- .../hosts/houdini/plugins/publish/extract_redshift_proxy.py | 5 +++-- openpype/hosts/houdini/plugins/publish/extract_usd.py | 5 +++-- .../hosts/houdini/plugins/publish/extract_usd_layered.py | 4 ++-- openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py | 5 +++-- 8 files changed, 22 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 83b790407f..758d4c560b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractAlembic(openpype.api.Extractor): +class ExtractAlembic(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Alembic" diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index e56e40df85..a302b451cb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractAss(openpype.api.Extractor): +class ExtractAss(publish.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract Ass" diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index f300b6d28d..23e875f107 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -1,12 +1,12 @@ import os import pyblish.api -import openpype.api +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractComposite(openpype.api.Extractor): +class ExtractComposite(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Composite (Image Sequence)" diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 301dd4e297..7dd03a92b7 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -4,10 +4,11 @@ import os from pprint import pformat import pyblish.api -import openpype.api + 
+from openpype.pipeline import publish -class ExtractHDA(openpype.api.Extractor): +class ExtractHDA(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract HDA" diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index c754d60c59..ca9be64a47 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractRedshiftProxy(openpype.api.Extractor): +class ExtractRedshiftProxy(publish.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract Redshift Proxy" diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 0fc26900fb..78c32affb4 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractUSD(openpype.api.Extractor): +class ExtractUSD(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract USD" diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 80919c023b..f686f712bb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -5,7 +5,6 @@ import sys from collections import deque import pyblish.api -import openpype.api from openpype.client import ( get_asset_by_name, @@ -16,6 +15,7 @@ from openpype.client import ( from openpype.pipeline import ( get_representation_path, legacy_io, + publish, ) import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.hosts.houdini.api.lib import render_rop @@ -160,7 +160,7 @@ def parm_values(overrides): parm.set(value) -class ExtractUSDLayered(openpype.api.Extractor): +class ExtractUSDLayered(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Layered USD" diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 113e1b0bcb..26ec423048 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop -class ExtractVDBCache(openpype.api.Extractor): +class ExtractVDBCache(publish.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract VDB Cache" From b61688828e4e65433e7d7aabb68883601b62244a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:11:07 +0200 Subject: [PATCH 0883/2550] Use new import source of Extractor --- .../hosts/maya/plugins/publish/extract_ass.py | 6 ++--- .../maya/plugins/publish/extract_assembly.py | 7 +++-- .../maya/plugins/publish/extract_assproxy.py | 6 ++--- .../plugins/publish/extract_camera_alembic.py | 4 +-- .../publish/extract_camera_mayaScene.py | 4 +-- .../hosts/maya/plugins/publish/extract_fbx.py | 6 ++--- .../maya/plugins/publish/extract_layout.py | 27 ++++++++++--------- 
.../maya/plugins/publish/extract_look.py | 8 +++--- .../plugins/publish/extract_maya_scene_raw.py | 5 ++-- .../maya/plugins/publish/extract_model.py | 4 +-- .../publish/extract_multiverse_look.py | 4 +-- .../plugins/publish/extract_multiverse_usd.py | 4 +-- .../publish/extract_multiverse_usd_comp.py | 4 +-- .../publish/extract_multiverse_usd_over.py | 4 +-- .../maya/plugins/publish/extract_playblast.py | 6 ++--- .../plugins/publish/extract_pointcache.py | 4 +-- .../plugins/publish/extract_redshift_proxy.py | 4 +-- .../plugins/publish/extract_rendersetup.py | 7 ++--- .../hosts/maya/plugins/publish/extract_rig.py | 4 +-- .../maya/plugins/publish/extract_thumbnail.py | 4 +-- .../publish/extract_unreal_skeletalmesh.py | 5 ++-- .../publish/extract_unreal_staticmesh.py | 5 ++-- .../maya/plugins/publish/extract_vrayproxy.py | 4 +-- .../maya/plugins/publish/extract_vrayscene.py | 4 +-- .../plugins/publish/extract_xgen_cache.py | 4 +-- .../plugins/publish/extract_yeti_cache.py | 4 +-- .../maya/plugins/publish/extract_yeti_rig.py | 4 +-- 27 files changed, 76 insertions(+), 76 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 760f410f91..5c21a4ff08 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,12 +1,12 @@ import os -import openpype.api - from maya import cmds + +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractAssStandin(openpype.api.Extractor): +class ExtractAssStandin(publish.Extractor): """Extract the content of the instance to a ass file Things to pay attention to: diff --git a/openpype/hosts/maya/plugins/publish/extract_assembly.py b/openpype/hosts/maya/plugins/publish/extract_assembly.py index 482930b76e..466fe962ab 100644 --- a/openpype/hosts/maya/plugins/publish/extract_assembly.py +++ b/openpype/hosts/maya/plugins/publish/extract_assembly.py @@ -1,14 +1,13 @@ +import os import json -import os - -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import extract_alembic from maya import cmds -class ExtractAssembly(openpype.api.Extractor): +class ExtractAssembly(publish.Extractor): """Produce an alembic of just point positions and normals. 
Positions and normals are preserved, but nothing more, diff --git a/openpype/hosts/maya/plugins/publish/extract_assproxy.py b/openpype/hosts/maya/plugins/publish/extract_assproxy.py index 93720dbb82..4937a28a9e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_assproxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_assproxy.py @@ -3,17 +3,17 @@ import contextlib from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractAssProxy(openpype.api.Extractor): +class ExtractAssProxy(publish.Extractor): """Extract proxy model as Maya Ascii to use as arnold standin """ - order = openpype.api.Extractor.order + 0.2 + order = publish.Extractor.order + 0.2 label = "Ass Proxy (Maya ASCII)" hosts = ["maya"] families = ["ass"] diff --git a/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py b/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py index b744bfd0fe..aa445a0387 100644 --- a/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py +++ b/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -class ExtractCameraAlembic(openpype.api.Extractor): +class ExtractCameraAlembic(publish.Extractor): """Extract a Camera as Alembic. The cameras gets baked to world space by default. Only when the instance's diff --git a/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py b/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py index 8d6c4b5f3c..7467fa027d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py +++ b/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py @@ -5,7 +5,7 @@ import itertools from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib @@ -78,7 +78,7 @@ def unlock(plug): cmds.disconnectAttr(source, destination) -class ExtractCameraMayaScene(openpype.api.Extractor): +class ExtractCameraMayaScene(publish.Extractor): """Extract a Camera as Maya Scene. This will create a duplicate of the camera that will be baked *with* diff --git a/openpype/hosts/maya/plugins/publish/extract_fbx.py b/openpype/hosts/maya/plugins/publish/extract_fbx.py index fbbe8e06b0..9af3acef65 100644 --- a/openpype/hosts/maya/plugins/publish/extract_fbx.py +++ b/openpype/hosts/maya/plugins/publish/extract_fbx.py @@ -4,13 +4,13 @@ import os from maya import cmds # noqa import maya.mel as mel # noqa import pyblish.api -import openpype.api -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline import publish +from openpype.hosts.maya.api.lib import maintained_selection from openpype.hosts.maya.api import fbx -class ExtractFBX(openpype.api.Extractor): +class ExtractFBX(publish.Extractor): """Extract FBX from Maya. 
This extracts reproducible FBX exports ignoring any of the diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 991217684a..0f499b09b1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -5,13 +5,11 @@ import json from maya import cmds from maya.api import OpenMaya as om -from bson.objectid import ObjectId - -from openpype.pipeline import legacy_io -import openpype.api +from openpype.client import get_representation_by_id +from openpype.pipeline import legacy_io, publish -class ExtractLayout(openpype.api.Extractor): +class ExtractLayout(publish.Extractor): """Extract a layout.""" label = "Extract Layout" @@ -30,6 +28,8 @@ class ExtractLayout(openpype.api.Extractor): instance.data["representations"] = [] json_data = [] + # TODO representation queries can be refactored to be faster + project_name = legacy_io.active_project() for asset in cmds.sets(str(instance), query=True): # Find the container @@ -43,11 +43,11 @@ class ExtractLayout(openpype.api.Extractor): representation_id = cmds.getAttr(f"{container}.representation") - representation = legacy_io.find_one( - { - "type": "representation", - "_id": ObjectId(representation_id) - }, projection={"parent": True, "context.family": True}) + representation = get_representation_by_id( + project_name, + representation_id, + fields=["parent", "context.family"] + ) self.log.info(representation) @@ -102,9 +102,10 @@ class ExtractLayout(openpype.api.Extractor): for i in range(0, len(t_matrix_list), row_length): t_matrix.append(t_matrix_list[i:i + row_length]) - json_element["transform_matrix"] = [] - for row in t_matrix: - json_element["transform_matrix"].append(list(row)) + json_element["transform_matrix"] = [ + list(row) + for row in t_matrix + ] basis_list = [ 1, 0, 0, 0, diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index ce3b265566..91b0da75c6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -13,8 +13,8 @@ from maya import cmds # noqa import pyblish.api -import openpype.api -from openpype.pipeline import legacy_io +from openpype.lib import source_hash +from openpype.pipeline import legacy_io, publish from openpype.hosts.maya.api import lib # Modes for transfer @@ -161,7 +161,7 @@ def no_workspace_dir(): os.rmdir(fake_workspace_dir) -class ExtractLook(openpype.api.Extractor): +class ExtractLook(publish.Extractor): """Extract Look (Maya Scene + JSON) Only extracts the sets (shadingEngines and alike) alongside a .json file @@ -505,7 +505,7 @@ class ExtractLook(openpype.api.Extractor): args = [] if do_maketx: args.append("maketx") - texture_hash = openpype.api.source_hash(filepath, *args) + texture_hash = source_hash(filepath, *args) # If source has been published before with the same settings, # then don't reprocess but hardlink from the original diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py index 3a47cdadb5..3769ec3605 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py @@ -4,12 +4,11 @@ import os from maya import cmds -import openpype.api from openpype.hosts.maya.api.lib import maintained_selection -from openpype.pipeline import AVALON_CONTAINER_ID +from 
openpype.pipeline import AVALON_CONTAINER_ID, publish -class ExtractMayaSceneRaw(openpype.api.Extractor): +class ExtractMayaSceneRaw(publish.Extractor): """Extract as Maya Scene (raw). This will preserve all references, construction history, etc. diff --git a/openpype/hosts/maya/plugins/publish/extract_model.py b/openpype/hosts/maya/plugins/publish/extract_model.py index 0282d1e9c8..7c8c3a2981 100644 --- a/openpype/hosts/maya/plugins/publish/extract_model.py +++ b/openpype/hosts/maya/plugins/publish/extract_model.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -class ExtractModel(openpype.api.Extractor): +class ExtractModel(publish.Extractor): """Extract as Model (Maya Scene). Only extracts contents based on the original "setMembers" data to ensure diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py index 82e2b41929..92137acb95 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseLook(openpype.api.Extractor): +class ExtractMultiverseLook(publish.Extractor): """Extractor for Multiverse USD look data. This will extract: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 3654be7b34..6c352bebe6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -3,11 +3,11 @@ import six from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseUsd(openpype.api.Extractor): +class ExtractMultiverseUsd(publish.Extractor): """Extractor for Multiverse USD Asset data. This will extract settings for a Multiverse Write Asset operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index ad9303657f..a62729c198 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseUsdComposition(openpype.api.Extractor): +class ExtractMultiverseUsdComposition(publish.Extractor): """Extractor of Multiverse USD Composition data. 
This will extract settings for a Multiverse Write Composition operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index d44e3878b8..0628623e88 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -1,12 +1,12 @@ import os -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection from maya import cmds -class ExtractMultiverseUsdOverride(openpype.api.Extractor): +class ExtractMultiverseUsdOverride(publish.Extractor): """Extractor for Multiverse USD Override data. This will extract settings for a Multiverse Write Override operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 871adda0c3..81fdba2f98 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -1,18 +1,16 @@ import os -import glob -import contextlib import clique import capture +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -import openpype.api from maya import cmds import pymel.core as pm -class ExtractPlayblast(openpype.api.Extractor): +class ExtractPlayblast(publish.Extractor): """Extract viewport playblast. Takes review camera and creates review Quicktime video based on viewport diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index bf6feecef3..7c1c6d5c12 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -2,7 +2,7 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( extract_alembic, suspended_refresh, @@ -11,7 +11,7 @@ from openpype.hosts.maya.api.lib import ( ) -class ExtractAlembic(openpype.api.Extractor): +class ExtractAlembic(publish.Extractor): """Produce an alembic of just point positions and normals. 
Positions and normals, uvs, creases are preserved, but nothing more, diff --git a/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py index 23cac9190d..4377275635 100644 --- a/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractRedshiftProxy(openpype.api.Extractor): +class ExtractRedshiftProxy(publish.Extractor): """Extract the content of the instance to a redshift proxy file.""" label = "Redshift Proxy (.rs)" diff --git a/openpype/hosts/maya/plugins/publish/extract_rendersetup.py b/openpype/hosts/maya/plugins/publish/extract_rendersetup.py index 6bdd5f590e..5970c038a4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_rendersetup.py +++ b/openpype/hosts/maya/plugins/publish/extract_rendersetup.py @@ -1,10 +1,11 @@ -import json import os -import openpype.api +import json + import maya.app.renderSetup.model.renderSetup as renderSetup +from openpype.pipeline import publish -class ExtractRenderSetup(openpype.api.Extractor): +class ExtractRenderSetup(publish.Extractor): """ Produce renderSetup template file diff --git a/openpype/hosts/maya/plugins/publish/extract_rig.py b/openpype/hosts/maya/plugins/publish/extract_rig.py index 53c1eeb671..c71a2f710d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_rig.py +++ b/openpype/hosts/maya/plugins/publish/extract_rig.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractRig(openpype.api.Extractor): +class ExtractRig(publish.Extractor): """Extract rig as Maya Scene.""" label = "Extract Rig (Maya Scene)" diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 9380da5128..854301ea48 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -3,14 +3,14 @@ import glob import capture +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -import openpype.api from maya import cmds import pymel.core as pm -class ExtractThumbnail(openpype.api.Extractor): +class ExtractThumbnail(publish.Extractor): """Extract viewport thumbnail. Takes review camera and creates a thumbnail based on viewport diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py index 7ef7f2f181..258120db2f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py @@ -6,7 +6,8 @@ from contextlib import contextmanager from maya import cmds # noqa import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.maya.api import fbx @@ -20,7 +21,7 @@ def renamed(original_name, renamed_name): cmds.rename(renamed_name, original_name) -class ExtractUnrealSkeletalMesh(openpype.api.Extractor): +class ExtractUnrealSkeletalMesh(publish.Extractor): """Extract Unreal Skeletal Mesh as FBX from Maya. 
""" order = pyblish.api.ExtractorOrder - 0.1 diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py index 69d51f9ff1..44f0615a27 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py @@ -5,7 +5,8 @@ import os from maya import cmds # noqa import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( parent_nodes, maintained_selection @@ -13,7 +14,7 @@ from openpype.hosts.maya.api.lib import ( from openpype.hosts.maya.api import fbx -class ExtractUnrealStaticMesh(openpype.api.Extractor): +class ExtractUnrealStaticMesh(publish.Extractor): """Extract Unreal Static Mesh as FBX from Maya. """ order = pyblish.api.ExtractorOrder - 0.1 diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py b/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py index 562ca078e1..38bf02245a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractVRayProxy(openpype.api.Extractor): +class ExtractVRayProxy(publish.Extractor): """Extract the content of the instance to a vrmesh file Things to pay attention to: diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py index 5d41697e5f..8442df1611 100644 --- a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py @@ -3,14 +3,14 @@ import os import re -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.render_setup_tools import export_in_rs_layer from openpype.hosts.maya.api.lib import maintained_selection from maya import cmds -class ExtractVrayscene(openpype.api.Extractor): +class ExtractVrayscene(publish.Extractor): """Extractor for vrscene.""" label = "VRay Scene (.vrscene)" diff --git a/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py b/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py index 5728682abe..77350f343e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py +++ b/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py @@ -2,14 +2,14 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( suspended_refresh, maintained_selection ) -class ExtractXgenCache(openpype.api.Extractor): +class ExtractXgenCache(publish.Extractor): """Produce an alembic of just xgen interactive groom """ diff --git a/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py b/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py index cf6db00e9a..b61f599cab 100644 --- a/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py +++ b/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py @@ -3,10 +3,10 @@ import json from maya import cmds -import openpype.api +from openpype.pipeline import publish -class ExtractYetiCache(openpype.api.Extractor): +class ExtractYetiCache(publish.Extractor): """Producing Yeti cache files using scene time range. This will extract Yeti cache file sequence and fur settings. 
diff --git a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py index 6e21bffa4e..1d0c5e88c3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py +++ b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py @@ -7,7 +7,7 @@ import contextlib from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib @@ -90,7 +90,7 @@ def yetigraph_attribute_values(assumed_destination, resources): pass -class ExtractYetiRig(openpype.api.Extractor): +class ExtractYetiRig(publish.Extractor): """Extract the Yeti rig to a Maya Scene and write the Yeti rig data.""" label = "Extract Yeti Rig" From da8697d8821d9396d4a401e80be1ae6cdeb460b6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:52:30 +0200 Subject: [PATCH 0884/2550] Use new import source of Extractor --- .../hosts/nuke/plugins/publish/extract_backdrop.py | 4 ++-- openpype/hosts/nuke/plugins/publish/extract_camera.py | 5 +++-- openpype/hosts/nuke/plugins/publish/extract_gizmo.py | 4 ++-- openpype/hosts/nuke/plugins/publish/extract_model.py | 5 +++-- .../hosts/nuke/plugins/publish/extract_render_local.py | 10 ++++++---- .../hosts/nuke/plugins/publish/extract_review_data.py | 7 ++++--- .../nuke/plugins/publish/extract_review_data_lut.py | 5 +++-- .../nuke/plugins/publish/extract_review_data_mov.py | 7 ++++--- .../hosts/nuke/plugins/publish/extract_slate_frame.py | 4 ++-- .../hosts/nuke/plugins/publish/extract_thumbnail.py | 5 +++-- 10 files changed, 32 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py index 0a2df0898e..d1e5c4cc5a 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py @@ -4,7 +4,7 @@ import nuke import pyblish.api -import openpype +from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import ( maintained_selection, reset_selection, @@ -12,7 +12,7 @@ from openpype.hosts.nuke.api.lib import ( ) -class ExtractBackdropNode(openpype.api.Extractor): +class ExtractBackdropNode(publish.Extractor): """Extracting content of backdrop nodes Will create nuke script only with containing nodes. 
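Taken together, these extractor patches converge on one shape: `pyblish.api` supplies the ordering constants, `openpype.pipeline.publish.Extractor` is the base class, and the staging directory plus `instance.data["representations"]` carry the output. Below is a minimal, self-contained skeleton of that shape; the class name, label, family and text output are placeholders and no such plugin exists in the repository.

```python
# Illustrative extractor skeleton following the import style of these patches.
# Class name, label, family and the text "representation" are placeholders.
import os

import pyblish.api

from openpype.pipeline import publish


class ExtractExampleText(publish.Extractor):
    """Write a small text file into the staging dir as a representation."""

    order = pyblish.api.ExtractorOrder
    label = "Extract Example Text"
    families = ["example"]

    def process(self, instance):
        staging_dir = self.staging_dir(instance)
        file_name = "{}.txt".format(instance.data["name"])

        with open(os.path.join(staging_dir, file_name), "w") as stream:
            stream.write(instance.data["name"])

        representation = {
            "name": "txt",
            "ext": "txt",
            "files": file_name,
            "stagingDir": staging_dir,
        }
        instance.data.setdefault("representations", []).append(representation)
```

Several plugins in the series nudge `order` by a small offset from this base (for example `publish.Extractor.order - 0.48` for the scene-save steps, or `pyblish.api.ExtractorOrder + 0.1` for the Houdini ROP exports) so the relative sequence survives the base-class change.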
diff --git a/openpype/hosts/nuke/plugins/publish/extract_camera.py b/openpype/hosts/nuke/plugins/publish/extract_camera.py index 54f65a0be3..b751bfab03 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_camera.py +++ b/openpype/hosts/nuke/plugins/publish/extract_camera.py @@ -5,11 +5,12 @@ from pprint import pformat import nuke import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import maintained_selection -class ExtractCamera(openpype.api.Extractor): +class ExtractCamera(publish.Extractor): """ 3D camera exctractor """ label = 'Exctract Camera' diff --git a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py index 2d5bfdeb5e..3047ad6724 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py @@ -3,7 +3,7 @@ import nuke import pyblish.api -import openpype +from openpype.pipeline import publish from openpype.hosts.nuke.api import utils as pnutils from openpype.hosts.nuke.api.lib import ( maintained_selection, @@ -12,7 +12,7 @@ from openpype.hosts.nuke.api.lib import ( ) -class ExtractGizmo(openpype.api.Extractor): +class ExtractGizmo(publish.Extractor): """Extracting Gizmo (Group) node Will create nuke script only with the Gizmo node. diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index 0375263338..d82cb3110b 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -2,14 +2,15 @@ import os from pprint import pformat import nuke import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.nuke.api.lib import ( maintained_selection, select_nodes ) -class ExtractModel(openpype.api.Extractor): +class ExtractModel(publish.Extractor): """ 3D model exctractor """ label = 'Exctract Model' diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 8879f0c999..843d588786 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -1,11 +1,13 @@ -import pyblish.api -import nuke import os -import openpype + +import pyblish.api import clique +import nuke + +from openpype.pipeline import publish -class NukeRenderLocal(openpype.api.Extractor): +class NukeRenderLocal(publish.Extractor): # TODO: rewrite docstring to nuke """Render the current Nuke composition locally. 
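The other half of the migration shows up in the ffmpeg-driven extractors (the OTIO review/trim and review-slate plugins at the start of this series, and again later in the Photoshop review extractor): the process helpers are imported by name from `openpype.lib` rather than reached as `openpype.api.run_subprocess` or `openpype.lib.get_ffmpeg_tool_path` attributes. Below is a hedged sketch of such a call site; the wrapper function, the `-y` flag and the file names are placeholders, while `get_ffmpeg_tool_path("ffmpeg")`, the logged command string and `run_subprocess(..., logger=...)` mirror the diffs.

```python
# Illustrative ffmpeg call site; only the two imports and the helper calls
# follow the pattern from these patches, everything else is a placeholder.
from openpype.lib import (
    get_ffmpeg_tool_path,
    run_subprocess,
)


def extract_single_frame(source_path, output_path, logger=None):
    """Grab one frame from a movie, roughly as the thumbnail extractors do."""
    ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
    command = [
        ffmpeg_path,
        "-y",
        "-i", source_path,
        "-frames:v", "1",
        output_path,
    ]
    if logger is not None:
        logger.debug("Executing: {}".format(" ".join(command)))
    return run_subprocess(command, logger=logger)
```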
diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data.py b/openpype/hosts/nuke/plugins/publish/extract_review_data.py index 38a8140cff..3c85b21b08 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data.py @@ -1,10 +1,11 @@ import os -import pyblish.api -import openpype from pprint import pformat +import pyblish.api + +from openpype.pipeline import publish -class ExtractReviewData(openpype.api.Extractor): +class ExtractReviewData(publish.Extractor): """Extracts review tag into available representation """ diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py index 4cf2fd7d9f..67779e9599 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py @@ -1,11 +1,12 @@ import os import pyblish.api -import openpype + +from openpype.pipeline import publish from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import maintained_selection -class ExtractReviewDataLut(openpype.api.Extractor): +class ExtractReviewDataLut(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index fc16e189fb..3fcfc2a4b5 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -1,13 +1,14 @@ import os -from pprint import pformat import re +from pprint import pformat import pyblish.api -import openpype + +from openpype.pipeline import publish from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import maintained_selection -class ExtractReviewDataMov(openpype.api.Extractor): +class ExtractReviewDataMov(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index b5cad143db..e7197b4fa8 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -6,7 +6,7 @@ import copy import pyblish.api import six -import openpype +from openpype.pipeline import publish from openpype.hosts.nuke.api import ( maintained_selection, duplicate_node, @@ -14,7 +14,7 @@ from openpype.hosts.nuke.api import ( ) -class ExtractSlateFrame(openpype.api.Extractor): +class ExtractSlateFrame(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py diff --git a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py index 2a919051d2..19eae9638b 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py @@ -2,7 +2,8 @@ import sys import os import nuke import pyblish.api -import openpype + +from openpype.pipeline import publish from openpype.hosts.nuke.api import ( maintained_selection, get_view_process_node @@ -13,7 +14,7 @@ if sys.version_info[0] >= 3: unicode = str -class ExtractThumbnail(openpype.api.Extractor): +class ExtractThumbnail(publish.Extractor): """Extracts movie and thumbnail with baked in luts must be 
run after extract_render_local.py From a04188fd7e53bece6aa2ae0b60384760162d8bf6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:53:25 +0200 Subject: [PATCH 0885/2550] Use new import source of Extractor --- openpype/hosts/unreal/plugins/publish/extract_camera.py | 4 ++-- openpype/hosts/unreal/plugins/publish/extract_layout.py | 7 ++----- openpype/hosts/unreal/plugins/publish/extract_look.py | 4 ++-- openpype/hosts/unreal/plugins/publish/extract_render.py | 4 ++-- 4 files changed, 8 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/extract_camera.py b/openpype/hosts/unreal/plugins/publish/extract_camera.py index ce53824563..4e37cc6a86 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_camera.py +++ b/openpype/hosts/unreal/plugins/publish/extract_camera.py @@ -6,10 +6,10 @@ import unreal from unreal import EditorAssetLibrary as eal from unreal import EditorLevelLibrary as ell -import openpype.api +from openpype.pipeline import publish -class ExtractCamera(openpype.api.Extractor): +class ExtractCamera(publish.Extractor): """Extract a camera.""" label = "Extract Camera" diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py index 8924df36a7..cac7991f00 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_layout.py +++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py @@ -3,18 +3,15 @@ import os import json import math -from bson.objectid import ObjectId - import unreal from unreal import EditorLevelLibrary as ell from unreal import EditorAssetLibrary as eal from openpype.client import get_representation_by_name -import openpype.api -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, publish -class ExtractLayout(openpype.api.Extractor): +class ExtractLayout(publish.Extractor): """Extract a layout.""" label = "Extract Layout" diff --git a/openpype/hosts/unreal/plugins/publish/extract_look.py b/openpype/hosts/unreal/plugins/publish/extract_look.py index ea39949417..f999ad8651 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_look.py +++ b/openpype/hosts/unreal/plugins/publish/extract_look.py @@ -5,10 +5,10 @@ import os import unreal from unreal import MaterialEditingLibrary as mat_lib -import openpype.api +from openpype.pipeline import publish -class ExtractLook(openpype.api.Extractor): +class ExtractLook(publish.Extractor): """Extract look.""" label = "Extract Look" diff --git a/openpype/hosts/unreal/plugins/publish/extract_render.py b/openpype/hosts/unreal/plugins/publish/extract_render.py index 37fe7e916f..8ff38fbee0 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_render.py +++ b/openpype/hosts/unreal/plugins/publish/extract_render.py @@ -2,10 +2,10 @@ from pathlib import Path import unreal -import openpype.api +from openpype.pipeline import publish -class ExtractRender(openpype.api.Extractor): +class ExtractRender(publish.Extractor): """Extract render.""" label = "Extract Render" From 72f254dd036c9bc1484a9d6d5092278e464d22a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:54:01 +0200 Subject: [PATCH 0886/2550] Use new import source of Extractor --- openpype/hosts/photoshop/api/README.md | 6 +++--- .../photoshop/plugins/publish/extract_image.py | 4 ++-- .../photoshop/plugins/publish/extract_review.py | 15 +++++++++------ .../plugins/publish/extract_save_scene.py | 6 +++--- 4 files changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/photoshop/api/README.md 
b/openpype/hosts/photoshop/api/README.md index 80792a4da0..4a36746cb2 100644 --- a/openpype/hosts/photoshop/api/README.md +++ b/openpype/hosts/photoshop/api/README.md @@ -127,11 +127,11 @@ class CollectInstances(pyblish.api.ContextPlugin): ```python import os -import openpype.api -from avalon import photoshop +from openpype.pipeline import publish +from openpype.hosts.photoshop import api as photoshop -class ExtractImage(openpype.api.Extractor): +class ExtractImage(publish.Extractor): """Produce a flattened image file from instance This plug-in takes into account only the layers in the group. diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py index a133e33409..c84a958960 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_image.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py @@ -1,10 +1,10 @@ import os -import openpype.api +from openpype.pipeline import publish from openpype.hosts.photoshop import api as photoshop -class ExtractImage(openpype.api.Extractor): +class ExtractImage(publish.Extractor): """Produce a flattened image file from instance This plug-in takes into account only the layers in the group. diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 5d37c86ed8..e5fee311f8 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -2,12 +2,15 @@ import os import shutil from PIL import Image -import openpype.api -import openpype.lib +from openpype.lib import ( + run_subprocess, + get_ffmpeg_tool_path, +) +from openpype.pipeline import publish from openpype.hosts.photoshop import api as photoshop -class ExtractReview(openpype.api.Extractor): +class ExtractReview(publish.Extractor): """ Produce a flattened or sequence image files from all 'image' instances. 
@@ -72,7 +75,7 @@ class ExtractReview(openpype.api.Extractor): }) processed_img_names = [img_list] - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") instance.data["stagingDir"] = staging_dir @@ -93,7 +96,7 @@ class ExtractReview(openpype.api.Extractor): thumbnail_path ] self.log.debug("thumbnail args:: {}".format(args)) - output = openpype.lib.run_subprocess(args) + output = run_subprocess(args) instance.data["representations"].append({ "name": "thumbnail", @@ -116,7 +119,7 @@ class ExtractReview(openpype.api.Extractor): mov_path ] self.log.debug("mov args:: {}".format(args)) - output = openpype.lib.run_subprocess(args) + output = run_subprocess(args) self.log.debug(output) instance.data["representations"].append({ "name": "mov", diff --git a/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py b/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py index 03086f389f..aa900fec9f 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_save_scene.py @@ -1,11 +1,11 @@ -import openpype.api +from openpype.pipeline import publish from openpype.hosts.photoshop import api as photoshop -class ExtractSaveScene(openpype.api.Extractor): +class ExtractSaveScene(publish.Extractor): """Save scene before extraction.""" - order = openpype.api.Extractor.order - 0.49 + order = publish.Extractor.order - 0.49 label = "Extract Save Scene" hosts = ["photoshop"] families = ["workfile"] From b52db9224f6ffb1a0dfe87aae29a69bfd811e431 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:54:29 +0200 Subject: [PATCH 0887/2550] Use new import source of Extractor --- openpype/hosts/resolve/plugins/publish/extract_workfile.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/resolve/plugins/publish/extract_workfile.py b/openpype/hosts/resolve/plugins/publish/extract_workfile.py index ea8f19cd8c..535f879b58 100644 --- a/openpype/hosts/resolve/plugins/publish/extract_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/extract_workfile.py @@ -1,10 +1,11 @@ import os import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.resolve.api.lib import get_project_manager -class ExtractWorkfile(openpype.api.Extractor): +class ExtractWorkfile(publish.Extractor): """ Extractor export DRP workfile file representation """ From aaff1525a06177127a23d6773c0ac84a1970369b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:59:14 +0200 Subject: [PATCH 0888/2550] remove unused teshost --- openpype/hosts/testhost/README.md | 16 -- openpype/hosts/testhost/__init__.py | 0 openpype/hosts/testhost/api/__init__.py | 43 ----- openpype/hosts/testhost/api/context.json | 1 - openpype/hosts/testhost/api/instances.json | 108 ------------ openpype/hosts/testhost/api/pipeline.py | 155 ------------------ .../testhost/plugins/create/auto_creator.py | 75 --------- .../testhost/plugins/create/test_creator_1.py | 94 ----------- .../testhost/plugins/create/test_creator_2.py | 74 --------- .../plugins/publish/collect_context.py | 34 ---- .../plugins/publish/collect_instance_1.py | 52 ------ .../publish/validate_context_with_error.py | 57 ------- .../plugins/publish/validate_with_error.py | 57 ------- openpype/hosts/testhost/run_publish.py | 68 -------- 14 files changed, 834 deletions(-) delete mode 100644 openpype/hosts/testhost/README.md delete mode 100644 openpype/hosts/testhost/__init__.py delete 
mode 100644 openpype/hosts/testhost/api/__init__.py delete mode 100644 openpype/hosts/testhost/api/context.json delete mode 100644 openpype/hosts/testhost/api/instances.json delete mode 100644 openpype/hosts/testhost/api/pipeline.py delete mode 100644 openpype/hosts/testhost/plugins/create/auto_creator.py delete mode 100644 openpype/hosts/testhost/plugins/create/test_creator_1.py delete mode 100644 openpype/hosts/testhost/plugins/create/test_creator_2.py delete mode 100644 openpype/hosts/testhost/plugins/publish/collect_context.py delete mode 100644 openpype/hosts/testhost/plugins/publish/collect_instance_1.py delete mode 100644 openpype/hosts/testhost/plugins/publish/validate_context_with_error.py delete mode 100644 openpype/hosts/testhost/plugins/publish/validate_with_error.py delete mode 100644 openpype/hosts/testhost/run_publish.py diff --git a/openpype/hosts/testhost/README.md b/openpype/hosts/testhost/README.md deleted file mode 100644 index f69e02a3b3..0000000000 --- a/openpype/hosts/testhost/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# What is `testhost` -Host `testhost` was created to fake running host for testing of publisher. - -Does not have any proper launch mechanism at the moment. There is python script `./run_publish.py` which will show publisher window. The script requires to set few variables to run. Execution will register host `testhost`, register global publish plugins and register creator and publish plugins from `./plugins`. - -## Data -Created instances and context data are stored into json files inside `./api` folder. Can be easily modified to save them to a different place. - -## Plugins -Test host has few plugins to be able test publishing. - -### Creators -They are just example plugins using functions from `api` to create/remove/update data. One of them is auto creator which means that is triggered on each reset of create context. Others are manual creators both creating the same family. - -### Publishers -Collectors are example plugin to use `get_attribute_defs` to define attributes for specific families or for context. Validators are to test `PublishValidationError`. 
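For readers skimming this removal, the plugin pattern the deleted README points at (UI attributes via `get_attribute_defs`, rich errors via `PublishValidationError`) is summarized below as a minimal sketch distilled from the deleted `collect_context.py` and `validate_with_error.py` further down in this commit. The class names and label strings are placeholders; the sketch is illustrative only and is not code added by this patch.

```python
import pyblish.api
from openpype.pipeline import (
    OpenPypePyblishPluginMixin,
    PublishValidationError,
    attribute_definitions,
)


class ExampleCollector(pyblish.api.ContextPlugin, OpenPypePyblishPluginMixin):
    """Expose a toggle in the publisher UI through attribute definitions."""

    label = "Example Collector"
    order = pyblish.api.CollectorOrder - 0.4
    hosts = ["testhost"]

    @classmethod
    def get_attribute_defs(cls):
        # Values entered in the UI come back under
        # instance.data["publish_attributes"][<plugin class name>]
        return [
            attribute_definitions.BoolDef("test_bool", True, label="Bool input")
        ]

    def process(self, context):
        for instance in context:
            instance.data["source"] = "testhost"


class ExampleValidator(pyblish.api.InstancePlugin):
    """Raise a validation error the publisher UI can display with detail."""

    label = "Example Validator"
    order = pyblish.api.ValidatorOrder
    hosts = ["testhost"]

    def process(self, instance):
        # (message, title, description) mirrors the deleted validators below.
        raise PublishValidationError("Crashing", "Instance error", "Details...")
```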
diff --git a/openpype/hosts/testhost/__init__.py b/openpype/hosts/testhost/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/testhost/api/__init__.py b/openpype/hosts/testhost/api/__init__.py deleted file mode 100644 index a929a891aa..0000000000 --- a/openpype/hosts/testhost/api/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -import logging -import pyblish.api - -from openpype.pipeline import register_creator_plugin_path - -from .pipeline import ( - ls, - list_instances, - update_instances, - remove_instances, - get_context_data, - update_context_data, - get_context_title -) - - -HOST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") - -log = logging.getLogger(__name__) - - -def install(): - log.info("OpenPype - Installing TestHost integration") - pyblish.api.register_host("testhost") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_creator_plugin_path(CREATE_PATH) - - -__all__ = ( - "ls", - "list_instances", - "update_instances", - "remove_instances", - "get_context_data", - "update_context_data", - "get_context_title", - - "install" -) diff --git a/openpype/hosts/testhost/api/context.json b/openpype/hosts/testhost/api/context.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/openpype/hosts/testhost/api/context.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/openpype/hosts/testhost/api/instances.json b/openpype/hosts/testhost/api/instances.json deleted file mode 100644 index d955012514..0000000000 --- a/openpype/hosts/testhost/api/instances.json +++ /dev/null @@ -1,108 +0,0 @@ -[ - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMyVariant", - "version": 1, - "asset": "sq01_sh0010", - "task": "Compositing", - "variant": "myVariant", - "instance_id": "a485f148-9121-46a5-8157-aa64df0fb449", - "creator_attributes": { - "number_key": 10, - "ha": 10 - }, - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": false - } - }, - "creator_identifier": "test_one" - }, - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMyVariant2", - "version": 1, - "asset": "sq01_sh0010", - "task": "Compositing", - "variant": "myVariant2", - "creator_attributes": {}, - "instance_id": "a485f148-9121-46a5-8157-aa64df0fb444", - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - }, - "creator_identifier": "test_one" - }, - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMain", - "version": 1, - "asset": "sq01_sh0010", - "task": "Compositing", - "variant": "Main", - "creator_attributes": {}, - "instance_id": "3607bc95-75f6-4648-a58d-e699f413d09f", - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - }, - "creator_identifier": "test_two" - }, - { - "id": "pyblish.avalon.instance", - "active": true, - "family": "test", - "subset": "testMain2", - "version": 1, - "asset": "sq01_sh0020", - "task": "Compositing", - "variant": "Main2", - "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8eb", - "creator_attributes": {}, - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - }, - "creator_identifier": "test_two" - }, - { - "id": "pyblish.avalon.instance", - "family": "test_three", - "subset": "test_threeMain2", - "active": true, - "version": 1, - 
"asset": "sq01_sh0020", - "task": "Compositing", - "variant": "Main2", - "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8ec", - "creator_attributes": {}, - "publish_attributes": { - "CollectFtrackApi": { - "add_ftrack_family": true - } - } - }, - { - "id": "pyblish.avalon.instance", - "family": "workfile", - "subset": "workfileMain", - "active": true, - "creator_identifier": "workfile", - "version": 1, - "asset": "Alpaca_01", - "task": "modeling", - "variant": "Main", - "instance_id": "7c9ddfc7-9f9c-4c1c-b233-38c966735fb6", - "creator_attributes": {}, - "publish_attributes": {} - } -] \ No newline at end of file diff --git a/openpype/hosts/testhost/api/pipeline.py b/openpype/hosts/testhost/api/pipeline.py deleted file mode 100644 index 1e05f336fb..0000000000 --- a/openpype/hosts/testhost/api/pipeline.py +++ /dev/null @@ -1,155 +0,0 @@ -import os -import json -from openpype.client import get_asset_by_name - - -class HostContext: - instances_json_path = None - context_json_path = None - - @classmethod - def get_context_title(cls): - project_name = os.environ.get("AVALON_PROJECT") - if not project_name: - return "TestHost" - - asset_name = os.environ.get("AVALON_ASSET") - if not asset_name: - return project_name - - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.parents"] - ) - - parents = asset_doc.get("data", {}).get("parents") or [] - - hierarchy = [project_name] - hierarchy.extend(parents) - hierarchy.append("{}".format(asset_name)) - task_name = os.environ.get("AVALON_TASK") - if task_name: - hierarchy.append(task_name) - - return "/".join(hierarchy) - - @classmethod - def get_current_dir_filepath(cls, filename): - return os.path.join( - os.path.dirname(os.path.abspath(__file__)), - filename - ) - - @classmethod - def get_instances_json_path(cls): - if cls.instances_json_path is None: - cls.instances_json_path = cls.get_current_dir_filepath( - "instances.json" - ) - return cls.instances_json_path - - @classmethod - def get_context_json_path(cls): - if cls.context_json_path is None: - cls.context_json_path = cls.get_current_dir_filepath( - "context.json" - ) - return cls.context_json_path - - @classmethod - def add_instance(cls, instance): - instances = cls.get_instances() - instances.append(instance) - cls.save_instances(instances) - - @classmethod - def save_instances(cls, instances): - json_path = cls.get_instances_json_path() - with open(json_path, "w") as json_stream: - json.dump(instances, json_stream, indent=4) - - @classmethod - def get_instances(cls): - json_path = cls.get_instances_json_path() - if not os.path.exists(json_path): - instances = [] - with open(json_path, "w") as json_stream: - json.dump(json_stream, instances) - else: - with open(json_path, "r") as json_stream: - instances = json.load(json_stream) - return instances - - @classmethod - def get_context_data(cls): - json_path = cls.get_context_json_path() - if not os.path.exists(json_path): - data = {} - with open(json_path, "w") as json_stream: - json.dump(data, json_stream) - else: - with open(json_path, "r") as json_stream: - data = json.load(json_stream) - return data - - @classmethod - def save_context_data(cls, data): - json_path = cls.get_context_json_path() - with open(json_path, "w") as json_stream: - json.dump(data, json_stream, indent=4) - - -def ls(): - return [] - - -def list_instances(): - return HostContext.get_instances() - - -def update_instances(update_list): - updated_instances = {} - for instance, _changes in update_list: - updated_instances[instance.id] = 
instance.data_to_store() - - instances = HostContext.get_instances() - for instance_data in instances: - instance_id = instance_data["instance_id"] - if instance_id in updated_instances: - new_instance_data = updated_instances[instance_id] - old_keys = set(instance_data.keys()) - new_keys = set(new_instance_data.keys()) - instance_data.update(new_instance_data) - for key in (old_keys - new_keys): - instance_data.pop(key) - - HostContext.save_instances(instances) - - -def remove_instances(instances): - if not isinstance(instances, (tuple, list)): - instances = [instances] - - current_instances = HostContext.get_instances() - for instance in instances: - instance_id = instance.data["instance_id"] - found_idx = None - for idx, _instance in enumerate(current_instances): - if instance_id == _instance["instance_id"]: - found_idx = idx - break - - if found_idx is not None: - current_instances.pop(found_idx) - HostContext.save_instances(current_instances) - - -def get_context_data(): - return HostContext.get_context_data() - - -def update_context_data(data, changes): - HostContext.save_context_data(data) - - -def get_context_title(): - return HostContext.get_context_title() diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py deleted file mode 100644 index 8d59fc3242..0000000000 --- a/openpype/hosts/testhost/plugins/create/auto_creator.py +++ /dev/null @@ -1,75 +0,0 @@ -from openpype.lib import NumberDef -from openpype.client import get_asset_by_name -from openpype.pipeline import ( - legacy_io, - AutoCreator, - CreatedInstance, -) -from openpype.hosts.testhost.api import pipeline - - -class MyAutoCreator(AutoCreator): - identifier = "workfile" - family = "workfile" - - def get_instance_attr_defs(self): - output = [ - NumberDef("number_key", label="Number") - ] - return output - - def collect_instances(self): - for instance_data in pipeline.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - subset_name = instance_data["subset"] - instance = CreatedInstance( - self.family, subset_name, instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - pipeline.update_instances(update_list) - - def create(self): - existing_instance = None - for instance in self.create_context.instances: - if instance.family == self.family: - existing_instance = instance - break - - variant = "Main" - project_name = legacy_io.Session["AVALON_PROJECT"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - host_name = legacy_io.Session["AVALON_APP"] - - if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name - ) - data = { - "asset": asset_name, - "task": task_name, - "variant": variant - } - data.update(self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name - )) - - new_instance = CreatedInstance( - self.family, subset_name, data, self - ) - self._add_instance_to_context(new_instance) - - elif ( - existing_instance["asset"] != asset_name - or existing_instance["task"] != task_name - ): - asset_doc = get_asset_by_name(project_name, asset_name) - subset_name = self.get_subset_name( - variant, task_name, asset_doc, project_name, host_name - ) - existing_instance["asset"] = asset_name - existing_instance["task"] = task_name diff --git 
a/openpype/hosts/testhost/plugins/create/test_creator_1.py b/openpype/hosts/testhost/plugins/create/test_creator_1.py deleted file mode 100644 index 7664276fa2..0000000000 --- a/openpype/hosts/testhost/plugins/create/test_creator_1.py +++ /dev/null @@ -1,94 +0,0 @@ -import json -from openpype import resources -from openpype.hosts.testhost.api import pipeline -from openpype.lib import ( - UISeparatorDef, - UILabelDef, - BoolDef, - NumberDef, - FileDef, -) -from openpype.pipeline import ( - Creator, - CreatedInstance, -) - - -class TestCreatorOne(Creator): - identifier = "test_one" - label = "test" - family = "test" - description = "Testing creator of testhost" - - create_allow_context_change = False - - def get_icon(self): - return resources.get_openpype_splash_filepath() - - def collect_instances(self): - for instance_data in pipeline.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - pipeline.update_instances(update_list) - - def remove_instances(self, instances): - pipeline.remove_instances(instances) - for instance in instances: - self._remove_instance_from_context(instance) - - def create(self, subset_name, data, pre_create_data): - print("Data that can be used in create:\n{}".format( - json.dumps(pre_create_data, indent=4) - )) - new_instance = CreatedInstance(self.family, subset_name, data, self) - pipeline.HostContext.add_instance(new_instance.data_to_store()) - self.log.info(new_instance.data) - self._add_instance_to_context(new_instance) - - def get_default_variants(self): - return [ - "myVariant", - "variantTwo", - "different_variant" - ] - - def get_instance_attr_defs(self): - output = [ - NumberDef("number_key", label="Number"), - ] - return output - - def get_pre_create_attr_defs(self): - output = [ - BoolDef("use_selection", label="Use selection"), - UISeparatorDef(), - UILabelDef("Testing label"), - FileDef("filepath", folders=True, label="Filepath"), - FileDef( - "filepath_2", multipath=True, folders=True, label="Filepath 2" - ) - ] - return output - - def get_detail_description(self): - return """# Relictus funes est Nyseides currusque nunc oblita - -## Causa sed - -Lorem markdownum posito consumptis, *plebe Amorque*, abstitimus rogatus fictaque -gladium Circe, nos? Bos aeternum quae. Utque me, si aliquem cladis, et vestigia -arbor, sic mea ferre lacrimae agantur prospiciens hactenus. Amanti dentes pete, -vos quid laudemque rastrorumque terras in gratantibus **radix** erat cedemus? - -Pudor tu ponderibus verbaque illa; ire ergo iam Venus patris certe longae -cruentum lecta, et quaeque. Sit doce nox. Anteit ad tempora magni plenaque et -videres mersit sibique auctor in tendunt mittit cunctos ventisque gravitate -volucris quemquam Aeneaden. Pectore Mensis somnus; pectora -[ferunt](http://www.mox.org/oculosbracchia)? Fertilitatis bella dulce et suum? 
- """ diff --git a/openpype/hosts/testhost/plugins/create/test_creator_2.py b/openpype/hosts/testhost/plugins/create/test_creator_2.py deleted file mode 100644 index f54adee8a2..0000000000 --- a/openpype/hosts/testhost/plugins/create/test_creator_2.py +++ /dev/null @@ -1,74 +0,0 @@ -from openpype.lib import NumberDef, TextDef -from openpype.hosts.testhost.api import pipeline -from openpype.pipeline import ( - Creator, - CreatedInstance, -) - - -class TestCreatorTwo(Creator): - identifier = "test_two" - label = "test" - family = "test" - description = "A second testing creator" - - def get_icon(self): - return "cube" - - def create(self, subset_name, data, pre_create_data): - new_instance = CreatedInstance(self.family, subset_name, data, self) - pipeline.HostContext.add_instance(new_instance.data_to_store()) - self.log.info(new_instance.data) - self._add_instance_to_context(new_instance) - - def collect_instances(self): - for instance_data in pipeline.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - pipeline.update_instances(update_list) - - def remove_instances(self, instances): - pipeline.remove_instances(instances) - for instance in instances: - self._remove_instance_from_context(instance) - - def get_instance_attr_defs(self): - output = [ - NumberDef("number_key"), - TextDef("text_key") - ] - return output - - def get_detail_description(self): - return """# Lorem ipsum, dolor sit amet. [![Awesome](https://cdn.rawgit.com/sindresorhus/awesome/d7305f38d29fed78fa85652e3a63e154dd8e8829/media/badge.svg)](https://github.com/sindresorhus/awesome) - -> A curated list of awesome lorem ipsum generators. - -Inspired by the [awesome](https://github.com/sindresorhus/awesome) list thing. - - -## Table of Contents - -- [Legend](#legend) -- [Practical](#briefcase-practical) -- [Whimsical](#roller_coaster-whimsical) - - [Animals](#rabbit-animals) - - [Eras](#tophat-eras) - - [Famous Individuals](#sunglasses-famous-individuals) - - [Music](#microphone-music) - - [Food and Drink](#pizza-food-and-drink) - - [Geographic and Dialects](#earth_africa-geographic-and-dialects) - - [Literature](#books-literature) - - [Miscellaneous](#cyclone-miscellaneous) - - [Sports and Fitness](#bicyclist-sports-and-fitness) - - [TV and Film](#movie_camera-tv-and-film) -- [Tools, Apps, and Extensions](#wrench-tools-apps-and-extensions) -- [Contribute](#contribute) -- [TODO](#todo) -""" diff --git a/openpype/hosts/testhost/plugins/publish/collect_context.py b/openpype/hosts/testhost/plugins/publish/collect_context.py deleted file mode 100644 index 0ab98fb84b..0000000000 --- a/openpype/hosts/testhost/plugins/publish/collect_context.py +++ /dev/null @@ -1,34 +0,0 @@ -import pyblish.api - -from openpype.pipeline import ( - OpenPypePyblishPluginMixin, - attribute_definitions -) - - -class CollectContextDataTestHost( - pyblish.api.ContextPlugin, OpenPypePyblishPluginMixin -): - """ - Collecting temp json data sent from a host context - and path for returning json data back to hostself. 
- """ - - label = "Collect Source - Test Host" - order = pyblish.api.CollectorOrder - 0.4 - hosts = ["testhost"] - - @classmethod - def get_attribute_defs(cls): - return [ - attribute_definitions.BoolDef( - "test_bool", - True, - label="Bool input" - ) - ] - - def process(self, context): - # get json paths from os and load them - for instance in context: - instance.data["source"] = "testhost" diff --git a/openpype/hosts/testhost/plugins/publish/collect_instance_1.py b/openpype/hosts/testhost/plugins/publish/collect_instance_1.py deleted file mode 100644 index c7241a15a8..0000000000 --- a/openpype/hosts/testhost/plugins/publish/collect_instance_1.py +++ /dev/null @@ -1,52 +0,0 @@ -import json -import pyblish.api - -from openpype.lib import attribute_definitions -from openpype.pipeline import OpenPypePyblishPluginMixin - - -class CollectInstanceOneTestHost( - pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin -): - """ - Collecting temp json data sent from a host context - and path for returning json data back to hostself. - """ - - label = "Collect Instance 1 - Test Host" - order = pyblish.api.CollectorOrder - 0.3 - hosts = ["testhost"] - - @classmethod - def get_attribute_defs(cls): - return [ - attribute_definitions.NumberDef( - "version", - default=1, - minimum=1, - maximum=999, - decimals=0, - label="Version" - ) - ] - - def process(self, instance): - self._debug_log(instance) - - publish_attributes = instance.data.get("publish_attributes") - if not publish_attributes: - return - - values = publish_attributes.get(self.__class__.__name__) - if not values: - return - - instance.data["version"] = values["version"] - - def _debug_log(self, instance): - def _default_json(value): - return str(value) - - self.log.info( - json.dumps(instance.data, indent=4, default=_default_json) - ) diff --git a/openpype/hosts/testhost/plugins/publish/validate_context_with_error.py b/openpype/hosts/testhost/plugins/publish/validate_context_with_error.py deleted file mode 100644 index 46e996a569..0000000000 --- a/openpype/hosts/testhost/plugins/publish/validate_context_with_error.py +++ /dev/null @@ -1,57 +0,0 @@ -import pyblish.api -from openpype.pipeline import PublishValidationError - - -class ValidateInstanceAssetRepair(pyblish.api.Action): - """Repair the instance asset.""" - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - pass - - -description = """ -## Publish plugins - -### Validate Scene Settings - -#### Skip Resolution Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip resolution check against values from DB. - -#### Skip Timeline Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip `frameStart`, `frameEnd` check against values from DB. - -### AfterEffects Submit to Deadline - -* `Use Published scene` - Set to True (green) when Deadline should take published scene as a source instead of uploaded local one. -* `Priority` - priority of job on farm -* `Primary Pool` - here is list of pool fetched from server you can select from. -* `Secondary Pool` -* `Frames Per Task` - number of sequence division between individual tasks (chunks) -making one job on farm. -""" - - -class ValidateContextWithError(pyblish.api.ContextPlugin): - """Validate the instance asset is the current selected context asset. - - As it might happen that multiple worfiles are opened, switching - between them would mess with selected context. - In that case outputs might be output under wrong asset! 
- - Repair action will use Context asset value (from Workfiles or Launcher) - Closing and reopening with Workfiles will refresh Context value. - """ - - label = "Validate Context With Error" - hosts = ["testhost"] - actions = [ValidateInstanceAssetRepair] - order = pyblish.api.ValidatorOrder - - def process(self, context): - raise PublishValidationError("Crashing", "Context error", description) diff --git a/openpype/hosts/testhost/plugins/publish/validate_with_error.py b/openpype/hosts/testhost/plugins/publish/validate_with_error.py deleted file mode 100644 index 5a2888a8b0..0000000000 --- a/openpype/hosts/testhost/plugins/publish/validate_with_error.py +++ /dev/null @@ -1,57 +0,0 @@ -import pyblish.api -from openpype.pipeline import PublishValidationError - - -class ValidateInstanceAssetRepair(pyblish.api.Action): - """Repair the instance asset.""" - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - pass - - -description = """ -## Publish plugins - -### Validate Scene Settings - -#### Skip Resolution Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip resolution check against values from DB. - -#### Skip Timeline Check for Tasks - -Set regex pattern(s) to look for in a Task name to skip `frameStart`, `frameEnd` check against values from DB. - -### AfterEffects Submit to Deadline - -* `Use Published scene` - Set to True (green) when Deadline should take published scene as a source instead of uploaded local one. -* `Priority` - priority of job on farm -* `Primary Pool` - here is list of pool fetched from server you can select from. -* `Secondary Pool` -* `Frames Per Task` - number of sequence division between individual tasks (chunks) -making one job on farm. -""" - - -class ValidateWithError(pyblish.api.InstancePlugin): - """Validate the instance asset is the current selected context asset. - - As it might happen that multiple worfiles are opened, switching - between them would mess with selected context. - In that case outputs might be output under wrong asset! - - Repair action will use Context asset value (from Workfiles or Launcher) - Closing and reopening with Workfiles will refresh Context value. 
- """ - - label = "Validate With Error" - hosts = ["testhost"] - actions = [ValidateInstanceAssetRepair] - order = pyblish.api.ValidatorOrder - - def process(self, instance): - raise PublishValidationError("Crashing", "Instance error", description) diff --git a/openpype/hosts/testhost/run_publish.py b/openpype/hosts/testhost/run_publish.py deleted file mode 100644 index c7ad63aafd..0000000000 --- a/openpype/hosts/testhost/run_publish.py +++ /dev/null @@ -1,68 +0,0 @@ -import os -import sys - -mongo_url = "" -project_name = "" -asset_name = "" -task_name = "" -ftrack_url = "" -ftrack_username = "" -ftrack_api_key = "" - - -def multi_dirname(path, times=1): - for _ in range(times): - path = os.path.dirname(path) - return path - - -host_name = "testhost" -current_file = os.path.abspath(__file__) -openpype_dir = multi_dirname(current_file, 4) - -os.environ["OPENPYPE_MONGO"] = mongo_url -os.environ["OPENPYPE_ROOT"] = openpype_dir -os.environ["AVALON_PROJECT"] = project_name -os.environ["AVALON_ASSET"] = asset_name -os.environ["AVALON_TASK"] = task_name -os.environ["AVALON_APP"] = host_name -os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" -os.environ["AVALON_TIMEOUT"] = "1000" -os.environ["AVALON_DB"] = "avalon" -os.environ["FTRACK_SERVER"] = ftrack_url -os.environ["FTRACK_API_USER"] = ftrack_username -os.environ["FTRACK_API_KEY"] = ftrack_api_key -for path in [ - openpype_dir, - r"{}\repos\avalon-core".format(openpype_dir), - r"{}\.venv\Lib\site-packages".format(openpype_dir) -]: - sys.path.append(path) - -from Qt import QtWidgets, QtCore - -from openpype.tools.publisher.window import PublisherWindow - - -def main(): - """Main function for testing purposes.""" - import pyblish.api - from openpype.pipeline import install_host - from openpype.modules import ModulesManager - from openpype.hosts.testhost import api as testhost - - manager = ModulesManager() - for plugin_path in manager.collect_plugin_paths()["publish"]: - pyblish.api.register_plugin_path(plugin_path) - - install_host(testhost) - - QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling) - app = QtWidgets.QApplication([]) - window = PublisherWindow() - window.show() - app.exec_() - - -if __name__ == "__main__": - main() From 1f06830a03fa58ccfa2ccf76f940ec0281343c9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 13:24:20 +0200 Subject: [PATCH 0889/2550] change both 'user' and 'username' keys --- openpype/plugins/publish/extract_burnin.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 88093fb92f..36a28beb5d 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -492,6 +492,7 @@ class ExtractBurnin(openpype.api.Extractor): # OPENPYPE_USERNAME might have side effects webpublish_user_name = os.environ.get("WEBPUBLISH_OPENPYPE_USERNAME") if webpublish_user_name: + burnin_data["user"] = webpublish_user_name burnin_data["username"] = webpublish_user_name self.log.debug( From 398325684961bf9b408da0e56011cfbd4eb8bed1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 13:24:33 +0200 Subject: [PATCH 0890/2550] add 'user' to representation context data --- openpype/plugins/publish/integrate.py | 2 +- openpype/plugins/publish/integrate_hero_version.py | 2 +- openpype/plugins/publish/integrate_legacy.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 
f99c718f8a..788966878f 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -135,7 +135,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # the database even if not used by the destination template db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "username", "output" + "family", "hierarchy", "username", "user", "output" ] skip_host_families = [] diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 7d698ff98d..5b8b141e88 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -46,7 +46,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ignored_representation_names = [] db_representation_context_keys = [ "project", "asset", "task", "subset", "representation", - "family", "hierarchy", "task", "username" + "family", "hierarchy", "task", "username", "user" ] # QUESTION/TODO this process should happen on server if crashed due to # permissions error on files (files were used or user didn't have perms) diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index b90b61f587..5a6190f38e 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -127,7 +127,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): exclude_families = ["render.farm"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", - "family", "hierarchy", "task", "username" + "family", "hierarchy", "task", "username", "user" ] default_template_name = "publish" From 198ad3ac901dc962ab840a0b09a5bd25d4a286e2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 13:25:02 +0200 Subject: [PATCH 0891/2550] add both 'user' and 'username' in integrate slack --- .../modules/slack/plugins/publish/integrate_slack_api.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index c3b288f0cd..4a8e9f773f 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -95,13 +95,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): Reviews might be large, so allow only adding link to message instead of uploading only. 
""" + fill_data = copy.deepcopy(instance.context.data["anatomyData"]) + username = fill_data.get("user") fill_pairs = [ ("asset", instance.data.get("asset", fill_data.get("asset"))), ("subset", instance.data.get("subset", fill_data.get("subset"))), - ("username", instance.data.get("username", - fill_data.get("username"))), + ("user", username), + ("username", username), ("app", instance.data.get("app", fill_data.get("app"))), ("family", instance.data.get("family", fill_data.get("family"))), ("version", str(instance.data.get("version", From 6bcbf34d27e3806ae7f62a09d7d5a6002671a2eb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 13:53:39 +0200 Subject: [PATCH 0892/2550] changed class name to 'CollectUsernameForWebpublish' --- openpype/modules/ftrack/plugins/publish/collect_username.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/ftrack/plugins/publish/collect_username.py index a9b746ea51..0e232bf83e 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_username.py +++ b/openpype/modules/ftrack/plugins/publish/collect_username.py @@ -13,7 +13,7 @@ import os import pyblish.api -class CollectUsername(pyblish.api.ContextPlugin): +class CollectUsernameForWebpublish(pyblish.api.ContextPlugin): """ Translates user email to Ftrack username. @@ -32,10 +32,8 @@ class CollectUsername(pyblish.api.ContextPlugin): hosts = ["webpublisher", "photoshop"] targets = ["remotepublish", "filespublish", "tvpaint_worker"] - _context = None - def process(self, context): - self.log.info("CollectUsername") + self.log.info("{}".format(self.__class__.__name__)) os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"] os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"] From 8e6b0567c967008b0b64151eeceed23a426ed073 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 13:54:29 +0200 Subject: [PATCH 0893/2550] skip 'WEBPUBLISH_OPENPYPE_USERNAME' usage --- .../modules/ftrack/plugins/publish/collect_username.py | 1 - openpype/plugins/publish/extract_burnin.py | 7 ------- 2 files changed, 8 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/ftrack/plugins/publish/collect_username.py index 0e232bf83e..ab1f7d8d5d 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_username.py +++ b/openpype/modules/ftrack/plugins/publish/collect_username.py @@ -65,5 +65,4 @@ class CollectUsernameForWebpublish(pyblish.api.ContextPlugin): burnin_name = username if '@' in burnin_name: burnin_name = burnin_name[:burnin_name.index('@')] - os.environ["WEBPUBLISH_OPENPYPE_USERNAME"] = burnin_name context.data["user"] = burnin_name diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 36a28beb5d..c8b2b73874 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -488,13 +488,6 @@ class ExtractBurnin(openpype.api.Extractor): "frame_end_handle": frame_end_handle } - # use explicit username for webpublishes as rewriting - # OPENPYPE_USERNAME might have side effects - webpublish_user_name = os.environ.get("WEBPUBLISH_OPENPYPE_USERNAME") - if webpublish_user_name: - burnin_data["user"] = webpublish_user_name - burnin_data["username"] = webpublish_user_name - self.log.debug( "Basic burnin_data: {}".format(json.dumps(burnin_data, indent=4)) ) From 3ac5305f65b17083b126eeb76675064311bf4f4a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: 
Thu, 1 Sep 2022 13:54:58 +0200 Subject: [PATCH 0894/2550] better user query --- .../modules/ftrack/plugins/publish/collect_username.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/ftrack/plugins/publish/collect_username.py index ab1f7d8d5d..798f3960a8 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_username.py +++ b/openpype/modules/ftrack/plugins/publish/collect_username.py @@ -1,5 +1,8 @@ """Loads publishing context from json and continues in publish process. +Should run before 'CollectAnatomyContextData' so the user on context is +changed before it's stored to context anatomy data or instance anatomy data. + Requires: anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) @@ -52,12 +55,14 @@ class CollectUsernameForWebpublish(pyblish.api.ContextPlugin): return session = ftrack_api.Session(auto_connect_event_hub=False) - user = session.query("User where email like '{}'".format(user_email)) + user = session.query( + "User where email like '{}'".format(user_email) + ).first() if not user: raise ValueError( "Couldn't find user with {} email".format(user_email)) - user = user[0] + username = user.get("username") self.log.debug("Resolved ftrack username:: {}".format(username)) os.environ["FTRACK_API_USER"] = username From 3c6b999d544d65a8d31cc37ae54c731ff098f73a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= Date: Thu, 1 Sep 2022 17:22:14 +0200 Subject: [PATCH 0895/2550] Kitsu - sync_all_project - add list ignore_projects --- openpype/modules/kitsu/utils/update_op_with_zou.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 8d65591c0b..f013251bb1 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -318,13 +318,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: ) -def sync_all_projects(login: str, password: str): +def sync_all_projects(login: str, password: str, ignore_projects: list = []): """Update all OP projects in DB with Zou data. Args: login (str): Kitsu user login password (str): Kitsu user password - + ignore_projects (list): List of unsynced project names Raises: gazu.exception.AuthFailedException: Wrong user login and/or password """ @@ -340,7 +340,8 @@ def sync_all_projects(login: str, password: str): dbcon.install() all_projects = gazu.project.all_open_projects() for project in all_projects: - sync_project_from_kitsu(dbcon, project) + if project["name"] not in ignore_projects: + sync_project_from_kitsu(dbcon, project) def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): From d90f9fb5b819e874012141391861b843fc4abc82 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Sep 2022 18:12:16 +0200 Subject: [PATCH 0896/2550] Fix - changed format of version string in pyproject.toml Since Poetry 1.2 version in .toml must follow PEP-440, in our case there must not be dash sign. Plus sign is valid though. Version in pyproject.toml is not used anywhere yet (only maybe when uploading wheel to pip, which is not really used). 
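To make the accepted and rejected forms concrete, here is a small illustrative check using the `packaging` library, which implements PEP 440 (the same rule Poetry 1.2+ enforces per the message above). It is not part of the patch:

```python
from packaging.version import InvalidVersion, Version

for candidate in ("3.14.2-nightly.1", "3.14.2+nightly.1"):
    try:
        # "+nightly.1" parses as a PEP 440 local version label.
        print(candidate, "->", Version(candidate))
    except InvalidVersion:
        # "-nightly.1" is neither a valid pre/post/dev segment nor a local label.
        print(candidate, "is rejected by PEP 440")
```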
--- pyproject.toml | 2 +- tools/ci_tools.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9ed1872eac..0deb4f465b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.2-nightly.1" # OpenPype +version = "3.14.2+nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 4c59cd6af6..65a615b33e 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -101,7 +101,7 @@ def bump_file_versions(version): # bump pyproject.toml filename = "pyproject.toml" - regex = "version = \"(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-((0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?\" # OpenPype" + regex = "version = \"(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(+((0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?\" # OpenPype" pyproject_version = f"version = \"{version}\" # OpenPype" file_regex_replace(filename, regex, pyproject_version) From 05043b8578e8fcfa4cae58609fc1c5055a1f019c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Sep 2022 18:15:49 +0200 Subject: [PATCH 0897/2550] Fix - escaped plus sign --- tools/ci_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 65a615b33e..c8f0cd48b4 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -101,7 +101,7 @@ def bump_file_versions(version): # bump pyproject.toml filename = "pyproject.toml" - regex = "version = \"(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(+((0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?\" # OpenPype" + regex = "version = \"(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(\+((0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?\" # OpenPype" pyproject_version = f"version = \"{version}\" # OpenPype" file_regex_replace(filename, regex, pyproject_version) From 1ca386c78d48cb3903499dd1d7adc5d1ac333a69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 1 Sep 2022 18:46:53 +0200 Subject: [PATCH 0898/2550] :bug: add required key variant --- openpype/pipeline/create/context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index eaaed39357..1b2521e4f7 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -435,6 +435,7 @@ class CreatedInstance: if key in data: data.pop(key) + self._data["variant"] = self._data.get("variant") or "" # Stored creator specific attribute values # {key: value} creator_values = copy.deepcopy(orig_creator_attributes) From d2233bc6f8c5c2541ad04c66cafa5e3419c2fbae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 1 Sep 2022 18:47:58 +0200 Subject: [PATCH 0899/2550] :wrench: new style creator --- openpype/hosts/houdini/api/lib.py | 97 ++++++++++++------- openpype/hosts/houdini/api/pipeline.py | 35 +++++-- openpype/hosts/houdini/api/plugin.py | 61 ++++++++++-- .../plugins/create/create_pointcache.py | 55 ++++------- 4 files changed, 164 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 
675f3afcb5..5d99d7f363 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -281,7 +281,7 @@ def render_rop(ropnode): raise RuntimeError("Render failed: {0}".format(exc)) -def imprint(node, data): +def imprint(node, data, update=False): """Store attributes with value on a node Depending on the type of attribute it creates the correct parameter @@ -293,51 +293,50 @@ def imprint(node, data): Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value + update (bool, optional): flag if imprint should update + already existing data or leave them untouched and only + add new. Returns: None """ + if not data: + return + + current_parameters = node.spareParms() + current_keys = [p.name() for p in current_parameters] + update_keys = [] parm_group = node.parmTemplateGroup() - parm_folder = hou.FolderParmTemplate("folder", "Extra") + templates = [] for key, value in data.items(): if value is None: continue - if isinstance(value, float): - parm = hou.FloatParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, bool): - parm = hou.ToggleParmTemplate(name=key, - label=key, - default_value=value) - elif isinstance(value, int): - parm = hou.IntParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, six.string_types): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, dict): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(json.dumps(value),)) - else: - raise TypeError("Unsupported type: %r" % type(value)) - - parm_folder.addParmTemplate(parm) - + if key in current_keys: + if not update: + print(f"{key} already exists on {node}") + else: + print(f"replacing {key}") + update_keys.append((key, value)) + continue + parm = parm_to_template(key, value) + # parm.hide(True) + templates.append(parm) + parm_folder.setParmTemplates(templates) parm_group.append(parm_folder) node.setParmTemplateGroup(parm_group) + if update_keys: + parms = node.parmTuplesInFolder(("Extra",)) + for parm in parms: + for key, value in update_keys: + if parm.name() == key: + node.replaceSpareParmTuple( + parm.name(), parm_to_template(key, value)) + def lsattr(attr, value=None, root="/"): """Return nodes that have `attr` @@ -407,9 +406,9 @@ def read(node): value = parameter.eval() # test if value is json encoded dict if isinstance(value, six.string_types) and \ - len(value) > 0 and value[0] == "{": + len(value) > 0 and value.startswith("JSON:::"): try: - value = json.loads(value) + value = json.loads(value.lstrip("JSON:::")) except json.JSONDecodeError: # not a json pass @@ -502,3 +501,35 @@ def get_main_window(): if self._parent is None: self._parent = hou.ui.mainQtWindow() return self._parent + + +def parm_to_template(key, value): + if isinstance(value, float): + parm = hou.FloatParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, bool): + parm = hou.ToggleParmTemplate(name=key, + label=key, + default_value=value) + elif isinstance(value, int): + parm = hou.IntParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, six.string_types): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, (dict, list, tuple)): + parm = hou.StringParmTemplate(name=key, + label=key, + 
num_components=1, + default_value=( + "JSON:::" + json.dumps(value),)) + else: + raise TypeError("Unsupported type: %r" % type(value)) + + return parm \ No newline at end of file diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b8479a7b25..6daf942cf0 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -5,8 +5,7 @@ import contextlib import hou # noqa -from openpype.host import HostBase, IWorkfileHost, ILoadHost -from openpype.tools.utils import host_tools +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api @@ -38,7 +37,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): name = "houdini" def __init__(self): @@ -129,6 +128,16 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): on_file_event_callback ) + def update_context_data(self, data, changes): + root_node = hou.node("/") + lib.imprint(root_node, data) + + def get_context_data(self): + from pprint import pformat + + self.log.debug(f"----" + pformat(lib.read(hou.node("/")))) + return lib.read(hou.node("/")) + def on_file_event_callback(event): if event == hou.hipFileEventType.AfterLoad: @@ -385,9 +394,15 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): log.warning("%s - %s", instance_node.path(), exc) -def list_instances(): - """List all publish instances in the scene.""" - return lib.lsattr("id", "pyblish.avalon.instance") +def list_instances(creator_id=None): + """List all publish instances in the scene. + + """ + instance_signature = { + "id": "pyblish.avalon.instance", + "identifier": creator_id + } + return lib.lsattrs(instance_signature) def remove_instance(instance): @@ -397,13 +412,15 @@ def remove_instance(instance): because it might contain valuable data for artist. 
""" - nodes = instance[:] + nodes = instance.get("members") if not nodes: return # Assume instance node is first node - instance_node = nodes[0] + instance_node = hou.node(nodes[0]) + to_delete = None for parameter in instance_node.spareParms(): if parameter.name() == "id" and \ parameter.eval() == "pyblish.avalon.instance": - instance_node.removeSpareParmTuple(parameter) + to_delete = parameter + instance_node.removeSpareParmTuple(to_delete) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index fc36284a72..7120a49e41 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -13,8 +13,9 @@ from openpype.pipeline import ( Creator as NewCreator, CreatedInstance ) +from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint +from .lib import imprint, read class OpenPypeCreatorError(CreatorError): @@ -96,18 +97,64 @@ class Creator(LegacyCreator): class HoudiniCreator(NewCreator): _nodes = [] - def collect_instances(self): - for instance_data in list_instances(): - instance = CreatedInstance.from_existing( - instance_data, self - ) + def create(self, subset_name, instance_data, pre_create_data): + try: + if pre_create_data.get("use_selection"): + self._nodes = hou.selectedNodes() + + # Get the node type and remove it from the data, not needed + node_type = instance_data.pop("node_type", None) + if node_type is None: + node_type = "geometry" + + # Get out node + out = hou.node("/out") + instance_node = out.createNode( + node_type, node_name=subset_name) + instance_node.moveToGoodPosition() + instance_data["members"] = [instance_node.path()] + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) self._add_instance_to_context(instance) + imprint(instance_node, instance.data_to_store()) + return instance + + except hou.Error as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + def collect_instances(self): + for instance in list_instances(creator_id=self.identifier): + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) def update_instances(self, update_list): for created_inst, _changes in update_list: - imprint(created_inst.get("instance_id"), created_inst.data_to_store()) + instance_node = hou.node(created_inst.get("members")[0]) + current_data = read(instance_node) + + imprint( + instance_node, + { + key: value[1] for key, value in _changes.items() + if current_data.get(key) != value[1] + }, + update=True + ) def remove_instances(self, instances): for instance in instances: remove_instance(instance) self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 052580b56f..686dbaa7ab 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- from openpype.hosts.houdini.api import plugin -from openpype.hosts.houdini.api import list_instances from openpype.pipeline import CreatedInstance +import hou + class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" @@ -11,50 +12,34 @@ class 
CreatePointCache(plugin.HoudiniCreator): family = "pointcache" icon = "gears" - def collect_instances(self): - for instance_data in list_instances(): - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - def create(self, subset_name, instance_data, pre_create_data): - pass + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - def __init__(self, *args, **kwargs): - super(CreatePointCache, self).__init__(*args, **kwargs) + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) - - self.data.update({"node_type": "alembic"}) - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ + instance_node = hou.node(instance.get("members")[0]) parms = { - "use_sop_path": True, # Export single node from SOP Path - "build_from_path": True, # Direct path of primitive in output - "path_attrib": "path", # Pass path attribute for output + "use_sop_path": True, + "build_from_path": True, + "path_attrib": "path", "prim_to_detail_pattern": "cbId", - "format": 2, # Set format to Ogawa - "facesets": 0, # No face sets (by default exclude them) - "filename": "$HIP/pyblish/%s.abc" % self.name, + "format": 2, + "facesets": 0, + "filename": "$HIP/pyblish/{}.abc".format(self.identifier) } - if self.nodes: - node = self.nodes[0] - parms.update({"sop_path": node.path()}) + if instance_node: + parms["sop_path"] = instance_node.path() - instance.setParms(parms) - instance.parm("trange").set(1) + instance_node.setParms(parms) + instance_node.parm("trange").set(1) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) From 4fc90655d7e5b6869650638e4b7ff064ebc2271e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Sep 2022 18:59:20 +0200 Subject: [PATCH 0900/2550] Fix - update cx-freeze Old version caused "No module named 'distutils.command.bdist_msi'" error --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 21b6bda880..726b248f8c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -339,7 +339,7 @@ test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", [[package]] name = "cx-freeze" -version = "6.9" +version = "6.11.1" description = "Create standalone executables from Python scripts" category = "dev" optional = false From 58724a4c1be465b1429bc32c353ac25db3fc0624 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 1 Sep 2022 23:33:27 +0200 Subject: [PATCH 0901/2550] Have `get_project` `inactive` argument default to True --- openpype/client/entities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 3d2730a17c..f9afde0f5d 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -58,7 +58,7 @@ def get_projects(active=True, inactive=False, fields=None): yield project_doc -def get_project(project_name, active=True, inactive=False, fields=None): +def get_project(project_name, active=True, inactive=True, fields=None): # Skip if both are disabled if not active and not inactive: return None From ff88f201ec648a001ca63c770edfa8fcdcbc7f26 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 
00:16:52 +0200 Subject: [PATCH 0902/2550] Ignore empty labels, then still use name like other asset models --- openpype/tools/standalonepublish/widgets/model_asset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/standalonepublish/widgets/model_asset.py b/openpype/tools/standalonepublish/widgets/model_asset.py index abfc0a2145..9fed46b3fe 100644 --- a/openpype/tools/standalonepublish/widgets/model_asset.py +++ b/openpype/tools/standalonepublish/widgets/model_asset.py @@ -81,7 +81,7 @@ class AssetModel(TreeModel): for asset in current_assets: # get label from data, otherwise use name data = asset.get("data", {}) - label = data.get("label", asset["name"]) + label = data.get("label") or asset["name"] tags = data.get("tags", []) # store for the asset for optimization From 8bb526ed8f3f76e2fa1745e9b724ef9ba4051224 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Sep 2022 08:48:30 +0200 Subject: [PATCH 0903/2550] nuke: validate write node is not failing due wrong type --- .../hosts/nuke/plugins/publish/validate_write_nodes.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 5a8bc2022e..26a563b13b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -2,7 +2,8 @@ import pyblish.api from openpype.pipeline.publish import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( get_write_node_template_attr, - set_node_knobs_from_settings + set_node_knobs_from_settings, + color_gui_to_int ) from openpype.pipeline import PublishXmlValidationError @@ -76,8 +77,11 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): # fix type differences if type(node_value) in (int, float): - value = float(value) - node_value = float(node_value) + if isinstance(value, list): + value = color_gui_to_int(value) + else: + value = float(value) + node_value = float(node_value) else: value = str(value) node_value = str(node_value) From 7ed5f5a0ee56d6481d28f0510d1bc6311e89ac38 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Sep 2022 11:04:13 +0200 Subject: [PATCH 0904/2550] Fix - update cx-freeze Old version caused "No module named 'distutils.command.bdist_msi'" error --- poetry.lock | 990 ++++++++++++++++++++++++++++++++++++++++++------- pyproject.toml | 3 +- 2 files changed, 864 insertions(+), 129 deletions(-) diff --git a/poetry.lock b/poetry.lock index 726b248f8c..b428393a1f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,7 +33,7 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} yarl = ">=1.0,<2.0" [package.extras] -speedups = ["aiodns", "brotli", "cchardet"] +speedups = ["Brotli", "aiodns", "cchardet"] [[package]] name = "aiohttp-json-rpc" @@ -122,6 +122,7 @@ python-versions = ">=3.6.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" +setuptools = ">=20.0" typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} wrapt = ">=1.11,<2" @@ -162,10 +163,10 @@ optional = false python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs 
= ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "autopep8" @@ -273,9 +274,9 @@ optional = false python-versions = ">=2.7, <4.0" [package.extras] -dev = ["sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)", "lowdown (>=0.2.0,<1)", "pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)"] -doc = ["sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)", "lowdown (>=0.2.0,<1)"] -test = ["pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)"] +dev = ["lowdown (>=0.2.0,<1)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"] +doc = ["lowdown (>=0.2.0,<1)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"] +test = ["pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)"] [[package]] name = "colorama" @@ -330,30 +331,39 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] -name = "cx-freeze" +name = "cx-Freeze" version = "6.11.1" description = "Create standalone executables from Python scripts" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} -importlib-metadata = ">=4.3.1" +cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\" and python_version < \"3.10\""} +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +lief = {version = ">=0.11.5", markers = "sys_platform == \"win32\" and python_version <= \"3.10\""} +packaging = ">=21.0" +patchelf = {version = ">=0.12", markers = "sys_platform == \"linux\""} +setuptools = ">=59.0.1,<=60.10.0" + +[package.extras] +dev = ["bump2version (>=1.0.1)", "cibuildwheel (==2.6.1)", "pre-commit (>=2.17.0)", "pylint (>=2.13.0)"] +doc = 
["sphinx (>=5.0.1,<6.0.0)", "sphinx-rtd-theme (==1.0.0)"] +test = ["nose (==1.3.7)", "pygments (>=2.11.2)", "pytest (>=7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytest-timeout (>=1.4.2)"] [[package]] name = "cx-logging" version = "3.0" description = "Python and C interfaces for logging" -category = "dev" +category = "main" optional = false python-versions = "*" @@ -369,7 +379,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] +dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "dill" @@ -391,8 +401,8 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -dnssec = ["cryptography (>=2.6,<37.0)"] curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<37.0)"] doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] @@ -503,7 +513,7 @@ requests = ">=2.25.1,<=2.27.1" [package.extras] dev = ["wheel"] -test = ["pytest-cov (==2.12.1)", "requests-mock (==1.9.3)", "pytest (==4.6.11)", "pytest (==6.1.2)", "pytest (==6.2.5)", "black (==21.12b0)", "pre-commit (==2.17.0)"] +test = ["black (==21.12b0)", "pre-commit (==2.17.0)", "pytest (==4.6.11)", "pytest (==6.1.2)", "pytest (==6.2.5)", "pytest-cov (==2.12.1)", "requests-mock (==1.9.3)"] [[package]] name = "gitdb" @@ -576,7 +586,7 @@ rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} six = ">=1.9.0" [package.extras] -aiohttp = ["requests (>=2.20.0,<3.0.0dev)", "aiohttp (>=3.6.2,<4.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -648,9 +658,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -669,10 +679,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" @@ -697,8 +707,8 @@ optional = false python-versions = 
">=3.7" [package.extras] -trio = ["async-generator", "trio"] -test = ["async-timeout", "trio", "testpath", "pytest-asyncio (>=0.17)", "pytest-trio", "pytest"] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["async_generator", "trio"] [[package]] name = "jinja2" @@ -751,8 +761,8 @@ pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_ SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler", "pytest-flake8", "pytest-mypy"] [[package]] name = "lazy-object-proxy" @@ -762,6 +772,14 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "lief" +version = "0.12.1" +description = "Library to instrument executable formats" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "log4mongo" version = "1.7.0" @@ -809,7 +827,7 @@ python-versions = "*" pyaaf2 = "1.4.0" [package.extras] -dev = ["check-manifest", "flake8 (>=3.5)", "coverage (>=4.5)", "urllib3 (>=1.24.3)"] +dev = ["check-manifest", "coverage (>=4.5)", "flake8 (>=3.5)", "urllib3 (>=1.24.3)"] view = ["PySide2 (>=5.11,<6.0)"] [package.source] @@ -821,7 +839,7 @@ reference = "openpype" name = "packaging" version = "21.3" description = "Core utilities for Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -843,9 +861,9 @@ pynacl = ">=1.0.1" six = "*" [package.extras] -all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] -ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"] -gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] +all = ["bcrypt (>=3.1.3)", "gssapi (>=1.4.1)", "invoke (>=1.3)", "pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "pywin32 (>=2.1.8)"] +ed25519 = ["bcrypt (>=3.1.3)", "pynacl (>=1.0.1)"] +gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] invoke = ["invoke (>=1.3)"] [[package]] @@ -860,6 +878,17 @@ python-versions = ">=3.6" qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["docopt", "pytest (<6.0.0)"] +[[package]] +name = "patchelf" +version = "0.15.0.0" +description = "A small utility to modify the dynamic linker and RPATH of ELF executables." 
+category = "main" +optional = false +python-versions = "*" + +[package.extras] +test = ["importlib-metadata", "pytest"] + [[package]] name = "pathlib2" version = "2.3.7.post1" @@ -892,8 +921,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" @@ -1054,7 +1083,7 @@ python-versions = "*" aws = ["pymongo-auth-aws (<2.0.0)"] encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] gssapi = ["pykerberos"] -ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)", "certifi"] +ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] snappy = ["python-snappy"] srv = ["dnspython (>=1.16.0,<1.17.0)"] tls = ["ipaddress"] @@ -1072,8 +1101,8 @@ python-versions = ">=3.6" cffi = ">=1.4.1" [package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] +docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pynput" @@ -1188,7 +1217,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-print" @@ -1381,6 +1410,19 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "setuptools" +version = "60.10.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-inline-tabs", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "shotgun-api3" version = "3.3.3" @@ -1413,8 +1455,8 @@ optional = false python-versions = ">=3.6.0" [package.extras] -optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=10,<11)", "websocket-client (>=1,<2)"] -testing = ["pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "itsdangerous (==1.1.0)", "Jinja2 (==3.0.3)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=4,<5)", "black (==22.3.0)", "click (==8.0.4)", "psutil (>=5,<6)", "databases (>=0.5)", "boto3 (<=2)", "moto (>=3,<4)"] +optional = ["SQLAlchemy 
(>=1,<2)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)"] +testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werkzeug (<2)", "black (==22.3.0)", "boto3 (<=2)", "click (==8.0.4)", "codecov (>=2,<3)", "databases (>=0.5)", "flake8 (>=4,<5)", "itsdangerous (==1.1.0)", "moto (>=3,<4)", "psutil (>=5,<6)", "pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "pytest-cov (>=2,<3)"] [[package]] name = "smmap" @@ -1469,8 +1511,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", "docutils-stubs", "types-typed-ast", "types-requests"] -test = ["pytest (>=4.6)", "html5lib", "cython", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed-ast"] [[package]] name = "sphinx-qt-documentation" @@ -1485,9 +1527,9 @@ docutils = "*" sphinx = "*" [package.extras] -test = ["pytest-cov", "pytest (>=3.0.0)"] -lint = ["pylint", "flake8", "black"] dev = ["pre-commit"] +lint = ["black", "flake8", "pylint"] +test = ["pytest (>=3.0.0)", "pytest-cov"] [[package]] name = "sphinx-rtd-theme" @@ -1502,7 +1544,7 @@ docutils = "<0.18" sphinx = ">=1.6" [package.extras] -dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] [[package]] name = "sphinxcontrib-applehelp" @@ -1513,7 +1555,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1525,7 +1567,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1537,8 +1579,8 @@ optional = false python-versions = ">=3.6" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" @@ -1549,7 +1591,7 @@ optional = false python-versions = ">=3.5" [package.extras] -test = ["pytest", "flake8", "mypy"] +test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" @@ -1560,7 +1602,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1572,7 +1614,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1588,7 +1630,7 @@ sphinxcontrib-serializinghtml = "*" [package.extras] lint = ["flake8"] -test = ["pytest", "sqlalchemy", "whoosh", "sphinx"] +test = ["Sphinx", "pytest", "sqlalchemy", "whoosh"] [[package]] name = "stone" @@ -1659,8 +1701,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] 
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1682,6 +1724,17 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] six = "*" +[[package]] +name = "wheel" +version = "0.37.1" +description = "A built-package format for Python" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.extras] +test = ["pytest (>=3.0.0)", "pytest-cov"] + [[package]] name = "wrapt" version = "1.14.1" @@ -1703,8 +1756,8 @@ aiohttp = "<4" yarl = "*" [package.extras] -develop = ["async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov", "sphinx", "sphinxcontrib-plantuml", "tox (>=2.4)"] -testing = ["async-timeout", "pytest", "pytest-aiohttp", "pytest-cov", "coverage (!=4.3)", "coveralls"] +develop = ["Sphinx", "async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov", "sphinxcontrib-plantuml", "tox (>=2.4)"] +testing = ["async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov"] ujson = ["ujson"] [[package]] @@ -1729,13 +1782,13 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "de7422afb6aed02f75e1696afdda9ad6c7bf32da76b5022ee3e8f71a1ac4bae2" +content-hash = "6a5f6910109c9ed6fb1cc1cb1cef7b21a0055a17c393175a1a7aabc00e35d54f" [metadata.files] acre = [] @@ -1817,7 +1870,10 @@ aiohttp-json-rpc = [ {file = "aiohttp-json-rpc-0.13.3.tar.gz", hash = "sha256:6237a104478c22c6ef96c7227a01d6832597b414e4b79a52d85593356a169e99"}, {file = "aiohttp_json_rpc-0.13.3-py3-none-any.whl", hash = "sha256:4fbd197aced61bd2df7ae3237ead7d3e08833c2ccf48b8581e1828c95ebee680"}, ] -aiohttp-middlewares = [] +aiohttp-middlewares = [ + {file = "aiohttp-middlewares-2.1.0.tar.gz", hash = "sha256:5863970d944dc63faedc96ef324a7fe2bcefefebe29acc90cd641236322d00c3"}, + {file = "aiohttp_middlewares-2.1.0-py3-none-any.whl", hash = "sha256:c83d48702e6a8669981976f39a60e83d059dc01d7b1ee651aec5d4cb807ff784"}, +] aiosignal = [ {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, @@ -1835,7 +1891,10 @@ arrow = [ {file = "arrow-0.17.0-py2.py3-none-any.whl", hash = "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5"}, {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] -astroid = [] +astroid = [ + {file = "astroid-2.11.7-py3-none-any.whl", hash = "sha256:86b0a340a512c65abf4368b80252754cda17c02cdbbd3f587dddf98112233e7b"}, + {file = "astroid-2.11.7.tar.gz", hash = "sha256:bb24615c77f4837c707669d16907331374ae8a964650a66999da3f5ca68dc946"}, +] async-timeout = [ {file = "async-timeout-4.0.2.tar.gz", hash = 
"sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, @@ -1844,21 +1903,112 @@ asynctest = [ {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, ] -atomicwrites = [] -attrs = [] +atomicwrites = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] +attrs = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] autopep8 = [ {file = "autopep8-1.5.7-py2.py3-none-any.whl", hash = "sha256:aa213493c30dcdac99537249ee65b24af0b2c29f2e83cd8b3f68760441ed0db9"}, {file = "autopep8-1.5.7.tar.gz", hash = "sha256:276ced7e9e3cb22e5d7c14748384a5cf5d9002257c0ed50c0e075b68011bb6d0"}, ] -babel = [] -bcrypt = [] +babel = [ + {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, + {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +] +bcrypt = [ + {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40"}, + {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa"}, + {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa"}, + {file = "bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e"}, + {file = "bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129"}, + {file = "bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"}, +] blessed = [ {file = "blessed-1.19.1-py2.py3-none-any.whl", hash = "sha256:63b8554ae2e0e7f43749b6715c734cc8f3883010a809bf16790102563e6cf25b"}, {file = "blessed-1.19.1.tar.gz", hash = "sha256:9a0d099695bf621d4680dd6c73f6ad547f6a3442fbdbe80c4b1daa1edbc492fc"}, ] -cachetools = [] -certifi = [] -cffi = [] +cachetools = [ + {file = 
"cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, + {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, +] +certifi = [ + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = 
"cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = 
"sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] charset-normalizer = [ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = 
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, @@ -1871,7 +2021,10 @@ clique = [ {file = "clique-1.6.1-py2.py3-none-any.whl", hash = "sha256:8619774fa035661928dd8c93cd805acf2d42533ccea1b536c09815ed426c9858"}, {file = "clique-1.6.1.tar.gz", hash = "sha256:90165c1cf162d4dd1baef83ceaa1afc886b453e379094fa5b60ea470d1733e66"}, ] -colorama = [] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] commonmark = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, @@ -1880,20 +2033,99 @@ coolname = [ {file = "coolname-1.1.0-py2.py3-none-any.whl", hash = "sha256:e6a83a0ac88640f4f3d2070438dbe112fe80cfebc119c93bd402976ec84c0978"}, {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, ] -coverage = [] -cryptography = [] -cx-freeze = [ - {file = "cx_Freeze-6.9-cp310-cp310-win32.whl", hash = "sha256:776d4fb68a4831691acbd3c374362b9b48ce2e568514a73c3d4cb14d5dcf1470"}, - {file = "cx_Freeze-6.9-cp310-cp310-win_amd64.whl", hash = "sha256:243f36d35a034a409cd6247d8cb5d1fbfd7374e3e668e813d0811f64d6bd5ed3"}, - {file = "cx_Freeze-6.9-cp36-cp36m-win32.whl", hash = "sha256:ffc855eabc735b693e2d604d71dce6d52d78a6ba1070c55d51e786dd68ed232c"}, - {file = "cx_Freeze-6.9-cp36-cp36m-win_amd64.whl", hash = "sha256:fe4e32a0c75b2b54491882926bf3ba12f8a3d589822a68a8be7c09f1dcca5546"}, - {file = "cx_Freeze-6.9-cp37-cp37m-win32.whl", hash = "sha256:99c292e7a31cb343efc0cf47f82220a44a4a3b8776651624cd8ee03c23104940"}, - {file = "cx_Freeze-6.9-cp37-cp37m-win_amd64.whl", hash = "sha256:738ab22f3a3f6bc220b16dccf2aa0603c3cd271b2a7a9d9480dab82311308b23"}, - {file = "cx_Freeze-6.9-cp38-cp38-win32.whl", hash = "sha256:c1c75df572858e623d0aa39771cd984c0abd8aacb43b2aca2d12d0bc95f25566"}, - {file = "cx_Freeze-6.9-cp38-cp38-win_amd64.whl", hash = "sha256:0788c895c47fdcf375151ce78ff42336c01aca7bc43daecb8f8f8356cdc42b43"}, - {file = "cx_Freeze-6.9-cp39-cp39-win32.whl", hash = "sha256:a31f5ddbc80b29e297370d868791470b0e3e9062db45038c23293a76ed039018"}, - {file = "cx_Freeze-6.9-cp39-cp39-win_amd64.whl", hash = "sha256:30708f603076713c0a839cdfb34f4126d68e9d61afb3d9a59daa9cf252033872"}, - {file = "cx_Freeze-6.9.tar.gz", hash = "sha256:673aa3199af2ef87fc03a43a30e5d78b27ced2cedde925da89c55b5657da267b"}, +coverage = [ + {file = "coverage-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f50d3a822947572496ea922ee7825becd8e3ae6fbd2400cd8236b7d64b17f285"}, + {file = "coverage-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5191d53afbe5b6059895fa7f58223d3751c42b8101fb3ce767e1a0b1a1d8f87"}, + {file = "coverage-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04010af3c06ce2bfeb3b1e4e05d136f88d88c25f76cd4faff5d1fd84d11581ea"}, + {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6630d8d943644ea62132789940ca97d05fac83f73186eaf0930ffa715fbdab6b"}, + {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:05de0762c1caed4a162b3e305f36cf20a548ff4da0be6766ad5c870704be3660"}, + {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e3a41aad5919613483aad9ebd53336905cab1bd6788afd3995c2a972d89d795"}, + {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2738ba1ee544d6f294278cfb6de2dc1f9a737a780469b5366e662a218f806c3"}, + {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a0d2df4227f645a879010461df2cea6b7e3fb5a97d7eafa210f7fb60345af9e8"}, + {file = "coverage-6.4.3-cp310-cp310-win32.whl", hash = "sha256:73a10939dc345460ca0655356a470dd3de9759919186a82383c87b6eb315faf2"}, + {file = "coverage-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:53c8edd3b83a4ddba3d8c506f1359401e7770b30f2188f15c17a338adf5a14db"}, + {file = "coverage-6.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1eda5cae434282712e40b42aaf590b773382afc3642786ac3ed39053973f61f"}, + {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59fc88bc13e30f25167e807b8cad3c41b7218ef4473a20c86fd98a7968733083"}, + {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75314b00825d70e1e34b07396e23f47ed1d4feedc0122748f9f6bd31a544840"}, + {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52f8b9fcf3c5e427d51bbab1fb92b575a9a9235d516f175b24712bcd4b5be917"}, + {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5a559aab40c716de80c7212295d0dc96bc1b6c719371c20dd18c5187c3155518"}, + {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:306788fd019bb90e9cbb83d3f3c6becad1c048dd432af24f8320cf38ac085684"}, + {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:920a734fe3d311ca01883b4a19aa386c97b82b69fbc023458899cff0a0d621b9"}, + {file = "coverage-6.4.3-cp37-cp37m-win32.whl", hash = "sha256:ab9ef0187d6c62b09dec83a84a3b94f71f9690784c84fd762fb3cf2d2b44c914"}, + {file = "coverage-6.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:39ebd8e120cb77a06ee3d5fc26f9732670d1c397d7cd3acf02f6f62693b89b80"}, + {file = "coverage-6.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc698580216050b5f4a34d2cdd2838b429c53314f1c4835fab7338200a8396f2"}, + {file = "coverage-6.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:877ee5478fd78e100362aed56db47ccc5f23f6e7bb035a8896855f4c3e49bc9b"}, + {file = "coverage-6.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:555a498999c44f5287cc95500486cd0d4f021af9162982cbe504d4cb388f73b5"}, + {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff095a5aac7011fdb51a2c82a8fae9ec5211577f4b764e1e59cfa27ceeb1b59"}, + {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5de1e9335e2569974e20df0ce31493d315a830d7987e71a24a2a335a8d8459d3"}, + {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7856ea39059d75f822ff0df3a51ea6d76307c897048bdec3aad1377e4e9dca20"}, + {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:411fdd9f4203afd93b056c0868c8f9e5e16813e765de962f27e4e5798356a052"}, + {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdf7b83f04a313a21afb1f8730fe4dd09577fefc53bbdfececf78b2006f4268e"}, + {file = 
"coverage-6.4.3-cp38-cp38-win32.whl", hash = "sha256:ab2b1a89d2bc7647622e9eaf06128a5b5451dccf7c242deaa31420b055716481"}, + {file = "coverage-6.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:0e34247274bde982bbc613894d33f9e36358179db2ed231dd101c48dd298e7b0"}, + {file = "coverage-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b104b6b1827d6a22483c469e3983a204bcf9c6bf7544bf90362c4654ebc2edf3"}, + {file = "coverage-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adf1a0d272633b21d645dd6e02e3293429c1141c7d65a58e4cbcd592d53b8e01"}, + {file = "coverage-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9832434a9193fbd716fbe05f9276484e18d26cc4cf850853594bb322807ac3"}, + {file = "coverage-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:923f9084d7e1d31b5f74c92396b05b18921ed01ee5350402b561a79dce3ea48d"}, + {file = "coverage-6.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d64304acf79766e650f7acb81d263a3ea6e2d0d04c5172b7189180ff2c023c"}, + {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fc294de50941d3da66a09dca06e206297709332050973eca17040278cb0918ff"}, + {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a42eaaae772f14a5194f181740a67bfd48e8806394b8c67aa4399e09d0d6b5db"}, + {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4822327b35cb032ff16af3bec27f73985448f08e874146b5b101e0e558b613dd"}, + {file = "coverage-6.4.3-cp39-cp39-win32.whl", hash = "sha256:f217850ac0e046ede611312703423767ca032a7b952b5257efac963942c055de"}, + {file = "coverage-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0a84376e4fd13cebce2c0ef8c2f037929c8307fb94af1e5dbe50272a1c651b5d"}, + {file = "coverage-6.4.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:068d6f2a893af838291b8809c876973d885543411ea460f3e6886ac0ee941732"}, + {file = "coverage-6.4.3.tar.gz", hash = "sha256:ec2ae1f398e5aca655b7084392d23e80efb31f7a660d2eecf569fb9f79b3fb94"}, +] +cryptography = [ + {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884"}, + {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8"}, + {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280"}, + {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3"}, + {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59"}, + {file = "cryptography-37.0.4-cp36-abi3-win32.whl", hash = "sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157"}, + {file = "cryptography-37.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327"}, + {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b"}, + {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282"}, + {file = "cryptography-37.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a"}, + {file = "cryptography-37.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab"}, + {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, +] +cx-Freeze = [ + {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e372b9e72ac0e2207ee65a9d404e2669da1134dc37f5ace9a2a779099d3aa868"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd293382e1ad270dddf5a2707db5dbb8600a1e0b0c9b0da7af9d61326eb1b325"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:feec2f36bce042da6a0d92690bc592b0dcec29218adc2278535cd13b28ec3485"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5aafcc6337856d5921b20f41acdcc8d0fe770388f3a072eb25163f8825f6c5d"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-win32.whl", hash = "sha256:b99cc0b6d6c1ba51bd9fe11dbfae9aabcf089ba779ea86d83d280e2e40f484e7"}, + {file = "cx_Freeze-6.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:f0dfe6acf25eb096faba7d4b4b001bcd0f818e372ea1f05d900665b0ad82b0b9"}, + {file = "cx_Freeze-6.11.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a68e70dcb27b0720b131a35c5fdd096012fe00119a8e51d935f3fb3cd251c39"}, + {file = "cx_Freeze-6.11.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f7bde925042d8843af9b6242a1bf3865dbbae088f3183a89a575124ec2e14a4"}, + {file = "cx_Freeze-6.11.1-cp36-cp36m-win32.whl", hash = "sha256:7698fb82b6f84b3426774b5f3bee770601f26b612306319664a02f1ec5160861"}, + {file = "cx_Freeze-6.11.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:9848c975401b21a98aa896baabfed067c3e981afd5b5b0a8a5eabe5c9f23d3c5"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87dcf5ceb78dc6af910c45238128fda2394b7c430d3fa469e87e1efdeeb5d4cc"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb74d8cc1f8c658986acc19ea6875b985a979421f9bb9c310b43cd2ff5d90c44"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-win32.whl", hash = "sha256:971c0a8356ef0ee09a3097f9c9d5b52cde6d08d1ef80e997eb4a6e22fe0eff2f"}, + {file = "cx_Freeze-6.11.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7c1cb44379b2093cbdde77e302a376f29aa61999c73be6e8a559463db84b85c4"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc90d6dbde66e8ddfe6b26f63fb2ea7d6d0e4568205f40660a63b8b200dcabcf"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f56f618a23d86bdcfff22b29ec993117effd32a401060013105517301c0bf32"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-win32.whl", hash = "sha256:4edfb5d65afb11eb9f0326d40d15445366481585705b3096f2cd090e30a36247"}, + {file = "cx_Freeze-6.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:cfb5a8032bf424c04814c9426425fa1db4cf8c280da948969eead9f616c0fd92"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0a3e32980269cfabc2e814978bfdf4382fe3cbc9ac64f9f1bdb1cd2ddf3a40d0"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:798bb7ca037c3c885efd3eda6756c84c7927c712b730b22a7f256440faa36d38"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5aa1759098ca4853200a79138b626a9caa2ccf829d662b28c82ec7e71ea97cde"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7962680ae04ee3afda1012026b5394a534e2526b68681d591158b7d8bc733bcf"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-win32.whl", hash = "sha256:da4f82fe27e71571c0ab9d700b5e6c6c631ae39133d9b6d7157939f1e9f37312"}, + {file = "cx_Freeze-6.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:aaf399b6ed5d54b7271980ae354605620bedcd52d722f57ad527bd989c56a875"}, + {file = "cx_Freeze-6.11.1.tar.gz", hash = "sha256:8f3a30c9e3394f290655e346d3b460910656b30ac6347a87499bb5ad365c6e7c"}, ] cx-logging = [ {file = "cx_Logging-3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9fcd297e5c51470521c47eff0f86ba844aeca6be97e13c3e2114ebdf03fa3c96"}, @@ -1910,8 +2142,14 @@ cx-logging = [ {file = "cx_Logging-3.0-cp39-cp39-win_amd64.whl", hash = "sha256:302e9c4f65a936c288a4fa59a90e7e142d9ef994aa29676731acafdcccdbb3f5"}, {file = "cx_Logging-3.0.tar.gz", hash = "sha256:ba8a7465facf7b98d8f494030fb481a2e8aeee29dc191e10383bb54ed42bdb34"}, ] -deprecated = [] -dill = [] +deprecated = [ + {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, + {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, +] +dill = [ + {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"}, + {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, +] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, @@ -1920,22 +2158,93 @@ docutils 
= [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] -dropbox = [] +dropbox = [ + {file = "dropbox-11.33.0-py2-none-any.whl", hash = "sha256:3ee9024631b80f18938556d5e27cbdede26d6dc0b73aeaa90fc075ce96c950b1"}, + {file = "dropbox-11.33.0-py3-none-any.whl", hash = "sha256:1a0cbc22b0d1dae96e18b37e3520e5c289de7eb1303935db40e4dbfc9bb9e59b"}, + {file = "dropbox-11.33.0.tar.gz", hash = "sha256:7c638b521169a460de38b9eaeb204fe918874f72d6c3eed005d064b6f37da9c1"}, +] enlighten = [ {file = "enlighten-1.10.2-py2.py3-none-any.whl", hash = "sha256:b237fe562b320bf9f1d4bb76d0c98e0daf914372a76ab87c35cd02f57aa9d8c1"}, {file = "enlighten-1.10.2.tar.gz", hash = "sha256:7a5b83cd0f4d095e59d80c648ebb5f7ffca0cd8bcf7ae6639828ee1ad000632a"}, ] -evdev = [] +evdev = [ + {file = "evdev-1.6.0.tar.gz", hash = "sha256:ecfa01b5c84f7e8c6ced3367ac95288f43cd84efbfd7dd7d0cdbfc0d18c87a6a"}, +] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] -frozenlist = [] -ftrack-python-api = [] +frozenlist = [ + {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5f271c93f001748fc26ddea409241312a75e13466b06c94798d1a341cf0e6989"}, + {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c6ef8014b842f01f5d2b55315f1af5cbfde284eb184075c189fd657c2fd8204"}, + {file = "frozenlist-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:219a9676e2eae91cb5cc695a78b4cb43d8123e4160441d2b6ce8d2c70c60e2f3"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b47d64cdd973aede3dd71a9364742c542587db214e63b7529fbb487ed67cddd9"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2af6f7a4e93f5d08ee3f9152bce41a6015b5cf87546cb63872cc19b45476e98a"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a718b427ff781c4f4e975525edb092ee2cdef6a9e7bc49e15063b088961806f8"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c56c299602c70bc1bb5d1e75f7d8c007ca40c9d7aebaf6e4ba52925d88ef826d"}, + {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:717470bfafbb9d9be624da7780c4296aa7935294bd43a075139c3d55659038ca"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:31b44f1feb3630146cffe56344704b730c33e042ffc78d21f2125a6a91168131"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c3b31180b82c519b8926e629bf9f19952c743e089c41380ddca5db556817b221"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d82bed73544e91fb081ab93e3725e45dd8515c675c0e9926b4e1f420a93a6ab9"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49459f193324fbd6413e8e03bd65789e5198a9fa3095e03f3620dee2f2dabff2"}, + {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:94e680aeedc7fd3b892b6fa8395b7b7cc4b344046c065ed4e7a1e390084e8cb5"}, + {file = 
"frozenlist-1.3.1-cp310-cp310-win32.whl", hash = "sha256:fabb953ab913dadc1ff9dcc3a7a7d3dc6a92efab3a0373989b8063347f8705be"}, + {file = "frozenlist-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:eee0c5ecb58296580fc495ac99b003f64f82a74f9576a244d04978a7e97166db"}, + {file = "frozenlist-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0bc75692fb3770cf2b5856a6c2c9de967ca744863c5e89595df64e252e4b3944"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086ca1ac0a40e722d6833d4ce74f5bf1aba2c77cbfdc0cd83722ffea6da52a04"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b51eb355e7f813bcda00276b0114c4172872dc5fb30e3fea059b9367c18fbcb"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74140933d45271c1a1283f708c35187f94e1256079b3c43f0c2267f9db5845ff"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee4c5120ddf7d4dd1eaf079af3af7102b56d919fa13ad55600a4e0ebe532779b"}, + {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97d9e00f3ac7c18e685320601f91468ec06c58acc185d18bb8e511f196c8d4b2"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e19add867cebfb249b4e7beac382d33215d6d54476bb6be46b01f8cafb4878b"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a027f8f723d07c3f21963caa7d585dcc9b089335565dabe9c814b5f70c52705a"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:61d7857950a3139bce035ad0b0945f839532987dfb4c06cfe160254f4d19df03"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:53b2b45052e7149ee8b96067793db8ecc1ae1111f2f96fe1f88ea5ad5fd92d10"}, + {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bbb1a71b1784e68870800b1bc9f3313918edc63dbb8f29fbd2e767ce5821696c"}, + {file = "frozenlist-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:ab6fa8c7871877810e1b4e9392c187a60611fbf0226a9e0b11b7b92f5ac72792"}, + {file = "frozenlist-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89139662cc4e65a4813f4babb9ca9544e42bddb823d2ec434e18dad582543bc"}, + {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4c0c99e31491a1d92cde8648f2e7ccad0e9abb181f6ac3ddb9fc48b63301808e"}, + {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:61e8cb51fba9f1f33887e22488bad1e28dd8325b72425f04517a4d285a04c519"}, + {file = "frozenlist-1.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc2f3e368ee5242a2cbe28323a866656006382872c40869b49b265add546703f"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58fb94a01414cddcdc6839807db77ae8057d02ddafc94a42faee6004e46c9ba8"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:022178b277cb9277d7d3b3f2762d294f15e85cd2534047e68a118c2bb0058f3e"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:572ce381e9fe027ad5e055f143763637dcbac2542cfe27f1d688846baeef5170"}, + {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19127f8dcbc157ccb14c30e6f00392f372ddb64a6ffa7106b26ff2196477ee9f"}, + {file = 
"frozenlist-1.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42719a8bd3792744c9b523674b752091a7962d0d2d117f0b417a3eba97d1164b"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2743bb63095ef306041c8f8ea22bd6e4d91adabf41887b1ad7886c4c1eb43d5f"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fa47319a10e0a076709644a0efbcaab9e91902c8bd8ef74c6adb19d320f69b83"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52137f0aea43e1993264a5180c467a08a3e372ca9d378244c2d86133f948b26b"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:f5abc8b4d0c5b556ed8cd41490b606fe99293175a82b98e652c3f2711b452988"}, + {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1e1cf7bc8cbbe6ce3881863671bac258b7d6bfc3706c600008925fb799a256e2"}, + {file = "frozenlist-1.3.1-cp38-cp38-win32.whl", hash = "sha256:0dde791b9b97f189874d654c55c24bf7b6782343e14909c84beebd28b7217845"}, + {file = "frozenlist-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:9494122bf39da6422b0972c4579e248867b6b1b50c9b05df7e04a3f30b9a413d"}, + {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31bf9539284f39ff9398deabf5561c2b0da5bb475590b4e13dd8b268d7a3c5c1"}, + {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e0c8c803f2f8db7217898d11657cb6042b9b0553a997c4a0601f48a691480fab"}, + {file = "frozenlist-1.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da5ba7b59d954f1f214d352308d1d86994d713b13edd4b24a556bcc43d2ddbc3"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e6b2b456f21fc93ce1aff2b9728049f1464428ee2c9752a4b4f61e98c4db96"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526d5f20e954d103b1d47232e3839f3453c02077b74203e43407b962ab131e7b"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b499c6abe62a7a8d023e2c4b2834fce78a6115856ae95522f2f974139814538c"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab386503f53bbbc64d1ad4b6865bf001414930841a870fc97f1546d4d133f141"}, + {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f63c308f82a7954bf8263a6e6de0adc67c48a8b484fab18ff87f349af356efd"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:12607804084d2244a7bd4685c9d0dca5df17a6a926d4f1967aa7978b1028f89f"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:da1cdfa96425cbe51f8afa43e392366ed0b36ce398f08b60de6b97e3ed4affef"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f810e764617b0748b49a731ffaa525d9bb36ff38332411704c2400125af859a6"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:35c3d79b81908579beb1fb4e7fcd802b7b4921f1b66055af2578ff7734711cfa"}, + {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c92deb5d9acce226a501b77307b3b60b264ca21862bd7d3e0c1f3594022f01bc"}, + {file = "frozenlist-1.3.1-cp39-cp39-win32.whl", hash = "sha256:5e77a8bd41e54b05e4fb2708dc6ce28ee70325f8c6f50f3df86a44ecb1d7a19b"}, + {file = "frozenlist-1.3.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:625d8472c67f2d96f9a4302a947f92a7adbc1e20bedb6aff8dbc8ff039ca6189"}, + {file = "frozenlist-1.3.1.tar.gz", hash = "sha256:3a735e4211a04ccfa3f4833547acdf5d2f863bfeb01cfd3edaffbc251f15cec8"}, +] +ftrack-python-api = [ + {file = "ftrack-python-api-2.3.3.tar.gz", hash = "sha256:358f37e5b1c5635eab107c19e27a0c890d512877f78af35b1ac416e90c037295"}, + {file = "ftrack_python_api-2.3.3-py2.py3-none-any.whl", hash = "sha256:82834c4d5def5557a2ea547a7e6f6ba84d3129e8f90457d8bbd85b287a2c39f6"}, +] future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] -gazu = [] +gazu = [ + {file = "gazu-0.8.30-py2.py3-none-any.whl", hash = "sha256:d692927a11314151bc33e7d67edee634053f70a3b09e4500dfc6626bfea18753"}, +] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, @@ -1944,24 +2253,42 @@ gitpython = [ {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] -google-api-core = [] +google-api-core = [ + {file = "google-api-core-2.8.2.tar.gz", hash = "sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc"}, + {file = "google_api_core-2.8.2-py3-none-any.whl", hash = "sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50"}, +] google-api-python-client = [ {file = "google-api-python-client-1.12.11.tar.gz", hash = "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9"}, {file = "google_api_python_client-1.12.11-py2.py3-none-any.whl", hash = "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b"}, ] -google-auth = [] +google-auth = [ + {file = "google-auth-2.10.0.tar.gz", hash = "sha256:7904dbd44b745c7323fef29565adee2fe7ff48473e2d94443aced40b0404a395"}, + {file = "google_auth-2.10.0-py2.py3-none-any.whl", hash = "sha256:1deba4a54f95ef67b4139eaf5c20eaa7047215eec9f6a2344599b8596db8863b"}, +] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] -googleapis-common-protos = [] +googleapis-common-protos = [ + {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, + {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, +] httplib2 = [ {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, ] -idna = [] -imagesize = [] -importlib-metadata = [] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +imagesize = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = 
"sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, + {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1974,12 +2301,18 @@ jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] -jeepney = [] +jeepney = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] -jinxed = [] +jinxed = [ + {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, + {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, +] jsonschema = [ {file = "jsonschema-2.6.0-py2.py3-none-any.whl", hash = "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08"}, {file = "jsonschema-2.6.0.tar.gz", hash = "sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02"}, @@ -2027,16 +2360,58 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] +lief = [ + {file = "lief-0.12.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:4fbbc9d520de87ac22210c62d22a9b088e5460f9a028741311e6f68ef8877ddd"}, + {file = "lief-0.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443e4494df448ea1a021976258c7a6aca27d81b0612783fa3a84fab196fb9fcb"}, + {file = "lief-0.12.1-cp310-cp310-win32.whl", hash = "sha256:1c4019dddf03a5185462fb5ea04327cee08d40f46777b02f0773c7dc294552ea"}, + {file = "lief-0.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:d7e09968f99ddf1e3983d3bcc16c62d1b6635a345fee8d8139f82b31bad457d6"}, + {file = "lief-0.12.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:9fa6269ec4fa3f874b807fbba3c48a46af30df2497723f6966080e3eb630cb26"}, + {file = "lief-0.12.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b05cac5fa491e01e1819573bbbbcaea0a4229f4aa3a2edb231b5695ddaf2d"}, + {file = "lief-0.12.1-cp36-cp36m-win32.whl", hash = "sha256:f1292bff96579c18e01e20b7a14043052379fe6e9a476c1d6d88aca43e5f9ac7"}, + {file = "lief-0.12.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:dab63876113bd573d64ce043f50153f6e2810e5e78256397aa0fe1fedf82ab84"}, + {file = "lief-0.12.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:5771f5226b62c885a7aa30c1b98040d39229a1dab889d03155e5538e57d0054b"}, + {file = "lief-0.12.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:8ec307a762505076a6d31566225a231c44ec7063c0e7d751ac4654c674454c47"}, + {file = "lief-0.12.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a755f6088d3b2041e4402adf917ac87e5ad9d1c5278973f48a29a5631fe393eb"}, + {file = "lief-0.12.1-cp37-cp37m-win32.whl", hash = "sha256:5d746f7eb6d3bf35a0230c7184aaaf434cb1ea89d7e7c8e8fe14a49cf2bb17a0"}, + {file = "lief-0.12.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2d3ab7212da696bcbe5ca9dd78ceaa32dfb8a0e85e18001793b4441ef4624561"}, + {file = "lief-0.12.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:4360b0acd525ba77777cc38f0e5128c90c93cc4e91ab566ef3aa45b7f8a8c57e"}, + {file = "lief-0.12.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5e82e466d36cbabb28cc1a787b554d2feae5ab55c39cab58ef64fb6513bad92a"}, + {file = "lief-0.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa0022a3bf70ef46335639e61b946cc2d9cf012d60e263c215e3e64b1ce38b4"}, + {file = "lief-0.12.1-cp38-cp38-win32.whl", hash = "sha256:d29f91d9f64f67d3ada5b7e0e48ab084d825fb4601d32d9fecdd2bdf23cdad23"}, + {file = "lief-0.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:7dea6b3f17d362f93165379c46dadb012c73b1f751c8ceac256e5f43842cd86d"}, + {file = "lief-0.12.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:44012da4c32c670a97bb8a055a4ff16168cfaa757d03986f319aa3329a43e343"}, + {file = "lief-0.12.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e1d23997b0a71d34e766ff183be07854c6f698fd3d6aa44bf30b6b7f4f77ef55"}, + {file = "lief-0.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b845eca79c772041efb38b50cfaf951e24bc047ec462450b7e54e75b7e2bee0d"}, + {file = "lief-0.12.1-cp39-cp39-win32.whl", hash = "sha256:0df84ac2df20b14db12e69442d39b0e8cd89428ba3b131995e0570bcd3725460"}, + {file = "lief-0.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:960a2da9f28c8d5dba753bb9ab77e26b3c6ff9b9658918be95650ceb8ee91e68"}, + {file = "lief-0.12.1.zip", hash = "sha256:4ff4ccfae2e1ee4ccba2b5556027dbb56282b8a973c5835c5b597e8b7b664416"}, +] log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = 
"MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2045,14 +2420,27 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2062,6 +2450,12 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2131,22 +2525,109 @@ multidict = [ {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, ] -opentimelineio = [] +opentimelineio = [ + {file = "OpenTimelineIO-0.14.0.dev1-cp310-cp310-linux_x86_64.whl", hash = "sha256:112c27ad419a79c88cd4ebac96278ab8f446fda4c6e1a70c871f2b24b3d003ef"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e2416ca76805a07ecbcdda65e5a31ce447e04e2db7082d72582740cbd8a16d7"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8806dc240496b72e16a0fddacb0b2e825d19656d80689098e6c5bd6a805bc84"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp36-cp36m-win_amd64.whl", hash = "sha256:0aa54488ca50b53ac247610cef23fb63619dd1993016c0cd4069e54526d5905c"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-linux_x86_64.whl", hash = "sha256:8b11287eb733ad1c7fc53d4af3e3f926c396add6c3a3c1417b9c2b001f7ef4ba"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:2cba2ce567fc06f042365393dbe8e99dc4c1361999fb7ddc03d4b8b0d5ddb894"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:3d96da307c1969c309974a6734c7f3e39925236c845c8289f25d4d7d00be3f0c"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = 
"sha256:968cb0c5e6e7d697037b1cd4f7707521995a32fc51664139ed15004b93ab8106"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dae64aa418193ca094854a55efa8bcc5a5c2855b3679509987e8b48610d31"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-win_amd64.whl", hash = "sha256:85dfae42f5a992ef85d0015f33f999751d846a484ef907e8834407d545a7ee6a"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aaac346cb758de719f88ac175d3948409ded39c5eed0844068402f70f3e90b6"}, + {file = "OpenTimelineIO-0.14.0.dev1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a09f6dd199218cf69faf42a8b21f5be2cd01764e57dbcc8456b3ced564eb110f"}, + {file = "OpenTimelineIO-0.14.0.dev1.tar.gz", hash = "sha256:d29eafd5188c3ad6c7b6d2095bf69984d590c331b701d3bbc644d7abd5f08606"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -paramiko = [] +paramiko = [ + {file = "paramiko-2.11.0-py2.py3-none-any.whl", hash = "sha256:655f25dc8baf763277b933dfcea101d636581df8d6b9774d1fb653426b72c270"}, + {file = "paramiko-2.11.0.tar.gz", hash = "sha256:003e6bee7c034c21fbb051bf83dc0a9ee4106204dd3c53054c71452cc4ec3938"}, +] parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, ] +patchelf = [ + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:08e5e30a9415a8628de47726fbf15bfcd89be35df51c8a0a12372aebd0c5b4f6"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:4ce9d08119816bc4316c8ecc5f33da42384934fc0fc9cfbdded53a4930705466"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_s390x.manylinux2014_s390x.musllinux_1_1_s390x.whl", hash = "sha256:ae19b0f91aabc9af2608a4ca0395533f1df9122e6abc11ef2c8db6e4db0f98c2"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.musllinux_1_1_i686.whl", hash = "sha256:f3f87aee44d1d1b2209e38c4227b0316bb03538df68d20b3d96205aa87868d95"}, + {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:52e48c08110f2988a9761a5a383f7ae35b1e8e06a140e320d18386d3510697ed"}, + {file = "patchelf-0.15.0.0.tar.gz", hash = "sha256:0f8dcf0df0ba919ce37e8aef67a08bde5326897098451df94ab3a5eedc9e08d9"}, +] pathlib2 = [ {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] -pillow = [] -platformdirs = [] +pillow = [ + {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, + {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"}, + {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"}, + {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, + {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, + {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8"}, + {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"}, + {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"}, + {file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"}, + {file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"}, + {file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"}, + {file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"}, + {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = 
"sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"}, + {file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"}, + {file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"}, + {file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"}, + {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"}, + {file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"}, + {file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"}, + {file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"}, + {file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"}, + {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"}, + {file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"}, + {file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"}, + {file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"}, + {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"}, + {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, + {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, @@ -2159,7 +2640,22 @@ prefixed = [ {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] -protobuf = [] +protobuf = [ + {file = "protobuf-4.21.5-cp310-abi3-win32.whl", hash = "sha256:5310cbe761e87f0c1decce019d23f2101521d4dfff46034f8a12a53546036ec7"}, + {file = "protobuf-4.21.5-cp310-abi3-win_amd64.whl", hash = "sha256:e5c5a2886ae48d22a9d32fbb9b6636a089af3cd26b706750258ce1ca96cc0116"}, + {file = "protobuf-4.21.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ee04f5823ed98bb9a8c3b1dc503c49515e0172650875c3f76e225b223793a1f2"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b04484d6f42f48c57dd2737a72692f4c6987529cdd148fb5b8e5f616862a2e37"}, + {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e0b272217aad8971763960238c1a1e6a65d50ef7824e23300da97569a251c55"}, + {file = "protobuf-4.21.5-cp37-cp37m-win32.whl", hash = "sha256:5eb0724615e90075f1d763983e708e1cef08e66b1891d8b8b6c33bc3b2f1a02b"}, + {file = "protobuf-4.21.5-cp37-cp37m-win_amd64.whl", hash = "sha256:011c0f267e85f5d73750b6c25f0155d5db1e9443cd3590ab669a6221dd8fcdb0"}, + {file = "protobuf-4.21.5-cp38-cp38-win32.whl", hash = "sha256:7b6f22463e2d1053d03058b7b4ceca6e4ed4c14f8c286c32824df751137bf8e7"}, + {file = "protobuf-4.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:b52e7a522911a40445a5f588bd5b5e584291bfc5545e09b7060685e4b2ff814f"}, + {file = "protobuf-4.21.5-cp39-cp39-win32.whl", hash = "sha256:a7faa62b183d6a928e3daffd06af843b4287d16ef6e40f331575ecd236a7974d"}, + {file = 
"protobuf-4.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:5e0ce02418ef03d7657a420ae8fd6fec4995ac713a3cb09164e95f694dbcf085"}, + {file = "protobuf-4.21.5-py2.py3-none-any.whl", hash = "sha256:bf711b451212dc5b0fa45ae7dada07d8e71a4b0ff0bc8e4783ee145f47ac4f82"}, + {file = "protobuf-4.21.5-py3-none-any.whl", hash = "sha256:3ec6f5b37935406bb9df9b277e79f8ed81d697146e07ef2ba8a5a272fb24b2c9"}, + {file = "protobuf-4.21.5.tar.gz", hash = "sha256:eb1106e87e095628e96884a877a51cdb90087106ee693925ec0a300468a9be3a"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -2218,8 +2714,14 @@ pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] -pygments = [] -pylint = [] +pygments = [ + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, +] +pylint = [ + {file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"}, + {file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"}, +] pymongo = [ {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, @@ -2346,10 +2848,42 @@ pynput = [ {file = "pynput-1.7.6-py3.9.egg", hash = "sha256:264429fbe676e98e9050ad26a7017453bdd08768adb25cafb918347cf9f1eb4a"}, {file = "pynput-1.7.6.tar.gz", hash = "sha256:3a5726546da54116b687785d38b1db56997ce1d28e53e8d22fc656d8b92e533c"}, ] -pyobjc-core = [] -pyobjc-framework-applicationservices = [] -pyobjc-framework-cocoa = [] -pyobjc-framework-quartz = [] +pyobjc-core = [ + {file = "pyobjc-core-8.5.tar.gz", hash = "sha256:704c275439856c0d1287469f0d589a7d808d48b754a93d9ce5415d4eaf06d576"}, + {file = "pyobjc_core-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0c234143b48334443f5adcf26e668945a6d47bc1fa6223e80918c6c735a029d9"}, + {file = "pyobjc_core-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1486ee533f0d76f666804ce89723ada4db56bfde55e56151ba512d3f849857f8"}, + {file = "pyobjc_core-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:412de06dfa728301c04b3e46fd7453320a8ae8b862e85236e547cd797a73b490"}, + {file = "pyobjc_core-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b3e09cccb1be574a82cc9f929ae27fc4283eccc75496cb5d51534caa6bb83a3"}, + {file = "pyobjc_core-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:eeafe21f879666ab7f57efcc6b007c9f5f8733d367b7e380c925203ed83f000d"}, + {file = "pyobjc_core-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c0071686976d7ea8c14690950e504a13cb22b4ebb2bc7b5ec47c1c1c0f6eff41"}, +] +pyobjc-framework-applicationservices = [ + {file = "pyobjc-framework-ApplicationServices-8.5.tar.gz", hash = "sha256:fa3015ef8e3add90af3447d7fdcc7f8dd083cc2a1d58f99a569480a2df10d2b1"}, + {file = 
"pyobjc_framework_ApplicationServices-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:436b16ebe448a829a8312e10208eec81a2adcae1fff674dbcc3262e1bd76e0ca"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:408958d14aa7fcf46f2163754c211078bc63be1368934d86188202914dce077d"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1d6cd4ce192859a22e208da4d7177a1c3ceb1ef2f64c339fd881102b1210cadd"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0251d092adb1d2d116fd9f147ceef0e53b158a46c21245131c40b9d7b786d0db"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:9742e69fe6d4545d0e02b0ad0a7a2432bc9944569ee07d6e90ffa5ef614df9f7"}, + {file = "pyobjc_framework_ApplicationServices-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16f5677c14ea903c6aaca1dd121521825c39e816cae696d6ae32c0b287252ab2"}, +] +pyobjc-framework-cocoa = [ + {file = "pyobjc-framework-Cocoa-8.5.tar.gz", hash = "sha256:569bd3a020f64b536fb2d1c085b37553e50558c9f907e08b73ffc16ae68e1861"}, + {file = "pyobjc_framework_Cocoa-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7a7c160416696bf6035dfcdf0e603aaa52858d6afcddfcc5ab41733619ac2529"}, + {file = "pyobjc_framework_Cocoa-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6ceba444282030be8596b812260e8d28b671254a51052ad778d32da6e17db847"}, + {file = "pyobjc_framework_Cocoa-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f46b2b161b8dd40c7b9e00bc69636c3e6480b2704a69aee22ee0154befbe163a"}, + {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b31d425aee8698cbf62b187338f5ca59427fa4dca2153a73866f7cb410713119"}, + {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:898359ac1f76eedec8aa156847682378a8950824421c40edb89391286e607dc4"}, + {file = "pyobjc_framework_Cocoa-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:baa2947f76b119a3360973d74d57d6dada87ac527bab9a88f31596af392f123c"}, +] +pyobjc-framework-quartz = [ + {file = "pyobjc-framework-Quartz-8.5.tar.gz", hash = "sha256:d2bc5467a792ddc04814f12a1e9c2fcaf699a1c3ad3d4264cfdce6b9c7b10624"}, + {file = "pyobjc_framework_Quartz-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9f0fb663f7872c9de94169031ac42b91ad01bd4cad49a9f1a0164be8f028426"}, + {file = "pyobjc_framework_Quartz-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:567eec91287cfe9a1b6433717192c585935de8f3daa28d82ce72fdd6c7ac00f6"}, + {file = "pyobjc_framework_Quartz-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f910ab41a712ffc7a8c3e3716a2d6f39ea4419004b26a2fd2d2f740ff5c262c"}, + {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29d07066781628278bf0e5278abcfc96ef6724c66c5629a0b4c214d319a82e55"}, + {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:72abcde1a3d72be11f2c881c9b9872044c8f2de86d2047b67fe771713638b107"}, + {file = "pyobjc_framework_Quartz-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8809b9a2df2f461697bdb45b6d1b5a4f881f88f09450e3990858e64e3e26c530"}, +] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, @@ -2373,8 +2907,14 @@ 
python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-engineio = [] -python-socketio = [] +python-engineio = [ + {file = "python-engineio-3.14.2.tar.gz", hash = "sha256:eab4553f2804c1ce97054c8b22cf0d5a9ab23128075248b97e1a5b2f29553085"}, + {file = "python_engineio-3.14.2-py2.py3-none-any.whl", hash = "sha256:5a9e6086d192463b04a1428ff1f85b6ba631bbb19d453b144ffc04f530542b84"}, +] +python-socketio = [ + {file = "python-socketio-4.6.1.tar.gz", hash = "sha256:cd1f5aa492c1eb2be77838e837a495f117e17f686029ebc03d62c09e33f4fa10"}, + {file = "python_socketio-4.6.1-py2.py3-none-any.whl", hash = "sha256:5a21da53fdbdc6bb6c8071f40e13d100e0b279ad997681c2492478e06f370523"}, +] python-xlib = [ {file = "python-xlib-0.31.tar.gz", hash = "sha256:74d83a081f532bc07f6d7afcd6416ec38403d68f68b9b9dc9e1f28fbf2d799e9"}, {file = "python_xlib-0.31-py2.py3-none-any.whl", hash = "sha256:1ec6ce0de73d9e6592ead666779a5732b384e5b8fb1f1886bd0a81cafa477759"}, @@ -2382,7 +2922,10 @@ python-xlib = [ python3-xlib = [ {file = "python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, ] -pytz = [] +pytz = [ + {file = "pytz-2022.2-py2.py3-none-any.whl", hash = "sha256:d9b245e63af49c4e51afdec5402f56b99c0cb483a84a12bb8b7db980386baade"}, + {file = "pytz-2022.2.tar.gz", hash = "sha256:bc824559e43e8ab983426a49525079d186b25372ff63aa3430ccd527d95edc3a"}, +] pywin32 = [ {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"}, {file = "pywin32-301-cp35-cp35m-win_amd64.whl", hash = "sha256:9635df6998a70282bd36e7ac2a5cef9ead1627b0a63b17c731312c7a0daebb72"}, @@ -2399,7 +2942,10 @@ pywin32-ctypes = [ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, ] -"qt.py" = [] +"qt.py" = [ + {file = "Qt.py-1.3.7-py2.py3-none-any.whl", hash = "sha256:150099d1c6f64c9621a2c9d79d45102ec781c30ee30ee69fc082c6e9be7324fe"}, + {file = "Qt.py-1.3.7.tar.gz", hash = "sha256:803c7bdf4d6230f9a466be19d55934a173eabb61406d21cb91e80c2a3f773b1f"}, +] qtawesome = [ {file = "QtAwesome-0.7.3-py2.py3-none-any.whl", hash = "sha256:ddf4530b4af71cec13b24b88a4cdb56ec85b1e44c43c42d0698804c7137b09b0"}, {file = "QtAwesome-0.7.3.tar.gz", hash = "sha256:b98b9038d19190e83ab26d91c4d8fc3a36591ee2bc7f5016d4438b8240d097bd"}, @@ -2412,19 +2958,35 @@ recommonmark = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, ] -requests = [] -rsa = [] -secretstorage = [] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +rsa = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = 
"sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] +secretstorage = [ + {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"}, + {file = "SecretStorage-3.3.2.tar.gz", hash = "sha256:0a8eb9645b320881c222e827c26f4cfcf55363e8b374a021981ef886657a912f"}, +] semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, ] +setuptools = [ + {file = "setuptools-60.10.0-py3-none-any.whl", hash = "sha256:782ef48d58982ddb49920c11a0c5c9c0b02e7d7d1c2ad0aa44e1a1e133051c96"}, + {file = "setuptools-60.10.0.tar.gz", hash = "sha256:6599055eeb23bfef457d5605d33a4d68804266e6cb430b0fb12417c5efeae36c"}, +] shotgun-api3 = [] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -slack-sdk = [] +slack-sdk = [ + {file = "slack_sdk-3.18.1-py2.py3-none-any.whl", hash = "sha256:63ce5e6253a31873d6c921c9feaa842a93a2f56e6e009cb7daf406f4bc4df798"}, + {file = "slack_sdk-3.18.1.tar.gz", hash = "sha256:a25d3d2bf0bf605d54d764d4a463fe7c0659ee24c13d75653e2bec247bd5998b"}, +] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, @@ -2433,9 +2995,18 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -speedcopy = [] -sphinx = [] -sphinx-qt-documentation = [] +speedcopy = [ + {file = "speedcopy-2.1.4-py3-none-any.whl", hash = "sha256:e09eb1de67ae0e0b51d5b99a28882009d565a37a3cb3c6bae121e3a5d3cccb17"}, + {file = "speedcopy-2.1.4.tar.gz", hash = "sha256:eff007a97e49ec1934df4fa8074f4bd1cf4a3b14c5499d914988785cff0c199a"}, +] +sphinx = [ + {file = "Sphinx-5.0.1-py3-none-any.whl", hash = "sha256:36aa2a3c2f6d5230be94585bc5d74badd5f9ed8f3388b8eedc1726fe45b1ad30"}, + {file = "Sphinx-5.0.1.tar.gz", hash = "sha256:f4da1187785a5bc7312cc271b0e867a93946c319d106363e102936a3d9857306"}, +] +sphinx-qt-documentation = [ + {file = "sphinx_qt_documentation-0.4-py3-none-any.whl", hash = "sha256:fa131093f75cd1bd48699cd132e18e4d46ba9eaadc070e6026867cea75ecdb7b"}, + {file = "sphinx_qt_documentation-0.4.tar.gz", hash = "sha256:f43ba17baa93e353fb94045027fb67f9d935ed158ce8662de93f08b88eec6774"}, +] sphinx-rtd-theme = [ {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, @@ -2484,13 +3055,44 @@ tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -typed-ast = [] -typing-extensions = [] +typed-ast = [ + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + 
{file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] +typing-extensions = [ + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, +] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] -urllib3 = [] +urllib3 = [ + {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"}, + {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, +] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, @@ -2499,10 +3101,142 @@ websocket-client = [ {file = "websocket-client-0.59.0.tar.gz", hash = "sha256:d376bd60eace9d437ab6d7ee16f4ab4e821c9dae591e1b783c58ebd8aaf80c5c"}, {file = "websocket_client-0.59.0-py2.py3-none-any.whl", hash = "sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32"}, ] -wrapt = [] +wheel = [ + {file = "wheel-0.37.1-py2.py3-none-any.whl", hash = "sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a"}, + {file = "wheel-0.37.1.tar.gz", hash = "sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4"}, +] +wrapt = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = 
"wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = 
"wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] wsrpc-aiohttp = [ {file = "wsrpc-aiohttp-3.2.0.tar.gz", hash = "sha256:f467abc51bcdc760fc5aeb7041abdeef46eeca3928dc43dd6e7fa7a533563818"}, {file = "wsrpc_aiohttp-3.2.0-py3-none-any.whl", hash = "sha256:fa9b0bf5cb056898cb5c9f64cbc5eacb8a5dd18ab1b7f0cd4a2208b4a7fde282"}, ] -yarl = [] -zipp = [] +yarl = [ + {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"}, + {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"}, + {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"}, + {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"}, + {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"}, + {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"}, + {file = 
"yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"}, + {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"}, + {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"}, + {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"}, + {file = "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"}, + {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"}, + {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, + {file = "yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"}, + {file = 
"yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"}, + {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"}, + {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"}, + {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"}, + {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"}, + {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"}, + {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"}, + {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"}, + {file = "yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"}, + {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"}, +] +zipp = [ + {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, + {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, +] diff --git a/pyproject.toml b/pyproject.toml index 0deb4f465b..2a0606a10c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ 
-70,12 +70,13 @@ requests = "^2.25.1" pysftp = "^0.2.9" dropbox = "^11.20.0" aiohttp-middlewares = "^2.0.0" +cx-Freeze = "6.11.1" [tool.poetry.dev-dependencies] flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "~6.9" +cx_freeze = "^6.11.1" GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" From 4bdd18cb817bbc58c1143e0e02442a9346ce9a1e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 11:50:14 +0200 Subject: [PATCH 0905/2550] Use DeadlineKeyValueVar for EnvironmentKeyValue on Job Info - To improve readability of code that sets the values --- .../deadline/abstract_submit_deadline.py | 58 +++++++++++++++---- .../publish/submit_aftereffects_deadline.py | 11 ++-- .../publish/submit_harmony_deadline.py | 10 ++-- .../plugins/publish/submit_maya_deadline.py | 7 +-- 4 files changed, 59 insertions(+), 27 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 35b114da95..beb1cd0fae 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -67,6 +67,43 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) +class DeadlineKeyValueVar(dict): + """ + + Serializes dictionary key values as "{key}={value}" like Deadline uses + for EnvironmentKeyValue. + + As an example: + EnvironmentKeyValue0="A_KEY=VALUE_A" + EnvironmentKeyValue1="OTHER_KEY=VALUE_B" + + The keys are serialized in alphabetical order (sorted). + + Example: + >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") + >>> var["my_var"] = "hello" + >>> var["my_other_var"] = "hello2" + >>> var.serialize() + + + """ + def __init__(self, key): + super(DeadlineKeyValueVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): "{}={}".format(var_key, var_value) + for index, (var_key, var_value) in enumerate(sorted(self.items())) + } + + class DeadlineIndexedVar(dict): """ @@ -80,15 +117,9 @@ class DeadlineIndexedVar(dict): """ def __init__(self, key): + super(DeadlineIndexedVar, self).__init__() self.__key = key - def next_available_index(self): - # Add as first unused entry - i = 0 - while i in self.keys(): - i += 1 - return i - def serialize(self): key = self.__key @@ -100,6 +131,13 @@ class DeadlineIndexedVar(dict): key.format(index): value for index, value in sorted(self.items()) } + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + def update(self, data): # Force the integer key check for key, value in data.items(): @@ -271,7 +309,7 @@ class DeadlineJobInfo(object): # Environment # ---------------------------------------------- - EnvironmentKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, "EnvironmentKeyValue")) IncludeEnvironment = attr.ib(default=None) # Default: false @@ -281,7 +319,7 @@ class DeadlineJobInfo(object): # Job Extra Info # ---------------------------------------------- ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) - ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, "ExtraInfoKeyValue")) # Task Extra Info Names @@ -326,7 +364,7 @@ class DeadlineJobInfo(object): """ def filter_data(a, v): - if isinstance(v, 
DeadlineIndexedVar): + if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): return False if v is None: return False diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 1d68793d53..55acd92043 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -92,13 +92,12 @@ class AfterEffectsSubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - dln_job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + dln_job_info.EnvironmentKeyValue[key] = value + # to recognize job from PYPE for turning Event On/Off - dln_job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + dln_job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return dln_job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 3f9c09b592..6327143623 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -284,14 +284,12 @@ class HarmonySubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + job_info.EnvironmentKeyValue[key] = value # to recognize job from PYPE for turning Event On/Off - job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 9692b136e9..ad46feea03 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -137,8 +137,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for key, value in environment.items(): if not value: continue - job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, - value=value) + job_info.EnvironmentKeyValue[key] = value # Adding file dependencies. 
if self.asset_dependencies: @@ -538,9 +537,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for key, value in envs.items(): if not value: continue - - job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, - value=value) + job_info.EnvironmentKeyValue[key] = value plugin_info.update({ "Version": "3.6", From 1e87c9d6d2c7338f8e53e8a06d9f1983056797b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:02:39 +0200 Subject: [PATCH 0906/2550] Use DeadlineIndexedVar `__iadd__` functionality --- .../plugins/publish/submit_aftereffects_deadline.py | 4 ++-- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 55acd92043..0c1ffa6bd7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -67,9 +67,9 @@ class AfterEffectsSubmitDeadline( dln_job_info.Group = self.group dln_job_info.Department = self.department dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename = \ + dln_job_info.OutputFilename += \ os.path.basename(self._instance.data["expectedFiles"][0]) - dln_job_info.OutputDirectory = \ + dln_job_info.OutputDirectory += \ os.path.dirname(self._instance.data["expectedFiles"][0]) dln_job_info.JobDelay = "00:00:00" diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index ad46feea03..6b08f9894d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -144,14 +144,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): dependencies = instance.context.data["fileDependencies"] dependencies.append(context.data["currentFile"]) for dependency in dependencies: - job_info.AssetDependency = dependency + job_info.AssetDependency += dependency # Add list of expected files to job # --------------------------------- exp = instance.data.get("expectedFiles") for filepath in self._iter_expected_files(exp): - job_info.OutputDirectory = os.path.dirname(filepath) - job_info.OutputFilename = os.path.basename(filepath) + job_info.OutputDirectory += os.path.dirname(filepath) + job_info.OutputFilename += os.path.basename(filepath) return job_info @@ -443,7 +443,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if self.asset_dependencies: # Asset dependency to wait for at least the scene file to sync. 
- job_info.AssetDependency = self.scene_path + job_info.AssetDependency += self.scene_path # Get layer prefix render_products = self._instance.data["renderProducts"] From 2c01cb806d68aa04a733e16d4cfd1abb15f438fe Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:03:34 +0200 Subject: [PATCH 0907/2550] Remove backwards compatibility for append functionality in old style vars --- openpype/modules/deadline/abstract_submit_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index beb1cd0fae..f698b7688e 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -393,16 +393,6 @@ class DeadlineJobInfo(object): for key, value in data.items(): setattr(self, key, value) - def __setattr__(self, key, value): - # Backwards compatibility: Allow appending to index vars by setting - # it on Job Info directly like: JobInfo.OutputFilename = filename - existing = getattr(self, key, None) - if isinstance(existing, DeadlineIndexedVar): - existing += value - return - - object.__setattr__(self, key, value) - @six.add_metaclass(AbstractMetaInstancePlugin) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): From 47164b36effa0f4986ccf15aa3a4967ccb014e26 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:11:35 +0200 Subject: [PATCH 0908/2550] Be more explicit about what keys to include from Session This way it matches more with logic of other host submitters (e.g. AfterEffects + Harmony) --- .../deadline/plugins/publish/submit_maya_deadline.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 6b08f9894d..bb48fe6902 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -130,15 +130,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # to recognize job from PYPE for turning Event On/Off - environment["OPENPYPE_RENDER_JOB"] = "1" - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - - for key, value in environment.items(): + for key in keys: + value = environment.get(key) if not value: continue job_info.EnvironmentKeyValue[key] = value + # to recognize job from PYPE for turning Event On/Off + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" + job_info.EnvironmentKeyValue["OPENPYPE_LOG_NO_COLORS"] = "1" + # Adding file dependencies. 
         if self.asset_dependencies:
             dependencies = instance.context.data["fileDependencies"]
             dependencies.append(context.data["currentFile"])
             for dependency in dependencies:
                 job_info.AssetDependency += dependency

From ec171443d2fd0832d63d821a65be5cc343829059 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 2 Sep 2022 12:14:32 +0200
Subject: [PATCH 0909/2550] Small grammar fixes

---
 .../docs/admin_settings_project_anatomy.md | 26 +++++++++----------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/website/docs/admin_settings_project_anatomy.md b/website/docs/admin_settings_project_anatomy.md
index 361710fc9c..b2a3d9f969 100644
--- a/website/docs/admin_settings_project_anatomy.md
+++ b/website/docs/admin_settings_project_anatomy.md
@@ -20,17 +20,17 @@ It defines:
 - Colour Management
 - File Formats
 
-Anatomy is the only configuration that is always saved as project override. This is to make sure, that any updates to OpenPype or Studio default values, don't affect currently running productions.
+Anatomy is the only configuration that is always saved as a project override. This is to make sure that any updates to OpenPype or Studio default values don't affect currently running productions.
 
 ![anatomy_01](assets/settings/anatomy_01.png)
 
 ## Roots
 
-Roots define where files are stored with path to shared folder. It is required to set root path for each platform you are using in studio. All paths must point to same folder!
+Roots define where files are stored with a path to a shared folder. It is required to set the root path for each platform you are using in the studio. All paths must point to the same folder!
 
 ![roots01](assets/settings/anatomy_roots01.png)
 
-It is possible to set multiple roots when necessary. That may be handy when you need to store specific type of data on another disk.
+It is possible to set multiple roots when necessary. That may be handy when you need to store a specific type of data on another disk.
 
 ![roots02](assets/settings/anatomy_roots02.png)
 
 Note how multiple roots are used here, to push different types of files to different storage locations.
 
 ## Templates
 
-Templates define project's folder structure and filenames.
+Templates define the project's folder structure and filenames.
 
 We have a few required anatomy templates for OpenPype to work properly, however we keep adding more when needed.
 
@@ -102,34 +102,34 @@ We have a few required anatomy templates for OpenPype to work properly, however
 
 ### Anatomy reference keys
 
-Anatomy templates have ability to use "referenced keys". Best example is `path` in publish or work templates which are just referencing to `folder` and `file` (`{@folder}/{@file}`) so any changes in folder or file template are propagated to the path template. The other advantage is to simplify version and frame formatting with paddings. In default templates you can notice that keys `{@version}` or `{@frame}` are used in templates. They are referencing to `Anatomy` -> `Templates` -> `Version` or `Frame` which handle version and frame formatting with padding.
+Anatomy templates have the ability to use "referenced keys". The best example is `path` in the publish or work templates, which just contains references to `folder` and `file` (`{@folder}/{@file}`). Any changes in the folder or file template are propagated to the path template. Another example is the simplification of version and frame formatting with padding. You can notice that the keys `{@version}` or `{@frame}` are used in the default templates. They reference `Anatomy` -> `Templates` -> `Version` or `Frame`, which handle version and frame formatting with padding.
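As a quick illustration of the padding behaviour described above (a minimal sketch using plain Python `str.format` only, not OpenPype's actual `Anatomy` template API):

```python
# Sketch only: shows what a resolved, padded version key boils down to.
# The real resolution happens inside OpenPype's Anatomy templates; here we
# assume {@version} expands to "v{version:0>5}" when version_padding is 5.
version_padding = 5

version_template = "v{{version:0>{}}}".format(version_padding)
print(version_template)                       # v{version:0>5}
print(version_template.format(version=1))     # v00001
print(version_template.format(version=42))    # v00042
```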
-So if you set `project_anatomy/templates/defaults/version_padding` to `5` the `{@version}` key will be transformed to `v{version:0>5}` and version number in paths will have 5 numbers -> `v00001`.
+So if you set `project_anatomy/templates/defaults/version_padding` to `5` the `{@version}` key will be transformed to `v{version:0>5}` automatically and the version number in paths will have 5 digits -> `v00001`.
 
 ### Optional keys
-In some cases are not all keys available and should be just ignored. For example `{frame}` should be available only for sequences but we have single publish template. To handle these cases it is possible to use optional marks which will ignore segment of template if can't be filled because of missing keys. To mark these segments use `<` and `>`.
+In some cases of template formatting not all keys are available and should just be ignored. For example `{frame}` should be available only for sequences, but we have a single publish template. To handle these cases it is possible to use special characters to mark a segment of the template which should be ignored if it can't be filled because of missing keys. To mark these segments use `<` and `>`.
 
 Template `{project[code]}_{asset}_{subset}<_{output}><.{@frame}>.{ext}` can handle all 4 possible situations when `output` and `frame` keys are available or not. The optional segments can contain additional text, like in the example dot (`.`) for frame and underscore (`_`) for output, those are also ignored if the keys are not available. Optional segments without formatting keys are kept untouched. It is possible to nest optional segments inside optional segments `<{asset}<.{@frame}>
    >` which may result in empty string if `asset` key is not available. ## Attributes -Project attributes are used as default values for new assets under project, except `Applications` and `Active project` which are project specific. Values of attributes that are **not** project specific are always used from assets. So if `tools` are not loading as expected it is because the asset have different value. +Project attributes are used as default values for new assets created under project, except `Applications` and `Active project` which are project specific. Values of attributes that are **not** project specific are always used from assets. So if `tools` are not loading as expected it is because the assets have different values. ![anatomy_attributes](assets/settings/anatomy_attributes.png) **Most of attributes don't need detailed explanation.** -| Attribute | Description | -| --- | --- | -| `Applications` | List of applications that can be used in the project. At the moment only possible filter of applications. | -| `Tools` | List of application tools. This value can be overridden per asset. | +| Attribute | Description | +| --- |-----------------------------------------------------------------------------------------------------------------------------| +| `Applications` | List of applications that can be used in the project. At the moment used only as a possible filter of applications. | +| `Tools` | List of application tools. This value can be overridden per asset. | | `Active project` | Project won't be visible in tools if enabled.
    - To revert check `Show Inactive projects` checkbox in project settings. | ## Task Types -Available task types on a project. Each task on an asset is referencing to a task type on project which allows access to task type attributes. At this moment only `short_name` is available (can be used in templates as `{task[short_name]}`). +Available task types on a project. Each task on an asset is referencing a task type on the project which allows access to additional task type attributes. At this moment only `short_name` is available (can be used in templates as `{task[short_name]}`). ![tasks](assets/settings/anatomy_tasks.png) From 585d43cf48497982be916ac7abc45141f47ff8a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 2 Sep 2022 13:40:03 +0200 Subject: [PATCH 0910/2550] modified table --- website/docs/admin_settings_project_anatomy.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/website/docs/admin_settings_project_anatomy.md b/website/docs/admin_settings_project_anatomy.md index b2a3d9f969..2068c5cde2 100644 --- a/website/docs/admin_settings_project_anatomy.md +++ b/website/docs/admin_settings_project_anatomy.md @@ -120,10 +120,10 @@ Project attributes are used as default values for new assets created under proje **Most of attributes don't need detailed explanation.** -| Attribute | Description | -| --- |-----------------------------------------------------------------------------------------------------------------------------| -| `Applications` | List of applications that can be used in the project. At the moment used only as a possible filter of applications. | -| `Tools` | List of application tools. This value can be overridden per asset. | +| Attribute | Description | +| --- | --- | +| `Applications` | List of applications that can be used in the project. At the moment used only as a possible filter of applications. | +| `Tools` | List of application tools. This value can be overridden per asset. | | `Active project` | Project won't be visible in tools if enabled.
    - To revert check `Show Inactive projects` checkbox in project settings. | From 37371936cf293200b74b7d5ee5381aac8a0551ba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 2 Sep 2022 14:08:40 +0200 Subject: [PATCH 0911/2550] safer task data --- .../plugins/publish/integrate_slack_api.py | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 4a8e9f773f..643e55915b 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -112,13 +112,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if review_path: fill_pairs.append(("review_filepath", review_path)) - task_data = instance.data.get("task") - if not task_data: - task_data = fill_data.get("task") - for key, value in task_data.items(): - fill_key = "task[{}]".format(key) - fill_pairs.append((fill_key, value)) - fill_pairs.append(("task", task_data["name"])) + task_data = fill_data.get("task") + if task_data: + if ( + "{task}" in message_templ + or "{Task}" in message_templ + or "{TASK}" in message_templ + ): + fill_pairs.append(("task", task_data["name"])) + + else: + for key, value in task_data.items(): + fill_key = "task[{}]".format(key) + fill_pairs.append((fill_key, value)) self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) From fc8d525207216b7ce8a47499f2419e1febd56f13 Mon Sep 17 00:00:00 2001 From: Hayley GUILLOT Date: Fri, 2 Sep 2022 14:14:33 +0200 Subject: [PATCH 0912/2550] Added an event triggered when user connects to kitsu --- openpype/modules/kitsu/utils/credentials.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index 0529380d6d..b80852dd6a 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -5,6 +5,7 @@ from typing import Tuple import gazu from openpype.lib.local_settings import OpenPypeSecureRegistry +from openpype.lib import emit_event def validate_credentials( @@ -32,6 +33,7 @@ def validate_credentials( except gazu.exception.AuthFailedException: return False + emit_on_kitsu_login(login) return True @@ -102,3 +104,13 @@ def set_credentials_envs(login: str, password: str): """ os.environ["KITSU_LOGIN"] = login os.environ["KITSU_PWD"] = password + +def emit_on_kitsu_login(login:str): + """Notifies listeners that Kitsu module succesfully connected, + and passes them data + + Args: + login (str): Kitsu username + """ + event_data = {"username": login} + emit_event("kitsu.user.logged", data = event_data, source = "kitsu") \ No newline at end of file From 75e2ec1a65316a0da8fa6b72530e684b6a77b4fe Mon Sep 17 00:00:00 2001 From: Hayley GUILLOT Date: Fri, 2 Sep 2022 15:44:47 +0200 Subject: [PATCH 0913/2550] Update credentials.py Linted, should fix hounds comments --- openpype/modules/kitsu/utils/credentials.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index b80852dd6a..a3b90a992b 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -8,9 +8,7 @@ from openpype.lib.local_settings import OpenPypeSecureRegistry from openpype.lib import emit_event -def validate_credentials( - 
login: str, password: str, kitsu_url: str = None -) -> bool: +def validate_credentials(login: str, password: str, kitsu_url: str = None) -> bool: """Validate credentials by trying to connect to Kitsu host URL. Args: @@ -105,12 +103,13 @@ def set_credentials_envs(login: str, password: str): os.environ["KITSU_LOGIN"] = login os.environ["KITSU_PWD"] = password -def emit_on_kitsu_login(login:str): - """Notifies listeners that Kitsu module succesfully connected, + +def emit_on_kitsu_login(login: str): + """Notifies listeners that Kitsu module succesfully connected, and passes them data Args: login (str): Kitsu username """ event_data = {"username": login} - emit_event("kitsu.user.logged", data = event_data, source = "kitsu") \ No newline at end of file + emit_event("kitsu.user.logged", data=event_data, source="kitsu") From 7a98e083de1f151d686601ca620c9432a1d756b8 Mon Sep 17 00:00:00 2001 From: Hayley GUILLOT Date: Fri, 2 Sep 2022 15:48:23 +0200 Subject: [PATCH 0914/2550] Shortened max line length to 79 Hopefully hounds will be happy --- openpype/modules/kitsu/utils/credentials.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index a3b90a992b..d853d5b437 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -8,7 +8,9 @@ from openpype.lib.local_settings import OpenPypeSecureRegistry from openpype.lib import emit_event -def validate_credentials(login: str, password: str, kitsu_url: str = None) -> bool: +def validate_credentials( + login: str, password: str, kitsu_url: str = None +) -> bool: """Validate credentials by trying to connect to Kitsu host URL. Args: From 74a404021975c52d6e22bf3ea5125fa23f9bcc3f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Sep 2022 15:49:18 +0200 Subject: [PATCH 0915/2550] Revert "Fix - update cx-freeze" This reverts commit 7ed5f5a0ee56d6481d28f0510d1bc6311e89ac38. 
--- poetry.lock | 990 +++++++------------------------------------------ pyproject.toml | 3 +- 2 files changed, 129 insertions(+), 864 deletions(-) diff --git a/poetry.lock b/poetry.lock index b428393a1f..726b248f8c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,7 +33,7 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +speedups = ["aiodns", "brotli", "cchardet"] [[package]] name = "aiohttp-json-rpc" @@ -122,7 +122,6 @@ python-versions = ">=3.6.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" -setuptools = ">=20.0" typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} wrapt = ">=1.11,<2" @@ -163,10 +162,10 @@ optional = false python-versions = ">=3.5" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "autopep8" @@ -274,9 +273,9 @@ optional = false python-versions = ">=2.7, <4.0" [package.extras] -dev = ["lowdown (>=0.2.0,<1)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"] -doc = ["lowdown (>=0.2.0,<1)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"] -test = ["pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)"] +dev = ["sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)", "lowdown (>=0.2.0,<1)", "pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)"] +doc = ["sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)", "lowdown (>=0.2.0,<1)"] +test = ["pytest-runner (>=2.7,<3)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)"] [[package]] name = "colorama" @@ -331,39 +330,30 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", 
"iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] [[package]] -name = "cx-Freeze" +name = "cx-freeze" version = "6.11.1" description = "Create standalone executables from Python scripts" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\" and python_version < \"3.10\""} -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -lief = {version = ">=0.11.5", markers = "sys_platform == \"win32\" and python_version <= \"3.10\""} -packaging = ">=21.0" -patchelf = {version = ">=0.12", markers = "sys_platform == \"linux\""} -setuptools = ">=59.0.1,<=60.10.0" - -[package.extras] -dev = ["bump2version (>=1.0.1)", "cibuildwheel (==2.6.1)", "pre-commit (>=2.17.0)", "pylint (>=2.13.0)"] -doc = ["sphinx (>=5.0.1,<6.0.0)", "sphinx-rtd-theme (==1.0.0)"] -test = ["nose (==1.3.7)", "pygments (>=2.11.2)", "pytest (>=7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytest-timeout (>=1.4.2)"] +cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} +importlib-metadata = ">=4.3.1" [[package]] name = "cx-logging" version = "3.0" description = "Python and C interfaces for logging" -category = "main" +category = "dev" optional = false python-versions = "*" @@ -379,7 +369,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] [[package]] name = "dill" @@ -401,8 +391,8 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] dnssec = ["cryptography (>=2.6,<37.0)"] +curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] @@ -513,7 +503,7 @@ requests = ">=2.25.1,<=2.27.1" [package.extras] dev = ["wheel"] -test = ["black (==21.12b0)", "pre-commit (==2.17.0)", "pytest (==4.6.11)", "pytest (==6.1.2)", "pytest (==6.2.5)", "pytest-cov (==2.12.1)", "requests-mock (==1.9.3)"] +test = ["pytest-cov (==2.12.1)", "requests-mock (==1.9.3)", "pytest (==4.6.11)", "pytest (==6.1.2)", "pytest (==6.2.5)", "black (==21.12b0)", "pre-commit (==2.17.0)"] [[package]] name = "gitdb" @@ -586,7 +576,7 @@ rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} six = ">=1.9.0" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] +aiohttp = ["requests (>=2.20.0,<3.0.0dev)", "aiohttp (>=3.6.2,<4.0.0dev)"] enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -658,9 +648,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" 
[package.extras] -docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -679,10 +669,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] plugins = ["setuptools"] -requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" @@ -707,8 +697,8 @@ optional = false python-versions = ">=3.7" [package.extras] -test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] -trio = ["async_generator", "trio"] +trio = ["async-generator", "trio"] +test = ["async-timeout", "trio", "testpath", "pytest-asyncio (>=0.17)", "pytest-trio", "pytest"] [[package]] name = "jinja2" @@ -761,8 +751,8 @@ pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_ SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler", "pytest-flake8", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] name = "lazy-object-proxy" @@ -772,14 +762,6 @@ category = "dev" optional = false python-versions = ">=3.6" -[[package]] -name = "lief" -version = "0.12.1" -description = "Library to instrument executable formats" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "log4mongo" version = "1.7.0" @@ -827,7 +809,7 @@ python-versions = "*" pyaaf2 = "1.4.0" [package.extras] -dev = ["check-manifest", "coverage (>=4.5)", "flake8 (>=3.5)", "urllib3 (>=1.24.3)"] +dev = ["check-manifest", "flake8 (>=3.5)", "coverage (>=4.5)", "urllib3 (>=1.24.3)"] view = ["PySide2 (>=5.11,<6.0)"] [package.source] @@ -839,7 +821,7 @@ reference = "openpype" name = "packaging" version = "21.3" description = "Core utilities for Python packages" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -861,9 +843,9 @@ pynacl = ">=1.0.1" six = "*" [package.extras] -all = ["bcrypt (>=3.1.3)", "gssapi (>=1.4.1)", "invoke (>=1.3)", "pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "pywin32 (>=2.1.8)"] -ed25519 = ["bcrypt (>=3.1.3)", "pynacl (>=1.0.1)"] -gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] +ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"] +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] invoke = 
["invoke (>=1.3)"] [[package]] @@ -878,17 +860,6 @@ python-versions = ">=3.6" qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["docopt", "pytest (<6.0.0)"] -[[package]] -name = "patchelf" -version = "0.15.0.0" -description = "A small utility to modify the dynamic linker and RPATH of ELF executables." -category = "main" -optional = false -python-versions = "*" - -[package.extras] -test = ["importlib-metadata", "pytest"] - [[package]] name = "pathlib2" version = "2.3.7.post1" @@ -921,8 +892,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] [[package]] name = "pluggy" @@ -1083,7 +1054,7 @@ python-versions = "*" aws = ["pymongo-auth-aws (<2.0.0)"] encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] gssapi = ["pykerberos"] -ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)", "certifi"] snappy = ["python-snappy"] srv = ["dnspython (>=1.16.0,<1.17.0)"] tls = ["ipaddress"] @@ -1101,8 +1072,8 @@ python-versions = ">=3.6" cffi = ">=1.4.1" [package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"] -tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] [[package]] name = "pynput" @@ -1217,7 +1188,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-print" @@ -1410,19 +1381,6 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[[package]] -name = "setuptools" -version = "60.10.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-inline-tabs", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "shotgun-api3" version = "3.3.3" @@ -1455,8 +1413,8 @@ optional = false python-versions = ">=3.6.0" [package.extras] -optional = ["SQLAlchemy (>=1,<2)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)"] -testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", 
"Jinja2 (==3.0.3)", "Werkzeug (<2)", "black (==22.3.0)", "boto3 (<=2)", "click (==8.0.4)", "codecov (>=2,<3)", "databases (>=0.5)", "flake8 (>=4,<5)", "itsdangerous (==1.1.0)", "moto (>=3,<4)", "psutil (>=5,<6)", "pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "pytest-cov (>=2,<3)"] +optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=10,<11)", "websocket-client (>=1,<2)"] +testing = ["pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "itsdangerous (==1.1.0)", "Jinja2 (==3.0.3)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=4,<5)", "black (==22.3.0)", "click (==8.0.4)", "psutil (>=5,<6)", "databases (>=0.5)", "boto3 (<=2)", "moto (>=3,<4)"] [[package]] name = "smmap" @@ -1511,8 +1469,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed-ast"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", "docutils-stubs", "types-typed-ast", "types-requests"] +test = ["pytest (>=4.6)", "html5lib", "cython", "typed-ast"] [[package]] name = "sphinx-qt-documentation" @@ -1527,9 +1485,9 @@ docutils = "*" sphinx = "*" [package.extras] +test = ["pytest-cov", "pytest (>=3.0.0)"] +lint = ["pylint", "flake8", "black"] dev = ["pre-commit"] -lint = ["black", "flake8", "pylint"] -test = ["pytest (>=3.0.0)", "pytest-cov"] [[package]] name = "sphinx-rtd-theme" @@ -1544,7 +1502,7 @@ docutils = "<0.18" sphinx = ">=1.6" [package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] [[package]] name = "sphinxcontrib-applehelp" @@ -1555,7 +1513,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] @@ -1567,7 +1525,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] @@ -1579,8 +1537,8 @@ optional = false python-versions = ">=3.6" [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] [[package]] name = "sphinxcontrib-jsmath" @@ -1591,7 +1549,7 @@ optional = false python-versions = ">=3.5" [package.extras] -test = ["flake8", "mypy", "pytest"] +test = ["pytest", "flake8", "mypy"] [[package]] name = "sphinxcontrib-qthelp" @@ -1602,7 +1560,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] @@ -1614,7 +1572,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] @@ -1630,7 +1588,7 @@ sphinxcontrib-serializinghtml = "*" [package.extras] lint = ["flake8"] -test = ["Sphinx", "pytest", "sqlalchemy", "whoosh"] +test = ["pytest", "sqlalchemy", "whoosh", "sphinx"] [[package]] name = "stone" @@ -1701,8 +1659,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi 
(>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1724,17 +1682,6 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] six = "*" -[[package]] -name = "wheel" -version = "0.37.1" -description = "A built-package format for Python" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.extras] -test = ["pytest (>=3.0.0)", "pytest-cov"] - [[package]] name = "wrapt" version = "1.14.1" @@ -1756,8 +1703,8 @@ aiohttp = "<4" yarl = "*" [package.extras] -develop = ["Sphinx", "async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov", "sphinxcontrib-plantuml", "tox (>=2.4)"] -testing = ["async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov"] +develop = ["async-timeout", "coverage (!=4.3)", "coveralls", "pytest", "pytest-aiohttp", "pytest-cov", "sphinx", "sphinxcontrib-plantuml", "tox (>=2.4)"] +testing = ["async-timeout", "pytest", "pytest-aiohttp", "pytest-cov", "coverage (!=4.3)", "coveralls"] ujson = ["ujson"] [[package]] @@ -1782,13 +1729,13 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "6a5f6910109c9ed6fb1cc1cb1cef7b21a0055a17c393175a1a7aabc00e35d54f" +content-hash = "de7422afb6aed02f75e1696afdda9ad6c7bf32da76b5022ee3e8f71a1ac4bae2" [metadata.files] acre = [] @@ -1870,10 +1817,7 @@ aiohttp-json-rpc = [ {file = "aiohttp-json-rpc-0.13.3.tar.gz", hash = "sha256:6237a104478c22c6ef96c7227a01d6832597b414e4b79a52d85593356a169e99"}, {file = "aiohttp_json_rpc-0.13.3-py3-none-any.whl", hash = "sha256:4fbd197aced61bd2df7ae3237ead7d3e08833c2ccf48b8581e1828c95ebee680"}, ] -aiohttp-middlewares = [ - {file = "aiohttp-middlewares-2.1.0.tar.gz", hash = "sha256:5863970d944dc63faedc96ef324a7fe2bcefefebe29acc90cd641236322d00c3"}, - {file = "aiohttp_middlewares-2.1.0-py3-none-any.whl", hash = "sha256:c83d48702e6a8669981976f39a60e83d059dc01d7b1ee651aec5d4cb807ff784"}, -] +aiohttp-middlewares = [] aiosignal = [ {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, @@ -1891,10 +1835,7 @@ arrow = [ {file = "arrow-0.17.0-py2.py3-none-any.whl", hash = "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5"}, {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] -astroid = [ - {file = 
"astroid-2.11.7-py3-none-any.whl", hash = "sha256:86b0a340a512c65abf4368b80252754cda17c02cdbbd3f587dddf98112233e7b"}, - {file = "astroid-2.11.7.tar.gz", hash = "sha256:bb24615c77f4837c707669d16907331374ae8a964650a66999da3f5ca68dc946"}, -] +astroid = [] async-timeout = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, @@ -1903,112 +1844,21 @@ asynctest = [ {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, ] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] +atomicwrites = [] +attrs = [] autopep8 = [ {file = "autopep8-1.5.7-py2.py3-none-any.whl", hash = "sha256:aa213493c30dcdac99537249ee65b24af0b2c29f2e83cd8b3f68760441ed0db9"}, {file = "autopep8-1.5.7.tar.gz", hash = "sha256:276ced7e9e3cb22e5d7c14748384a5cf5d9002257c0ed50c0e075b68011bb6d0"}, ] -babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, -] -bcrypt = [ - {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521"}, - {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa"}, - {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa"}, - {file = "bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e"}, - {file = "bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129"}, - {file = "bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"}, -] +babel = [] +bcrypt = [] blessed = [ {file = 
"blessed-1.19.1-py2.py3-none-any.whl", hash = "sha256:63b8554ae2e0e7f43749b6715c734cc8f3883010a809bf16790102563e6cf25b"}, {file = "blessed-1.19.1.tar.gz", hash = "sha256:9a0d099695bf621d4680dd6c73f6ad547f6a3442fbdbe80c4b1daa1edbc492fc"}, ] -cachetools = [ - {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, - {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, -] -certifi = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = 
"cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] +cachetools = [] +certifi = [] +cffi = [] charset-normalizer = [ 
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, @@ -2021,10 +1871,7 @@ clique = [ {file = "clique-1.6.1-py2.py3-none-any.whl", hash = "sha256:8619774fa035661928dd8c93cd805acf2d42533ccea1b536c09815ed426c9858"}, {file = "clique-1.6.1.tar.gz", hash = "sha256:90165c1cf162d4dd1baef83ceaa1afc886b453e379094fa5b60ea470d1733e66"}, ] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] +colorama = [] commonmark = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, @@ -2033,99 +1880,20 @@ coolname = [ {file = "coolname-1.1.0-py2.py3-none-any.whl", hash = "sha256:e6a83a0ac88640f4f3d2070438dbe112fe80cfebc119c93bd402976ec84c0978"}, {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, ] -coverage = [ - {file = "coverage-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f50d3a822947572496ea922ee7825becd8e3ae6fbd2400cd8236b7d64b17f285"}, - {file = "coverage-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5191d53afbe5b6059895fa7f58223d3751c42b8101fb3ce767e1a0b1a1d8f87"}, - {file = "coverage-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04010af3c06ce2bfeb3b1e4e05d136f88d88c25f76cd4faff5d1fd84d11581ea"}, - {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6630d8d943644ea62132789940ca97d05fac83f73186eaf0930ffa715fbdab6b"}, - {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05de0762c1caed4a162b3e305f36cf20a548ff4da0be6766ad5c870704be3660"}, - {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e3a41aad5919613483aad9ebd53336905cab1bd6788afd3995c2a972d89d795"}, - {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2738ba1ee544d6f294278cfb6de2dc1f9a737a780469b5366e662a218f806c3"}, - {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a0d2df4227f645a879010461df2cea6b7e3fb5a97d7eafa210f7fb60345af9e8"}, - {file = "coverage-6.4.3-cp310-cp310-win32.whl", hash = "sha256:73a10939dc345460ca0655356a470dd3de9759919186a82383c87b6eb315faf2"}, - {file = "coverage-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:53c8edd3b83a4ddba3d8c506f1359401e7770b30f2188f15c17a338adf5a14db"}, - {file = "coverage-6.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1eda5cae434282712e40b42aaf590b773382afc3642786ac3ed39053973f61f"}, - {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59fc88bc13e30f25167e807b8cad3c41b7218ef4473a20c86fd98a7968733083"}, - {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75314b00825d70e1e34b07396e23f47ed1d4feedc0122748f9f6bd31a544840"}, - {file = 
"coverage-6.4.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52f8b9fcf3c5e427d51bbab1fb92b575a9a9235d516f175b24712bcd4b5be917"}, - {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5a559aab40c716de80c7212295d0dc96bc1b6c719371c20dd18c5187c3155518"}, - {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:306788fd019bb90e9cbb83d3f3c6becad1c048dd432af24f8320cf38ac085684"}, - {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:920a734fe3d311ca01883b4a19aa386c97b82b69fbc023458899cff0a0d621b9"}, - {file = "coverage-6.4.3-cp37-cp37m-win32.whl", hash = "sha256:ab9ef0187d6c62b09dec83a84a3b94f71f9690784c84fd762fb3cf2d2b44c914"}, - {file = "coverage-6.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:39ebd8e120cb77a06ee3d5fc26f9732670d1c397d7cd3acf02f6f62693b89b80"}, - {file = "coverage-6.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc698580216050b5f4a34d2cdd2838b429c53314f1c4835fab7338200a8396f2"}, - {file = "coverage-6.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:877ee5478fd78e100362aed56db47ccc5f23f6e7bb035a8896855f4c3e49bc9b"}, - {file = "coverage-6.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:555a498999c44f5287cc95500486cd0d4f021af9162982cbe504d4cb388f73b5"}, - {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff095a5aac7011fdb51a2c82a8fae9ec5211577f4b764e1e59cfa27ceeb1b59"}, - {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5de1e9335e2569974e20df0ce31493d315a830d7987e71a24a2a335a8d8459d3"}, - {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7856ea39059d75f822ff0df3a51ea6d76307c897048bdec3aad1377e4e9dca20"}, - {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:411fdd9f4203afd93b056c0868c8f9e5e16813e765de962f27e4e5798356a052"}, - {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdf7b83f04a313a21afb1f8730fe4dd09577fefc53bbdfececf78b2006f4268e"}, - {file = "coverage-6.4.3-cp38-cp38-win32.whl", hash = "sha256:ab2b1a89d2bc7647622e9eaf06128a5b5451dccf7c242deaa31420b055716481"}, - {file = "coverage-6.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:0e34247274bde982bbc613894d33f9e36358179db2ed231dd101c48dd298e7b0"}, - {file = "coverage-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b104b6b1827d6a22483c469e3983a204bcf9c6bf7544bf90362c4654ebc2edf3"}, - {file = "coverage-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adf1a0d272633b21d645dd6e02e3293429c1141c7d65a58e4cbcd592d53b8e01"}, - {file = "coverage-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9832434a9193fbd716fbe05f9276484e18d26cc4cf850853594bb322807ac3"}, - {file = "coverage-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:923f9084d7e1d31b5f74c92396b05b18921ed01ee5350402b561a79dce3ea48d"}, - {file = "coverage-6.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d64304acf79766e650f7acb81d263a3ea6e2d0d04c5172b7189180ff2c023c"}, - {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fc294de50941d3da66a09dca06e206297709332050973eca17040278cb0918ff"}, - {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:a42eaaae772f14a5194f181740a67bfd48e8806394b8c67aa4399e09d0d6b5db"}, - {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4822327b35cb032ff16af3bec27f73985448f08e874146b5b101e0e558b613dd"}, - {file = "coverage-6.4.3-cp39-cp39-win32.whl", hash = "sha256:f217850ac0e046ede611312703423767ca032a7b952b5257efac963942c055de"}, - {file = "coverage-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0a84376e4fd13cebce2c0ef8c2f037929c8307fb94af1e5dbe50272a1c651b5d"}, - {file = "coverage-6.4.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:068d6f2a893af838291b8809c876973d885543411ea460f3e6886ac0ee941732"}, - {file = "coverage-6.4.3.tar.gz", hash = "sha256:ec2ae1f398e5aca655b7084392d23e80efb31f7a660d2eecf569fb9f79b3fb94"}, -] -cryptography = [ - {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884"}, - {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280"}, - {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3"}, - {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59"}, - {file = "cryptography-37.0.4-cp36-abi3-win32.whl", hash = "sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157"}, - {file = "cryptography-37.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327"}, - {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b"}, - {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa"}, - {file = 
"cryptography-37.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab"}, - {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, -] -cx-Freeze = [ - {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e372b9e72ac0e2207ee65a9d404e2669da1134dc37f5ace9a2a779099d3aa868"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd293382e1ad270dddf5a2707db5dbb8600a1e0b0c9b0da7af9d61326eb1b325"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:feec2f36bce042da6a0d92690bc592b0dcec29218adc2278535cd13b28ec3485"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5aafcc6337856d5921b20f41acdcc8d0fe770388f3a072eb25163f8825f6c5d"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-win32.whl", hash = "sha256:b99cc0b6d6c1ba51bd9fe11dbfae9aabcf089ba779ea86d83d280e2e40f484e7"}, - {file = "cx_Freeze-6.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:f0dfe6acf25eb096faba7d4b4b001bcd0f818e372ea1f05d900665b0ad82b0b9"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a68e70dcb27b0720b131a35c5fdd096012fe00119a8e51d935f3fb3cd251c39"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f7bde925042d8843af9b6242a1bf3865dbbae088f3183a89a575124ec2e14a4"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-win32.whl", hash = "sha256:7698fb82b6f84b3426774b5f3bee770601f26b612306319664a02f1ec5160861"}, - {file = "cx_Freeze-6.11.1-cp36-cp36m-win_amd64.whl", hash = "sha256:9848c975401b21a98aa896baabfed067c3e981afd5b5b0a8a5eabe5c9f23d3c5"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87dcf5ceb78dc6af910c45238128fda2394b7c430d3fa469e87e1efdeeb5d4cc"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb74d8cc1f8c658986acc19ea6875b985a979421f9bb9c310b43cd2ff5d90c44"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-win32.whl", hash = "sha256:971c0a8356ef0ee09a3097f9c9d5b52cde6d08d1ef80e997eb4a6e22fe0eff2f"}, - {file = "cx_Freeze-6.11.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7c1cb44379b2093cbdde77e302a376f29aa61999c73be6e8a559463db84b85c4"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc90d6dbde66e8ddfe6b26f63fb2ea7d6d0e4568205f40660a63b8b200dcabcf"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f56f618a23d86bdcfff22b29ec993117effd32a401060013105517301c0bf32"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-win32.whl", hash = "sha256:4edfb5d65afb11eb9f0326d40d15445366481585705b3096f2cd090e30a36247"}, - {file = "cx_Freeze-6.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:cfb5a8032bf424c04814c9426425fa1db4cf8c280da948969eead9f616c0fd92"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:0a3e32980269cfabc2e814978bfdf4382fe3cbc9ac64f9f1bdb1cd2ddf3a40d0"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:798bb7ca037c3c885efd3eda6756c84c7927c712b730b22a7f256440faa36d38"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5aa1759098ca4853200a79138b626a9caa2ccf829d662b28c82ec7e71ea97cde"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7962680ae04ee3afda1012026b5394a534e2526b68681d591158b7d8bc733bcf"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-win32.whl", hash = "sha256:da4f82fe27e71571c0ab9d700b5e6c6c631ae39133d9b6d7157939f1e9f37312"}, - {file = "cx_Freeze-6.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:aaf399b6ed5d54b7271980ae354605620bedcd52d722f57ad527bd989c56a875"}, - {file = "cx_Freeze-6.11.1.tar.gz", hash = "sha256:8f3a30c9e3394f290655e346d3b460910656b30ac6347a87499bb5ad365c6e7c"}, +coverage = [] +cryptography = [] +cx-freeze = [ + {file = "cx_Freeze-6.9-cp310-cp310-win32.whl", hash = "sha256:776d4fb68a4831691acbd3c374362b9b48ce2e568514a73c3d4cb14d5dcf1470"}, + {file = "cx_Freeze-6.9-cp310-cp310-win_amd64.whl", hash = "sha256:243f36d35a034a409cd6247d8cb5d1fbfd7374e3e668e813d0811f64d6bd5ed3"}, + {file = "cx_Freeze-6.9-cp36-cp36m-win32.whl", hash = "sha256:ffc855eabc735b693e2d604d71dce6d52d78a6ba1070c55d51e786dd68ed232c"}, + {file = "cx_Freeze-6.9-cp36-cp36m-win_amd64.whl", hash = "sha256:fe4e32a0c75b2b54491882926bf3ba12f8a3d589822a68a8be7c09f1dcca5546"}, + {file = "cx_Freeze-6.9-cp37-cp37m-win32.whl", hash = "sha256:99c292e7a31cb343efc0cf47f82220a44a4a3b8776651624cd8ee03c23104940"}, + {file = "cx_Freeze-6.9-cp37-cp37m-win_amd64.whl", hash = "sha256:738ab22f3a3f6bc220b16dccf2aa0603c3cd271b2a7a9d9480dab82311308b23"}, + {file = "cx_Freeze-6.9-cp38-cp38-win32.whl", hash = "sha256:c1c75df572858e623d0aa39771cd984c0abd8aacb43b2aca2d12d0bc95f25566"}, + {file = "cx_Freeze-6.9-cp38-cp38-win_amd64.whl", hash = "sha256:0788c895c47fdcf375151ce78ff42336c01aca7bc43daecb8f8f8356cdc42b43"}, + {file = "cx_Freeze-6.9-cp39-cp39-win32.whl", hash = "sha256:a31f5ddbc80b29e297370d868791470b0e3e9062db45038c23293a76ed039018"}, + {file = "cx_Freeze-6.9-cp39-cp39-win_amd64.whl", hash = "sha256:30708f603076713c0a839cdfb34f4126d68e9d61afb3d9a59daa9cf252033872"}, + {file = "cx_Freeze-6.9.tar.gz", hash = "sha256:673aa3199af2ef87fc03a43a30e5d78b27ced2cedde925da89c55b5657da267b"}, ] cx-logging = [ {file = "cx_Logging-3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9fcd297e5c51470521c47eff0f86ba844aeca6be97e13c3e2114ebdf03fa3c96"}, @@ -2142,14 +1910,8 @@ cx-logging = [ {file = "cx_Logging-3.0-cp39-cp39-win_amd64.whl", hash = "sha256:302e9c4f65a936c288a4fa59a90e7e142d9ef994aa29676731acafdcccdbb3f5"}, {file = "cx_Logging-3.0.tar.gz", hash = "sha256:ba8a7465facf7b98d8f494030fb481a2e8aeee29dc191e10383bb54ed42bdb34"}, ] -deprecated = [ - {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, - {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, -] -dill = [ - {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"}, - {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, -] +deprecated = [] +dill = [] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = 
"sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, @@ -2158,93 +1920,22 @@ docutils = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] -dropbox = [ - {file = "dropbox-11.33.0-py2-none-any.whl", hash = "sha256:3ee9024631b80f18938556d5e27cbdede26d6dc0b73aeaa90fc075ce96c950b1"}, - {file = "dropbox-11.33.0-py3-none-any.whl", hash = "sha256:1a0cbc22b0d1dae96e18b37e3520e5c289de7eb1303935db40e4dbfc9bb9e59b"}, - {file = "dropbox-11.33.0.tar.gz", hash = "sha256:7c638b521169a460de38b9eaeb204fe918874f72d6c3eed005d064b6f37da9c1"}, -] +dropbox = [] enlighten = [ {file = "enlighten-1.10.2-py2.py3-none-any.whl", hash = "sha256:b237fe562b320bf9f1d4bb76d0c98e0daf914372a76ab87c35cd02f57aa9d8c1"}, {file = "enlighten-1.10.2.tar.gz", hash = "sha256:7a5b83cd0f4d095e59d80c648ebb5f7ffca0cd8bcf7ae6639828ee1ad000632a"}, ] -evdev = [ - {file = "evdev-1.6.0.tar.gz", hash = "sha256:ecfa01b5c84f7e8c6ced3367ac95288f43cd84efbfd7dd7d0cdbfc0d18c87a6a"}, -] +evdev = [] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] -frozenlist = [ - {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5f271c93f001748fc26ddea409241312a75e13466b06c94798d1a341cf0e6989"}, - {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c6ef8014b842f01f5d2b55315f1af5cbfde284eb184075c189fd657c2fd8204"}, - {file = "frozenlist-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:219a9676e2eae91cb5cc695a78b4cb43d8123e4160441d2b6ce8d2c70c60e2f3"}, - {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b47d64cdd973aede3dd71a9364742c542587db214e63b7529fbb487ed67cddd9"}, - {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2af6f7a4e93f5d08ee3f9152bce41a6015b5cf87546cb63872cc19b45476e98a"}, - {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a718b427ff781c4f4e975525edb092ee2cdef6a9e7bc49e15063b088961806f8"}, - {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c56c299602c70bc1bb5d1e75f7d8c007ca40c9d7aebaf6e4ba52925d88ef826d"}, - {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:717470bfafbb9d9be624da7780c4296aa7935294bd43a075139c3d55659038ca"}, - {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:31b44f1feb3630146cffe56344704b730c33e042ffc78d21f2125a6a91168131"}, - {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c3b31180b82c519b8926e629bf9f19952c743e089c41380ddca5db556817b221"}, - {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d82bed73544e91fb081ab93e3725e45dd8515c675c0e9926b4e1f420a93a6ab9"}, - {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49459f193324fbd6413e8e03bd65789e5198a9fa3095e03f3620dee2f2dabff2"}, - {file = 
"frozenlist-1.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:94e680aeedc7fd3b892b6fa8395b7b7cc4b344046c065ed4e7a1e390084e8cb5"}, - {file = "frozenlist-1.3.1-cp310-cp310-win32.whl", hash = "sha256:fabb953ab913dadc1ff9dcc3a7a7d3dc6a92efab3a0373989b8063347f8705be"}, - {file = "frozenlist-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:eee0c5ecb58296580fc495ac99b003f64f82a74f9576a244d04978a7e97166db"}, - {file = "frozenlist-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0bc75692fb3770cf2b5856a6c2c9de967ca744863c5e89595df64e252e4b3944"}, - {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086ca1ac0a40e722d6833d4ce74f5bf1aba2c77cbfdc0cd83722ffea6da52a04"}, - {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b51eb355e7f813bcda00276b0114c4172872dc5fb30e3fea059b9367c18fbcb"}, - {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74140933d45271c1a1283f708c35187f94e1256079b3c43f0c2267f9db5845ff"}, - {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee4c5120ddf7d4dd1eaf079af3af7102b56d919fa13ad55600a4e0ebe532779b"}, - {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97d9e00f3ac7c18e685320601f91468ec06c58acc185d18bb8e511f196c8d4b2"}, - {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e19add867cebfb249b4e7beac382d33215d6d54476bb6be46b01f8cafb4878b"}, - {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a027f8f723d07c3f21963caa7d585dcc9b089335565dabe9c814b5f70c52705a"}, - {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:61d7857950a3139bce035ad0b0945f839532987dfb4c06cfe160254f4d19df03"}, - {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:53b2b45052e7149ee8b96067793db8ecc1ae1111f2f96fe1f88ea5ad5fd92d10"}, - {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bbb1a71b1784e68870800b1bc9f3313918edc63dbb8f29fbd2e767ce5821696c"}, - {file = "frozenlist-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:ab6fa8c7871877810e1b4e9392c187a60611fbf0226a9e0b11b7b92f5ac72792"}, - {file = "frozenlist-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89139662cc4e65a4813f4babb9ca9544e42bddb823d2ec434e18dad582543bc"}, - {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4c0c99e31491a1d92cde8648f2e7ccad0e9abb181f6ac3ddb9fc48b63301808e"}, - {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:61e8cb51fba9f1f33887e22488bad1e28dd8325b72425f04517a4d285a04c519"}, - {file = "frozenlist-1.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc2f3e368ee5242a2cbe28323a866656006382872c40869b49b265add546703f"}, - {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58fb94a01414cddcdc6839807db77ae8057d02ddafc94a42faee6004e46c9ba8"}, - {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:022178b277cb9277d7d3b3f2762d294f15e85cd2534047e68a118c2bb0058f3e"}, - {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:572ce381e9fe027ad5e055f143763637dcbac2542cfe27f1d688846baeef5170"}, - {file = 
"frozenlist-1.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19127f8dcbc157ccb14c30e6f00392f372ddb64a6ffa7106b26ff2196477ee9f"}, - {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42719a8bd3792744c9b523674b752091a7962d0d2d117f0b417a3eba97d1164b"}, - {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2743bb63095ef306041c8f8ea22bd6e4d91adabf41887b1ad7886c4c1eb43d5f"}, - {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fa47319a10e0a076709644a0efbcaab9e91902c8bd8ef74c6adb19d320f69b83"}, - {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52137f0aea43e1993264a5180c467a08a3e372ca9d378244c2d86133f948b26b"}, - {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:f5abc8b4d0c5b556ed8cd41490b606fe99293175a82b98e652c3f2711b452988"}, - {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1e1cf7bc8cbbe6ce3881863671bac258b7d6bfc3706c600008925fb799a256e2"}, - {file = "frozenlist-1.3.1-cp38-cp38-win32.whl", hash = "sha256:0dde791b9b97f189874d654c55c24bf7b6782343e14909c84beebd28b7217845"}, - {file = "frozenlist-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:9494122bf39da6422b0972c4579e248867b6b1b50c9b05df7e04a3f30b9a413d"}, - {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31bf9539284f39ff9398deabf5561c2b0da5bb475590b4e13dd8b268d7a3c5c1"}, - {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e0c8c803f2f8db7217898d11657cb6042b9b0553a997c4a0601f48a691480fab"}, - {file = "frozenlist-1.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da5ba7b59d954f1f214d352308d1d86994d713b13edd4b24a556bcc43d2ddbc3"}, - {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e6b2b456f21fc93ce1aff2b9728049f1464428ee2c9752a4b4f61e98c4db96"}, - {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526d5f20e954d103b1d47232e3839f3453c02077b74203e43407b962ab131e7b"}, - {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b499c6abe62a7a8d023e2c4b2834fce78a6115856ae95522f2f974139814538c"}, - {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab386503f53bbbc64d1ad4b6865bf001414930841a870fc97f1546d4d133f141"}, - {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f63c308f82a7954bf8263a6e6de0adc67c48a8b484fab18ff87f349af356efd"}, - {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:12607804084d2244a7bd4685c9d0dca5df17a6a926d4f1967aa7978b1028f89f"}, - {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:da1cdfa96425cbe51f8afa43e392366ed0b36ce398f08b60de6b97e3ed4affef"}, - {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f810e764617b0748b49a731ffaa525d9bb36ff38332411704c2400125af859a6"}, - {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:35c3d79b81908579beb1fb4e7fcd802b7b4921f1b66055af2578ff7734711cfa"}, - {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c92deb5d9acce226a501b77307b3b60b264ca21862bd7d3e0c1f3594022f01bc"}, - {file = 
"frozenlist-1.3.1-cp39-cp39-win32.whl", hash = "sha256:5e77a8bd41e54b05e4fb2708dc6ce28ee70325f8c6f50f3df86a44ecb1d7a19b"}, - {file = "frozenlist-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:625d8472c67f2d96f9a4302a947f92a7adbc1e20bedb6aff8dbc8ff039ca6189"}, - {file = "frozenlist-1.3.1.tar.gz", hash = "sha256:3a735e4211a04ccfa3f4833547acdf5d2f863bfeb01cfd3edaffbc251f15cec8"}, -] -ftrack-python-api = [ - {file = "ftrack-python-api-2.3.3.tar.gz", hash = "sha256:358f37e5b1c5635eab107c19e27a0c890d512877f78af35b1ac416e90c037295"}, - {file = "ftrack_python_api-2.3.3-py2.py3-none-any.whl", hash = "sha256:82834c4d5def5557a2ea547a7e6f6ba84d3129e8f90457d8bbd85b287a2c39f6"}, -] +frozenlist = [] +ftrack-python-api = [] future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] -gazu = [ - {file = "gazu-0.8.30-py2.py3-none-any.whl", hash = "sha256:d692927a11314151bc33e7d67edee634053f70a3b09e4500dfc6626bfea18753"}, -] +gazu = [] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, @@ -2253,42 +1944,24 @@ gitpython = [ {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] -google-api-core = [ - {file = "google-api-core-2.8.2.tar.gz", hash = "sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc"}, - {file = "google_api_core-2.8.2-py3-none-any.whl", hash = "sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50"}, -] +google-api-core = [] google-api-python-client = [ {file = "google-api-python-client-1.12.11.tar.gz", hash = "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9"}, {file = "google_api_python_client-1.12.11-py2.py3-none-any.whl", hash = "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b"}, ] -google-auth = [ - {file = "google-auth-2.10.0.tar.gz", hash = "sha256:7904dbd44b745c7323fef29565adee2fe7ff48473e2d94443aced40b0404a395"}, - {file = "google_auth-2.10.0-py2.py3-none-any.whl", hash = "sha256:1deba4a54f95ef67b4139eaf5c20eaa7047215eec9f6a2344599b8596db8863b"}, -] +google-auth = [] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, ] -googleapis-common-protos = [ - {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, - {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, -] +googleapis-common-protos = [] httplib2 = [ {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, ] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = 
"sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -imagesize = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, - {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, -] +idna = [] +imagesize = [] +importlib-metadata = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -2301,18 +1974,12 @@ jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] -jeepney = [ - {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, - {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, -] +jeepney = [] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] -jinxed = [ - {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, - {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, -] +jinxed = [] jsonschema = [ {file = "jsonschema-2.6.0-py2.py3-none-any.whl", hash = "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08"}, {file = "jsonschema-2.6.0.tar.gz", hash = "sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02"}, @@ -2360,58 +2027,16 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] -lief = [ - {file = "lief-0.12.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:4fbbc9d520de87ac22210c62d22a9b088e5460f9a028741311e6f68ef8877ddd"}, - {file = "lief-0.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443e4494df448ea1a021976258c7a6aca27d81b0612783fa3a84fab196fb9fcb"}, - {file = "lief-0.12.1-cp310-cp310-win32.whl", hash = "sha256:1c4019dddf03a5185462fb5ea04327cee08d40f46777b02f0773c7dc294552ea"}, - {file = "lief-0.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:d7e09968f99ddf1e3983d3bcc16c62d1b6635a345fee8d8139f82b31bad457d6"}, - {file = "lief-0.12.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:9fa6269ec4fa3f874b807fbba3c48a46af30df2497723f6966080e3eb630cb26"}, - {file = "lief-0.12.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b05cac5fa491e01e1819573bbbbcaea0a4229f4aa3a2edb231b5695ddaf2d"}, - {file = 
"lief-0.12.1-cp36-cp36m-win32.whl", hash = "sha256:f1292bff96579c18e01e20b7a14043052379fe6e9a476c1d6d88aca43e5f9ac7"}, - {file = "lief-0.12.1-cp36-cp36m-win_amd64.whl", hash = "sha256:dab63876113bd573d64ce043f50153f6e2810e5e78256397aa0fe1fedf82ab84"}, - {file = "lief-0.12.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:5771f5226b62c885a7aa30c1b98040d39229a1dab889d03155e5538e57d0054b"}, - {file = "lief-0.12.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:8ec307a762505076a6d31566225a231c44ec7063c0e7d751ac4654c674454c47"}, - {file = "lief-0.12.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a755f6088d3b2041e4402adf917ac87e5ad9d1c5278973f48a29a5631fe393eb"}, - {file = "lief-0.12.1-cp37-cp37m-win32.whl", hash = "sha256:5d746f7eb6d3bf35a0230c7184aaaf434cb1ea89d7e7c8e8fe14a49cf2bb17a0"}, - {file = "lief-0.12.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2d3ab7212da696bcbe5ca9dd78ceaa32dfb8a0e85e18001793b4441ef4624561"}, - {file = "lief-0.12.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:4360b0acd525ba77777cc38f0e5128c90c93cc4e91ab566ef3aa45b7f8a8c57e"}, - {file = "lief-0.12.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5e82e466d36cbabb28cc1a787b554d2feae5ab55c39cab58ef64fb6513bad92a"}, - {file = "lief-0.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa0022a3bf70ef46335639e61b946cc2d9cf012d60e263c215e3e64b1ce38b4"}, - {file = "lief-0.12.1-cp38-cp38-win32.whl", hash = "sha256:d29f91d9f64f67d3ada5b7e0e48ab084d825fb4601d32d9fecdd2bdf23cdad23"}, - {file = "lief-0.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:7dea6b3f17d362f93165379c46dadb012c73b1f751c8ceac256e5f43842cd86d"}, - {file = "lief-0.12.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:44012da4c32c670a97bb8a055a4ff16168cfaa757d03986f319aa3329a43e343"}, - {file = "lief-0.12.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e1d23997b0a71d34e766ff183be07854c6f698fd3d6aa44bf30b6b7f4f77ef55"}, - {file = "lief-0.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b845eca79c772041efb38b50cfaf951e24bc047ec462450b7e54e75b7e2bee0d"}, - {file = "lief-0.12.1-cp39-cp39-win32.whl", hash = "sha256:0df84ac2df20b14db12e69442d39b0e8cd89428ba3b131995e0570bcd3725460"}, - {file = "lief-0.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:960a2da9f28c8d5dba753bb9ab77e26b3c6ff9b9658918be95650ceb8ee91e68"}, - {file = "lief-0.12.1.zip", hash = "sha256:4ff4ccfae2e1ee4ccba2b5556027dbb56282b8a973c5835c5b597e8b7b664416"}, -] log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = 
"MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2420,27 +2045,14 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2450,12 +2062,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2525,109 +2131,22 @@ multidict = [ {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, ] -opentimelineio = [ - {file = "OpenTimelineIO-0.14.0.dev1-cp310-cp310-linux_x86_64.whl", hash = "sha256:112c27ad419a79c88cd4ebac96278ab8f446fda4c6e1a70c871f2b24b3d003ef"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e2416ca76805a07ecbcdda65e5a31ce447e04e2db7082d72582740cbd8a16d7"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8806dc240496b72e16a0fddacb0b2e825d19656d80689098e6c5bd6a805bc84"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp36-cp36m-win_amd64.whl", hash = "sha256:0aa54488ca50b53ac247610cef23fb63619dd1993016c0cd4069e54526d5905c"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-linux_x86_64.whl", hash = "sha256:8b11287eb733ad1c7fc53d4af3e3f926c396add6c3a3c1417b9c2b001f7ef4ba"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:2cba2ce567fc06f042365393dbe8e99dc4c1361999fb7ddc03d4b8b0d5ddb894"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:3d96da307c1969c309974a6734c7f3e39925236c845c8289f25d4d7d00be3f0c"}, - 
{file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:968cb0c5e6e7d697037b1cd4f7707521995a32fc51664139ed15004b93ab8106"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dae64aa418193ca094854a55efa8bcc5a5c2855b3679509987e8b48610d31"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp37-cp37m-win_amd64.whl", hash = "sha256:85dfae42f5a992ef85d0015f33f999751d846a484ef907e8834407d545a7ee6a"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aaac346cb758de719f88ac175d3948409ded39c5eed0844068402f70f3e90b6"}, - {file = "OpenTimelineIO-0.14.0.dev1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a09f6dd199218cf69faf42a8b21f5be2cd01764e57dbcc8456b3ced564eb110f"}, - {file = "OpenTimelineIO-0.14.0.dev1.tar.gz", hash = "sha256:d29eafd5188c3ad6c7b6d2095bf69984d590c331b701d3bbc644d7abd5f08606"}, -] +opentimelineio = [] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -paramiko = [ - {file = "paramiko-2.11.0-py2.py3-none-any.whl", hash = "sha256:655f25dc8baf763277b933dfcea101d636581df8d6b9774d1fb653426b72c270"}, - {file = "paramiko-2.11.0.tar.gz", hash = "sha256:003e6bee7c034c21fbb051bf83dc0a9ee4106204dd3c53054c71452cc4ec3938"}, -] +paramiko = [] parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, ] -patchelf = [ - {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:08e5e30a9415a8628de47726fbf15bfcd89be35df51c8a0a12372aebd0c5b4f6"}, - {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:4ce9d08119816bc4316c8ecc5f33da42384934fc0fc9cfbdded53a4930705466"}, - {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_17_s390x.manylinux2014_s390x.musllinux_1_1_s390x.whl", hash = "sha256:ae19b0f91aabc9af2608a4ca0395533f1df9122e6abc11ef2c8db6e4db0f98c2"}, - {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.musllinux_1_1_i686.whl", hash = "sha256:f3f87aee44d1d1b2209e38c4227b0316bb03538df68d20b3d96205aa87868d95"}, - {file = "patchelf-0.15.0.0-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:52e48c08110f2988a9761a5a383f7ae35b1e8e06a140e320d18386d3510697ed"}, - {file = "patchelf-0.15.0.0.tar.gz", hash = "sha256:0f8dcf0df0ba919ce37e8aef67a08bde5326897098451df94ab3a5eedc9e08d9"}, -] pathlib2 = [ {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"}, {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] -pillow = [ - {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, - {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"}, - {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, - {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, - {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"}, - {file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"}, - {file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"}, - {file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"}, - {file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = 
"sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"}, - {file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"}, - {file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"}, - {file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"}, - {file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"}, - {file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"}, - {file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"}, - {file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"}, - {file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"}, - {file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"}, - {file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"}, - {file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"}, - {file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, - {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, -] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] +pillow = [] +platformdirs = [] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, @@ -2640,22 +2159,7 @@ prefixed = [ {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, ] -protobuf = [ - {file = "protobuf-4.21.5-cp310-abi3-win32.whl", hash = "sha256:5310cbe761e87f0c1decce019d23f2101521d4dfff46034f8a12a53546036ec7"}, - {file = "protobuf-4.21.5-cp310-abi3-win_amd64.whl", hash = "sha256:e5c5a2886ae48d22a9d32fbb9b6636a089af3cd26b706750258ce1ca96cc0116"}, - {file = "protobuf-4.21.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ee04f5823ed98bb9a8c3b1dc503c49515e0172650875c3f76e225b223793a1f2"}, - {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:b04484d6f42f48c57dd2737a72692f4c6987529cdd148fb5b8e5f616862a2e37"}, - {file = "protobuf-4.21.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e0b272217aad8971763960238c1a1e6a65d50ef7824e23300da97569a251c55"}, - {file = "protobuf-4.21.5-cp37-cp37m-win32.whl", hash = "sha256:5eb0724615e90075f1d763983e708e1cef08e66b1891d8b8b6c33bc3b2f1a02b"}, - {file = "protobuf-4.21.5-cp37-cp37m-win_amd64.whl", hash = "sha256:011c0f267e85f5d73750b6c25f0155d5db1e9443cd3590ab669a6221dd8fcdb0"}, - {file = "protobuf-4.21.5-cp38-cp38-win32.whl", hash = "sha256:7b6f22463e2d1053d03058b7b4ceca6e4ed4c14f8c286c32824df751137bf8e7"}, - {file = "protobuf-4.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:b52e7a522911a40445a5f588bd5b5e584291bfc5545e09b7060685e4b2ff814f"}, - {file = "protobuf-4.21.5-cp39-cp39-win32.whl", hash = "sha256:a7faa62b183d6a928e3daffd06af843b4287d16ef6e40f331575ecd236a7974d"}, - {file 
= "protobuf-4.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:5e0ce02418ef03d7657a420ae8fd6fec4995ac713a3cb09164e95f694dbcf085"}, - {file = "protobuf-4.21.5-py2.py3-none-any.whl", hash = "sha256:bf711b451212dc5b0fa45ae7dada07d8e71a4b0ff0bc8e4783ee145f47ac4f82"}, - {file = "protobuf-4.21.5-py3-none-any.whl", hash = "sha256:3ec6f5b37935406bb9df9b277e79f8ed81d697146e07ef2ba8a5a272fb24b2c9"}, - {file = "protobuf-4.21.5.tar.gz", hash = "sha256:eb1106e87e095628e96884a877a51cdb90087106ee693925ec0a300468a9be3a"}, -] +protobuf = [] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -2714,14 +2218,8 @@ pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, -] -pylint = [ - {file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"}, - {file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"}, -] +pygments = [] +pylint = [] pymongo = [ {file = "pymongo-3.12.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:c164eda0be9048f83c24b9b2656900041e069ddf72de81c17d874d0c32f6079f"}, {file = "pymongo-3.12.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:a055d29f1302892a9389a382bed10a3f77708bcf3e49bfb76f7712fa5f391cc6"}, @@ -2848,42 +2346,10 @@ pynput = [ {file = "pynput-1.7.6-py3.9.egg", hash = "sha256:264429fbe676e98e9050ad26a7017453bdd08768adb25cafb918347cf9f1eb4a"}, {file = "pynput-1.7.6.tar.gz", hash = "sha256:3a5726546da54116b687785d38b1db56997ce1d28e53e8d22fc656d8b92e533c"}, ] -pyobjc-core = [ - {file = "pyobjc-core-8.5.tar.gz", hash = "sha256:704c275439856c0d1287469f0d589a7d808d48b754a93d9ce5415d4eaf06d576"}, - {file = "pyobjc_core-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0c234143b48334443f5adcf26e668945a6d47bc1fa6223e80918c6c735a029d9"}, - {file = "pyobjc_core-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1486ee533f0d76f666804ce89723ada4db56bfde55e56151ba512d3f849857f8"}, - {file = "pyobjc_core-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:412de06dfa728301c04b3e46fd7453320a8ae8b862e85236e547cd797a73b490"}, - {file = "pyobjc_core-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b3e09cccb1be574a82cc9f929ae27fc4283eccc75496cb5d51534caa6bb83a3"}, - {file = "pyobjc_core-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:eeafe21f879666ab7f57efcc6b007c9f5f8733d367b7e380c925203ed83f000d"}, - {file = "pyobjc_core-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c0071686976d7ea8c14690950e504a13cb22b4ebb2bc7b5ec47c1c1c0f6eff41"}, -] -pyobjc-framework-applicationservices = [ - {file = "pyobjc-framework-ApplicationServices-8.5.tar.gz", hash = "sha256:fa3015ef8e3add90af3447d7fdcc7f8dd083cc2a1d58f99a569480a2df10d2b1"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:436b16ebe448a829a8312e10208eec81a2adcae1fff674dbcc3262e1bd76e0ca"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:408958d14aa7fcf46f2163754c211078bc63be1368934d86188202914dce077d"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1d6cd4ce192859a22e208da4d7177a1c3ceb1ef2f64c339fd881102b1210cadd"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0251d092adb1d2d116fd9f147ceef0e53b158a46c21245131c40b9d7b786d0db"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:9742e69fe6d4545d0e02b0ad0a7a2432bc9944569ee07d6e90ffa5ef614df9f7"}, - {file = "pyobjc_framework_ApplicationServices-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16f5677c14ea903c6aaca1dd121521825c39e816cae696d6ae32c0b287252ab2"}, -] -pyobjc-framework-cocoa = [ - {file = "pyobjc-framework-Cocoa-8.5.tar.gz", hash = "sha256:569bd3a020f64b536fb2d1c085b37553e50558c9f907e08b73ffc16ae68e1861"}, - {file = "pyobjc_framework_Cocoa-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7a7c160416696bf6035dfcdf0e603aaa52858d6afcddfcc5ab41733619ac2529"}, - {file = "pyobjc_framework_Cocoa-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6ceba444282030be8596b812260e8d28b671254a51052ad778d32da6e17db847"}, - {file = "pyobjc_framework_Cocoa-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f46b2b161b8dd40c7b9e00bc69636c3e6480b2704a69aee22ee0154befbe163a"}, - {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b31d425aee8698cbf62b187338f5ca59427fa4dca2153a73866f7cb410713119"}, - {file = "pyobjc_framework_Cocoa-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:898359ac1f76eedec8aa156847682378a8950824421c40edb89391286e607dc4"}, - {file = "pyobjc_framework_Cocoa-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:baa2947f76b119a3360973d74d57d6dada87ac527bab9a88f31596af392f123c"}, -] -pyobjc-framework-quartz = [ - {file = "pyobjc-framework-Quartz-8.5.tar.gz", hash = "sha256:d2bc5467a792ddc04814f12a1e9c2fcaf699a1c3ad3d4264cfdce6b9c7b10624"}, - {file = "pyobjc_framework_Quartz-8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9f0fb663f7872c9de94169031ac42b91ad01bd4cad49a9f1a0164be8f028426"}, - {file = "pyobjc_framework_Quartz-8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:567eec91287cfe9a1b6433717192c585935de8f3daa28d82ce72fdd6c7ac00f6"}, - {file = "pyobjc_framework_Quartz-8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f910ab41a712ffc7a8c3e3716a2d6f39ea4419004b26a2fd2d2f740ff5c262c"}, - {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29d07066781628278bf0e5278abcfc96ef6724c66c5629a0b4c214d319a82e55"}, - {file = "pyobjc_framework_Quartz-8.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:72abcde1a3d72be11f2c881c9b9872044c8f2de86d2047b67fe771713638b107"}, - {file = "pyobjc_framework_Quartz-8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8809b9a2df2f461697bdb45b6d1b5a4f881f88f09450e3990858e64e3e26c530"}, -] +pyobjc-core = [] +pyobjc-framework-applicationservices = [] +pyobjc-framework-cocoa = [] +pyobjc-framework-quartz = [] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, 
@@ -2907,14 +2373,8 @@ python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-engineio = [ - {file = "python-engineio-3.14.2.tar.gz", hash = "sha256:eab4553f2804c1ce97054c8b22cf0d5a9ab23128075248b97e1a5b2f29553085"}, - {file = "python_engineio-3.14.2-py2.py3-none-any.whl", hash = "sha256:5a9e6086d192463b04a1428ff1f85b6ba631bbb19d453b144ffc04f530542b84"}, -] -python-socketio = [ - {file = "python-socketio-4.6.1.tar.gz", hash = "sha256:cd1f5aa492c1eb2be77838e837a495f117e17f686029ebc03d62c09e33f4fa10"}, - {file = "python_socketio-4.6.1-py2.py3-none-any.whl", hash = "sha256:5a21da53fdbdc6bb6c8071f40e13d100e0b279ad997681c2492478e06f370523"}, -] +python-engineio = [] +python-socketio = [] python-xlib = [ {file = "python-xlib-0.31.tar.gz", hash = "sha256:74d83a081f532bc07f6d7afcd6416ec38403d68f68b9b9dc9e1f28fbf2d799e9"}, {file = "python_xlib-0.31-py2.py3-none-any.whl", hash = "sha256:1ec6ce0de73d9e6592ead666779a5732b384e5b8fb1f1886bd0a81cafa477759"}, @@ -2922,10 +2382,7 @@ python-xlib = [ python3-xlib = [ {file = "python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, ] -pytz = [ - {file = "pytz-2022.2-py2.py3-none-any.whl", hash = "sha256:d9b245e63af49c4e51afdec5402f56b99c0cb483a84a12bb8b7db980386baade"}, - {file = "pytz-2022.2.tar.gz", hash = "sha256:bc824559e43e8ab983426a49525079d186b25372ff63aa3430ccd527d95edc3a"}, -] +pytz = [] pywin32 = [ {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"}, {file = "pywin32-301-cp35-cp35m-win_amd64.whl", hash = "sha256:9635df6998a70282bd36e7ac2a5cef9ead1627b0a63b17c731312c7a0daebb72"}, @@ -2942,10 +2399,7 @@ pywin32-ctypes = [ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, ] -"qt.py" = [ - {file = "Qt.py-1.3.7-py2.py3-none-any.whl", hash = "sha256:150099d1c6f64c9621a2c9d79d45102ec781c30ee30ee69fc082c6e9be7324fe"}, - {file = "Qt.py-1.3.7.tar.gz", hash = "sha256:803c7bdf4d6230f9a466be19d55934a173eabb61406d21cb91e80c2a3f773b1f"}, -] +"qt.py" = [] qtawesome = [ {file = "QtAwesome-0.7.3-py2.py3-none-any.whl", hash = "sha256:ddf4530b4af71cec13b24b88a4cdb56ec85b1e44c43c42d0698804c7137b09b0"}, {file = "QtAwesome-0.7.3.tar.gz", hash = "sha256:b98b9038d19190e83ab26d91c4d8fc3a36591ee2bc7f5016d4438b8240d097bd"}, @@ -2958,35 +2412,19 @@ recommonmark = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, ] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -rsa = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, 
-] -secretstorage = [ - {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"}, - {file = "SecretStorage-3.3.2.tar.gz", hash = "sha256:0a8eb9645b320881c222e827c26f4cfcf55363e8b374a021981ef886657a912f"}, -] +requests = [] +rsa = [] +secretstorage = [] semver = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, ] -setuptools = [ - {file = "setuptools-60.10.0-py3-none-any.whl", hash = "sha256:782ef48d58982ddb49920c11a0c5c9c0b02e7d7d1c2ad0aa44e1a1e133051c96"}, - {file = "setuptools-60.10.0.tar.gz", hash = "sha256:6599055eeb23bfef457d5605d33a4d68804266e6cb430b0fb12417c5efeae36c"}, -] shotgun-api3 = [] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -slack-sdk = [ - {file = "slack_sdk-3.18.1-py2.py3-none-any.whl", hash = "sha256:63ce5e6253a31873d6c921c9feaa842a93a2f56e6e009cb7daf406f4bc4df798"}, - {file = "slack_sdk-3.18.1.tar.gz", hash = "sha256:a25d3d2bf0bf605d54d764d4a463fe7c0659ee24c13d75653e2bec247bd5998b"}, -] +slack-sdk = [] smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, @@ -2995,18 +2433,9 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -speedcopy = [ - {file = "speedcopy-2.1.4-py3-none-any.whl", hash = "sha256:e09eb1de67ae0e0b51d5b99a28882009d565a37a3cb3c6bae121e3a5d3cccb17"}, - {file = "speedcopy-2.1.4.tar.gz", hash = "sha256:eff007a97e49ec1934df4fa8074f4bd1cf4a3b14c5499d914988785cff0c199a"}, -] -sphinx = [ - {file = "Sphinx-5.0.1-py3-none-any.whl", hash = "sha256:36aa2a3c2f6d5230be94585bc5d74badd5f9ed8f3388b8eedc1726fe45b1ad30"}, - {file = "Sphinx-5.0.1.tar.gz", hash = "sha256:f4da1187785a5bc7312cc271b0e867a93946c319d106363e102936a3d9857306"}, -] -sphinx-qt-documentation = [ - {file = "sphinx_qt_documentation-0.4-py3-none-any.whl", hash = "sha256:fa131093f75cd1bd48699cd132e18e4d46ba9eaadc070e6026867cea75ecdb7b"}, - {file = "sphinx_qt_documentation-0.4.tar.gz", hash = "sha256:f43ba17baa93e353fb94045027fb67f9d935ed158ce8662de93f08b88eec6774"}, -] +speedcopy = [] +sphinx = [] +sphinx-qt-documentation = [] sphinx-rtd-theme = [ {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, @@ -3055,44 +2484,13 @@ tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -typed-ast = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = 
"typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, -] -typing-extensions = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, -] +typed-ast = [] +typing-extensions = [] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] -urllib3 = [ - {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"}, - {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, -] +urllib3 = [] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, @@ -3101,142 +2499,10 @@ websocket-client = [ {file = "websocket-client-0.59.0.tar.gz", hash = "sha256:d376bd60eace9d437ab6d7ee16f4ab4e821c9dae591e1b783c58ebd8aaf80c5c"}, {file = "websocket_client-0.59.0-py2.py3-none-any.whl", hash = "sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32"}, ] -wheel = [ - {file = "wheel-0.37.1-py2.py3-none-any.whl", hash = "sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a"}, - {file = "wheel-0.37.1.tar.gz", hash = "sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4"}, -] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = 
"wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = 
"wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] +wrapt = [] wsrpc-aiohttp = [ {file = "wsrpc-aiohttp-3.2.0.tar.gz", hash = "sha256:f467abc51bcdc760fc5aeb7041abdeef46eeca3928dc43dd6e7fa7a533563818"}, {file = "wsrpc_aiohttp-3.2.0-py3-none-any.whl", hash = "sha256:fa9b0bf5cb056898cb5c9f64cbc5eacb8a5dd18ab1b7f0cd4a2208b4a7fde282"}, ] -yarl = [ - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"}, - {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"}, - {file = 
"yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"}, - {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"}, - {file = "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"}, - {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"}, - {file = 
"yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"}, - {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"}, - {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"}, - {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"}, - {file = "yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"}, - {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"}, -] -zipp = [ - {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, - {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, -] +yarl = [] +zipp = [] diff --git a/pyproject.toml b/pyproject.toml index 2a0606a10c..0deb4f465b 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -70,13 +70,12 @@ requests = "^2.25.1" pysftp = "^0.2.9" dropbox = "^11.20.0" aiohttp-middlewares = "^2.0.0" -cx-Freeze = "6.11.1" [tool.poetry.dev-dependencies] flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "^6.11.1" +cx_freeze = "~6.9" GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" From 5079d05df103adf2690b161d1e726ebdb34038d9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Sep 2022 15:50:30 +0200 Subject: [PATCH 0916/2550] Hardcoded downgraded version of Poetry --- tools/create_env.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index 3f956e5c6a..cdb97d4942 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -68,6 +68,7 @@ function Install-Poetry() { } $env:POETRY_HOME="$openpype_root\.poetry" + $env:POETRY_VERSION="1.1.15" (Invoke-WebRequest -Uri https://install.python-poetry.org/ -UseBasicParsing).Content | & $($python) - } From 73309ffc324544726ecdbe5e43749d304637a91d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Sep 2022 15:51:44 +0200 Subject: [PATCH 0917/2550] Revert "Fix - update cx-freeze" This reverts commit 4fc90655d7e5b6869650638e4b7ff064ebc2271e. --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 726b248f8c..21b6bda880 100644 --- a/poetry.lock +++ b/poetry.lock @@ -339,7 +339,7 @@ test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", [[package]] name = "cx-freeze" -version = "6.11.1" +version = "6.9" description = "Create standalone executables from Python scripts" category = "dev" optional = false From e189b21e543bf0480d0dba31dd18c2b2107104c6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:55:05 +0200 Subject: [PATCH 0918/2550] :bug: set AttributeValues as new style class --- openpype/pipeline/create/context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 1b2521e4f7..2962f43443 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -85,7 +85,7 @@ class InstanceMember: }) -class AttributeValues: +class AttributeValues(object): """Container which keep values of Attribute definitions. Goal is to have one object which hold values of attribute definitions for From 13dd125e2677bda06f5afe21971a4e9893b01b5a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:55:37 +0200 Subject: [PATCH 0919/2550] :rotating_light: remove debug prints --- openpype/hosts/houdini/api/pipeline.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 6daf942cf0..92761b7b4e 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" import os import sys import logging @@ -72,9 +74,11 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): sys.path.append(hou_pythonpath) - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. - # todo: make sure this doesn't trigger when opening with last workfile + # Set asset settings for the empty scene directly after launch of + # Houdini so it initializes into the correct scene FPS, + # Frame Range, etc. 
+ # TODO: make sure this doesn't trigger when + # opening with last workfile. _set_context_settings() def has_unsaved_changes(self): @@ -133,9 +137,6 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): lib.imprint(root_node, data) def get_context_data(self): - from pprint import pformat - - self.log.debug(f"----" + pformat(lib.read(hou.node("/")))) return lib.read(hou.node("/")) From d2240544ad2c00558114ef4122618ca2e73ad0e6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Sep 2022 15:55:58 +0200 Subject: [PATCH 0920/2550] Hardcoded downgraded version of Poetry Latest 1.2 version breaks build because cx-freeze. Latest cx-freeze works on Win, fails on Linux. --- tools/create_env.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/create_env.sh b/tools/create_env.sh index fba3942b87..1ecd960fe1 100755 --- a/tools/create_env.sh +++ b/tools/create_env.sh @@ -109,6 +109,7 @@ detect_python () { install_poetry () { echo -e "${BIGreen}>>>${RST} Installing Poetry ..." export POETRY_HOME="$openpype_root/.poetry" + export POETRY_VERSION="1.1.15" command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; } curl -sSL https://install.python-poetry.org/ | python - } From f09cd22e7ce6b8546f8a74f7b847edc2bf63eef5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:56:06 +0200 Subject: [PATCH 0921/2550] :recycle: remove unused import --- openpype/hosts/houdini/api/plugin.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 7120a49e41..ff747085da 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Houdini specific Avalon/Pyblish plugin definitions.""" import sys -import six from abc import ( ABCMeta ) From c0263462663f2d099a1db47850152fe7b6ee1791 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:56:40 +0200 Subject: [PATCH 0922/2550] :bug: set output name to subset name --- openpype/hosts/houdini/plugins/create/create_pointcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 686dbaa7ab..3365e25091 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -29,7 +29,7 @@ class CreatePointCache(plugin.HoudiniCreator): "prim_to_detail_pattern": "cbId", "format": 2, "facesets": 0, - "filename": "$HIP/pyblish/{}.abc".format(self.identifier) + "filename": "$HIP/pyblish/{}.abc".format(subset_name) } if instance_node: From 27d131f0eea1dfb74b750a0a6a1cc622d152b2ca Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:57:16 +0200 Subject: [PATCH 0923/2550] :recycle: optimize imprint function --- openpype/hosts/houdini/api/lib.py | 85 +++++++++++++++---------------- 1 file changed, 41 insertions(+), 44 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 5d99d7f363..f438944b09 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -17,7 +17,7 @@ import hou self = sys.modules[__name__] self._parent = None log = logging.getLogger(__name__) - +JSON_PREFIX = "JSON:::" def get_asset_fps(): """Return current asset fps.""" @@ -290,6 +290,11 @@ def imprint(node, data, update=False): 
http://www.sidefx.com/docs/houdini/hom/hou/ParmTemplate.html + Because of some update glitch where you cannot overwrite existing + ParmTemplates on node using: + `setParmTemplates()` and `parmTuplesInFolder()` + update is done in another pass. + Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value @@ -304,38 +309,48 @@ def imprint(node, data, update=False): if not data: return - current_parameters = node.spareParms() - current_keys = [p.name() for p in current_parameters] - update_keys = [] - - parm_group = node.parmTemplateGroup() - parm_folder = hou.FolderParmTemplate("folder", "Extra") + current_parms = {p.name(): p for p in node.spareParms()} + update_parms = [] templates = [] + for key, value in data.items(): if value is None: continue - if key in current_keys: + parm = get_template_from_value(key, value) + + if key in current_parms.keys(): if not update: - print(f"{key} already exists on {node}") + log.debug("{} already exists on {}".format(key, node)) else: - print(f"replacing {key}") - update_keys.append((key, value)) + log.debug("replacing {}".format(key)) + update_parms.append(parm) continue - parm = parm_to_template(key, value) # parm.hide(True) templates.append(parm) - parm_folder.setParmTemplates(templates) - parm_group.append(parm_folder) + + parm_group = node.parmTemplateGroup() + parm_folder = parm_group.findFolder("Extra") + + # if folder doesn't exist yet, create one and append to it, + # else append to existing one + if not parm_folder: + parm_folder = hou.FolderParmTemplate("folder", "Extra") + parm_folder.setParmTemplates(templates) + parm_group.append(parm_folder) + else: + for template in templates: + parm_group.appendToFolder(parm_folder, template) + node.setParmTemplateGroup(parm_group) - if update_keys: - parms = node.parmTuplesInFolder(("Extra",)) - for parm in parms: - for key, value in update_keys: - if parm.name() == key: - node.replaceSpareParmTuple( - parm.name(), parm_to_template(key, value)) + # TODO: Updating is done here, by calling probably deprecated functions. + # This needs to be addressed in the future. + if not update_parms: + return + + for parm in update_parms: + node.replaceSpareParmTuple(parm.name(), parm) def lsattr(attr, value=None, root="/"): @@ -406,9 +421,9 @@ def read(node): value = parameter.eval() # test if value is json encoded dict if isinstance(value, six.string_types) and \ - len(value) > 0 and value.startswith("JSON:::"): + value.startswith(JSON_PREFIX): try: - value = json.loads(value.lstrip("JSON:::")) + value = json.loads(value[len(JSON_PREFIX):]) except json.JSONDecodeError: # not a json pass @@ -478,24 +493,6 @@ def reset_framerange(): hou.setFrame(frame_start) -def load_creator_code_to_asset( - otl_file_path, node_type_name, source_file_path): - # type: (str, str, str) -> None - # Load the Python source code. - with open(source_file_path, "rb") as src: - source = src.read() - - # Find the asset definition in the otl file. - definitions = [definition - for definition in hou.hda.definitionsInFile(otl_file_path) - if definition.nodeTypeName() == node_type_name] - assert(len(definitions) == 1) - definition = definitions[0] - - # Store the source code into the PythonCook section of the asset. 
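The imprint/read changes in the lib.py hunk above persist dictionary values on string parms by prefixing a JSON payload with "JSON:::" and stripping that prefix again on read. A minimal, Houdini-free sketch of that round-trip, assuming illustrative helper names (encode_value/decode_value are not part of the patch):

    import json

    JSON_PREFIX = "JSON:::"

    def encode_value(value):
        # Dicts cannot be stored directly as parm values,
        # so serialize them into a prefixed JSON string.
        if isinstance(value, dict):
            return JSON_PREFIX + json.dumps(value)
        return value

    def decode_value(value):
        # Only strings carrying the prefix are treated as JSON payloads.
        if isinstance(value, str) and value.startswith(JSON_PREFIX):
            try:
                return json.loads(value[len(JSON_PREFIX):])
            except ValueError:
                # Not valid JSON after all - keep the raw string.
                return value
        return value

    original = {"id": "pyblish.avalon.instance", "family": "pointcache"}
    assert decode_value(encode_value(original)) == original

The prefix keeps plain strings unambiguous: only values that explicitly start with the marker are ever run through json.loads.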
- definition.addSection("PythonCook", source) - - def get_main_window(): """Acquire Houdini's main window""" if self._parent is None: @@ -503,7 +500,7 @@ def get_main_window(): return self._parent -def parm_to_template(key, value): +def get_template_from_value(key, value): if isinstance(value, float): parm = hou.FloatParmTemplate(name=key, label=key, @@ -528,8 +525,8 @@ def parm_to_template(key, value): label=key, num_components=1, default_value=( - "JSON:::" + json.dumps(value),)) + JSON_PREFIX + json.dumps(value),)) else: raise TypeError("Unsupported type: %r" % type(value)) - return parm \ No newline at end of file + return parm From f0dc54f78e230c1d39c0c33b9360b97876d6ce22 Mon Sep 17 00:00:00 2001 From: Hayley GUILLOT Date: Fri, 2 Sep 2022 16:32:45 +0200 Subject: [PATCH 0924/2550] Update credentials.py Implementation got moved from emit_on_kitsu_login to the login function --- openpype/modules/kitsu/utils/credentials.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index d853d5b437..013a9966c5 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -33,7 +33,10 @@ def validate_credentials( except gazu.exception.AuthFailedException: return False - emit_on_kitsu_login(login) + emit_event("kitsu.user.logged", + data={"username": login}, + source="kitsu") + return True @@ -103,15 +106,4 @@ def set_credentials_envs(login: str, password: str): password (str): Kitsu user password """ os.environ["KITSU_LOGIN"] = login - os.environ["KITSU_PWD"] = password - - -def emit_on_kitsu_login(login: str): - """Notifies listeners that Kitsu module succesfully connected, - and passes them data - - Args: - login (str): Kitsu username - """ - event_data = {"username": login} - emit_event("kitsu.user.logged", data=event_data, source="kitsu") + os.environ["KITSU_PWD"] = password \ No newline at end of file From 58fd5a1b097bd7d7e71f004b3fff8529296b1102 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= Date: Fri, 2 Sep 2022 16:32:52 +0200 Subject: [PATCH 0925/2550] Make to Optional Arguments --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index f013251bb1..199c59053b 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -318,7 +318,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: ) -def sync_all_projects(login: str, password: str, ignore_projects: list = []): +def sync_all_projects(login: str, password: str, ignore_projects=[]): """Update all OP projects in DB with Zou data. 
Args: From a72b84a58695b11b6128a5985014c7a0ebea15dd Mon Sep 17 00:00:00 2001 From: Hayley GUILLOT Date: Fri, 2 Sep 2022 16:33:34 +0200 Subject: [PATCH 0926/2550] Forgot to lint --- openpype/modules/kitsu/utils/credentials.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index 013a9966c5..adcfb07cd5 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -33,10 +33,8 @@ def validate_credentials( except gazu.exception.AuthFailedException: return False - emit_event("kitsu.user.logged", - data={"username": login}, - source="kitsu") - + emit_event("kitsu.user.logged", data={"username": login}, source="kitsu") + return True @@ -106,4 +104,4 @@ def set_credentials_envs(login: str, password: str): password (str): Kitsu user password """ os.environ["KITSU_LOGIN"] = login - os.environ["KITSU_PWD"] = password \ No newline at end of file + os.environ["KITSU_PWD"] = password From 88e4798b535c47cefce3dc2a1ed9aacf60dd0f68 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 19:50:56 +0200 Subject: [PATCH 0927/2550] Remove old type hint --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index bb48fe6902..68d55fef5d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -641,7 +641,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return result def _patch_workfile(self): - # type: (str, dict) -> [str, None] """Patch Maya scene. 
This will take list of patches (lines to add) and apply them to From 153f2c2e95526f0ace683249486316cfd5bc213e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 3 Sep 2022 04:15:14 +0000 Subject: [PATCH 0928/2550] [Automated] Bump version --- CHANGELOG.md | 46 ++++++++++++++++++++++----------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 25 insertions(+), 25 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c8834dd49..b35e89e96e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,23 +1,42 @@ # Changelog -## [3.14.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) **🆕 New features** +- Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) - Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) +**🚀 Enhancements** + +- SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) +- Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) +- Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) + **🐛 Bug fixes** +- Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) - Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) +- Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) +- Maya: `containerise` dont skip empty values [\#3674](https://github.com/pypeclub/OpenPype/pull/3674) **🔀 Refactored code** +- AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) +- General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) +- General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) +- General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) + +**Merged pull requests:** + +- Standalone Publisher: Ignore empty labels, then still use name like other asset models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) ## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) @@ -63,6 +82,7 @@ - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) - General: Module 
interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) @@ -93,6 +113,7 @@ **🚀 Enhancements** - Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) +- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) - Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) - Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661) - General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) @@ -103,43 +124,22 @@ - General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) - General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) - Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) -- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) -- Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) -- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) **🔀 Refactored code** - General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) - Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653) - Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) -- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) -- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) **Merged pull requests:** - Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) - Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645) -- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) -- fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) -**🚀 Enhancements** - -- Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) - -**🐛 Bug fixes** - -- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) -- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) -- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) - -**🔀 Refactored code** - -- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) - ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) diff --git a/openpype/version.py b/openpype/version.py index 0c114b6060..26b03c37e5 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype 
version.""" -__version__ = "3.14.2-nightly.1" +__version__ = "3.14.2-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 0deb4f465b..f74f40c561 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.14.2+nightly.1" # OpenPype +version = "3.14.2-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 5645bcb353b13b1711ba67e0a3b394b273e7cef3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 3 Sep 2022 13:17:10 +0200 Subject: [PATCH 0929/2550] Use custom plugin info per type of plugin submission --- .../plugins/publish/submit_maya_deadline.py | 127 +++++++++--------- 1 file changed, 66 insertions(+), 61 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 68d55fef5d..2a41d92efd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -49,6 +49,30 @@ class MayaPluginInfo: RenderSetupIncludeLights = attr.ib(default=None) # Include all lights flag +@attr.s +class PythonPluginInfo: + ScriptFile = attr.ib() + Version = attr.ib(default="3.6") + Arguments = attr.ib(default=None) + SingleFrameOnly = attr.ib(default=None) + + +@attr.s +class VRayPluginInfo: + InputFilename = attr.ib(default=None) # Input + SeparateFilesPerFrame = attr.ib(default=None) + VRayEngine = attr.ib(default="V-Ray") + Width = attr.ib(default=None) + Height = attr.ib(default=None) # Mandatory for Deadline + OutputFilePath = attr.ib(default=True) + OutputFileName = attr.ib(default=None) # Render only this layer + + +@attr.s +class ArnoldPluginInfo: + ArnoldFile = attr.ib(default=None) + + class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): label = "Submit Render to Deadline" @@ -479,26 +503,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): def _get_vray_export_payload(self, data): job_info = copy.deepcopy(self.job_info) - job_info.Name = self._job_info_label("Export") # Get V-Ray settings info to compute output path - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - scene, _ = os.path.splitext(data["filename"]) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - output = os.path.dirname(first_file) + vray_scene = self.format_vray_output_filename() plugin_info = { "Renderer": "vray", "SkipExistingFrames": True, "UseLegacyRenderLayers": True, - "OutputFilePath": output + "OutputFilePath": os.path.dirname(vray_scene) } - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_arnold_export_payload(self, data): @@ -515,8 +532,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): script = os.path.normpath(module_path) job_info = copy.deepcopy(self.job_info) - plugin_info = copy.deepcopy(self.plugin_info) - job_info.Name = self._job_info_label("Export") # Force a single frame Python job @@ -540,14 +555,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): continue job_info.EnvironmentKeyValue[key] = value - plugin_info.update({ - "Version": "3.6", - "ScriptFile": script, - "Arguments": "", - "SingleFrameOnly": "True", - }) + plugin_info = PythonPluginInfo( + ScriptFile=script, + 
Version="3.6", + Arguments="", + SingleFrameOnly="True" + ) - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_vray_render_payload(self, data): @@ -558,27 +573,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.OverrideTaskExtraInfoNames = False # Plugin Info - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - # "vrayscene//_/" + plugin_info = VRayPluginInfo( + InputFilename=self.format_vray_output_filename(), + SeparateFilesPerFrame=False, + VRayEngine="V-Ray", + Width=self._instance.data["resolutionWidth"], + Height=self._instance.data["resolutionHeight"], + OutputFilePath=job_info.OutputDirectory[0], + OutputFileName=job_info.OutputFilename[0] + ) - scene, _ = os.path.splitext(self.scene_path) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - - plugin_info = { - "InputFilename": first_file, - "SeparateFilesPerFrame": True, - "VRayEngine": "V-Ray", - - "Width": self._instance.data["resolutionWidth"], - "Height": self._instance.data["resolutionHeight"], - "OutputFilePath": job_info.OutputDirectory[0], - "OutputFileName": job_info.OutputFilename[0] - } - - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_arnold_render_payload(self, data): @@ -590,55 +595,55 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Plugin Info ass_file, _ = os.path.splitext(data["output_filename_0"]) - first_file = ass_file + ".ass" - plugin_info = { - "ArnoldFile": first_file, - } + ass_filepath = ass_file + ".ass" - return job_info, plugin_info + plugin_info = ArnoldPluginInfo( + ArnoldFile=ass_filepath + ) - def format_vray_output_filename(self, filename, template, dir=False): + return job_info, attr.asdict(plugin_info) + + def format_vray_output_filename(self): """Format the expected output file of the Export job. Example: /_/ - "shot010_v006/shot010_v006_CHARS/CHARS" - - Args: - instance: - filename(str): - dir(bool): - + "shot010_v006/shot010_v006_CHARS/CHARS_0001.vrscene" Returns: str """ + + # "vrayscene//_/" + vray_settings = cmds.ls(type="VRaySettingsNode") + node = vray_settings[0] + template = cmds.getAttr("{}.vrscene_filename".format(node)) + scene, _ = os.path.splitext(self.scene_path) + def smart_replace(string, key_values): new_string = string for key, value in key_values.items(): new_string = new_string.replace(key, value) return new_string - # Ensure filename has no extension - file_name, _ = os.path.splitext(filename) + # Get workfile scene path without extension to format vrscene_filename + scene_filename = os.path.basename(self.scene_path) + scene_filename_no_ext, _ = os.path.splitext(scene_filename) layer = self._instance.data['setMembers'] # Reformat without tokens output_path = smart_replace( template, - {"": file_name, + {"": scene_filename_no_ext, "": layer}) - if dir: - return output_path.replace("\\", "/") - start_frame = int(self._instance.data["frameStartHandle"]) + workspace = self._instance.context.data["workspace"] filename_zero = "{}_{:04d}.vrscene".format(output_path, start_frame) + filepath_zero = os.path.join(workspace, filename_zero) - result = filename_zero.replace("\\", "/") - - return result + return filepath_zero.replace("\\", "/") def _patch_workfile(self): """Patch Maya scene. 
From 507dac4aa9f50e8978a841067262ce33e77cf5e0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 3 Sep 2022 15:06:24 +0200 Subject: [PATCH 0930/2550] Ensure integer math for _format_tiles See #3758 --- .../plugins/publish/submit_maya_deadline.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 2a41d92efd..7c486b7c34 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -754,14 +754,21 @@ def _format_tiles( used for assembler configuration. """ - tile = 0 + # Math used requires integers for correct output - as such + # we ensure our inputs are correct. + assert type(tiles_x) is int, "tiles_x must be an integer" + assert type(tiles_y) is int, "tiles_y must be an integer" + assert type(width) is int, "width must be an integer" + assert type(height) is int, "height must be an integer" + out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y + w_space = width // tiles_x + h_space = height // tiles_y cfg["TilesCropped"] = "False" + tile = 0 for tile_x in range(1, tiles_x + 1): for tile_y in reversed(range(1, tiles_y + 1)): tile_prefix = "_tile_{}x{}_{}x{}_".format( @@ -769,10 +776,10 @@ def _format_tiles( tiles_x, tiles_y ) - top = int(height - (tile_y * h_space)) - bottom = int(height - ((tile_y - 1) * h_space) - 1) - left = int((tile_x - 1) * w_space) - right = int((tile_x * w_space) - 1) + top = height - (tile_y * h_space) + bottom = height - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 # Job Info new_filename = "{}/{}{}".format( From f54f4cf99c824e927e5cbe24c7a02ec0e4a4fc62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= Date: Sun, 4 Sep 2022 22:43:25 +0200 Subject: [PATCH 0931/2550] Kitsu : Modification default value for Ignore_projects --- openpype/modules/kitsu/utils/update_op_with_zou.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 199c59053b..26cd125e15 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -318,7 +318,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: ) -def sync_all_projects(login: str, password: str, ignore_projects=[]): +def sync_all_projects(login: str, password: str, ignore_projects: list = None): """Update all OP projects in DB with Zou data. 
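The _format_tiles fix above switches to floor division so every crop coordinate stays an integer, which the Deadline tile assembler configuration expects. A small standalone sketch of the same region math, assuming an illustrative helper name (tile_regions is not part of the plugin):

    def tile_regions(width, height, tiles_x, tiles_y):
        # Floor division keeps tile sizes and crop borders integral.
        w_space = width // tiles_x
        h_space = height // tiles_y

        regions = []
        for tile_x in range(1, tiles_x + 1):
            for tile_y in reversed(range(1, tiles_y + 1)):
                top = height - (tile_y * h_space)
                bottom = height - ((tile_y - 1) * h_space) - 1
                left = (tile_x - 1) * w_space
                right = (tile_x * w_space) - 1
                regions.append((left, top, right, bottom))
        return regions

    # 2x2 tiles over 1920x1080 -> four 960x540 crops.
    print(tile_regions(1920, 1080, 2, 2))

Floor division guarantees the crop values stay integers end to end, which is why the patch also asserts that width, height and the tile counts are passed in as ints.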
Args: @@ -340,8 +340,9 @@ def sync_all_projects(login: str, password: str, ignore_projects=[]): dbcon.install() all_projects = gazu.project.all_open_projects() for project in all_projects: - if project["name"] not in ignore_projects: - sync_project_from_kitsu(dbcon, project) + if ignore_projects and project["name"] in ignore_projects: + continue + sync_project_from_kitsu(dbcon, project) def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): From fb48faf386c8d53c737ac9a6a00287730c934217 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= Date: Sun, 4 Sep 2022 22:49:07 +0200 Subject: [PATCH 0932/2550] Kitsu : Ignore_projects - minor fix - indent --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 26cd125e15..55a7bdc51d 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -341,7 +341,7 @@ def sync_all_projects(login: str, password: str, ignore_projects: list = None): all_projects = gazu.project.all_open_projects() for project in all_projects: if ignore_projects and project["name"] in ignore_projects: - continue + continue sync_project_from_kitsu(dbcon, project) From 81d8a53dbad5313fc435ca5669500e868f4c1134 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Sep 2022 12:20:16 +0200 Subject: [PATCH 0933/2550] fix function name --- openpype/client/__init__.py | 4 ++-- openpype/lib/avalon_context.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/client/__init__.py b/openpype/client/__init__.py index cf3ce1ae46..7831afd8ad 100644 --- a/openpype/client/__init__.py +++ b/openpype/client/__init__.py @@ -48,7 +48,7 @@ from .entities import ( from .entity_links import ( get_linked_asset_ids, get_linked_assets, - get_linked_representation_ids, + get_linked_representation_id, ) from .operations import ( @@ -102,7 +102,7 @@ __all__ = ( "get_linked_asset_ids", "get_linked_assets", - "get_linked_representation_ids", + "get_linked_representation_id", "create_project", ) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 470c40d0d7..c890e08d3e 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1061,14 +1061,14 @@ def get_linked_ids_for_representations( Function will be removed after release version 3.16.* """ - from openpype.client import get_linked_representation_ids + from openpype.client import get_linked_representation_id if not isinstance(repre_ids, list): repre_ids = [repre_ids] output = [] for repre_id in repre_ids: - output.extend(get_linked_representation_ids( + output.extend(get_linked_representation_id( project_name, repre_id=repre_id, link_type=link_type, From 88ab0462b5b8861f99ef0eb069d8f1e2ff4ccfd3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Sep 2022 12:21:19 +0200 Subject: [PATCH 0934/2550] fix new import path --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index c890e08d3e..12f4a5198b 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1039,7 +1039,7 @@ def get_last_workfile( ) -@deprecated("openpype.client.get_linked_ids_for_representations") +@deprecated("openpype.client.get_linked_representation_id") def get_linked_ids_for_representations( project_name, 
repre_ids, dbcon=None, link_type=None, max_depth=0 ): From 469ba24879956e5c1a83d6d042f6503d3e254593 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 5 Sep 2022 12:27:23 +0200 Subject: [PATCH 0935/2550] Fix - updated to not use deprecated function --- openpype/plugins/load/add_site.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 388a871e9d..ac931e41db 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -1,4 +1,4 @@ -from openpype.client import get_linked_ids_for_representations +from openpype.client import get_linked_representation_id from openpype.modules import ModulesManager from openpype.pipeline import load from openpype.modules.sync_server.utils import SiteAlreadyPresentError @@ -45,7 +45,7 @@ class AddSyncSite(load.LoaderPlugin): force=True) if family == "workfile": - links = get_linked_ids_for_representations( + links = get_linked_representation_id( project_name, repre_id=repre_id, link_type="reference" From db1fa6d40ef59f9e3061a637a04874d4857a6585 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 5 Sep 2022 12:49:43 +0200 Subject: [PATCH 0936/2550] add a python2 compatibility for the FileNotFoundError --- openpype/hosts/houdini/api/shelves.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 805ce4c397..248d99105c 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,6 +1,7 @@ import os import logging import platform +import six from openpype.settings import get_project_settings @@ -8,6 +9,9 @@ import hou log = logging.getLogger("openpype.hosts.houdini.shelves") +if six.PY2: + FileNotFoundError = IOError + def generate_shelves(): """This function generates complete shelves from shelf set to tools From d9a150022e1659aec584fe962f9c47e66bfb178d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:28:48 +0800 Subject: [PATCH 0937/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 4 ++-- openpype/hosts/maya/lib.py | 18 ++++++++++++++++++ .../defaults/project_settings/maya.json | 14 ++++++++++++++ .../projects_schema/schema_project_maya.json | 15 +++++++++++++++ 4 files changed, 49 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f565f6a308..5bf8b67fc2 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import copy_workspace_mel +from openpype.hosts.maya.lib import copy_workspace_mel,load_workspace_mel from . 
import menu, lib from .workio import ( open_file, @@ -550,7 +550,7 @@ def on_task_changed(): def before_workfile_save(event): workdir_path = event["workdir_path"] if workdir_path: - copy_workspace_mel(workdir_path) + load_workspace_mel(workdir_path) class MayaDirmap(HostDirmap): diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 6c142053e6..d24f267bbd 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,8 @@ import os import shutil +import json +from openpype.settings import get_current_project_settings def copy_workspace_mel(workdir): # Check that source mel exists @@ -24,3 +26,19 @@ def copy_workspace_mel(workdir): src_filepath, dst_filepath )) shutil.copy(src_filepath, dst_filepath) + + +def load_workspace_mel(workdir): + dst_filepath = os.path.join(workdir, "workspace.mel") + if os.path.exists(dst_filepath): + return + + if not os.path.exists(workdir): + os.makedirs(workdir) + + with open(dst_filepath, "w") as mel_file: + setting = get_current_project_settings() + mel_script = setting["maya"]["mel-workspace"]["scripts"] + for mel in mel_script: + mel_file.write(mel) + mel_file.write("\n") diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ac0f161cf2..0a46632042 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -15,6 +15,20 @@ "destination-path": [] } }, + "mel-workspace":{ + "scripts":[ + "workspace -fr \"shaders\" \"renderData/shaders\";", + "workspace -fr \"images\" \"renders\";", + "workspace -fr \"particles\" \"particles\";", + "workspace -fr \"mayaAscii\" \"\";", + "workspace -fr \"mayaBinary\" \"\";", + "workspace -fr \"scene\" \"\";", + "workspace -fr \"alembicCache\" \"cache/alembic\";", + "workspace -fr \"renderData\" \"renderData\";", + "workspace -fr \"sourceImages\" \"sourceimages\";", + "workspace -fr \"fileCache\" \"cache/nCache\";" + ] + }, "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index cb380194a7..a774d604ca 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -53,6 +53,21 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "mel-workspace", + "label": "Maya MEL Workspace", + "is_group": true, + "children": [ + { + "type": "list", + "object_type": "text", + "key": "scripts", + "label": "scripts" + } + ] + }, { "type": "schema", "name": "schema_scriptsmenu" From b88def9aea1fc1a682209ea78edcf5ae87a652e3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:40:30 +0800 Subject: [PATCH 0938/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 27 +------------------ .../defaults/project_settings/maya.json | 2 +- .../projects_schema/schema_project_maya.json | 4 +-- 3 files changed, 4 insertions(+), 29 deletions(-) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index d24f267bbd..bf06c9ad7d 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,33 +1,8 @@ import os import shutil -import json from openpype.settings import get_current_project_settings -def copy_workspace_mel(workdir): - # Check that source mel exists - current_dir = 
os.path.dirname(os.path.abspath(__file__)) - src_filepath = os.path.join(current_dir, "resources", "workspace.mel") - if not os.path.exists(src_filepath): - print("Source mel file does not exist. {}".format(src_filepath)) - return - - # Skip if workspace.mel already exists - dst_filepath = os.path.join(workdir, "workspace.mel") - if os.path.exists(dst_filepath): - return - - # Create workdir if does not exists yet - if not os.path.exists(workdir): - os.makedirs(workdir) - - # Copy file - print("Copying workspace mel \"{}\" -> \"{}\"".format( - src_filepath, dst_filepath - )) - shutil.copy(src_filepath, dst_filepath) - - def load_workspace_mel(workdir): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): @@ -38,7 +13,7 @@ def load_workspace_mel(workdir): with open(dst_filepath, "w") as mel_file: setting = get_current_project_settings() - mel_script = setting["maya"]["mel-workspace"]["scripts"] + mel_script = setting["maya"]["mel-workspace"]["definition"] for mel in mel_script: mel_file.write(mel) mel_file.write("\n") diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 0a46632042..162732280f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -16,7 +16,7 @@ } }, "mel-workspace":{ - "scripts":[ + "definition":[ "workspace -fr \"shaders\" \"renderData/shaders\";", "workspace -fr \"images\" \"renders\";", "workspace -fr \"particles\" \"particles\";", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index a774d604ca..7204ec586a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -63,8 +63,8 @@ { "type": "list", "object_type": "text", - "key": "scripts", - "label": "scripts" + "key": "definition", + "label": "definition" } ] }, From 109abb58987b22f6d390d424a27e209eff6b5638 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:41:09 +0800 Subject: [PATCH 0939/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 5bf8b67fc2..4768a9ee4f 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import copy_workspace_mel,load_workspace_mel +from openpype.hosts.maya.lib import load_workspace_mel from . 
import menu, lib from .workio import ( open_file, From 69d2cf20f5b4889ce674487d2da8fd2a230a093a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:42:10 +0800 Subject: [PATCH 0940/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index bf06c9ad7d..2853789656 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -3,6 +3,7 @@ import shutil from openpype.settings import get_current_project_settings + def load_workspace_mel(workdir): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): From fe1a1055c27072a73d45172389b603b69d19d296 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 5 Sep 2022 18:03:38 +0200 Subject: [PATCH 0941/2550] :bug: store context on dedicated node instead of root node root node doesn't allow storing of spare parameters --- openpype/hosts/houdini/api/pipeline.py | 32 +++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 92761b7b4e..4ff6873ced 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -30,6 +30,7 @@ from .lib import get_asset_fps log = logging.getLogger("openpype.hosts.houdini") AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS" +CONTEXT_CONTAINER = "/obj/OpenPypeContext" IS_HEADLESS = not hasattr(hou, "ui") PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins") @@ -132,12 +133,37 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): on_file_event_callback ) + @staticmethod + def _create_context_node(): + """Helper for creating context holding node. + + Returns: + hou.Node: context node + + """ + obj_network = hou.node("/obj") + op_ctx = obj_network.createNode( + "null", node_name="OpenPypeContext") + op_ctx.moveToGoodPosition() + op_ctx.setBuiltExplicitly(False) + op_ctx.setCreatorState("OpenPype") + op_ctx.setComment("OpenPype node to hold context metadata") + op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) + op_ctx.hide(True) + return op_ctx + def update_context_data(self, data, changes): - root_node = hou.node("/") - lib.imprint(root_node, data) + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self._create_context_node() + + lib.imprint(op_ctx, data) def get_context_data(self): - return lib.read(hou.node("/")) + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self._create_context_node() + return lib.read(op_ctx) def on_file_event_callback(event): From 5908f49b685a981d08d7ea1ff841d567018a7e76 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Sep 2022 10:56:01 +0200 Subject: [PATCH 0942/2550] updating README file --- README.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b8c04f8b49..a2f442b640 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ It can be built and ran on all common platforms. We develop and test on the foll - **Linux** - **Ubuntu** 20.04 LTS - **Centos** 7 -- **Mac OSX** +- **Mac OSX** - **10.15** Catalina - **11.1** Big Sur (using Rosetta2) @@ -287,6 +287,14 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`. 
**Note that it needs existing virtual environment.** + +Developer tools +------------- + +In case you wish to add your own tools to `.\tools` folder without git tracking, it is possible by adding it with `dev_*` suffix (`dev_clear_pyc(.ps1|.sh)`). + + + ## Contributors ✨ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): From a0e241b02fbfdee18b8ded65af89eedb343793d2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Sep 2022 10:58:22 +0200 Subject: [PATCH 0943/2550] README fix --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a2f442b640..a3d3cf1dbb 100644 --- a/README.md +++ b/README.md @@ -291,7 +291,7 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`. Developer tools ------------- -In case you wish to add your own tools to `.\tools` folder without git tracking, it is possible by adding it with `dev_*` suffix (`dev_clear_pyc(.ps1|.sh)`). +In case you wish to add your own tools to `.\tools` folder without git tracking, it is possible by adding it with `dev_*` suffix (example: `dev_clear_pyc(.ps1|.sh)`). From 70a17a8876541de84d98bd8f502421827fd78751 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Sep 2022 11:39:58 +0200 Subject: [PATCH 0944/2550] hiero: instances detection - timeline no need to retime --- openpype/hosts/hiero/plugins/publish/precollect_instances.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 0c7dbc1f22..84f2927fc7 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -318,10 +318,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin): @staticmethod def create_otio_time_range_from_timeline_item_data(track_item): - speed = track_item.playbackSpeed() timeline = phiero.get_current_sequence() frame_start = int(track_item.timelineIn()) - frame_duration = int((track_item.duration() - 1) / speed) + frame_duration = int(track_item.duration()) fps = timeline.framerate().toFloat() return hiero_export.create_otio_time_range( From 1ed5ef7dcdf9cd2183f009c541e4c52f5374075d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 6 Sep 2022 13:16:01 +0200 Subject: [PATCH 0945/2550] OP-3863 - attempt to speed up ExtractImage If workfile has a large number of layers (hundreds or thousands), ExtractImage wasn't too efficient. It was hiding/showing layers too many times. Current logic is to hide all, show only publishable layers of instance, save to image, hide them again. get_layers replaced by argument if possible. --- openpype/hosts/photoshop/api/lib.py | 11 +- openpype/hosts/photoshop/api/ws_stub.py | 19 ++- .../plugins/publish/extract_image.py | 120 +++++++++++------- 3 files changed, 97 insertions(+), 53 deletions(-) diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 73a546604f..221b4314e6 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -64,10 +64,15 @@ def maintained_selection(): @contextlib.contextmanager -def maintained_visibility(): - """Maintain visibility during context.""" +def maintained_visibility(layers=None): + """Maintain visibility during context. 
+ + Args: + layers (list) of PSItem (used for caching) + """ visibility = {} - layers = stub().get_layers() + if not layers: + layers = stub().get_layers() for layer in layers: visibility[layer.id] = layer.visible try: diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index b49bf1c73f..2c4d0ad5fc 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -229,10 +229,11 @@ class PhotoshopServerStub: return self._get_layers_in_layers(parent_ids) - def get_layers_in_layers_ids(self, layers_ids): + def get_layers_in_layers_ids(self, layers_ids, layers=None): """Return all layers that belong to layers (might be groups). Args: + layers_ids layers : Returns: @@ -240,10 +241,13 @@ class PhotoshopServerStub: """ parent_ids = set(layers_ids) - return self._get_layers_in_layers(parent_ids) + return self._get_layers_in_layers(parent_ids, layers) - def _get_layers_in_layers(self, parent_ids): - all_layers = self.get_layers() + def _get_layers_in_layers(self, parent_ids, layers=None): + if not layers: + layers = self.get_layers() + + all_layers = layers ret = [] for layer in all_layers: @@ -394,14 +398,17 @@ class PhotoshopServerStub: self.hide_all_others_layers_ids(extract_ids) - def hide_all_others_layers_ids(self, extract_ids): + def hide_all_others_layers_ids(self, extract_ids, layers=None): """hides all layers that are not part of the list or that are not children of this list Args: extract_ids (list): list of integer that should be visible + layers (list) of PSItem (used for caching) """ - for layer in self.get_layers(): + if not layers: + layers = self.get_layers() + for layer in layers: if layer.visible and layer.id not in extract_ids: self.set_visible(layer.id, False) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py index a133e33409..7543af95bd 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_image.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py @@ -1,61 +1,93 @@ import os -import openpype.api +import pyblish.api + +from openpype.pipeline import publish from openpype.hosts.photoshop import api as photoshop -class ExtractImage(openpype.api.Extractor): - """Produce a flattened image file from instance - - This plug-in takes into account only the layers in the group. - """ +class ExtractImage(pyblish.api.ContextPlugin): + """Save scene before extraction.""" + order = publish.Extractor.order - 0.48 label = "Extract Image" hosts = ["photoshop"] + families = ["image", "background"] formats = ["png", "jpg"] - def process(self, instance): - staging_dir = self.staging_dir(instance) - self.log.info("Outputting image to {}".format(staging_dir)) - - # Perform extraction + def process(self, context): stub = photoshop.stub() - files = {} + hidden_layer_ids = set() + + all_layers = stub.get_layers() + for layer in all_layers: + if not layer.visible: + hidden_layer_ids.add(layer.id) + stub.hide_all_others_layers_ids([], layers=all_layers) + with photoshop.maintained_selection(): - self.log.info("Extracting %s" % str(list(instance))) - with photoshop.maintained_visibility(): - ids = set() - layer = instance.data.get("layer") - if layer: - ids.add(layer.id) - add_ids = instance.data.pop("ids", None) - if add_ids: - ids.update(set(add_ids)) - extract_ids = set([ll.id for ll in stub. 
- get_layers_in_layers_ids(ids)]) - stub.hide_all_others_layers_ids(extract_ids) + # self.log.info("Extracting %s" % str(list(instance))) + with photoshop.maintained_visibility(layers=all_layers): + for instance in context: + if instance.data["family"] not in self.families: + continue - file_basename = os.path.splitext( - stub.get_active_document_name() - )[0] - for extension in self.formats: - _filename = "{}.{}".format(file_basename, extension) - files[extension] = _filename + staging_dir = self.staging_dir(instance) + self.log.info("Outputting image to {}".format(staging_dir)) - full_filename = os.path.join(staging_dir, _filename) - stub.saveAs(full_filename, extension, True) - self.log.info(f"Extracted: {extension}") + # Perform extraction + files = {} + ids = set() + layer = instance.data.get("layer") + if layer: + ids.add(layer.id) + add_ids = instance.data.pop("ids", None) + if add_ids: + ids.update(set(add_ids)) + extract_ids = set([ll.id for ll in stub. + get_layers_in_layers_ids(ids, all_layers) + if ll.id not in hidden_layer_ids]) - representations = [] - for extension, filename in files.items(): - representations.append({ - "name": extension, - "ext": extension, - "files": filename, - "stagingDir": staging_dir - }) - instance.data["representations"] = representations - instance.data["stagingDir"] = staging_dir + for extracted_id in extract_ids: + stub.set_visible(extracted_id, True) - self.log.info(f"Extracted {instance} to {staging_dir}") + file_basename = os.path.splitext( + stub.get_active_document_name() + )[0] + for extension in self.formats: + _filename = "{}.{}".format(file_basename, + extension) + files[extension] = _filename + + full_filename = os.path.join(staging_dir, + _filename) + stub.saveAs(full_filename, extension, True) + self.log.info(f"Extracted: {extension}") + + representations = [] + for extension, filename in files.items(): + representations.append({ + "name": extension, + "ext": extension, + "files": filename, + "stagingDir": staging_dir + }) + instance.data["representations"] = representations + instance.data["stagingDir"] = staging_dir + + self.log.info(f"Extracted {instance} to {staging_dir}") + + for extracted_id in extract_ids: + stub.set_visible(extracted_id, False) + + def staging_dir(self, instance): + """Provide a temporary directory in which to store extracted files + + Upon calling this method the staging directory is stored inside + the instance.data['stagingDir'] + """ + + from openpype.pipeline.publish import get_instance_staging_dir + + return get_instance_staging_dir(instance) From d00eb29ca1bd8fcda97a40a14f8ae03d171c4853 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 6 Sep 2022 13:20:48 +0200 Subject: [PATCH 0946/2550] Remove instance._log legacy code that has no use anymore --- .../modules/deadline/plugins/publish/submit_publish_job.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 2647dcf0cb..c9d1daffd1 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -700,9 +700,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.context = context self.anatomy = instance.context.data["anatomy"] - if hasattr(instance, "_log"): - data['_log'] = instance._log - asset = data.get("asset") or legacy_io.Session["AVALON_ASSET"] subset = data.get("subset") From 
96f8c3b7b3a1cf524c2ed1a815092ef2f55a2e9b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 6 Sep 2022 13:23:41 +0200 Subject: [PATCH 0947/2550] OP-3863 - fixed doc string --- .../hosts/photoshop/plugins/publish/extract_image.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py index 7543af95bd..5bdb3ef681 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_image.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py @@ -7,7 +7,15 @@ from openpype.hosts.photoshop import api as photoshop class ExtractImage(pyblish.api.ContextPlugin): - """Save scene before extraction.""" + """Extract all layers (groups) marked for publish. + + Usually publishable instance is created as a wrapper of layer(s). For each + publishable instance so many images as there is 'formats' is created. + + Logic tries to hide/unhide layers minimum times. + + Called once for all publishable instances. + """ order = publish.Extractor.order - 0.48 label = "Extract Image" @@ -27,7 +35,6 @@ class ExtractImage(pyblish.api.ContextPlugin): stub.hide_all_others_layers_ids([], layers=all_layers) with photoshop.maintained_selection(): - # self.log.info("Extracting %s" % str(list(instance))) with photoshop.maintained_visibility(layers=all_layers): for instance in context: if instance.data["family"] not in self.families: From 1a7a52f44cb5dbc07b1fc53c9592c79d6da5156e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 6 Sep 2022 16:40:09 +0200 Subject: [PATCH 0948/2550] :recycle: members as nodes, change access to members --- .../hosts/houdini/plugins/publish/collect_active_state.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_frames.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_instances.py | 6 ++++++ .../hosts/houdini/plugins/publish/collect_output_node.py | 2 +- .../hosts/houdini/plugins/publish/collect_redshift_rop.py | 2 +- .../houdini/plugins/publish/collect_render_products.py | 2 +- .../hosts/houdini/plugins/publish/collect_usd_layers.py | 4 ++-- openpype/hosts/houdini/plugins/publish/extract_alembic.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_ass.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_composite.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_hda.py | 2 +- .../hosts/houdini/plugins/publish/extract_redshift_proxy.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_usd.py | 2 +- .../hosts/houdini/plugins/publish/extract_usd_layered.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py | 2 +- .../plugins/publish/validate_abc_primitive_to_detail.py | 2 +- .../houdini/plugins/publish/validate_alembic_face_sets.py | 2 +- .../houdini/plugins/publish/validate_animation_settings.py | 2 +- openpype/hosts/houdini/plugins/publish/validate_bypass.py | 2 +- .../hosts/houdini/plugins/publish/validate_camera_rop.py | 2 +- .../houdini/plugins/publish/validate_cop_output_node.py | 2 +- .../houdini/plugins/publish/validate_file_extension.py | 2 +- .../hosts/houdini/plugins/publish/validate_frame_token.py | 2 +- .../hosts/houdini/plugins/publish/validate_no_errors.py | 2 +- .../plugins/publish/validate_primitive_hierarchy_paths.py | 2 +- .../houdini/plugins/publish/validate_sop_output_node.py | 2 +- .../plugins/publish/validate_usd_layer_path_backslashes.py | 2 +- .../houdini/plugins/publish/validate_usd_model_and_shade.py | 2 +- 
.../houdini/plugins/publish/validate_usd_output_node.py | 2 +- .../hosts/houdini/plugins/publish/validate_usd_setdress.py | 2 +- .../houdini/plugins/publish/validate_usd_shade_workspace.py | 2 +- .../houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 32 files changed, 38 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index 862d5720e1..dd83721358 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -24,7 +24,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = instance[0] + node = instance.data["members"][0] if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9bd43d8a09..cad894cc3f 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -24,7 +24,7 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index d38927984a..0187a1f1d8 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -47,6 +47,11 @@ class CollectInstances(pyblish.api.ContextPlugin): if node.evalParm("id") != "pyblish.avalon.instance": continue + # instance was created by new creator code, skip it as + # it is already collected. 
+ if node.parm("creator_identifier"): + continue + has_family = node.evalParm("family") assert has_family, "'%s' is missing 'family'" % node.name() @@ -78,6 +83,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["families"] = [instance.data["family"]] instance[:] = [node] + instance.data["members"] = [node] instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index 0130c0a8da..a3989dc776 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = instance.data["members"][0] # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 72b554b567..33bf74610a 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index d7163b43c0..e88c5ea0e6 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: - rop_path = instance[0].path() + rop_path = instance.data["members"][0].path() raise RuntimeError( "No output node found. 
Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index e3985e3c97..c0a55722a5 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -19,7 +19,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = instance[0] + rop_node = instance.data["members"][0] save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -54,7 +54,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance[0]) # include same USD ROP + layer_inst.append(instance.data["members"][0]) # include same USD ROP layer_inst.append((layer, save_path)) # include layer data # Allow this subset to be grouped into a USD Layer on creation diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 83b790407f..7f1e98c0af 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -14,7 +14,7 @@ class ExtractAlembic(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index e56e40df85..03ca899c5b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -14,7 +14,7 @@ class ExtractAss(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index f300b6d28d..eb77a91d62 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -15,7 +15,7 @@ class ExtractComposite(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 301dd4e297..4352939a2c 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -16,7 +16,7 @@ class ExtractHDA(openpype.api.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance[0] + hda_node = instance.data["members"][0] hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index c754d60c59..b440b1d2ee 100644 --- 
a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -14,7 +14,7 @@ class ExtractRedshiftProxy(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 0fc26900fb..9fa68178f4 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -16,7 +16,7 @@ class ExtractUSD(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 80919c023b..6214e65655 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class ExtractUSDLayered(openpype.api.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance[0] + node = instance.data["members"][0] # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 113e1b0bcb..a30854333e 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -14,7 +14,7 @@ class ExtractVDBCache(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 3e17d3e8de..b97978d927 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -33,7 +33,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): output = instance.data["output_node"] - rop = instance[0] + rop = instance.data["members"][0] pattern = rop.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index e9126ffef0..ee59eed35e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -24,7 +24,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 5eb8f93d03..32c5078b9f 100644 --- 
a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -36,7 +36,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = instance.data["members"][0] # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index fc4e18f701..6a37009549 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -34,6 +34,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - rop = instance[0] + rop = instance.data["members"][0] if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index a0919e1323..4433f5712b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -14,7 +14,7 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = instance.data["members"][0] if node.parm("use_sop_path").eval(): raise RuntimeError( "Alembic ROP for Camera export should not be " diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 543539ffe3..86ddc2adf2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -33,7 +33,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index b26d28a1e7..f050a41b88 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -37,7 +37,7 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): def get_invalid(cls, instance): # Get ROP node from instance - node = instance[0] + node = instance.data["members"][0] # Create lookup for current family in instance families = [] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index 76b5910576..b65e9ef62e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -36,7 +36,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = instance.data["members"][0] # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index f58e5f8d7d..46210bda61 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -37,7 +37,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance[0]) + validate_nodes.append(instance.data["members"][0]) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 1eb36763bb..a0e580fbf0 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -30,7 +30,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): output = instance.data["output_node"] - rop = instance[0] + rop = instance.data["members"][0] build_from_path = rop.parm("build_from_path").eval() if not build_from_path: cls.log.debug( diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a5a07b1b1a..a2a9c1f4ea 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -35,7 +35,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index ac0181aed2..95cad82085 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -24,7 +24,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 2fd2f5eb9f..bdb7c05319 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -37,7 +37,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 1f10fafdf4..0c38ccd4be 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -33,7 +33,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index fb1094e6b5..835cd5977a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -21,7 +21,7 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): from pxr import UsdGeom - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index a77ca2f3cb..c5218c203d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -19,7 +19,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] workspace = rop.parent() definition = workspace.type().definition() diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 1ba840b71d..ac87fa8fed 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -36,7 +36,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." 
% instance[0].path() + "ROP node '%s'." % instance.data["members"][0].path() ) return [instance] From c8efd0d67e5600a093d024786dd16717c9f3d16c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Sep 2022 17:07:46 +0200 Subject: [PATCH 0949/2550] removed f-string formatting for py2 compatibility --- openpype/hosts/resolve/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/resolve/utils.py b/openpype/hosts/resolve/utils.py index 382a7cf344..aa8ad3008b 100644 --- a/openpype/hosts/resolve/utils.py +++ b/openpype/hosts/resolve/utils.py @@ -17,7 +17,7 @@ def setup(env): # collect script dirs if us_env: - log.info(f"Utility Scripts Env: `{us_env}`") + log.info("Utility Scripts Env: `{}`".format(us_env)) us_paths = us_env.split( os.pathsep) + us_paths @@ -25,8 +25,8 @@ def setup(env): for path in us_paths: scripts.update({path: os.listdir(path)}) - log.info(f"Utility Scripts Dir: `{us_paths}`") - log.info(f"Utility Scripts: `{scripts}`") + log.info("Utility Scripts Dir: `{}`".format(us_paths)) + log.info("Utility Scripts: `{}`".format(scripts)) # make sure no script file is in folder for s in os.listdir(us_dir): @@ -44,7 +44,7 @@ def setup(env): # script in script list src = os.path.join(d, s) dst = os.path.join(us_dir, s) - log.info(f"Copying `{src}` to `{dst}`...") + log.info("Copying `{}` to `{}`...".format(src, dst)) if os.path.isdir(src): shutil.copytree( src, dst, symlinks=False, From de52a7d61b970b6df353130295568a2242b6b902 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Sep 2022 17:21:32 +0200 Subject: [PATCH 0950/2550] last f-string removement --- openpype/hosts/resolve/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/resolve/utils.py b/openpype/hosts/resolve/utils.py index aa8ad3008b..d5c133bbf5 100644 --- a/openpype/hosts/resolve/utils.py +++ b/openpype/hosts/resolve/utils.py @@ -31,7 +31,7 @@ def setup(env): # make sure no script file is in folder for s in os.listdir(us_dir): path = os.path.join(us_dir, s) - log.info(f"Removing `{path}`...") + log.info("Removing `{}`...".format(path)) if os.path.isdir(path): shutil.rmtree(path, onerror=None) else: From 44518d2d85dcabe808c19b2f24ca64f21d096d90 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Sep 2022 01:55:15 +0200 Subject: [PATCH 0951/2550] :sparkles: add collector for member nodes --- .../publish/collect_members_as_nodes.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py new file mode 100644 index 0000000000..07d71c6605 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +import pyblish.api +import hou + + +class CollectMembersAsNodes(pyblish.api.InstancePlugin): + """Collects instance members as Houdini nodes.""" + + order = pyblish.api.CollectorOrder - 0.01 + hosts = ["houdini"] + label = "Collect Members as Nodes" + + def process(self, instance): + if not instance.data.get("creator_identifier"): + return + + nodes = [ + hou.node(member) for member in instance.data.get("members", []) + ] + + instance.data["members"] = nodes From 31c0e9050b84b015f104ba7d08275563b75dbbc6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Sep 2022 01:55:37 +0200 Subject: [PATCH 0952/2550] :rotating_light: fix hound :dog: 
--- .../hosts/houdini/plugins/publish/collect_usd_layers.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index c0a55722a5..c21b336403 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -54,8 +54,10 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance.data["members"][0]) # include same USD ROP - layer_inst.append((layer, save_path)) # include layer data + # include same USD ROP + layer_inst.append(instance.data["members"][0]) + # include layer data + layer_inst.append((layer, save_path)) # Allow this subset to be grouped into a USD Layer on creation layer_inst.data["subsetGroup"] = "USD Layer" From 915cba631c42a822be8f71517d081a43a09cc177 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 7 Sep 2022 04:24:55 +0000 Subject: [PATCH 0953/2550] [Automated] Bump version --- CHANGELOG.md | 18 +++++++++--------- openpype/version.py | 2 +- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b35e89e96e..6754f1e2e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,22 +1,25 @@ # Changelog -## [3.14.2-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) **🆕 New features** - Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) -- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) **🚀 Enhancements** +- Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) +- Blender: Publisher collect workfile representation [\#3670](https://github.com/pypeclub/OpenPype/pull/3670) - Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) - Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) **🐛 Bug fixes** +- Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) +- nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) - Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) - Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) - Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) @@ -24,10 +27,14 @@ **🔀 Refactored code** +- Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) +- Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) - AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) - General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) - General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) +- General: Move queries of asset and representation links 
[\#3770](https://github.com/pypeclub/OpenPype/pull/3770) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) - General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) @@ -80,7 +87,6 @@ **🔀 Refactored code** -- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) @@ -116,25 +122,19 @@ - Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) - Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) - Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661) -- General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) - General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) -- General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) -- Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) **🔀 Refactored code** - General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) -- Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653) -- Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) **Merged pull requests:** - Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) -- Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) diff --git a/openpype/version.py b/openpype/version.py index 26b03c37e5..c042ca2625 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.2" +__version__ = "3.14.2-nightly.3" From 26fbdac8da117c83a71b75ce6315be4044d23942 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 16:14:30 +0800 Subject: [PATCH 0954/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 5 +++-- openpype/hosts/maya/hooks/pre_copy_mel.py | 5 +++-- openpype/hosts/maya/lib.py | 19 ++++++++++--------- .../defaults/project_anatomy/attributes.json | 3 +-- .../defaults/project_settings/maya.json | 15 +-------------- .../projects_schema/schema_project_maya.json | 17 ++++------------- 6 files changed, 22 insertions(+), 42 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py 
index 4768a9ee4f..4578d6fb39 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import load_workspace_mel +from openpype.hosts.maya.lib import create_workspace_mel from . import menu, lib from .workio import ( open_file, @@ -548,9 +548,10 @@ def on_task_changed(): def before_workfile_save(event): + project_name = os.getenv("AVALON_PROJECT") workdir_path = event["workdir_path"] if workdir_path: - load_workspace_mel(workdir_path) + create_workspace_mel(workdir_path, project_name) class MayaDirmap(HostDirmap): diff --git a/openpype/hosts/maya/hooks/pre_copy_mel.py b/openpype/hosts/maya/hooks/pre_copy_mel.py index b11e18241e..6f90af4b7c 100644 --- a/openpype/hosts/maya/hooks/pre_copy_mel.py +++ b/openpype/hosts/maya/hooks/pre_copy_mel.py @@ -1,5 +1,5 @@ from openpype.lib import PreLaunchHook -from openpype.hosts.maya.lib import copy_workspace_mel +from openpype.hosts.maya.lib import create_workspace_mel class PreCopyMel(PreLaunchHook): @@ -10,9 +10,10 @@ class PreCopyMel(PreLaunchHook): app_groups = ["maya"] def execute(self): + project_name = self.launch_context.env.get("AVALON_PROJECT") workdir = self.launch_context.env.get("AVALON_WORKDIR") if not workdir: self.log.warning("BUG: Workdir is not filled.") return - copy_workspace_mel(workdir) + create_workspace_mel(workdir, project_name) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 2853789656..443bf7d10e 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,10 +1,8 @@ import os -import shutil - -from openpype.settings import get_current_project_settings +from openpype.settings import get_project_settings -def load_workspace_mel(workdir): +def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): return @@ -12,9 +10,12 @@ def load_workspace_mel(workdir): if not os.path.exists(workdir): os.makedirs(workdir) + project_setting = get_project_settings(project_name) + mel_script = project_setting["maya"].get("mel_workspace") + + # Skip if mel script in settings is empty + if not mel_script: + return + with open(dst_filepath, "w") as mel_file: - setting = get_current_project_settings() - mel_script = setting["maya"]["mel-workspace"]["definition"] - for mel in mel_script: - mel_file.write(mel) - mel_file.write("\n") + mel_file.write(mel_script) diff --git a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json index 983ac603f9..bf8bbef8de 100644 --- a/openpype/settings/defaults/project_anatomy/attributes.json +++ b/openpype/settings/defaults/project_anatomy/attributes.json @@ -19,8 +19,7 @@ "blender/2-91", "harmony/20", "photoshop/2021", - "aftereffects/2021", - "unreal/4-26" + "aftereffects/2021" ], "tools_env": [], "active": true diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 162732280f..ada69c3730 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -15,20 +15,7 @@ "destination-path": [] } }, - "mel-workspace":{ - "definition":[ - "workspace -fr \"shaders\" \"renderData/shaders\";", - "workspace -fr \"images\" \"renders\";", - "workspace -fr \"particles\" \"particles\";", - "workspace -fr 
\"mayaAscii\" \"\";", - "workspace -fr \"mayaBinary\" \"\";", - "workspace -fr \"scene\" \"\";", - "workspace -fr \"alembicCache\" \"cache/alembic\";", - "workspace -fr \"renderData\" \"renderData\";", - "workspace -fr \"sourceImages\" \"sourceimages\";", - "workspace -fr \"fileCache\" \"cache/nCache\";" - ] - }, + "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 7204ec586a..978de56a51 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -54,19 +54,10 @@ ] }, { - "type": "dict", - "collapsible": true, - "key": "mel-workspace", - "label": "Maya MEL Workspace", - "is_group": true, - "children": [ - { - "type": "list", - "object_type": "text", - "key": "definition", - "label": "definition" - } - ] + "type": "text", + "multiline" : true, + "key": "mel_workspace", + "label": "Maya MEL Workspace" }, { "type": "schema", From a9b69536cac401221cacaaa3155c4f9a7be682b8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 16:44:29 +0800 Subject: [PATCH 0955/2550] adding and loading maya mel workspace through openpype project setting --- openpype/settings/defaults/project_anatomy/attributes.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json index bf8bbef8de..983ac603f9 100644 --- a/openpype/settings/defaults/project_anatomy/attributes.json +++ b/openpype/settings/defaults/project_anatomy/attributes.json @@ -19,7 +19,8 @@ "blender/2-91", "harmony/20", "photoshop/2021", - "aftereffects/2021" + "aftereffects/2021", + "unreal/4-26" ], "tools_env": [], "active": true From decc11251854f60db02531f93e2b8fbd4d3fa7ec Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 17:06:47 +0800 Subject: [PATCH 0956/2550] load and edit mel workspace within the Openpype project settings --- .../settings/defaults/project_settings/maya.json | 2 +- .../schemas/projects_schema/schema_project_maya.json | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ada69c3730..bb96fcf741 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1,4 +1,5 @@ { + "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "ext_mapping": { "model": 
"ma", "mayaAscii": "ma", @@ -15,7 +16,6 @@ "destination-path": [] } }, - "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 978de56a51..a54f8e6e4f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -5,6 +5,12 @@ "label": "Maya", "is_file": true, "children": [ + { + "type": "text", + "multiline" : true, + "key": "mel_workspace", + "label": "Maya MEL Workspace" + }, { "type": "dict-modifiable", "key": "ext_mapping", @@ -53,12 +59,6 @@ } ] }, - { - "type": "text", - "multiline" : true, - "key": "mel_workspace", - "label": "Maya MEL Workspace" - }, { "type": "schema", "name": "schema_scriptsmenu" From e1c17c71d30a38bcfda292393e425651c4c5a6d4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 12:11:06 +0200 Subject: [PATCH 0957/2550] fix variable name --- openpype/pipeline/template_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/template_data.py b/openpype/pipeline/template_data.py index bab46a627d..627eba5c3d 100644 --- a/openpype/pipeline/template_data.py +++ b/openpype/pipeline/template_data.py @@ -53,7 +53,7 @@ def get_project_template_data(project_doc=None, project_name=None): project_name = project_doc["name"] if not project_doc: - project_code = get_project(project_name, fields=["data.code"]) + project_doc = get_project(project_name, fields=["data.code"]) project_code = project_doc.get("data", {}).get("code") return { From 509c209093d6fd971ec3631e662a8e34a31c5717 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 12:40:34 +0200 Subject: [PATCH 0958/2550] fix status handling --- igniter/install_dialog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/install_dialog.py b/igniter/install_dialog.py index b09529f5c5..c7e9ef74c5 100644 --- a/igniter/install_dialog.py +++ b/igniter/install_dialog.py @@ -389,7 +389,7 @@ class InstallDialog(QtWidgets.QDialog): def _installation_finished(self): status = self._install_thread.result() - if status >= 0: + if status is not None and status >= 0: self._update_progress(100) QtWidgets.QApplication.processEvents() self.done(3) From d6ab41887a37e1bfb4e7f9a1f430daef93182faa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 13:41:30 +0200 Subject: [PATCH 0959/2550] added comment to code --- igniter/install_dialog.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/igniter/install_dialog.py b/igniter/install_dialog.py index c7e9ef74c5..65ddd58735 100644 --- a/igniter/install_dialog.py +++ b/igniter/install_dialog.py @@ -388,6 +388,9 @@ class InstallDialog(QtWidgets.QDialog): install_thread.start() def _installation_finished(self): + # TODO we should find out why status can be set to 'None'? 
+ # - 'InstallThread.run' should handle all cases so not sure where + # that come from status = self._install_thread.result() if status is not None and status >= 0: self._update_progress(100) From 797d1ea59da616f84083e21b9b790c056f8f8c29 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 13:47:02 +0200 Subject: [PATCH 0960/2550] copied attribute defs widgets into 'openpype.tools' --- openpype/tools/attribute_defs/__init__.py | 10 + openpype/tools/attribute_defs/files_widget.py | 968 ++++++++++++++++++ openpype/tools/attribute_defs/widgets.py | 490 +++++++++ 3 files changed, 1468 insertions(+) create mode 100644 openpype/tools/attribute_defs/__init__.py create mode 100644 openpype/tools/attribute_defs/files_widget.py create mode 100644 openpype/tools/attribute_defs/widgets.py diff --git a/openpype/tools/attribute_defs/__init__.py b/openpype/tools/attribute_defs/__init__.py new file mode 100644 index 0000000000..ce6b80109e --- /dev/null +++ b/openpype/tools/attribute_defs/__init__.py @@ -0,0 +1,10 @@ +from .widgets import ( + create_widget_for_attr_def, + AttributeDefinitionsWidget, +) + + +__all__ = ( + "create_widget_for_attr_def", + "AttributeDefinitionsWidget", +) diff --git a/openpype/tools/attribute_defs/files_widget.py b/openpype/tools/attribute_defs/files_widget.py new file mode 100644 index 0000000000..d29aa1b607 --- /dev/null +++ b/openpype/tools/attribute_defs/files_widget.py @@ -0,0 +1,968 @@ +import os +import collections +import uuid +import json + +from Qt import QtWidgets, QtCore, QtGui + +from openpype.lib import FileDefItem +from openpype.tools.utils import ( + paint_image_with_color, + ClickableLabel, +) +# TODO change imports +from openpype.tools.resources import get_image +from openpype.tools.utils import ( + IconButton, + PixmapLabel +) + +ITEM_ID_ROLE = QtCore.Qt.UserRole + 1 +ITEM_LABEL_ROLE = QtCore.Qt.UserRole + 2 +ITEM_ICON_ROLE = QtCore.Qt.UserRole + 3 +FILENAMES_ROLE = QtCore.Qt.UserRole + 4 +DIRPATH_ROLE = QtCore.Qt.UserRole + 5 +IS_DIR_ROLE = QtCore.Qt.UserRole + 6 +IS_SEQUENCE_ROLE = QtCore.Qt.UserRole + 7 +EXT_ROLE = QtCore.Qt.UserRole + 8 + + +def convert_bytes_to_json(bytes_value): + if isinstance(bytes_value, QtCore.QByteArray): + # Raw data are already QByteArray and we don't have to load them + encoded_data = bytes_value + else: + encoded_data = QtCore.QByteArray.fromRawData(bytes_value) + stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.ReadOnly) + text = stream.readQString() + try: + return json.loads(text) + except Exception: + return None + + +def convert_data_to_bytes(data): + bytes_value = QtCore.QByteArray() + stream = QtCore.QDataStream(bytes_value, QtCore.QIODevice.WriteOnly) + stream.writeQString(json.dumps(data)) + return bytes_value + + +class SupportLabel(QtWidgets.QLabel): + pass + + +class DropEmpty(QtWidgets.QWidget): + _empty_extensions = "Any file" + + def __init__(self, single_item, allow_sequences, extensions_label, parent): + super(DropEmpty, self).__init__(parent) + + drop_label_widget = QtWidgets.QLabel("Drag & Drop files here", self) + + items_label_widget = SupportLabel(self) + items_label_widget.setWordWrap(True) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addSpacing(20) + layout.addWidget( + drop_label_widget, 0, alignment=QtCore.Qt.AlignCenter + ) + layout.addSpacing(30) + layout.addStretch(1) + layout.addWidget( + items_label_widget, 0, alignment=QtCore.Qt.AlignCenter + ) + layout.addSpacing(10) + + for widget in ( + drop_label_widget, + items_label_widget, 
+ ): + widget.setAlignment(QtCore.Qt.AlignCenter) + widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) + + update_size_timer = QtCore.QTimer() + update_size_timer.setInterval(10) + update_size_timer.setSingleShot(True) + + update_size_timer.timeout.connect(self._on_update_size_timer) + + self._update_size_timer = update_size_timer + + if extensions_label and not extensions_label.startswith(" "): + extensions_label = " " + extensions_label + + self._single_item = single_item + self._extensions_label = extensions_label + self._allow_sequences = allow_sequences + self._allowed_extensions = set() + self._allow_folders = None + + self._drop_label_widget = drop_label_widget + self._items_label_widget = items_label_widget + + self.set_allow_folders(False) + + def set_extensions(self, extensions): + if extensions: + extensions = { + ext.replace(".", "") + for ext in extensions + } + if extensions == self._allowed_extensions: + return + self._allowed_extensions = extensions + + self._update_items_label() + + def set_allow_folders(self, allowed): + if self._allow_folders == allowed: + return + + self._allow_folders = allowed + self._update_items_label() + + def _update_items_label(self): + allowed_items = [] + if self._allow_folders: + allowed_items.append("folder") + + if self._allowed_extensions: + allowed_items.append("file") + if self._allow_sequences: + allowed_items.append("sequence") + + if not self._single_item: + allowed_items = [item + "s" for item in allowed_items] + + if not allowed_items: + self._items_label_widget.setText( + "It is not allowed to add anything here!" + ) + return + + items_label = "Multiple " + if self._single_item: + items_label = "Single " + + if len(allowed_items) == 1: + extensions_label = allowed_items[0] + elif len(allowed_items) == 2: + extensions_label = " or ".join(allowed_items) + else: + last_item = allowed_items.pop(-1) + new_last_item = " or ".join(last_item, allowed_items.pop(-1)) + allowed_items.append(new_last_item) + extensions_label = ", ".join(allowed_items) + + allowed_items_label = extensions_label + + items_label += allowed_items_label + label_tooltip = None + if self._allowed_extensions: + items_label += " of\n{}".format( + ", ".join(sorted(self._allowed_extensions)) + ) + + if self._extensions_label: + label_tooltip = items_label + items_label = self._extensions_label + + if self._items_label_widget.text() == items_label: + return + + self._items_label_widget.setToolTip(label_tooltip) + self._items_label_widget.setText(items_label) + self._update_size_timer.start() + + def resizeEvent(self, event): + super(DropEmpty, self).resizeEvent(event) + self._update_size_timer.start() + + def _on_update_size_timer(self): + """Recalculate height of label with extensions. + + Dynamic QLabel with word wrap does not handle properly it's sizeHint + calculations on show. This way it is recalculated. It is good practice + to trigger this method with small offset using '_update_size_timer'. 
+ """ + + width = self._items_label_widget.width() + height = self._items_label_widget.heightForWidth(width) + self._items_label_widget.setMinimumHeight(height) + self._items_label_widget.updateGeometry() + + def paintEvent(self, event): + super(DropEmpty, self).paintEvent(event) + painter = QtGui.QPainter(self) + pen = QtGui.QPen() + pen.setWidth(1) + pen.setBrush(QtCore.Qt.darkGray) + pen.setStyle(QtCore.Qt.DashLine) + painter.setPen(pen) + content_margins = self.layout().contentsMargins() + + left_m = content_margins.left() + top_m = content_margins.top() + rect = QtCore.QRect( + left_m, + top_m, + ( + self.rect().width() + - (left_m + content_margins.right() + pen.width()) + ), + ( + self.rect().height() + - (top_m + content_margins.bottom() + pen.width()) + ) + ) + painter.drawRect(rect) + + +class FilesModel(QtGui.QStandardItemModel): + def __init__(self, single_item, allow_sequences): + super(FilesModel, self).__init__() + + self._id = str(uuid.uuid4()) + self._single_item = single_item + self._multivalue = False + self._allow_sequences = allow_sequences + + self._items_by_id = {} + self._file_items_by_id = {} + self._filenames_by_dirpath = collections.defaultdict(set) + self._items_by_dirpath = collections.defaultdict(list) + + @property + def id(self): + return self._id + + def set_multivalue(self, multivalue): + """Disable filtering.""" + + if self._multivalue == multivalue: + return + self._multivalue = multivalue + + def add_filepaths(self, items): + if not items: + return + + file_items = FileDefItem.from_value(items, self._allow_sequences) + if not file_items: + return + + if not self._multivalue and self._single_item: + file_items = [file_items[0]] + current_ids = list(self._file_items_by_id.keys()) + if current_ids: + self.remove_item_by_ids(current_ids) + + new_model_items = [] + for file_item in file_items: + item_id, model_item = self._create_item(file_item) + new_model_items.append(model_item) + self._file_items_by_id[item_id] = file_item + self._items_by_id[item_id] = model_item + + if new_model_items: + roow_item = self.invisibleRootItem() + roow_item.appendRows(new_model_items) + + def remove_item_by_ids(self, item_ids): + if not item_ids: + return + + items = [] + for item_id in set(item_ids): + if item_id not in self._items_by_id: + continue + item = self._items_by_id.pop(item_id) + self._file_items_by_id.pop(item_id) + items.append(item) + + if items: + for item in items: + self.removeRows(item.row(), 1) + + def get_file_item_by_id(self, item_id): + return self._file_items_by_id.get(item_id) + + def _create_item(self, file_item): + if file_item.is_dir: + icon_pixmap = paint_image_with_color( + get_image(filename="folder.png"), QtCore.Qt.white + ) + else: + icon_pixmap = paint_image_with_color( + get_image(filename="file.png"), QtCore.Qt.white + ) + + item = QtGui.QStandardItem() + item_id = str(uuid.uuid4()) + item.setData(item_id, ITEM_ID_ROLE) + item.setData(file_item.label or "< empty >", ITEM_LABEL_ROLE) + item.setData(file_item.filenames, FILENAMES_ROLE) + item.setData(file_item.directory, DIRPATH_ROLE) + item.setData(icon_pixmap, ITEM_ICON_ROLE) + item.setData(file_item.ext, EXT_ROLE) + item.setData(file_item.is_dir, IS_DIR_ROLE) + item.setData(file_item.is_sequence, IS_SEQUENCE_ROLE) + + return item_id, item + + def mimeData(self, indexes): + item_ids = [ + index.data(ITEM_ID_ROLE) + for index in indexes + ] + + item_ids_data = convert_data_to_bytes(item_ids) + mime_data = super(FilesModel, self).mimeData(indexes) + 
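+        # Store the dragged item ids under a custom mime type so
+        # 'dropMimeData' can detect internal moves and reorder the rows.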
mime_data.setData("files_widget/internal_move", item_ids_data) + + file_items = [] + for item_id in item_ids: + file_item = self.get_file_item_by_id(item_id) + if file_item: + file_items.append(file_item.to_dict()) + + full_item_data = convert_data_to_bytes({ + "items": file_items, + "id": self._id + }) + mime_data.setData("files_widget/full_data", full_item_data) + return mime_data + + def dropMimeData(self, mime_data, action, row, col, index): + item_ids = convert_bytes_to_json( + mime_data.data("files_widget/internal_move") + ) + if item_ids is None: + return False + + # Find matching item after which will be items moved + # - store item before moved items are removed + root = self.invisibleRootItem() + if row >= 0: + src_item = self.item(row) + else: + src_item_id = index.data(ITEM_ID_ROLE) + src_item = self._items_by_id.get(src_item_id) + + # Take out items that should be moved + items = [] + for item_id in item_ids: + item = self._items_by_id.get(item_id) + if item: + self.takeRow(item.row()) + items.append(item) + + # Skip if there are not items that can be moved + if not items: + return False + + # Calculate row where items should be inserted + if src_item: + src_row = src_item.row() + else: + src_row = root.rowCount() + + root.insertRow(src_row, items) + return True + + +class FilesProxyModel(QtCore.QSortFilterProxyModel): + def __init__(self, *args, **kwargs): + super(FilesProxyModel, self).__init__(*args, **kwargs) + self._allow_folders = False + self._allowed_extensions = None + self._multivalue = False + + def set_multivalue(self, multivalue): + """Disable filtering.""" + + if self._multivalue == multivalue: + return + self._multivalue = multivalue + self.invalidateFilter() + + def set_allow_folders(self, allow=None): + if allow is None: + allow = not self._allow_folders + + if allow == self._allow_folders: + return + self._allow_folders = allow + self.invalidateFilter() + + def set_allowed_extensions(self, extensions=None): + if extensions is not None: + _extensions = set() + for ext in set(extensions): + if not ext.startswith("."): + ext = ".{}".format(ext) + _extensions.add(ext.lower()) + extensions = _extensions + + if self._allowed_extensions != extensions: + self._allowed_extensions = extensions + self.invalidateFilter() + + def are_valid_files(self, filepaths): + for filepath in filepaths: + if os.path.isfile(filepath): + _, ext = os.path.splitext(filepath) + if ext in self._allowed_extensions: + return True + + elif self._allow_folders: + return True + return False + + def filter_valid_files(self, filepaths): + filtered_paths = [] + for filepath in filepaths: + if os.path.isfile(filepath): + _, ext = os.path.splitext(filepath) + if ext in self._allowed_extensions: + filtered_paths.append(filepath) + + elif self._allow_folders: + filtered_paths.append(filepath) + return filtered_paths + + def filterAcceptsRow(self, row, parent_index): + # Skip filtering if multivalue is set + if self._multivalue: + return True + + model = self.sourceModel() + index = model.index(row, self.filterKeyColumn(), parent_index) + # First check if item is folder and if folders are enabled + if index.data(IS_DIR_ROLE): + if not self._allow_folders: + return False + return True + + # Check if there are any allowed extensions + if self._allowed_extensions is None: + return False + + if index.data(EXT_ROLE) not in self._allowed_extensions: + return False + return True + + def lessThan(self, left, right): + left_comparison = left.data(DIRPATH_ROLE) + right_comparison = right.data(DIRPATH_ROLE) + if 
left_comparison == right_comparison: + left_comparison = left.data(ITEM_LABEL_ROLE) + right_comparison = right.data(ITEM_LABEL_ROLE) + + if sorted((left_comparison, right_comparison))[0] == left_comparison: + return True + return False + + +class ItemWidget(QtWidgets.QWidget): + context_menu_requested = QtCore.Signal(QtCore.QPoint) + + def __init__( + self, item_id, label, pixmap_icon, is_sequence, multivalue, parent=None + ): + self._item_id = item_id + + super(ItemWidget, self).__init__(parent) + + self.setAttribute(QtCore.Qt.WA_TranslucentBackground) + + icon_widget = PixmapLabel(pixmap_icon, self) + label_widget = QtWidgets.QLabel(label, self) + + label_size_hint = label_widget.sizeHint() + height = label_size_hint.height() + actions_menu_pix = paint_image_with_color( + get_image(filename="menu.png"), QtCore.Qt.white + ) + + split_btn = ClickableLabel(self) + split_btn.setFixedSize(height, height) + split_btn.setPixmap(actions_menu_pix) + if multivalue: + split_btn.setVisible(False) + else: + split_btn.setVisible(is_sequence) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(5, 5, 5, 5) + layout.addWidget(icon_widget, 0) + layout.addWidget(label_widget, 1) + layout.addWidget(split_btn, 0) + + split_btn.clicked.connect(self._on_actions_clicked) + + self._icon_widget = icon_widget + self._label_widget = label_widget + self._split_btn = split_btn + self._actions_menu_pix = actions_menu_pix + self._last_scaled_pix_height = None + + def _update_btn_size(self): + label_size_hint = self._label_widget.sizeHint() + height = label_size_hint.height() + if height == self._last_scaled_pix_height: + return + self._last_scaled_pix_height = height + self._split_btn.setFixedSize(height, height) + pix = self._actions_menu_pix.scaled( + height, height, + QtCore.Qt.KeepAspectRatio, + QtCore.Qt.SmoothTransformation + ) + self._split_btn.setPixmap(pix) + + def showEvent(self, event): + super(ItemWidget, self).showEvent(event) + self._update_btn_size() + + def resizeEvent(self, event): + super(ItemWidget, self).resizeEvent(event) + self._update_btn_size() + + def _on_actions_clicked(self): + pos = self._split_btn.rect().bottomLeft() + point = self._split_btn.mapToGlobal(pos) + self.context_menu_requested.emit(point) + + +class InViewButton(IconButton): + pass + + +class FilesView(QtWidgets.QListView): + """View showing instances and their groups.""" + + remove_requested = QtCore.Signal() + context_menu_requested = QtCore.Signal(QtCore.QPoint) + + def __init__(self, *args, **kwargs): + super(FilesView, self).__init__(*args, **kwargs) + + self.setEditTriggers(QtWidgets.QListView.NoEditTriggers) + self.setSelectionMode( + QtWidgets.QAbstractItemView.ExtendedSelection + ) + self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + self.setAcceptDrops(True) + self.setDragEnabled(True) + self.setDragDropMode(self.InternalMove) + + remove_btn = InViewButton(self) + pix_enabled = paint_image_with_color( + get_image(filename="delete.png"), QtCore.Qt.white + ) + pix_disabled = paint_image_with_color( + get_image(filename="delete.png"), QtCore.Qt.gray + ) + icon = QtGui.QIcon(pix_enabled) + icon.addPixmap(pix_disabled, icon.Disabled, icon.Off) + remove_btn.setIcon(icon) + remove_btn.setEnabled(False) + + remove_btn.clicked.connect(self._on_remove_clicked) + self.customContextMenuRequested.connect(self._on_context_menu_request) + + self._remove_btn = remove_btn + + def setSelectionModel(self, *args, **kwargs): + """Catch selection model set to register signal callback. 
+ + Selection model is not available during initialization. + """ + + super(FilesView, self).setSelectionModel(*args, **kwargs) + selection_model = self.selectionModel() + selection_model.selectionChanged.connect(self._on_selection_change) + + def set_multivalue(self, multivalue): + """Disable remove button on multivalue.""" + + self._remove_btn.setVisible(not multivalue) + + def has_selected_item_ids(self): + """Is any index selected.""" + for index in self.selectionModel().selectedIndexes(): + instance_id = index.data(ITEM_ID_ROLE) + if instance_id is not None: + return True + return False + + def get_selected_item_ids(self): + """Ids of selected instances.""" + + selected_item_ids = set() + for index in self.selectionModel().selectedIndexes(): + instance_id = index.data(ITEM_ID_ROLE) + if instance_id is not None: + selected_item_ids.add(instance_id) + return selected_item_ids + + def has_selected_sequence(self): + for index in self.selectionModel().selectedIndexes(): + if index.data(IS_SEQUENCE_ROLE): + return True + return False + + def event(self, event): + if event.type() == QtCore.QEvent.KeyPress: + if ( + event.key() == QtCore.Qt.Key_Delete + and self.has_selected_item_ids() + ): + self.remove_requested.emit() + return True + + return super(FilesView, self).event(event) + + def _on_context_menu_request(self, pos): + index = self.indexAt(pos) + if index.isValid(): + point = self.viewport().mapToGlobal(pos) + self.context_menu_requested.emit(point) + + def _on_selection_change(self): + self._remove_btn.setEnabled(self.has_selected_item_ids()) + + def _on_remove_clicked(self): + self.remove_requested.emit() + + def _update_remove_btn(self): + """Position remove button to bottom right.""" + + viewport = self.viewport() + height = viewport.height() + pos_x = viewport.width() - self._remove_btn.width() - 5 + pos_y = height - self._remove_btn.height() - 5 + self._remove_btn.move(max(0, pos_x), max(0, pos_y)) + + def resizeEvent(self, event): + super(FilesView, self).resizeEvent(event) + self._update_remove_btn() + + def showEvent(self, event): + super(FilesView, self).showEvent(event) + self._update_remove_btn() + + +class FilesWidget(QtWidgets.QFrame): + value_changed = QtCore.Signal() + + def __init__(self, single_item, allow_sequences, extensions_label, parent): + super(FilesWidget, self).__init__(parent) + self.setAcceptDrops(True) + + empty_widget = DropEmpty( + single_item, allow_sequences, extensions_label, self + ) + + files_model = FilesModel(single_item, allow_sequences) + files_proxy_model = FilesProxyModel() + files_proxy_model.setSourceModel(files_model) + files_view = FilesView(self) + files_view.setModel(files_proxy_model) + files_view.setVisible(False) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(empty_widget, 1) + layout.addWidget(files_view, 1) + + files_proxy_model.rowsInserted.connect(self._on_rows_inserted) + files_proxy_model.rowsRemoved.connect(self._on_rows_removed) + files_view.remove_requested.connect(self._on_remove_requested) + files_view.context_menu_requested.connect( + self._on_context_menu_requested + ) + + self._in_set_value = False + self._single_item = single_item + self._multivalue = False + + self._empty_widget = empty_widget + self._files_model = files_model + self._files_proxy_model = files_proxy_model + self._files_view = files_view + + self._widgets_by_id = {} + + def _set_multivalue(self, multivalue): + if self._multivalue == multivalue: + return + self._multivalue = multivalue + 
self._files_view.set_multivalue(multivalue) + self._files_model.set_multivalue(multivalue) + self._files_proxy_model.set_multivalue(multivalue) + + def set_value(self, value, multivalue): + self._in_set_value = True + + widget_ids = set(self._widgets_by_id.keys()) + self._remove_item_by_ids(widget_ids) + + self._set_multivalue(multivalue) + + self._add_filepaths(value) + + self._in_set_value = False + + def current_value(self): + model = self._files_proxy_model + item_ids = set() + for row in range(model.rowCount()): + index = model.index(row, 0) + item_ids.add(index.data(ITEM_ID_ROLE)) + + file_items = [] + for item_id in item_ids: + file_item = self._files_model.get_file_item_by_id(item_id) + if file_item is not None: + file_items.append(file_item.to_dict()) + + if not self._single_item: + return file_items + if file_items: + return file_items[0] + + empty_item = FileDefItem.create_empty_item() + return empty_item.to_dict() + + def set_filters(self, folders_allowed, exts_filter): + self._files_proxy_model.set_allow_folders(folders_allowed) + self._files_proxy_model.set_allowed_extensions(exts_filter) + self._empty_widget.set_extensions(exts_filter) + self._empty_widget.set_allow_folders(folders_allowed) + + def _on_rows_inserted(self, parent_index, start_row, end_row): + for row in range(start_row, end_row + 1): + index = self._files_proxy_model.index(row, 0, parent_index) + item_id = index.data(ITEM_ID_ROLE) + if item_id in self._widgets_by_id: + continue + label = index.data(ITEM_LABEL_ROLE) + pixmap_icon = index.data(ITEM_ICON_ROLE) + is_sequence = index.data(IS_SEQUENCE_ROLE) + + widget = ItemWidget( + item_id, + label, + pixmap_icon, + is_sequence, + self._multivalue + ) + widget.context_menu_requested.connect( + self._on_context_menu_requested + ) + self._files_view.setIndexWidget(index, widget) + self._files_proxy_model.setData( + index, widget.sizeHint(), QtCore.Qt.SizeHintRole + ) + self._widgets_by_id[item_id] = widget + + if not self._in_set_value: + self.value_changed.emit() + + def _on_rows_removed(self, parent_index, start_row, end_row): + available_item_ids = set() + for row in range(self._files_proxy_model.rowCount()): + index = self._files_proxy_model.index(row, 0) + item_id = index.data(ITEM_ID_ROLE) + available_item_ids.add(index.data(ITEM_ID_ROLE)) + + widget_ids = set(self._widgets_by_id.keys()) + for item_id in available_item_ids: + if item_id in widget_ids: + widget_ids.remove(item_id) + + for item_id in widget_ids: + widget = self._widgets_by_id.pop(item_id) + widget.setVisible(False) + widget.deleteLater() + + if not self._in_set_value: + self.value_changed.emit() + + def _on_split_request(self): + if self._multivalue: + return + + item_ids = self._files_view.get_selected_item_ids() + if not item_ids: + return + + for item_id in item_ids: + file_item = self._files_model.get_file_item_by_id(item_id) + if not file_item: + return + + new_items = file_item.split_sequence() + self._add_filepaths(new_items) + self._remove_item_by_ids(item_ids) + + def _on_remove_requested(self): + if self._multivalue: + return + + items_to_delete = self._files_view.get_selected_item_ids() + if items_to_delete: + self._remove_item_by_ids(items_to_delete) + + def _on_context_menu_requested(self, pos): + if self._multivalue: + return + + menu = QtWidgets.QMenu(self._files_view) + + if self._files_view.has_selected_sequence(): + split_action = QtWidgets.QAction("Split sequence", menu) + split_action.triggered.connect(self._on_split_request) + menu.addAction(split_action) + + remove_action 
= QtWidgets.QAction("Remove", menu) + remove_action.triggered.connect(self._on_remove_requested) + menu.addAction(remove_action) + + menu.popup(pos) + + def sizeHint(self): + # Get size hints of widget and visible widgets + result = super(FilesWidget, self).sizeHint() + if not self._files_view.isVisible(): + not_visible_hint = self._files_view.sizeHint() + else: + not_visible_hint = self._empty_widget.sizeHint() + + # Get margins of this widget + margins = self.layout().contentsMargins() + + # Change size hint based on result of maximum size hint of widgets + result.setWidth(max( + result.width(), + not_visible_hint.width() + margins.left() + margins.right() + )) + result.setHeight(max( + result.height(), + not_visible_hint.height() + margins.top() + margins.bottom() + )) + + return result + + def dragEnterEvent(self, event): + if self._multivalue: + return + + mime_data = event.mimeData() + if mime_data.hasUrls(): + filepaths = [] + for url in mime_data.urls(): + filepath = url.toLocalFile() + if os.path.exists(filepath): + filepaths.append(filepath) + + if self._files_proxy_model.are_valid_files(filepaths): + event.setDropAction(QtCore.Qt.CopyAction) + event.accept() + + full_data_value = mime_data.data("files_widget/full_data") + if self._handle_full_data_drag(full_data_value): + event.setDropAction(QtCore.Qt.CopyAction) + event.accept() + + def dragLeaveEvent(self, event): + event.accept() + + def dropEvent(self, event): + if self._multivalue: + return + + mime_data = event.mimeData() + if mime_data.hasUrls(): + event.accept() + # event.setDropAction(QtCore.Qt.CopyAction) + filepaths = [] + for url in mime_data.urls(): + filepath = url.toLocalFile() + if os.path.exists(filepath): + filepaths.append(filepath) + + # Filter filepaths before passing it to model + filepaths = self._files_proxy_model.filter_valid_files(filepaths) + if filepaths: + self._add_filepaths(filepaths) + + if self._handle_full_data_drop( + mime_data.data("files_widget/full_data") + ): + event.setDropAction(QtCore.Qt.CopyAction) + event.accept() + + super(FilesWidget, self).dropEvent(event) + + def _handle_full_data_drag(self, value): + if value is None: + return False + + full_data = convert_bytes_to_json(value) + if full_data is None: + return False + + if full_data["id"] == self._files_model.id: + return False + return True + + def _handle_full_data_drop(self, value): + if value is None: + return False + + full_data = convert_bytes_to_json(value) + if full_data is None: + return False + + if full_data["id"] == self._files_model.id: + return False + + for item in full_data["items"]: + filepaths = [ + os.path.join(item["directory"], filename) + for filename in item["filenames"] + ] + filepaths = self._files_proxy_model.filter_valid_files(filepaths) + if filepaths: + self._add_filepaths(filepaths) + + if self._copy_modifiers_enabled(): + return False + return True + + def _copy_modifiers_enabled(self): + if ( + QtWidgets.QApplication.keyboardModifiers() + & QtCore.Qt.ControlModifier + ): + return True + return False + + def _add_filepaths(self, filepaths): + self._files_model.add_filepaths(filepaths) + self._update_visibility() + + def _remove_item_by_ids(self, item_ids): + self._files_model.remove_item_by_ids(item_ids) + self._update_visibility() + + def _update_visibility(self): + files_exists = self._files_proxy_model.rowCount() > 0 + self._files_view.setVisible(files_exists) + self._empty_widget.setVisible(not files_exists) diff --git a/openpype/tools/attribute_defs/widgets.py 
b/openpype/tools/attribute_defs/widgets.py new file mode 100644 index 0000000000..60ae952553 --- /dev/null +++ b/openpype/tools/attribute_defs/widgets.py @@ -0,0 +1,490 @@ +import uuid +import copy + +from Qt import QtWidgets, QtCore + +from openpype.lib.attribute_definitions import ( + AbtractAttrDef, + UnknownDef, + NumberDef, + TextDef, + EnumDef, + BoolDef, + FileDef, + UIDef, + UISeparatorDef, + UILabelDef +) +from openpype.tools.utils import CustomTextComboBox +from openpype.widgets.nice_checkbox import NiceCheckbox + +from .files_widget import FilesWidget + + +def create_widget_for_attr_def(attr_def, parent=None): + if not isinstance(attr_def, AbtractAttrDef): + raise TypeError("Unexpected type \"{}\" expected \"{}\"".format( + str(type(attr_def)), AbtractAttrDef + )) + + if isinstance(attr_def, NumberDef): + return NumberAttrWidget(attr_def, parent) + + if isinstance(attr_def, TextDef): + return TextAttrWidget(attr_def, parent) + + if isinstance(attr_def, EnumDef): + return EnumAttrWidget(attr_def, parent) + + if isinstance(attr_def, BoolDef): + return BoolAttrWidget(attr_def, parent) + + if isinstance(attr_def, UnknownDef): + return UnknownAttrWidget(attr_def, parent) + + if isinstance(attr_def, FileDef): + return FileAttrWidget(attr_def, parent) + + if isinstance(attr_def, UISeparatorDef): + return SeparatorAttrWidget(attr_def, parent) + + if isinstance(attr_def, UILabelDef): + return LabelAttrWidget(attr_def, parent) + + raise ValueError("Unknown attribute definition \"{}\"".format( + str(type(attr_def)) + )) + + +class AttributeDefinitionsWidget(QtWidgets.QWidget): + """Create widgets for attribute definitions in grid layout. + + Widget creates input widgets for passed attribute definitions. + + Widget can't handle multiselection values. 
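+
+    A rough usage sketch (the "frame_start" key below is only illustrative
+    and has to match the key of one of the passed attribute definitions):
+
+        widget = AttributeDefinitionsWidget(attr_defs)
+        widget.set_value({"frame_start": 1001})
+        values = widget.current_value()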
+ """ + + def __init__(self, attr_defs=None, parent=None): + super(AttributeDefinitionsWidget, self).__init__(parent) + + self._widgets = [] + self._current_keys = set() + + self.set_attr_defs(attr_defs) + + def clear_attr_defs(self): + """Remove all existing widgets and reset layout if needed.""" + self._widgets = [] + self._current_keys = set() + + layout = self.layout() + if layout is not None: + if layout.count() == 0: + return + + while layout.count(): + item = layout.takeAt(0) + widget = item.widget() + if widget: + widget.setVisible(False) + widget.deleteLater() + + layout.deleteLater() + + new_layout = QtWidgets.QGridLayout() + new_layout.setColumnStretch(0, 0) + new_layout.setColumnStretch(1, 1) + self.setLayout(new_layout) + + def set_attr_defs(self, attr_defs): + """Replace current attribute definitions with passed.""" + self.clear_attr_defs() + if attr_defs: + self.add_attr_defs(attr_defs) + + def add_attr_defs(self, attr_defs): + """Add attribute definitions to current.""" + layout = self.layout() + + row = 0 + for attr_def in attr_defs: + if attr_def.key in self._current_keys: + raise KeyError("Duplicated key \"{}\"".format(attr_def.key)) + + self._current_keys.add(attr_def.key) + widget = create_widget_for_attr_def(attr_def, self) + + expand_cols = 2 + if attr_def.is_value_def and attr_def.is_label_horizontal: + expand_cols = 1 + + col_num = 2 - expand_cols + + if attr_def.label: + label_widget = QtWidgets.QLabel(attr_def.label, self) + layout.addWidget( + label_widget, row, 0, 1, expand_cols + ) + if not attr_def.is_label_horizontal: + row += 1 + + layout.addWidget( + widget, row, col_num, 1, expand_cols + ) + self._widgets.append(widget) + row += 1 + + def set_value(self, value): + new_value = copy.deepcopy(value) + unused_keys = set(new_value.keys()) + for widget in self._widgets: + attr_def = widget.attr_def + if attr_def.key not in new_value: + continue + unused_keys.remove(attr_def.key) + + widget_value = new_value[attr_def.key] + if widget_value is None: + widget_value = copy.deepcopy(attr_def.default) + widget.set_value(widget_value) + + def current_value(self): + output = {} + for widget in self._widgets: + attr_def = widget.attr_def + if not isinstance(attr_def, UIDef): + output[attr_def.key] = widget.current_value() + + return output + + +class _BaseAttrDefWidget(QtWidgets.QWidget): + # Type 'object' may not work with older PySide versions + value_changed = QtCore.Signal(object, uuid.UUID) + + def __init__(self, attr_def, parent): + super(_BaseAttrDefWidget, self).__init__(parent) + + self.attr_def = attr_def + + main_layout = QtWidgets.QHBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + + self.main_layout = main_layout + + self._ui_init() + + def _ui_init(self): + raise NotImplementedError( + "Method '_ui_init' is not implemented. {}".format( + self.__class__.__name__ + ) + ) + + def current_value(self): + raise NotImplementedError( + "Method 'current_value' is not implemented. {}".format( + self.__class__.__name__ + ) + ) + + def set_value(self, value, multivalue=False): + raise NotImplementedError( + "Method 'set_value' is not implemented. 
{}".format( + self.__class__.__name__ + ) + ) + + +class SeparatorAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + input_widget = QtWidgets.QWidget(self) + input_widget.setObjectName("Separator") + input_widget.setMinimumHeight(2) + input_widget.setMaximumHeight(2) + + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + + +class LabelAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + input_widget = QtWidgets.QLabel(self) + label = self.attr_def.label + if label: + input_widget.setText(str(label)) + + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + + +class NumberAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + decimals = self.attr_def.decimals + if decimals > 0: + input_widget = QtWidgets.QDoubleSpinBox(self) + input_widget.setDecimals(decimals) + else: + input_widget = QtWidgets.QSpinBox(self) + + if self.attr_def.tooltip: + input_widget.setToolTip(self.attr_def.tooltip) + + input_widget.setMinimum(self.attr_def.minimum) + input_widget.setMaximum(self.attr_def.maximum) + input_widget.setValue(self.attr_def.default) + + input_widget.setButtonSymbols( + QtWidgets.QAbstractSpinBox.ButtonSymbols.NoButtons + ) + + input_widget.valueChanged.connect(self._on_value_change) + + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + + def _on_value_change(self, new_value): + self.value_changed.emit(new_value, self.attr_def.id) + + def current_value(self): + return self._input_widget.value() + + def set_value(self, value, multivalue=False): + if multivalue: + set_value = set(value) + if None in set_value: + set_value.remove(None) + set_value.add(self.attr_def.default) + + if len(set_value) > 1: + self._input_widget.setSpecialValueText("Multiselection") + return + value = tuple(set_value)[0] + + if self.current_value != value: + self._input_widget.setValue(value) + + +class TextAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + # TODO Solve how to handle regex + # self.attr_def.regex + + self.multiline = self.attr_def.multiline + if self.multiline: + input_widget = QtWidgets.QPlainTextEdit(self) + else: + input_widget = QtWidgets.QLineEdit(self) + + if ( + self.attr_def.placeholder + and hasattr(input_widget, "setPlaceholderText") + ): + input_widget.setPlaceholderText(self.attr_def.placeholder) + + if self.attr_def.tooltip: + input_widget.setToolTip(self.attr_def.tooltip) + + if self.attr_def.default: + if self.multiline: + input_widget.setPlainText(self.attr_def.default) + else: + input_widget.setText(self.attr_def.default) + + input_widget.textChanged.connect(self._on_value_change) + + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + + def _on_value_change(self): + if self.multiline: + new_value = self._input_widget.toPlainText() + else: + new_value = self._input_widget.text() + self.value_changed.emit(new_value, self.attr_def.id) + + def current_value(self): + if self.multiline: + return self._input_widget.toPlainText() + return self._input_widget.text() + + def set_value(self, value, multivalue=False): + if multivalue: + set_value = set(value) + if None in set_value: + set_value.remove(None) + set_value.add(self.attr_def.default) + + if len(set_value) == 1: + value = tuple(set_value)[0] + else: + value = "< Multiselection >" + + if value != self.current_value(): + if self.multiline: + self._input_widget.setPlainText(value) + else: + self._input_widget.setText(value) + + +class BoolAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + 
input_widget = NiceCheckbox(parent=self) + input_widget.setChecked(self.attr_def.default) + + if self.attr_def.tooltip: + input_widget.setToolTip(self.attr_def.tooltip) + + input_widget.stateChanged.connect(self._on_value_change) + + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + self.main_layout.addStretch(1) + + def _on_value_change(self): + new_value = self._input_widget.isChecked() + self.value_changed.emit(new_value, self.attr_def.id) + + def current_value(self): + return self._input_widget.isChecked() + + def set_value(self, value, multivalue=False): + if multivalue: + set_value = set(value) + if None in set_value: + set_value.remove(None) + set_value.add(self.attr_def.default) + + if len(set_value) > 1: + self._input_widget.setCheckState(QtCore.Qt.PartiallyChecked) + return + value = tuple(set_value)[0] + + if value != self.current_value(): + self._input_widget.setChecked(value) + + +class EnumAttrWidget(_BaseAttrDefWidget): + def __init__(self, *args, **kwargs): + self._multivalue = False + super(EnumAttrWidget, self).__init__(*args, **kwargs) + + def _ui_init(self): + input_widget = CustomTextComboBox(self) + combo_delegate = QtWidgets.QStyledItemDelegate(input_widget) + input_widget.setItemDelegate(combo_delegate) + + if self.attr_def.tooltip: + input_widget.setToolTip(self.attr_def.tooltip) + + items = self.attr_def.items + for key, label in items.items(): + input_widget.addItem(label, key) + + idx = input_widget.findData(self.attr_def.default) + if idx >= 0: + input_widget.setCurrentIndex(idx) + + input_widget.currentIndexChanged.connect(self._on_value_change) + + self._combo_delegate = combo_delegate + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + + def _on_value_change(self): + new_value = self.current_value() + if self._multivalue: + self._multivalue = False + self._input_widget.set_custom_text(None) + self.value_changed.emit(new_value, self.attr_def.id) + + def current_value(self): + idx = self._input_widget.currentIndex() + return self._input_widget.itemData(idx) + + def set_value(self, value, multivalue=False): + if multivalue: + set_value = set(value) + if len(set_value) == 1: + multivalue = False + value = tuple(set_value)[0] + + if not multivalue: + idx = self._input_widget.findData(value) + cur_idx = self._input_widget.currentIndex() + if idx != cur_idx and idx >= 0: + self._input_widget.setCurrentIndex(idx) + + custom_text = None + if multivalue: + custom_text = "< Multiselection >" + self._input_widget.set_custom_text(custom_text) + self._multivalue = multivalue + + +class UnknownAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + input_widget = QtWidgets.QLabel(self) + self._value = self.attr_def.default + input_widget.setText(str(self._value)) + + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + + def current_value(self): + raise ValueError( + "{} can't hold real value.".format(self.__class__.__name__) + ) + + def set_value(self, value, multivalue=False): + if multivalue: + set_value = set(value) + if len(set_value) == 1: + value = tuple(set_value)[0] + else: + value = "< Multiselection >" + + str_value = str(value) + if str_value != self._value: + self._value = str_value + self._input_widget.setText(str_value) + + +class FileAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + input_widget = FilesWidget( + self.attr_def.single_item, + self.attr_def.allow_sequences, + self.attr_def.extensions_label, + self + ) + + if self.attr_def.tooltip: + 
input_widget.setToolTip(self.attr_def.tooltip) + + input_widget.set_filters( + self.attr_def.folders, self.attr_def.extensions + ) + + input_widget.value_changed.connect(self._on_value_change) + + self._input_widget = input_widget + + self.main_layout.addWidget(input_widget, 0) + + def _on_value_change(self): + new_value = self.current_value() + self.value_changed.emit(new_value, self.attr_def.id) + + def current_value(self): + return self._input_widget.current_value() + + def set_value(self, value, multivalue=False): + self._input_widget.set_value(value, multivalue) From 790842080f7c5e1c2cbf1cb3b5ebafec85ddd961 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 13:47:06 +0200 Subject: [PATCH 0961/2550] changed imports in publisher --- openpype/tools/publisher/widgets/precreate_widget.py | 2 +- openpype/tools/publisher/widgets/widgets.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/widgets/precreate_widget.py b/openpype/tools/publisher/widgets/precreate_widget.py index eaadfe890b..d41942dd60 100644 --- a/openpype/tools/publisher/widgets/precreate_widget.py +++ b/openpype/tools/publisher/widgets/precreate_widget.py @@ -1,6 +1,6 @@ from Qt import QtWidgets, QtCore -from openpype.widgets.attribute_defs import create_widget_for_attr_def +from openpype.tools.attribute_defs import create_widget_for_attr_def class PreCreateWidget(QtWidgets.QWidget): diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index aa7e3be687..5e52d7f50e 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -6,7 +6,7 @@ import collections from Qt import QtWidgets, QtCore, QtGui import qtawesome -from openpype.widgets.attribute_defs import create_widget_for_attr_def +from openpype.tools.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm from openpype.tools.utils import ( @@ -1219,7 +1219,7 @@ class CreatorAttrsWidget(QtWidgets.QWidget): Attributes are defined on creator so are dynamic. Their look and type is based on attribute definitions that are defined in `~/openpype/pipeline/lib/attribute_definitions.py` and their widget - representation in `~/openpype/widgets/attribute_defs/*`. + representation in `~/openpype/tools/attribute_defs/*`. Widgets are disabled if context of instance is not valid. @@ -1341,7 +1341,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): Look and type of attributes is based on attribute definitions that are defined in `~/openpype/pipeline/lib/attribute_definitions.py` and their - widget representation in `~/openpype/widgets/attribute_defs/*`. + widget representation in `~/openpype/tools/attribute_defs/*`. Widgets are disabled if context of instance is not valid. 
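
For context, a consumer of the relocated `openpype.tools.attribute_defs`
helpers builds an input widget roughly as sketched below. This is only an
illustration and not part of the patch; the `TextDef` arguments, the
"comment" key and the print handler are assumed placeholders.

    import sys

    from Qt import QtWidgets

    from openpype.lib.attribute_definitions import TextDef
    from openpype.tools.attribute_defs import create_widget_for_attr_def

    app = QtWidgets.QApplication(sys.argv)

    # Build a single input widget from one attribute definition.
    comment_def = TextDef("comment", label="Comment")
    widget = create_widget_for_attr_def(comment_def)

    # 'value_changed' emits the new value together with the definition id.
    widget.value_changed.connect(lambda value, _id: print(value))

    widget.set_value("Initial comment")
    widget.show()
    sys.exit(app.exec_())
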
From c78b7d1d0cd51e1e2c411051948b8b70d02eb621 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 13:47:24 +0200 Subject: [PATCH 0962/2550] added dialog for attribute defs --- openpype/tools/attribute_defs/__init__.py | 6 +++++ openpype/tools/attribute_defs/dialog.py | 32 +++++++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 openpype/tools/attribute_defs/dialog.py diff --git a/openpype/tools/attribute_defs/__init__.py b/openpype/tools/attribute_defs/__init__.py index ce6b80109e..f991fdec3d 100644 --- a/openpype/tools/attribute_defs/__init__.py +++ b/openpype/tools/attribute_defs/__init__.py @@ -3,8 +3,14 @@ from .widgets import ( AttributeDefinitionsWidget, ) +from .dialog import ( + AttributeDefinitionsDialog, +) + __all__ = ( "create_widget_for_attr_def", "AttributeDefinitionsWidget", + + "AttributeDefinitionsDialog", ) diff --git a/openpype/tools/attribute_defs/dialog.py b/openpype/tools/attribute_defs/dialog.py new file mode 100644 index 0000000000..e6c11516c8 --- /dev/null +++ b/openpype/tools/attribute_defs/dialog.py @@ -0,0 +1,32 @@ +from Qt import QtWidgets + +from .widgets import AttributeDefinitionsWidget + + +class AttributeDefinitionsDialog(QtWidgets.QDialog): + def __init__(self, attr_defs, parent=None): + super(AttributeDefinitionsDialog, self).__init__(parent) + + attrs_widget = AttributeDefinitionsWidget(attr_defs, self) + + btns_widget = QtWidgets.QWidget(self) + ok_btn = QtWidgets.QPushButton("OK", btns_widget) + cancel_btn = QtWidgets.QPushButton("Cancel", btns_widget) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.setContentsMargins(0, 0, 0, 0) + btns_layout.addStretch(1) + btns_layout.addWidget(ok_btn, 0) + btns_layout.addWidget(cancel_btn, 0) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(attrs_widget, 1) + main_layout.addWidget(btns_widget, 0) + + ok_btn.clicked.connect(self.accept) + cancel_btn.clicked.connect(self.reject) + + self._attrs_widget = attrs_widget + + def get_values(self): + return self._attrs_widget.current_value() From 72e166066eb2a82d694f5c5e6dc66a5fbfeb4e6e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 13:48:01 +0200 Subject: [PATCH 0963/2550] removed attribute defs from widgets --- openpype/widgets/attribute_defs/__init__.py | 10 - .../widgets/attribute_defs/files_widget.py | 968 ------------------ openpype/widgets/attribute_defs/widgets.py | 490 --------- 3 files changed, 1468 deletions(-) delete mode 100644 openpype/widgets/attribute_defs/__init__.py delete mode 100644 openpype/widgets/attribute_defs/files_widget.py delete mode 100644 openpype/widgets/attribute_defs/widgets.py diff --git a/openpype/widgets/attribute_defs/__init__.py b/openpype/widgets/attribute_defs/__init__.py deleted file mode 100644 index ce6b80109e..0000000000 --- a/openpype/widgets/attribute_defs/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .widgets import ( - create_widget_for_attr_def, - AttributeDefinitionsWidget, -) - - -__all__ = ( - "create_widget_for_attr_def", - "AttributeDefinitionsWidget", -) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py deleted file mode 100644 index d29aa1b607..0000000000 --- a/openpype/widgets/attribute_defs/files_widget.py +++ /dev/null @@ -1,968 +0,0 @@ -import os -import collections -import uuid -import json - -from Qt import QtWidgets, QtCore, QtGui - -from openpype.lib import FileDefItem -from openpype.tools.utils import ( - paint_image_with_color, - ClickableLabel, -) -# TODO 
change imports -from openpype.tools.resources import get_image -from openpype.tools.utils import ( - IconButton, - PixmapLabel -) - -ITEM_ID_ROLE = QtCore.Qt.UserRole + 1 -ITEM_LABEL_ROLE = QtCore.Qt.UserRole + 2 -ITEM_ICON_ROLE = QtCore.Qt.UserRole + 3 -FILENAMES_ROLE = QtCore.Qt.UserRole + 4 -DIRPATH_ROLE = QtCore.Qt.UserRole + 5 -IS_DIR_ROLE = QtCore.Qt.UserRole + 6 -IS_SEQUENCE_ROLE = QtCore.Qt.UserRole + 7 -EXT_ROLE = QtCore.Qt.UserRole + 8 - - -def convert_bytes_to_json(bytes_value): - if isinstance(bytes_value, QtCore.QByteArray): - # Raw data are already QByteArray and we don't have to load them - encoded_data = bytes_value - else: - encoded_data = QtCore.QByteArray.fromRawData(bytes_value) - stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.ReadOnly) - text = stream.readQString() - try: - return json.loads(text) - except Exception: - return None - - -def convert_data_to_bytes(data): - bytes_value = QtCore.QByteArray() - stream = QtCore.QDataStream(bytes_value, QtCore.QIODevice.WriteOnly) - stream.writeQString(json.dumps(data)) - return bytes_value - - -class SupportLabel(QtWidgets.QLabel): - pass - - -class DropEmpty(QtWidgets.QWidget): - _empty_extensions = "Any file" - - def __init__(self, single_item, allow_sequences, extensions_label, parent): - super(DropEmpty, self).__init__(parent) - - drop_label_widget = QtWidgets.QLabel("Drag & Drop files here", self) - - items_label_widget = SupportLabel(self) - items_label_widget.setWordWrap(True) - - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addSpacing(20) - layout.addWidget( - drop_label_widget, 0, alignment=QtCore.Qt.AlignCenter - ) - layout.addSpacing(30) - layout.addStretch(1) - layout.addWidget( - items_label_widget, 0, alignment=QtCore.Qt.AlignCenter - ) - layout.addSpacing(10) - - for widget in ( - drop_label_widget, - items_label_widget, - ): - widget.setAlignment(QtCore.Qt.AlignCenter) - widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) - - update_size_timer = QtCore.QTimer() - update_size_timer.setInterval(10) - update_size_timer.setSingleShot(True) - - update_size_timer.timeout.connect(self._on_update_size_timer) - - self._update_size_timer = update_size_timer - - if extensions_label and not extensions_label.startswith(" "): - extensions_label = " " + extensions_label - - self._single_item = single_item - self._extensions_label = extensions_label - self._allow_sequences = allow_sequences - self._allowed_extensions = set() - self._allow_folders = None - - self._drop_label_widget = drop_label_widget - self._items_label_widget = items_label_widget - - self.set_allow_folders(False) - - def set_extensions(self, extensions): - if extensions: - extensions = { - ext.replace(".", "") - for ext in extensions - } - if extensions == self._allowed_extensions: - return - self._allowed_extensions = extensions - - self._update_items_label() - - def set_allow_folders(self, allowed): - if self._allow_folders == allowed: - return - - self._allow_folders = allowed - self._update_items_label() - - def _update_items_label(self): - allowed_items = [] - if self._allow_folders: - allowed_items.append("folder") - - if self._allowed_extensions: - allowed_items.append("file") - if self._allow_sequences: - allowed_items.append("sequence") - - if not self._single_item: - allowed_items = [item + "s" for item in allowed_items] - - if not allowed_items: - self._items_label_widget.setText( - "It is not allowed to add anything here!" 
- ) - return - - items_label = "Multiple " - if self._single_item: - items_label = "Single " - - if len(allowed_items) == 1: - extensions_label = allowed_items[0] - elif len(allowed_items) == 2: - extensions_label = " or ".join(allowed_items) - else: - last_item = allowed_items.pop(-1) - new_last_item = " or ".join(last_item, allowed_items.pop(-1)) - allowed_items.append(new_last_item) - extensions_label = ", ".join(allowed_items) - - allowed_items_label = extensions_label - - items_label += allowed_items_label - label_tooltip = None - if self._allowed_extensions: - items_label += " of\n{}".format( - ", ".join(sorted(self._allowed_extensions)) - ) - - if self._extensions_label: - label_tooltip = items_label - items_label = self._extensions_label - - if self._items_label_widget.text() == items_label: - return - - self._items_label_widget.setToolTip(label_tooltip) - self._items_label_widget.setText(items_label) - self._update_size_timer.start() - - def resizeEvent(self, event): - super(DropEmpty, self).resizeEvent(event) - self._update_size_timer.start() - - def _on_update_size_timer(self): - """Recalculate height of label with extensions. - - Dynamic QLabel with word wrap does not handle properly it's sizeHint - calculations on show. This way it is recalculated. It is good practice - to trigger this method with small offset using '_update_size_timer'. - """ - - width = self._items_label_widget.width() - height = self._items_label_widget.heightForWidth(width) - self._items_label_widget.setMinimumHeight(height) - self._items_label_widget.updateGeometry() - - def paintEvent(self, event): - super(DropEmpty, self).paintEvent(event) - painter = QtGui.QPainter(self) - pen = QtGui.QPen() - pen.setWidth(1) - pen.setBrush(QtCore.Qt.darkGray) - pen.setStyle(QtCore.Qt.DashLine) - painter.setPen(pen) - content_margins = self.layout().contentsMargins() - - left_m = content_margins.left() - top_m = content_margins.top() - rect = QtCore.QRect( - left_m, - top_m, - ( - self.rect().width() - - (left_m + content_margins.right() + pen.width()) - ), - ( - self.rect().height() - - (top_m + content_margins.bottom() + pen.width()) - ) - ) - painter.drawRect(rect) - - -class FilesModel(QtGui.QStandardItemModel): - def __init__(self, single_item, allow_sequences): - super(FilesModel, self).__init__() - - self._id = str(uuid.uuid4()) - self._single_item = single_item - self._multivalue = False - self._allow_sequences = allow_sequences - - self._items_by_id = {} - self._file_items_by_id = {} - self._filenames_by_dirpath = collections.defaultdict(set) - self._items_by_dirpath = collections.defaultdict(list) - - @property - def id(self): - return self._id - - def set_multivalue(self, multivalue): - """Disable filtering.""" - - if self._multivalue == multivalue: - return - self._multivalue = multivalue - - def add_filepaths(self, items): - if not items: - return - - file_items = FileDefItem.from_value(items, self._allow_sequences) - if not file_items: - return - - if not self._multivalue and self._single_item: - file_items = [file_items[0]] - current_ids = list(self._file_items_by_id.keys()) - if current_ids: - self.remove_item_by_ids(current_ids) - - new_model_items = [] - for file_item in file_items: - item_id, model_item = self._create_item(file_item) - new_model_items.append(model_item) - self._file_items_by_id[item_id] = file_item - self._items_by_id[item_id] = model_item - - if new_model_items: - roow_item = self.invisibleRootItem() - roow_item.appendRows(new_model_items) - - def remove_item_by_ids(self, item_ids): 
- if not item_ids: - return - - items = [] - for item_id in set(item_ids): - if item_id not in self._items_by_id: - continue - item = self._items_by_id.pop(item_id) - self._file_items_by_id.pop(item_id) - items.append(item) - - if items: - for item in items: - self.removeRows(item.row(), 1) - - def get_file_item_by_id(self, item_id): - return self._file_items_by_id.get(item_id) - - def _create_item(self, file_item): - if file_item.is_dir: - icon_pixmap = paint_image_with_color( - get_image(filename="folder.png"), QtCore.Qt.white - ) - else: - icon_pixmap = paint_image_with_color( - get_image(filename="file.png"), QtCore.Qt.white - ) - - item = QtGui.QStandardItem() - item_id = str(uuid.uuid4()) - item.setData(item_id, ITEM_ID_ROLE) - item.setData(file_item.label or "< empty >", ITEM_LABEL_ROLE) - item.setData(file_item.filenames, FILENAMES_ROLE) - item.setData(file_item.directory, DIRPATH_ROLE) - item.setData(icon_pixmap, ITEM_ICON_ROLE) - item.setData(file_item.ext, EXT_ROLE) - item.setData(file_item.is_dir, IS_DIR_ROLE) - item.setData(file_item.is_sequence, IS_SEQUENCE_ROLE) - - return item_id, item - - def mimeData(self, indexes): - item_ids = [ - index.data(ITEM_ID_ROLE) - for index in indexes - ] - - item_ids_data = convert_data_to_bytes(item_ids) - mime_data = super(FilesModel, self).mimeData(indexes) - mime_data.setData("files_widget/internal_move", item_ids_data) - - file_items = [] - for item_id in item_ids: - file_item = self.get_file_item_by_id(item_id) - if file_item: - file_items.append(file_item.to_dict()) - - full_item_data = convert_data_to_bytes({ - "items": file_items, - "id": self._id - }) - mime_data.setData("files_widget/full_data", full_item_data) - return mime_data - - def dropMimeData(self, mime_data, action, row, col, index): - item_ids = convert_bytes_to_json( - mime_data.data("files_widget/internal_move") - ) - if item_ids is None: - return False - - # Find matching item after which will be items moved - # - store item before moved items are removed - root = self.invisibleRootItem() - if row >= 0: - src_item = self.item(row) - else: - src_item_id = index.data(ITEM_ID_ROLE) - src_item = self._items_by_id.get(src_item_id) - - # Take out items that should be moved - items = [] - for item_id in item_ids: - item = self._items_by_id.get(item_id) - if item: - self.takeRow(item.row()) - items.append(item) - - # Skip if there are not items that can be moved - if not items: - return False - - # Calculate row where items should be inserted - if src_item: - src_row = src_item.row() - else: - src_row = root.rowCount() - - root.insertRow(src_row, items) - return True - - -class FilesProxyModel(QtCore.QSortFilterProxyModel): - def __init__(self, *args, **kwargs): - super(FilesProxyModel, self).__init__(*args, **kwargs) - self._allow_folders = False - self._allowed_extensions = None - self._multivalue = False - - def set_multivalue(self, multivalue): - """Disable filtering.""" - - if self._multivalue == multivalue: - return - self._multivalue = multivalue - self.invalidateFilter() - - def set_allow_folders(self, allow=None): - if allow is None: - allow = not self._allow_folders - - if allow == self._allow_folders: - return - self._allow_folders = allow - self.invalidateFilter() - - def set_allowed_extensions(self, extensions=None): - if extensions is not None: - _extensions = set() - for ext in set(extensions): - if not ext.startswith("."): - ext = ".{}".format(ext) - _extensions.add(ext.lower()) - extensions = _extensions - - if self._allowed_extensions != extensions: - 
self._allowed_extensions = extensions - self.invalidateFilter() - - def are_valid_files(self, filepaths): - for filepath in filepaths: - if os.path.isfile(filepath): - _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: - return True - - elif self._allow_folders: - return True - return False - - def filter_valid_files(self, filepaths): - filtered_paths = [] - for filepath in filepaths: - if os.path.isfile(filepath): - _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: - filtered_paths.append(filepath) - - elif self._allow_folders: - filtered_paths.append(filepath) - return filtered_paths - - def filterAcceptsRow(self, row, parent_index): - # Skip filtering if multivalue is set - if self._multivalue: - return True - - model = self.sourceModel() - index = model.index(row, self.filterKeyColumn(), parent_index) - # First check if item is folder and if folders are enabled - if index.data(IS_DIR_ROLE): - if not self._allow_folders: - return False - return True - - # Check if there are any allowed extensions - if self._allowed_extensions is None: - return False - - if index.data(EXT_ROLE) not in self._allowed_extensions: - return False - return True - - def lessThan(self, left, right): - left_comparison = left.data(DIRPATH_ROLE) - right_comparison = right.data(DIRPATH_ROLE) - if left_comparison == right_comparison: - left_comparison = left.data(ITEM_LABEL_ROLE) - right_comparison = right.data(ITEM_LABEL_ROLE) - - if sorted((left_comparison, right_comparison))[0] == left_comparison: - return True - return False - - -class ItemWidget(QtWidgets.QWidget): - context_menu_requested = QtCore.Signal(QtCore.QPoint) - - def __init__( - self, item_id, label, pixmap_icon, is_sequence, multivalue, parent=None - ): - self._item_id = item_id - - super(ItemWidget, self).__init__(parent) - - self.setAttribute(QtCore.Qt.WA_TranslucentBackground) - - icon_widget = PixmapLabel(pixmap_icon, self) - label_widget = QtWidgets.QLabel(label, self) - - label_size_hint = label_widget.sizeHint() - height = label_size_hint.height() - actions_menu_pix = paint_image_with_color( - get_image(filename="menu.png"), QtCore.Qt.white - ) - - split_btn = ClickableLabel(self) - split_btn.setFixedSize(height, height) - split_btn.setPixmap(actions_menu_pix) - if multivalue: - split_btn.setVisible(False) - else: - split_btn.setVisible(is_sequence) - - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(5, 5, 5, 5) - layout.addWidget(icon_widget, 0) - layout.addWidget(label_widget, 1) - layout.addWidget(split_btn, 0) - - split_btn.clicked.connect(self._on_actions_clicked) - - self._icon_widget = icon_widget - self._label_widget = label_widget - self._split_btn = split_btn - self._actions_menu_pix = actions_menu_pix - self._last_scaled_pix_height = None - - def _update_btn_size(self): - label_size_hint = self._label_widget.sizeHint() - height = label_size_hint.height() - if height == self._last_scaled_pix_height: - return - self._last_scaled_pix_height = height - self._split_btn.setFixedSize(height, height) - pix = self._actions_menu_pix.scaled( - height, height, - QtCore.Qt.KeepAspectRatio, - QtCore.Qt.SmoothTransformation - ) - self._split_btn.setPixmap(pix) - - def showEvent(self, event): - super(ItemWidget, self).showEvent(event) - self._update_btn_size() - - def resizeEvent(self, event): - super(ItemWidget, self).resizeEvent(event) - self._update_btn_size() - - def _on_actions_clicked(self): - pos = self._split_btn.rect().bottomLeft() - point = self._split_btn.mapToGlobal(pos) - 
self.context_menu_requested.emit(point) - - -class InViewButton(IconButton): - pass - - -class FilesView(QtWidgets.QListView): - """View showing instances and their groups.""" - - remove_requested = QtCore.Signal() - context_menu_requested = QtCore.Signal(QtCore.QPoint) - - def __init__(self, *args, **kwargs): - super(FilesView, self).__init__(*args, **kwargs) - - self.setEditTriggers(QtWidgets.QListView.NoEditTriggers) - self.setSelectionMode( - QtWidgets.QAbstractItemView.ExtendedSelection - ) - self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - self.setAcceptDrops(True) - self.setDragEnabled(True) - self.setDragDropMode(self.InternalMove) - - remove_btn = InViewButton(self) - pix_enabled = paint_image_with_color( - get_image(filename="delete.png"), QtCore.Qt.white - ) - pix_disabled = paint_image_with_color( - get_image(filename="delete.png"), QtCore.Qt.gray - ) - icon = QtGui.QIcon(pix_enabled) - icon.addPixmap(pix_disabled, icon.Disabled, icon.Off) - remove_btn.setIcon(icon) - remove_btn.setEnabled(False) - - remove_btn.clicked.connect(self._on_remove_clicked) - self.customContextMenuRequested.connect(self._on_context_menu_request) - - self._remove_btn = remove_btn - - def setSelectionModel(self, *args, **kwargs): - """Catch selection model set to register signal callback. - - Selection model is not available during initialization. - """ - - super(FilesView, self).setSelectionModel(*args, **kwargs) - selection_model = self.selectionModel() - selection_model.selectionChanged.connect(self._on_selection_change) - - def set_multivalue(self, multivalue): - """Disable remove button on multivalue.""" - - self._remove_btn.setVisible(not multivalue) - - def has_selected_item_ids(self): - """Is any index selected.""" - for index in self.selectionModel().selectedIndexes(): - instance_id = index.data(ITEM_ID_ROLE) - if instance_id is not None: - return True - return False - - def get_selected_item_ids(self): - """Ids of selected instances.""" - - selected_item_ids = set() - for index in self.selectionModel().selectedIndexes(): - instance_id = index.data(ITEM_ID_ROLE) - if instance_id is not None: - selected_item_ids.add(instance_id) - return selected_item_ids - - def has_selected_sequence(self): - for index in self.selectionModel().selectedIndexes(): - if index.data(IS_SEQUENCE_ROLE): - return True - return False - - def event(self, event): - if event.type() == QtCore.QEvent.KeyPress: - if ( - event.key() == QtCore.Qt.Key_Delete - and self.has_selected_item_ids() - ): - self.remove_requested.emit() - return True - - return super(FilesView, self).event(event) - - def _on_context_menu_request(self, pos): - index = self.indexAt(pos) - if index.isValid(): - point = self.viewport().mapToGlobal(pos) - self.context_menu_requested.emit(point) - - def _on_selection_change(self): - self._remove_btn.setEnabled(self.has_selected_item_ids()) - - def _on_remove_clicked(self): - self.remove_requested.emit() - - def _update_remove_btn(self): - """Position remove button to bottom right.""" - - viewport = self.viewport() - height = viewport.height() - pos_x = viewport.width() - self._remove_btn.width() - 5 - pos_y = height - self._remove_btn.height() - 5 - self._remove_btn.move(max(0, pos_x), max(0, pos_y)) - - def resizeEvent(self, event): - super(FilesView, self).resizeEvent(event) - self._update_remove_btn() - - def showEvent(self, event): - super(FilesView, self).showEvent(event) - self._update_remove_btn() - - -class FilesWidget(QtWidgets.QFrame): - value_changed = QtCore.Signal() - - def __init__(self, 
single_item, allow_sequences, extensions_label, parent): - super(FilesWidget, self).__init__(parent) - self.setAcceptDrops(True) - - empty_widget = DropEmpty( - single_item, allow_sequences, extensions_label, self - ) - - files_model = FilesModel(single_item, allow_sequences) - files_proxy_model = FilesProxyModel() - files_proxy_model.setSourceModel(files_model) - files_view = FilesView(self) - files_view.setModel(files_proxy_model) - files_view.setVisible(False) - - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(empty_widget, 1) - layout.addWidget(files_view, 1) - - files_proxy_model.rowsInserted.connect(self._on_rows_inserted) - files_proxy_model.rowsRemoved.connect(self._on_rows_removed) - files_view.remove_requested.connect(self._on_remove_requested) - files_view.context_menu_requested.connect( - self._on_context_menu_requested - ) - - self._in_set_value = False - self._single_item = single_item - self._multivalue = False - - self._empty_widget = empty_widget - self._files_model = files_model - self._files_proxy_model = files_proxy_model - self._files_view = files_view - - self._widgets_by_id = {} - - def _set_multivalue(self, multivalue): - if self._multivalue == multivalue: - return - self._multivalue = multivalue - self._files_view.set_multivalue(multivalue) - self._files_model.set_multivalue(multivalue) - self._files_proxy_model.set_multivalue(multivalue) - - def set_value(self, value, multivalue): - self._in_set_value = True - - widget_ids = set(self._widgets_by_id.keys()) - self._remove_item_by_ids(widget_ids) - - self._set_multivalue(multivalue) - - self._add_filepaths(value) - - self._in_set_value = False - - def current_value(self): - model = self._files_proxy_model - item_ids = set() - for row in range(model.rowCount()): - index = model.index(row, 0) - item_ids.add(index.data(ITEM_ID_ROLE)) - - file_items = [] - for item_id in item_ids: - file_item = self._files_model.get_file_item_by_id(item_id) - if file_item is not None: - file_items.append(file_item.to_dict()) - - if not self._single_item: - return file_items - if file_items: - return file_items[0] - - empty_item = FileDefItem.create_empty_item() - return empty_item.to_dict() - - def set_filters(self, folders_allowed, exts_filter): - self._files_proxy_model.set_allow_folders(folders_allowed) - self._files_proxy_model.set_allowed_extensions(exts_filter) - self._empty_widget.set_extensions(exts_filter) - self._empty_widget.set_allow_folders(folders_allowed) - - def _on_rows_inserted(self, parent_index, start_row, end_row): - for row in range(start_row, end_row + 1): - index = self._files_proxy_model.index(row, 0, parent_index) - item_id = index.data(ITEM_ID_ROLE) - if item_id in self._widgets_by_id: - continue - label = index.data(ITEM_LABEL_ROLE) - pixmap_icon = index.data(ITEM_ICON_ROLE) - is_sequence = index.data(IS_SEQUENCE_ROLE) - - widget = ItemWidget( - item_id, - label, - pixmap_icon, - is_sequence, - self._multivalue - ) - widget.context_menu_requested.connect( - self._on_context_menu_requested - ) - self._files_view.setIndexWidget(index, widget) - self._files_proxy_model.setData( - index, widget.sizeHint(), QtCore.Qt.SizeHintRole - ) - self._widgets_by_id[item_id] = widget - - if not self._in_set_value: - self.value_changed.emit() - - def _on_rows_removed(self, parent_index, start_row, end_row): - available_item_ids = set() - for row in range(self._files_proxy_model.rowCount()): - index = self._files_proxy_model.index(row, 0) - item_id = index.data(ITEM_ID_ROLE) - 
available_item_ids.add(index.data(ITEM_ID_ROLE)) - - widget_ids = set(self._widgets_by_id.keys()) - for item_id in available_item_ids: - if item_id in widget_ids: - widget_ids.remove(item_id) - - for item_id in widget_ids: - widget = self._widgets_by_id.pop(item_id) - widget.setVisible(False) - widget.deleteLater() - - if not self._in_set_value: - self.value_changed.emit() - - def _on_split_request(self): - if self._multivalue: - return - - item_ids = self._files_view.get_selected_item_ids() - if not item_ids: - return - - for item_id in item_ids: - file_item = self._files_model.get_file_item_by_id(item_id) - if not file_item: - return - - new_items = file_item.split_sequence() - self._add_filepaths(new_items) - self._remove_item_by_ids(item_ids) - - def _on_remove_requested(self): - if self._multivalue: - return - - items_to_delete = self._files_view.get_selected_item_ids() - if items_to_delete: - self._remove_item_by_ids(items_to_delete) - - def _on_context_menu_requested(self, pos): - if self._multivalue: - return - - menu = QtWidgets.QMenu(self._files_view) - - if self._files_view.has_selected_sequence(): - split_action = QtWidgets.QAction("Split sequence", menu) - split_action.triggered.connect(self._on_split_request) - menu.addAction(split_action) - - remove_action = QtWidgets.QAction("Remove", menu) - remove_action.triggered.connect(self._on_remove_requested) - menu.addAction(remove_action) - - menu.popup(pos) - - def sizeHint(self): - # Get size hints of widget and visible widgets - result = super(FilesWidget, self).sizeHint() - if not self._files_view.isVisible(): - not_visible_hint = self._files_view.sizeHint() - else: - not_visible_hint = self._empty_widget.sizeHint() - - # Get margins of this widget - margins = self.layout().contentsMargins() - - # Change size hint based on result of maximum size hint of widgets - result.setWidth(max( - result.width(), - not_visible_hint.width() + margins.left() + margins.right() - )) - result.setHeight(max( - result.height(), - not_visible_hint.height() + margins.top() + margins.bottom() - )) - - return result - - def dragEnterEvent(self, event): - if self._multivalue: - return - - mime_data = event.mimeData() - if mime_data.hasUrls(): - filepaths = [] - for url in mime_data.urls(): - filepath = url.toLocalFile() - if os.path.exists(filepath): - filepaths.append(filepath) - - if self._files_proxy_model.are_valid_files(filepaths): - event.setDropAction(QtCore.Qt.CopyAction) - event.accept() - - full_data_value = mime_data.data("files_widget/full_data") - if self._handle_full_data_drag(full_data_value): - event.setDropAction(QtCore.Qt.CopyAction) - event.accept() - - def dragLeaveEvent(self, event): - event.accept() - - def dropEvent(self, event): - if self._multivalue: - return - - mime_data = event.mimeData() - if mime_data.hasUrls(): - event.accept() - # event.setDropAction(QtCore.Qt.CopyAction) - filepaths = [] - for url in mime_data.urls(): - filepath = url.toLocalFile() - if os.path.exists(filepath): - filepaths.append(filepath) - - # Filter filepaths before passing it to model - filepaths = self._files_proxy_model.filter_valid_files(filepaths) - if filepaths: - self._add_filepaths(filepaths) - - if self._handle_full_data_drop( - mime_data.data("files_widget/full_data") - ): - event.setDropAction(QtCore.Qt.CopyAction) - event.accept() - - super(FilesWidget, self).dropEvent(event) - - def _handle_full_data_drag(self, value): - if value is None: - return False - - full_data = convert_bytes_to_json(value) - if full_data is None: - return 
False - - if full_data["id"] == self._files_model.id: - return False - return True - - def _handle_full_data_drop(self, value): - if value is None: - return False - - full_data = convert_bytes_to_json(value) - if full_data is None: - return False - - if full_data["id"] == self._files_model.id: - return False - - for item in full_data["items"]: - filepaths = [ - os.path.join(item["directory"], filename) - for filename in item["filenames"] - ] - filepaths = self._files_proxy_model.filter_valid_files(filepaths) - if filepaths: - self._add_filepaths(filepaths) - - if self._copy_modifiers_enabled(): - return False - return True - - def _copy_modifiers_enabled(self): - if ( - QtWidgets.QApplication.keyboardModifiers() - & QtCore.Qt.ControlModifier - ): - return True - return False - - def _add_filepaths(self, filepaths): - self._files_model.add_filepaths(filepaths) - self._update_visibility() - - def _remove_item_by_ids(self, item_ids): - self._files_model.remove_item_by_ids(item_ids) - self._update_visibility() - - def _update_visibility(self): - files_exists = self._files_proxy_model.rowCount() > 0 - self._files_view.setVisible(files_exists) - self._empty_widget.setVisible(not files_exists) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py deleted file mode 100644 index 60ae952553..0000000000 --- a/openpype/widgets/attribute_defs/widgets.py +++ /dev/null @@ -1,490 +0,0 @@ -import uuid -import copy - -from Qt import QtWidgets, QtCore - -from openpype.lib.attribute_definitions import ( - AbtractAttrDef, - UnknownDef, - NumberDef, - TextDef, - EnumDef, - BoolDef, - FileDef, - UIDef, - UISeparatorDef, - UILabelDef -) -from openpype.tools.utils import CustomTextComboBox -from openpype.widgets.nice_checkbox import NiceCheckbox - -from .files_widget import FilesWidget - - -def create_widget_for_attr_def(attr_def, parent=None): - if not isinstance(attr_def, AbtractAttrDef): - raise TypeError("Unexpected type \"{}\" expected \"{}\"".format( - str(type(attr_def)), AbtractAttrDef - )) - - if isinstance(attr_def, NumberDef): - return NumberAttrWidget(attr_def, parent) - - if isinstance(attr_def, TextDef): - return TextAttrWidget(attr_def, parent) - - if isinstance(attr_def, EnumDef): - return EnumAttrWidget(attr_def, parent) - - if isinstance(attr_def, BoolDef): - return BoolAttrWidget(attr_def, parent) - - if isinstance(attr_def, UnknownDef): - return UnknownAttrWidget(attr_def, parent) - - if isinstance(attr_def, FileDef): - return FileAttrWidget(attr_def, parent) - - if isinstance(attr_def, UISeparatorDef): - return SeparatorAttrWidget(attr_def, parent) - - if isinstance(attr_def, UILabelDef): - return LabelAttrWidget(attr_def, parent) - - raise ValueError("Unknown attribute definition \"{}\"".format( - str(type(attr_def)) - )) - - -class AttributeDefinitionsWidget(QtWidgets.QWidget): - """Create widgets for attribute definitions in grid layout. - - Widget creates input widgets for passed attribute definitions. - - Widget can't handle multiselection values. 
- """ - - def __init__(self, attr_defs=None, parent=None): - super(AttributeDefinitionsWidget, self).__init__(parent) - - self._widgets = [] - self._current_keys = set() - - self.set_attr_defs(attr_defs) - - def clear_attr_defs(self): - """Remove all existing widgets and reset layout if needed.""" - self._widgets = [] - self._current_keys = set() - - layout = self.layout() - if layout is not None: - if layout.count() == 0: - return - - while layout.count(): - item = layout.takeAt(0) - widget = item.widget() - if widget: - widget.setVisible(False) - widget.deleteLater() - - layout.deleteLater() - - new_layout = QtWidgets.QGridLayout() - new_layout.setColumnStretch(0, 0) - new_layout.setColumnStretch(1, 1) - self.setLayout(new_layout) - - def set_attr_defs(self, attr_defs): - """Replace current attribute definitions with passed.""" - self.clear_attr_defs() - if attr_defs: - self.add_attr_defs(attr_defs) - - def add_attr_defs(self, attr_defs): - """Add attribute definitions to current.""" - layout = self.layout() - - row = 0 - for attr_def in attr_defs: - if attr_def.key in self._current_keys: - raise KeyError("Duplicated key \"{}\"".format(attr_def.key)) - - self._current_keys.add(attr_def.key) - widget = create_widget_for_attr_def(attr_def, self) - - expand_cols = 2 - if attr_def.is_value_def and attr_def.is_label_horizontal: - expand_cols = 1 - - col_num = 2 - expand_cols - - if attr_def.label: - label_widget = QtWidgets.QLabel(attr_def.label, self) - layout.addWidget( - label_widget, row, 0, 1, expand_cols - ) - if not attr_def.is_label_horizontal: - row += 1 - - layout.addWidget( - widget, row, col_num, 1, expand_cols - ) - self._widgets.append(widget) - row += 1 - - def set_value(self, value): - new_value = copy.deepcopy(value) - unused_keys = set(new_value.keys()) - for widget in self._widgets: - attr_def = widget.attr_def - if attr_def.key not in new_value: - continue - unused_keys.remove(attr_def.key) - - widget_value = new_value[attr_def.key] - if widget_value is None: - widget_value = copy.deepcopy(attr_def.default) - widget.set_value(widget_value) - - def current_value(self): - output = {} - for widget in self._widgets: - attr_def = widget.attr_def - if not isinstance(attr_def, UIDef): - output[attr_def.key] = widget.current_value() - - return output - - -class _BaseAttrDefWidget(QtWidgets.QWidget): - # Type 'object' may not work with older PySide versions - value_changed = QtCore.Signal(object, uuid.UUID) - - def __init__(self, attr_def, parent): - super(_BaseAttrDefWidget, self).__init__(parent) - - self.attr_def = attr_def - - main_layout = QtWidgets.QHBoxLayout(self) - main_layout.setContentsMargins(0, 0, 0, 0) - - self.main_layout = main_layout - - self._ui_init() - - def _ui_init(self): - raise NotImplementedError( - "Method '_ui_init' is not implemented. {}".format( - self.__class__.__name__ - ) - ) - - def current_value(self): - raise NotImplementedError( - "Method 'current_value' is not implemented. {}".format( - self.__class__.__name__ - ) - ) - - def set_value(self, value, multivalue=False): - raise NotImplementedError( - "Method 'set_value' is not implemented. 
{}".format( - self.__class__.__name__ - ) - ) - - -class SeparatorAttrWidget(_BaseAttrDefWidget): - def _ui_init(self): - input_widget = QtWidgets.QWidget(self) - input_widget.setObjectName("Separator") - input_widget.setMinimumHeight(2) - input_widget.setMaximumHeight(2) - - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - - -class LabelAttrWidget(_BaseAttrDefWidget): - def _ui_init(self): - input_widget = QtWidgets.QLabel(self) - label = self.attr_def.label - if label: - input_widget.setText(str(label)) - - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - - -class NumberAttrWidget(_BaseAttrDefWidget): - def _ui_init(self): - decimals = self.attr_def.decimals - if decimals > 0: - input_widget = QtWidgets.QDoubleSpinBox(self) - input_widget.setDecimals(decimals) - else: - input_widget = QtWidgets.QSpinBox(self) - - if self.attr_def.tooltip: - input_widget.setToolTip(self.attr_def.tooltip) - - input_widget.setMinimum(self.attr_def.minimum) - input_widget.setMaximum(self.attr_def.maximum) - input_widget.setValue(self.attr_def.default) - - input_widget.setButtonSymbols( - QtWidgets.QAbstractSpinBox.ButtonSymbols.NoButtons - ) - - input_widget.valueChanged.connect(self._on_value_change) - - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - - def _on_value_change(self, new_value): - self.value_changed.emit(new_value, self.attr_def.id) - - def current_value(self): - return self._input_widget.value() - - def set_value(self, value, multivalue=False): - if multivalue: - set_value = set(value) - if None in set_value: - set_value.remove(None) - set_value.add(self.attr_def.default) - - if len(set_value) > 1: - self._input_widget.setSpecialValueText("Multiselection") - return - value = tuple(set_value)[0] - - if self.current_value != value: - self._input_widget.setValue(value) - - -class TextAttrWidget(_BaseAttrDefWidget): - def _ui_init(self): - # TODO Solve how to handle regex - # self.attr_def.regex - - self.multiline = self.attr_def.multiline - if self.multiline: - input_widget = QtWidgets.QPlainTextEdit(self) - else: - input_widget = QtWidgets.QLineEdit(self) - - if ( - self.attr_def.placeholder - and hasattr(input_widget, "setPlaceholderText") - ): - input_widget.setPlaceholderText(self.attr_def.placeholder) - - if self.attr_def.tooltip: - input_widget.setToolTip(self.attr_def.tooltip) - - if self.attr_def.default: - if self.multiline: - input_widget.setPlainText(self.attr_def.default) - else: - input_widget.setText(self.attr_def.default) - - input_widget.textChanged.connect(self._on_value_change) - - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - - def _on_value_change(self): - if self.multiline: - new_value = self._input_widget.toPlainText() - else: - new_value = self._input_widget.text() - self.value_changed.emit(new_value, self.attr_def.id) - - def current_value(self): - if self.multiline: - return self._input_widget.toPlainText() - return self._input_widget.text() - - def set_value(self, value, multivalue=False): - if multivalue: - set_value = set(value) - if None in set_value: - set_value.remove(None) - set_value.add(self.attr_def.default) - - if len(set_value) == 1: - value = tuple(set_value)[0] - else: - value = "< Multiselection >" - - if value != self.current_value(): - if self.multiline: - self._input_widget.setPlainText(value) - else: - self._input_widget.setText(value) - - -class BoolAttrWidget(_BaseAttrDefWidget): - def _ui_init(self): - 
input_widget = NiceCheckbox(parent=self) - input_widget.setChecked(self.attr_def.default) - - if self.attr_def.tooltip: - input_widget.setToolTip(self.attr_def.tooltip) - - input_widget.stateChanged.connect(self._on_value_change) - - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - self.main_layout.addStretch(1) - - def _on_value_change(self): - new_value = self._input_widget.isChecked() - self.value_changed.emit(new_value, self.attr_def.id) - - def current_value(self): - return self._input_widget.isChecked() - - def set_value(self, value, multivalue=False): - if multivalue: - set_value = set(value) - if None in set_value: - set_value.remove(None) - set_value.add(self.attr_def.default) - - if len(set_value) > 1: - self._input_widget.setCheckState(QtCore.Qt.PartiallyChecked) - return - value = tuple(set_value)[0] - - if value != self.current_value(): - self._input_widget.setChecked(value) - - -class EnumAttrWidget(_BaseAttrDefWidget): - def __init__(self, *args, **kwargs): - self._multivalue = False - super(EnumAttrWidget, self).__init__(*args, **kwargs) - - def _ui_init(self): - input_widget = CustomTextComboBox(self) - combo_delegate = QtWidgets.QStyledItemDelegate(input_widget) - input_widget.setItemDelegate(combo_delegate) - - if self.attr_def.tooltip: - input_widget.setToolTip(self.attr_def.tooltip) - - items = self.attr_def.items - for key, label in items.items(): - input_widget.addItem(label, key) - - idx = input_widget.findData(self.attr_def.default) - if idx >= 0: - input_widget.setCurrentIndex(idx) - - input_widget.currentIndexChanged.connect(self._on_value_change) - - self._combo_delegate = combo_delegate - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - - def _on_value_change(self): - new_value = self.current_value() - if self._multivalue: - self._multivalue = False - self._input_widget.set_custom_text(None) - self.value_changed.emit(new_value, self.attr_def.id) - - def current_value(self): - idx = self._input_widget.currentIndex() - return self._input_widget.itemData(idx) - - def set_value(self, value, multivalue=False): - if multivalue: - set_value = set(value) - if len(set_value) == 1: - multivalue = False - value = tuple(set_value)[0] - - if not multivalue: - idx = self._input_widget.findData(value) - cur_idx = self._input_widget.currentIndex() - if idx != cur_idx and idx >= 0: - self._input_widget.setCurrentIndex(idx) - - custom_text = None - if multivalue: - custom_text = "< Multiselection >" - self._input_widget.set_custom_text(custom_text) - self._multivalue = multivalue - - -class UnknownAttrWidget(_BaseAttrDefWidget): - def _ui_init(self): - input_widget = QtWidgets.QLabel(self) - self._value = self.attr_def.default - input_widget.setText(str(self._value)) - - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - - def current_value(self): - raise ValueError( - "{} can't hold real value.".format(self.__class__.__name__) - ) - - def set_value(self, value, multivalue=False): - if multivalue: - set_value = set(value) - if len(set_value) == 1: - value = tuple(set_value)[0] - else: - value = "< Multiselection >" - - str_value = str(value) - if str_value != self._value: - self._value = str_value - self._input_widget.setText(str_value) - - -class FileAttrWidget(_BaseAttrDefWidget): - def _ui_init(self): - input_widget = FilesWidget( - self.attr_def.single_item, - self.attr_def.allow_sequences, - self.attr_def.extensions_label, - self - ) - - if self.attr_def.tooltip: - 
input_widget.setToolTip(self.attr_def.tooltip) - - input_widget.set_filters( - self.attr_def.folders, self.attr_def.extensions - ) - - input_widget.value_changed.connect(self._on_value_change) - - self._input_widget = input_widget - - self.main_layout.addWidget(input_widget, 0) - - def _on_value_change(self): - new_value = self.current_value() - self.value_changed.emit(new_value, self.attr_def.id) - - def current_value(self): - return self._input_widget.current_value() - - def set_value(self, value, multivalue=False): - self._input_widget.set_value(value, multivalue) From 170c35c4d11936e40e40b3a36967c558cee3caa9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 14:37:00 +0200 Subject: [PATCH 0964/2550] initial commit - not changing current implementation yet --- .../pipeline/workfile/new_template_loader.py | 678 ++++++++++++++++++ 1 file changed, 678 insertions(+) create mode 100644 openpype/pipeline/workfile/new_template_loader.py diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py new file mode 100644 index 0000000000..82cb2d9974 --- /dev/null +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -0,0 +1,678 @@ +import os +import collections +from abc import ABCMeta, abstractmethod + +import six + +from openpype.client import get_asset_by_name +from openpype.settings import get_project_settings +from openpype.host import HostBase +from openpype.lib import Logger, StringTemplate, filter_profiles +from openpype.pipeline import legacy_io, Anatomy +from openpype.pipeline.load import get_loaders_by_name +from openpype.pipeline.create import get_legacy_creator_by_name + +from .build_template_exceptions import ( + TemplateProfileNotFound, + TemplateLoadingFailed, + TemplateNotFound, +) + + +@six.add_metaclass(ABCMeta) +class AbstractTemplateLoader: + """Abstraction of Template Loader. + + Args: + host (Union[HostBase, ModuleType]): Implementation of host. + """ + + _log = None + + def __init__(self, host): + # Store host + self._host = host + if isinstance(host, HostBase): + host_name = host.name + else: + host_name = os.environ.get("AVALON_APP") + self._host_name = host_name + + # Shared data across placeholder plugins + self._shared_data = {} + + # Where created objects of placeholder plugins will be stored + self._placeholder_plugins = None + + project_name = legacy_io.active_project() + asset_name = legacy_io.Session["AVALON_ASSET"] + + self.current_asset = asset_name + self.project_name = project_name + self.task_name = legacy_io.Session["AVALON_TASK"] + self.current_asset_doc = get_asset_by_name(project_name, asset_name) + self.task_type = ( + self.current_asset_doc + .get("data", {}) + .get("tasks", {}) + .get(self.task_name, {}) + .get("type") + ) + + @abstractmethod + def get_placeholder_plugin_classes(self): + """Get placeholder plugin classes that can be used to build template. + + Returns: + List[PlaceholderPlugin]: Plugin classes available for host. + """ + + return [] + + @property + def host(self): + """Access to host implementation. + + Returns: + Union[HostBase, ModuleType]: Implementation of host. + """ + + return self._host + + @property + def host_name(self): + """Name of 'host' implementation. + + Returns: + str: Host's name. 
+ """ + + return self._host_name + + @property + def log(self): + """Dynamically created logger for the plugin.""" + + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + def refresh(self): + """Reset cached data.""" + + self._placeholder_plugins = None + self._loaders_by_name = None + self._creators_by_name = None + self.clear_shared_data() + + def clear_shared_data(self): + """Clear shared data. + + Method only clear shared data to default state. + """ + + self._shared_data = {} + + def get_loaders_by_name(self): + if self._loaders_by_name is None: + self._loaders_by_name = get_loaders_by_name() + return self._loaders_by_name + + def get_creators_by_name(self): + if self._creators_by_name is None: + self._creators_by_name = get_legacy_creator_by_name() + return self._creators_by_name + + def get_shared_data(self, key): + """Receive shared data across plugins and placeholders. + + This can be used to scroll scene only once to look for placeholder + items if the storing is unified but each placeholder plugin would have + to call it again. + + Shared data are cleaned up on specific callbacks. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + return self._shared_data.get(key) + + def set_shared_data(self, key, value): + """Store share data across plugins and placeholders. + + Store data that can be afterwards accessed from any future call. It + is good practice to check if the same value is not already stored under + different key or if the key is not already used for something else. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Shared data are cleaned up on specific callbacks. + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + self._shared_data[key] = value + + @property + def placeholder_plugins(self): + """Access to initialized placeholder plugins. + + Returns: + List[PlaceholderPlugin]: Initialized plugins available for host. + """ + + if self._placeholder_plugins is None: + placeholder_plugins = {} + for cls in self.get_placeholder_plugin_classes(): + try: + plugin = cls(self) + placeholder_plugins[plugin.identifier] = plugin + + except Exception: + self.log.warning( + "Failed to initialize placeholder plugin {}".format( + cls.__name__ + ) + ) + + self._placeholder_plugins = placeholder_plugins + return self._placeholder_plugins + + def get_placeholders(self): + """Collect placeholder items from scene. + + Each placeholder plugin can collect it's placeholders and return them. + This method does not use cached values but always go through the scene. + + Returns: + List[PlaceholderItem]: Sorted placeholder items. + """ + + placeholders = [] + for placeholder_plugin in self.placeholder_plugins: + result = placeholder_plugin.collect_placeholders() + if result: + placeholders.extend(result) + + return list(sorted( + placeholders, + key=lambda i: i.order + )) + + @abstractmethod + def import_template(self, template_path): + """ + Import template in current host. + + Should load the content of template into scene so + 'process_scene_placeholders' can be started. + + Args: + template_path (str): Fullpath for current task and + host's template file. 
+ """ + + pass + + # def template_already_imported(self, err_msg): + # pass + # + # def template_loading_failed(self, err_msg): + # pass + + def _prepare_placeholders(self, placeholders): + """Run preparation part for placeholders on plugins. + + Args: + placeholders (List[PlaceholderItem]): Placeholder items that will + be processed. + """ + + # Prepare placeholder items by plugin + plugins_by_identifier = {} + placeholders_by_plugin_id = collections.defaultdict(list) + for placeholder in placeholders: + plugin = placeholder.plugin + identifier = plugin.identifier + plugins_by_identifier[identifier] = plugin + placeholders_by_plugin_id[identifier].append(placeholder) + + # Plugin should prepare data for passed placeholders + for identifier, placeholders in placeholders_by_plugin_id.items(): + plugin = plugins_by_identifier[identifier] + plugin.prepare_placeholders(placeholders) + + def process_scene_placeholders(self, level_limit=None): + """Find placeholders in scene using plugins and process them. + + This should happen after 'import_template'. + + Collect available placeholders from scene. All of them are processed + after that shared data are cleared. Placeholder items are collected + again and if there are any new the loop happens again. This is possible + to change with defying 'level_limit'. + + Placeholders are marked as processed so they're not re-processed. To + identify which placeholders were already processed is used + placeholder's 'scene_identifier'. + + Args: + level_limit (int): Level of loops that can happen. By default + if is possible to have infinite nested placeholder processing. + """ + + if not self.placeholder_plugins: + self.log.warning("There are no placeholder plugins available.") + return + + placeholders = self.get_placeholders() + if not placeholders: + self.log.warning("No placeholders were found.") + return + + placeholder_by_scene_id = { + placeholder.identifier: placeholder + for placeholder in placeholders + } + all_processed = len(placeholders) == 0 + iter_counter = 0 + while not all_processed: + filtered_placeholders = [] + for placeholder in placeholders: + if placeholder.finished: + continue + + if placeholder.in_progress: + self.log.warning(( + "Placeholder that should be processed" + " is already in progress." 
+ )) + continue + filtered_placeholders.append(placeholder) + + self._prepare_placeholders(filtered_placeholders) + + for placeholder in filtered_placeholders: + placeholder.set_in_progress() + placeholder_plugin = placeholder.plugin + try: + placeholder_plugin.process_placeholder(placeholder) + + except Exception as exc: + placeholder.set_error(exc) + + else: + placeholder.set_finished() + + # Clear shared data before getting new placeholders + self.clear_shared_data() + + if level_limit: + iter_counter += 1 + if iter_counter >= level_limit: + break + + all_processed = True + collected_placeholders = self.get_placeholders() + for placeholder in collected_placeholders: + if placeholder.identifier in placeholder_by_scene_id: + continue + + all_processed = False + identifier = placeholder.identifier + placeholder_by_scene_id[identifier] = placeholder + placeholders.append(placeholder) + + def _get_build_profiles(self): + project_settings = get_project_settings(self.project_name) + return ( + project_settings + [self.host_name] + ["templated_workfile_build"] + ["profiles"] + ) + + def get_template_path(self): + project_name = self.project_name + host_name = self.host_name + task_name = self.task_name + task_type = self.task_type + + build_profiles = self._get_build_profiles() + profile = filter_profiles( + build_profiles, + { + "task_types": task_type, + "task_names": task_name + } + ) + + if not profile: + raise TemplateProfileNotFound(( + "No matching profile found for task '{}' of type '{}' " + "with host '{}'" + ).format(task_name, task_type, host_name)) + + path = profile["path"] + if not path: + raise TemplateLoadingFailed(( + "Template path is not set.\n" + "Path need to be set in {}\\Template Workfile Build " + "Settings\\Profiles" + ).format(host_name.title())) + + # Try fill path with environments and anatomy roots + anatomy = Anatomy(project_name) + fill_data = { + key: value + for key, value in os.environ.items() + } + fill_data["root"] = anatomy.roots + result = StringTemplate.format_template(path, fill_data) + if result.solved: + path = result.normalized() + + if path and os.path.exists(path): + self.log.info("Found template at: '{}'".format(path)) + return path + + solved_path = None + while True: + try: + solved_path = anatomy.path_remapper(path) + except KeyError as missing_key: + raise KeyError( + "Could not solve key '{}' in template path '{}'".format( + missing_key, path)) + + if solved_path is None: + solved_path = path + if solved_path == path: + break + path = solved_path + + solved_path = os.path.normpath(solved_path) + if not os.path.exists(solved_path): + raise TemplateNotFound( + "Template found in openPype settings for task '{}' with host " + "'{}' does not exists. (Not found : {})".format( + task_name, host_name, solved_path)) + + self.log.info("Found template at: '{}'".format(solved_path)) + + return solved_path + + +@six.add_metaclass(ABCMeta) +class PlaceholderPlugin(object): + label = None + placeholder_options = [] + _log = None + + def __init__(self, builder): + self._builder = builder + + @property + def builder(self): + """Access to builder which initialized the plugin. + + Returns: + AbstractTemplateLoader: Loader of template build. + """ + + return self._builder + + @property + def log(self): + """Dynamically created logger for the plugin.""" + + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + @property + def identifier(self): + """Identifier which will be stored to placeholder. 
+ + Default implementation uses class name. + + Returns: + str: Unique identifier of placeholder plugin. + """ + + return self.__class__.__name__ + + @abstractmethod + def collect_placeholders(self): + """Collect placeholders from scene. + + Returns: + List[PlaceholderItem]: Placeholder objects. + """ + + pass + + def get_placeholder_options(self): + """Placeholder options for data showed. + + Returns: + List[AbtractAttrDef]: Attribute definitions of placeholder options. + """ + + return self.placeholder_options + + def prepare_placeholders(self, placeholders): + """Preparation part of placeholders. + + Args: + placeholders (List[PlaceholderItem]): List of placeholders that + will be processed. + """ + + pass + + @abstractmethod + def process_placeholder(self, placeholder): + """Process single placeholder item. + + Processing of placeholders is defined by their order thus can't be + processed in batch. + + Args: + placeholder (PlaceholderItem): Placeholder that should be + processed. + """ + + pass + + def cleanup_placeholders(self, placeholders): + """Cleanup of placeholders after processing. + + Not: + Passed placeholders can be failed. + + Args: + placeholders (List[PlaceholderItem]): List of placeholders that + were be processed. + """ + + pass + + def get_plugin_shared_data(self, key): + """Receive shared data across plugin and placeholders. + + Using shared data from builder but stored under plugin identifier. + + Shared data are cleaned up on specific callbacks. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + plugin_data = self.builder.get_shared_data(self.identifier) + if plugin_data is None: + return None + return plugin_data.get(key) + + def set_plugin_shared_data(self, key, value): + """Store share data across plugin and placeholders. + + Using shared data from builder but stored under plugin identifier. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Shared data are cleaned up on specific callbacks. + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + plugin_data = self.builder.get_shared_data(self.identifier) + if plugin_data is None: + plugin_data = {} + plugin_data[key] = value + self.builder.set_shared_data(self.identifier, plugin_data) + + +class PlaceholderItem(object): + """Item representing single item in scene that is a placeholder to process. + + Scene identifier is used to avoid processing of the palceholder item + multiple times. + + Args: + scene_identifier (str): Unique scene identifier. If placeholder is + created from the same "node" it must have same identifier. + data (Dict[str, Any]): Data related to placeholder. They're defined + by plugin. + plugin (PlaceholderPlugin): Plugin which created the placeholder item. + """ + + default_order = 100 + + def __init__(self, scene_identifier, data, plugin): + self._log = None + self._scene_identifier = scene_identifier + self._data = data + self._plugin = plugin + + # Keep track about state of Placeholder process + self._state = 0 + + # Exception which happened during processing + self._error = None + + @property + def plugin(self): + """Access to plugin which created placeholder. + + Returns: + PlaceholderPlugin: Plugin object. + """ + + return self._plugin + + @property + def builder(self): + """Access to builder. + + Returns: + AbstractTemplateLoader: Builder which is the top part of + placeholder. 
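For illustration only, a minimal sketch of how a host integration might subclass the PlaceholderPlugin API introduced in this patch. The scene-query helper and the "loader" key in the placeholder data are assumptions for the sketch, not something the patch itself defines.

from openpype.pipeline.workfile.new_template_loader import (
    PlaceholderPlugin,
    PlaceholderItem,
)


class ExamplePlaceholderPlugin(PlaceholderPlugin):
    """Sketch of a host-side placeholder plugin."""

    def _find_tagged_nodes(self):
        # Hypothetical scene query; a real host integration would ask
        # its own scene graph (Maya nodes, Nuke backdrops, ...) and
        # return (scene_identifier, data) pairs.
        return []

    def collect_placeholders(self):
        placeholders = []
        for node_id, node_data in self._find_tagged_nodes():
            # 'node_id' doubles as the scene identifier so the builder
            # can skip items it has already processed.
            placeholders.append(PlaceholderItem(node_id, node_data, self))
        return placeholders

    def process_placeholder(self, placeholder):
        # Resolve a loader by name through the builder that owns this
        # plugin; "loader" is an assumed key in the placeholder data.
        loaders_by_name = self.builder.get_loaders_by_name()
        loader_name = placeholder.data.get("loader")
        loader = loaders_by_name.get(loader_name)
        if loader is None:
            raise ValueError(
                "Loader '{}' not found for placeholder '{}'".format(
                    loader_name, placeholder.scene_identifier
                )
            )
        # A real implementation would now load content in place of the
        # placeholder node using the resolved loader.
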
+ """ + + return self.plugin.builder + + @property + def data(self): + """Placeholder data which can modify how placeholder is processed. + + Possible general keys + - order: Can define the order in which is palceholder processed. + Lower == earlier. + + Other keys are defined by placeholder and should validate them on item + creation. + + Returns: + Dict[str, Any]: Placeholder item data. + """ + + return self._data + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(repr(self)) + return self._log + + def __repr__(self): + return "< {} {} >".format(self.__class__.__name__, self.name) + + @property + def order(self): + order = self._data.get("order") + if order is None: + return self.default_order + return order + + @property + def scene_identifier(self): + return self._scene_identifier + + @property + def finished(self): + """Item was already processed.""" + + return self._state == 2 + + @property + def in_progress(self): + """Processing is in progress.""" + + return self._state == 1 + + @property + def failed(self): + """Processing of placeholder failed.""" + + return self._error is not None + + @property + def error(self): + """Exception with which the placeholder process failed. + + Gives ability to access the exception. + """ + + return self._error + + def set_in_progress(self): + """Change to in progress state.""" + + self._state = 1 + + def set_finished(self): + """Change to finished state.""" + + self._state = 2 + + def set_error(self, error): + """Set placeholder item as failed and mark it as finished.""" + + self._error = error + self.set_finished() From ae88578dbda9eca89cc792cec498d19c7ef3d6af Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Sep 2022 14:38:45 +0200 Subject: [PATCH 0965/2550] OP-3682 - changed md5 to sha256 Updated tests. 
Removed test cli method --- distribution/addon_distribution.py | 28 ++++------ distribution/file_handler.py | 54 ++++++++++++++----- .../tests/test_addon_distributtion.py | 8 +-- 3 files changed, 57 insertions(+), 33 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 95d0b5e397..0e3c672915 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -5,6 +5,7 @@ import attr import logging import requests import platform +import shutil from distribution.file_handler import RemoteFileHandler @@ -87,7 +88,9 @@ class AddonDownloader: """ if not os.path.exists(addon_path): raise ValueError(f"{addon_path} doesn't exist.") - if addon_hash != RemoteFileHandler.calculate_md5(addon_path): + if not RemoteFileHandler.check_integrity(addon_path, + addon_hash, + hash_type="sha256"): raise ValueError(f"{addon_path} doesn't match expected hash.") @classmethod @@ -144,14 +147,14 @@ def get_addons_info(server_endpoint): # "version": "1.0.0", # "addon_url": "c:/projects/openpype_slack_1.0.0.zip", # "type": UrlType.FILESYSTEM, - # "hash": "4f6b8568eb9dd6f510fd7c4dcb676788"}) # noqa + # "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa # # http_addon = AddonInfo( # **{"name": "openpype_slack", # "version": "1.0.0", # "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa # "type": UrlType.HTTP, - # "hash": "4f6b8568eb9dd6f510fd7c4dcb676788"}) # noqa + # "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa response = requests.get(server_endpoint) if not response.ok: @@ -205,6 +208,9 @@ def update_addon_state(addon_infos, destination_folder, factory, except Exception: log.warning(f"Error happened during updating {addon.name}", exc_info=True) + if os.path.isdir(addon_dest): + log.debug(f"Cleaning {addon_dest}") + shutil.rmtree(addon_dest) return download_states @@ -228,17 +234,5 @@ def check_addons(server_endpoint, addon_folder, downloaders): raise RuntimeError(f"Unable to update some addons {result}") -def cli(args): - addon_folder = "c:/projects/testing_addons/pypeclub/openpype/addons" - - downloader_factory = AddonDownloader() - downloader_factory.register_format(UrlType.FILESYSTEM, OSAddonDownloader) - downloader_factory.register_format(UrlType.HTTP, HTTPAddonDownloader) - - test_endpoint = "https://34e99f0f-f987-4715-95e6-d2d88caa7586.mock.pstmn.io/get_addons_info" # noqa - if os.environ.get("OPENPYPE_SERVER"): # TODO or from keychain - server_endpoint = os.environ.get("OPENPYPE_SERVER") + "get_addons_info" - else: - server_endpoint = test_endpoint - - check_addons(server_endpoint, addon_folder, downloader_factory) +def cli(*args): + raise NotImplemented \ No newline at end of file diff --git a/distribution/file_handler.py b/distribution/file_handler.py index 8c8b4230ce..f585c77632 100644 --- a/distribution/file_handler.py +++ b/distribution/file_handler.py @@ -33,17 +33,45 @@ class RemoteFileHandler: return md5 == RemoteFileHandler.calculate_md5(fpath, **kwargs) @staticmethod - def check_integrity(fpath, md5=None): + def calculate_sha256(fpath): + """Calculate sha256 for content of the file. + + Args: + fpath (str): Path to file. 
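A small usage sketch of the sha256 helpers this change adds to RemoteFileHandler; the sample path and hash are taken from the commented example and test fixture elsewhere in this patch, so treat them as illustrative values only.

from distribution.file_handler import RemoteFileHandler

# Illustrative values reused from this patch's examples.
addon_zip = "c:/projects/openpype_slack_1.0.0.zip"
expected_hash = (
    "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"
)

# Compute the digest of the downloaded archive ...
print(RemoteFileHandler.calculate_sha256(addon_zip))

# ... and verify it against the hash reported by the server.
if not RemoteFileHandler.check_integrity(
    addon_zip, expected_hash, hash_type="sha256"
):
    raise ValueError("{} doesn't match expected hash.".format(addon_zip))
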
+ + Returns: + str: hex encoded sha256 + + """ + h = hashlib.sha256() + b = bytearray(128 * 1024) + mv = memoryview(b) + with open(fpath, 'rb', buffering=0) as f: + for n in iter(lambda: f.readinto(mv), 0): + h.update(mv[:n]) + return h.hexdigest() + + @staticmethod + def check_sha256(fpath, sha256, **kwargs): + return sha256 == RemoteFileHandler.calculate_sha256(fpath, **kwargs) + + @staticmethod + def check_integrity(fpath, hash_value=None, hash_type=None): if not os.path.isfile(fpath): return False - if md5 is None: + if hash_value is None: return True - return RemoteFileHandler.check_md5(fpath, md5) + if not hash_type: + raise ValueError("Provide hash type, md5 or sha256") + if hash_type == 'md5': + return RemoteFileHandler.check_md5(fpath, hash_value) + if hash_type == "sha256": + return RemoteFileHandler.check_sha256(fpath, hash_value) @staticmethod def download_url( url, root, filename=None, - md5=None, max_redirect_hops=3 + sha256=None, max_redirect_hops=3 ): """Download a file from a url and place it in root. Args: @@ -51,7 +79,7 @@ class RemoteFileHandler: root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the basename of the URL - md5 (str, optional): MD5 checksum of the download. + sha256 (str, optional): sha256 checksum of the download. If None, do not check max_redirect_hops (int, optional): Maximum number of redirect hops allowed @@ -64,7 +92,8 @@ class RemoteFileHandler: os.makedirs(root, exist_ok=True) # check if file is already present locally - if RemoteFileHandler.check_integrity(fpath, md5): + if RemoteFileHandler.check_integrity(fpath, + sha256, hash_type="sha256"): print('Using downloaded and verified file: ' + fpath) return @@ -76,7 +105,7 @@ class RemoteFileHandler: file_id = RemoteFileHandler._get_google_drive_file_id(url) if file_id is not None: return RemoteFileHandler.download_file_from_google_drive( - file_id, root, filename, md5) + file_id, root, filename, sha256) # download the file try: @@ -92,20 +121,21 @@ class RemoteFileHandler: raise e # check integrity of downloaded file - if not RemoteFileHandler.check_integrity(fpath, md5): + if not RemoteFileHandler.check_integrity(fpath, + sha256, hash_type="sha256"): raise RuntimeError("File not found or corrupted.") @staticmethod def download_file_from_google_drive(file_id, root, filename=None, - md5=None): + sha256=None): """Download a Google Drive file from and place it in root. Args: file_id (str): id of file to be downloaded root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the id of the file. - md5 (str, optional): MD5 checksum of the download. + sha256 (str, optional): sha256 checksum of the download. 
If None, do not check """ # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url # noqa @@ -119,8 +149,8 @@ class RemoteFileHandler: os.makedirs(root, exist_ok=True) - if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(fpath, - md5): + if os.path.isfile(fpath) and RemoteFileHandler.check_integrity( + fpath, sha256, hash_type="sha256"): print('Using downloaded and verified file: ' + fpath) else: session = requests.Session() diff --git a/distribution/tests/test_addon_distributtion.py b/distribution/tests/test_addon_distributtion.py index e67ca3c479..717ef1330e 100644 --- a/distribution/tests/test_addon_distributtion.py +++ b/distribution/tests/test_addon_distributtion.py @@ -51,7 +51,7 @@ def sample_addon_info(): } } ], - "hash": "4f6b8568eb9dd6f510fd7c4dcb676788" + "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" } yield addon_info @@ -109,13 +109,13 @@ def test_update_addon_state(printer, sample_addon_info, addon_info.hash = "brokenhash" result = update_addon_state([addon_info], temp_folder, addon_downloader) assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \ - "Hashes not matching" + "Update should failed because of wrong hash" addon_info.hash = orig_hash result = update_addon_state([addon_info], temp_folder, addon_downloader) assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \ - "Failed updating" + "Addon should have been updated" result = update_addon_state([addon_info], temp_folder, addon_downloader) assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \ - "Tried to update" + "Addon should already exist" From b2999a7bbd402154570140fd1db72f6a62158d60 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Sep 2022 14:46:12 +0200 Subject: [PATCH 0966/2550] OP-3682 - Hound --- distribution/addon_distribution.py | 2 +- distribution/tests/test_addon_distributtion.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/addon_distribution.py b/distribution/addon_distribution.py index 0e3c672915..389b92b10b 100644 --- a/distribution/addon_distribution.py +++ b/distribution/addon_distribution.py @@ -235,4 +235,4 @@ def check_addons(server_endpoint, addon_folder, downloaders): def cli(*args): - raise NotImplemented \ No newline at end of file + raise NotImplemented diff --git a/distribution/tests/test_addon_distributtion.py b/distribution/tests/test_addon_distributtion.py index 717ef1330e..c6ecaca3c8 100644 --- a/distribution/tests/test_addon_distributtion.py +++ b/distribution/tests/test_addon_distributtion.py @@ -51,7 +51,7 @@ def sample_addon_info(): } } ], - "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" + "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" # noqa } yield addon_info From 57aa1e6659ab9552c1980dbebcd8b64535469f39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 7 Sep 2022 14:49:55 +0200 Subject: [PATCH 0967/2550] implementing not retimed working frame range and retimed handles switch --- openpype/hosts/flame/api/plugin.py | 10 ++++++ .../flame/plugins/create/create_shot_clip.py | 16 ++++++++++ .../publish/collect_timeline_instances.py | 4 +++ .../publish/extract_subset_resources.py | 32 +++++++++++++------ .../publish/collect_otio_frame_ranges.py | 6 ++++ 5 files changed, 58 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index efbabb6a55..145b1f0921 100644 --- 
a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -361,6 +361,8 @@ class PublishableClip: index_from_segment_default = False use_shot_name_default = False include_handles_default = False + retimed_handles_default = True + retimed_framerange_default = True def __init__(self, segment, **kwargs): self.rename_index = kwargs["rename_index"] @@ -496,6 +498,14 @@ class PublishableClip: "audio", {}).get("value") or False self.include_handles = self.ui_inputs.get( "includeHandles", {}).get("value") or self.include_handles_default + self.retimed_handles = ( + self.ui_inputs.get("retimedHandles", {}).get("value") + or self.retimed_handles_default + ) + self.retimed_framerange = ( + self.ui_inputs.get("retimedFramerange", {}).get("value") + or self.retimed_framerange_default + ) # build subset name from layer name if self.subset_name == "[ track name ]": diff --git a/openpype/hosts/flame/plugins/create/create_shot_clip.py b/openpype/hosts/flame/plugins/create/create_shot_clip.py index fa239ea420..b03a39a7ca 100644 --- a/openpype/hosts/flame/plugins/create/create_shot_clip.py +++ b/openpype/hosts/flame/plugins/create/create_shot_clip.py @@ -276,6 +276,22 @@ class CreateShotClip(opfapi.Creator): "target": "tag", "toolTip": "By default handles are excluded", # noqa "order": 3 + }, + "retimedHandles": { + "value": True, + "type": "QCheckBox", + "label": "Retimed handles", + "target": "tag", + "toolTip": "By default handles are retimed.", # noqa + "order": 4 + }, + "retimedFramerange": { + "value": True, + "type": "QCheckBox", + "label": "Retimed framerange", + "target": "tag", + "toolTip": "By default framerange is retimed.", # noqa + "order": 5 } } } diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 992db62c75..d6ff13b059 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -131,6 +131,10 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "fps": self.fps, "workfileFrameStart": workfile_start, "sourceFirstFrame": int(first_frame), + "notRetimedHandles": ( + not marker_data.get("retimedHandles")), + "notRetimedFramerange": ( + not marker_data.get("retimedFramerange")), "path": file_path, "flameAddTasks": self.add_tasks, "tasks": { diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 3e1e8db986..1af6b00654 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -90,26 +90,38 @@ class ExtractSubsetResources(openpype.api.Extractor): handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) include_handles = instance.data.get("includeHandles") + retimed_handles = instance.data.get("retimedHandles") # get media source range with handles source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] - # retime if needed + + # retime if needed if r_speed != 1.0: - source_start_handles = ( - instance.data["sourceStart"] - r_handle_start) - source_end_handles = ( - source_start_handles - + (r_source_dur - 1) - + r_handle_start - + r_handle_end - ) + if retimed_handles: + # handles are retimed + source_start_handles = ( + instance.data["sourceStart"] - r_handle_start) + source_end_handles = ( + source_start_handles + + 
(r_source_dur - 1) + + r_handle_start + + r_handle_end + ) + else: + # handles are not retimed + source_end_handles = ( + source_start_handles + + (r_source_dur - 1) + + handle_start + + handle_end + ) # get frame range with handles for representation range frame_start_handle = frame_start - handle_start repre_frame_start = frame_start_handle if include_handles: - if r_speed == 1.0: + if r_speed == 1.0 or not retimed_handles: frame_start_handle = frame_start else: frame_start_handle = ( diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index 40e89e29bc..40a3fa6978 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -29,6 +29,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): # get basic variables otio_clip = instance.data["otioClip"] workfile_start = instance.data["workfileFrameStart"] + not_retime_framerange = instance.data.get("notRetimedFramerange") # get ranges otio_tl_range = otio_clip.range_in_parent() @@ -54,6 +55,11 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): frame_end = frame_start + otio.opentime.to_frames( otio_tl_range.duration, otio_tl_range.duration.rate) - 1 + # in case of retimed clip and frame range should not be retimed + if not_retime_framerange: + frame_end = frame_start + otio.opentime.to_frames( + otio_src_range.duration, otio_src_range.duration.rate) - 1 + data = { "frameStart": frame_start, "frameEnd": frame_end, From d4eeabad7e0883bb2f2d31e0b8479e4b547ba7cc Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:17:49 +0800 Subject: [PATCH 0968/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 6 +++--- openpype/hosts/maya/lib.py | 5 +++-- .../schemas/projects_schema/schema_project_maya.json | 1 + 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 4578d6fb39..6012d82263 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -59,7 +59,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): self._op_events = {} def install(self): - project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + project_settings = get_project_settings(legacy_io.active_project()) # process path mapping dirmap_processor = MayaDirmap("maya", project_settings) dirmap_processor.process_dirmap() @@ -536,7 +536,7 @@ def on_task_changed(): lib.update_content_on_context_change() msg = " project: {}\n asset: {}\n task:{}".format( - legacy_io.Session["AVALON_PROJECT"], + legacy_io.active_project(), legacy_io.Session["AVALON_ASSET"], legacy_io.Session["AVALON_TASK"] ) @@ -548,7 +548,7 @@ def on_task_changed(): def before_workfile_save(event): - project_name = os.getenv("AVALON_PROJECT") + project_name = legacy_io.active_project() workdir_path = event["workdir_path"] if workdir_path: create_workspace_mel(workdir_path, project_name) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 443bf7d10e..e466850810 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,6 @@ import os from openpype.settings import get_project_settings - +from openpype.api import Logger def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") @@ -15,7 +15,8 @@ def create_workspace_mel(workdir, project_name): # Skip if mel script in settings 
is empty if not mel_script: - return + log = Logger.get_logger("create_workspace_mel") + log.debug("File 'workspace.mel' not created. Settings value is empty.") with open(dst_filepath, "w") as mel_file: mel_file.write(mel_script) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index a54f8e6e4f..72c974642f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -8,6 +8,7 @@ { "type": "text", "multiline" : true, + "use_label_wrap": true, "key": "mel_workspace", "label": "Maya MEL Workspace" }, From a6d7df1423fdd5ec37744a34042e01a76776f263 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:19:15 +0800 Subject: [PATCH 0969/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index e466850810..e07e174dd6 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -2,6 +2,7 @@ import os from openpype.settings import get_project_settings from openpype.api import Logger + def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): From b7256e7c19ba376e438a88ca5d0b4a9609a44423 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:49:02 +0800 Subject: [PATCH 0970/2550] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index e07e174dd6..6f7bb8f986 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,6 @@ import os from openpype.settings import get_project_settings -from openpype.api import Logger +from openpype.lib import Logger def create_workspace_mel(workdir, project_name): From d2b3c80fb93acf26f9d61a6164dadf2941cd7930 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 7 Sep 2022 16:02:38 +0200 Subject: [PATCH 0971/2550] improving variable name --- openpype/plugins/publish/collect_otio_frame_ranges.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index 40a3fa6978..cfb0318950 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -29,7 +29,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): # get basic variables otio_clip = instance.data["otioClip"] workfile_start = instance.data["workfileFrameStart"] - not_retime_framerange = instance.data.get("notRetimedFramerange") + workfile_source_duration = instance.data.get("notRetimedFramerange") # get ranges otio_tl_range = otio_clip.range_in_parent() @@ -56,7 +56,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): otio_tl_range.duration, otio_tl_range.duration.rate) - 1 # in case of retimed clip and frame range should not be retimed - if not_retime_framerange: + if workfile_source_duration: frame_end = frame_start + otio.opentime.to_frames( otio_src_range.duration, otio_src_range.duration.rate) - 1 From 9bcd86bac7ca0294fc41bc3d2465166b3b5e8861 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 22:12:16 
+0800 Subject: [PATCH 0972/2550] load and edit mel workspace within the Openpype project settings --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 6f7bb8f986..ffb2f0b27c 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -18,6 +18,7 @@ def create_workspace_mel(workdir, project_name): if not mel_script: log = Logger.get_logger("create_workspace_mel") log.debug("File 'workspace.mel' not created. Settings value is empty.") + return with open(dst_filepath, "w") as mel_file: mel_file.write(mel_script) From 0d6c40bb32fff14cb08cc33505acf298555b95e7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Sep 2022 19:13:00 +0200 Subject: [PATCH 0973/2550] added few missing abstract methods and attributes --- .../pipeline/workfile/new_template_loader.py | 68 ++++++++++++++++++- 1 file changed, 66 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 82cb2d9974..4b77168aa1 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -1,5 +1,6 @@ import os import collections +import copy from abc import ABCMeta, abstractmethod import six @@ -43,6 +44,8 @@ class AbstractTemplateLoader: # Where created objects of placeholder plugins will be stored self._placeholder_plugins = None + self._loaders_by_name = None + self._creators_by_name = None project_name = legacy_io.active_project() asset_name = legacy_io.Session["AVALON_ASSET"] @@ -180,12 +183,29 @@ class AbstractTemplateLoader: self.log.warning( "Failed to initialize placeholder plugin {}".format( cls.__name__ - ) + ), + exc_info=True ) self._placeholder_plugins = placeholder_plugins return self._placeholder_plugins + def create_placeholder(self, plugin_identifier, placeholder_data): + """Create new placeholder using plugin identifier and data. + + Args: + plugin_identifier (str): Identifier of plugin. That's how builder + know which plugin should be used. + placeholder_data (Dict[str, Any]): Placeholder item data. They + should match options required by the plugin. + + Returns: + PlaceholderItem: Created placeholder item. + """ + + plugin = self.placeholder_plugins[plugin_identifier] + return plugin.create_placeholder(placeholder_data) + def get_placeholders(self): """Collect placeholder items from scene. @@ -197,7 +217,7 @@ class AbstractTemplateLoader: """ placeholders = [] - for placeholder_plugin in self.placeholder_plugins: + for placeholder_plugin in self.placeholder_plugins.values(): result = placeholder_plugin.collect_placeholders() if result: placeholders.extend(result) @@ -450,6 +470,42 @@ class PlaceholderPlugin(object): return self.__class__.__name__ + @abstractmethod + def create_placeholder(self, placeholder_data): + """Create new placeholder in scene and get it's item. + + It matters on the plugin implementation if placeholder will use + selection in scene or create new node. + + Args: + placeholder_data (Dict[str, Any]): Data that were created + based on attribute definitions from 'get_placeholder_options'. + + Returns: + PlaceholderItem: Created placeholder item. + """ + + pass + + @abstractmethod + def update_placeholder(self, placeholder_item, placeholder_data): + """Update placeholder item with new data. + + New data should be propagated to object of placeholder item itself + and also into the scene. 
+ + Reason: + Some placeholder plugins may require some special way how the + updates should be propagated to object. + + Args: + placeholder_item (PlaceholderItem): Object of placeholder that + should be updated. + placeholder_data (Dict[str, Any]): Data related to placeholder. + Should match plugin options. + """ + pass + @abstractmethod def collect_placeholders(self): """Collect placeholders from scene. @@ -614,6 +670,14 @@ class PlaceholderItem(object): return self._data + def to_dict(self): + """Create copy of item's data. + + Returns: + Dict[str, Any]: Placeholder data. + """ + return copy.deepcopy(self.data) + @property def log(self): if self._log is None: From 5627c8ec627e4157e45eaeeb5a58e12216979f7c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Sep 2022 11:23:56 +0200 Subject: [PATCH 0974/2550] enabled pixmap scaling in tray --- openpype/tools/tray/pype_tray.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 85bc00ead6..c32a074fd1 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -778,6 +778,14 @@ def main(): if not app: app = QtWidgets.QApplication([]) + for attr_name in ( + "AA_EnableHighDpiScaling", + "AA_UseHighDpiPixmaps" + ): + attr = getattr(QtCore.Qt, attr_name, None) + if attr is not None: + app.setAttribute(attr) + starter = PypeTrayStarter(app) # TODO remove when pype.exe will have an icon From 4f3accee1ac84991543493a32768c1aa99672035 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Sep 2022 11:34:33 +0200 Subject: [PATCH 0975/2550] change "hierarchy" key to "nodesHierarchy" in maya --- openpype/hosts/maya/plugins/publish/collect_assembly.py | 2 +- openpype/hosts/maya/plugins/publish/extract_assembly.py | 2 +- .../hosts/maya/plugins/publish/validate_assembly_transforms.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_assembly.py b/openpype/hosts/maya/plugins/publish/collect_assembly.py index 1a65bf1fde..2aef9ab908 100644 --- a/openpype/hosts/maya/plugins/publish/collect_assembly.py +++ b/openpype/hosts/maya/plugins/publish/collect_assembly.py @@ -70,7 +70,7 @@ class CollectAssembly(pyblish.api.InstancePlugin): data[representation_id].append(instance_data) instance.data["scenedata"] = dict(data) - instance.data["hierarchy"] = list(set(hierarchy_nodes)) + instance.data["nodesHierarchy"] = list(set(hierarchy_nodes)) def get_file_rule(self, rule): return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule)) diff --git a/openpype/hosts/maya/plugins/publish/extract_assembly.py b/openpype/hosts/maya/plugins/publish/extract_assembly.py index 482930b76e..120805894e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_assembly.py +++ b/openpype/hosts/maya/plugins/publish/extract_assembly.py @@ -33,7 +33,7 @@ class ExtractAssembly(openpype.api.Extractor): json.dump(instance.data["scenedata"], filepath, ensure_ascii=False) self.log.info("Extracting point cache ..") - cmds.select(instance.data["hierarchy"]) + cmds.select(instance.data["nodesHierarchy"]) # Run basic alembic exporter extract_alembic(file=hierarchy_path, diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py index f793846555..fb25b617be 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py @@ -48,7 +48,7 @@ 
class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): from openpype.hosts.maya.api import lib # Get all transforms in the loaded containers - container_roots = cmds.listRelatives(instance.data["hierarchy"], + container_roots = cmds.listRelatives(instance.data["nodesHierarchy"], children=True, type="transform", fullPath=True) From 1c7f32e93a750cd3f97b17fb3fba53054a6e74f5 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Sep 2022 17:43:01 +0800 Subject: [PATCH 0976/2550] adding lock task workfiles when users are working on them --- openpype/hosts/maya/api/pipeline.py | 99 ++++++++++++++++++++- openpype/pipeline/workfile/lock_workfile.py | 67 ++++++++++++++ openpype/tools/workfiles/files_widget.py | 15 ++++ 3 files changed, 179 insertions(+), 2 deletions(-) create mode 100644 openpype/pipeline/workfile/lock_workfile.py diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index c963b5d996..b645b41fa0 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -31,6 +31,12 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers +from openpype.pipeline.workfile.lock_workfile import ( + create_workfile_lock, + get_username, + remove_workfile_lock, + is_workfile_locked +) from openpype.hosts.maya import MAYA_ROOT_DIR from openpype.hosts.maya.lib import copy_workspace_mel @@ -99,7 +105,10 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): register_event_callback("open", on_open) register_event_callback("new", on_new) register_event_callback("before.save", on_before_save) + register_event_callback("before.close", on_before_close) + register_event_callback("before.file.open", before_file_open) register_event_callback("taskChanged", on_task_changed) + register_event_callback("workfile.open.before", before_workfile_open) register_event_callback("workfile.save.before", before_workfile_save) def open_workfile(self, filepath): @@ -161,8 +170,25 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): ) ) - self._op_events[_on_scene_open] = OpenMaya.MSceneMessage.addCallback( - OpenMaya.MSceneMessage.kAfterOpen, _on_scene_open + self._op_events[_on_scene_open] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kAfterOpen, + _on_scene_open + ) + ) + + self._op_events[_before_scene_open] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kBeforeOpen, + _before_scene_open + ) + ) + + self._op_events[_before_close_maya] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kMayaExiting, + _before_close_maya + ) ) self.log.info("Installed event handler _on_scene_save..") @@ -170,6 +196,8 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): self.log.info("Installed event handler _on_scene_new..") self.log.info("Installed event handler _on_maya_initialized..") self.log.info("Installed event handler _on_scene_open..") + self.log.info("Installed event handler _check_lock_file..") + self.log.info("Installed event handler _before_close_maya..") def _set_project(): @@ -216,6 +244,14 @@ def _on_scene_open(*args): emit_event("open") +def _before_close_maya(*args): + emit_event("before.close") + + +def _before_scene_open(*args): + emit_event("before.file.open") + + def _before_scene_save(return_code, client_data): # Default to allowing the action. 
Registered @@ -229,6 +265,12 @@ def _before_scene_save(return_code, client_data): ) +def _remove_workfile_lock(): + filepath = current_file() + if filepath: + remove_workfile_lock(filepath) + + def uninstall(): pyblish.api.deregister_plugin_path(PUBLISH_PATH) pyblish.api.deregister_host("mayabatch") @@ -426,6 +468,49 @@ def on_before_save(): return lib.validate_fps() +def after_file_open(): + """Check if there is a user opening the file""" + log.info("Running callback on checking the lock file...") + + # add the lock file when opening the file + filepath = current_file() + + if not is_workfile_locked(filepath): + create_workfile_lock(filepath) + + else: + username = get_username(filepath) + reminder = cmds.window(title="Reminder", width=400, height=30) + cmds.columnLayout(adjustableColumn=True) + cmds.separator() + cmds.columnLayout(adjustableColumn=True) + comment = " %s is working the same workfile!" % username + cmds.text(comment, align='center') + cmds.text(vis=False) + cmds.rowColumnLayout(numberOfColumns=3, + columnWidth=[(1, 300), (2, 100)], + columnSpacing=[(2, 10)]) + cmds.separator(vis=False) + quit_command = "cmds.quit(force=True);cmds.deleteUI('%s')" % reminder + cmds.button(label='Ok', command=quit_command) + cmds.showWindow(reminder) + + +def on_before_close(): + """Delete the lock file after user quitting the Maya Scene""" + log.info("Closing Maya...") + # delete the lock file + filepath = current_file() + remove_workfile_lock(filepath) + + +def before_file_open(): + """check lock file when the file changed""" + log.info("check lock file when file changed...") + # delete the lock file + _remove_workfile_lock() + + def on_save(): """Automatically add IDs to new nodes @@ -434,6 +519,8 @@ def on_save(): """ log.info("Running callback on save..") + # remove lockfile if users jumps over from one scene to another + _remove_workfile_lock() # # Update current task for the current scene # update_task_from_path(cmds.file(query=True, sceneName=True)) @@ -491,6 +578,9 @@ def on_open(): dialog.on_clicked.connect(_on_show_inventory) dialog.show() + # create lock file for the maya scene + after_file_open() + def on_new(): """Set project resolution and fps when create a new file""" @@ -544,7 +634,12 @@ def on_task_changed(): ) +def before_workfile_open(): + _remove_workfile_lock() + + def before_workfile_save(event): + _remove_workfile_lock() workdir_path = event["workdir_path"] if workdir_path: copy_workspace_mel(workdir_path) diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py new file mode 100644 index 0000000000..03dee66d46 --- /dev/null +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -0,0 +1,67 @@ +import os +import json +from uuid import uuid4 +from openpype.lib.pype_info import get_workstation_info + + +def _read_lock_file(lock_filepath): + with open(lock_filepath, "r") as stream: + data = json.load(stream) + return data + + +def _get_lock_file(filepath): + return filepath + ".lock" + + +def is_workfile_locked(filepath): + lock_filepath = _get_lock_file(filepath) + if not os.path.exists(lock_filepath): + return False + return True + + +def is_workfile_locked_for_current_process(filepath): + if not is_workfile_locked(): + return False + + lock_filepath = _get_lock_file(filepath) + process_id = os.environ["OPENPYPE_PROCESS_ID"] + data = _read_lock_file(lock_filepath) + return data["process_id"] == process_id + + +def delete_workfile_lock(filepath): + lock_filepath = _get_lock_file(filepath) + if not os.path.exists(lock_filepath): 
+ return + + if is_workfile_locked_for_current_process(filepath): + os.remove(filepath) + + +def create_workfile_lock(filepath): + lock_filepath = _get_lock_file(filepath) + process_id = os.environ.get("OPENPYPE_PROCESS_ID") + if not process_id: + process_id = str(uuid4()) + os.environ["OPENPYPE_PROCESS_ID"] = process_id + info = get_workstation_info() + info["process_id"] = process_id + with open(lock_filepath, "w") as stream: + json.dump(info, stream) + + +def get_username(filepath): + lock_filepath = _get_lock_file(filepath) + with open(lock_filepath, "r") as stream: + data = json.load(stream) + username = data["username"] + return username + + +def remove_workfile_lock(filepath): + lock_filepath = _get_lock_file(filepath) + if not os.path.exists(lock_filepath): + return + return os.remove(lock_filepath) \ No newline at end of file diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index a5d5b14bb6..6a554efd8b 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -8,6 +8,10 @@ from Qt import QtWidgets, QtCore from openpype.host import IWorkfileHost from openpype.client import get_asset_by_id +from openpype.pipeline.workfile.lock_workfile import ( + is_workfile_locked, + get_username +) from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate from openpype.lib import ( @@ -454,6 +458,17 @@ class FilesWidget(QtWidgets.QWidget): def open_file(self, filepath): host = self.host + if is_workfile_locked(filepath): + username = get_username(filepath) + popup_dialog = QtWidgets.QMessageBox(parent=self) + popup_dialog.setWindowTitle("Warning") + popup_dialog.setText(username + " is using the file") + popup_dialog.setStandardButtons(popup_dialog.Ok) + + result = popup_dialog.exec_() + if result == popup_dialog.Ok: + return False + if isinstance(host, IWorkfileHost): has_unsaved_changes = host.workfile_has_unsaved_changes() else: From 3782ad4782b2044c20ead45c13a6f457d9a332e5 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Sep 2022 17:47:31 +0800 Subject: [PATCH 0977/2550] adding lock task workfiles when users are working on them --- openpype/pipeline/workfile/lock_workfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py index 03dee66d46..8e75f6fb61 100644 --- a/openpype/pipeline/workfile/lock_workfile.py +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -64,4 +64,4 @@ def remove_workfile_lock(filepath): lock_filepath = _get_lock_file(filepath) if not os.path.exists(lock_filepath): return - return os.remove(lock_filepath) \ No newline at end of file + return os.remove(lock_filepath) From 01d3b0b0b0e2ed72b4ff509d4e0685f44086ef85 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Sep 2022 13:30:27 +0200 Subject: [PATCH 0978/2550] add logs about missing attributes --- openpype/tools/tray/pype_tray.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index c32a074fd1..348573a191 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -9,11 +9,11 @@ import platform from Qt import QtCore, QtGui, QtWidgets import openpype.version -from openpype.api import ( - resources, - get_system_settings +from openpype import resources, style +from openpype.lib import ( + get_openpype_execute_args, + Logger, ) 
-from openpype.lib import get_openpype_execute_args, Logger from openpype.lib.openpype_version import ( op_version_control_available, get_expected_version, @@ -25,8 +25,8 @@ from openpype.lib.openpype_version import ( get_openpype_version, ) from openpype.modules import TrayModulesManager -from openpype import style from openpype.settings import ( + get_system_settings, SystemSettings, ProjectSettings, DefaultsNotDefined @@ -774,6 +774,7 @@ class PypeTrayStarter(QtCore.QObject): def main(): + log = Logger.get_logger(__name__) app = QtWidgets.QApplication.instance() if not app: app = QtWidgets.QApplication([]) @@ -783,7 +784,12 @@ def main(): "AA_UseHighDpiPixmaps" ): attr = getattr(QtCore.Qt, attr_name, None) - if attr is not None: + if attr is None: + log.debug(( + "Missing QtCore.Qt attribute \"{}\"." + " UI quality may be affected." + ).format(attr_name)) + else: app.setAttribute(attr) starter = PypeTrayStarter(app) From d7a768f718944c9322ae30e9167ea64b655be5e5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 8 Sep 2022 15:51:24 +0200 Subject: [PATCH 0979/2550] turn plugin off by default --- openpype/settings/defaults/project_settings/global.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 9258343440..99a2e16a7c 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -4,7 +4,7 @@ "follow_workfile_version": false }, "CollectAudio": { - "enabled": true, + "enabled": false, "audio_subset_name": "audioMain" }, "CollectSceneVersion": { From 26954b9377639b12fdbf3f67e36b0edf86582018 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:08:19 +0200 Subject: [PATCH 0980/2550] :recycle: fix name typo and refactor validator error --- .../publish/help/validate_vdb_input_node.xml | 21 +++++++++ .../plugins/publish/valiate_vdb_input_node.py | 47 ------------------- .../publish/validate_vdb_input_node.py | 13 +++-- 3 files changed, 30 insertions(+), 51 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml delete mode 100644 openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml new file mode 100644 index 0000000000..0f92560bf7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. + + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py deleted file mode 100644 index ac408bc842..0000000000 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ /dev/null @@ -1,47 +0,0 @@ -import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - - -class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB. 
- - Regardless of the amount of VDBs create the output will need to have an - equal amount of VDBs, points, primitives and vertices - - A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - """ - - order = ValidateContentsOrder + 0.1 - families = ["vdbcache"] - hosts = ["houdini"] - label = "Validate Input Node (VDB)" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" - ) - - @classmethod - def get_invalid(cls, instance): - - node = instance.data["output_node"] - - prims = node.geometry().prims() - nr_of_prims = len(prims) - - nr_of_points = len(node.geometry().points()) - if nr_of_points != nr_of_prims: - cls.log.error("The number of primitives and points do not match") - return [instance] - - for prim in prims: - if prim.numVertices() != 1: - cls.log.error("Found primitive with more than 1 vertex!") - return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index ac408bc842..1f9ccc9c42 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import ( + PublishValidationError +) class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +19,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" @@ -24,8 +27,10 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" + raise PublishValidationError( + self, + "Node connected to the output node is not of type VDB", + title=self.label ) @classmethod From 59c13789e6924a700e269c30bec2d62327acbf09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:08:44 +0200 Subject: [PATCH 0981/2550] :rotating_light: fix hound --- openpype/hosts/houdini/plugins/publish/collect_instances.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 0187a1f1d8..0582ee154c 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -63,7 +63,8 @@ class CollectInstances(pyblish.api.ContextPlugin): data.update({"active": not node.isBypassed()}) # temporarily translation of `active` to `publish` till issue has - # been resolved, https://github.com/pyblish/pyblish-base/issues/307 + # been resolved. 
+ # https://github.com/pyblish/pyblish-base/issues/307 if "active" in data: data["publish"] = data["active"] From 3b25a68552c6ec1c41f9351bdfcd5bde6626310f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:09:09 +0200 Subject: [PATCH 0982/2550] :recycle: work on validation errors --- .../publish/help/validate_sop_output_node.xml | 21 +++++++++++++++++++ .../publish/validate_sop_output_node.py | 9 +++++--- 2 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml new file mode 100644 index 0000000000..0f92560bf7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. + + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a2a9c1f4ea..02b650d48e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishXmlValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -22,9 +24,10 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishXmlValidationError( + self, + message="Output node(s) `%s` are incorrect. 
" % invalid, + title=self.label ) @classmethod From 008479022108e013110c22c1eb95e2e026fb2938 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:14:03 +0200 Subject: [PATCH 0983/2550] :pencil2: fix typo in import --- openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index cf8d61cda3..81274c670e 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,6 +1,6 @@ import pyblish.api -from openyppe.client import get_subset_by_name, get_asset_by_name +from openpype.client import get_subset_by_name, get_asset_by_name from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib From 9e1fb2bc6c979b8a31cf3630af2b5ea76e58a337 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:54:10 +0200 Subject: [PATCH 0984/2550] :fire: delete validation error help file --- .../publish/help/validate_sop_output_node.xml | 21 ------------------- 1 file changed, 21 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml deleted file mode 100644 index 0f92560bf7..0000000000 --- a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - -Scene setting - -## Invalid input node - -VDB input must have the same number of VDBs, points, primitives and vertices as output. - - - -### __Detailed Info__ (optional) - -A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - - \ No newline at end of file From 831050799d6a1b1f0b1a51bcbc16f62fbd39f96c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:54:46 +0200 Subject: [PATCH 0985/2550] :bug: pass argument in deprecated function --- openpype/host/interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py index cbf12b0d13..03c731d0e4 100644 --- a/openpype/host/interfaces.py +++ b/openpype/host/interfaces.py @@ -252,7 +252,7 @@ class IWorkfileHost: Remove when all usages are replaced. """ - self.save_workfile() + self.save_workfile(dst_path) def open_file(self, filepath): """Deprecated variant of 'open_workfile'. 
From e1a504ff3a831f5bd3ee5dd36914239613cb7b7c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:55:16 +0200 Subject: [PATCH 0986/2550] :recycle: refactor to new function calls --- openpype/hosts/houdini/plugins/publish/save_scene.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py index 6128c7af77..d6e07ccab0 100644 --- a/openpype/hosts/houdini/plugins/publish/save_scene.py +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -14,13 +14,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): # Filename must not have changed since collecting host = registered_host() - current_file = host.current_file() + current_file = host.get_current_workfile() assert context.data['currentFile'] == current_file, ( "Collected filename from current scene name." ) if host.has_unsaved_changes(): - self.log.info("Saving current file..") - host.save_file(current_file) + self.log.info("Saving current file {}...".format(current_file)) + host.save_workfile(current_file) else: self.log.debug("No unsaved changes, skipping file save..") From 49dff63f08207eea0218cf37e4824795d08e3895 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 17:50:55 +0200 Subject: [PATCH 0987/2550] Fix detection of workfile instance --- openpype/modules/deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index f698b7688e..512ff800ee 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -519,7 +519,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): instance = self._instance workfile_instance = self._get_workfile_instance(instance.context) - if not workfile_instance: + if workfile_instance is None: return # determine published path from Anatomy. 
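The switch from a truthiness check to `is None` above matters because a pyblish instance behaves like a list of its member nodes, so a collected workfile instance with no members is falsy even though it exists, and `if not workfile_instance` would wrongly treat it as missing. A small self-contained illustration, using a simplified stand-in class rather than the real pyblish.api.Instance:

class FakeInstance(list):
    # Simplified stand-in: pyblish instances store their members on
    # the instance itself, so an empty instance evaluates as False.
    def __init__(self, name, members=None):
        super(FakeInstance, self).__init__(members or [])
        self.name = name


workfile_instance = FakeInstance("workfileCompositing")  # exists, but empty

if not workfile_instance:
    print("truthiness check: existing empty instance is skipped by mistake")

if workfile_instance is None:
    print("never reached")
else:
    print("'is None' check: the empty instance is still processed")
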
From 50a9a7973b32a3fb38b1f42861288ffb44ed823f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Sep 2022 18:31:15 +0200 Subject: [PATCH 0988/2550] small modifications of placeholder identifiers and errors --- .../pipeline/workfile/new_template_loader.py | 45 ++++++++----------- 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 4b77168aa1..b1231c2308 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -299,7 +299,7 @@ class AbstractTemplateLoader: return placeholder_by_scene_id = { - placeholder.identifier: placeholder + placeholder.scene_identifier: placeholder for placeholder in placeholders } all_processed = len(placeholders) == 0 @@ -343,11 +343,11 @@ class AbstractTemplateLoader: all_processed = True collected_placeholders = self.get_placeholders() for placeholder in collected_placeholders: - if placeholder.identifier in placeholder_by_scene_id: + identifier = placeholder.scene_identifier + if identifier in placeholder_by_scene_id: continue all_processed = False - identifier = placeholder.identifier placeholder_by_scene_id[identifier] = placeholder placeholders.append(placeholder) @@ -434,7 +434,6 @@ class AbstractTemplateLoader: @six.add_metaclass(ABCMeta) class PlaceholderPlugin(object): label = None - placeholder_options = [] _log = None def __init__(self, builder): @@ -516,14 +515,14 @@ class PlaceholderPlugin(object): pass - def get_placeholder_options(self): + def get_placeholder_options(self, options=None): """Placeholder options for data showed. Returns: List[AbtractAttrDef]: Attribute definitions of placeholder options. """ - return self.placeholder_options + return [] def prepare_placeholders(self, placeholders): """Preparation part of placeholders. @@ -629,8 +628,9 @@ class PlaceholderItem(object): # Keep track about state of Placeholder process self._state = 0 - # Exception which happened during processing - self._error = None + # Error messages to be shown in UI + # - all other messages should be logged + self._errors = [] # -> List[str] @property def plugin(self): @@ -676,6 +676,7 @@ class PlaceholderItem(object): Returns: Dict[str, Any]: Placeholder data. """ + return copy.deepcopy(self.data) @property @@ -710,21 +711,6 @@ class PlaceholderItem(object): return self._state == 1 - @property - def failed(self): - """Processing of placeholder failed.""" - - return self._error is not None - - @property - def error(self): - """Exception with which the placeholder process failed. - - Gives ability to access the exception. - """ - - return self._error - def set_in_progress(self): """Change to in progress state.""" @@ -735,8 +721,15 @@ class PlaceholderItem(object): self._state = 2 - def set_error(self, error): + def add_error(self, error): """Set placeholder item as failed and mark it as finished.""" - self._error = error - self.set_finished() + self._errors.append(error) + + def get_errors(self): + """Exception with which the placeholder process failed. + + Gives ability to access the exception. 
+ """ + + return self._errors From 2eef3a8f826b614b1f496d6aa05b02da6a328a02 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Sep 2022 18:31:39 +0200 Subject: [PATCH 0989/2550] initial commit of maya implementation of new template builder --- .../hosts/maya/api/new_template_builder.py | 505 ++++++++++++++++++ 1 file changed, 505 insertions(+) create mode 100644 openpype/hosts/maya/api/new_template_builder.py diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py new file mode 100644 index 0000000000..023f240061 --- /dev/null +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -0,0 +1,505 @@ +import re +from maya import cmds + +from openpype.client import get_representations +from openpype.lib import attribute_definitions +from openpype.pipeline import legacy_io +from openpype.pipeline.workfile.build_template_exceptions import ( + TemplateAlreadyImported +) +from openpype.pipeline.workfile.new_template_loader import ( + AbstractTemplateLoader, + PlaceholderPlugin, + PlaceholderItem, +) + +from .lib import read, imprint + +PLACEHOLDER_SET = "PLACEHOLDERS_SET" + + +class MayaTemplateLoader(AbstractTemplateLoader): + """Concrete implementation of AbstractTemplateLoader for maya""" + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded. + + Args: + path (str): A path to current template (usually given by + get_template_path implementation) + + Returns: + bool: Wether the template was succesfully imported or not + """ + + if cmds.objExists(PLACEHOLDER_SET): + raise TemplateAlreadyImported(( + "Build template already loaded\n" + "Clean scene if needed (File > New Scene)" + )) + + cmds.sets(name=PLACEHOLDER_SET, empty=True) + cmds.file(path, i=True, returnNewNodes=True) + + cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True) + + # This should be handled by creators + # for set_name in cmds.listSets(allSets=True): + # if ( + # cmds.objExists(set_name) + # and cmds.attributeQuery('id', node=set_name, exists=True) + # and cmds.getAttr(set_name + '.id') == 'pyblish.avalon.instance' + # ): + # if cmds.attributeQuery('asset', node=set_name, exists=True): + # cmds.setAttr( + # set_name + '.asset', + # legacy_io.Session['AVALON_ASSET'], type='string' + # ) + + return True + + def get_placeholder_plugin_classes(self): + return [ + MayaLoadPlaceholderPlugin + ] + + # def template_already_imported(self, err_msg): + # clearButton = "Clear scene and build" + # updateButton = "Update template" + # abortButton = "Abort" + # + # title = "Scene already builded" + # message = ( + # "It's seems a template was already build for this scene.\n" + # "Error message reveived :\n\n\"{}\"".format(err_msg)) + # buttons = [clearButton, updateButton, abortButton] + # defaultButton = clearButton + # cancelButton = abortButton + # dismissString = abortButton + # answer = cmds.confirmDialog( + # t=title, + # m=message, + # b=buttons, + # db=defaultButton, + # cb=cancelButton, + # ds=dismissString) + # + # if answer == clearButton: + # cmds.file(newFile=True, force=True) + # self.import_template(self.template_path) + # self.populate_template() + # elif answer == updateButton: + # self.update_missing_containers() + # elif answer == abortButton: + # return + + # def get_loaded_containers_by_id(self): + # try: + # containers = cmds.sets("AVALON_CONTAINERS", q=True) + # except ValueError: + # return None + # + # return [ + # cmds.getAttr(container + '.representation') + # for container in containers] 
+ + +class MayaLoadPlaceholderPlugin(PlaceholderPlugin): + identifier = "maya.load" + label = "Maya load" + + def _collect_scene_placeholders(self): + # Cache placeholder data to shared data + placeholder_nodes = self.builder.get_shared_data("placeholder_nodes") + if placeholder_nodes is None: + attributes = cmds.ls("*.plugin_identifier", long=True) + placeholder_nodes = [ + self._parse_placeholder_node_data(attribute.rpartition(".")[0]) + for attribute in attributes + ] + self.builder.set_shared_data( + "placeholder_nodes", placeholder_nodes + ) + return placeholder_nodes + + def _parse_placeholder_node_data(self, node_name): + placeholder_data = read(node_name) + parent_name = ( + cmds.getAttr(node_name + ".parent", asString=True) + or node_name.rpartition("|")[0] + or "" + ) + if parent_name: + siblings = cmds.listRelatives(parent_name, children=True) + else: + siblings = cmds.ls(assemblies=True) + node_shortname = node_name.rpartition("|")[2] + current_index = cmds.getAttr(node_name + ".index", asString=True) + if current_index < 0: + current_index = siblings.index(node_shortname) + + placeholder_data.update({ + "parent": parent_name, + "index": current_index + }) + return placeholder_data + + def _create_placeholder_name(self, placeholder_data): + # TODO implement placeholder name logic + return "Placeholder" + + def create_placeholder(self, placeholder_data): + selection = cmds.ls(selection=True) + if not selection: + raise ValueError("Nothing is selected") + if len(selection) > 1: + raise ValueError("More then one item are selected") + + placeholder_data["plugin_identifier"] = self.identifier + + placeholder_name = self._create_placeholder_name(placeholder_data) + + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + + # TODO: this can crash if selection can't be used + cmds.parent(placeholder, selection[0]) + + # get the long name of the placeholder (with the groups) + placeholder_full_name = ( + cmds.ls(selection[0], long=True)[0] + + "|" + + placeholder.replace("|", "") + ) + + imprint(placeholder_full_name, placeholder_data) + + # Add helper attributes to keep placeholder info + cmds.addAttr( + placeholder_full_name, + longName="parent", + hidden=True, + dataType="string" + ) + cmds.addAttr( + placeholder_full_name, + longName="index", + hidden=True, + attributeType="short", + defaultValue=-1 + ) + + cmds.setAttr(placeholder_full_name + ".parent", "", type="string") + + def update_placeholder(self, placeholder_item, placeholder_data): + node_name = placeholder_item.scene_identifier + new_values = {} + for key, value in placeholder_data.items(): + placeholder_value = placeholder_item.data.get(key) + if value != placeholder_value: + new_values[key] = value + placeholder_item.data[key] = value + + imprint(node_name, new_values) + + def collect_placeholders(self): + filtered_placeholders = [] + for placeholder_data in self._collect_scene_placeholders(): + if placeholder_data.get("plugin_identifier") != self.identifier: + continue + + filtered_placeholders.append(placeholder_data) + + output = [] + for placeholder_data in filtered_placeholders: + # TODO do data validations and maybe updgrades if are invalid + output.append(LoadPlaceholder(placeholder_data)) + return output + + def process_placeholder(self, placeholder): + current_asset_doc = self.current_asset_doc + linked_assets = self.linked_assets + loader_name = placeholder.data["loader"] + loader_args = placeholder.data["loader_args"] + + # TODO check loader existence + placeholder_representations = 
placeholder.get_representations( + current_asset_doc, + linked_assets + ) + + if not placeholder_representations: + self.log.info(( + "There's no representation for this placeholder: {}" + ).format(placeholder.scene_identifier)) + return + + loaders_by_name = self.builder.get_loaders_by_name() + for representation in placeholder_representations: + repre_context = representation["context"] + self.log.info( + "Loading {} from {} with loader {}\n" + "Loader arguments used : {}".format( + repre_context["subset"], + repre_context["asset"], + loader_name, + loader_args + ) + ) + try: + container = self.load( + placeholder, loaders_by_name, representation) + except Exception: + placeholder.load_failed(representation) + + else: + placeholder.load_succeed(container) + # TODO find out if 'postload make sense?' + # finally: + # self.postload(placeholder) + + def get_placeholder_options(self, options=None): + loaders_by_name = self.builder.get_loaders_by_name() + loader_names = list(sorted(loaders_by_name.keys())) + options = options or {} + return [ + attribute_definitions.UISeparatorDef(), + attribute_definitions.UILabelDef("Main attributes"), + + attribute_definitions.EnumDef( + "builder_type", + label="Asset Builder Type", + default=options.get("builder_type"), + items=[ + ("context_asset", "Current asset"), + ("linked_asset", "Linked assets"), + ("all_assets", "All assets") + ], + tooltip=( + "Asset Builder Type\n" + "\nBuilder type describe what template loader will look" + " for." + "\ncontext_asset : Template loader will look for subsets" + " of current context asset (Asset bob will find asset)" + "\nlinked_asset : Template loader will look for assets" + " linked to current context asset." + "\nLinked asset are looked in database under" + " field \"inputLinks\"" + ) + ), + attribute_definitions.TextDef( + "family", + label="Family", + default=options.get("family"), + placeholder="model, look, ..." + ), + attribute_definitions.TextDef( + "representation", + label="Representation name", + default=options.get("representation"), + placeholder="ma, abc, ..." + ), + attribute_definitions.EnumDef( + "loader", + label="Loader", + default=options.get("loader"), + items=[ + (loader_name, loader_name) + for loader_name in loader_names + ], + tooltip="""Loader +Defines what OpenPype loader will be used to load assets. +Useable loader depends on current host's loader list. +Field is case sensitive. +""" + ), + attribute_definitions.TextDef( + "loader_args", + label="Loader Arguments", + default=options.get("loader_args"), + placeholder='{"camera":"persp", "lights":True}', + tooltip="""Loader +Defines a dictionnary of arguments used to load assets. +Useable arguments depend on current placeholder Loader. +Field should be a valid python dict. Anything else will be ignored. 
+""" + ), + attribute_definitions.NumberDef( + "order", + label="Order", + default=options.get("order") or 0, + decimals=0, + minimum=0, + maximum=999, + tooltip="""Order +Order defines asset loading priority (0 to 999) +Priority rule is : "lowest is first to load".""" + ), + attribute_definitions.UISeparatorDef(), + attribute_definitions.UILabelDef("Optional attributes"), + attribute_definitions.UISeparatorDef(), + attribute_definitions.TextDef( + "asset", + label="Asset filter", + default=options.get("asset"), + placeholder="regex filtering by asset name", + tooltip=( + "Filtering assets by matching field regex to asset's name" + ) + ), + attribute_definitions.TextDef( + "subset", + label="Subset filter", + default=options.get("subset"), + placeholder="regex filtering by subset name", + tooltip=( + "Filtering assets by matching field regex to subset's name" + ) + ), + attribute_definitions.TextDef( + "hierarchy", + label="Hierarchy filter", + default=options.get("hierarchy"), + placeholder="regex filtering by asset's hierarchy", + tooltip=( + "Filtering assets by matching field asset's hierarchy" + ) + ) + ] + + +class LoadPlaceholder(PlaceholderItem): + """Concrete implementation of AbstractPlaceholder for maya + """ + + def __init__(self, *args, **kwargs): + super(LoadPlaceholder, self).__init__(*args, **kwargs) + self._failed_representations = [] + + def parent_in_hierarchy(self, container): + """Parent loaded container to placeholder's parent. + + ie : Set loaded content as placeholder's sibling + + Args: + container (str): Placeholder loaded containers + """ + + if not container: + return + + roots = cmds.sets(container, q=True) + nodes_to_parent = [] + for root in roots: + if root.endswith("_RN"): + refRoot = cmds.referenceQuery(root, n=True)[0] + refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot] + nodes_to_parent.extend(refRoot) + elif root not in cmds.listSets(allSets=True): + nodes_to_parent.append(root) + + elif not cmds.sets(root, q=True): + return + + if self.data['parent']: + cmds.parent(nodes_to_parent, self.data['parent']) + # Move loaded nodes to correct index in outliner hierarchy + placeholder_form = cmds.xform( + self._scene_identifier, + q=True, + matrix=True, + worldSpace=True + ) + for node in set(nodes_to_parent): + cmds.reorder(node, front=True) + cmds.reorder(node, relative=self.data['index']) + cmds.xform(node, matrix=placeholder_form, ws=True) + + holding_sets = cmds.listSets(object=self._scene_identifier) + if not holding_sets: + return + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) + + def clean(self): + """Hide placeholder, parent them to root + add them to placeholder set and register placeholder's parent + to keep placeholder info available for future use + """ + + node = self._scene_identifier + if self.data['parent']: + cmds.setAttr(node + '.parent', self.data['parent'], type='string') + if cmds.getAttr(node + '.index') < 0: + cmds.setAttr(node + '.index', self.data['index']) + + holding_sets = cmds.listSets(object=node) + if holding_sets: + for set in holding_sets: + cmds.sets(node, remove=set) + + if cmds.listRelatives(node, p=True): + node = cmds.parent(node, world=True)[0] + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr(node + '.hiddenInOutliner', True) + + def get_representations(self, current_asset_doc, linked_asset_docs): + project_name = legacy_io.active_project() + + builder_type = self.data["builder_type"] + if builder_type == "context_asset": + context_filters = { + 
"asset": [current_asset_doc["name"]], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representations": [self.data["representation"]], + "family": [self.data["family"]] + } + + elif builder_type != "linked_asset": + context_filters = { + "asset": [re.compile(self.data["asset"])], + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]] + } + + else: + asset_regex = re.compile(self.data["asset"]) + linked_asset_names = [] + for asset_doc in linked_asset_docs: + asset_name = asset_doc["name"] + if asset_regex.match(asset_name): + linked_asset_names.append(asset_name) + + context_filters = { + "asset": linked_asset_names, + "subset": [re.compile(self.data["subset"])], + "hierarchy": [re.compile(self.data["hierarchy"])], + "representation": [self.data["representation"]], + "family": [self.data["family"]], + } + + return list(get_representations( + project_name, + context_filters=context_filters + )) + + def get_errors(self): + if not self._failed_representations: + return [] + message = ( + "Failed to load {} representations using Loader {}" + ).format( + len(self._failed_representations), + self.data["loader"] + ) + return [message] + + def load_failed(self, representation): + self._failed_representations.append(representation) + + def load_succeed(self, container): + self.parent_in_hierarchy(container) From 5fe6d2606ddc69e265f89584326ee19939c4b2c8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Sep 2022 18:32:20 +0200 Subject: [PATCH 0990/2550] added simple toolt to be able show attribute definitionis for workfile template builder --- .../tools/workfile_template_build/__init__.py | 5 + .../tools/workfile_template_build/window.py | 232 ++++++++++++++++++ 2 files changed, 237 insertions(+) create mode 100644 openpype/tools/workfile_template_build/__init__.py create mode 100644 openpype/tools/workfile_template_build/window.py diff --git a/openpype/tools/workfile_template_build/__init__.py b/openpype/tools/workfile_template_build/__init__.py new file mode 100644 index 0000000000..70b8867759 --- /dev/null +++ b/openpype/tools/workfile_template_build/__init__.py @@ -0,0 +1,5 @@ +from .window import WorkfileBuildDialog + +__all__ = ( + "WorkfileBuildDialog", +) diff --git a/openpype/tools/workfile_template_build/window.py b/openpype/tools/workfile_template_build/window.py new file mode 100644 index 0000000000..a5cec465ec --- /dev/null +++ b/openpype/tools/workfile_template_build/window.py @@ -0,0 +1,232 @@ +from Qt import QtWidgets + +from openpype import style +from openpype.lib import Logger +from openpype.pipeline import legacy_io +from openpype.widgets.attribute_defs import AttributeDefinitionsWidget + + +class WorkfileBuildDialog(QtWidgets.QDialog): + def __init__(self, host, builder, parent=None): + super(WorkfileBuildDialog, self).__init__(parent) + self.setWindowTitle("Workfile Placeholder Manager") + + self._log = None + + self._first_show = True + self._first_refreshed = False + + self._builder = builder + self._host = host + # Mode can be 0 (create) or 1 (update) + # TODO write it a little bit better + self._mode = 0 + self._update_item = None + self._last_selected_plugin = None + + host_name = getattr(self._host, "name", None) + if not host_name: + host_name = legacy_io.Session.get("AVALON_APP") or "NA" + self._host_name = host_name + + plugins_combo = QtWidgets.QComboBox(self) + + content_widget = 
QtWidgets.QWidget(self) + content_layout = QtWidgets.QVBoxLayout(content_widget) + content_layout.setContentsMargins(0, 0, 0, 0) + + btns_widget = QtWidgets.QWidget(self) + create_btn = QtWidgets.QPushButton("Create", btns_widget) + save_btn = QtWidgets.QPushButton("Save", btns_widget) + close_btn = QtWidgets.QPushButton("Close", btns_widget) + + create_btn.setVisible(False) + save_btn.setVisible(False) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.addStretch(1) + btns_layout.addWidget(create_btn, 0) + btns_layout.addWidget(save_btn, 0) + btns_layout.addWidget(close_btn, 0) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(plugins_combo, 0) + main_layout.addWidget(content_widget, 1) + main_layout.addWidget(btns_widget, 0) + + create_btn.clicked.connect(self._on_create_click) + save_btn.clicked.connect(self._on_save_click) + close_btn.clicked.connect(self._on_close_click) + plugins_combo.currentIndexChanged.connect(self._on_plugin_change) + + self._attr_defs_widget = None + self._plugins_combo = plugins_combo + + self._content_widget = content_widget + self._content_layout = content_layout + + self._create_btn = create_btn + self._save_btn = save_btn + self._close_btn = close_btn + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + def _clear_content_widget(self): + while self._content_layout.count() > 0: + item = self._content_layout.takeAt(0) + widget = item.widget + if widget: + widget.setVisible(False) + widget.deleteLater() + + def _add_message_to_content(self, message): + msg_label = QtWidgets.QLabel(message, self._content_widget) + self._content_layout.addWidget(msg_label, 0) + self._content_layout.addStretch(1) + + def refresh(self): + self._first_refreshed = True + self._clear_content_widget() + + if not self._builder: + self._add_message_to_content(( + "Host \"{}\" does not have implemented logic" + " for template workfile build." + ).format(self._host_name)) + self._update_ui_visibility() + return + + if self._mode == 1: + self._update_ui_visibility() + return + + placeholder_plugins = builder.placeholder_plugins + if not placeholder_plugins: + self._add_message_to_content(( + "Host \"{}\" does not have implemented plugins" + " for template workfile build." + ).format(self._host_name)) + self._update_ui_visibility() + return + + last_selected_plugin = self._last_selected_plugin + self._last_selected_plugin = None + self._plugins_combo.clear() + for identifier, plugin in placeholder_plugins.items(): + label = plugin.label or plugin.identifier + self._plugins_combo.addItem(label, plugin.identifier) + + index = self._plugins_combo.findData(last_selected_plugin) + if index < 0: + index = 0 + self._plugins_combo.setCurrentIndex(index) + self._on_plugin_change() + + self._update_ui_visibility() + + def set_create_mode(self): + if self._mode == 0: + return + + self._mode = 0 + self._update_item = None + self.refresh() + + def set_update_mode(self, update_item): + if self._mode == 1: + return + + self._mode = 1 + self._update_item = update_item + if not update_item: + self._add_message_to_content(( + "Nothing to update." 
+ " (You maybe don't have selected placeholder.)" + )) + else: + self._create_option_widgets( + update_item.plugin, update_item.to_dict() + ) + self._update_ui_visibility() + + def _create_option_widgets(self, plugin, options=None): + self._clear_content_widget() + attr_defs = plugin.get_placeholder_options(options) + widget = AttributeDefinitionsWidget(attr_defs, self._content_widget) + self._content_layout.addWidget(widget, 0) + self._content_layout.addStretch(1) + self._attr_defs_widget = widget + + def _update_ui_visibility(self): + create_mode = self._mode == 0 + self._plugins_combo.setVisible(create_mode) + + if not self._builder: + self._save_btn.setVisible(False) + self._create_btn.setVisible(False) + return + + save_enabled = not create_mode + if save_enabled: + save_enabled = self._update_item is not None + self._save_btn.setVisible(save_enabled) + self._create_btn.setVisible(create_mode) + + def _on_plugin_change(self): + index = self._plugins_combo.currentIndex() + plugin_identifier = self._plugins_combo.itemData(index) + if plugin_identifier == self._last_selected_plugin: + return + + self._last_selected_plugin = plugin_identifier + plugin = self._builder.placeholder_plugins.get(plugin_identifier) + self._create_option_widgets(plugin) + + def _on_save_click(self): + options = self._attr_defs_widget.current_value() + plugin = self._builder.placeholder_plugins.get( + self._last_selected_plugin + ) + # TODO much better error handling + try: + plugin.update_placeholder(self._update_item, options) + self.accept() + except Exception as exc: + self.log.warning("Something went wrong", exc_info=True) + dialog = QtWidgets.QMessageBox(self) + dialog.setWindowTitle("Something went wrong") + dialog.setText("Something went wrong") + dialog.exec_() + + def _on_create_click(self): + options = self._attr_defs_widget.current_value() + plugin = self._builder.placeholder_plugins.get( + self._last_selected_plugin + ) + # TODO much better error handling + try: + plugin.create_placeholder(options) + self.accept() + except Exception as exc: + self.log.warning("Something went wrong", exc_info=True) + dialog = QtWidgets.QMessageBox(self) + dialog.setWindowTitle("Something went wrong") + dialog.setText("Something went wrong") + dialog.exec_() + + def _on_close_click(self): + self.reject() + + def showEvent(self, event): + super(WorkfileBuildDialog, self).showEvent(event) + if not self._first_refreshed: + self.refresh() + + if self._first_show: + self._first_show = False + self.setStyleSheet(style.load_stylesheet()) + self.resize(390, 450) From ff149b68215adc8c0211d7248d94b17376496ad1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 19:42:55 +0200 Subject: [PATCH 0991/2550] Remove unused import --- openpype/tools/loader/model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 9d1f1e045c..19b135bfc5 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -14,8 +14,7 @@ from openpype.client import ( get_versions, get_hero_versions, get_version_by_name, - get_representations, - get_representations_parents + get_representations ) from openpype.pipeline import ( registered_host, From dc903c752014348bda4fd4a2a05600ab7ec66f87 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 19:49:17 +0200 Subject: [PATCH 0992/2550] Store `loaded_in_scene` as `bool` in model --- openpype/tools/loader/model.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git 
a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 19b135bfc5..8543672617 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -679,9 +679,7 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): data["asset"] = asset_docs_by_id[asset_id]["name"] data["last_version"] = last_version - - loaded = subset_doc["_id"] in subsets_loaded_by_id - data["loaded_in_scene"] = "yes" if loaded else "no" + data["loaded_in_scene"] = subset_doc["_id"] in subsets_loaded_by_id # Sync server data data.update( From 9ebd602a91a21e427cbd063bdd3841a0370d6b22 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 20:06:34 +0200 Subject: [PATCH 0993/2550] Add delegate to loaded in scene column for "yes/no" and colorized column --- openpype/tools/loader/widgets.py | 7 ++++++- openpype/tools/utils/delegates.py | 27 +++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 3c4a89aa0f..e27d7e6a12 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -36,7 +36,8 @@ from openpype.tools.utils import ( ) from openpype.tools.utils.delegates import ( VersionDelegate, - PrettyTimeDelegate + PrettyTimeDelegate, + LoadedInSceneDelegate ) from openpype.tools.utils.widgets import ( OptionalMenu, @@ -234,6 +235,10 @@ class SubsetWidget(QtWidgets.QWidget): column = model.Columns.index("repre_info") view.setItemDelegateForColumn(column, avail_delegate) + loaded_in_scene_delegate = LoadedInSceneDelegate(view) + column = model.Columns.index("loaded_in_scene") + view.setItemDelegateForColumn(column, loaded_in_scene_delegate) + layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addLayout(top_bar_layout) diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py index d6c2d69e76..3547251282 100644 --- a/openpype/tools/utils/delegates.py +++ b/openpype/tools/utils/delegates.py @@ -291,3 +291,30 @@ class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate): def displayText(self, value, locale): if value is not None: return pretty_timestamp(value) + + +class LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate): + """Delegate for Loaded in Scene state columns. 
+ + Shows "yes" or "no" for True or False values + Colorizes green or dark grey based on True or False values + + """ + + def __init__(self, *args, **kwargs): + super(LoadedInSceneDelegate, self).__init__(*args, **kwargs) + self._colors = { + True: QtGui.QColor(80, 170, 80), + False: QtGui.QColor(90, 90, 90) + } + + def displayText(self, value, locale): + return "yes" if value else "no" + + def initStyleOption(self, option, index): + super(LoadedInSceneDelegate, self).initStyleOption(option, index) + + # Colorize based on value + value = index.data(QtCore.Qt.DisplayRole) + color = self._colors[bool(value)] + option.palette.setBrush(QtGui.QPalette.Text, color) From 379a5f5d787f3d4fff3f16e3d6d4b1f6adea390e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 22:52:08 +0200 Subject: [PATCH 0994/2550] Log file format in more human-readable manner instead of an integer --- .../maya/plugins/publish/validate_rendersettings.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index feb6a16dac..679535aa8c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -10,6 +10,12 @@ import openpype.api from openpype.hosts.maya.api import lib +def get_redshift_image_format_labels(): + """Return nice labels for Redshift image formats.""" + var = "$g_redshiftImageFormatLabels" + return mel.eval("{0}={0}".format(var)) + + class ValidateRenderSettings(pyblish.api.InstancePlugin): """Validates the global render settings @@ -191,10 +197,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): pass elif default_ext != aov_ext: + labels = get_redshift_image_format_labels() cls.log.error(("AOV file format is not the same " "as the one set globally " - "{} != {}").format(default_ext, - aov_ext)) + "{} != {}").format(labels[default_ext], + labels[aov_ext])) invalid = True if renderer == "renderman": From d37f65e81ff2d88dcdd4bca07048fcea0d85849d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 22:55:36 +0200 Subject: [PATCH 0995/2550] Fix `prefix` is None issue --- openpype/hosts/maya/plugins/publish/validate_rendersettings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 679535aa8c..eea60ef4f3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -108,8 +108,9 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): # Get the node attributes for current renderer attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS['default']) + # Prefix attribute can return None when a value was never set prefix = lib.get_attr_in_layer(cls.ImagePrefixes[renderer], - layer=layer) + layer=layer) or "" padding = lib.get_attr_in_layer("{node}.{padding}".format(**attrs), layer=layer) From 4d84a06bd323425c7c033400853a058c983b4e32 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 22:57:46 +0200 Subject: [PATCH 0996/2550] Repair defaultRenderGlobals.animation (must be enabled) --- openpype/hosts/maya/plugins/publish/validate_rendersettings.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py 
b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index eea60ef4f3..dcf77ad1f7 100644
--- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py
+++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py
@@ -302,6 +302,9 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
 default = lib.RENDER_ATTRS['default']
 render_attrs = lib.RENDER_ATTRS.get(renderer, default)
+ # Repair animation must be enabled
+ cmds.setAttr("defaultRenderGlobals.animation", True)
+
 # Repair prefix
 if renderer != "renderman":
 node = render_attrs["node"]

From fea33ee4966a0e7f00792bdf98b73515976a334d Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Thu, 8 Sep 2022 23:04:41 +0200
Subject: [PATCH 0997/2550] Clarify log message which of the values is AOV format and which is global

---
 .../hosts/maya/plugins/publish/validate_rendersettings.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index dcf77ad1f7..ed4a076302 100644
--- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py
+++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py
@@ -199,10 +199,10 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
 elif default_ext != aov_ext:
 labels = get_redshift_image_format_labels()
- cls.log.error(("AOV file format is not the same "
- "as the one set globally "
- "{} != {}").format(labels[default_ext],
- labels[aov_ext]))
+ cls.log.error(
+ "AOV file format {} does not match global file format "
+ "{}".format(labels[aov_ext], labels[default_ext])
+ )
 invalid = True

 if renderer == "renderman":

From 1f8887eabb2a68c08af178d35f408e6a3eb5acfc Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Thu, 8 Sep 2022 23:05:03 +0200
Subject: [PATCH 0998/2550] Cosmetics

---
 openpype/hosts/maya/plugins/publish/validate_rendersettings.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index ed4a076302..7f0985f69b 100644
--- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py
+++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py
@@ -337,8 +337,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
 cmds.optionMenuGrp("vrayRenderElementSeparator",
 v=instance.data.get("aovSeparator", "_"))
 cmds.setAttr(
- "{}.fileNameRenderElementSeparator".format(
- node),
+ "{}.fileNameRenderElementSeparator".format(node),
 instance.data.get("aovSeparator", "_"),
 type="string"
 )

From 488c0000da26891ab807065718ebb9af0d4631b0 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Thu, 8 Sep 2022 23:28:14 +0200
Subject: [PATCH 0999/2550] Correctly ignore nodes inside `rendering` instance that do not match expected naming - A warning is still logged

---
 .../maya/plugins/publish/collect_render.py | 26 ++++++++++---------
 1 file changed, 14 insertions(+), 12 deletions(-)

diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index ebda5e190d..a90b635311 100644
--- a/openpype/hosts/maya/plugins/publish/collect_render.py
+++ b/openpype/hosts/maya/plugins/publish/collect_render.py
@@ -102,22 +102,24 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
 }

 for layer in collected_render_layers:
- try:
- if layer.startswith("LAYER_"):
- # this is support for legacy mode where render layers
- # started with 
`LAYER_` prefix. - expected_layer_name = re.search( - r"^LAYER_(.*)", layer).group(1) - else: - # new way is to prefix render layer name with instance - # namespace. - expected_layer_name = re.search( - r"^.+:(.*)", layer).group(1) - except IndexError: + if layer.startswith("LAYER_"): + # this is support for legacy mode where render layers + # started with `LAYER_` prefix. + layer_name_pattern = r"^LAYER_(.*)" + else: + # new way is to prefix render layer name with instance + # namespace. + layer_name_pattern = r"^.+:(.*)" + + # todo: We should have a more explicit way to link the renderlayer + match = re.match(layer_name_pattern, layer) + if not match: msg = "Invalid layer name in set [ {} ]".format(layer) self.log.warning(msg) continue + expected_layer_name = match.group(1) + self.log.info("processing %s" % layer) # check if layer is part of renderSetup if expected_layer_name not in maya_render_layers: From 07e2f35c96aa9933a2a33aa31fe2855ebe63525a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 23:42:55 +0200 Subject: [PATCH 1000/2550] Improve logging message to end user - Previously only the slightly more complex node name was logged --- openpype/hosts/maya/plugins/publish/collect_render.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index a90b635311..35af21eec8 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -119,8 +119,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin): continue expected_layer_name = match.group(1) + self.log.info("Processing '{}' as layer [ {} ]" + "".format(layer, expected_layer_name)) - self.log.info("processing %s" % layer) # check if layer is part of renderSetup if expected_layer_name not in maya_render_layers: msg = "Render layer [ {} ] is not in " "Render Setup".format( From 41e673c3ee45c85a7c71e5b0a0bb465ae95a9b83 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 00:20:47 +0200 Subject: [PATCH 1001/2550] Cleanup comment --- openpype/hosts/maya/api/lib.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 58e160cb2f..5e449b324e 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2633,8 +2633,6 @@ def load_capture_preset(data=None): scene = capture.parse_active_scene() options['sound'] = scene['sound'] - # options['display_options'] = temp_options - return options From ed11baf0fdaa8c35764bdee8b04215e58aca1e23 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 00:29:16 +0200 Subject: [PATCH 1002/2550] Include camera options --- openpype/hosts/maya/api/lib.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 5e449b324e..c8369bac13 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2629,6 +2629,13 @@ def load_capture_preset(data=None): options['viewport_options'] = temp_options options['viewport2_options'] = temp_options2 + # CAMERA OPTIONS + id = 'Camera Options' + camera_options = {} + for key, value in preset[id].items(): + camera_options[key] = value + options['camera_options'] = camera_options + # use active sound track scene = capture.parse_active_scene() options['sound'] = scene['sound'] From 26ae84df161344da8e3132f25d4655a470598645 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 
9 Sep 2022 13:20:49 +0800 Subject: [PATCH 1003/2550] adding lock task workfiles when users are working on them --- openpype/hosts/maya/api/pipeline.py | 60 ++++++++++++------- openpype/pipeline/workfile/lock_workfile.py | 60 ++++++++++++------- .../defaults/project_settings/global.json | 3 +- .../schemas/schema_global_tools.json | 25 ++++++++ openpype/tools/workfiles/files_widget.py | 15 ++++- 5 files changed, 116 insertions(+), 47 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index b645b41fa0..67cf80e707 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -33,9 +33,10 @@ from openpype.pipeline import ( from openpype.pipeline.load import any_outdated_containers from openpype.pipeline.workfile.lock_workfile import ( create_workfile_lock, - get_username, + get_user_from_lock, remove_workfile_lock, - is_workfile_locked + is_workfile_locked, + is_workfile_lock_enabled ) from openpype.hosts.maya import MAYA_ROOT_DIR from openpype.hosts.maya.lib import copy_workspace_mel @@ -266,11 +267,21 @@ def _before_scene_save(return_code, client_data): def _remove_workfile_lock(): + """Remove workfile lock on current file""" + if not handle_workfile_locks(): + return filepath = current_file() if filepath: remove_workfile_lock(filepath) +def handle_workfile_locks(): + if lib.IS_HEADLESS: + return False + project_name = legacy_io.active_project() + return is_workfile_lock_enabled(MayaHost.name, project_name) + + def uninstall(): pyblish.api.deregister_plugin_path(PUBLISH_PATH) pyblish.api.deregister_host("mayabatch") @@ -468,8 +479,10 @@ def on_before_save(): return lib.validate_fps() -def after_file_open(): +def check_lock_on_current_file(): """Check if there is a user opening the file""" + if not handle_workfile_locks(): + return log.info("Running callback on checking the lock file...") # add the lock file when opening the file @@ -477,23 +490,25 @@ def after_file_open(): if not is_workfile_locked(filepath): create_workfile_lock(filepath) + return - else: - username = get_username(filepath) - reminder = cmds.window(title="Reminder", width=400, height=30) - cmds.columnLayout(adjustableColumn=True) - cmds.separator() - cmds.columnLayout(adjustableColumn=True) - comment = " %s is working the same workfile!" % username - cmds.text(comment, align='center') - cmds.text(vis=False) - cmds.rowColumnLayout(numberOfColumns=3, - columnWidth=[(1, 300), (2, 100)], - columnSpacing=[(2, 10)]) - cmds.separator(vis=False) - quit_command = "cmds.quit(force=True);cmds.deleteUI('%s')" % reminder - cmds.button(label='Ok', command=quit_command) - cmds.showWindow(reminder) + username = get_user_from_lock(filepath) + reminder = cmds.window(title="Reminder", width=400, height=30) + cmds.columnLayout(adjustableColumn=True) + cmds.separator() + cmds.columnLayout(adjustableColumn=True) + comment = " %s is working the same workfile!" 
% username + cmds.text(comment, align='center') + cmds.text(vis=False) + cmds.rowColumnLayout(numberOfColumns=3, + columnWidth=[(1,200), (2, 100), (3, 100)], + columnSpacing=[(3, 10)]) + cmds.separator(vis=False) + cancel_command = "cmds.file(new=True);cmds.deleteUI('%s')" % reminder + ignore_command ="cmds.deleteUI('%s')" % reminder + cmds.button(label='Cancel', command=cancel_command) + cmds.button(label = "Ignore", command=ignore_command) + cmds.showWindow(reminder) def on_before_close(): @@ -501,12 +516,13 @@ def on_before_close(): log.info("Closing Maya...") # delete the lock file filepath = current_file() - remove_workfile_lock(filepath) + if handle_workfile_locks(): + remove_workfile_lock(filepath) def before_file_open(): """check lock file when the file changed""" - log.info("check lock file when file changed...") + log.info("Removing lock on current file before scene open...") # delete the lock file _remove_workfile_lock() @@ -579,7 +595,7 @@ def on_open(): dialog.show() # create lock file for the maya scene - after_file_open() + check_lock_on_current_file() def on_new(): diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py index 8e75f6fb61..b62f80c507 100644 --- a/openpype/pipeline/workfile/lock_workfile.py +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -1,17 +1,22 @@ import os import json from uuid import uuid4 +from openpype.lib import Logger, filter_profiles from openpype.lib.pype_info import get_workstation_info +from openpype.settings import get_project_settings def _read_lock_file(lock_filepath): + if not os.path.exists(lock_filepath): + log = Logger.get_logger("_read_lock_file") + log.debug("lock file is not created or readable as expected!") with open(lock_filepath, "r") as stream: data = json.load(stream) return data def _get_lock_file(filepath): - return filepath + ".lock" + return filepath + ".oplock" def is_workfile_locked(filepath): @@ -22,46 +27,59 @@ def is_workfile_locked(filepath): def is_workfile_locked_for_current_process(filepath): - if not is_workfile_locked(): + if not is_workfile_locked(filepath): return False lock_filepath = _get_lock_file(filepath) - process_id = os.environ["OPENPYPE_PROCESS_ID"] data = _read_lock_file(lock_filepath) - return data["process_id"] == process_id + return data["process_id"] == _get_process_id() def delete_workfile_lock(filepath): lock_filepath = _get_lock_file(filepath) - if not os.path.exists(lock_filepath): - return - - if is_workfile_locked_for_current_process(filepath): - os.remove(filepath) + if os.path.exists(lock_filepath): + os.remove(lock_filepath) def create_workfile_lock(filepath): lock_filepath = _get_lock_file(filepath) - process_id = os.environ.get("OPENPYPE_PROCESS_ID") - if not process_id: - process_id = str(uuid4()) - os.environ["OPENPYPE_PROCESS_ID"] = process_id info = get_workstation_info() - info["process_id"] = process_id + info["process_id"] = _get_process_id() with open(lock_filepath, "w") as stream: json.dump(info, stream) -def get_username(filepath): +def get_user_from_lock(filepath): lock_filepath = _get_lock_file(filepath) - with open(lock_filepath, "r") as stream: - data = json.load(stream) + if not os.path.exists(lock_filepath): + return + data = _read_lock_file(lock_filepath) username = data["username"] return username def remove_workfile_lock(filepath): - lock_filepath = _get_lock_file(filepath) - if not os.path.exists(lock_filepath): - return - return os.remove(lock_filepath) + if is_workfile_locked_for_current_process(filepath): + 
delete_workfile_lock(filepath) + + +def _get_process_id(): + process_id = os.environ.get("OPENPYPE_PROCESS_ID") + if not process_id: + process_id = str(uuid4()) + os.environ["OPENPYPE_PROCESS_ID"] = process_id + return process_id + +def is_workfile_lock_enabled(host_name, project_name, project_setting=None): + if project_setting is None: + project_setting = get_project_settings(project_name) + workfile_lock_profiles = ( + project_setting + ["global"] + ["tools"] + ["Workfiles"] + ["workfile_lock_profiles"]) + profile = filter_profiles(workfile_lock_profiles,{"host_name": host_name}) + if not profile: + return False + return profile["enabled"] diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 0ff9363ba7..fc98a06ef1 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -403,7 +403,8 @@ "enabled": false } ], - "extra_folders": [] + "extra_folders": [], + "workfile_lock_profiles": [] }, "loader": { "family_filter_profiles": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index f8c9482e5f..d422278667 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -238,6 +238,31 @@ } ] } + }, + { + "type": "list", + "key": "workfile_lock_profiles", + "label": "Workfile lock profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "hosts-enum", + "key": "host_name", + "label": "Hosts", + "multiselection": true + }, + { + "type": "splitter" + }, + { + "key": "enabled", + "label": "Enabled", + "type": "boolean" + } + ] + } } ] }, diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 6a554efd8b..5eab3af144 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -10,7 +10,9 @@ from openpype.host import IWorkfileHost from openpype.client import get_asset_by_id from openpype.pipeline.workfile.lock_workfile import ( is_workfile_locked, - get_username + get_user_from_lock, + is_workfile_lock_enabled, + is_workfile_locked_for_current_process ) from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate @@ -456,10 +458,17 @@ class FilesWidget(QtWidgets.QWidget): "host_name": self.host_name } + def _is_workfile_locked(self, filepath): + if not is_workfile_lock_enabled(self.host_name, self.project_name): + return False + if not is_workfile_locked(filepath): + return False + return not is_workfile_locked_for_current_process(filepath) + def open_file(self, filepath): host = self.host - if is_workfile_locked(filepath): - username = get_username(filepath) + if self._is_workfile_locked(filepath): + username = get_user_from_lock(filepath) popup_dialog = QtWidgets.QMessageBox(parent=self) popup_dialog.setWindowTitle("Warning") popup_dialog.setText(username + " is using the file") From 4978981631933e98aba32fd8ebed363382ac5a06 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Sep 2022 13:25:12 +0800 Subject: [PATCH 1004/2550] adding lock task workfiles when users are working on them --- openpype/hosts/maya/api/pipeline.py | 2 +- openpype/pipeline/workfile/lock_workfile.py | 1 + 2 files changed, 2 
insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 67cf80e707..355537b92c 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -505,7 +505,7 @@ def check_lock_on_current_file(): columnSpacing=[(3, 10)]) cmds.separator(vis=False) cancel_command = "cmds.file(new=True);cmds.deleteUI('%s')" % reminder - ignore_command ="cmds.deleteUI('%s')" % reminder + ignore_command = "cmds.deleteUI('%s')" % reminder cmds.button(label='Cancel', command=cancel_command) cmds.button(label = "Ignore", command=ignore_command) cmds.showWindow(reminder) diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py index b62f80c507..2a7f25e524 100644 --- a/openpype/pipeline/workfile/lock_workfile.py +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -70,6 +70,7 @@ def _get_process_id(): os.environ["OPENPYPE_PROCESS_ID"] = process_id return process_id + def is_workfile_lock_enabled(host_name, project_name, project_setting=None): if project_setting is None: project_setting = get_project_settings(project_name) From 365a90c3c17c0d65bb6f97b9fd25d8299e3b0c0b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Sep 2022 13:26:58 +0800 Subject: [PATCH 1005/2550] adding lock task workfiles when users are working on them --- openpype/hosts/maya/api/pipeline.py | 4 ++-- openpype/pipeline/workfile/lock_workfile.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 355537b92c..b34a216c13 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -501,13 +501,13 @@ def check_lock_on_current_file(): cmds.text(comment, align='center') cmds.text(vis=False) cmds.rowColumnLayout(numberOfColumns=3, - columnWidth=[(1,200), (2, 100), (3, 100)], + columnWidth=[(1, 200), (2, 100), (3, 100)], columnSpacing=[(3, 10)]) cmds.separator(vis=False) cancel_command = "cmds.file(new=True);cmds.deleteUI('%s')" % reminder ignore_command = "cmds.deleteUI('%s')" % reminder cmds.button(label='Cancel', command=cancel_command) - cmds.button(label = "Ignore", command=ignore_command) + cmds.button(label="Ignore", command=ignore_command) cmds.showWindow(reminder) diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py index 2a7f25e524..7c8c4a8066 100644 --- a/openpype/pipeline/workfile/lock_workfile.py +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -80,7 +80,7 @@ def is_workfile_lock_enabled(host_name, project_name, project_setting=None): ["tools"] ["Workfiles"] ["workfile_lock_profiles"]) - profile = filter_profiles(workfile_lock_profiles,{"host_name": host_name}) + profile = filter_profiles(workfile_lock_profiles, {"host_name": host_name}) if not profile: return False return profile["enabled"] From 3501d0d23a78fbaef106da2fffe946cb49bef855 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:36:43 +0200 Subject: [PATCH 1006/2550] :wastebasket: move deprecation marks from comments to docstrings --- openpype/action.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/openpype/action.py b/openpype/action.py index de9cdee010..15c96404b6 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -72,17 +72,19 @@ def get_errored_plugins_from_data(context): return get_errored_plugins_from_context(context) -# 'RepairAction' and 'RepairContextAction' were moved to -# 
'openpype.pipeline.publish' please change you imports. -# There is no "reasonable" way hot mark these classes as deprecated to show -# warning of wrong import. -# Deprecated since 3.14.* will be removed in 3.16.* class RepairAction(pyblish.api.Action): """Repairs the action

 To process the repairing this requires a static `repair(instance)` method is available on the plugin.

+ Deprecated:
+ 'RepairAction' and 'RepairContextAction' were moved to
+ 'openpype.pipeline.publish' please change your imports.
+ There is no "reasonable" way to mark these classes as deprecated
+ to show warning of wrong import. Deprecated since 3.14.* will be
+ removed in 3.16.*
+
 """
 label = "Repair"
 on = "failed"  # This action is only available on a failed plug-in
@@ -103,13 +105,19 @@ class RepairAction(pyblish.api.Action):
 plugin.repair(instance)

-# Deprecated since 3.14.* will be removed in 3.16.*
 class RepairContextAction(pyblish.api.Action):
 """Repairs the action

 To process the repairing this requires a static `repair(instance)` method is available on the plugin.

+ Deprecated:
+ 'RepairAction' and 'RepairContextAction' were moved to
+ 'openpype.pipeline.publish' please change your imports.
+ There is no "reasonable" way to mark these classes as deprecated
+ to show warning of wrong import. Deprecated since 3.14.* will be
+ removed in 3.16.*
+
 """
 label = "Repair"
 on = "failed"  # This action is only available on a failed plug-in

From d59e188ab003d56d6ce8a71947f973b4a732ea01 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 9 Sep 2022 10:37:27 +0200
Subject: [PATCH 1007/2550] :recycle: add instance_node as separate parameter

---
 openpype/hosts/houdini/api/plugin.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ff747085da..f300496a43 100644
--- a/openpype/hosts/houdini/api/plugin.py
+++ b/openpype/hosts/houdini/api/plugin.py
@@ -111,7 +111,12 @@ class HoudiniCreator(NewCreator):
 instance_node = out.createNode(
 node_type, node_name=subset_name)
 instance_node.moveToGoodPosition()
+
+ # wondering if we'll ever need more than one member here
+ # in Houdini
 instance_data["members"] = [instance_node.path()]
+ instance_data["instance_node"] = instance_node.path()
+
 instance = CreatedInstance(
 self.family,
 subset_name,
@@ -136,7 +141,7 @@ class HoudiniCreator(NewCreator):

 def update_instances(self, update_list):
 for created_inst, _changes in update_list:
- instance_node = hou.node(created_inst.get("members")[0])
+ instance_node = hou.node(created_inst.get("instance_node"))

 current_data = read(instance_node)
 imprint(

From 42c6c846e479c344b6021101a5aa5d744372447a Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 9 Sep 2022 10:38:05 +0200
Subject: [PATCH 1008/2550] :alien: change error handling

---
 .../validate_abc_primitive_to_detail.py | 31 +++++++-----
 .../publish/validate_alembic_input_node.py | 27 +++++++----
 .../plugins/publish/validate_camera_rop.py | 47 +++++++++++++------
 .../validate_primitive_hierarchy_paths.py | 26 ++++++----
 .../publish/validate_sop_output_node.py | 11 ++---
 .../publish/validate_workfile_paths.py | 19 ++++++--
 6 files changed, 109 insertions(+), 52 deletions(-)

diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 40949b7042..55c705c65b 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py
+++ 
b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -1,8 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api from collections import defaultdict - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" @@ -24,15 +24,24 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitives found with inconsistent primitive " - "to detail attributes. See log." + raise PublishValidationError( + ("Primitives found with inconsistent primitive " + "to detail attributes. See log."), + title=self.label ) @classmethod def get_invalid(cls, instance): - output = instance.data["output_node"] + output_node = instance.data.get("output_node") + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) + + return [node.path()] rop = instance.data["members"][0] pattern = rop.parm("prim_to_detail_pattern").eval().strip() @@ -67,7 +76,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the start frame then it might be # something that is emitted over time. 
As such we can't actually @@ -86,7 +95,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
 "Geometry Primitives are missing "
 "path attribute: `%s`" % path_attr
 )
- return [output.path()]
+ return [output_node.path()]

 # Ensure at least a single string value is present
 if not attrib.strings():
@@ -94,7 +103,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
 "Primitive path attribute has no "
 "string values: %s" % path_attr
 )
- return [output.path()]
+ return [output_node.path()]

 paths = None
 for attr in pattern.split(" "):
@@ -130,4 +139,4 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
 "Path has multiple values: %s (path: %s)"
 % (list(values), path)
 )
- return [output.path()]
+ return [output_node.path()]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 2625ae5f83..aa572dc3bb 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
@@ -1,6 +1,5 @@
 import pyblish.api
-
-from openpype.pipeline.publish import ValidateContentsOrder
+from openpype.pipeline import PublishValidationError


 class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
@@ -12,7 +11,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):

 """

- order = ValidateContentsOrder + 0.1
+ order = pyblish.api.ValidatorOrder + 0.1
 families = ["pointcache"]
 hosts = ["houdini"]
 label = "Validate Input Node (Abc)"
@@ -20,18 +19,28 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
 def process(self, instance):
 invalid = self.get_invalid(instance)
 if invalid:
- raise RuntimeError(
- "Primitive types found that are not supported"
- "for Alembic output."
+ raise PublishValidationError(
+ ("Primitive types found that are not supported "
+ "for Alembic output."),
+ title=self.label
 )

 @classmethod
 def get_invalid(cls, instance):

 invalid_prim_types = ["VDB", "Volume"]
- node = instance.data["output_node"]
+ output_node = instance.data.get("output_node")

- if not hasattr(node, "geometry"):
+ if output_node is None:
+ node = instance.data["members"][0]
+ cls.log.error(
+ "SOP Output node in '%s' does not exist. "
+ "Ensure a valid SOP output path is set." 
% node.path() + ) + + return [node.path()] + + if not hasattr(output_node, "geometry"): # In the case someone has explicitly set an Object # node instead of a SOP node in Geometry context # then for now we ignore - this allows us to also @@ -40,7 +49,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): return frame = instance.data.get("frameStart", 0) - geo = node.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) invalid = False for prim_type in invalid_prim_types: diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index f97c46ae9d..18fed7fbc4 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -1,11 +1,13 @@ +# -*- coding: utf-8 -*- +"""Validator plugin for Houdini Camera ROP settings.""" import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["camera"] hosts = ["houdini"] label = "Camera ROP" @@ -14,30 +16,45 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): import hou - node = instance.data["members"][0] + node = hou.node(instance.data.get("instance_node")) if node.parm("use_sop_path").eval(): - raise RuntimeError( - "Alembic ROP for Camera export should not be " - "set to 'Use Sop Path'. Please disable." + raise PublishValidationError( + ("Alembic ROP for Camera export should not be " + "set to 'Use Sop Path'. Please disable."), + title=self.label ) # Get the root and objects parameter of the Alembic ROP node root = node.parm("root").eval() objects = node.parm("objects").eval() - assert root, "Root parameter must be set on Alembic ROP" - assert root.startswith("/"), "Root parameter must start with slash /" - assert objects, "Objects parameter must be set on Alembic ROP" - assert len(objects.split(" ")) == 1, "Must have only a single object." 
+ errors = [] + if not root: + errors.append("Root parameter must be set on Alembic ROP") + if not root.startswith("/"): + errors.append("Root parameter must start with slash /") + if not objects: + errors.append("Objects parameter must be set on Alembic ROP") + if len(objects.split(" ")) != 1: + errors.append("Must have only a single object.") + + if errors: + for error in errors: + self.log.error(error) + raise PublishValidationError( + "Some checks failed, see validator log.", + title=self.label) # Check if the object exists and is a camera path = root + "/" + objects camera = hou.node(path) if not camera: - raise ValueError("Camera path does not exist: %s" % path) + raise PublishValidationError( + "Camera path does not exist: %s" % path, + title=self.label) if camera.type().name() != "cam": - raise ValueError( - "Object set in Alembic ROP is not a camera: " - "%s (type: %s)" % (camera, camera.type().name()) - ) + raise PublishValidationError( + ("Object set in Alembic ROP is not a camera: " + "{} (type: {})").format(camera, camera.type().name()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 10100b698e..e1f1dc116e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -1,5 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -19,16 +21,24 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "See log for details. " "Invalid nodes: {0}".format(invalid) + raise PublishValidationError( + "See log for details. " "Invalid nodes: {0}".format(invalid), + title=self.label ) @classmethod def get_invalid(cls, instance): - import hou + output_node = instance.data.get("output_node") - output = instance.data["output_node"] + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) + + return [node.path()] rop = instance.data["members"][0] build_from_path = rop.parm("build_from_path").eval() @@ -52,7 +62,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the current frame then we can't # check whether the path names are correct. 
So we'll just issue a @@ -73,7 +83,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -81,7 +91,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = geo.primStringAttribValues(path_attr) # Ensure all primitives are set to a valid path @@ -93,4 +103,4 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Prims have no value for attribute `%s` " "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index 02b650d48e..c18ad7a1b7 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import PublishValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -24,10 +24,9 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise PublishXmlValidationError( - self, - message="Output node(s) `%s` are incorrect. " % invalid, - title=self.label + raise PublishValidationError( + "Output node(s) are incorrect", + title="Invalid output node(s)" ) @classmethod @@ -35,7 +34,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + output_node = instance.data.get("output_node") if output_node is None: node = instance.data["members"][0] diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 79b3e894e5..f7a4c762cc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -2,22 +2,30 @@ import openpype.api import pyblish.api import hou +from openpype.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from openpype.pipeline.publish import RepairAction -class ValidateWorkfilePaths(pyblish.api.InstancePlugin): +class ValidateWorkfilePaths( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate workfile paths so they are absolute.""" order = pyblish.api.ValidatorOrder families = ["workfile"] hosts = ["houdini"] label = "Validate Workfile Paths" - actions = [openpype.api.RepairAction] + actions = [RepairAction] optional = True node_types = ["file", "alembic"] prohibited_vars = ["$HIP", "$JOB"] def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid() self.log.info( "node types to check: {}".format(", ".join(self.node_types))) @@ -29,13 +37,18 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): self.log.error( "{}: {}".format(param.path(), param.unexpandedString())) - raise RuntimeError("Invalid paths found") + raise PublishValidationError( + "Invalid paths found", title=self.label) @classmethod def get_invalid(cls): invalid = [] for param, _ in 
hou.fileReferences(): + # it might return None for some reason + if not param: + continue # skip nodes we are not interested in + cls.log.debug(param) if param.node().type().name() not in cls.node_types: continue From a1377a87d6001acb91429022b14a1db12e3f57a0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:39:17 +0200 Subject: [PATCH 1009/2550] :construction: dealing with identifiers --- .../plugins/create/create_alembic_camera.py | 42 +++++++++---------- .../plugins/create/create_pointcache.py | 13 +++--- 2 files changed, 27 insertions(+), 28 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index eef86005f5..294c99744b 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,46 +1,44 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateAlembicCamera(plugin.Creator): +class CreateAlembicCamera(plugin.HoudiniCreator): """Single baked camera from Alembic ROP""" - name = "camera" + identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" family = "camera" icon = "camera" - def __init__(self, *args, **kwargs): - super(CreateAlembicCamera, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - # Set node type to create for output - self.data.update({"node_type": "alembic"}) + instance = super(CreateAlembicCamera, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/%s.abc" % self.name, + "filename": "$HIP/pyblish/{}.abc".format(subset_name), "use_sop_path": False, } - if self.nodes: - node = self.nodes[0] - path = node.path() + if self._nodes: + path = self._nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) parms.update({"root": "/" + root, "objects": remainder}) - instance.setParms(parms) + instance_node.setParms(parms) # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
- instance.parm("use_sop_path").lock(True) - instance.parm("trange").set(1) + instance_node.parm("use_sop_path").lock(True) + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 3365e25091..889e27ba51 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,18 +1,19 @@ # -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" from openpype.hosts.houdini.api import plugin from openpype.pipeline import CreatedInstance -import hou - class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - identifier = "pointcache" + identifier = "io.openpype.creators.houdini.pointcache" label = "Point Cache" family = "pointcache" icon = "gears" def create(self, subset_name, instance_data, pre_create_data): + import hou + instance_data.pop("active", None) instance_data.update({"node_type": "alembic"}) @@ -21,7 +22,7 @@ class CreatePointCache(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = hou.node(instance.get("members")[0]) + instance_node = hou.node(instance.get("instance_node")) parms = { "use_sop_path": True, "build_from_path": True, @@ -32,8 +33,8 @@ class CreatePointCache(plugin.HoudiniCreator): "filename": "$HIP/pyblish/{}.abc".format(subset_name) } - if instance_node: - parms["sop_path"] = instance_node.path() + if self._nodes: + parms["sop_path"] = self._nodes[0].path() instance_node.setParms(parms) instance_node.parm("trange").set(1) From dade064eb3f50b6b70aedec4e6d0cd487f7a9a70 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:39:30 +0200 Subject: [PATCH 1010/2550] :construction: solving hda publishing --- .../houdini/plugins/create/create_hda.py | 53 +++++++------------ .../houdini/plugins/publish/extract_hda.py | 2 +- 2 files changed, 21 insertions(+), 34 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index b98da8b8bb..b1751d0b6c 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,28 +1,22 @@ # -*- coding: utf-8 -*- -import hou - +"""Creator plugin for creating publishable Houdini Digital Assets.""" from openpype.client import ( get_asset_by_name, get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import lib -from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api import (lib, plugin) -class CreateHDA(plugin.Creator): +class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - name = "hda" + identifier = "hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" maintain_selection = False - def __init__(self, *args, **kwargs): - super(CreateHDA, self).__init__(*args, **kwargs) - self.data.pop("active", None) - def _check_existing(self, subset_name): # type: (str) -> bool """Check if existing subset name versions already exists.""" @@ -40,28 +34,34 @@ class CreateHDA(plugin.Creator): } return subset_name.lower() in existing_subset_names_low - def _process(self, instance): - subset_name = self.data["subset"] - # get selected nodes - out = hou.node("/obj") - self.nodes = hou.selectedNodes() + def create(self, subset_name, instance_data, pre_create_data): + import hou - if (self.options or {}).get("useSelection") and 
self.nodes: - # if we have `use selection` enabled and we have some + instance_data.pop("active", None) + + instance = super(CreateHDA, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + instance_node = hou.node(instance.get("instance_node")) + out = hou.node("/obj") + if self._nodes: + # if we have `use selection` enabled, and we have some # selected nodes ... subnet = out.collapseIntoSubnet( self.nodes, - subnet_name="{}_subnet".format(self.name)) + subnet_name="{}_subnet".format(subset_name)) subnet.moveToGoodPosition() to_hda = subnet else: to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(self.name)) + "subnet", node_name="{}_subnet".format(subset_name)) if not to_hda.type().definition(): # if node type has not its definition, it is not user # created hda. We test if hda can be created from the node. if not to_hda.canCreateDigitalAsset(): - raise Exception( + raise plugin.OpenPypeCreatorError( "cannot create hda from node {}".format(to_hda)) hda_node = to_hda.createDigitalAsset( @@ -78,17 +78,4 @@ class CreateHDA(plugin.Creator): hda_node.setName(subset_name) - # delete node created by Avalon in /out - # this needs to be addressed in future Houdini workflow refactor. - - hou.node("/out/{}".format(subset_name)).destroy() - - try: - lib.imprint(hda_node, self.data) - except hou.OperationFailed: - raise plugin.OpenPypeCreatorError( - ("Cannot set metadata on asset. Might be that it already is " - "OpenPype asset.") - ) - return hda_node diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 4352939a2c..50a7ce2908 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -16,7 +16,7 @@ class ExtractHDA(openpype.api.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance.data["members"][0] + hda_node = instance.data.get("members")[0] hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) From 872d85f91d31f678c28759c05dd35ed388bab8ec Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 11:52:33 +0200 Subject: [PATCH 1011/2550] Remove old legacy plug-ins that are of no use anymore --- .../plugins/publish/collect_maya_scene.py | 25 ------------------- .../hosts/maya/plugins/publish/collect_rig.py | 22 ---------------- 2 files changed, 47 deletions(-) delete mode 100644 openpype/hosts/maya/plugins/publish/collect_maya_scene.py delete mode 100644 openpype/hosts/maya/plugins/publish/collect_rig.py diff --git a/openpype/hosts/maya/plugins/publish/collect_maya_scene.py b/openpype/hosts/maya/plugins/publish/collect_maya_scene.py deleted file mode 100644 index eb21b17989..0000000000 --- a/openpype/hosts/maya/plugins/publish/collect_maya_scene.py +++ /dev/null @@ -1,25 +0,0 @@ -from maya import cmds - -import pyblish.api - - -class CollectMayaScene(pyblish.api.InstancePlugin): - """Collect Maya Scene Data - - """ - - order = pyblish.api.CollectorOrder + 0.2 - label = 'Collect Model Data' - families = ["mayaScene"] - - def process(self, instance): - # Extract only current frame (override) - frame = cmds.currentTime(query=True) - instance.data["frameStart"] = frame - instance.data["frameEnd"] = frame - - # make ftrack publishable - if instance.data.get('families'): - instance.data['families'].append('ftrack') - else: - instance.data['families'] = ['ftrack'] diff --git 
a/openpype/hosts/maya/plugins/publish/collect_rig.py b/openpype/hosts/maya/plugins/publish/collect_rig.py deleted file mode 100644 index 98ae1e8009..0000000000 --- a/openpype/hosts/maya/plugins/publish/collect_rig.py +++ /dev/null @@ -1,22 +0,0 @@ -from maya import cmds - -import pyblish.api - - -class CollectRigData(pyblish.api.InstancePlugin): - """Collect rig data - - Ensures rigs are published to Ftrack. - - """ - - order = pyblish.api.CollectorOrder + 0.2 - label = 'Collect Rig Data' - families = ["rig"] - - def process(self, instance): - # make ftrack publishable - if instance.data.get('families'): - instance.data['families'].append('ftrack') - else: - instance.data['families'] = ['ftrack'] From 3943d74f3ea3b72d08f2d2e114a30b4a5e00a515 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Sep 2022 11:55:05 +0200 Subject: [PATCH 1012/2550] flame: adding batch action hook --- openpype/hosts/flame/api/__init__.py | 4 +- openpype/hosts/flame/api/menu.py | 50 +++++++++++++++++++ .../hosts/flame/startup/openpype_in_flame.py | 13 +++++ 3 files changed, 66 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 76c1c93379..7da91d41e4 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -51,7 +51,8 @@ from .pipeline import ( ) from .menu import ( FlameMenuProjectConnect, - FlameMenuTimeline + FlameMenuTimeline, + FlameMenuBatch ) from .plugin import ( Creator, @@ -131,6 +132,7 @@ __all__ = [ # menu "FlameMenuProjectConnect", "FlameMenuTimeline", + "FlameMenuBatch", # plugin "Creator", diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index 7f1a6a24e2..a822059930 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -201,3 +201,53 @@ class FlameMenuTimeline(_FlameMenuApp): if self.flame: self.flame.execute_shortcut('Rescan Python Hooks') self.log.info('Rescan Python Hooks') + + +class FlameMenuBatch(_FlameMenuApp): + + # flameMenuProjectconnect app takes care of the preferences dialog as well + + def __init__(self, framework): + _FlameMenuApp.__init__(self, framework) + + def __getattr__(self, name): + def method(*args, **kwargs): + project = self.dynamic_menu_data.get(name) + if project: + self.link_project(project) + return method + + def build_menu(self): + if not self.flame: + return [] + + menu = deepcopy(self.menu) + + menu['actions'].append({ + "name": "Load...", + "execute": lambda x: self.tools_helper.show_loader() + }) + menu['actions'].append({ + "name": "Manage...", + "execute": lambda x: self.tools_helper.show_scene_inventory() + }) + menu['actions'].append({ + "name": "Library...", + "execute": lambda x: self.tools_helper.show_library_loader() + }) + return menu + + def refresh(self, *args, **kwargs): + self.rescan() + + def rescan(self, *args, **kwargs): + if not self.flame: + try: + import flame + self.flame = flame + except ImportError: + self.flame = None + + if self.flame: + self.flame.execute_shortcut('Rescan Python Hooks') + self.log.info('Rescan Python Hooks') diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index f2ac23b19e..60f6612b7f 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -73,6 +73,8 @@ def load_apps(): opfapi.FlameMenuProjectConnect(opfapi.CTX.app_framework)) opfapi.CTX.flame_apps.append( opfapi.FlameMenuTimeline(opfapi.CTX.app_framework)) + 
opfapi.CTX.flame_apps.append( + opfapi.FlameMenuBatch(opfapi.CTX.app_framework)) opfapi.CTX.app_framework.log.info("Apps are loaded") @@ -191,3 +193,14 @@ def get_timeline_custom_ui_actions(): openpype_install() return _build_app_menu("FlameMenuTimeline") + +def get_batch_custom_ui_actions(): + """Hook to create submenu in batch + + Returns: + list: menu object + """ + # install openpype and the host + openpype_install() + + return _build_app_menu("FlameMenuBatch") \ No newline at end of file From ad9e172e4a0d97be2124aa6816ba18a28008e8cb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 12:02:10 +0200 Subject: [PATCH 1013/2550] Add functionality back in to store playback time range with the mayaScene publish --- .../publish/collect_maya_scene_time.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py diff --git a/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py b/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py new file mode 100644 index 0000000000..71ca785e1d --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py @@ -0,0 +1,26 @@ +from maya import cmds + +import pyblish.api + + +class CollectMayaSceneTime(pyblish.api.InstancePlugin): + """Collect Maya Scene playback range + + This allows to reproduce the playback range for the content to be loaded. + It does *not* limit the extracted data to only data inside that time range. + + """ + + order = pyblish.api.CollectorOrder + 0.2 + label = 'Collect Maya Scene Time' + families = ["mayaScene"] + + def process(self, instance): + instance.data.update({ + "frameStart": cmds.playbackOptions(query=True, minTime=True), + "frameEnd": cmds.playbackOptions(query=True, maxTime=True), + "frameStartHandle": cmds.playbackOptions(query=True, + animationStartTime=True), + "frameEndHandle": cmds.playbackOptions(query=True, + animationEndTime=True), + }) From b7ee7669af0a3f43f1050d24818c2c7550331ba6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 12:03:32 +0200 Subject: [PATCH 1014/2550] Remove redundant comma --- openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py b/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py index 71ca785e1d..7e198df14d 100644 --- a/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py +++ b/openpype/hosts/maya/plugins/publish/collect_maya_scene_time.py @@ -22,5 +22,5 @@ class CollectMayaSceneTime(pyblish.api.InstancePlugin): "frameStartHandle": cmds.playbackOptions(query=True, animationStartTime=True), "frameEndHandle": cmds.playbackOptions(query=True, - animationEndTime=True), + animationEndTime=True) }) From 5558c1eb46c8e2045c26c19ae4afcf8a47520eba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= Date: Fri, 9 Sep 2022 12:07:56 +0200 Subject: [PATCH 1015/2550] Kitsu : add launcher action --- .../kitsu/actions/launcher_show_in_kitsu.py | 131 ++++++++++++++++++ openpype/modules/kitsu/kitsu_module.py | 5 +- 2 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 openpype/modules/kitsu/actions/launcher_show_in_kitsu.py diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py new file mode 100644 index 0000000000..0ac9c6e9b7 --- /dev/null +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ 
-0,0 +1,131 @@ +import webbrowser + +from openpype.pipeline import LauncherAction +from openpype.modules import ModulesManager +from openpype.client import get_project, get_asset_by_name + + +class ShowInKitsu(LauncherAction): + name = "showinkitsu" + label = "Show in Kitsu" + icon = "external-link-square" + color = "#e0e1e1" + order = 10 + + @staticmethod + def get_kitsu_module(): + return ModulesManager().modules_by_name.get("kitsu") + + def is_compatible(self, session): + if not session.get("AVALON_PROJECT"): + return False + + return True + + def process(self, session, **kwargs): + + # Context inputs + project_name = session["AVALON_PROJECT"] + asset_name = session.get("AVALON_ASSET", None) + task_name = session.get("AVALON_TASK", None) + + project = get_project(project_name=project_name, + fields=["data.zou_id"]) + if not project: + raise RuntimeError(f"Project {project_name} not found.") + + project_zou_id = project["data"].get("zou_id") + if not project_zou_id: + raise RuntimeError(f"Project {project_name} has no " + f"connected ftrack id.") + + asset_zou_data = None + task_zou_id = None + asset_zou_name = None + asset_zou_id = None + asset_zou_type = 'Assets' + zou_sub_type = ['AssetType','Sequence'] + if asset_name: + asset_zou_name = asset_name + asset_fields = ["data.zou.id", "data.zou.type"] + if task_name: + asset_fields.append(f"data.tasks.{task_name}.zou.id") + + asset = get_asset_by_name(project_name, + asset_name=asset_name, + fields=asset_fields) + + asset_zou_data = asset["data"].get("zou") + + if asset_zou_data: + asset_zou_type = asset_zou_data["type"] + if not asset_zou_type in zou_sub_type: + asset_zou_id = asset_zou_data["id"] + else: + asset_zou_type = asset_name + + + if task_name: + task_data = asset["data"]["tasks"][task_name] + task_zou_data = task_data.get("zou", {}) + if not task_zou_data: + self.log.debug(f"No zou task data for task: {task_name}") + task_zou_id = task_zou_data["id"] + + # Define URL + url = self.get_url(project_id=project_zou_id, + asset_name=asset_zou_name, + asset_id=asset_zou_id, + asset_type=asset_zou_type, + task_id=task_zou_id) + + # Open URL in webbrowser + self.log.info(f"Opening URL: {url}") + webbrowser.open(url, + # Try in new tab + new=2) + + def get_url(self, + project_id, + asset_name=None, + asset_id=None, + asset_type=None, + task_id=None): + + shots_url = ['Shots','Sequence','Shot'] + sub_type = ['AssetType','Sequence'] + kitsu_module = self.get_kitsu_module() + + # Get kitsu url with /api stripped + kitsu_url = kitsu_module.server_url + if kitsu_url.endswith("/api"): + kitsu_url = kitsu_url[:-len("/api")] + + sub_url = f"/productions/{project_id}" + asset_type_url = "Assets" + + # Add redirection url for shots_url list + if asset_type in shots_url: + asset_type_url = 'Shots' + + if task_id: + # Go to task page + # /productions/{project-id}/{asset_type}/tasks/{task_id} + sub_url += f"/{asset_type_url}/tasks/{task_id}" + + elif asset_id: + # Go to asset or shot page + # /productions/{project-id}/assets/{entity_id} + # /productions/{project-id}/shots/{entity_id} + sub_url += f"/{asset_type_url}/{asset_id}" + + else: + # Go to project page + # Project page must end with a view + # /productions/{project-id}/assets/ + # Add search method if is a sub_type + sub_url += f"/{asset_type_url}" + if asset_type in sub_type: + sub_url += f'?search={asset_name}' + + return f"{kitsu_url}{sub_url}" diff --git a/openpype/modules/kitsu/kitsu_module.py b/openpype/modules/kitsu/kitsu_module.py index d19d14dda7..23c032715b 100644 --- 
a/openpype/modules/kitsu/kitsu_module.py +++ b/openpype/modules/kitsu/kitsu_module.py @@ -89,7 +89,10 @@ class KitsuModule(OpenPypeModule, IPluginPaths, ITrayAction): """Implementation of abstract method for `IPluginPaths`.""" current_dir = os.path.dirname(os.path.abspath(__file__)) - return {"publish": [os.path.join(current_dir, "plugins", "publish")]} + return { + "publish": [os.path.join(current_dir, "plugins", "publish")], + "actions": [os.path.join(current_dir, "actions")] + } def cli(self, click_group): click_group.add_command(cli_main) From 7ca64b67c39eff858028f2aa52fca2d3b6de8f90 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:22:00 +0200 Subject: [PATCH 1016/2550] modified attr defs creation --- .../hosts/maya/api/new_template_builder.py | 33 +++++++++++-------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index 023f240061..06d1cf0fd7 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -265,6 +265,7 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): return [ attribute_definitions.UISeparatorDef(), attribute_definitions.UILabelDef("Main attributes"), + attribute_definitions.UISeparatorDef(), attribute_definitions.EnumDef( "builder_type", @@ -307,22 +308,26 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): (loader_name, loader_name) for loader_name in loader_names ], - tooltip="""Loader -Defines what OpenPype loader will be used to load assets. -Useable loader depends on current host's loader list. -Field is case sensitive. -""" + tooltip=( + "Loader" + "\nDefines what OpenPype loader will be used to" + " load assets." + "\nUseable loader depends on current host's loader list." + "\nField is case sensitive." + ) ), attribute_definitions.TextDef( "loader_args", label="Loader Arguments", default=options.get("loader_args"), placeholder='{"camera":"persp", "lights":True}', - tooltip="""Loader -Defines a dictionnary of arguments used to load assets. -Useable arguments depend on current placeholder Loader. -Field should be a valid python dict. Anything else will be ignored. -""" + tooltip=( + "Loader" + "\nDefines a dictionnary of arguments used to load assets." + "\nUseable arguments depend on current placeholder Loader." + "\nField should be a valid python dict." + " Anything else will be ignored." + ) ), attribute_definitions.NumberDef( "order", @@ -331,9 +336,11 @@ Field should be a valid python dict. Anything else will be ignored. decimals=0, minimum=0, maximum=999, - tooltip="""Order -Order defines asset loading priority (0 to 999) -Priority rule is : "lowest is first to load".""" + tooltip=( + "Order" + "\nOrder defines asset loading priority (0 to 999)" + "\nPriority rule is : \"lowest is first to load\"." 
+ ) ), attribute_definitions.UISeparatorDef(), attribute_definitions.UILabelDef("Optional attributes"), From f3ce64c1893bc2ea1b81fa254b6a8bc8734b587d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:22:19 +0200 Subject: [PATCH 1017/2550] copied placeholder name creation --- .../hosts/maya/api/new_template_builder.py | 25 +++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index 06d1cf0fd7..36a74e9b9a 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -1,4 +1,6 @@ import re +import json + from maya import cmds from openpype.client import get_representations @@ -146,8 +148,27 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): return placeholder_data def _create_placeholder_name(self, placeholder_data): - # TODO implement placeholder name logic - return "Placeholder" + placeholder_name_parts = placeholder_data["builder_type"].split("_") + + pos = 1 + # add famlily in any + placeholder_family = placeholder_data["family"] + if placeholder_family: + placeholder_name_parts.insert(pos, placeholder_family) + pos += 1 + + # add loader arguments if any + loader_args = placeholder_data["loader_args"] + if loader_args: + loader_args = json.loads(loader_args.replace('\'', '\"')) + values = [v for v in loader_args.values()] + for value in values: + placeholder_name_parts.insert(pos, value) + pos += 1 + + placeholder_name = "_".join(placeholder_name_parts) + + return placeholder_name.capitalize() def create_placeholder(self, placeholder_data): selection = cmds.ls(selection=True) From 88f16988a4786c5b8144ca1d0988c27b973637e3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:22:57 +0200 Subject: [PATCH 1018/2550] fix build dialog and renamed it to placeholder dialog --- .../tools/workfile_template_build/__init__.py | 4 +- .../tools/workfile_template_build/window.py | 40 ++++++++++++------- 2 files changed, 27 insertions(+), 17 deletions(-) diff --git a/openpype/tools/workfile_template_build/__init__.py b/openpype/tools/workfile_template_build/__init__.py index 70b8867759..82a22aea50 100644 --- a/openpype/tools/workfile_template_build/__init__.py +++ b/openpype/tools/workfile_template_build/__init__.py @@ -1,5 +1,5 @@ -from .window import WorkfileBuildDialog +from .window import WorkfileBuildPlaceholderDialog __all__ = ( - "WorkfileBuildDialog", + "WorkfileBuildPlaceholderDialog", ) diff --git a/openpype/tools/workfile_template_build/window.py b/openpype/tools/workfile_template_build/window.py index a5cec465ec..2e531026cf 100644 --- a/openpype/tools/workfile_template_build/window.py +++ b/openpype/tools/workfile_template_build/window.py @@ -6,9 +6,9 @@ from openpype.pipeline import legacy_io from openpype.widgets.attribute_defs import AttributeDefinitionsWidget -class WorkfileBuildDialog(QtWidgets.QDialog): +class WorkfileBuildPlaceholderDialog(QtWidgets.QDialog): def __init__(self, host, builder, parent=None): - super(WorkfileBuildDialog, self).__init__(parent) + super(WorkfileBuildPlaceholderDialog, self).__init__(parent) self.setWindowTitle("Workfile Placeholder Manager") self._log = None @@ -78,7 +78,7 @@ class WorkfileBuildDialog(QtWidgets.QDialog): def _clear_content_widget(self): while self._content_layout.count() > 0: item = self._content_layout.takeAt(0) - widget = item.widget + widget = item.widget() if widget: widget.setVisible(False) widget.deleteLater() @@ -90,6 
+90,7 @@ class WorkfileBuildDialog(QtWidgets.QDialog): def refresh(self): self._first_refreshed = True + self._clear_content_widget() if not self._builder: @@ -100,11 +101,19 @@ class WorkfileBuildDialog(QtWidgets.QDialog): self._update_ui_visibility() return + placeholder_plugins = self._builder.placeholder_plugins + if self._mode == 1: + self._last_selected_plugin + plugin = self._builder.placeholder_plugins.get( + self._last_selected_plugin + ) + self._create_option_widgets( + plugin, self._update_item.to_dict() + ) self._update_ui_visibility() return - placeholder_plugins = builder.placeholder_plugins if not placeholder_plugins: self._add_message_to_content(( "Host \"{}\" does not have implemented plugins" @@ -142,15 +151,16 @@ class WorkfileBuildDialog(QtWidgets.QDialog): self._mode = 1 self._update_item = update_item - if not update_item: - self._add_message_to_content(( - "Nothing to update." - " (You maybe don't have selected placeholder.)" - )) - else: - self._create_option_widgets( - update_item.plugin, update_item.to_dict() - ) + if update_item: + self._last_selected_plugin = update_item.plugin.identifier + self.refresh() + return + + self._clear_content_widget() + self._add_message_to_content(( + "Nothing to update." + " (You maybe don't have selected placeholder.)" + )) self._update_ui_visibility() def _create_option_widgets(self, plugin, options=None): @@ -160,6 +170,7 @@ class WorkfileBuildDialog(QtWidgets.QDialog): self._content_layout.addWidget(widget, 0) self._content_layout.addStretch(1) self._attr_defs_widget = widget + self._last_selected_plugin = plugin.identifier def _update_ui_visibility(self): create_mode = self._mode == 0 @@ -182,7 +193,6 @@ class WorkfileBuildDialog(QtWidgets.QDialog): if plugin_identifier == self._last_selected_plugin: return - self._last_selected_plugin = plugin_identifier plugin = self._builder.placeholder_plugins.get(plugin_identifier) self._create_option_widgets(plugin) @@ -222,7 +232,7 @@ class WorkfileBuildDialog(QtWidgets.QDialog): self.reject() def showEvent(self, event): - super(WorkfileBuildDialog, self).showEvent(event) + super(WorkfileBuildPlaceholderDialog, self).showEvent(event) if not self._first_refreshed: self.refresh() From 8f2bf758f3963badfa6a211211eda4ea5efe9326 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:23:31 +0200 Subject: [PATCH 1019/2550] fixed get of value from attribute defs widget --- openpype/widgets/attribute_defs/widgets.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 60ae952553..dc697b08a6 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -108,10 +108,12 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): row = 0 for attr_def in attr_defs: - if attr_def.key in self._current_keys: - raise KeyError("Duplicated key \"{}\"".format(attr_def.key)) + if not isinstance(attr_def, UIDef): + if attr_def.key in self._current_keys: + raise KeyError( + "Duplicated key \"{}\"".format(attr_def.key)) - self._current_keys.add(attr_def.key) + self._current_keys.add(attr_def.key) widget = create_widget_for_attr_def(attr_def, self) expand_cols = 2 From 187eaaec2d9f17d01cff4cc43bd3b01c58d679e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:24:37 +0200 Subject: [PATCH 1020/2550] renamed 'process_scene_placeholders' to 'populate_scene_placeholders' --- .../pipeline/workfile/new_template_loader.py | 19 
+++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index b1231c2308..a59394b09c 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -233,7 +233,7 @@ class AbstractTemplateLoader: Import template in current host. Should load the content of template into scene so - 'process_scene_placeholders' can be started. + 'populate_scene_placeholders' can be started. Args: template_path (str): Fullpath for current task and @@ -270,7 +270,7 @@ class AbstractTemplateLoader: plugin = plugins_by_identifier[identifier] plugin.prepare_placeholders(placeholders) - def process_scene_placeholders(self, level_limit=None): + def populate_scene_placeholders(self, level_limit=None): """Find placeholders in scene using plugins and process them. This should happen after 'import_template'. @@ -285,8 +285,7 @@ class AbstractTemplateLoader: placeholder's 'scene_identifier'. Args: - level_limit (int): Level of loops that can happen. By default - if is possible to have infinite nested placeholder processing. + level_limit (int): Level of loops that can happen. Default is 1000. """ if not self.placeholder_plugins: @@ -298,6 +297,11 @@ class AbstractTemplateLoader: self.log.warning("No placeholders were found.") return + # Avoid infinite loop + # - 1000 iterations of placeholders processing must be enough + if not level_limit: + level_limit = 1000 + placeholder_by_scene_id = { placeholder.scene_identifier: placeholder for placeholder in placeholders @@ -335,10 +339,9 @@ class AbstractTemplateLoader: # Clear shared data before getting new placeholders self.clear_shared_data() - if level_limit: - iter_counter += 1 - if iter_counter >= level_limit: - break + iter_counter += 1 + if iter_counter >= level_limit: + break all_processed = True collected_placeholders = self.get_placeholders() From 1ee1c2858856eae18b38244ea179580bb5ba2fb9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:24:54 +0200 Subject: [PATCH 1021/2550] added method to build template --- openpype/pipeline/workfile/new_template_loader.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index a59394b09c..3fee84a4e8 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -227,6 +227,12 @@ class AbstractTemplateLoader: key=lambda i: i.order )) + def build_template(self, template_path=None, level_limit=None): + if template_path is None: + template_path = self.get_template_path() + self.import_template(template_path) + self.populate_scene_placeholders(level_limit) + @abstractmethod def import_template(self, template_path): """ From 326d21d86a2d99f36bc8f420b20b271bd0a894b1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:25:11 +0200 Subject: [PATCH 1022/2550] use 'get_placeholder_plugin_classes' from host --- openpype/pipeline/workfile/new_template_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 3fee84a4e8..dbf58fe15d 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -174,7 +174,7 @@ class AbstractTemplateLoader: if self._placeholder_plugins is None: 
placeholder_plugins = {} - for cls in self.get_placeholder_plugin_classes(): + for cls in self.host.get_placeholder_plugin_classes(): try: plugin = cls(self) placeholder_plugins[plugin.identifier] = plugin From ffab250bf822a6440670ece4d815199f4826a52a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:26:45 +0200 Subject: [PATCH 1023/2550] changed how 'get_placeholder_plugin_classes' is implemented --- .../pipeline/workfile/new_template_loader.py | 48 ++++++++++++------- 1 file changed, 31 insertions(+), 17 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index dbf58fe15d..64e3021f00 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -8,7 +8,12 @@ import six from openpype.client import get_asset_by_name from openpype.settings import get_project_settings from openpype.host import HostBase -from openpype.lib import Logger, StringTemplate, filter_profiles +from openpype.lib import ( + Logger, + StringTemplate, + filter_profiles, +) +from openpype.lib.attribute_definitions import get_attributes_keys from openpype.pipeline import legacy_io, Anatomy from openpype.pipeline.load import get_loaders_by_name from openpype.pipeline.create import get_legacy_creator_by_name @@ -31,12 +36,26 @@ class AbstractTemplateLoader: _log = None def __init__(self, host): - # Store host - self._host = host + # Prepare context information + project_name = legacy_io.active_project() + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + current_asset_doc = get_asset_by_name(project_name, asset_name) + task_type = ( + current_asset_doc + .get("data", {}) + .get("tasks", {}) + .get(task_name, {}) + .get("type") + ) + + # Get host name if isinstance(host, HostBase): host_name = host.name else: host_name = os.environ.get("AVALON_APP") + + self._host = host self._host_name = host_name # Shared data across placeholder plugins @@ -47,29 +66,24 @@ class AbstractTemplateLoader: self._loaders_by_name = None self._creators_by_name = None - project_name = legacy_io.active_project() - asset_name = legacy_io.Session["AVALON_ASSET"] - self.current_asset = asset_name self.project_name = project_name - self.task_name = legacy_io.Session["AVALON_TASK"] - self.current_asset_doc = get_asset_by_name(project_name, asset_name) - self.task_type = ( - self.current_asset_doc - .get("data", {}) - .get("tasks", {}) - .get(self.task_name, {}) - .get("type") - ) + self.task_name = task_name + self.current_asset_doc = current_asset_doc + self.task_type = task_type - @abstractmethod def get_placeholder_plugin_classes(self): """Get placeholder plugin classes that can be used to build template. + Default implementation looks for 'get_placeholder_plugin_classes' on + host. + Returns: List[PlaceholderPlugin]: Plugin classes available for host. 
""" + if hasattr(self._host, "get_placeholder_plugin_classes"): + return self._host.get_placeholder_plugin_classes() return [] @property @@ -174,7 +188,7 @@ class AbstractTemplateLoader: if self._placeholder_plugins is None: placeholder_plugins = {} - for cls in self.host.get_placeholder_plugin_classes(): + for cls in self.get_placeholder_plugin_classes(): try: plugin = cls(self) placeholder_plugins[plugin.identifier] = plugin From d5346a14d90f7746d7ee589e17d6315c0f7baaea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 12:28:32 +0200 Subject: [PATCH 1024/2550] implemented helper functions to receive placeholder option keys --- openpype/lib/attribute_definitions.py | 21 +++++++++++++++++++ .../pipeline/workfile/new_template_loader.py | 11 ++++++++++ 2 files changed, 32 insertions(+) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 17658eef93..cbd53d1f07 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -9,6 +9,27 @@ import six import clique +def get_attributes_keys(attribute_definitions): + """Collect keys from list of attribute definitions. + + Args: + attribute_definitions (List[AbtractAttrDef]): Objects of attribute + definitions. + + Returns: + Set[str]: Keys that will be created using passed attribute definitions. + """ + + keys = set() + if not attribute_definitions: + return keys + + for attribute_def in attribute_definitions: + if not isinstance(attribute_def, UIDef): + keys.add(attribute_def.key) + return keys + + class AbstractAttrDefMeta(ABCMeta): """Meta class to validate existence of 'key' attribute. diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 64e3021f00..21bfa3650d 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -547,6 +547,17 @@ class PlaceholderPlugin(object): return [] + def get_placeholder_keys(self): + """Get placeholder keys that are stored in scene. + + Returns: + Set[str]: Key of placeholder keys that are stored in scene. + """ + + option_keys = get_attributes_keys(self.get_placeholder_options()) + option_keys.add("plugin_identifier") + return option_keys + def prepare_placeholders(self, placeholders): """Preparation part of placeholders. 
From d0665c3928ecff4176f6e22b676384f09485c173 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 13:06:12 +0200 Subject: [PATCH 1025/2550] Change `mayaascii` -> `mayaAscii` --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 2 +- openpype/settings/defaults/project_settings/ftrack.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 6024781d87..7e5815b100 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -35,7 +35,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): family_mapping = { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 09b194e21c..cdf861df4a 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -455,7 +455,7 @@ "family_mapping": { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", From 6401c4064150f316ae96cd39585edf6b77a4cdac Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 13:14:05 +0200 Subject: [PATCH 1026/2550] changed how placeholders are cached --- .../hosts/maya/api/new_template_builder.py | 39 +++++++++++-------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index 36a74e9b9a..b25d75452c 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -44,7 +44,7 @@ class MayaTemplateLoader(AbstractTemplateLoader): cmds.sets(name=PLACEHOLDER_SET, empty=True) cmds.file(path, i=True, returnNewNodes=True) - cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True) + cmds.setAttr(PLACEHOLDER_SET + ".hiddenInOutliner", True) # This should be handled by creators # for set_name in cmds.listSets(allSets=True): @@ -116,10 +116,13 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): placeholder_nodes = self.builder.get_shared_data("placeholder_nodes") if placeholder_nodes is None: attributes = cmds.ls("*.plugin_identifier", long=True) - placeholder_nodes = [ - self._parse_placeholder_node_data(attribute.rpartition(".")[0]) - for attribute in attributes - ] + placeholder_nodes = {} + for attribute in attributes: + node_name = attribute.rpartition(".")[0] + placeholder_nodes[node_name] = ( + self._parse_placeholder_node_data(node_name) + ) + self.builder.set_shared_data( "placeholder_nodes", placeholder_nodes ) @@ -182,7 +185,6 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): placeholder_name = self._create_placeholder_name(placeholder_data) placeholder = cmds.spaceLocator(name=placeholder_name)[0] - # TODO: this can crash if selection can't be used cmds.parent(placeholder, selection[0]) @@ -221,20 +223,23 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): new_values[key] = value placeholder_item.data[key] = value + for key in new_values.keys(): + cmds.deleteAttr(node_name + "." 
+ key) + imprint(node_name, new_values) def collect_placeholders(self): - filtered_placeholders = [] - for placeholder_data in self._collect_scene_placeholders(): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, placeholder_data in scene_placeholders.items(): if placeholder_data.get("plugin_identifier") != self.identifier: continue - filtered_placeholders.append(placeholder_data) - - output = [] - for placeholder_data in filtered_placeholders: # TODO do data validations and maybe updgrades if are invalid - output.append(LoadPlaceholder(placeholder_data)) + output.append( + LoadPlaceholder(node_name, placeholder_data, self) + ) + return output def process_placeholder(self, placeholder): @@ -429,8 +434,8 @@ class LoadPlaceholder(PlaceholderItem): elif not cmds.sets(root, q=True): return - if self.data['parent']: - cmds.parent(nodes_to_parent, self.data['parent']) + if self.data["parent"]: + cmds.parent(nodes_to_parent, self.data["parent"]) # Move loaded nodes to correct index in outliner hierarchy placeholder_form = cmds.xform( self._scene_identifier, @@ -440,7 +445,7 @@ class LoadPlaceholder(PlaceholderItem): ) for node in set(nodes_to_parent): cmds.reorder(node, front=True) - cmds.reorder(node, relative=self.data['index']) + cmds.reorder(node, relative=self.data["index"]) cmds.xform(node, matrix=placeholder_form, ws=True) holding_sets = cmds.listSets(object=self._scene_identifier) @@ -470,7 +475,7 @@ class LoadPlaceholder(PlaceholderItem): node = cmds.parent(node, world=True)[0] cmds.sets(node, addElement=PLACEHOLDER_SET) cmds.hide(node) - cmds.setAttr(node + '.hiddenInOutliner', True) + cmds.setAttr(node + ".hiddenInOutliner", True) def get_representations(self, current_asset_doc, linked_asset_docs): project_name = legacy_io.active_project() From 5ccda391e4e4c980915c8f6954b5b43aaf454d86 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 13:23:10 +0200 Subject: [PATCH 1027/2550] changed loaders --- openpype/hosts/maya/api/new_template_builder.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index b25d75452c..bc1a0250a8 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -286,7 +286,12 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): def get_placeholder_options(self, options=None): loaders_by_name = self.builder.get_loaders_by_name() - loader_names = list(sorted(loaders_by_name.keys())) + loader_items = [ + (loader_name, loader.label or loader_name) + for loader_name, loader in loaders_by_name.items() + ] + + loader_items = list(sorted(loader_items, key=lambda i: i[0])) options = options or {} return [ attribute_definitions.UISeparatorDef(), @@ -330,10 +335,7 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): "loader", label="Loader", default=options.get("loader"), - items=[ - (loader_name, loader_name) - for loader_name in loader_names - ], + items=loader_items, tooltip=( "Loader" "\nDefines what OpenPype loader will be used to" From e5654595a83d88a5ac4dcea8bd7c9b7fe360f3bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 13:31:55 +0200 Subject: [PATCH 1028/2550] implemented 'get_workfile_build_placeholder_plugins' for maya --- openpype/hosts/maya/api/pipeline.py | 6 ++++++ openpype/pipeline/workfile/new_template_loader.py | 8 ++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git 
a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index c963b5d996..2492e75b36 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -35,6 +35,7 @@ from openpype.hosts.maya import MAYA_ROOT_DIR from openpype.hosts.maya.lib import copy_workspace_mel from . import menu, lib +from .new_template_builder import MayaLoadPlaceholderPlugin from .workio import ( open_file, save_file, @@ -123,6 +124,11 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): def get_containers(self): return ls() + def get_workfile_build_placeholder_plugins(self): + return [ + MayaLoadPlaceholderPlugin + ] + @contextlib.contextmanager def maintained_selection(self): with lib.maintained_selection(): diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 21bfa3650d..8b64306c18 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -75,15 +75,15 @@ class AbstractTemplateLoader: def get_placeholder_plugin_classes(self): """Get placeholder plugin classes that can be used to build template. - Default implementation looks for 'get_placeholder_plugin_classes' on - host. + Default implementation looks for method + 'get_workfile_build_placeholder_plugins' on host. Returns: List[PlaceholderPlugin]: Plugin classes available for host. """ - if hasattr(self._host, "get_placeholder_plugin_classes"): - return self._host.get_placeholder_plugin_classes() + if hasattr(self._host, "get_workfile_build_placeholder_plugins"): + return self._host.get_workfile_build_placeholder_plugins() return [] @property From c622eb7a59986ebe003205d66cd2ae101e1b23eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Sep 2022 13:54:33 +0200 Subject: [PATCH 1029/2550] flame: add ui to project media panel --- openpype/hosts/flame/api/__init__.py | 4 ++-- openpype/hosts/flame/api/menu.py | 2 +- openpype/hosts/flame/startup/openpype_in_flame.py | 15 ++++++++++++++- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 7da91d41e4..c00ee958b6 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -52,7 +52,7 @@ from .pipeline import ( from .menu import ( FlameMenuProjectConnect, FlameMenuTimeline, - FlameMenuBatch + FlameMenuUniversal ) from .plugin import ( Creator, @@ -132,7 +132,7 @@ __all__ = [ # menu "FlameMenuProjectConnect", "FlameMenuTimeline", - "FlameMenuBatch", + "FlameMenuUniversal", # plugin "Creator", diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index a822059930..f72a352bba 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -203,7 +203,7 @@ class FlameMenuTimeline(_FlameMenuApp): self.log.info('Rescan Python Hooks') -class FlameMenuBatch(_FlameMenuApp): +class FlameMenuUniversal(_FlameMenuApp): # flameMenuProjectconnect app takes care of the preferences dialog as well diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index 60f6612b7f..9fdc30db5d 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -194,6 +194,7 @@ def get_timeline_custom_ui_actions(): return _build_app_menu("FlameMenuTimeline") + def get_batch_custom_ui_actions(): """Hook to create submenu in batch @@ -203,4 +204,16 @@ def 
get_batch_custom_ui_actions(): # install openpype and the host openpype_install() - return _build_app_menu("FlameMenuBatch") \ No newline at end of file + return _build_app_menu("FlameMenuUniversal") + + +def get_media_panel_custom_ui_actions(): + """Hook to create submenu in desktop + + Returns: + list: menu object + """ + # install openpype and the host + openpype_install() + + return _build_app_menu("FlameMenuUniversal") From 7b78e09eaec1c7e243782c13079ff70ca6c06d23 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Sep 2022 13:59:15 +0200 Subject: [PATCH 1030/2550] fixing name of class --- openpype/hosts/flame/startup/openpype_in_flame.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index 9fdc30db5d..d07aaa6b7d 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -74,7 +74,7 @@ def load_apps(): opfapi.CTX.flame_apps.append( opfapi.FlameMenuTimeline(opfapi.CTX.app_framework)) opfapi.CTX.flame_apps.append( - opfapi.FlameMenuBatch(opfapi.CTX.app_framework)) + opfapi.FlameMenuUniversal(opfapi.CTX.app_framework)) opfapi.CTX.app_framework.log.info("Apps are loaded") From 1dee7ceb6464f37c2582b5dbc8d10e0a20327197 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 14:09:38 +0200 Subject: [PATCH 1031/2550] Tweak logging for attribute validation check + clean-up splitting logic --- .../plugins/publish/validate_rendersettings.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 08ecc0d149..25674effa8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -257,14 +257,18 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): # go through definitions and test if such node.attribute exists. # if so, compare its value from the one required. for attr, value in OrderedDict(validation_settings).items(): - # first get node of that type cls.log.debug("{}: {}".format(attr, value)) - node_type = attr.split(".")[0] - attribute_name = ".".join(attr.split(".")[1:]) + if "." not in attr: + cls.log.warning("Skipping invalid attribute defined in " + "validation settings: '{}'".format(attr)) + + node_type, attribute_name = attr.split(".", 1) + + # first get node of that type nodes = cmds.ls(type=node_type) - if not isinstance(nodes, list): - cls.log.warning("No nodes of '{}' found.".format(node_type)) + if not nodes: + cls.log.info("No nodes of type '{}' found.".format(node_type)) continue for node in nodes: From dc2d6e59a4ed0f53a77b09479f2b038c9b25f383 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 14:10:05 +0200 Subject: [PATCH 1032/2550] Skip invalid attr correctly --- openpype/hosts/maya/plugins/publish/validate_rendersettings.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 25674effa8..883b0daa88 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -261,6 +261,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): if "." 
not in attr: cls.log.warning("Skipping invalid attribute defined in " "validation settings: '{}'".format(attr)) + continue node_type, attribute_name = attr.split(".", 1) From dfb0677971ef10a1bf919b87e70266d4c2b7888a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 14:13:36 +0200 Subject: [PATCH 1033/2550] Revert message back to a warning --- openpype/hosts/maya/plugins/publish/validate_rendersettings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 883b0daa88..818f814076 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -269,7 +269,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): nodes = cmds.ls(type=node_type) if not nodes: - cls.log.info("No nodes of type '{}' found.".format(node_type)) + cls.log.warning( + "No nodes of type '{}' found.".format(node_type)) continue for node in nodes: From 42f575fca94722a25ae462a5112a843799d9aad1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 14:23:36 +0200 Subject: [PATCH 1034/2550] actions are expected as list so each application in group is stored if force not open workfile is enabled --- openpype/tools/launcher/models.py | 46 +++++++++++++++++++------------ 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 6d40d21f96..6e3b531018 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -281,18 +281,25 @@ class ActionModel(QtGui.QStandardItemModel): if not action_item: return - action = action_item.data(ACTION_ROLE) - actual_data = self._prepare_compare_data(action) + actions = action_item.data(ACTION_ROLE) + if not isinstance(actions, list): + actions = [actions] + + action_actions_data = [ + self._prepare_compare_data(action) + for action in actions + ] stored = self.launcher_registry.get_item("force_not_open_workfile") - if is_checked: - stored.append(actual_data) - else: - final_values = [] - for config in stored: - if config != actual_data: - final_values.append(config) - stored = final_values + for actual_data in action_actions_data: + if is_checked: + stored.append(actual_data) + else: + final_values = [] + for config in stored: + if config != actual_data: + final_values.append(config) + stored = final_values self.launcher_registry.set_item("force_not_open_workfile", stored) self.launcher_registry._get_item.cache_clear() @@ -329,21 +336,24 @@ class ActionModel(QtGui.QStandardItemModel): item (QStandardItem) stored (list) of dict """ - action = item.data(ACTION_ROLE) - if not self.is_application_action(action): + + actions = item.data(ACTION_ROLE) + if not isinstance(actions, list): + actions = [actions] + + if not self.is_application_action(actions[0]): return False - actual_data = self._prepare_compare_data(action) + action_actions_data = [ + self._prepare_compare_data(action) + for action in actions + ] for config in stored: - if config == actual_data: + if config in action_actions_data: return True - return False def _prepare_compare_data(self, action): - if isinstance(action, list) and action: - action = action[0] - compare_data = {} if action and action.label: compare_data = { From 98c065cb8b3c8f75ae0479da1a3287ebbe0b22d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 14:24:02 +0200 Subject: [PATCH 
1035/2550] change "start_last_workfile" when triggered from group --- openpype/tools/launcher/widgets.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 62599664fe..774ceb659d 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -312,11 +312,12 @@ class ActionBar(QtWidgets.QWidget): is_group = index.data(GROUP_ROLE) is_variant_group = index.data(VARIANT_GROUP_ROLE) + force_not_open_workfile = index.data(FORCE_NOT_OPEN_WORKFILE_ROLE) if not is_group and not is_variant_group: action = index.data(ACTION_ROLE) # Change data of application action if issubclass(action, ApplicationAction): - if index.data(FORCE_NOT_OPEN_WORKFILE_ROLE): + if force_not_open_workfile: action.data["start_last_workfile"] = False else: action.data.pop("start_last_workfile", None) @@ -385,10 +386,18 @@ class ActionBar(QtWidgets.QWidget): menu.addMenu(sub_menu) result = menu.exec_(QtGui.QCursor.pos()) - if result: - action = actions_mapping[result] - self._start_animation(index) - self.action_clicked.emit(action) + if not result: + return + + action = actions_mapping[result] + if issubclass(action, ApplicationAction): + if force_not_open_workfile: + action.data["start_last_workfile"] = False + else: + action.data.pop("start_last_workfile", None) + + self._start_animation(index) + self.action_clicked.emit(action) class ActionHistory(QtWidgets.QPushButton): From 44757580b78970566fb8e3a5ac3871425dacf408 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 15:08:10 +0200 Subject: [PATCH 1036/2550] Fix very slow `get_container_members` calls for instances --- openpype/hosts/maya/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 58e160cb2f..06faa123f5 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1532,7 +1532,7 @@ def get_container_members(container): if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): continue - reference_members = cmds.referenceQuery(ref, nodes=True) + reference_members = cmds.referenceQuery(ref, nodes=True, dagPath=True) reference_members = cmds.ls(reference_members, long=True, objectsOnly=True) From d3eca627de790a3e73c58cf96ff03f14bf2f7d96 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 16:52:39 +0200 Subject: [PATCH 1037/2550] added some more options for shared data --- .../pipeline/workfile/new_template_loader.py | 122 ++++++++++++++++-- 1 file changed, 110 insertions(+), 12 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 8b64306c18..baca11d730 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -60,6 +60,7 @@ class AbstractTemplateLoader: # Shared data across placeholder plugins self._shared_data = {} + self._shared_populate_data = {} # Where created objects of placeholder plugins will be stored self._placeholder_plugins = None @@ -121,14 +122,7 @@ class AbstractTemplateLoader: self._loaders_by_name = None self._creators_by_name = None self.clear_shared_data() - - def clear_shared_data(self): - """Clear shared data. - - Method only clear shared data to default state. 
- """ - - self._shared_data = {} + self.clear_shared_populate_data() def get_loaders_by_name(self): if self._loaders_by_name is None: @@ -147,8 +141,6 @@ class AbstractTemplateLoader: items if the storing is unified but each placeholder plugin would have to call it again. - Shared data are cleaned up on specific callbacks. - Args: key (str): Key under which are shared data stored. @@ -169,8 +161,6 @@ class AbstractTemplateLoader: - wrong: 'asset' - good: 'asset_name' - Shared data are cleaned up on specific callbacks. - Args: key (str): Key under which is key stored. value (Any): Value that should be stored under the key. @@ -178,6 +168,72 @@ class AbstractTemplateLoader: self._shared_data[key] = value + def clear_shared_data(self): + """Clear shared data. + + Method only clear shared data to default state. + """ + + self._shared_data = {} + + def clear_shared_populate_data(self): + """Receive shared data across plugins and placeholders. + + These data are cleared after each loop of populating of template. + + This can be used to scroll scene only once to look for placeholder + items if the storing is unified but each placeholder plugin would have + to call it again. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + self._shared_populate_data = {} + + def get_shared_populate_data(self, key): + """Store share populate data across plugins and placeholders. + + These data are cleared after each loop of populating of template. + + Store data that can be afterwards accessed from any future call. It + is good practice to check if the same value is not already stored under + different key or if the key is not already used for something else. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + return self._shared_populate_data.get(key) + + def set_shared_populate_data(self, key, value): + """Store share populate data across plugins and placeholders. + + These data are cleared after each loop of populating of template. + + Store data that can be afterwards accessed from any future call. It + is good practice to check if the same value is not already stored under + different key or if the key is not already used for something else. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + self._shared_populate_data[key] = value + @property def placeholder_plugins(self): """Access to initialized placeholder plugins. @@ -636,6 +692,48 @@ class PlaceholderPlugin(object): plugin_data[key] = value self.builder.set_shared_data(self.identifier, plugin_data) + def get_plugin_shared_populate_data(self, key): + """Receive shared data across plugin and placeholders. + + Using shared populate data from builder but stored under plugin + identifier. + + Shared populate data are cleaned up during populate while loop. + + Args: + key (str): Key under which are shared data stored. + + Returns: + Union[None, Any]: None if key was not set. + """ + + plugin_data = self.builder.get_shared_populate_data(self.identifier) + if plugin_data is None: + return None + return plugin_data.get(key) + + def set_plugin_shared_populate_data(self, key, value): + """Store share data across plugin and placeholders. 
+ + Using shared data from builder but stored under plugin identifier. + + Key should be self explanatory to content. + - wrong: 'asset' + - good: 'asset_name' + + Shared populate data are cleaned up during populate while loop. + + Args: + key (str): Key under which is key stored. + value (Any): Value that should be stored under the key. + """ + + plugin_data = self.builder.get_shared_populate_data(self.identifier) + if plugin_data is None: + plugin_data = {} + plugin_data[key] = value + self.builder.set_shared_populate_data(self.identifier, plugin_data) + class PlaceholderItem(object): """Item representing single item in scene that is a placeholder to process. From e45fd5f99507d4cf6c8e4b7220651741b3bf564e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 16:53:06 +0200 Subject: [PATCH 1038/2550] changed 'process_placeholder' to 'populate_placeholder' --- openpype/pipeline/workfile/new_template_loader.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index baca11d730..953b7771b2 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -404,7 +404,7 @@ class AbstractTemplateLoader: placeholder.set_in_progress() placeholder_plugin = placeholder.plugin try: - placeholder_plugin.process_placeholder(placeholder) + placeholder_plugin.populate_placeholder(placeholder) except Exception as exc: placeholder.set_error(exc) @@ -625,7 +625,7 @@ class PlaceholderPlugin(object): pass @abstractmethod - def process_placeholder(self, placeholder): + def populate_placeholder(self, placeholder): """Process single placeholder item. Processing of placeholders is defined by their order thus can't be From 77d8d1b1ce05872a613e3e54ff1d2e182d4e5eb4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 16:53:21 +0200 Subject: [PATCH 1039/2550] implemented basics of update template placeholder --- .../pipeline/workfile/new_template_loader.py | 57 ++++++++++++------- 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 953b7771b2..b97e48dcba 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -303,6 +303,37 @@ class AbstractTemplateLoader: self.import_template(template_path) self.populate_scene_placeholders(level_limit) + def update_template(self): + """Go through existing placeholders in scene and update them. + + This could not make sense for all plugin types so this is optional + logic for plugins. + + Note: + Logic is not importing the template again but using placeholders + that were already available. We should maybe change the method + name. + + Question: + Should this also handle subloops as it is possible that another + template is loaded during processing? 
+ """ + + if not self.placeholder_plugins: + self.log.info("There are no placeholder plugins available.") + return + + placeholders = self.get_placeholders() + if not placeholders: + self.log.info("No placeholders were found.") + return + + for placeholder in placeholders: + plugin = placeholder.plugin + plugin.update_template_placeholder(placeholder) + + self.clear_shared_populate_data() + @abstractmethod def import_template(self, template_path): """ @@ -318,12 +349,6 @@ class AbstractTemplateLoader: pass - # def template_already_imported(self, err_msg): - # pass - # - # def template_loading_failed(self, err_msg): - # pass - def _prepare_placeholders(self, placeholders): """Run preparation part for placeholders on plugins. @@ -413,7 +438,7 @@ class AbstractTemplateLoader: placeholder.set_finished() # Clear shared data before getting new placeholders - self.clear_shared_data() + self.clear_shared_populate_data() iter_counter += 1 if iter_counter >= level_limit: @@ -430,6 +455,8 @@ class AbstractTemplateLoader: placeholder_by_scene_id[identifier] = placeholder placeholders.append(placeholder) + self.refresh() + def _get_build_profiles(self): project_settings = get_project_settings(self.project_name) return ( @@ -582,6 +609,7 @@ class PlaceholderPlugin(object): placeholder_data (Dict[str, Any]): Data related to placeholder. Should match plugin options. """ + pass @abstractmethod @@ -638,15 +666,10 @@ class PlaceholderPlugin(object): pass - def cleanup_placeholders(self, placeholders): - """Cleanup of placeholders after processing. + def update_template_placeholder(self, placeholder): + """Update scene with current context for passed placeholder. - Not: - Passed placeholders can be failed. - - Args: - placeholders (List[PlaceholderItem]): List of placeholders that - were be processed. + Can be used to re-run placeholder logic (if it make sense). """ pass @@ -656,8 +679,6 @@ class PlaceholderPlugin(object): Using shared data from builder but stored under plugin identifier. - Shared data are cleaned up on specific callbacks. - Args: key (str): Key under which are shared data stored. @@ -679,8 +700,6 @@ class PlaceholderPlugin(object): - wrong: 'asset' - good: 'asset_name' - Shared data are cleaned up on specific callbacks. - Args: key (str): Key under which is key stored. value (Any): Value that should be stored under the key. 
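Note: the snippet below is an illustrative sketch of how the build and update entry points added above are meant to be driven; it is not part of any patch in this series. It assumes an OpenPype-enabled Maya session where `registered_host()` returns the active host and where the workfile template path can be resolved from the project's build profiles.

    from openpype.pipeline import registered_host
    from openpype.hosts.maya.api.new_template_builder import (
        MayaTemplateLoader
    )

    builder = MayaTemplateLoader(registered_host())

    # Import the resolved template and populate its placeholders; the
    # populate loop is capped by 'level_limit' (1000 passes by default).
    builder.build_template()

    # Re-run placeholder logic for placeholders already present in the scene.
    builder.update_template()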
From 9821a05cdcf201b14319ba4a054adc735ea8d69a Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 9 Sep 2022 16:54:58 +0200
Subject: [PATCH 1040/2550] implemented update logic for maya load plugin

---
 .../hosts/maya/api/new_template_builder.py | 39 +++++++++++++++++--
 1 file changed, 35 insertions(+), 4 deletions(-)

diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py
index bc1a0250a8..9017e447c5 100644
--- a/openpype/hosts/maya/api/new_template_builder.py
+++ b/openpype/hosts/maya/api/new_template_builder.py
@@ -173,6 +173,25 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin):
 
         return placeholder_name.capitalize()
 
+    def _get_loaded_repre_ids(self):
+        loaded_representation_ids = self.builder.get_shared_populate_data(
+            "loaded_representation_ids"
+        )
+        if loaded_representation_ids is None:
+            try:
+                containers = cmds.sets("AVALON_CONTAINERS", q=True)
+            except ValueError:
+                containers = []
+
+            loaded_representation_ids = {
+                cmds.getAttr(container + ".representation")
+                for container in containers
+            }
+            self.builder.set_shared_populate_data(
+                "loaded_representation_ids", loaded_representation_ids
+            )
+        return loaded_representation_ids
+
     def create_placeholder(self, placeholder_data):
         selection = cmds.ls(selection=True)
         if not selection:
@@ -242,7 +261,17 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin):
 
         return output
 
-    def process_placeholder(self, placeholder):
+    def populate_placeholder(self, placeholder):
+        self._populate_placeholder(placeholder)
+
+    def update_template_placeholder(self, placeholder):
+        repre_ids = self._get_loaded_repre_ids()
+        self._populate_placeholder(placeholder, repre_ids)
+
+    def _populate_placeholder(self, placeholder, ignore_repre_ids=None):
+        if ignore_repre_ids is None:
+            ignore_repre_ids = set()
+
         current_asset_doc = self.current_asset_doc
         linked_assets = self.linked_assets
         loader_name = placeholder.data["loader"]
@@ -262,6 +291,10 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin):
         loaders_by_name = self.builder.get_loaders_by_name()
 
         for representation in placeholder_representations:
+            repre_id = str(representation["_id"])
+            if repre_id in ignore_repre_ids:
+                continue
+
             repre_context = representation["context"]
             self.log.info(
                 "Loading {} from {} with loader {}\n"
@@ -280,9 +313,7 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin):
             else:
                 placeholder.load_succeed(container)
 
-        # TODO find out if 'postload make sense?'
- # finally: - # self.postload(placeholder) + placeholder.clean() def get_placeholder_options(self, options=None): loaders_by_name = self.builder.get_loaders_by_name() From f133f401c059a4b03033e995ea67460de6fac732 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 16:55:09 +0200 Subject: [PATCH 1041/2550] small tweaks in code --- openpype/hosts/maya/api/new_template_builder.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index 9017e447c5..b28cc80cd1 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -61,11 +61,6 @@ class MayaTemplateLoader(AbstractTemplateLoader): return True - def get_placeholder_plugin_classes(self): - return [ - MayaLoadPlaceholderPlugin - ] - # def template_already_imported(self, err_msg): # clearButton = "Clear scene and build" # updateButton = "Update template" @@ -113,7 +108,9 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): def _collect_scene_placeholders(self): # Cache placeholder data to shared data - placeholder_nodes = self.builder.get_shared_data("placeholder_nodes") + placeholder_nodes = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) if placeholder_nodes is None: attributes = cmds.ls("*.plugin_identifier", long=True) placeholder_nodes = {} @@ -123,7 +120,7 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): self._parse_placeholder_node_data(node_name) ) - self.builder.set_shared_data( + self.builder.set_shared_populate_data( "placeholder_nodes", placeholder_nodes ) return placeholder_nodes From 8d60739fe43b96ea227e2af6b3d7889504448ae6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 16:55:24 +0200 Subject: [PATCH 1042/2550] implemented functions for building actions --- .../hosts/maya/api/new_template_builder.py | 47 ++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index b28cc80cd1..43e92c59e2 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -5,7 +5,7 @@ from maya import cmds from openpype.client import get_representations from openpype.lib import attribute_definitions -from openpype.pipeline import legacy_io +from openpype.pipeline import legacy_io, registered_host from openpype.pipeline.workfile.build_template_exceptions import ( TemplateAlreadyImported ) @@ -14,6 +14,9 @@ from openpype.pipeline.workfile.new_template_loader import ( PlaceholderPlugin, PlaceholderItem, ) +from openpype.tools.workfile_template_build import ( + WorkfileBuildPlaceholderDialog, +) from .lib import read, imprint @@ -566,3 +569,45 @@ class LoadPlaceholder(PlaceholderItem): def load_succeed(self, container): self.parent_in_hierarchy(container) + + +def build_workfile_template(): + builder = MayaTemplateLoader(registered_host()) + builder.build_template() + + +def update_workfile_template(): + builder = MayaTemplateLoader(registered_host()) + builder.update_build_template() + + +def create_placeholder(): + host = registered_host() + builder = MayaTemplateLoader(host) + window = WorkfileBuildPlaceholderDialog(host, builder) + window.exec_() + + +def update_placeholder(): + host = registered_host() + builder = MayaTemplateLoader(host) + placeholder_items_by_id = { + placeholder_item.scene_identifier: placeholder_item + for placeholder_item 
in builder.get_placeholders()
+    }
+    placeholder_items = []
+    for node_name in cmds.ls(selection=True, long=True):
+        if node_name in placeholder_items_by_id:
+            placeholder_items.append(placeholder_items_by_id[node_name])
+
+    # TODO show UI at least
+    if len(placeholder_items) == 0:
+        raise ValueError("No node selected")
+
+    if len(placeholder_items) > 1:
+        raise ValueError("Too many selected nodes")
+
+    placeholder_item = placeholder_items[0]
+    window = WorkfileBuildPlaceholderDialog(host, builder)
+    window.set_update_mode(placeholder_item)
+    window.exec_()

From 0102614531c70902c37b5bde494daf65718c805b Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 9 Sep 2022 16:55:34 +0200
Subject: [PATCH 1043/2550] use functions in menu

---
 openpype/hosts/maya/api/menu.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py
index 666f555660..0b3ea81403 100644
--- a/openpype/hosts/maya/api/menu.py
+++ b/openpype/hosts/maya/api/menu.py
@@ -9,16 +9,17 @@ import maya.cmds as cmds
 
 from openpype.settings import get_project_settings
 from openpype.pipeline import legacy_io
 from openpype.pipeline.workfile import BuildWorkfile
-from openpype.pipeline.workfile.build_template import (
-    build_workfile_template,
-    update_workfile_template
-)
 from openpype.tools.utils import host_tools
 from openpype.hosts.maya.api import lib, lib_rendersettings
 from .lib import get_main_window, IS_HEADLESS
 from .commands import reset_frame_range
-from .lib_template_builder import create_placeholder, update_placeholder
+from .new_template_builder import (
+    create_placeholder,
+    update_placeholder,
+    build_workfile_template,
+    update_workfile_template,
+)
 
 log = logging.getLogger(__name__)
 

From 113210a781ed5affd6e92f72d075a9522a07c0f0 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 9 Sep 2022 17:21:53 +0200
Subject: [PATCH 1044/2550] fix access to 'Pattern' attribute

---
 openpype/client/entities.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/openpype/client/entities.py b/openpype/client/entities.py
index a9112ac581..43afccf2f1 100644
--- a/openpype/client/entities.py
+++ b/openpype/client/entities.py
@@ -14,6 +14,8 @@ from bson.objectid import ObjectId
 
 from .mongo import get_project_database, get_project_connection
 
+PatternType = type(re.compile(""))
+
 
 def _prepare_fields(fields, required_fields=None):
     if not fields:
@@ -1054,11 +1056,11 @@ def _regex_filters(filters):
     for key, value in filters.items():
         regexes = []
         a_values = []
-        if isinstance(value, re.Pattern):
+        if isinstance(value, PatternType):
             regexes.append(value)
         elif isinstance(value, (list, tuple, set)):
             for item in value:
-                if isinstance(item, re.Pattern):
+                if isinstance(item, PatternType):
                     regexes.append(item)
                 else:
                     a_values.append(item)
@@ -1194,7 +1196,7 @@ def get_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter.
             Filter ignored if 'None' is passed.
- context_filters (Dict[str, List[str, re.Pattern]]): Filter by + context_filters (Dict[str, List[str, PatternType]]): Filter by representation context fields. names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering using version ids and list of names under the version. From c5ae7e5d6cb704794e444addf93bb9e903795dc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 9 Sep 2022 17:45:44 +0200 Subject: [PATCH 1045/2550] Update openpype/hosts/flame/plugins/publish/extract_subset_resources.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 1af6b00654..1d42330e23 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -96,7 +96,7 @@ class ExtractSubsetResources(openpype.api.Extractor): source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] - # retime if needed + # retime if needed if r_speed != 1.0: if retimed_handles: # handles are retimed From b25270ccddbb7b47cbc6bca978a8c539096e7798 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 17:58:07 +0200 Subject: [PATCH 1046/2550] use direct function calls --- openpype/hosts/maya/api/menu.py | 4 ++-- openpype/hosts/maya/api/new_template_builder.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 0b3ea81403..cc9a17fd72 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -162,12 +162,12 @@ def install(): cmds.menuItem( "Create Placeholder", parent=builder_menu, - command=lambda *args: create_placeholder() + command=create_placeholder ) cmds.menuItem( "Update Placeholder", parent=builder_menu, - command=lambda *args: update_placeholder() + command=update_placeholder ) cmds.menuItem( "Build Workfile from template", diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index 43e92c59e2..72d613afa3 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -571,24 +571,24 @@ class LoadPlaceholder(PlaceholderItem): self.parent_in_hierarchy(container) -def build_workfile_template(): +def build_workfile_template(*args): builder = MayaTemplateLoader(registered_host()) builder.build_template() -def update_workfile_template(): +def update_workfile_template(*args): builder = MayaTemplateLoader(registered_host()) builder.update_build_template() -def create_placeholder(): +def create_placeholder(*args): host = registered_host() builder = MayaTemplateLoader(host) window = WorkfileBuildPlaceholderDialog(host, builder) window.exec_() -def update_placeholder(): +def update_placeholder(*args): host = registered_host() builder = MayaTemplateLoader(host) placeholder_items_by_id = { From b2a59336eca51bbc163e66a830238e57c3671d3b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 17:58:20 +0200 Subject: [PATCH 1047/2550] use attributes from builder --- openpype/hosts/maya/api/new_template_builder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index 72d613afa3..80f62b8759 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -272,8 +272,8 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): if ignore_repre_ids is None: ignore_repre_ids = set() - current_asset_doc = self.current_asset_doc - linked_assets = self.linked_assets + current_asset_doc = self.builder.current_asset_doc + linked_assets = self.builder.linked_asset_docs loader_name = placeholder.data["loader"] loader_args = placeholder.data["loader_args"] From bd5d7ca4a6d53bdc23d573a9b190c70a077b0437 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 17:58:51 +0200 Subject: [PATCH 1048/2550] added more specific attributes --- .../pipeline/workfile/new_template_loader.py | 82 ++++++++++++++++--- 1 file changed, 70 insertions(+), 12 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index b97e48dcba..b23f03b0df 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -5,7 +5,10 @@ from abc import ABCMeta, abstractmethod import six -from openpype.client import get_asset_by_name +from openpype.client import ( + get_asset_by_name, + get_linked_assets, +) from openpype.settings import get_project_settings from openpype.host import HostBase from openpype.lib import ( @@ -67,11 +70,50 @@ class AbstractTemplateLoader: self._loaders_by_name = None self._creators_by_name = None - self.current_asset = asset_name - self.project_name = project_name - self.task_name = task_name - self.current_asset_doc = current_asset_doc - self.task_type = task_type + self._current_asset_doc = None + self._linked_asset_docs = None + self._task_type = None + + @property + def project_name(self): + return legacy_io.active_project() + + @property + def current_asset_name(self): + return legacy_io.Session["AVALON_ASSET"] + + @property + def current_task_name(self): + return legacy_io.Session["AVALON_TASK"] + + @property + def current_asset_doc(self): + if self._current_asset_doc is None: + self._current_asset_doc = get_asset_by_name( + self.project_name, self.current_asset_name + ) + return self._current_asset_doc + + @property + def linked_asset_docs(self): + if self._linked_asset_docs is None: + self._linked_asset_docs = get_linked_assets( + self.current_asset_doc + ) + return self._linked_asset_docs + + @property + def current_task_type(self): + asset_doc = self.current_asset_doc + if not asset_doc: + return None + return ( + asset_doc + .get("data", {}) + .get("tasks", {}) + .get(self.current_task_name, {}) + .get("type") + ) def get_placeholder_plugin_classes(self): """Get placeholder plugin classes that can be used to build template. 
@@ -121,6 +163,11 @@ class AbstractTemplateLoader: self._placeholder_plugins = None self._loaders_by_name = None self._creators_by_name = None + + self._current_asset_doc = None + self._linked_asset_docs = None + self._task_type = None + self.clear_shared_data() self.clear_shared_populate_data() @@ -432,10 +479,18 @@ class AbstractTemplateLoader: placeholder_plugin.populate_placeholder(placeholder) except Exception as exc: - placeholder.set_error(exc) + self.log.warning( + ( + "Failed to process placeholder {} with plugin {}" + ).format( + placeholder.scene_identifier, + placeholder_plugin.__class__.__name__ + ), + exc_info=True + ) + placeholder.set_failed(exc) - else: - placeholder.set_finished() + placeholder.set_finished() # Clear shared data before getting new placeholders self.clear_shared_populate_data() @@ -467,10 +522,10 @@ class AbstractTemplateLoader: ) def get_template_path(self): - project_name = self.project_name host_name = self.host_name - task_name = self.task_name - task_type = self.task_type + project_name = self.project_name + task_name = self.current_task_name + task_type = self.current_task_type build_profiles = self._get_build_profiles() profile = filter_profiles( @@ -872,6 +927,9 @@ class PlaceholderItem(object): self._state = 2 + def set_failed(self, exception): + self.add_error(str(exception)) + def add_error(self, error): """Set placeholder item as failed and mark it as finished.""" From 419812241c91f8d9b1dad18a36937cad0fe152fe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 18:00:57 +0200 Subject: [PATCH 1049/2550] removed unused code --- .../hosts/maya/api/new_template_builder.py | 53 ------------------- 1 file changed, 53 deletions(-) diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/new_template_builder.py index 80f62b8759..05f6a8551a 100644 --- a/openpype/hosts/maya/api/new_template_builder.py +++ b/openpype/hosts/maya/api/new_template_builder.py @@ -49,61 +49,8 @@ class MayaTemplateLoader(AbstractTemplateLoader): cmds.setAttr(PLACEHOLDER_SET + ".hiddenInOutliner", True) - # This should be handled by creators - # for set_name in cmds.listSets(allSets=True): - # if ( - # cmds.objExists(set_name) - # and cmds.attributeQuery('id', node=set_name, exists=True) - # and cmds.getAttr(set_name + '.id') == 'pyblish.avalon.instance' - # ): - # if cmds.attributeQuery('asset', node=set_name, exists=True): - # cmds.setAttr( - # set_name + '.asset', - # legacy_io.Session['AVALON_ASSET'], type='string' - # ) - return True - # def template_already_imported(self, err_msg): - # clearButton = "Clear scene and build" - # updateButton = "Update template" - # abortButton = "Abort" - # - # title = "Scene already builded" - # message = ( - # "It's seems a template was already build for this scene.\n" - # "Error message reveived :\n\n\"{}\"".format(err_msg)) - # buttons = [clearButton, updateButton, abortButton] - # defaultButton = clearButton - # cancelButton = abortButton - # dismissString = abortButton - # answer = cmds.confirmDialog( - # t=title, - # m=message, - # b=buttons, - # db=defaultButton, - # cb=cancelButton, - # ds=dismissString) - # - # if answer == clearButton: - # cmds.file(newFile=True, force=True) - # self.import_template(self.template_path) - # self.populate_template() - # elif answer == updateButton: - # self.update_missing_containers() - # elif answer == abortButton: - # return - - # def get_loaded_containers_by_id(self): - # try: - # containers = cmds.sets("AVALON_CONTAINERS", q=True) - # except ValueError: 
- # return None - # - # return [ - # cmds.getAttr(container + '.representation') - # for container in containers] - class MayaLoadPlaceholderPlugin(PlaceholderPlugin): identifier = "maya.load" From 486d6636a994da49c35113337b4fa62c3e1d2071 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 18:05:17 +0200 Subject: [PATCH 1050/2550] renamed file 'new_template_builder' to 'workfile_template_builder' --- openpype/hosts/maya/api/menu.py | 2 +- openpype/hosts/maya/api/pipeline.py | 2 +- .../{new_template_builder.py => workfile_template_builder.py} | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename openpype/hosts/maya/api/{new_template_builder.py => workfile_template_builder.py} (100%) diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index cc9a17fd72..e20f29049b 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -14,7 +14,7 @@ from openpype.hosts.maya.api import lib, lib_rendersettings from .lib import get_main_window, IS_HEADLESS from .commands import reset_frame_range -from .new_template_builder import ( +from .workfile_template_builder import ( create_placeholder, update_placeholder, build_workfile_template, diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 2492e75b36..c47e34aebc 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -35,7 +35,7 @@ from openpype.hosts.maya import MAYA_ROOT_DIR from openpype.hosts.maya.lib import copy_workspace_mel from . import menu, lib -from .new_template_builder import MayaLoadPlaceholderPlugin +from .workfile_template_builder import MayaLoadPlaceholderPlugin from .workio import ( open_file, save_file, diff --git a/openpype/hosts/maya/api/new_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py similarity index 100% rename from openpype/hosts/maya/api/new_template_builder.py rename to openpype/hosts/maya/api/workfile_template_builder.py From a3b6d9645e7a0b93d359b4304e8a96381cd373ca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 18:05:27 +0200 Subject: [PATCH 1051/2550] removed legacy workfile template builder --- .../hosts/maya/api/lib_template_builder.py | 253 ------------------ openpype/hosts/maya/api/template_loader.py | 252 ----------------- 2 files changed, 505 deletions(-) delete mode 100644 openpype/hosts/maya/api/lib_template_builder.py delete mode 100644 openpype/hosts/maya/api/template_loader.py diff --git a/openpype/hosts/maya/api/lib_template_builder.py b/openpype/hosts/maya/api/lib_template_builder.py deleted file mode 100644 index 34a8450a26..0000000000 --- a/openpype/hosts/maya/api/lib_template_builder.py +++ /dev/null @@ -1,253 +0,0 @@ -import json -from collections import OrderedDict -import maya.cmds as cmds - -import qargparse -from openpype.tools.utils.widgets import OptionDialog -from .lib import get_main_window, imprint - -# To change as enum -build_types = ["context_asset", "linked_asset", "all_assets"] - - -def get_placeholder_attributes(node): - return { - attr: cmds.getAttr("{}.{}".format(node, attr)) - for attr in cmds.listAttr(node, userDefined=True)} - - -def delete_placeholder_attributes(node): - ''' - function to delete all extra placeholder attributes - ''' - extra_attributes = get_placeholder_attributes(node) - for attribute in extra_attributes: - cmds.deleteAttr(node + '.' 
+ attribute) - - -def create_placeholder(): - args = placeholder_window() - - if not args: - return # operation canceled, no locator created - - # custom arg parse to force empty data query - # and still imprint them on placeholder - # and getting items when arg is of type Enumerator - options = create_options(args) - - # create placeholder name dynamically from args and options - placeholder_name = create_placeholder_name(args, options) - - selection = cmds.ls(selection=True) - if not selection: - raise ValueError("Nothing is selected") - - placeholder = cmds.spaceLocator(name=placeholder_name)[0] - - # get the long name of the placeholder (with the groups) - placeholder_full_name = cmds.ls(selection[0], long=True)[ - 0] + '|' + placeholder.replace('|', '') - - if selection: - cmds.parent(placeholder, selection[0]) - - imprint(placeholder_full_name, options) - - # Some tweaks because imprint force enums to to default value so we get - # back arg read and force them to attributes - imprint_enum(placeholder_full_name, args) - - # Add helper attributes to keep placeholder info - cmds.addAttr( - placeholder_full_name, - longName="parent", - hidden=True, - dataType="string" - ) - cmds.addAttr( - placeholder_full_name, - longName="index", - hidden=True, - attributeType="short", - defaultValue=-1 - ) - - cmds.setAttr(placeholder_full_name + '.parent', "", type="string") - - -def create_placeholder_name(args, options): - placeholder_builder_type = [ - arg.read() for arg in args if 'builder_type' in str(arg) - ][0] - placeholder_family = options['family'] - placeholder_name = placeholder_builder_type.split('_') - - # add famlily in any - if placeholder_family: - placeholder_name.insert(1, placeholder_family) - - # add loader arguments if any - if options['loader_args']: - pos = 2 - loader_args = options['loader_args'].replace('\'', '\"') - loader_args = json.loads(loader_args) - values = [v for v in loader_args.values()] - for i in range(len(values)): - placeholder_name.insert(i + pos, values[i]) - - placeholder_name = '_'.join(placeholder_name) - - return placeholder_name.capitalize() - - -def update_placeholder(): - placeholder = cmds.ls(selection=True) - if len(placeholder) == 0: - raise ValueError("No node selected") - if len(placeholder) > 1: - raise ValueError("Too many selected nodes") - placeholder = placeholder[0] - - args = placeholder_window(get_placeholder_attributes(placeholder)) - - if not args: - return # operation canceled - - # delete placeholder attributes - delete_placeholder_attributes(placeholder) - - options = create_options(args) - - imprint(placeholder, options) - imprint_enum(placeholder, args) - - cmds.addAttr( - placeholder, - longName="parent", - hidden=True, - dataType="string" - ) - cmds.addAttr( - placeholder, - longName="index", - hidden=True, - attributeType="short", - defaultValue=-1 - ) - - cmds.setAttr(placeholder + '.parent', '', type="string") - - -def create_options(args): - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = arg._data.get("items") or arg.read() - return options - - -def imprint_enum(placeholder, args): - """ - Imprint method doesn't act properly with enums. - Replacing the functionnality with this for now - """ - enum_values = {str(arg): arg.read() - for arg in args if arg._data.get("items")} - string_to_value_enum_table = { - build: i for i, build - in enumerate(build_types)} - for key, value in enum_values.items(): - cmds.setAttr( - placeholder + "." 
+ key, - string_to_value_enum_table[value]) - - -def placeholder_window(options=None): - options = options or dict() - dialog = OptionDialog(parent=get_main_window()) - dialog.setWindowTitle("Create Placeholder") - - args = [ - qargparse.Separator("Main attributes"), - qargparse.Enum( - "builder_type", - label="Asset Builder Type", - default=options.get("builder_type", 0), - items=build_types, - help="""Asset Builder Type -Builder type describe what template loader will look for. -context_asset : Template loader will look for subsets of -current context asset (Asset bob will find asset) -linked_asset : Template loader will look for assets linked -to current context asset. -Linked asset are looked in avalon database under field "inputLinks" -""" - ), - qargparse.String( - "family", - default=options.get("family", ""), - label="OpenPype Family", - placeholder="ex: model, look ..."), - qargparse.String( - "representation", - default=options.get("representation", ""), - label="OpenPype Representation", - placeholder="ex: ma, abc ..."), - qargparse.String( - "loader", - default=options.get("loader", ""), - label="Loader", - placeholder="ex: ReferenceLoader, LightLoader ...", - help="""Loader -Defines what openpype loader will be used to load assets. -Useable loader depends on current host's loader list. -Field is case sensitive. -"""), - qargparse.String( - "loader_args", - default=options.get("loader_args", ""), - label="Loader Arguments", - placeholder='ex: {"camera":"persp", "lights":True}', - help="""Loader -Defines a dictionnary of arguments used to load assets. -Useable arguments depend on current placeholder Loader. -Field should be a valid python dict. Anything else will be ignored. -"""), - qargparse.Integer( - "order", - default=options.get("order", 0), - min=0, - max=999, - label="Order", - placeholder="ex: 0, 100 ... 
(smallest order loaded first)", - help="""Order -Order defines asset loading priority (0 to 999) -Priority rule is : "lowest is first to load"."""), - qargparse.Separator( - "Optional attributes"), - qargparse.String( - "asset", - default=options.get("asset", ""), - label="Asset filter", - placeholder="regex filtering by asset name", - help="Filtering assets by matching field regex to asset's name"), - qargparse.String( - "subset", - default=options.get("subset", ""), - label="Subset filter", - placeholder="regex filtering by subset name", - help="Filtering assets by matching field regex to subset's name"), - qargparse.String( - "hierarchy", - default=options.get("hierarchy", ""), - label="Hierarchy filter", - placeholder="regex filtering by asset's hierarchy", - help="Filtering assets by matching field asset's hierarchy") - ] - dialog.create(args) - - if not dialog.exec_(): - return None - - return args diff --git a/openpype/hosts/maya/api/template_loader.py b/openpype/hosts/maya/api/template_loader.py deleted file mode 100644 index ecffafc93d..0000000000 --- a/openpype/hosts/maya/api/template_loader.py +++ /dev/null @@ -1,252 +0,0 @@ -import re -from maya import cmds - -from openpype.client import get_representations -from openpype.pipeline import legacy_io -from openpype.pipeline.workfile.abstract_template_loader import ( - AbstractPlaceholder, - AbstractTemplateLoader -) -from openpype.pipeline.workfile.build_template_exceptions import ( - TemplateAlreadyImported -) - -PLACEHOLDER_SET = 'PLACEHOLDERS_SET' - - -class MayaTemplateLoader(AbstractTemplateLoader): - """Concrete implementation of AbstractTemplateLoader for maya - """ - - def import_template(self, path): - """Import template into current scene. - Block if a template is already loaded. 
- Args: - path (str): A path to current template (usually given by - get_template_path implementation) - Returns: - bool: Wether the template was succesfully imported or not - """ - if cmds.objExists(PLACEHOLDER_SET): - raise TemplateAlreadyImported( - "Build template already loaded\n" - "Clean scene if needed (File > New Scene)") - - cmds.sets(name=PLACEHOLDER_SET, empty=True) - self.new_nodes = cmds.file(path, i=True, returnNewNodes=True) - cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True) - - for set in cmds.listSets(allSets=True): - if (cmds.objExists(set) and - cmds.attributeQuery('id', node=set, exists=True) and - cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'): - if cmds.attributeQuery('asset', node=set, exists=True): - cmds.setAttr( - set + '.asset', - legacy_io.Session['AVALON_ASSET'], type='string' - ) - - return True - - def template_already_imported(self, err_msg): - clearButton = "Clear scene and build" - updateButton = "Update template" - abortButton = "Abort" - - title = "Scene already builded" - message = ( - "It's seems a template was already build for this scene.\n" - "Error message reveived :\n\n\"{}\"".format(err_msg)) - buttons = [clearButton, updateButton, abortButton] - defaultButton = clearButton - cancelButton = abortButton - dismissString = abortButton - answer = cmds.confirmDialog( - t=title, - m=message, - b=buttons, - db=defaultButton, - cb=cancelButton, - ds=dismissString) - - if answer == clearButton: - cmds.file(newFile=True, force=True) - self.import_template(self.template_path) - self.populate_template() - elif answer == updateButton: - self.update_missing_containers() - elif answer == abortButton: - return - - @staticmethod - def get_template_nodes(): - attributes = cmds.ls('*.builder_type', long=True) - return [attribute.rpartition('.')[0] for attribute in attributes] - - def get_loaded_containers_by_id(self): - try: - containers = cmds.sets("AVALON_CONTAINERS", q=True) - except ValueError: - return None - - return [ - cmds.getAttr(container + '.representation') - for container in containers] - - -class MayaPlaceholder(AbstractPlaceholder): - """Concrete implementation of AbstractPlaceholder for maya - """ - - optional_keys = {'asset', 'subset', 'hierarchy'} - - def get_data(self, node): - user_data = dict() - for attr in self.required_keys.union(self.optional_keys): - attribute_name = '{}.{}'.format(node, attr) - if not cmds.attributeQuery(attr, node=node, exists=True): - print("{} not found".format(attribute_name)) - continue - user_data[attr] = cmds.getAttr( - attribute_name, - asString=True) - user_data['parent'] = ( - cmds.getAttr(node + '.parent', asString=True) - or node.rpartition('|')[0] - or "" - ) - user_data['node'] = node - if user_data['parent']: - siblings = cmds.listRelatives(user_data['parent'], children=True) - else: - siblings = cmds.ls(assemblies=True) - node_shortname = user_data['node'].rpartition('|')[2] - current_index = cmds.getAttr(node + '.index', asString=True) - user_data['index'] = ( - current_index if current_index >= 0 - else siblings.index(node_shortname)) - - self.data = user_data - - def parent_in_hierarchy(self, containers): - """Parent loaded container to placeholder's parent - ie : Set loaded content as placeholder's sibling - Args: - containers (String): Placeholder loaded containers - """ - if not containers: - return - - roots = cmds.sets(containers, q=True) - nodes_to_parent = [] - for root in roots: - if root.endswith("_RN"): - refRoot = cmds.referenceQuery(root, n=True)[0] - refRoot = 
cmds.listRelatives(refRoot, parent=True) or [refRoot] - nodes_to_parent.extend(refRoot) - elif root in cmds.listSets(allSets=True): - if not cmds.sets(root, q=True): - return - else: - continue - else: - nodes_to_parent.append(root) - - if self.data['parent']: - cmds.parent(nodes_to_parent, self.data['parent']) - # Move loaded nodes to correct index in outliner hierarchy - placeholder_node = self.data['node'] - placeholder_form = cmds.xform( - placeholder_node, - q=True, - matrix=True, - worldSpace=True - ) - for node in set(nodes_to_parent): - cmds.reorder(node, front=True) - cmds.reorder(node, relative=self.data['index']) - cmds.xform(node, matrix=placeholder_form, ws=True) - - holding_sets = cmds.listSets(object=placeholder_node) - if not holding_sets: - return - for holding_set in holding_sets: - cmds.sets(roots, forceElement=holding_set) - - def clean(self): - """Hide placeholder, parent them to root - add them to placeholder set and register placeholder's parent - to keep placeholder info available for future use - """ - node = self.data['node'] - if self.data['parent']: - cmds.setAttr(node + '.parent', self.data['parent'], type='string') - if cmds.getAttr(node + '.index') < 0: - cmds.setAttr(node + '.index', self.data['index']) - - holding_sets = cmds.listSets(object=node) - if holding_sets: - for set in holding_sets: - cmds.sets(node, remove=set) - - if cmds.listRelatives(node, p=True): - node = cmds.parent(node, world=True)[0] - cmds.sets(node, addElement=PLACEHOLDER_SET) - cmds.hide(node) - cmds.setAttr(node + '.hiddenInOutliner', True) - - def get_representations(self, current_asset_doc, linked_asset_docs): - project_name = legacy_io.active_project() - - builder_type = self.data["builder_type"] - if builder_type == "context_asset": - context_filters = { - "asset": [current_asset_doc["name"]], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representations": [self.data["representation"]], - "family": [self.data["family"]] - } - - elif builder_type != "linked_asset": - context_filters = { - "asset": [re.compile(self.data["asset"])], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representation": [self.data["representation"]], - "family": [self.data["family"]] - } - - else: - asset_regex = re.compile(self.data["asset"]) - linked_asset_names = [] - for asset_doc in linked_asset_docs: - asset_name = asset_doc["name"] - if asset_regex.match(asset_name): - linked_asset_names.append(asset_name) - - context_filters = { - "asset": linked_asset_names, - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representation": [self.data["representation"]], - "family": [self.data["family"]], - } - - return list(get_representations( - project_name, - context_filters=context_filters - )) - - def err_message(self): - return ( - "Error while trying to load a representation.\n" - "Either the subset wasn't published or the template is malformed." 
-            "\n\n"
-            "Builder was looking for :\n{attributes}".format(
-                attributes="\n".join([
-                    "{}: {}".format(key.title(), value)
-                    for key, value in self.data.items()]
-                )
-            )
-        )

From 778f2c09ad13675981076c6eef601447da41d591 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 9 Sep 2022 18:09:18 +0200
Subject: [PATCH 1052/2550] renamed 'update_template' to 'rebuild_template'

---
 openpype/hosts/maya/api/workfile_template_builder.py | 4 ++--
 openpype/pipeline/workfile/new_template_loader.py    | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py
index 05f6a8551a..98da18bba1 100644
--- a/openpype/hosts/maya/api/workfile_template_builder.py
+++ b/openpype/hosts/maya/api/workfile_template_builder.py
@@ -135,7 +135,7 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin):
                 for container in containers
             }
             self.builder.set_shared_populate_data(
-                "loaded_representation_ids"
+                "loaded_representation_ids", loaded_representation_ids
             )
         return loaded_representation_ids

@@ -525,7 +525,7 @@ def build_workfile_template(*args):

 def update_workfile_template(*args):
     builder = MayaTemplateLoader(registered_host())
-    builder.update_build_template()
+    builder.rebuild_template()


 def create_placeholder(*args):
diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py
index b23f03b0df..47d84d4ff7 100644
--- a/openpype/pipeline/workfile/new_template_loader.py
+++ b/openpype/pipeline/workfile/new_template_loader.py
@@ -350,7 +350,7 @@ class AbstractTemplateLoader:
         self.import_template(template_path)
         self.populate_scene_placeholders(level_limit)

-    def update_template(self):
+    def rebuild_template(self):
         """Go through existing placeholders in scene and update them.

         This could not make sense for all plugin types so this is optional

From 01c60e6fa777029ce50864d5cae843e24f797fb0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 9 Sep 2022 18:40:02 +0200
Subject: [PATCH 1053/2550] :recycle: rename selected node, instance node creation n method

---
 openpype/hosts/houdini/api/plugin.py | 32 ++++++++++++++++++++++------
 1 file changed, 25 insertions(+), 7 deletions(-)

diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py
index f300496a43..8180676ce8 100644
--- a/openpype/hosts/houdini/api/plugin.py
+++ b/openpype/hosts/houdini/api/plugin.py
@@ -94,23 +94,41 @@ class Creator(LegacyCreator):

 @six.add_metaclass(ABCMeta)
 class HoudiniCreator(NewCreator):
-    _nodes = []
+    selected_nodes = []
+
+    def _create_instance_node(
+            self, node_name, parent,
+            node_type="geometry"):
+        # type: (str, str, str) -> hou.Node
+        """Create node representing instance.
+
+        Arguments:
+            node_name (str): Name of the new node.
+            parent (str): Name of the parent node.
+            node_type (str, optional): Type of the node.
+
+        Returns:
+            hou.Node: Newly created instance node.
+ + """ + parent_node = hou.node(parent) + instance_node = parent_node.createNode( + node_type, node_name=node_name) + instance_node.moveToGoodPosition() + return instance_node def create(self, subset_name, instance_data, pre_create_data): try: if pre_create_data.get("use_selection"): - self._nodes = hou.selectedNodes() + self.selected_nodes = hou.selectedNodes() # Get the node type and remove it from the data, not needed node_type = instance_data.pop("node_type", None) if node_type is None: node_type = "geometry" - # Get out node - out = hou.node("/out") - instance_node = out.createNode( - node_type, node_name=subset_name) - instance_node.moveToGoodPosition() + instance_node = self._create_instance_node( + subset_name, "/out", node_type, pre_create_data) # wondering if we'll ever need more than one member here # in Houdini From fc5c07f1ca08021048acc99c24bad1e7656aa378 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 9 Sep 2022 18:40:25 +0200 Subject: [PATCH 1054/2550] :recycle: selected nodes argument rename --- .../hosts/houdini/plugins/create/create_alembic_camera.py | 4 ++-- openpype/hosts/houdini/plugins/create/create_pointcache.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 294c99744b..483c4205a8 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -29,8 +29,8 @@ class CreateAlembicCamera(plugin.HoudiniCreator): "use_sop_path": False, } - if self._nodes: - path = self._nodes[0].path() + if self.selected_nodes: + path = self.selected_nodes.path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 889e27ba51..239f3ce50b 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -33,8 +33,8 @@ class CreatePointCache(plugin.HoudiniCreator): "filename": "$HIP/pyblish/{}.abc".format(subset_name) } - if self._nodes: - parms["sop_path"] = self._nodes[0].path() + if self.selected_nodes: + parms["sop_path"] = self.selected_nodes[0].path() instance_node.setParms(parms) instance_node.parm("trange").set(1) From 9b32b4926ce8eb3356c9aea899acf05b0fe77ece Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 9 Sep 2022 18:40:47 +0200 Subject: [PATCH 1055/2550] :construction: hda creator refactor --- .../houdini/plugins/create/create_hda.py | 73 ++++++++++--------- 1 file changed, 38 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index b1751d0b6c..67e338b1b3 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -34,6 +34,43 @@ class CreateHDA(plugin.HoudiniCreator): } return subset_name.lower() in existing_subset_names_low + def _create_instance_node( + self, node_name, parent, node_type="geometry"): + parent_node = hou.node("/obj") + if self.selected_nodes: + # if we have `use selection` enabled, and we have some + # selected nodes ... 
+ subnet = parent_node.collapseIntoSubnet( + self._nodes, + subnet_name="{}_subnet".format(node_name)) + subnet.moveToGoodPosition() + to_hda = subnet + else: + to_hda = parent_node.createNode( + "subnet", node_name="{}_subnet".format(node_name)) + if not to_hda.type().definition(): + # if node type has not its definition, it is not user + # created hda. We test if hda can be created from the node. + if not to_hda.canCreateDigitalAsset(): + raise plugin.OpenPypeCreatorError( + "cannot create hda from node {}".format(to_hda)) + + hda_node = to_hda.createDigitalAsset( + name=node_name, + hda_file_name="$HIP/{}.hda".format(node_name) + ) + hda_node.layoutChildren() + elif self._check_existing(node_name): + raise plugin.OpenPypeCreatorError( + ("subset {} is already published with different HDA" + "definition.").format(node_name)) + else: + hda_node = to_hda + + hda_node.setName(node_name) + return hda_node + + def create(self, subset_name, instance_data, pre_create_data): import hou @@ -44,38 +81,4 @@ class CreateHDA(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = hou.node(instance.get("instance_node")) - out = hou.node("/obj") - if self._nodes: - # if we have `use selection` enabled, and we have some - # selected nodes ... - subnet = out.collapseIntoSubnet( - self.nodes, - subnet_name="{}_subnet".format(subset_name)) - subnet.moveToGoodPosition() - to_hda = subnet - else: - to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(subset_name)) - if not to_hda.type().definition(): - # if node type has not its definition, it is not user - # created hda. We test if hda can be created from the node. - if not to_hda.canCreateDigitalAsset(): - raise plugin.OpenPypeCreatorError( - "cannot create hda from node {}".format(to_hda)) - - hda_node = to_hda.createDigitalAsset( - name=subset_name, - hda_file_name="$HIP/{}.hda".format(subset_name) - ) - hda_node.layoutChildren() - elif self._check_existing(subset_name): - raise plugin.OpenPypeCreatorError( - ("subset {} is already published with different HDA" - "definition.").format(subset_name)) - else: - hda_node = to_hda - - hda_node.setName(subset_name) - - return hda_node + return instance From abbe7b7a3609dcd5c33bcbd8eff1e19853d0b495 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 19:15:49 +0200 Subject: [PATCH 1056/2550] implemented function 'get_contexts_for_repre_docs' to get representation contexts from already queried representations --- openpype/pipeline/load/utils.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 83b904e4a7..22e823bd3b 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -87,13 +87,20 @@ def get_repres_contexts(representation_ids, dbcon=None): if not dbcon: dbcon = legacy_io - contexts = {} if not representation_ids: - return contexts + return {} project_name = dbcon.active_project() repre_docs = get_representations(project_name, representation_ids) + return get_contexts_for_repre_docs(project_name, repre_docs) + + +def get_contexts_for_repre_docs(project_name, repre_docs): + contexts = {} + if not repre_docs: + return contexts + repre_docs_by_id = {} version_ids = set() for repre_doc in repre_docs: From 81b9b16a5c2eb3c0e1845262a1e2747f8bc9e6d5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 19:16:14 +0200 Subject: [PATCH 1057/2550] extracted loading specific logic into load mixin --- 
.../maya/api/workfile_template_builder.py | 228 +------------- .../pipeline/workfile/new_template_loader.py | 283 +++++++++++++++++- 2 files changed, 292 insertions(+), 219 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 98da18bba1..14f1f284fd 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -1,11 +1,8 @@ -import re import json from maya import cmds -from openpype.client import get_representations -from openpype.lib import attribute_definitions -from openpype.pipeline import legacy_io, registered_host +from openpype.pipeline import registered_host from openpype.pipeline.workfile.build_template_exceptions import ( TemplateAlreadyImported ) @@ -13,6 +10,7 @@ from openpype.pipeline.workfile.new_template_loader import ( AbstractTemplateLoader, PlaceholderPlugin, PlaceholderItem, + PlaceholderLoadMixin, ) from openpype.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, @@ -52,7 +50,7 @@ class MayaTemplateLoader(AbstractTemplateLoader): return True -class MayaLoadPlaceholderPlugin(PlaceholderPlugin): +class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): identifier = "maya.load" label = "Maya load" @@ -203,190 +201,27 @@ class MayaLoadPlaceholderPlugin(PlaceholderPlugin): # TODO do data validations and maybe updgrades if are invalid output.append( - LoadPlaceholder(node_name, placeholder_data, self) + LoadPlaceholderItem(node_name, placeholder_data, self) ) return output def populate_placeholder(self, placeholder): - self._populate_placeholder(placeholder) + self.populate_load_placeholder(placeholder) def update_template_placeholder(self, placeholder): repre_ids = self._get_loaded_repre_ids() - self._populate_placeholder(placeholder, repre_ids) - - def _populate_placeholder(self, placeholder, ignore_repre_ids=None): - if ignore_repre_ids is None: - ignore_repre_ids = set() - - current_asset_doc = self.builder.current_asset_doc - linked_assets = self.builder.linked_asset_docs - loader_name = placeholder.data["loader"] - loader_args = placeholder.data["loader_args"] - - # TODO check loader existence - placeholder_representations = placeholder.get_representations( - current_asset_doc, - linked_assets - ) - - if not placeholder_representations: - self.log.info(( - "There's no representation for this placeholder: {}" - ).format(placeholder.scene_identifier)) - return - - loaders_by_name = self.builder.get_loaders_by_name() - for representation in placeholder_representations: - repre_id = str(representation["_id"]) - if repre_id in ignore_repre_ids: - continue - - repre_context = representation["context"] - self.log.info( - "Loading {} from {} with loader {}\n" - "Loader arguments used : {}".format( - repre_context["subset"], - repre_context["asset"], - loader_name, - loader_args - ) - ) - try: - container = self.load( - placeholder, loaders_by_name, representation) - except Exception: - placeholder.load_failed(representation) - - else: - placeholder.load_succeed(container) - placeholder.clean() + self.populate_load_placeholder(placeholder, repre_ids) def get_placeholder_options(self, options=None): - loaders_by_name = self.builder.get_loaders_by_name() - loader_items = [ - (loader_name, loader.label or loader_name) - for loader_name, loader in loaders_by_name.items() - ] - - loader_items = list(sorted(loader_items, key=lambda i: i[0])) - options = options or {} - return [ - 
attribute_definitions.UISeparatorDef(), - attribute_definitions.UILabelDef("Main attributes"), - attribute_definitions.UISeparatorDef(), - - attribute_definitions.EnumDef( - "builder_type", - label="Asset Builder Type", - default=options.get("builder_type"), - items=[ - ("context_asset", "Current asset"), - ("linked_asset", "Linked assets"), - ("all_assets", "All assets") - ], - tooltip=( - "Asset Builder Type\n" - "\nBuilder type describe what template loader will look" - " for." - "\ncontext_asset : Template loader will look for subsets" - " of current context asset (Asset bob will find asset)" - "\nlinked_asset : Template loader will look for assets" - " linked to current context asset." - "\nLinked asset are looked in database under" - " field \"inputLinks\"" - ) - ), - attribute_definitions.TextDef( - "family", - label="Family", - default=options.get("family"), - placeholder="model, look, ..." - ), - attribute_definitions.TextDef( - "representation", - label="Representation name", - default=options.get("representation"), - placeholder="ma, abc, ..." - ), - attribute_definitions.EnumDef( - "loader", - label="Loader", - default=options.get("loader"), - items=loader_items, - tooltip=( - "Loader" - "\nDefines what OpenPype loader will be used to" - " load assets." - "\nUseable loader depends on current host's loader list." - "\nField is case sensitive." - ) - ), - attribute_definitions.TextDef( - "loader_args", - label="Loader Arguments", - default=options.get("loader_args"), - placeholder='{"camera":"persp", "lights":True}', - tooltip=( - "Loader" - "\nDefines a dictionnary of arguments used to load assets." - "\nUseable arguments depend on current placeholder Loader." - "\nField should be a valid python dict." - " Anything else will be ignored." - ) - ), - attribute_definitions.NumberDef( - "order", - label="Order", - default=options.get("order") or 0, - decimals=0, - minimum=0, - maximum=999, - tooltip=( - "Order" - "\nOrder defines asset loading priority (0 to 999)" - "\nPriority rule is : \"lowest is first to load\"." 
- ) - ), - attribute_definitions.UISeparatorDef(), - attribute_definitions.UILabelDef("Optional attributes"), - attribute_definitions.UISeparatorDef(), - attribute_definitions.TextDef( - "asset", - label="Asset filter", - default=options.get("asset"), - placeholder="regex filtering by asset name", - tooltip=( - "Filtering assets by matching field regex to asset's name" - ) - ), - attribute_definitions.TextDef( - "subset", - label="Subset filter", - default=options.get("subset"), - placeholder="regex filtering by subset name", - tooltip=( - "Filtering assets by matching field regex to subset's name" - ) - ), - attribute_definitions.TextDef( - "hierarchy", - label="Hierarchy filter", - default=options.get("hierarchy"), - placeholder="regex filtering by asset's hierarchy", - tooltip=( - "Filtering assets by matching field asset's hierarchy" - ) - ) - ] + return self.get_load_plugin_options(self, options) -class LoadPlaceholder(PlaceholderItem): - """Concrete implementation of AbstractPlaceholder for maya - """ +class LoadPlaceholderItem(PlaceholderItem): + """Concrete implementation of PlaceholderItem for Maya load plugin.""" def __init__(self, *args, **kwargs): - super(LoadPlaceholder, self).__init__(*args, **kwargs) + super(LoadPlaceholderItem, self).__init__(*args, **kwargs) self._failed_representations = [] def parent_in_hierarchy(self, container): @@ -457,49 +292,6 @@ class LoadPlaceholder(PlaceholderItem): cmds.hide(node) cmds.setAttr(node + ".hiddenInOutliner", True) - def get_representations(self, current_asset_doc, linked_asset_docs): - project_name = legacy_io.active_project() - - builder_type = self.data["builder_type"] - if builder_type == "context_asset": - context_filters = { - "asset": [current_asset_doc["name"]], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representations": [self.data["representation"]], - "family": [self.data["family"]] - } - - elif builder_type != "linked_asset": - context_filters = { - "asset": [re.compile(self.data["asset"])], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representation": [self.data["representation"]], - "family": [self.data["family"]] - } - - else: - asset_regex = re.compile(self.data["asset"]) - linked_asset_names = [] - for asset_doc in linked_asset_docs: - asset_name = asset_doc["name"] - if asset_regex.match(asset_name): - linked_asset_names.append(asset_name) - - context_filters = { - "asset": linked_asset_names, - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representation": [self.data["representation"]], - "family": [self.data["family"]], - } - - return list(get_representations( - project_name, - context_filters=context_filters - )) - def get_errors(self): if not self._failed_representations: return [] diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 47d84d4ff7..921cc39ba9 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -1,4 +1,5 @@ import os +import re import collections import copy from abc import ABCMeta, abstractmethod @@ -8,6 +9,7 @@ import six from openpype.client import ( get_asset_by_name, get_linked_assets, + get_representations, ) from openpype.settings import get_project_settings from openpype.host import HostBase @@ -15,10 +17,15 @@ from openpype.lib import ( Logger, StringTemplate, filter_profiles, + 
attribute_definitions, ) from openpype.lib.attribute_definitions import get_attributes_keys from openpype.pipeline import legacy_io, Anatomy -from openpype.pipeline.load import get_loaders_by_name +from openpype.pipeline.load import ( + get_loaders_by_name, + get_contexts_for_repre_docs, + load_with_repre_context, +) from openpype.pipeline.create import get_legacy_creator_by_name from .build_template_exceptions import ( @@ -942,3 +949,277 @@ class PlaceholderItem(object): """ return self._errors + + +class PlaceholderLoadMixin(object): + """Mixin prepared for loading placeholder plugins. + + Implementation prepares options for placeholders with + 'get_load_plugin_options'. + + For placeholder population is implemented 'populate_load_placeholder'. + + Requires that PlaceholderItem has implemented methods: + - 'load_failed' - called when loading of one representation failed + - 'load_succeed' - called when loading of one representation succeeded + - 'clean' - called when placeholder processing finished + """ + + def get_load_plugin_options(self, options=None): + """Unified attribute definitions for load placeholder. + + Common function for placeholder plugins used for loading of + repsentations. + + Args: + plugin (PlaceholderPlugin): Plugin used for loading of + representations. + options (Dict[str, Any]): Already available options which are used + as defaults for attributes. + + Returns: + List[AbtractAttrDef]: Attribute definitions common for load + plugins. + """ + + loaders_by_name = self.builder.get_loaders_by_name() + loader_items = [ + (loader_name, loader.label or loader_name) + for loader_name, loader in loaders_by_name.items() + ] + + loader_items = list(sorted(loader_items, key=lambda i: i[1])) + options = options or {} + return [ + attribute_definitions.UISeparatorDef(), + attribute_definitions.UILabelDef("Main attributes"), + attribute_definitions.UISeparatorDef(), + + attribute_definitions.EnumDef( + "builder_type", + label="Asset Builder Type", + default=options.get("builder_type"), + items=[ + ("context_asset", "Current asset"), + ("linked_asset", "Linked assets"), + ("all_assets", "All assets") + ], + tooltip=( + "Asset Builder Type\n" + "\nBuilder type describe what template loader will look" + " for." + "\ncontext_asset : Template loader will look for subsets" + " of current context asset (Asset bob will find asset)" + "\nlinked_asset : Template loader will look for assets" + " linked to current context asset." + "\nLinked asset are looked in database under" + " field \"inputLinks\"" + ) + ), + attribute_definitions.TextDef( + "family", + label="Family", + default=options.get("family"), + placeholder="model, look, ..." + ), + attribute_definitions.TextDef( + "representation", + label="Representation name", + default=options.get("representation"), + placeholder="ma, abc, ..." + ), + attribute_definitions.EnumDef( + "loader", + label="Loader", + default=options.get("loader"), + items=loader_items, + tooltip=( + "Loader" + "\nDefines what OpenPype loader will be used to" + " load assets." + "\nUseable loader depends on current host's loader list." + "\nField is case sensitive." + ) + ), + attribute_definitions.TextDef( + "loader_args", + label="Loader Arguments", + default=options.get("loader_args"), + placeholder='{"camera":"persp", "lights":True}', + tooltip=( + "Loader" + "\nDefines a dictionnary of arguments used to load assets." + "\nUseable arguments depend on current placeholder Loader." + "\nField should be a valid python dict." + " Anything else will be ignored." 
+ ) + ), + attribute_definitions.NumberDef( + "order", + label="Order", + default=options.get("order") or 0, + decimals=0, + minimum=0, + maximum=999, + tooltip=( + "Order" + "\nOrder defines asset loading priority (0 to 999)" + "\nPriority rule is : \"lowest is first to load\"." + ) + ), + attribute_definitions.UISeparatorDef(), + attribute_definitions.UILabelDef("Optional attributes"), + attribute_definitions.UISeparatorDef(), + attribute_definitions.TextDef( + "asset", + label="Asset filter", + default=options.get("asset"), + placeholder="regex filtering by asset name", + tooltip=( + "Filtering assets by matching field regex to asset's name" + ) + ), + attribute_definitions.TextDef( + "subset", + label="Subset filter", + default=options.get("subset"), + placeholder="regex filtering by subset name", + tooltip=( + "Filtering assets by matching field regex to subset's name" + ) + ), + attribute_definitions.TextDef( + "hierarchy", + label="Hierarchy filter", + default=options.get("hierarchy"), + placeholder="regex filtering by asset's hierarchy", + tooltip=( + "Filtering assets by matching field asset's hierarchy" + ) + ) + ] + + def parse_loader_args(self, loader_args): + """Helper function to parse string of loader arugments. + + Empty dictionary is returned if conversion fails. + + Args: + loader_args (str): Loader args filled by user. + + Returns: + Dict[str, Any]: Parsed arguments used as dictionary. + """ + + if not loader_args: + return {} + + try: + parsed_args = eval(loader_args) + if isinstance(parsed_args, dict): + return parsed_args + + except Exception as err: + print( + "Error while parsing loader arguments '{}'.\n{}: {}\n\n" + "Continuing with default arguments. . .".format( + loader_args, err.__class__.__name__, err)) + + return {} + + def get_representations(self, placeholder): + project_name = self.builder.project_name + current_asset_doc = self.builder.current_asset_doc + linked_asset_docs = self.builder.linked_asset_docs + + builder_type = placeholder.data["builder_type"] + if builder_type == "context_asset": + context_filters = { + "asset": [current_asset_doc["name"]], + "subset": [re.compile(placeholder.data["subset"])], + "hierarchy": [re.compile(placeholder.data["hierarchy"])], + "representations": [placeholder.data["representation"]], + "family": [placeholder.data["family"]] + } + + elif builder_type != "linked_asset": + context_filters = { + "asset": [re.compile(placeholder.data["asset"])], + "subset": [re.compile(placeholder.data["subset"])], + "hierarchy": [re.compile(placeholder.data["hierarchy"])], + "representation": [placeholder.data["representation"]], + "family": [placeholder.data["family"]] + } + + else: + asset_regex = re.compile(placeholder.data["asset"]) + linked_asset_names = [] + for asset_doc in linked_asset_docs: + asset_name = asset_doc["name"] + if asset_regex.match(asset_name): + linked_asset_names.append(asset_name) + + context_filters = { + "asset": linked_asset_names, + "subset": [re.compile(placeholder.data["subset"])], + "hierarchy": [re.compile(placeholder.data["hierarchy"])], + "representation": [placeholder.data["representation"]], + "family": [placeholder.data["family"]], + } + + return list(get_representations( + project_name, + context_filters=context_filters + )) + + def populate_load_placeholder(self, placeholder, ignore_repre_ids=None): + if ignore_repre_ids is None: + ignore_repre_ids = set() + + # TODO check loader existence + loader_name = placeholder.data["loader"] + loader_args = placeholder.data["loader_args"] + + 
placeholder_representations = self.get_representations(placeholder) + + filtered_representations = [] + for representation in placeholder_representations: + repre_id = str(representation["_id"]) + if repre_id not in ignore_repre_ids: + filtered_representations.append(representation) + + if not filtered_representations: + self.log.info(( + "There's no representation for this placeholder: {}" + ).format(placeholder.scene_identifier)) + return + + repre_load_contexts = get_contexts_for_repre_docs( + self.project_name, filtered_representations + ) + loaders_by_name = self.builder.get_loaders_by_name() + for repre_load_context in repre_load_contexts: + representation = repre_load_context["representation"] + repre_context = representation["context"] + self.log.info( + "Loading {} from {} with loader {}\n" + "Loader arguments used : {}".format( + repre_context["subset"], + repre_context["asset"], + loader_name, + loader_args + ) + ) + try: + container = load_with_repre_context( + loaders_by_name[loader_name], + repre_load_context, + options=self.parse_loader_args(loader_args) + ) + + except Exception: + placeholder.load_failed(representation) + + else: + placeholder.load_succeed(container) + placeholder.clean() From a2a900d18aedf1ad9e4fd87868487f24359df094 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 19:16:28 +0200 Subject: [PATCH 1058/2550] fix class name change --- openpype/hosts/maya/api/pipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index c47e34aebc..70e6b02e4c 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -35,7 +35,7 @@ from openpype.hosts.maya import MAYA_ROOT_DIR from openpype.hosts.maya.lib import copy_workspace_mel from . 
import menu, lib -from .workfile_template_builder import MayaLoadPlaceholderPlugin +from .workfile_template_builder import MayaPlaceholderLoadPlugin from .workio import ( open_file, save_file, @@ -126,7 +126,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): def get_workfile_build_placeholder_plugins(self): return [ - MayaLoadPlaceholderPlugin + MayaPlaceholderLoadPlugin ] @contextlib.contextmanager From 87cb5e25b82d3ea352b17794e231924bdbb097ed Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 10 Sep 2022 04:17:04 +0000 Subject: [PATCH 1059/2550] [Automated] Bump version --- CHANGELOG.md | 45 ++++++++++++++++++--------------------------- openpype/version.py | 2 +- 2 files changed, 19 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6754f1e2e3..0ffb6a996b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,29 +1,36 @@ # Changelog -## [3.14.2-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) **🆕 New features** - Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) +- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) +- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) **🚀 Enhancements** +- Flame: Adding Creator's retimed shot and handles switch [\#3826](https://github.com/pypeclub/OpenPype/pull/3826) +- Flame: OpenPype submenu to batch and media manager [\#3825](https://github.com/pypeclub/OpenPype/pull/3825) +- General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) -- Blender: Publisher collect workfile representation [\#3670](https://github.com/pypeclub/OpenPype/pull/3670) -- Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) -- Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) +- Kitsu: Drop 'entities root' setting. 
[\#3739](https://github.com/pypeclub/OpenPype/pull/3739) **🐛 Bug fixes** +- General: Fix Pattern access in client code [\#3828](https://github.com/pypeclub/OpenPype/pull/3828) +- Launcher: Skip opening last work file works for groups [\#3822](https://github.com/pypeclub/OpenPype/pull/3822) +- Maya: Publishing data key change [\#3811](https://github.com/pypeclub/OpenPype/pull/3811) +- Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) - Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) +- Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) - nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) - Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) - Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) - Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) -- Maya: `containerise` dont skip empty values [\#3674](https://github.com/pypeclub/OpenPype/pull/3674) **🔀 Refactored code** @@ -33,17 +40,19 @@ - General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) - General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) - General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) +- General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) -- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) - General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) +- Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** - Standalone Publisher: Ignore empty labels, then still use name like other asset models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) +- Kitsu - sync\_all\_project - add list ignore\_projects [\#3776](https://github.com/pypeclub/OpenPype/pull/3776) ## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) @@ -52,23 +61,16 @@ ### 📖 Documentation - Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) -- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) - -**🆕 New features** - -- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) -- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662) **🚀 Enhancements** - General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) - Settings: 
Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) -- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) -- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) **🐛 Bug fixes** @@ -82,11 +84,11 @@ - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) - Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) **🔀 Refactored code** +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) @@ -110,7 +112,6 @@ - Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) - Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) -- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) @@ -120,21 +121,11 @@ - Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) - Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) -- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) -- Maya: Disable rendering of all lights for render instances submitted through Deadline. 
[\#3661](https://github.com/pypeclub/OpenPype/pull/3661) **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) -- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) - -**🔀 Refactored code** - -- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) - -**Merged pull requests:** - -- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) diff --git a/openpype/version.py b/openpype/version.py index c042ca2625..142bd51a30 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.3" +__version__ = "3.14.2-nightly.4" From 5fa019527b2868c010334a6c36852e32ebaa476e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Sep 2022 10:26:23 +0200 Subject: [PATCH 1060/2550] OP-3682 - changed folder structure --- {distribution => common/openpype_common/distribution}/README.md | 0 .../openpype_common/distribution}/__init__.py | 0 .../openpype_common/distribution}/addon_distribution.py | 2 +- .../openpype_common/distribution}/file_handler.py | 0 .../distribution}/tests/test_addon_distributtion.py | 2 +- 5 files changed, 2 insertions(+), 2 deletions(-) rename {distribution => common/openpype_common/distribution}/README.md (100%) rename {distribution => common/openpype_common/distribution}/__init__.py (100%) rename {distribution => common/openpype_common/distribution}/addon_distribution.py (98%) rename {distribution => common/openpype_common/distribution}/file_handler.py (100%) rename {distribution => common/openpype_common/distribution}/tests/test_addon_distributtion.py (98%) diff --git a/distribution/README.md b/common/openpype_common/distribution/README.md similarity index 100% rename from distribution/README.md rename to common/openpype_common/distribution/README.md diff --git a/distribution/__init__.py b/common/openpype_common/distribution/__init__.py similarity index 100% rename from distribution/__init__.py rename to common/openpype_common/distribution/__init__.py diff --git a/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py similarity index 98% rename from distribution/addon_distribution.py rename to common/openpype_common/distribution/addon_distribution.py index 389b92b10b..e39ce66a0a 100644 --- a/distribution/addon_distribution.py +++ b/common/openpype_common/distribution/addon_distribution.py @@ -7,7 +7,7 @@ import requests import platform import shutil -from distribution.file_handler import RemoteFileHandler +from common.openpype_common.distribution.file_handler import RemoteFileHandler class UrlType(Enum): diff --git a/distribution/file_handler.py b/common/openpype_common/distribution/file_handler.py similarity index 100% rename from distribution/file_handler.py rename to common/openpype_common/distribution/file_handler.py diff --git a/distribution/tests/test_addon_distributtion.py b/common/openpype_common/distribution/tests/test_addon_distributtion.py similarity index 98% rename from distribution/tests/test_addon_distributtion.py rename to common/openpype_common/distribution/tests/test_addon_distributtion.py index c6ecaca3c8..7dd27fd44f 100644 --- 
a/distribution/tests/test_addon_distributtion.py +++ b/common/openpype_common/distribution/tests/test_addon_distributtion.py @@ -2,7 +2,7 @@ import pytest import attr import tempfile -from distribution.addon_distribution import ( +from common.openpype_common.distribution.addon_distribution import ( AddonDownloader, UrlType, OSAddonDownloader, From e76ec9e5aff5651070bcc817e9c0b60a9a980ce4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 12 Sep 2022 16:26:35 +0800 Subject: [PATCH 1061/2550] adding and loading mel workspace within openpype settings --- openpype/hosts/maya/resources/workspace.mel | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 openpype/hosts/maya/resources/workspace.mel diff --git a/openpype/hosts/maya/resources/workspace.mel b/openpype/hosts/maya/resources/workspace.mel deleted file mode 100644 index f7213fa4f6..0000000000 --- a/openpype/hosts/maya/resources/workspace.mel +++ /dev/null @@ -1,11 +0,0 @@ -//Maya 2018 Project Definition - -workspace -fr "shaders" "renderData/shaders"; -workspace -fr "alembicCache" "cache/alembic"; -workspace -fr "mayaAscii" ""; -workspace -fr "mayaBinary" ""; -workspace -fr "renderData" "renderData"; -workspace -fr "fileCache" "cache/nCache"; -workspace -fr "scene" ""; -workspace -fr "sourceImages" "sourceimages"; -workspace -fr "images" "renders"; From 4ba3ff21ab10e5b0c092f9580dc6444bdd61383c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 12 Sep 2022 11:18:09 +0200 Subject: [PATCH 1062/2550] Tweak back more to intended logic --- .../plugins/publish/submit_maya_deadline.py | 29 ++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index f8d0af9752..45790c40ea 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -754,7 +754,12 @@ def _format_tiles( used for assembler configuration. """ - tile = 0 + # Math used requires integers for correct output - as such + # we ensure our inputs are correct. 
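+    # As a concrete illustration of the region math below: with width=1920,
+    # height=1080, tiles_x=2 and tiles_y=2 the integer division yields
+    # w_space=960 and h_space=540, and the first processed tile
+    # (tile_x=1, tile_y=2) maps to left=0, right=959, top=0, bottom=539.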
+ assert type(tiles_x) is int, "tiles_x must be an integer" + assert type(tiles_y) is int, "tiles_y must be an integer" + assert type(width) is int, "width must be an integer" + assert type(height) is int, "height must be an integer" out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() w_space = width // tiles_x @@ -762,6 +767,7 @@ def _format_tiles( cfg["TilesCropped"] = "False" + tile = 0 for tile_x in range(1, tiles_x + 1): for tile_y in reversed(range(1, tiles_y + 1)): tile_prefix = "_tile_{}x{}_{}x{}_".format( @@ -769,28 +775,31 @@ def _format_tiles( tiles_x, tiles_y ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) + new_filename = "{}/{}{}".format( os.path.dirname(filename), tile_prefix, os.path.basename(filename) ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - top = int(height) - (tile_y * h_space) - bottom = int(height) - ((tile_y - 1) * h_space) - 1 + top = height - (tile_y * h_space) + bottom = height - ((tile_y - 1) * h_space) - 1 left = (tile_x - 1) * w_space right = (tile_x * w_space) - 1 + # Job info + out["JobInfo"]["OutputFilename{}Tile{}".format(index, tile)] = new_filename # noqa: E501 + + # Plugin Info + out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + out["PluginInfo"]["RegionTop{}".format(tile)] = top out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom out["PluginInfo"]["RegionLeft{}".format(tile)] = left out["PluginInfo"]["RegionRight{}".format(tile)] = right + # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename @@ -801,5 +810,5 @@ def _format_tiles( cfg["Tile{}Height".format(tile)] = h_space tile += 1 - + return out, cfg From 41a738bd12efc48aec512a83ee36ffd9b4ddcb3a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 12 Sep 2022 11:19:28 +0200 Subject: [PATCH 1063/2550] Cosmetics --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 45790c40ea..44f2b5b2b4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -760,6 +760,7 @@ def _format_tiles( assert type(tiles_y) is int, "tiles_y must be an integer" assert type(width) is int, "width must be an integer" assert type(height) is int, "height must be an integer" + out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() w_space = width // tiles_x @@ -793,7 +794,6 @@ def _format_tiles( # Plugin Info out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - out["PluginInfo"]["RegionTop{}".format(tile)] = top out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom out["PluginInfo"]["RegionLeft{}".format(tile)] = left @@ -805,7 +805,6 @@ def _format_tiles( cfg["Tile{}FileName".format(tile)] = new_filename cfg["Tile{}X".format(tile)] = left cfg["Tile{}Y".format(tile)] = top - cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From c6ad515682944690d15532cd446fae2d8c93a570 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 12 Sep 2022 09:46:17 +0000 Subject: [PATCH 1064/2550] [Automated] Bump 
version --- CHANGELOG.md | 16 ++++++---------- openpype/version.py | 2 +- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ffb6a996b..cccfc2eded 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.14.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) @@ -8,7 +8,6 @@ - Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) - Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) -- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) **🚀 Enhancements** @@ -18,6 +17,7 @@ - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) - Kitsu: Drop 'entities root' setting. [\#3739](https://github.com/pypeclub/OpenPype/pull/3739) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) **🐛 Bug fixes** @@ -42,6 +42,8 @@ - General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) - General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) - General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) @@ -65,12 +67,12 @@ **🚀 Enhancements** - General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) -- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) - Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) **🐛 Bug fixes** @@ -84,7 +86,7 @@ - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) **🔀 Refactored code** @@ -117,15 +119,9 
@@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) -**🚀 Enhancements** - -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) -- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) - **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) -- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) diff --git a/openpype/version.py b/openpype/version.py index 142bd51a30..c5dc4ee581 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.4" +__version__ = "3.14.2-nightly.5" From 162370e1ad1291cbbf3eca65266c226ccd119aca Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 12 Sep 2022 09:56:59 +0000 Subject: [PATCH 1065/2550] [Automated] Release --- CHANGELOG.md | 15 +++++++++------ openpype/version.py | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cccfc2eded..46bf56f5bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.14.2-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) **🆕 New features** @@ -45,10 +45,11 @@ - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) - General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** @@ -72,7 +73,6 @@ - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) **🐛 Bug fixes** @@ -91,13 +91,12 @@ **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon 
[\#3740](https://github.com/pypeclub/OpenPype/pull/3740) -- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) - AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) -- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) - General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) - Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) @@ -119,6 +118,10 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) +**🚀 Enhancements** + +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) + **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) diff --git a/openpype/version.py b/openpype/version.py index c5dc4ee581..8469b1712a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.5" +__version__ = "3.14.2" From 6e2ffc1e5ceb134f17fcedd1646f2cec0014a43a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 10:33:41 +0200 Subject: [PATCH 1066/2550] Remove getting project name and settings twice --- openpype/hosts/maya/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index acd8a55aa4..45c52cd0d5 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -66,8 +66,6 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): project_name = legacy_io.active_project() project_settings = get_project_settings(project_name) # process path mapping - project_name = legacy_io.active_project() - project_settings = get_project_settings(project_name) dirmap_processor = MayaDirmap("maya", project_name, project_settings) dirmap_processor.process_dirmap() From 37286eef2ce894d357cfaa530434f011f5ff4a59 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 11:42:38 +0200 Subject: [PATCH 1067/2550] propagated 'get_contexts_for_repre_docs' to load init --- openpype/pipeline/load/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/pipeline/load/__init__.py b/openpype/pipeline/load/__init__.py index bf38a0b3c8..e96f64f2a4 100644 --- a/openpype/pipeline/load/__init__.py +++ b/openpype/pipeline/load/__init__.py @@ -5,6 +5,7 @@ from .utils import ( InvalidRepresentationContext, get_repres_contexts, + get_contexts_for_repre_docs, get_subset_contexts, get_representation_context, @@ -54,6 +55,7 @@ __all__ = ( "InvalidRepresentationContext", "get_repres_contexts", + "get_contexts_for_repre_docs", "get_subset_contexts", "get_representation_context", From cacfa0999553e35af0cafb9272c83414bcfc50c5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 11:43:16 +0200 Subject: [PATCH 1068/2550] reduce representations to last version --- .../pipeline/workfile/new_template_loader.py | 45 +++++++++++++------ 1 file changed, 31 insertions(+), 14 deletions(-) diff --git 
a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 921cc39ba9..2cae32a04a 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -46,19 +46,6 @@ class AbstractTemplateLoader: _log = None def __init__(self, host): - # Prepare context information - project_name = legacy_io.active_project() - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - current_asset_doc = get_asset_by_name(project_name, asset_name) - task_type = ( - current_asset_doc - .get("data", {}) - .get("tasks", {}) - .get(task_name, {}) - .get("type") - ) - # Get host name if isinstance(host, HostBase): host_name = host.name @@ -1172,6 +1159,34 @@ class PlaceholderLoadMixin(object): context_filters=context_filters )) + def _reduce_last_version_repre_docs(self, representations): + """Reduce representations to last verison.""" + + mapping = {} + for repre_doc in representations: + repre_context = repre_doc["context"] + + asset_name = repre_context["asset"] + subset_name = repre_context["subset"] + version = repre_context.get("version", -1) + + if asset_name not in mapping: + mapping[asset_name] = {} + + subset_mapping = mapping[asset_name] + if subset_name not in subset_mapping: + subset_mapping[subset_name] = collections.defaultdict(list) + + version_mapping = subset_mapping[subset_name] + version_mapping[version].append(repre_doc) + + output = [] + for subset_mapping in mapping.values(): + for version_mapping in subset_mapping.values(): + last_version = tuple(sorted(version_mapping.keys()))[-1] + output.extend(version_mapping[last_version]) + return output + def populate_load_placeholder(self, placeholder, ignore_repre_ids=None): if ignore_repre_ids is None: ignore_repre_ids = set() @@ -1183,7 +1198,9 @@ class PlaceholderLoadMixin(object): placeholder_representations = self.get_representations(placeholder) filtered_representations = [] - for representation in placeholder_representations: + for representation in self._reduce_last_version_repre_docs( + placeholder_representations + ): repre_id = str(representation["_id"]) if repre_id not in ignore_repre_ids: filtered_representations.append(representation) From af895da690654573f64c542ce09ad6690fc7e817 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 11:43:51 +0200 Subject: [PATCH 1069/2550] few minor fixes --- .../pipeline/workfile/new_template_loader.py | 27 ++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 2cae32a04a..65c50b9d80 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -604,6 +604,10 @@ class PlaceholderPlugin(object): return self._builder + @property + def project_name(self): + return self._builder.project_name + @property def log(self): """Dynamically created logger for the plugin.""" @@ -956,7 +960,7 @@ class PlaceholderLoadMixin(object): """Unified attribute definitions for load placeholder. Common function for placeholder plugins used for loading of - repsentations. + repsentations. Use it in 'get_placeholder_options'. 
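+
+        Example (a sketch mirroring how the Nuke load placeholder plugin
+        introduced later in this patch series forwards to it)::
+
+            def get_placeholder_options(self, options=None):
+                return self.get_load_plugin_options(options)
+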
Args: plugin (PlaceholderPlugin): Plugin used for loading of @@ -1125,7 +1129,7 @@ class PlaceholderLoadMixin(object): "asset": [current_asset_doc["name"]], "subset": [re.compile(placeholder.data["subset"])], "hierarchy": [re.compile(placeholder.data["hierarchy"])], - "representations": [placeholder.data["representation"]], + "representation": [placeholder.data["representation"]], "family": [placeholder.data["family"]] } @@ -1188,6 +1192,23 @@ class PlaceholderLoadMixin(object): return output def populate_load_placeholder(self, placeholder, ignore_repre_ids=None): + """Load placeholder is goind to load matching representations. + + Note: + Ignore repre ids is to avoid loading the same representation again + on load. But the representation can be loaded with different loader + and there could be published new version of matching subset for the + representation. We should maybe expect containers. + + Also import loaders don't have containers at all... + + Args: + placeholder (PlaceholderItem): Placeholder item with information + about requested representations. + ignore_repre_ids (Iterable[Union[str, ObjectId]]): Representation + ids that should be skipped. + """ + if ignore_repre_ids is None: ignore_repre_ids = set() @@ -1215,7 +1236,7 @@ class PlaceholderLoadMixin(object): self.project_name, filtered_representations ) loaders_by_name = self.builder.get_loaders_by_name() - for repre_load_context in repre_load_contexts: + for repre_load_context in repre_load_contexts.values(): representation = repre_load_context["representation"] repre_context = representation["context"] self.log.info( From 09c73beec90df7b493e83e481fc97313a4d7a850 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 11:44:04 +0200 Subject: [PATCH 1070/2550] added option to have callback before load --- openpype/pipeline/workfile/new_template_loader.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 65c50b9d80..07ff69d1f5 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -1163,6 +1163,11 @@ class PlaceholderLoadMixin(object): context_filters=context_filters )) + def _before_repre_load(self, placeholder, representation): + """Can be overriden. 
Is called before representation is loaded.""" + + pass + def _reduce_last_version_repre_docs(self, representations): """Reduce representations to last verison.""" @@ -1239,6 +1244,9 @@ class PlaceholderLoadMixin(object): for repre_load_context in repre_load_contexts.values(): representation = repre_load_context["representation"] repre_context = representation["context"] + self._before_repre_load( + placeholder, representation + ) self.log.info( "Loading {} from {} with loader {}\n" "Loader arguments used : {}".format( From c702d117172c3b7561eedd5cdb70ada3b5a399cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 11:44:37 +0200 Subject: [PATCH 1071/2550] moved cleanup logic to plugin responsibility instead of placeholder's --- .../maya/api/workfile_template_builder.py | 48 ++++++++++--------- .../pipeline/workfile/new_template_loader.py | 5 +- 2 files changed, 29 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 14f1f284fd..42736badf2 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -216,6 +216,31 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): def get_placeholder_options(self, options=None): return self.get_load_plugin_options(self, options) + def cleanup_placeholder(self, placeholder): + """Hide placeholder, parent them to root + add them to placeholder set and register placeholder's parent + to keep placeholder info available for future use + """ + + node = placeholder._scene_identifier + node_parent = placeholder.data["parent"] + if node_parent: + cmds.setAttr(node + ".parent", node_parent, type="string") + + if cmds.getAttr(node + ".index") < 0: + cmds.setAttr(node + ".index", placeholder.data["index"]) + + holding_sets = cmds.listSets(object=node) + if holding_sets: + for set in holding_sets: + cmds.sets(node, remove=set) + + if cmds.listRelatives(node, p=True): + node = cmds.parent(node, world=True)[0] + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr(node + ".hiddenInOutliner", True) + class LoadPlaceholderItem(PlaceholderItem): """Concrete implementation of PlaceholderItem for Maya load plugin.""" @@ -269,29 +294,6 @@ class LoadPlaceholderItem(PlaceholderItem): for holding_set in holding_sets: cmds.sets(roots, forceElement=holding_set) - def clean(self): - """Hide placeholder, parent them to root - add them to placeholder set and register placeholder's parent - to keep placeholder info available for future use - """ - - node = self._scene_identifier - if self.data['parent']: - cmds.setAttr(node + '.parent', self.data['parent'], type='string') - if cmds.getAttr(node + '.index') < 0: - cmds.setAttr(node + '.index', self.data['index']) - - holding_sets = cmds.listSets(object=node) - if holding_sets: - for set in holding_sets: - cmds.sets(node, remove=set) - - if cmds.listRelatives(node, p=True): - node = cmds.parent(node, world=True)[0] - cmds.sets(node, addElement=PLACEHOLDER_SET) - cmds.hide(node) - cmds.setAttr(node + ".hiddenInOutliner", True) - def get_errors(self): if not self._failed_representations: return [] diff --git a/openpype/pipeline/workfile/new_template_loader.py b/openpype/pipeline/workfile/new_template_loader.py index 07ff69d1f5..3f81ce0114 100644 --- a/openpype/pipeline/workfile/new_template_loader.py +++ b/openpype/pipeline/workfile/new_template_loader.py @@ -1268,4 +1268,7 @@ class 
PlaceholderLoadMixin(object): else: placeholder.load_succeed(container) - placeholder.clean() + self.cleanup_placeholder(placeholder) + + def cleanup_placeholder(self, placeholder): + pass From f36b0aa2c6026d093ffc7421a52eb9c59264c740 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 11:48:41 +0200 Subject: [PATCH 1072/2550] initial commit of nuke workfile builder --- openpype/hosts/nuke/api/__init__.py | 4 + openpype/hosts/nuke/api/pipeline.py | 6 + .../nuke/api/workfile_template_builder.py | 607 ++++++++++++++++++ 3 files changed, 617 insertions(+) create mode 100644 openpype/hosts/nuke/api/workfile_template_builder.py diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 962f31c177..c65058874b 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -21,6 +21,8 @@ from .pipeline import ( containerise, parse_container, update_container, + + get_workfile_build_placeholder_plugins, ) from .lib import ( maintained_selection, @@ -55,6 +57,8 @@ __all__ = ( "parse_container", "update_container", + "get_workfile_build_placeholder_plugins", + "maintained_selection", "reset_selection", "get_view_process_node", diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index bac42128cc..d4edd24cf6 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -141,6 +141,12 @@ def _show_workfiles(): host_tools.show_workfiles(parent=None, on_top=False) +def get_workfile_build_placeholder_plugins(): + return [ + NukePlaceholderLoadPlugin + ] + + def _install_menu(): # uninstall original avalon menu main_window = get_main_window() diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py new file mode 100644 index 0000000000..71ea5c95a5 --- /dev/null +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -0,0 +1,607 @@ +import json +import collections + +import nuke + +from openpype.pipeline import registered_host +from openpype.pipeline.workfile.build_template_exceptions import ( + TemplateAlreadyImported +) +from openpype.pipeline.workfile.new_template_loader import ( + AbstractTemplateLoader, + PlaceholderPlugin, + PlaceholderItem, + PlaceholderLoadMixin, +) +from openpype.tools.workfile_template_build import ( + WorkfileBuildPlaceholderDialog, +) + +from .lib import ( + find_free_space_to_paste_nodes, + get_extreme_positions, + get_group_io_nodes, + imprint, + refresh_node, + refresh_nodes, + reset_selection, + get_names_from_nodes, + get_nodes_by_names, + select_nodes, + duplicate_node, + node_tempfile, +) + +PLACEHOLDER_SET = "PLACEHOLDERS_SET" + + +class NukeTemplateLoader(AbstractTemplateLoader): + """Concrete implementation of AbstractTemplateLoader for maya""" + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded. 
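+        (In this first version the 'already loaded' check is still a TODO,
+        see the comment in the method body below.)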
+ + Args: + path (str): A path to current template (usually given by + get_template_path implementation) + + Returns: + bool: Wether the template was succesfully imported or not + """ + + # TODO check if the template is already imported + + nuke.nodePaste(path) + reset_selection() + + return True + + +class NukePlaceholderPlugin(PlaceholderPlugin): + noce_color = 4278190335 + + def _collect_scene_placeholders(self): + # Cache placeholder data to shared data + placeholder_nodes = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) + if placeholder_nodes is None: + placeholder_nodes = {} + all_groups = collections.deque() + all_groups.append(nuke.thisGroup()) + while all_groups: + group = all_groups.popleft() + for node in group.nodes(): + if isinstance(node, nuke.Group): + all_groups.append(node) + + node_knobs = node.knobs() + if ( + "builder_type" not in node_knobs + or "is_placeholder" not in node_knobs + or not node.knob("is_placeholder").value() + ): + continue + + if "empty" in node_knobs and node.knob("empty").value(): + continue + + placeholder_nodes[node.fullName()] = node + + self.builder.set_shared_populate_data( + "placeholder_nodes", placeholder_nodes + ) + return placeholder_nodes + + def create_placeholder(self, placeholder_data): + placeholder_data["plugin_identifier"] = self.identifier + + placeholder = nuke.nodes.NoOp() + placeholder.setName("PLACEHOLDER") + placeholder.knob("tile_color").setValue(self.node_color) + + imprint(placeholder, placeholder_data) + imprint(placeholder, {"is_placeholder": True}) + placeholder.knob("is_placeholder").setVisible(False) + + def update_placeholder(self, placeholder_item, placeholder_data): + node = nuke.toNode(placeholder_item.scene_identifier) + imprint(node, placeholder_data) + + def _parse_placeholder_node_data(self, node): + placeholder_data = {} + for key in self.get_placeholder_keys(): + knob = node.knob(key) + value = None + if knob is not None: + value = knob.getValue() + placeholder_data[key] = value + return placeholder_data + + +class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): + identifier = "nuke.load" + label = "Nuke load" + + def _parse_placeholder_node_data(self, node): + placeholder_data = super( + NukePlaceholderLoadPlugin, self + )._parse_placeholder_node_data(node) + + node_knobs = node.knobs() + nb_children = 0 + if "nb_children" in node_knobs: + nb_children = int(node_knobs["nb_children"].getValue()) + placeholder_data["nb_children"] = nb_children + + siblings = [] + if "siblings" in node_knobs: + siblings = node_knobs["siblings"].values() + placeholder_data["siblings"] = siblings + + node_full_name = node.fullName() + placeholder_data["group_name"] = node_full_name.rpartition(".")[0] + placeholder_data["last_loaded"] = [] + placeholder_data["delete"] = False + return placeholder_data + + def _get_loaded_repre_ids(self): + loaded_representation_ids = self.builder.get_shared_populate_data( + "loaded_representation_ids" + ) + if loaded_representation_ids is None: + loaded_representation_ids = set() + for node in nuke.allNodes(): + if "repre_id" in node.knobs(): + loaded_representation_ids.add( + node.knob("repre_id").getValue() + ) + + self.builder.set_shared_populate_data( + "loaded_representation_ids", loaded_representation_ids + ) + return loaded_representation_ids + + def _before_repre_load(self, placeholder, representation): + placeholder.data["nodes_init"] = nuke.allNodes() + placeholder.data["last_repre_id"] = str(representation["_id"]) + + def 
collect_placeholders(self): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, node in scene_placeholders.items(): + plugin_identifier_knob = node.knob("plugin_identifier") + if ( + plugin_identifier_knob is None + or plugin_identifier_knob.getValue() != self.identifier + ): + continue + + placeholder_data = self._parse_placeholder_node_data(node) + # TODO do data validations and maybe updgrades if are invalid + output.append( + NukeLoadPlaceholderItem(node_name, placeholder_data, self) + ) + + return output + + def populate_placeholder(self, placeholder): + self.populate_load_placeholder(placeholder) + + def update_template_placeholder(self, placeholder): + repre_ids = self._get_loaded_repre_ids() + self.populate_load_placeholder(placeholder, repre_ids) + + def get_placeholder_options(self, options=None): + return self.get_load_plugin_options(options) + + def cleanup_placeholder(self, placeholder): + # deselect all selected nodes + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + # getting the latest nodes added + # TODO get from shared populate data! + nodes_init = placeholder.data["nodes_init"] + nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) + self.log.debug("Loaded nodes: {}".format(nodes_loaded)) + if not nodes_loaded: + return + + placeholder.data["delete"] = True + + nodes_loaded = self._move_to_placeholder_group( + placeholder, nodes_loaded + ) + placeholder.data["last_loaded"] = nodes_loaded + refresh_nodes(nodes_loaded) + + # positioning of the loaded nodes + min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) + for node in nodes_loaded: + xpos = (node.xpos() - min_x) + placeholder_node.xpos() + ypos = (node.ypos() - min_y) + placeholder_node.ypos() + node.setXYpos(xpos, ypos) + refresh_nodes(nodes_loaded) + + # fix the problem of z_order for backdrops + self._fix_z_order(placeholder) + self._imprint_siblings(placeholder) + + if placeholder.data["nb_children"] == 0: + # save initial nodes postions and dimensions, update them + # and set inputs and outputs of loaded nodes + + self._imprint_inits() + self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded) + self._set_loaded_connections(placeholder) + + elif placeholder.data["siblings"]: + # create copies of placeholder siblings for the new loaded nodes, + # set their inputs and outpus and update all nodes positions and + # dimensions and siblings names + + siblings = get_nodes_by_names(placeholder.data["siblings"]) + refresh_nodes(siblings) + copies = self._create_sib_copies(placeholder) + new_nodes = list(copies.values()) # copies nodes + self._update_nodes(new_nodes, nodes_loaded) + placeholder_node.removeKnob(placeholder_node.knob("siblings")) + new_nodes_name = get_names_from_nodes(new_nodes) + imprint(placeholder_node, {"siblings": new_nodes_name}) + self._set_copies_connections(placeholder, copies) + + self._update_nodes( + nuke.allNodes(), + new_nodes + nodes_loaded, + 20 + ) + + new_siblings = get_names_from_nodes(new_nodes) + placeholder.data["siblings"] = new_siblings + + else: + # if the placeholder doesn't have siblings, the loaded + # nodes will be placed in a free space + + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes_loaded, direction="bottom", offset=200 + ) + node = nuke.createNode("NoOp") + reset_selection() + nuke.delete(node) + for node in nodes_loaded: + xpos = (node.xpos() - min_x) + xpointer + ypos = (node.ypos() - min_y) + ypointer + node.setXYpos(xpos, ypos) + + placeholder.data["nb_children"] += 1 + 
reset_selection() + # go back to root group + nuke.root().begin() + + def _move_to_placeholder_group(self, placeholder, nodes_loaded): + """ + opening the placeholder's group and copying loaded nodes in it. + + Returns : + nodes_loaded (list): the new list of pasted nodes + """ + + groups_name = placeholder.data["group_name"] + reset_selection() + select_nodes(nodes_loaded) + if groups_name: + with node_tempfile() as filepath: + nuke.nodeCopy(filepath) + for node in nuke.selectedNodes(): + nuke.delete(node) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste(filepath) + nodes_loaded = nuke.selectedNodes() + return nodes_loaded + + def _fix_z_order(self, placeholder): + """Fix the problem of z_order when a backdrop is loaded.""" + + nodes_loaded = placeholder.data["last_loaded"] + loaded_backdrops = [] + bd_orders = set() + for node in nodes_loaded: + if isinstance(node, nuke.BackdropNode): + loaded_backdrops.append(node) + bd_orders.add(node.knob("z_order").getValue()) + + if not bd_orders: + return + + sib_orders = set() + for node_name in placeholder.data["siblings"]: + node = nuke.toNode(node_name) + if isinstance(node, nuke.BackdropNode): + sib_orders.add(node.knob("z_order").getValue()) + + if not sib_orders: + return + + min_order = min(bd_orders) + max_order = max(sib_orders) + for backdrop_node in loaded_backdrops: + z_order = backdrop_node.knob("z_order").getValue() + backdrop_node.knob("z_order").setValue( + z_order + max_order - min_order + 1) + + def _imprint_siblings(self, placeholder): + """ + - add siblings names to placeholder attributes (nodes loaded with it) + - add Id to the attributes of all the other nodes + """ + + loaded_nodes = placeholder.data["last_loaded"] + loaded_nodes_set = set(loaded_nodes) + data = {"repre_id": str(placeholder.data["last_repre_id"])} + + for node in loaded_nodes: + node_knobs = node.knobs() + if "builder_type" not in node_knobs: + # save the id of representation for all imported nodes + imprint(node, data) + node.knob("repre_id").setVisible(False) + refresh_node(node) + continue + + if ( + "is_placeholder" not in node_knobs + or ( + "is_placeholder" in node_knobs + and node.knob("is_placeholder").value() + ) + ): + siblings = list(loaded_nodes_set - {node}) + siblings_name = get_names_from_nodes(siblings) + siblings = {"siblings": siblings_name} + imprint(node, siblings) + + def _imprint_inits(self): + """Add initial positions and dimensions to the attributes""" + + for node in nuke.allNodes(): + refresh_node(node) + imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) + node.knob("x_init").setVisible(False) + node.knob("y_init").setVisible(False) + width = node.screenWidth() + height = node.screenHeight() + if "bdwidth" in node.knobs(): + imprint(node, {"w_init": width, "h_init": height}) + node.knob("w_init").setVisible(False) + node.knob("h_init").setVisible(False) + refresh_node(node) + + def _update_nodes( + self, placeholder, nodes, considered_nodes, offset_y=None + ): + """Adjust backdrop nodes dimensions and positions. + + Considering some nodes sizes. 
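+
+        In practice nodes sitting to the right of or below the considered
+        nodes are pushed away by the computed offsets, while backdrops that
+        enclose them are enlarged instead; e.g. if the loaded nodes are
+        300 px taller than the placeholder, everything underneath is moved
+        down by that amount.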
+ + Args: + nodes (list): list of nodes to update + considered_nodes (list): list of nodes to consider while updating + positions and dimensions + offset (int): distance between copies + """ + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) + + diff_x = diff_y = 0 + contained_nodes = [] # for backdrops + + if offset_y is None: + width_ph = placeholder_node.screenWidth() + height_ph = placeholder_node.screenHeight() + diff_y = max_y - min_y - height_ph + diff_x = max_x - min_x - width_ph + contained_nodes = [placeholder_node] + min_x = placeholder_node.xpos() + min_y = placeholder_node.ypos() + else: + siblings = get_nodes_by_names(placeholder.data["siblings"]) + minX, _, maxX, _ = get_extreme_positions(siblings) + diff_y = max_y - min_y + 20 + diff_x = abs(max_x - min_x - maxX + minX) + contained_nodes = considered_nodes + + if diff_y <= 0 and diff_x <= 0: + return + + for node in nodes: + refresh_node(node) + + if ( + node == placeholder_node + or node in considered_nodes + ): + continue + + if ( + not isinstance(node, nuke.BackdropNode) + or ( + isinstance(node, nuke.BackdropNode) + and not set(contained_nodes) <= set(node.getNodes()) + ) + ): + if offset_y is None and node.xpos() >= min_x: + node.setXpos(node.xpos() + diff_x) + + if node.ypos() >= min_y: + node.setYpos(node.ypos() + diff_y) + + else: + width = node.screenWidth() + height = node.screenHeight() + node.knob("bdwidth").setValue(width + diff_x) + node.knob("bdheight").setValue(height + diff_y) + + refresh_node(node) + + def _set_loaded_connections(self, placeholder): + """ + set inputs and outputs of loaded nodes""" + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + input_node, output_node = get_group_io_nodes( + placeholder.data["last_loaded"] + ) + for node in placeholder_node.dependent(): + for idx in range(node.inputs()): + if node.input(idx) == placeholder_node: + node.setInput(idx, output_node) + + for node in placeholder_node.dependencies(): + for idx in range(placeholder_node.inputs()): + if placeholder_node.input(idx) == node: + input_node.setInput(0, node) + + def _create_sib_copies(self, placeholder): + """ creating copies of the palce_holder siblings (the ones who were + loaded with it) for the new nodes added + + Returns : + copies (dict) : with copied nodes names and their copies + """ + + copies = {} + siblings = get_nodes_by_names(placeholder.data["siblings"]) + for node in siblings: + new_node = duplicate_node(node) + + x_init = int(new_node.knob("x_init").getValue()) + y_init = int(new_node.knob("y_init").getValue()) + new_node.setXYpos(x_init, y_init) + if isinstance(new_node, nuke.BackdropNode): + w_init = new_node.knob("w_init").getValue() + h_init = new_node.knob("h_init").getValue() + new_node.knob("bdwidth").setValue(w_init) + new_node.knob("bdheight").setValue(h_init) + refresh_node(node) + + if "repre_id" in node.knobs().keys(): + node.removeKnob(node.knob("repre_id")) + copies[node.name()] = new_node + return copies + + def _set_copies_connections(self, placeholder, copies): + """Set inputs and outputs of the copies. + + Args: + copies (dict): Copied nodes by their names. 
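+
+        Note:
+            The final 'siblings_input.setInput(0, copy_output)' call is what
+            re-chains the original siblings after the freshly created
+            copies, which keeps every additional load appended to the
+            existing graph.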
+ """ + + last_input, last_output = get_group_io_nodes( + placeholder.data["last_loaded"] + ) + siblings = get_nodes_by_names(placeholder.data["siblings"]) + siblings_input, siblings_output = get_group_io_nodes(siblings) + copy_input = copies[siblings_input.name()] + copy_output = copies[siblings_output.name()] + + for node_init in siblings: + if node_init == siblings_output: + continue + + node_copy = copies[node_init.name()] + for node in node_init.dependent(): + for idx in range(node.inputs()): + if node.input(idx) != node_init: + continue + + if node in siblings: + copies[node.name()].setInput(idx, node_copy) + else: + last_input.setInput(0, node_copy) + + for node in node_init.dependencies(): + for idx in range(node_init.inputs()): + if node_init.input(idx) != node: + continue + + if node_init == siblings_input: + copy_input.setInput(idx, node) + elif node in siblings: + node_copy.setInput(idx, copies[node.name()]) + else: + node_copy.setInput(idx, last_output) + + siblings_input.setInput(0, copy_output) + + +class NukeLoadPlaceholderItem(PlaceholderItem): + """Concrete implementation of PlaceholderItem for Maya load plugin.""" + + def __init__(self, *args, **kwargs): + super(NukeLoadPlaceholderItem, self).__init__(*args, **kwargs) + self._failed_representations = [] + + def get_errors(self): + if not self._failed_representations: + return [] + message = ( + "Failed to load {} representations using Loader {}" + ).format( + len(self._failed_representations), + self.data["loader"] + ) + return [message] + + def load_failed(self, representation): + self._failed_representations.append(representation) + + def load_succeed(self, container): + pass + + +def build_workfile_template(*args): + builder = NukeTemplateLoader(registered_host()) + builder.build_template() + + +def update_workfile_template(*args): + builder = NukeTemplateLoader(registered_host()) + builder.rebuild_template() + + +def create_placeholder(*args): + host = registered_host() + builder = NukeTemplateLoader(host) + window = WorkfileBuildPlaceholderDialog(host, builder) + window.exec_() + + +def update_placeholder(*args): + host = registered_host() + builder = NukeTemplateLoader(host) + placeholder_items_by_id = { + placeholder_item.scene_identifier: placeholder_item + for placeholder_item in builder.get_placeholders() + } + placeholder_items = [] + for node in nuke.selectedNodes(): + node_name = node.fullName() + if node_name in placeholder_items_by_id: + placeholder_items.append(placeholder_items_by_id[node_name]) + + # TODO show UI at least + if len(placeholder_items) == 0: + raise ValueError("No node selected") + + if len(placeholder_items) > 1: + raise ValueError("Too many selected nodes") + + placeholder_item = placeholder_items[0] + window = WorkfileBuildPlaceholderDialog(host, builder) + window.set_update_mode(placeholder_item) + window.exec_() From 34dc71936b626ed6b15de64003d086de3d3625a3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 11:51:22 +0200 Subject: [PATCH 1073/2550] use new workfile building system in nuke --- openpype/hosts/nuke/api/pipeline.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index d4edd24cf6..c6ccfaeb3a 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -22,10 +22,6 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.workfile import BuildWorkfile -from openpype.pipeline.workfile.build_template 
import ( - build_workfile_template, - update_workfile_template -) from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop @@ -40,8 +36,12 @@ from .lib import ( set_avalon_knob_data, read_avalon_data, ) -from .lib_template_builder import ( - create_placeholder, update_placeholder +from .workfile_template_builder import ( + NukePlaceholderLoadPlugin, + build_workfile_template, + update_workfile_template, + create_placeholder, + update_placeholder, ) log = Logger.get_logger(__name__) From 703169e1706e50ec02d6a05959bf2e8504ebac5c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:00:34 +0200 Subject: [PATCH 1074/2550] removed unused imports --- openpype/hosts/nuke/api/workfile_template_builder.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 71ea5c95a5..d018b9b598 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -1,12 +1,8 @@ -import json import collections import nuke from openpype.pipeline import registered_host -from openpype.pipeline.workfile.build_template_exceptions import ( - TemplateAlreadyImported -) from openpype.pipeline.workfile.new_template_loader import ( AbstractTemplateLoader, PlaceholderPlugin, From f6792d2e420fc75d5a3c4aee489c40a48be56d6c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 13 Sep 2022 18:04:45 +0800 Subject: [PATCH 1075/2550] adding a Qt lockfile dialog for lockfile tasks --- openpype/hosts/maya/api/pipeline.py | 27 ++++------ openpype/pipeline/workfile/lock_workfile.py | 14 ++---- openpype/tools/workfiles/files_widget.py | 14 ++---- openpype/tools/workfiles/lock_dialog.py | 55 +++++++++++++++++++++ 4 files changed, 74 insertions(+), 36 deletions(-) create mode 100644 openpype/tools/workfiles/lock_dialog.py diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index b34a216c13..5c7a7abf4d 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -480,6 +480,7 @@ def on_before_save(): def check_lock_on_current_file(): + """Check if there is a user opening the file""" if not handle_workfile_locks(): return @@ -492,23 +493,15 @@ def check_lock_on_current_file(): create_workfile_lock(filepath) return - username = get_user_from_lock(filepath) - reminder = cmds.window(title="Reminder", width=400, height=30) - cmds.columnLayout(adjustableColumn=True) - cmds.separator() - cmds.columnLayout(adjustableColumn=True) - comment = " %s is working the same workfile!" 
% username - cmds.text(comment, align='center') - cmds.text(vis=False) - cmds.rowColumnLayout(numberOfColumns=3, - columnWidth=[(1, 200), (2, 100), (3, 100)], - columnSpacing=[(3, 10)]) - cmds.separator(vis=False) - cancel_command = "cmds.file(new=True);cmds.deleteUI('%s')" % reminder - ignore_command = "cmds.deleteUI('%s')" % reminder - cmds.button(label='Cancel', command=cancel_command) - cmds.button(label="Ignore", command=ignore_command) - cmds.showWindow(reminder) + # add lockfile dialog + from Qt import QtWidgets + from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog + + top_level_widgets = {w.objectName(): w for w in + QtWidgets.QApplication.topLevelWidgets()} + parent = top_level_widgets.get("MayaWindow", None) + workfile_dialog = WorkfileLockDialog(filepath, parent=parent) + workfile_dialog.show() def on_before_close(): diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py index 7c8c4a8066..fbec44247a 100644 --- a/openpype/pipeline/workfile/lock_workfile.py +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -26,6 +26,11 @@ def is_workfile_locked(filepath): return True +def get_workfile_lock_data(filepath): + lock_filepath = _get_lock_file(filepath) + return _read_lock_file(lock_filepath) + + def is_workfile_locked_for_current_process(filepath): if not is_workfile_locked(filepath): return False @@ -49,15 +54,6 @@ def create_workfile_lock(filepath): json.dump(info, stream) -def get_user_from_lock(filepath): - lock_filepath = _get_lock_file(filepath) - if not os.path.exists(lock_filepath): - return - data = _read_lock_file(lock_filepath) - username = data["username"] - return username - - def remove_workfile_lock(filepath): if is_workfile_locked_for_current_process(filepath): delete_workfile_lock(filepath) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 5eab3af144..c1c647478d 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -10,7 +10,6 @@ from openpype.host import IWorkfileHost from openpype.client import get_asset_by_id from openpype.pipeline.workfile.lock_workfile import ( is_workfile_locked, - get_user_from_lock, is_workfile_lock_enabled, is_workfile_locked_for_current_process ) @@ -20,6 +19,7 @@ from openpype.lib import ( emit_event, create_workdir_extra_folders, ) +from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog from openpype.pipeline import ( registered_host, legacy_io, @@ -30,6 +30,7 @@ from openpype.pipeline.context_tools import ( change_current_context ) from openpype.pipeline.workfile import get_workfile_template_key +from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog from .model import ( WorkAreaFilesModel, @@ -468,15 +469,8 @@ class FilesWidget(QtWidgets.QWidget): def open_file(self, filepath): host = self.host if self._is_workfile_locked(filepath): - username = get_user_from_lock(filepath) - popup_dialog = QtWidgets.QMessageBox(parent=self) - popup_dialog.setWindowTitle("Warning") - popup_dialog.setText(username + " is using the file") - popup_dialog.setStandardButtons(popup_dialog.Ok) - - result = popup_dialog.exec_() - if result == popup_dialog.Ok: - return False + # add lockfile dialog + WorkfileLockDialog(filepath, parent=self) if isinstance(host, IWorkfileHost): has_unsaved_changes = host.workfile_has_unsaved_changes() diff --git a/openpype/tools/workfiles/lock_dialog.py b/openpype/tools/workfiles/lock_dialog.py new file mode 100644 index 
0000000000..6c0ad6e850 --- /dev/null +++ b/openpype/tools/workfiles/lock_dialog.py @@ -0,0 +1,55 @@ +from Qt import QtWidgets, QtCore, QtGui +from openpype.style import load_stylesheet, get_app_icon_path + +from openpype.pipeline.workfile.lock_workfile import get_workfile_lock_data + + +class WorkfileLockDialog(QtWidgets.QDialog): + def __init__(self, workfile_path, parent=None): + super(WorkfileLockDialog, self).__init__(parent) + self.setWindowTitle("Warning") + icon = QtGui.QIcon(get_app_icon_path()) + self.setWindowIcon(icon) + + data = get_workfile_lock_data(workfile_path) + + message = "{} on {} machine is working on the same workfile.".format( + data["username"], + data["hostname"] + ) + + msg_label = QtWidgets.QLabel(message, self) + + btns_widget = QtWidgets.QWidget(self) + + cancel_btn = QtWidgets.QPushButton("Cancel", btns_widget) + ignore_btn = QtWidgets.QPushButton("Ignore lock", btns_widget) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.setContentsMargins(0, 0, 0, 0) + btns_layout.setSpacing(10) + btns_layout.addStretch(1) + btns_layout.addWidget(cancel_btn, 0) + btns_layout.addWidget(ignore_btn, 0) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(15, 15, 15, 15) + main_layout.addWidget(msg_label, 1, QtCore.Qt.AlignCenter), + main_layout.addSpacing(10) + main_layout.addWidget(btns_widget, 0) + + cancel_btn.clicked.connect(self._on_cancel_click) + ignore_btn.clicked.connect(self._on_ignore_click) + + def showEvent(self, event): + super(WorkfileLockDialog, self).showEvent(event) + + self.setStyleSheet(load_stylesheet()) + + def _on_ignore_click(self): + # Result is '1' + self.accept() + + def _on_cancel_click(self): + # Result is '0' + self.reject() From 3c949aaec3a9cffb6830a100e5b9dcec3f18b1aa Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 13 Sep 2022 18:06:35 +0800 Subject: [PATCH 1076/2550] adding a Qt lockfile dialog for lockfile tasks --- openpype/hosts/maya/api/pipeline.py | 1 - openpype/tools/workfiles/files_widget.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 5c7a7abf4d..87f34e1c05 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -33,7 +33,6 @@ from openpype.pipeline import ( from openpype.pipeline.load import any_outdated_containers from openpype.pipeline.workfile.lock_workfile import ( create_workfile_lock, - get_user_from_lock, remove_workfile_lock, is_workfile_locked, is_workfile_lock_enabled diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index c1c647478d..b59a7eccc5 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -30,7 +30,7 @@ from openpype.pipeline.context_tools import ( change_current_context ) from openpype.pipeline.workfile import get_workfile_template_key -from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog + from .model import ( WorkAreaFilesModel, From 085ec8989092af6b0a478ab2d414975285476e19 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:17:14 +0200 Subject: [PATCH 1077/2550] renamed 'new_template_loader' to 'workfile_template_builder' --- .../{new_template_loader.py => workfile_template_builder.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/pipeline/workfile/{new_template_loader.py => workfile_template_builder.py} (100%) diff --git a/openpype/pipeline/workfile/new_template_loader.py 
b/openpype/pipeline/workfile/workfile_template_builder.py similarity index 100% rename from openpype/pipeline/workfile/new_template_loader.py rename to openpype/pipeline/workfile/workfile_template_builder.py From 518c3f75cabdce037c42f5132b50e623bc9c88de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:18:09 +0200 Subject: [PATCH 1078/2550] changed AbstractTemplateLoader to AbstractTemplateBuilder --- .../hosts/maya/api/workfile_template_builder.py | 16 ++++++++-------- .../hosts/nuke/api/workfile_template_builder.py | 16 ++++++++-------- .../workfile/workfile_template_builder.py | 10 ++++++++-- 3 files changed, 24 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 42736badf2..b947a51aaa 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -6,8 +6,8 @@ from openpype.pipeline import registered_host from openpype.pipeline.workfile.build_template_exceptions import ( TemplateAlreadyImported ) -from openpype.pipeline.workfile.new_template_loader import ( - AbstractTemplateLoader, +from openpype.pipeline.workfile.workfile_template_builder import ( + AbstractTemplateBuilder, PlaceholderPlugin, PlaceholderItem, PlaceholderLoadMixin, @@ -21,8 +21,8 @@ from .lib import read, imprint PLACEHOLDER_SET = "PLACEHOLDERS_SET" -class MayaTemplateLoader(AbstractTemplateLoader): - """Concrete implementation of AbstractTemplateLoader for maya""" +class MayaTemplateBuilder(AbstractTemplateBuilder): + """Concrete implementation of AbstractTemplateBuilder for maya""" def import_template(self, path): """Import template into current scene. @@ -313,25 +313,25 @@ class LoadPlaceholderItem(PlaceholderItem): def build_workfile_template(*args): - builder = MayaTemplateLoader(registered_host()) + builder = MayaTemplateBuilder(registered_host()) builder.build_template() def update_workfile_template(*args): - builder = MayaTemplateLoader(registered_host()) + builder = MayaTemplateBuilder(registered_host()) builder.rebuild_template() def create_placeholder(*args): host = registered_host() - builder = MayaTemplateLoader(host) + builder = MayaTemplateBuilder(host) window = WorkfileBuildPlaceholderDialog(host, builder) window.exec_() def update_placeholder(*args): host = registered_host() - builder = MayaTemplateLoader(host) + builder = MayaTemplateBuilder(host) placeholder_items_by_id = { placeholder_item.scene_identifier: placeholder_item for placeholder_item in builder.get_placeholders() diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index d018b9b598..f4dfac1e32 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -3,8 +3,8 @@ import collections import nuke from openpype.pipeline import registered_host -from openpype.pipeline.workfile.new_template_loader import ( - AbstractTemplateLoader, +from openpype.pipeline.workfile.workfile_template_builder import ( + AbstractTemplateBuilder, PlaceholderPlugin, PlaceholderItem, PlaceholderLoadMixin, @@ -31,8 +31,8 @@ from .lib import ( PLACEHOLDER_SET = "PLACEHOLDERS_SET" -class NukeTemplateLoader(AbstractTemplateLoader): - """Concrete implementation of AbstractTemplateLoader for maya""" +class NukeTemplateBuilder(AbstractTemplateBuilder): + """Concrete implementation of AbstractTemplateBuilder for maya""" def import_template(self, path): 
"""Import template into current scene. @@ -561,25 +561,25 @@ class NukeLoadPlaceholderItem(PlaceholderItem): def build_workfile_template(*args): - builder = NukeTemplateLoader(registered_host()) + builder = NukeTemplateBuilder(registered_host()) builder.build_template() def update_workfile_template(*args): - builder = NukeTemplateLoader(registered_host()) + builder = NukeTemplateBuilder(registered_host()) builder.rebuild_template() def create_placeholder(*args): host = registered_host() - builder = NukeTemplateLoader(host) + builder = NukeTemplateBuilder(host) window = WorkfileBuildPlaceholderDialog(host, builder) window.exec_() def update_placeholder(*args): host = registered_host() - builder = NukeTemplateLoader(host) + builder = NukeTemplateBuilder(host) placeholder_items_by_id = { placeholder_item.scene_identifier: placeholder_item for placeholder_item in builder.get_placeholders() diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 3f81ce0114..4c6f3939e5 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -36,8 +36,14 @@ from .build_template_exceptions import ( @six.add_metaclass(ABCMeta) -class AbstractTemplateLoader: - """Abstraction of Template Loader. +class AbstractTemplateBuilder(object): + """Abstraction of Template Builder. + + Builder cares about context, shared data, cache, discovery of plugins + and trigger logic. Provides public api for host workfile build systen. + + Rest of logic is based on plugins that care about collection and creation + of placeholder items. Args: host (Union[HostBase, ModuleType]): Implementation of host. From 30780efd487ee22a4214bdaf6f09ebb48e66e004 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:18:38 +0200 Subject: [PATCH 1079/2550] renamed method 'update_template_placeholder' to 'repopulate_placeholder' --- openpype/hosts/maya/api/workfile_template_builder.py | 2 +- openpype/hosts/nuke/api/workfile_template_builder.py | 2 +- openpype/pipeline/workfile/workfile_template_builder.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index b947a51aaa..5fd2113bdb 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -209,7 +209,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): def populate_placeholder(self, placeholder): self.populate_load_placeholder(placeholder) - def update_template_placeholder(self, placeholder): + def repopulate_placeholder(self, placeholder): repre_ids = self._get_loaded_repre_ids() self.populate_load_placeholder(placeholder, repre_ids) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index f4dfac1e32..ba0d975496 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -185,7 +185,7 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): def populate_placeholder(self, placeholder): self.populate_load_placeholder(placeholder) - def update_template_placeholder(self, placeholder): + def repopulate_placeholder(self, placeholder): repre_ids = self._get_loaded_repre_ids() self.populate_load_placeholder(placeholder, repre_ids) diff --git 
a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 4c6f3939e5..494eebda8a 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -377,7 +377,7 @@ class AbstractTemplateBuilder(object): for placeholder in placeholders: plugin = placeholder.plugin - plugin.update_template_placeholder(placeholder) + plugin.repopulate_placeholder(placeholder) self.clear_shared_populate_data() @@ -725,7 +725,7 @@ class PlaceholderPlugin(object): pass - def update_template_placeholder(self, placeholder): + def repopulate_placeholder(self, placeholder): """Update scene with current context for passed placeholder. Can be used to re-run placeholder logic (if it make sense). From b14ab9f2aff93fce85cec7b6403183a0b7dcf511 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:31:44 +0200 Subject: [PATCH 1080/2550] added publisher to host tools --- openpype/tools/utils/host_tools.py | 37 ++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 52d15a59f7..3177ed35aa 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -32,6 +32,7 @@ class HostToolsHelper: self._workfiles_tool = None self._loader_tool = None self._creator_tool = None + self._publisher_tool = None self._subset_manager_tool = None self._scene_inventory_tool = None self._library_loader_tool = None @@ -205,6 +206,7 @@ class HostToolsHelper: pyblish_show = self._discover_pyblish_gui() return pyblish_show(parent) + def _discover_pyblish_gui(self): """Return the most desirable of the currently registered GUIs""" # Prefer last registered @@ -269,6 +271,30 @@ class HostToolsHelper: dialog.activateWindow() dialog.showNormal() + def get_publisher_tool(self, parent): + """Create, cache and return scene inventory tool window.""" + if self._scene_inventory_tool is None: + from openpype.tools.publisher import PublisherWindow + + host = registered_host() + ILoadHost.validate_load_methods(host) + + publisher_window = PublisherWindow( + parent=parent or self._parent + ) + self._publisher_tool = publisher_window + + return self._publisher_tool + + def show_publisher_tool(self, parent=None): + with qt_app_context(): + dialog = self.get_publisher_tool(parent) + + dialog.show() + dialog.raise_() + dialog.activateWindow() + dialog.showNormal() + def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. 
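(Editorial aside between hunks, not part of the patch: a minimal usage sketch, assuming a Qt application is already running; the window lookup below is hypothetical.) The new "publisher" entry follows the same lazy create-and-cache pattern as the other host tools, so a host menu callback only needs the module-level helper added by this patch:

    from Qt import QtWidgets
    from openpype.tools.utils import host_tools

    def on_publisher_clicked():
        # Optional: reuse the host's current top-level window as Qt parent.
        parent = QtWidgets.QApplication.activeWindow()
        # The first call creates and caches the PublisherWindow via
        # HostToolsHelper.get_publisher_tool(); later calls only re-show it.
        host_tools.show_publisher(parent=parent)
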
@@ -298,6 +324,10 @@ class HostToolsHelper: elif tool_name == "publish": self.log.info("Can't return publish tool window.") + # "new" publisher + elif tool_name == "publisher": + return self.get_publisher_tool(parent, *args, **kwargs) + elif tool_name == "experimental_tools": return self.get_experimental_tools_dialog(parent, *args, **kwargs) @@ -335,6 +365,9 @@ class HostToolsHelper: elif tool_name == "publish": self.show_publish(parent, *args, **kwargs) + elif tool_name == "publisher": + self.show_publisher_tool(parent, *args, **kwargs) + elif tool_name == "experimental_tools": self.show_experimental_tools_dialog(parent, *args, **kwargs) @@ -414,6 +447,10 @@ def show_publish(parent=None): _SingletonPoint.show_tool_by_name("publish", parent) +def show_publisher(parent=None): + _SingletonPoint.show_tool_by_name("publisher", parent) + + def show_experimental_tools_dialog(parent=None): _SingletonPoint.show_tool_by_name("experimental_tools", parent) From 7ad8aa34db533c97de270d520874a307caa93fe4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:38:36 +0200 Subject: [PATCH 1081/2550] fix variable usage --- openpype/tools/utils/host_tools.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 3177ed35aa..f7e6d330ed 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -273,7 +273,8 @@ class HostToolsHelper: def get_publisher_tool(self, parent): """Create, cache and return scene inventory tool window.""" - if self._scene_inventory_tool is None: + + if self._publisher_tool is None: from openpype.tools.publisher import PublisherWindow host = registered_host() From a440a92838772c967f1bb844534153fe9814f4fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 13 Sep 2022 13:34:37 +0200 Subject: [PATCH 1082/2550] Fix docstring Co-authored-by: Roy Nieterau --- openpype/tools/utils/host_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index f7e6d330ed..7208e0a500 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -272,7 +272,7 @@ class HostToolsHelper: dialog.showNormal() def get_publisher_tool(self, parent): - """Create, cache and return scene inventory tool window.""" + """Create, cache and return publisher window.""" if self._publisher_tool is None: from openpype.tools.publisher import PublisherWindow From 9e5e5d59210a82d6c171f3871834955d326b2a0b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 13:35:38 +0200 Subject: [PATCH 1083/2550] remove unnecessary lines --- openpype/tools/utils/host_tools.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 7208e0a500..d2f05d3302 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -194,7 +194,6 @@ class HostToolsHelper: library_loader_tool.showNormal() library_loader_tool.refresh() - def show_publish(self, parent=None): """Try showing the most desirable publish GUI @@ -206,7 +205,6 @@ class HostToolsHelper: pyblish_show = self._discover_pyblish_gui() return pyblish_show(parent) - def _discover_pyblish_gui(self): """Return the most desirable of the currently registered GUIs""" # Prefer last registered From ecee2d2be5d33c4014effa836f620114cfc1bf9a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 
13 Sep 2022 14:02:11 +0200 Subject: [PATCH 1084/2550] implemented 'check_ftrack_url' in ftrack module --- openpype/modules/ftrack/__init__.py | 8 ++- openpype/modules/ftrack/ftrack_module.py | 69 ++++++++++++++++++++---- 2 files changed, 64 insertions(+), 13 deletions(-) diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index 7261254c6f..6dc67b74b9 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -1,9 +1,13 @@ from .ftrack_module import ( FtrackModule, - FTRACK_MODULE_DIR + FTRACK_MODULE_DIR, + + check_ftrack_url, ) __all__ = ( "FtrackModule", - "FTRACK_MODULE_DIR" + "FTRACK_MODULE_DIR", + + "check_ftrack_url", ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index cb4f204523..e00f9d89c6 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -12,8 +12,10 @@ from openpype_interfaces import ( ISettingsChangeListener ) from openpype.settings import SaveWarningExc +from openpype.lib import Logger FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) +_PLACEHOLDER = object() class FtrackModule( @@ -28,17 +30,8 @@ class FtrackModule( ftrack_settings = settings[self.name] self.enabled = ftrack_settings["enabled"] - # Add http schema - ftrack_url = ftrack_settings["ftrack_server"].strip("/ ") - if ftrack_url: - if "http" not in ftrack_url: - ftrack_url = "https://" + ftrack_url - - # Check if "ftrack.app" is part os url - if "ftrackapp.com" not in ftrack_url: - ftrack_url = ftrack_url + ".ftrackapp.com" - - self.ftrack_url = ftrack_url + self._settings_ftrack_url = ftrack_settings["ftrack_server"] + self._ftrack_url = _PLACEHOLDER current_dir = os.path.dirname(os.path.abspath(__file__)) low_platform = platform.system().lower() @@ -70,6 +63,16 @@ class FtrackModule( self.timers_manager_connector = None self._timers_manager_module = None + def get_ftrack_url(self): + if self._ftrack_url is _PLACEHOLDER: + self._ftrack_url = check_ftrack_url( + self._settings_ftrack_url, + logger=self.log + ) + return self._ftrack_url + + ftrack_url = property(get_ftrack_url) + def get_global_environments(self): """Ftrack's global environments.""" return { @@ -479,6 +482,50 @@ class FtrackModule( click_group.add_command(cli_main) +def _check_ftrack_url(url): + import requests + + try: + result = requests.get(url, allow_redirects=False) + except requests.exceptions.RequestException: + return False + + if (result.status_code != 200 or "FTRACK_VERSION" not in result.headers): + return False + return True + + +def check_ftrack_url(url, log_errors=True, logger=None): + """Checks if Ftrack server is responding""" + + if logger is None: + logger = Logger.get_logger(__name__) + + url = url.strip("/ ") + if not url: + logger.error("Ftrack URL is not set!") + return None + + if not url.startswith("http"): + url = "https://" + url + + ftrack_url = None + if not url.endswith("ftrackapp.com"): + ftrackapp_url = url + ".ftrackapp.com" + if _check_ftrack_url(ftrackapp_url): + ftrack_url = ftrackapp_url + + if not ftrack_url and _check_ftrack_url(url): + ftrack_url = url + + if ftrack_url: + logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) + elif log_errors: + logger.error("Entered Ftrack URL \"{}\" is not accesible!".format(url)) + + return ftrack_url + + @click.group(FtrackModule.name, help="Ftrack module related commands.") def cli_main(): pass From b29f26b28cb9350c0460b8bd8b89a8bfcbf0c7cd Mon Sep 17 00:00:00 2001 From: 
Jakub Trllo Date: Tue, 13 Sep 2022 14:02:42 +0200 Subject: [PATCH 1085/2550] changed imports in ftrack tray --- openpype/modules/ftrack/tray/ftrack_tray.py | 40 +++++++++------------ 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index 501d837a4c..a6a87b8ef9 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -6,22 +6,18 @@ import threading from Qt import QtCore, QtWidgets, QtGui import ftrack_api -from ..ftrack_server.lib import check_ftrack_url -from ..ftrack_server import socket_thread -from ..lib import credentials -from ..ftrack_module import FTRACK_MODULE_DIR -from . import login_dialog - from openpype import resources from openpype.lib import Logger - - -log = Logger.get_logger("FtrackModule") +from openpype_modules.ftrack import check_ftrack_url, FTRACK_MODULE_DIR +from openpype_modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.lib import credentials +from . import login_dialog class FtrackTrayWrapper: def __init__(self, module): self.module = module + self.log = Logger.get_logger(self.__class__.__name__) self.thread_action_server = None self.thread_socket_server = None @@ -62,19 +58,19 @@ class FtrackTrayWrapper: if validation: self.widget_login.set_credentials(ft_user, ft_api_key) self.module.set_credentials_to_env(ft_user, ft_api_key) - log.info("Connected to Ftrack successfully") + self.log.info("Connected to Ftrack successfully") self.on_login_change() return validation if not validation and ft_user and ft_api_key: - log.warning( + self.log.warning( "Current Ftrack credentials are not valid. {}: {} - {}".format( str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key ) ) - log.info("Please sign in to Ftrack") + self.log.info("Please sign in to Ftrack") self.bool_logged = False self.show_login_widget() self.set_menu_visibility() @@ -104,7 +100,7 @@ class FtrackTrayWrapper: self.action_credentials.setIcon(self.icon_not_logged) self.action_credentials.setToolTip("Logged out") - log.info("Logged out of Ftrack") + self.log.info("Logged out of Ftrack") self.bool_logged = False self.set_menu_visibility() @@ -126,10 +122,6 @@ class FtrackTrayWrapper: ftrack_url = self.module.ftrack_url os.environ["FTRACK_SERVER"] = ftrack_url - parent_file_path = os.path.dirname( - os.path.dirname(os.path.realpath(__file__)) - ) - min_fail_seconds = 5 max_fail_count = 3 wait_time_after_max_fail = 10 @@ -154,7 +146,7 @@ class FtrackTrayWrapper: # Main loop while True: if not self.bool_action_server_running: - log.debug("Action server was pushed to stop.") + self.log.debug("Action server was pushed to stop.") break # Check if accessible Ftrack and Mongo url @@ -164,7 +156,9 @@ class FtrackTrayWrapper: # Run threads only if Ftrack is accessible if not ftrack_accessible: if not printed_ftrack_error: - log.warning("Can't access Ftrack {}".format(ftrack_url)) + self.log.warning( + "Can't access Ftrack {}".format(ftrack_url) + ) if self.thread_socket_server is not None: self.thread_socket_server.stop() @@ -191,7 +185,7 @@ class FtrackTrayWrapper: self.set_menu_visibility() elif failed_count == max_fail_count: - log.warning(( + self.log.warning(( "Action server failed {} times." 
" I'll try to run again {}s later" ).format( @@ -243,10 +237,10 @@ class FtrackTrayWrapper: self.thread_action_server.join() self.thread_action_server = None - log.info("Ftrack action server was forced to stop") + self.log.info("Ftrack action server was forced to stop") except Exception: - log.warning( + self.log.warning( "Error has happened during Killing action server", exc_info=True ) @@ -343,7 +337,7 @@ class FtrackTrayWrapper: self.thread_timer = None except Exception as e: - log.error("During Killing Timer event server: {0}".format(e)) + self.log.error("During Killing Timer event server: {0}".format(e)) def changed_user(self): self.stop_action_server() From 21e050a8f18a272da3e200405550be6570e9f3d9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 14:05:25 +0200 Subject: [PATCH 1086/2550] use new import of 'check_ftrack_url' --- openpype/modules/ftrack/ftrack_module.py | 2 +- .../modules/ftrack/ftrack_server/__init__.py | 2 -- .../ftrack/ftrack_server/event_server_cli.py | 6 ++-- openpype/modules/ftrack/ftrack_server/lib.py | 35 +------------------ openpype/modules/ftrack/lib/avalon_sync.py | 7 ++-- 5 files changed, 8 insertions(+), 44 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index e00f9d89c6..899711e33e 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -6,7 +6,7 @@ import platform import click from openpype.modules import OpenPypeModule -from openpype_interfaces import ( +from openpype.modules.interfaces import ( ITrayModule, IPluginPaths, ISettingsChangeListener diff --git a/openpype/modules/ftrack/ftrack_server/__init__.py b/openpype/modules/ftrack/ftrack_server/__init__.py index 9e3920b500..8e5f7c4c51 100644 --- a/openpype/modules/ftrack/ftrack_server/__init__.py +++ b/openpype/modules/ftrack/ftrack_server/__init__.py @@ -1,8 +1,6 @@ from .ftrack_server import FtrackServer -from .lib import check_ftrack_url __all__ = ( "FtrackServer", - "check_ftrack_url" ) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 3ef7c8270a..2848469bc3 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -20,9 +20,11 @@ from openpype.lib import ( get_openpype_version, get_build_version, ) -from openpype_modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack import ( + FTRACK_MODULE_DIR, + check_ftrack_url, +) from openpype_modules.ftrack.lib import credentials -from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url from openpype_modules.ftrack.ftrack_server import socket_thread diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 947dacf917..c8143f739c 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -26,45 +26,12 @@ except ImportError: from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.client import OpenPypeMongoConnection -from openpype.api import Logger +from openpype.lib import Logger TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" -def check_ftrack_url(url, log_errors=True, logger=None): - """Checks if Ftrack server is responding""" - if logger is None: - logger = Logger.get_logger(__name__) - - if not url: - logger.error("Ftrack URL is not 
set!") - return None - - url = url.strip('/ ') - - if 'http' not in url: - if url.endswith('ftrackapp.com'): - url = 'https://' + url - else: - url = 'https://{0}.ftrackapp.com'.format(url) - try: - result = requests.get(url, allow_redirects=False) - except requests.exceptions.RequestException: - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - logger.debug("Ftrack server {} is accessible.".format(url)) - - return url - - class SocketBaseEventHub(ftrack_api.event.hub.EventHub): hearbeat_msg = b"hearbeat" diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 72be6a8e9a..935d1e85c9 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -19,11 +19,8 @@ from openpype.client.operations import ( CURRENT_PROJECT_SCHEMA, CURRENT_PROJECT_CONFIG_SCHEMA, ) -from openpype.api import ( - Logger, - get_anatomy_settings -) -from openpype.lib import ApplicationManager +from openpype.settings import get_anatomy_settings +from openpype.lib import ApplicationManager, Logger from openpype.pipeline import AvalonMongoDB, schema from .constants import CUST_ATTR_ID_KEY, FPS_KEYS From d0d80b0b90648b1633a11c5980c0c66e3a3cff7f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 14:57:05 +0200 Subject: [PATCH 1087/2550] Fix typo `camera_option` -> `camera_options` - Also use `setdefault` to ensure its added into the preset when key wasn't there originally --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 871adda0c3..6010319f40 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -79,8 +79,10 @@ class ExtractPlayblast(openpype.api.Extractor): preset['height'] = asset_height preset['start_frame'] = start preset['end_frame'] = end - camera_option = preset.get("camera_option", {}) - camera_option["depthOfField"] = cmds.getAttr( + + # Enforce persisting camera depth of field + camera_options = preset.setdefault("camera_options", {}) + camera_options["depthOfField"] = cmds.getAttr( "{0}.depthOfField".format(camera)) stagingdir = self.staging_dir(instance) From 98f1312ce999e4be72a1a90bce99c75be967cdfd Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 13 Sep 2022 15:06:31 +0200 Subject: [PATCH 1088/2550] Modify log message Co-authored-by: Roy Nieterau --- openpype/modules/ftrack/ftrack_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 899711e33e..2ab0eb8239 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -521,7 +521,7 @@ def check_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) elif log_errors: - logger.error("Entered Ftrack URL \"{}\" is not accesible!".format(url)) + logger.error("Entered Ftrack URL \"{}\" is not accessible!".format(url)) return ftrack_url From aead601397e0ebecfafb6da62570c0585f627018 Mon Sep 17 
00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:11:20 +0200 Subject: [PATCH 1089/2550] unify messages --- openpype/modules/ftrack/ftrack_module.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 2ab0eb8239..e79910372f 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -520,8 +520,9 @@ def check_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) + elif log_errors: - logger.error("Entered Ftrack URL \"{}\" is not accessible!".format(url)) + logger.error("Ftrack server \"{}\" is not accessible!".format(url)) return ftrack_url From 0291d2a7054b6b551fc8e5dc1092a87a026838d8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:15:18 +0200 Subject: [PATCH 1090/2550] renamed 'check_ftrack_url' to 'resolve_ftrack_url' --- openpype/modules/ftrack/__init__.py | 4 ++-- openpype/modules/ftrack/ftrack_module.py | 6 +++--- openpype/modules/ftrack/ftrack_server/event_server_cli.py | 8 ++++---- openpype/modules/ftrack/tray/ftrack_tray.py | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index 6dc67b74b9..e520f08337 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -2,12 +2,12 @@ from .ftrack_module import ( FtrackModule, FTRACK_MODULE_DIR, - check_ftrack_url, + resolve_ftrack_url, ) __all__ = ( "FtrackModule", "FTRACK_MODULE_DIR", - "check_ftrack_url", + "resolve_ftrack_url", ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index e79910372f..05ea7b79d1 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -65,7 +65,7 @@ class FtrackModule( def get_ftrack_url(self): if self._ftrack_url is _PLACEHOLDER: - self._ftrack_url = check_ftrack_url( + self._ftrack_url = resolve_ftrack_url( self._settings_ftrack_url, logger=self.log ) @@ -495,8 +495,8 @@ def _check_ftrack_url(url): return True -def check_ftrack_url(url, log_errors=True, logger=None): - """Checks if Ftrack server is responding""" +def resolve_ftrack_url(url, log_errors=True, logger=None): + """Checks if Ftrack server is responding.""" if logger is None: logger = Logger.get_logger(__name__) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 2848469bc3..20c5ab24a8 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -22,7 +22,7 @@ from openpype.lib import ( ) from openpype_modules.ftrack import ( FTRACK_MODULE_DIR, - check_ftrack_url, + resolve_ftrack_url, ) from openpype_modules.ftrack.lib import credentials from openpype_modules.ftrack.ftrack_server import socket_thread @@ -116,7 +116,7 @@ def legacy_server(ftrack_url): while True: if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible and not printed_ftrack_error: @@ -259,7 +259,7 @@ def main_loop(ftrack_url): while True: # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) 
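    # Editorial reference sketch (assumed studio name "mystudio"): the renamed
    # 'resolve_ftrack_url' helper returns the resolved server URL, or None when
    # the server cannot be reached, so its return value doubles as the
    # accessibility check used in this loop.
    from openpype_modules.ftrack import resolve_ftrack_url

    ftrack_url = resolve_ftrack_url("mystudio")
    # Usually resolves to "https://mystudio.ftrackapp.com" when that address
    # responds; otherwise the plain URL is tried, and None is returned if
    # neither is accessible.
    if not ftrack_url:
        raise RuntimeError("Ftrack server is not accessible")
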
if not mongo_accessible: mongo_accessible = check_mongo_url(mongo_uri) @@ -443,7 +443,7 @@ def run_event_server( os.environ["CLOCKIFY_API_KEY"] = clockify_api_key # Check url regex and accessibility - ftrack_url = check_ftrack_url(ftrack_url) + ftrack_url = resolve_ftrack_url(ftrack_url) if not ftrack_url: print('Exiting! < Please enter Ftrack server url >') return 1 diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index a6a87b8ef9..e3c6e30ead 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -8,7 +8,7 @@ from Qt import QtCore, QtWidgets, QtGui import ftrack_api from openpype import resources from openpype.lib import Logger -from openpype_modules.ftrack import check_ftrack_url, FTRACK_MODULE_DIR +from openpype_modules.ftrack import resolve_ftrack_url, FTRACK_MODULE_DIR from openpype_modules.ftrack.ftrack_server import socket_thread from openpype_modules.ftrack.lib import credentials from . import login_dialog @@ -151,7 +151,7 @@ class FtrackTrayWrapper: # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible: From 09519c25a804186f9cc4afb92131d0572211f712 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:17:25 +0200 Subject: [PATCH 1091/2550] removed unused argument 'log_errors' --- openpype/modules/ftrack/ftrack_module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 05ea7b79d1..68575009b2 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -495,7 +495,7 @@ def _check_ftrack_url(url): return True -def resolve_ftrack_url(url, log_errors=True, logger=None): +def resolve_ftrack_url(url, logger=None): """Checks if Ftrack server is responding.""" if logger is None: @@ -521,7 +521,7 @@ def resolve_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) - elif log_errors: + else: logger.error("Ftrack server \"{}\" is not accessible!".format(url)) return ftrack_url From 477266f1407e84e2ba9d086107e15e8fc5173e79 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:18:56 +0200 Subject: [PATCH 1092/2550] better variable name for ftrack url value check --- openpype/modules/ftrack/ftrack_module.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 68575009b2..75ffd7f864 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -15,7 +15,7 @@ from openpype.settings import SaveWarningExc from openpype.lib import Logger FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -_PLACEHOLDER = object() +_URL_NOT_SET = object() class FtrackModule( @@ -31,7 +31,7 @@ class FtrackModule( self.enabled = ftrack_settings["enabled"] self._settings_ftrack_url = ftrack_settings["ftrack_server"] - self._ftrack_url = _PLACEHOLDER + self._ftrack_url = _URL_NOT_SET current_dir = os.path.dirname(os.path.abspath(__file__)) low_platform = platform.system().lower() @@ -64,7 +64,7 @@ class FtrackModule( self._timers_manager_module = None def get_ftrack_url(self): - if self._ftrack_url is _PLACEHOLDER: 
+ if self._ftrack_url is _URL_NOT_SET: self._ftrack_url = resolve_ftrack_url( self._settings_ftrack_url, logger=self.log From 6c2c161ed4cb570085b8ff6aada053bc4e1daf11 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:30:31 +0200 Subject: [PATCH 1093/2550] moved exceptions to workfile_template_builder --- .../maya/api/workfile_template_builder.py | 4 +-- .../workfile/workfile_template_builder.py | 26 +++++++++++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 5fd2113bdb..71e3e0ce4e 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -3,10 +3,8 @@ import json from maya import cmds from openpype.pipeline import registered_host -from openpype.pipeline.workfile.build_template_exceptions import ( - TemplateAlreadyImported -) from openpype.pipeline.workfile.workfile_template_builder import ( + TemplateAlreadyImported, AbstractTemplateBuilder, PlaceholderPlugin, PlaceholderItem, diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 494eebda8a..a381b96c8f 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -28,11 +28,27 @@ from openpype.pipeline.load import ( ) from openpype.pipeline.create import get_legacy_creator_by_name -from .build_template_exceptions import ( - TemplateProfileNotFound, - TemplateLoadingFailed, - TemplateNotFound, -) + +class TemplateNotFound(Exception): + """Exception raised when template does not exist.""" + pass + + +class TemplateProfileNotFound(Exception): + """Exception raised when current profile + doesn't match any template profile""" + pass + + +class TemplateAlreadyImported(Exception): + """Error raised when Template was already imported by host for + this session""" + pass + + +class TemplateLoadFailed(Exception): + """Error raised whend Template loader was unable to load the template""" + pass @six.add_metaclass(ABCMeta) From 778e0b2e491f5948f2932968a70f8f620204fb01 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:44:20 +0200 Subject: [PATCH 1094/2550] Perform case-insensitive lookup --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 7e5815b100..5d39e12985 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -74,11 +74,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): version_number = int(instance_version) family = instance.data["family"] - family_low = family.lower() + # Perform case-insensitive family mapping + family_low = family.lower() asset_type = instance.data.get("ftrackFamily") - if not asset_type and family_low in self.family_mapping: - asset_type = self.family_mapping[family_low] + if not asset_type: + for map_family, map_value in self.family_mapping.items(): + if map_family.lower() == family_low: + asset_type = map_value if not asset_type: asset_type = "upload" From 4466d8a94249ad66546730b7135e34003f4aa4f8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:45:11 +0200 Subject: 
[PATCH 1095/2550] Remove redundant logic since just above it's forced to be "upload" when `not asset_type` --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 5d39e12985..a35dbf71d4 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -89,15 +89,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): self.log.debug( "Family: {}\nMapping: {}".format(family_low, self.family_mapping) ) - - # Ignore this instance if neither "ftrackFamily" or a family mapping is - # found. - if not asset_type: - self.log.info(( - "Family \"{}\" does not match any asset type mapping" - ).format(family)) - return - status_name = self._get_asset_version_status_name(instance) # Base of component item data From 1bc37ace465f647b6af35a4a2b8cf2832bd94925 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:45:48 +0200 Subject: [PATCH 1096/2550] Actually break loop early on detected mapping --- .../modules/ftrack/plugins/publish/integrate_ftrack_instances.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index a35dbf71d4..5ff75e7060 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -82,6 +82,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): for map_family, map_value in self.family_mapping.items(): if map_family.lower() == family_low: asset_type = map_value + break if not asset_type: asset_type = "upload" From b2b1613016f54d8293296c2f6cca5a87a9b62565 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 16:19:52 +0200 Subject: [PATCH 1097/2550] fix raised exception --- openpype/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index a381b96c8f..2358a047f1 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -560,7 +560,7 @@ class AbstractTemplateBuilder(object): path = profile["path"] if not path: - raise TemplateLoadingFailed(( + raise TemplateLoadFailed(( "Template path is not set.\n" "Path need to be set in {}\\Template Workfile Build " "Settings\\Profiles" From fe2a769a7e3b5b8e6fc0744054c711a0e019ec72 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 16:20:27 +0200 Subject: [PATCH 1098/2550] added quick access to settings --- .../workfile/workfile_template_builder.py | 26 ++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 2358a047f1..28c06aeeac 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -11,7 +11,10 @@ from openpype.client import ( get_linked_assets, get_representations, ) -from openpype.settings import get_project_settings +from openpype.settings import ( + get_project_settings, + 
get_system_settings, +) from openpype.host import HostBase from openpype.lib import ( Logger, @@ -86,6 +89,9 @@ class AbstractTemplateBuilder(object): self._loaders_by_name = None self._creators_by_name = None + self._system_settings = None + self._project_settings = None + self._current_asset_doc = None self._linked_asset_docs = None self._task_type = None @@ -102,6 +108,18 @@ class AbstractTemplateBuilder(object): def current_task_name(self): return legacy_io.Session["AVALON_TASK"] + @property + def system_settings(self): + if self._system_settings is None: + self._system_settings = get_system_settings() + return self._system_settings + + @property + def project_settings(self): + if self._project_settings is None: + self._project_settings = get_project_settings(self.project_name) + return self._project_settings + @property def current_asset_doc(self): if self._current_asset_doc is None: @@ -184,6 +202,9 @@ class AbstractTemplateBuilder(object): self._linked_asset_docs = None self._task_type = None + self._system_settings = None + self._project_settings = None + self.clear_shared_data() self.clear_shared_populate_data() @@ -529,9 +550,8 @@ class AbstractTemplateBuilder(object): self.refresh() def _get_build_profiles(self): - project_settings = get_project_settings(self.project_name) return ( - project_settings + self.project_settings [self.host_name] ["templated_workfile_build"] ["profiles"] From a0ffa97e1d125642f575a26f888e6b8469ea8230 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 16:20:44 +0200 Subject: [PATCH 1099/2550] added some docstrings --- openpype/pipeline/workfile/build_workfile.py | 11 ++ .../workfile/workfile_template_builder.py | 117 ++++++++++++++++-- 2 files changed, 121 insertions(+), 7 deletions(-) diff --git a/openpype/pipeline/workfile/build_workfile.py b/openpype/pipeline/workfile/build_workfile.py index 0b8a444436..87b9df158f 100644 --- a/openpype/pipeline/workfile/build_workfile.py +++ b/openpype/pipeline/workfile/build_workfile.py @@ -1,3 +1,14 @@ +"""Workfile build based on settings. + +Workfile builder will do stuff based on project settings. Advantage is that +it need only access to settings. Disadvantage is that it is hard to focus +build per context and being explicit about loaded content. + +For more explicit workfile build is recommended 'AbstractTemplateBuilder' +from '~/openpype/pipeline/workfile/workfile_template_builder'. Which gives +more abilities to define how build happens but require more code to achive it. +""" + import os import re import collections diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 28c06aeeac..f81849fbe4 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1,3 +1,16 @@ +"""Workfile build mechanism using workfile templates. + +Build templates are manually prepared using plugin definitions which create +placeholders inside the template which are populated on import. + +This approach is very explicit to achive very specific build logic that can be +targeted by task types and names. + +Placeholders are created using placeholder plugins which should care about +logic and data of placeholder items. 'PlaceholderItem' is used to keep track +about it's progress. 
+""" + import os import re import collections @@ -64,6 +77,13 @@ class AbstractTemplateBuilder(object): Rest of logic is based on plugins that care about collection and creation of placeholder items. + Population of placeholders happens in loops. Each loop will collect all + available placeholders, skip already populated, and populate the rest. + + Builder item has 2 types of shared data. Refresh lifetime which are cleared + on refresh and populate lifetime which are cleared after loop of + placeholder population. + Args: host (Union[HostBase, ModuleType]): Implementation of host. """ @@ -382,6 +402,20 @@ class AbstractTemplateBuilder(object): )) def build_template(self, template_path=None, level_limit=None): + """Main callback for building workfile from template path. + + Todo: + Handle report of populated placeholders from + 'populate_scene_placeholders' to be shown to a user. + + Args: + template_path (str): Path to a template file with placeholders. + Template from settings 'get_template_path' used when not + passed. + level_limit (int): Limit of populate loops. Related to + 'populate_scene_placeholders' method. + """ + if template_path is None: template_path = self.get_template_path() self.import_template(template_path) @@ -492,6 +526,8 @@ class AbstractTemplateBuilder(object): for placeholder in placeholders } all_processed = len(placeholders) == 0 + # Counter is checked at the ned of a loop so the loop happens at least + # once. iter_counter = 0 while not all_processed: filtered_placeholders = [] @@ -550,6 +586,12 @@ class AbstractTemplateBuilder(object): self.refresh() def _get_build_profiles(self): + """Get build profiles for workfile build template path. + + Returns: + List[Dict[str, Any]]: Profiles for template path resolving. + """ + return ( self.project_settings [self.host_name] @@ -558,6 +600,22 @@ class AbstractTemplateBuilder(object): ) def get_template_path(self): + """Unified way how template path is received usign settings. + + Method is dependent on '_get_build_profiles' which should return filter + profiles to resolve path to a template. Default implementation looks + into host settings: + - 'project_settings/{host name}/templated_workfile_build/profiles' + + Returns: + str: Path to a template file with placeholders. + + Raises: + TemplateProfileNotFound: When profiles are not filled. + TemplateLoadFailed: Profile was found but path is not set. + TemplateNotFound: Path was set but file does not exists. + """ + host_name = self.host_name project_name = self.project_name task_name = self.current_task_name @@ -630,6 +688,14 @@ class AbstractTemplateBuilder(object): @six.add_metaclass(ABCMeta) class PlaceholderPlugin(object): + """Plugin which care about handling of placeholder items logic. + + Plugin create and update placeholders in scene and populate them on + template import. Populating means that based on placeholder data happens + a logic in the scene. Most common logic is to load representation using + loaders or to create instances in scene. + """ + label = None _log = None @@ -641,7 +707,7 @@ class PlaceholderPlugin(object): """Access to builder which initialized the plugin. Returns: - AbstractTemplateLoader: Loader of template build. + AbstractTemplateBuilder: Loader of template build. """ return self._builder @@ -852,8 +918,12 @@ class PlaceholderPlugin(object): class PlaceholderItem(object): """Item representing single item in scene that is a placeholder to process. + Items are always created and updated by their plugins. 
Each plugin can use + modified class of 'PlacehoderItem' but only to add more options instead of + new other. + Scene identifier is used to avoid processing of the palceholder item - multiple times. + multiple times so must be unique across whole workfile builder. Args: scene_identifier (str): Unique scene identifier. If placeholder is @@ -893,7 +963,7 @@ class PlaceholderItem(object): """Access to builder. Returns: - AbstractTemplateLoader: Builder which is the top part of + AbstractTemplateBuilder: Builder which is the top part of placeholder. """ @@ -936,6 +1006,8 @@ class PlaceholderItem(object): @property def order(self): + """Order of item processing.""" + order = self._data.get("order") if order is None: return self.default_order @@ -1160,7 +1232,25 @@ class PlaceholderLoadMixin(object): return {} - def get_representations(self, placeholder): + def _get_representations(self, placeholder): + """Prepared query of representations based on load options. + + This function is directly connected to options defined in + 'get_load_plugin_options'. + + Note: + This returns all representation documents from all versions of + matching subset. To filter for last version use + '_reduce_last_version_repre_docs'. + + Args: + placeholder (PlaceholderItem): Item which should be populated. + + Returns: + List[Dict[str, Any]]: Representation documents matching filters + from placeholder data. + """ + project_name = self.builder.project_name current_asset_doc = self.builder.current_asset_doc linked_asset_docs = self.builder.linked_asset_docs @@ -1263,7 +1353,7 @@ class PlaceholderLoadMixin(object): loader_name = placeholder.data["loader"] loader_args = placeholder.data["loader_args"] - placeholder_representations = self.get_representations(placeholder) + placeholder_representations = self._get_representations(placeholder) filtered_representations = [] for representation in self._reduce_last_version_repre_docs( @@ -1306,11 +1396,24 @@ class PlaceholderLoadMixin(object): ) except Exception: + failed = True placeholder.load_failed(representation) else: + failed = False placeholder.load_succeed(container) - self.cleanup_placeholder(placeholder) + self.cleanup_placeholder(placeholder, failed) + + def cleanup_placeholder(self, placeholder, failed): + """Cleanup placeholder after load of single representation. + + Can be called multiple times during placeholder item populating and is + called even if loading failed. + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. 
+ """ - def cleanup_placeholder(self, placeholder): pass From 60c1d1eb6c1ea42bd974cfd528dc9ce3cfda0b3f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 16:21:46 +0200 Subject: [PATCH 1100/2550] removed nuke previous template builder --- .../hosts/nuke/api/lib_template_builder.py | 220 ------ openpype/hosts/nuke/api/template_loader.py | 639 ------------------ 2 files changed, 859 deletions(-) delete mode 100644 openpype/hosts/nuke/api/lib_template_builder.py delete mode 100644 openpype/hosts/nuke/api/template_loader.py diff --git a/openpype/hosts/nuke/api/lib_template_builder.py b/openpype/hosts/nuke/api/lib_template_builder.py deleted file mode 100644 index 61baa23928..0000000000 --- a/openpype/hosts/nuke/api/lib_template_builder.py +++ /dev/null @@ -1,220 +0,0 @@ -from collections import OrderedDict - -import qargparse - -import nuke - -from openpype.tools.utils.widgets import OptionDialog - -from .lib import imprint, get_main_window - - -# To change as enum -build_types = ["context_asset", "linked_asset", "all_assets"] - - -def get_placeholder_attributes(node, enumerate=False): - list_atts = { - "builder_type", - "family", - "representation", - "loader", - "loader_args", - "order", - "asset", - "subset", - "hierarchy", - "siblings", - "last_loaded" - } - attributes = {} - for attr in node.knobs().keys(): - if attr in list_atts: - if enumerate: - try: - attributes[attr] = node.knob(attr).values() - except AttributeError: - attributes[attr] = node.knob(attr).getValue() - else: - attributes[attr] = node.knob(attr).getValue() - - return attributes - - -def delete_placeholder_attributes(node): - """Delete all extra placeholder attributes.""" - - extra_attributes = get_placeholder_attributes(node) - for attribute in extra_attributes.keys(): - try: - node.removeKnob(node.knob(attribute)) - except ValueError: - continue - - -def hide_placeholder_attributes(node): - """Hide all extra placeholder attributes.""" - - extra_attributes = get_placeholder_attributes(node) - for attribute in extra_attributes.keys(): - try: - node.knob(attribute).setVisible(False) - except ValueError: - continue - - -def create_placeholder(): - args = placeholder_window() - if not args: - # operation canceled, no locator created - return - - placeholder = nuke.nodes.NoOp() - placeholder.setName("PLACEHOLDER") - placeholder.knob("tile_color").setValue(4278190335) - - # custom arg parse to force empty data query - # and still imprint them on placeholder - # and getting items when arg is of type Enumerator - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = arg._data.get("items") or arg.read() - imprint(placeholder, options) - imprint(placeholder, {"is_placeholder": True}) - placeholder.knob("is_placeholder").setVisible(False) - - -def update_placeholder(): - placeholder = nuke.selectedNodes() - if not placeholder: - raise ValueError("No node selected") - if len(placeholder) > 1: - raise ValueError("Too many selected nodes") - placeholder = placeholder[0] - - args = placeholder_window(get_placeholder_attributes(placeholder)) - if not args: - return # operation canceled - # delete placeholder attributes - delete_placeholder_attributes(placeholder) - - options = OrderedDict() - for arg in args: - if not type(arg) == qargparse.Separator: - options[str(arg)] = arg._data.get("items") or arg.read() - imprint(placeholder, options) - - -def imprint_enum(placeholder, args): - """ - Imprint method doesn't act properly with enums. 
- Replacing the functionnality with this for now - """ - - enum_values = { - str(arg): arg.read() - for arg in args - if arg._data.get("items") - } - string_to_value_enum_table = { - build: idx - for idx, build in enumerate(build_types) - } - attrs = {} - for key, value in enum_values.items(): - attrs[key] = string_to_value_enum_table[value] - - -def placeholder_window(options=None): - options = options or dict() - dialog = OptionDialog(parent=get_main_window()) - dialog.setWindowTitle("Create Placeholder") - - args = [ - qargparse.Separator("Main attributes"), - qargparse.Enum( - "builder_type", - label="Asset Builder Type", - default=options.get("builder_type", 0), - items=build_types, - help="""Asset Builder Type -Builder type describe what template loader will look for. - -context_asset : Template loader will look for subsets of -current context asset (Asset bob will find asset) - -linked_asset : Template loader will look for assets linked -to current context asset. -Linked asset are looked in OpenPype database under field "inputLinks" -""" - ), - qargparse.String( - "family", - default=options.get("family", ""), - label="OpenPype Family", - placeholder="ex: image, plate ..."), - qargparse.String( - "representation", - default=options.get("representation", ""), - label="OpenPype Representation", - placeholder="ex: mov, png ..."), - qargparse.String( - "loader", - default=options.get("loader", ""), - label="Loader", - placeholder="ex: LoadClip, LoadImage ...", - help="""Loader - -Defines what openpype loader will be used to load assets. -Useable loader depends on current host's loader list. -Field is case sensitive. -"""), - qargparse.String( - "loader_args", - default=options.get("loader_args", ""), - label="Loader Arguments", - placeholder='ex: {"camera":"persp", "lights":True}', - help="""Loader - -Defines a dictionnary of arguments used to load assets. -Useable arguments depend on current placeholder Loader. -Field should be a valid python dict. Anything else will be ignored. -"""), - qargparse.Integer( - "order", - default=options.get("order", 0), - min=0, - max=999, - label="Order", - placeholder="ex: 0, 100 ... 
(smallest order loaded first)", - help="""Order - -Order defines asset loading priority (0 to 999) -Priority rule is : "lowest is first to load"."""), - qargparse.Separator( - "Optional attributes "), - qargparse.String( - "asset", - default=options.get("asset", ""), - label="Asset filter", - placeholder="regex filtering by asset name", - help="Filtering assets by matching field regex to asset's name"), - qargparse.String( - "subset", - default=options.get("subset", ""), - label="Subset filter", - placeholder="regex filtering by subset name", - help="Filtering assets by matching field regex to subset's name"), - qargparse.String( - "hierarchy", - default=options.get("hierarchy", ""), - label="Hierarchy filter", - placeholder="regex filtering by asset's hierarchy", - help="Filtering assets by matching field asset's hierarchy") - ] - dialog.create(args) - if not dialog.exec_(): - return None - - return args diff --git a/openpype/hosts/nuke/api/template_loader.py b/openpype/hosts/nuke/api/template_loader.py deleted file mode 100644 index 5ff4b8fc41..0000000000 --- a/openpype/hosts/nuke/api/template_loader.py +++ /dev/null @@ -1,639 +0,0 @@ -import re -import collections - -import nuke - -from openpype.client import get_representations -from openpype.pipeline import legacy_io -from openpype.pipeline.workfile.abstract_template_loader import ( - AbstractPlaceholder, - AbstractTemplateLoader, -) - -from .lib import ( - find_free_space_to_paste_nodes, - get_extreme_positions, - get_group_io_nodes, - imprint, - refresh_node, - refresh_nodes, - reset_selection, - get_names_from_nodes, - get_nodes_by_names, - select_nodes, - duplicate_node, - node_tempfile, -) - -from .lib_template_builder import ( - delete_placeholder_attributes, - get_placeholder_attributes, - hide_placeholder_attributes -) - -PLACEHOLDER_SET = "PLACEHOLDERS_SET" - - -class NukeTemplateLoader(AbstractTemplateLoader): - """Concrete implementation of AbstractTemplateLoader for Nuke - - """ - - def import_template(self, path): - """Import template into current scene. - Block if a template is already loaded. 
- - Args: - path (str): A path to current template (usually given by - get_template_path implementation) - - Returns: - bool: Wether the template was succesfully imported or not - """ - - # TODO check if the template is already imported - - nuke.nodePaste(path) - reset_selection() - - return True - - def preload(self, placeholder, loaders_by_name, last_representation): - placeholder.data["nodes_init"] = nuke.allNodes() - placeholder.data["last_repre_id"] = str(last_representation["_id"]) - - def populate_template(self, ignored_ids=None): - processed_key = "_node_processed" - - processed_nodes = [] - nodes = self.get_template_nodes() - while nodes: - # Mark nodes as processed so they're not re-executed - # - that can happen if processing of placeholder node fails - for node in nodes: - imprint(node, {processed_key: True}) - processed_nodes.append(node) - - super(NukeTemplateLoader, self).populate_template(ignored_ids) - - # Recollect nodes to repopulate - nodes = [] - for node in self.get_template_nodes(): - # Skip already processed nodes - if ( - processed_key in node.knobs() - and node.knob(processed_key).value() - ): - continue - nodes.append(node) - - for node in processed_nodes: - knob = node.knob(processed_key) - if knob is not None: - node.removeKnob(knob) - - @staticmethod - def get_template_nodes(): - placeholders = [] - all_groups = collections.deque() - all_groups.append(nuke.thisGroup()) - while all_groups: - group = all_groups.popleft() - for node in group.nodes(): - if isinstance(node, nuke.Group): - all_groups.append(node) - - node_knobs = node.knobs() - if ( - "builder_type" not in node_knobs - or "is_placeholder" not in node_knobs - or not node.knob("is_placeholder").value() - ): - continue - - if "empty" in node_knobs and node.knob("empty").value(): - continue - - placeholders.append(node) - - return placeholders - - def update_missing_containers(self): - nodes_by_id = collections.defaultdict(list) - - for node in nuke.allNodes(): - node_knobs = node.knobs().keys() - if "repre_id" in node_knobs: - repre_id = node.knob("repre_id").getValue() - nodes_by_id[repre_id].append(node.name()) - - if "empty" in node_knobs: - node.removeKnob(node.knob("empty")) - imprint(node, {"empty": False}) - - for node_names in nodes_by_id.values(): - node = None - for node_name in node_names: - node_by_name = nuke.toNode(node_name) - if "builder_type" in node_by_name.knobs().keys(): - node = node_by_name - break - - if node is None: - continue - - placeholder = nuke.nodes.NoOp() - placeholder.setName("PLACEHOLDER") - placeholder.knob("tile_color").setValue(4278190335) - attributes = get_placeholder_attributes(node, enumerate=True) - imprint(placeholder, attributes) - pos_x = int(node.knob("x").getValue()) - pos_y = int(node.knob("y").getValue()) - placeholder.setXYpos(pos_x, pos_y) - imprint(placeholder, {"nb_children": 1}) - refresh_node(placeholder) - - self.populate_template(self.get_loaded_containers_by_id()) - - def get_loaded_containers_by_id(self): - repre_ids = set() - for node in nuke.allNodes(): - if "repre_id" in node.knobs(): - repre_ids.add(node.knob("repre_id").getValue()) - - # Removes duplicates in the list - return list(repre_ids) - - def delete_placeholder(self, placeholder): - placeholder_node = placeholder.data["node"] - last_loaded = placeholder.data["last_loaded"] - if not placeholder.data["delete"]: - if "empty" in placeholder_node.knobs().keys(): - placeholder_node.removeKnob(placeholder_node.knob("empty")) - imprint(placeholder_node, {"empty": True}) - return - - if not 
last_loaded: - nuke.delete(placeholder_node) - return - - if "last_loaded" in placeholder_node.knobs().keys(): - for node_name in placeholder_node.knob("last_loaded").values(): - node = nuke.toNode(node_name) - try: - delete_placeholder_attributes(node) - except Exception: - pass - - last_loaded_names = [ - loaded_node.name() - for loaded_node in last_loaded - ] - imprint(placeholder_node, {"last_loaded": last_loaded_names}) - - for node in last_loaded: - refresh_node(node) - refresh_node(placeholder_node) - if "builder_type" not in node.knobs().keys(): - attributes = get_placeholder_attributes(placeholder_node, True) - imprint(node, attributes) - imprint(node, {"is_placeholder": False}) - hide_placeholder_attributes(node) - node.knob("is_placeholder").setVisible(False) - imprint( - node, - { - "x": placeholder_node.xpos(), - "y": placeholder_node.ypos() - } - ) - node.knob("x").setVisible(False) - node.knob("y").setVisible(False) - nuke.delete(placeholder_node) - - -class NukePlaceholder(AbstractPlaceholder): - """Concrete implementation of AbstractPlaceholder for Nuke""" - - optional_keys = {"asset", "subset", "hierarchy"} - - def get_data(self, node): - user_data = dict() - node_knobs = node.knobs() - for attr in self.required_keys.union(self.optional_keys): - if attr in node_knobs: - user_data[attr] = node_knobs[attr].getValue() - user_data["node"] = node - - nb_children = 0 - if "nb_children" in node_knobs: - nb_children = int(node_knobs["nb_children"].getValue()) - user_data["nb_children"] = nb_children - - siblings = [] - if "siblings" in node_knobs: - siblings = node_knobs["siblings"].values() - user_data["siblings"] = siblings - - node_full_name = node.fullName() - user_data["group_name"] = node_full_name.rpartition(".")[0] - user_data["last_loaded"] = [] - user_data["delete"] = False - self.data = user_data - - def parent_in_hierarchy(self, containers): - return - - def create_sib_copies(self): - """ creating copies of the palce_holder siblings (the ones who were - loaded with it) for the new nodes added - - Returns : - copies (dict) : with copied nodes names and their copies - """ - - copies = {} - siblings = get_nodes_by_names(self.data["siblings"]) - for node in siblings: - new_node = duplicate_node(node) - - x_init = int(new_node.knob("x_init").getValue()) - y_init = int(new_node.knob("y_init").getValue()) - new_node.setXYpos(x_init, y_init) - if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob("w_init").getValue() - h_init = new_node.knob("h_init").getValue() - new_node.knob("bdwidth").setValue(w_init) - new_node.knob("bdheight").setValue(h_init) - refresh_node(node) - - if "repre_id" in node.knobs().keys(): - node.removeKnob(node.knob("repre_id")) - copies[node.name()] = new_node - return copies - - def fix_z_order(self): - """Fix the problem of z_order when a backdrop is loaded.""" - - nodes_loaded = self.data["last_loaded"] - loaded_backdrops = [] - bd_orders = set() - for node in nodes_loaded: - if isinstance(node, nuke.BackdropNode): - loaded_backdrops.append(node) - bd_orders.add(node.knob("z_order").getValue()) - - if not bd_orders: - return - - sib_orders = set() - for node_name in self.data["siblings"]: - node = nuke.toNode(node_name) - if isinstance(node, nuke.BackdropNode): - sib_orders.add(node.knob("z_order").getValue()) - - if not sib_orders: - return - - min_order = min(bd_orders) - max_order = max(sib_orders) - for backdrop_node in loaded_backdrops: - z_order = backdrop_node.knob("z_order").getValue() - backdrop_node.knob("z_order").setValue( 
- z_order + max_order - min_order + 1) - - def update_nodes(self, nodes, considered_nodes, offset_y=None): - """Adjust backdrop nodes dimensions and positions. - - Considering some nodes sizes. - - Args: - nodes (list): list of nodes to update - considered_nodes (list): list of nodes to consider while updating - positions and dimensions - offset (int): distance between copies - """ - - placeholder_node = self.data["node"] - - min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) - - diff_x = diff_y = 0 - contained_nodes = [] # for backdrops - - if offset_y is None: - width_ph = placeholder_node.screenWidth() - height_ph = placeholder_node.screenHeight() - diff_y = max_y - min_y - height_ph - diff_x = max_x - min_x - width_ph - contained_nodes = [placeholder_node] - min_x = placeholder_node.xpos() - min_y = placeholder_node.ypos() - else: - siblings = get_nodes_by_names(self.data["siblings"]) - minX, _, maxX, _ = get_extreme_positions(siblings) - diff_y = max_y - min_y + 20 - diff_x = abs(max_x - min_x - maxX + minX) - contained_nodes = considered_nodes - - if diff_y <= 0 and diff_x <= 0: - return - - for node in nodes: - refresh_node(node) - - if ( - node == placeholder_node - or node in considered_nodes - ): - continue - - if ( - not isinstance(node, nuke.BackdropNode) - or ( - isinstance(node, nuke.BackdropNode) - and not set(contained_nodes) <= set(node.getNodes()) - ) - ): - if offset_y is None and node.xpos() >= min_x: - node.setXpos(node.xpos() + diff_x) - - if node.ypos() >= min_y: - node.setYpos(node.ypos() + diff_y) - - else: - width = node.screenWidth() - height = node.screenHeight() - node.knob("bdwidth").setValue(width + diff_x) - node.knob("bdheight").setValue(height + diff_y) - - refresh_node(node) - - def imprint_inits(self): - """Add initial positions and dimensions to the attributes""" - - for node in nuke.allNodes(): - refresh_node(node) - imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) - node.knob("x_init").setVisible(False) - node.knob("y_init").setVisible(False) - width = node.screenWidth() - height = node.screenHeight() - if "bdwidth" in node.knobs(): - imprint(node, {"w_init": width, "h_init": height}) - node.knob("w_init").setVisible(False) - node.knob("h_init").setVisible(False) - refresh_node(node) - - def imprint_siblings(self): - """ - - add siblings names to placeholder attributes (nodes loaded with it) - - add Id to the attributes of all the other nodes - """ - - loaded_nodes = self.data["last_loaded"] - loaded_nodes_set = set(loaded_nodes) - data = {"repre_id": str(self.data["last_repre_id"])} - - for node in loaded_nodes: - node_knobs = node.knobs() - if "builder_type" not in node_knobs: - # save the id of representation for all imported nodes - imprint(node, data) - node.knob("repre_id").setVisible(False) - refresh_node(node) - continue - - if ( - "is_placeholder" not in node_knobs - or ( - "is_placeholder" in node_knobs - and node.knob("is_placeholder").value() - ) - ): - siblings = list(loaded_nodes_set - {node}) - siblings_name = get_names_from_nodes(siblings) - siblings = {"siblings": siblings_name} - imprint(node, siblings) - - def set_loaded_connections(self): - """ - set inputs and outputs of loaded nodes""" - - placeholder_node = self.data["node"] - input_node, output_node = get_group_io_nodes(self.data["last_loaded"]) - for node in placeholder_node.dependent(): - for idx in range(node.inputs()): - if node.input(idx) == placeholder_node: - node.setInput(idx, output_node) - - for node in placeholder_node.dependencies(): - 
for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node: - input_node.setInput(0, node) - - def set_copies_connections(self, copies): - """Set inputs and outputs of the copies. - - Args: - copies (dict): Copied nodes by their names. - """ - - last_input, last_output = get_group_io_nodes(self.data["last_loaded"]) - siblings = get_nodes_by_names(self.data["siblings"]) - siblings_input, siblings_output = get_group_io_nodes(siblings) - copy_input = copies[siblings_input.name()] - copy_output = copies[siblings_output.name()] - - for node_init in siblings: - if node_init == siblings_output: - continue - - node_copy = copies[node_init.name()] - for node in node_init.dependent(): - for idx in range(node.inputs()): - if node.input(idx) != node_init: - continue - - if node in siblings: - copies[node.name()].setInput(idx, node_copy) - else: - last_input.setInput(0, node_copy) - - for node in node_init.dependencies(): - for idx in range(node_init.inputs()): - if node_init.input(idx) != node: - continue - - if node_init == siblings_input: - copy_input.setInput(idx, node) - elif node in siblings: - node_copy.setInput(idx, copies[node.name()]) - else: - node_copy.setInput(idx, last_output) - - siblings_input.setInput(0, copy_output) - - def move_to_placeholder_group(self, nodes_loaded): - """ - opening the placeholder's group and copying loaded nodes in it. - - Returns : - nodes_loaded (list): the new list of pasted nodes - """ - - groups_name = self.data["group_name"] - reset_selection() - select_nodes(nodes_loaded) - if groups_name: - with node_tempfile() as filepath: - nuke.nodeCopy(filepath) - for node in nuke.selectedNodes(): - nuke.delete(node) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste(filepath) - nodes_loaded = nuke.selectedNodes() - return nodes_loaded - - def clean(self): - # deselect all selected nodes - placeholder_node = self.data["node"] - - # getting the latest nodes added - nodes_init = self.data["nodes_init"] - nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) - self.log.debug("Loaded nodes: {}".format(nodes_loaded)) - if not nodes_loaded: - return - - self.data["delete"] = True - - nodes_loaded = self.move_to_placeholder_group(nodes_loaded) - self.data["last_loaded"] = nodes_loaded - refresh_nodes(nodes_loaded) - - # positioning of the loaded nodes - min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) - for node in nodes_loaded: - xpos = (node.xpos() - min_x) + placeholder_node.xpos() - ypos = (node.ypos() - min_y) + placeholder_node.ypos() - node.setXYpos(xpos, ypos) - refresh_nodes(nodes_loaded) - - self.fix_z_order() # fix the problem of z_order for backdrops - self.imprint_siblings() - - if self.data["nb_children"] == 0: - # save initial nodes postions and dimensions, update them - # and set inputs and outputs of loaded nodes - - self.imprint_inits() - self.update_nodes(nuke.allNodes(), nodes_loaded) - self.set_loaded_connections() - - elif self.data["siblings"]: - # create copies of placeholder siblings for the new loaded nodes, - # set their inputs and outpus and update all nodes positions and - # dimensions and siblings names - - siblings = get_nodes_by_names(self.data["siblings"]) - refresh_nodes(siblings) - copies = self.create_sib_copies() - new_nodes = list(copies.values()) # copies nodes - self.update_nodes(new_nodes, nodes_loaded) - placeholder_node.removeKnob(placeholder_node.knob("siblings")) - new_nodes_name = get_names_from_nodes(new_nodes) - imprint(placeholder_node, {"siblings": new_nodes_name}) - 
self.set_copies_connections(copies) - - self.update_nodes( - nuke.allNodes(), - new_nodes + nodes_loaded, - 20 - ) - - new_siblings = get_names_from_nodes(new_nodes) - self.data["siblings"] = new_siblings - - else: - # if the placeholder doesn't have siblings, the loaded - # nodes will be placed in a free space - - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes_loaded, direction="bottom", offset=200 - ) - node = nuke.createNode("NoOp") - reset_selection() - nuke.delete(node) - for node in nodes_loaded: - xpos = (node.xpos() - min_x) + xpointer - ypos = (node.ypos() - min_y) + ypointer - node.setXYpos(xpos, ypos) - - self.data["nb_children"] += 1 - reset_selection() - # go back to root group - nuke.root().begin() - - def get_representations(self, current_asset_doc, linked_asset_docs): - project_name = legacy_io.active_project() - - builder_type = self.data["builder_type"] - if builder_type == "context_asset": - context_filters = { - "asset": [re.compile(self.data["asset"])], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representations": [self.data["representation"]], - "family": [self.data["family"]] - } - - elif builder_type != "linked_asset": - context_filters = { - "asset": [ - current_asset_doc["name"], - re.compile(self.data["asset"]) - ], - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representation": [self.data["representation"]], - "family": [self.data["family"]] - } - - else: - asset_regex = re.compile(self.data["asset"]) - linked_asset_names = [] - for asset_doc in linked_asset_docs: - asset_name = asset_doc["name"] - if asset_regex.match(asset_name): - linked_asset_names.append(asset_name) - - if not linked_asset_names: - return [] - - context_filters = { - "asset": linked_asset_names, - "subset": [re.compile(self.data["subset"])], - "hierarchy": [re.compile(self.data["hierarchy"])], - "representation": [self.data["representation"]], - "family": [self.data["family"]], - } - - return list(get_representations( - project_name, - context_filters=context_filters - )) - - def err_message(self): - return ( - "Error while trying to load a representation.\n" - "Either the subset wasn't published or the template is malformed." 
- "\n\n" - "Builder was looking for:\n{attributes}".format( - attributes="\n".join([ - "{}: {}".format(key.title(), value) - for key, value in self.data.items()] - ) - ) - ) From 8259a4129bc1439443ee7fa5d778e76f32ccd9df Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 16:22:16 +0200 Subject: [PATCH 1101/2550] removed functionality of previous template build logic --- .../workfile/abstract_template_loader.py | 528 ------------------ openpype/pipeline/workfile/build_template.py | 72 --- .../workfile/build_template_exceptions.py | 35 -- 3 files changed, 635 deletions(-) delete mode 100644 openpype/pipeline/workfile/abstract_template_loader.py delete mode 100644 openpype/pipeline/workfile/build_template.py delete mode 100644 openpype/pipeline/workfile/build_template_exceptions.py diff --git a/openpype/pipeline/workfile/abstract_template_loader.py b/openpype/pipeline/workfile/abstract_template_loader.py deleted file mode 100644 index e2fbea98ca..0000000000 --- a/openpype/pipeline/workfile/abstract_template_loader.py +++ /dev/null @@ -1,528 +0,0 @@ -import os -from abc import ABCMeta, abstractmethod - -import six -import logging -from functools import reduce - -from openpype.client import ( - get_asset_by_name, - get_linked_assets, -) -from openpype.settings import get_project_settings -from openpype.lib import ( - StringTemplate, - Logger, - filter_profiles, -) -from openpype.pipeline import legacy_io, Anatomy -from openpype.pipeline.load import ( - get_loaders_by_name, - get_representation_context, - load_with_repre_context, -) - -from .build_template_exceptions import ( - TemplateAlreadyImported, - TemplateLoadingFailed, - TemplateProfileNotFound, - TemplateNotFound -) - -log = logging.getLogger(__name__) - - -def update_representations(entities, entity): - if entity['context']['subset'] not in entities: - entities[entity['context']['subset']] = entity - else: - current = entities[entity['context']['subset']] - incomming = entity - entities[entity['context']['subset']] = max( - current, incomming, - key=lambda entity: entity["context"].get("version", -1)) - - return entities - - -def parse_loader_args(loader_args): - if not loader_args: - return dict() - try: - parsed_args = eval(loader_args) - if not isinstance(parsed_args, dict): - return dict() - else: - return parsed_args - except Exception as err: - print( - "Error while parsing loader arguments '{}'.\n{}: {}\n\n" - "Continuing with default arguments. . .".format( - loader_args, - err.__class__.__name__, - err)) - return dict() - - -@six.add_metaclass(ABCMeta) -class AbstractTemplateLoader: - """ - Abstraction of Template Loader. - Properties: - template_path : property to get current template path - Methods: - import_template : Abstract Method. Used to load template, - depending on current host - get_template_nodes : Abstract Method. Used to query nodes acting - as placeholders. 
Depending on current host - """ - - _log = None - - def __init__(self, placeholder_class): - # TODO template loader should expect host as and argument - # - host have all responsibility for most of code (also provide - # placeholder class) - # - also have responsibility for current context - # - this won't work in DCCs where multiple workfiles with - # different contexts can be opened at single time - # - template loader should have ability to change context - project_name = legacy_io.active_project() - asset_name = legacy_io.Session["AVALON_ASSET"] - - self.loaders_by_name = get_loaders_by_name() - self.current_asset = asset_name - self.project_name = project_name - self.host_name = legacy_io.Session["AVALON_APP"] - self.task_name = legacy_io.Session["AVALON_TASK"] - self.placeholder_class = placeholder_class - self.current_asset_doc = get_asset_by_name(project_name, asset_name) - self.task_type = ( - self.current_asset_doc - .get("data", {}) - .get("tasks", {}) - .get(self.task_name, {}) - .get("type") - ) - - self.log.info( - "BUILDING ASSET FROM TEMPLATE :\n" - "Starting templated build for {asset} in {project}\n\n" - "Asset : {asset}\n" - "Task : {task_name} ({task_type})\n" - "Host : {host}\n" - "Project : {project}\n".format( - asset=self.current_asset, - host=self.host_name, - project=self.project_name, - task_name=self.task_name, - task_type=self.task_type - )) - # Skip if there is no loader - if not self.loaders_by_name: - self.log.warning( - "There is no registered loaders. No assets will be loaded") - return - - @property - def log(self): - if self._log is None: - self._log = Logger.get_logger(self.__class__.__name__) - return self._log - - def template_already_imported(self, err_msg): - """In case template was already loaded. - Raise the error as a default action. - Override this method in your template loader implementation - to manage this case.""" - self.log.error("{}: {}".format( - err_msg.__class__.__name__, - err_msg)) - raise TemplateAlreadyImported(err_msg) - - def template_loading_failed(self, err_msg): - """In case template loading failed - Raise the error as a default action. - Override this method in your template loader implementation - to manage this case. - """ - self.log.error("{}: {}".format( - err_msg.__class__.__name__, - err_msg)) - raise TemplateLoadingFailed(err_msg) - - @property - def template_path(self): - """ - Property returning template path. Avoiding setter. - Getting template path from open pype settings based on current avalon - session and solving the path variables if needed. 
- Returns: - str: Solved template path - Raises: - TemplateProfileNotFound: No profile found from settings for - current avalon session - KeyError: Could not solve path because a key does not exists - in avalon context - TemplateNotFound: Solved path does not exists on current filesystem - """ - project_name = self.project_name - host_name = self.host_name - task_name = self.task_name - task_type = self.task_type - - anatomy = Anatomy(project_name) - project_settings = get_project_settings(project_name) - - build_info = project_settings[host_name]["templated_workfile_build"] - profile = filter_profiles( - build_info["profiles"], - { - "task_types": task_type, - "task_names": task_name - } - ) - - if not profile: - raise TemplateProfileNotFound( - "No matching profile found for task '{}' of type '{}' " - "with host '{}'".format(task_name, task_type, host_name) - ) - - path = profile["path"] - if not path: - raise TemplateLoadingFailed( - "Template path is not set.\n" - "Path need to be set in {}\\Template Workfile Build " - "Settings\\Profiles".format(host_name.title())) - - # Try fill path with environments and anatomy roots - fill_data = { - key: value - for key, value in os.environ.items() - } - fill_data["root"] = anatomy.roots - result = StringTemplate.format_template(path, fill_data) - if result.solved: - path = result.normalized() - - if path and os.path.exists(path): - self.log.info("Found template at: '{}'".format(path)) - return path - - solved_path = None - while True: - try: - solved_path = anatomy.path_remapper(path) - except KeyError as missing_key: - raise KeyError( - "Could not solve key '{}' in template path '{}'".format( - missing_key, path)) - - if solved_path is None: - solved_path = path - if solved_path == path: - break - path = solved_path - - solved_path = os.path.normpath(solved_path) - if not os.path.exists(solved_path): - raise TemplateNotFound( - "Template found in openPype settings for task '{}' with host " - "'{}' does not exists. 
(Not found : {})".format( - task_name, host_name, solved_path)) - - self.log.info("Found template at: '{}'".format(solved_path)) - - return solved_path - - def populate_template(self, ignored_ids=None): - """ - Use template placeholders to load assets and parent them in hierarchy - Arguments : - ignored_ids : - Returns: - None - """ - - loaders_by_name = self.loaders_by_name - current_asset_doc = self.current_asset_doc - linked_assets = get_linked_assets(current_asset_doc) - - ignored_ids = ignored_ids or [] - placeholders = self.get_placeholders() - self.log.debug("Placeholders found in template: {}".format( - [placeholder.name for placeholder in placeholders] - )) - for placeholder in placeholders: - self.log.debug("Start to processing placeholder {}".format( - placeholder.name - )) - placeholder_representations = self.get_placeholder_representations( - placeholder, - current_asset_doc, - linked_assets - ) - - if not placeholder_representations: - self.log.info( - "There's no representation for this placeholder: " - "{}".format(placeholder.name) - ) - continue - - for representation in placeholder_representations: - self.preload(placeholder, loaders_by_name, representation) - - if self.load_data_is_incorrect( - placeholder, - representation, - ignored_ids): - continue - - self.log.info( - "Loading {}_{} with loader {}\n" - "Loader arguments used : {}".format( - representation['context']['asset'], - representation['context']['subset'], - placeholder.loader_name, - placeholder.loader_args)) - - try: - container = self.load( - placeholder, loaders_by_name, representation) - except Exception: - self.load_failed(placeholder, representation) - else: - self.load_succeed(placeholder, container) - finally: - self.postload(placeholder) - - def get_placeholder_representations( - self, placeholder, current_asset_doc, linked_asset_docs - ): - placeholder_representations = placeholder.get_representations( - current_asset_doc, - linked_asset_docs - ) - for repre_doc in reduce( - update_representations, - placeholder_representations, - dict() - ).values(): - yield repre_doc - - def load_data_is_incorrect( - self, placeholder, last_representation, ignored_ids): - if not last_representation: - self.log.warning(placeholder.err_message()) - return True - if (str(last_representation['_id']) in ignored_ids): - print("Ignoring : ", last_representation['_id']) - return True - return False - - def preload(self, placeholder, loaders_by_name, last_representation): - pass - - def load(self, placeholder, loaders_by_name, last_representation): - repre = get_representation_context(last_representation) - return load_with_repre_context( - loaders_by_name[placeholder.loader_name], - repre, - options=parse_loader_args(placeholder.loader_args)) - - def load_succeed(self, placeholder, container): - placeholder.parent_in_hierarchy(container) - - def load_failed(self, placeholder, last_representation): - self.log.warning( - "Got error trying to load {}:{} with {}".format( - last_representation['context']['asset'], - last_representation['context']['subset'], - placeholder.loader_name - ), - exc_info=True - ) - - def postload(self, placeholder): - placeholder.clean() - - def update_missing_containers(self): - loaded_containers_ids = self.get_loaded_containers_by_id() - self.populate_template(ignored_ids=loaded_containers_ids) - - def get_placeholders(self): - placeholders = map(self.placeholder_class, self.get_template_nodes()) - valid_placeholders = filter( - lambda i: i.is_valid, - placeholders - ) - sorted_placeholders = 
list(sorted( - valid_placeholders, - key=lambda i: i.order - )) - return sorted_placeholders - - @abstractmethod - def get_loaded_containers_by_id(self): - """ - Collect already loaded containers for updating scene - Return: - dict (string, node): A dictionnary id as key - and containers as value - """ - pass - - @abstractmethod - def import_template(self, template_path): - """ - Import template in current host - Args: - template_path (str): fullpath to current task and - host's template file - Return: - None - """ - pass - - @abstractmethod - def get_template_nodes(self): - """ - Returning a list of nodes acting as host placeholders for - templating. The data representation is by user. - AbstractLoadTemplate (and LoadTemplate) won't directly manipulate nodes - Args : - None - Returns: - list(AnyNode): Solved template path - """ - pass - - -@six.add_metaclass(ABCMeta) -class AbstractPlaceholder: - """Abstraction of placeholders logic. - - Properties: - required_keys: A list of mandatory keys to decribe placeholder - and assets to load. - optional_keys: A list of optional keys to decribe - placeholder and assets to load - loader_name: Name of linked loader to use while loading assets - - Args: - identifier (str): Placeholder identifier. Should be possible to be - used as identifier in "a scene" (e.g. unique node name). - """ - - required_keys = { - "builder_type", - "family", - "representation", - "order", - "loader", - "loader_args" - } - optional_keys = {} - - def __init__(self, identifier): - self._log = None - self._name = identifier - self.get_data(identifier) - - @property - def log(self): - if self._log is None: - self._log = Logger.get_logger(repr(self)) - return self._log - - def __repr__(self): - return "< {} {} >".format(self.__class__.__name__, self.name) - - @property - def name(self): - return self._name - - @property - def loader_args(self): - return self.data["loader_args"] - - @property - def builder_type(self): - return self.data["builder_type"] - - @property - def order(self): - return self.data["order"] - - @property - def loader_name(self): - """Return placeholder loader name. - - Returns: - str: Loader name that will be used to load placeholder - representations. - """ - - return self.data["loader"] - - @property - def is_valid(self): - """Test validity of placeholder. - - i.e.: every required key exists in placeholder data - - Returns: - bool: True if every key is in data - """ - - if set(self.required_keys).issubset(self.data.keys()): - self.log.debug("Valid placeholder : {}".format(self.name)) - return True - self.log.info("Placeholder is not valid : {}".format(self.name)) - return False - - @abstractmethod - def parent_in_hierarchy(self, container): - """Place loaded container in correct hierarchy given by placeholder - - Args: - container (Dict[str, Any]): Loaded container created by loader. - """ - - pass - - @abstractmethod - def clean(self): - """Clean placeholder from hierarchy after loading assets.""" - - pass - - @abstractmethod - def get_representations(self, current_asset_doc, linked_asset_docs): - """Query representations based on placeholder data. - - Args: - current_asset_doc (Dict[str, Any]): Document of current - context asset. - linked_asset_docs (List[Dict[str, Any]]): Documents of assets - linked to current context asset. - - Returns: - Iterable[Dict[str, Any]]: Representations that are matching - placeholder filters. - """ - - pass - - @abstractmethod - def get_data(self, identifier): - """Collect information about placeholder by identifier. 
- - Args: - identifier (str): A unique placeholder identifier defined by - implementation. - """ - - pass diff --git a/openpype/pipeline/workfile/build_template.py b/openpype/pipeline/workfile/build_template.py deleted file mode 100644 index 3328dfbc9e..0000000000 --- a/openpype/pipeline/workfile/build_template.py +++ /dev/null @@ -1,72 +0,0 @@ -import os -from importlib import import_module -from openpype.lib import classes_from_module -from openpype.host import HostBase -from openpype.pipeline import registered_host - -from .abstract_template_loader import ( - AbstractPlaceholder, - AbstractTemplateLoader) - -from .build_template_exceptions import ( - TemplateLoadingFailed, - TemplateAlreadyImported, - MissingHostTemplateModule, - MissingTemplatePlaceholderClass, - MissingTemplateLoaderClass -) - -_module_path_format = 'openpype.hosts.{host}.api.template_loader' - - -def build_workfile_template(*args): - template_loader = build_template_loader() - try: - template_loader.import_template(template_loader.template_path) - except TemplateAlreadyImported as err: - template_loader.template_already_imported(err) - except TemplateLoadingFailed as err: - template_loader.template_loading_failed(err) - else: - template_loader.populate_template() - - -def update_workfile_template(*args): - template_loader = build_template_loader() - template_loader.update_missing_containers() - - -def build_template_loader(): - # TODO refactor to use advantage of 'HostBase' and don't import dynamically - # - hosts should have methods that gives option to return builders - host = registered_host() - if isinstance(host, HostBase): - host_name = host.name - else: - host_name = os.environ.get("AVALON_APP") - if not host_name: - host_name = host.__name__.split(".")[-2] - - module_path = _module_path_format.format(host=host_name) - module = import_module(module_path) - if not module: - raise MissingHostTemplateModule( - "No template loader found for host {}".format(host_name)) - - template_loader_class = classes_from_module( - AbstractTemplateLoader, - module - ) - template_placeholder_class = classes_from_module( - AbstractPlaceholder, - module - ) - - if not template_loader_class: - raise MissingTemplateLoaderClass() - template_loader_class = template_loader_class[0] - - if not template_placeholder_class: - raise MissingTemplatePlaceholderClass() - template_placeholder_class = template_placeholder_class[0] - return template_loader_class(template_placeholder_class) diff --git a/openpype/pipeline/workfile/build_template_exceptions.py b/openpype/pipeline/workfile/build_template_exceptions.py deleted file mode 100644 index 7a5075e3dc..0000000000 --- a/openpype/pipeline/workfile/build_template_exceptions.py +++ /dev/null @@ -1,35 +0,0 @@ -class MissingHostTemplateModule(Exception): - """Error raised when expected module does not exists""" - pass - - -class MissingTemplatePlaceholderClass(Exception): - """Error raised when module doesn't implement a placeholder class""" - pass - - -class MissingTemplateLoaderClass(Exception): - """Error raised when module doesn't implement a template loader class""" - pass - - -class TemplateNotFound(Exception): - """Exception raised when template does not exist.""" - pass - - -class TemplateProfileNotFound(Exception): - """Exception raised when current profile - doesn't match any template profile""" - pass - - -class TemplateAlreadyImported(Exception): - """Error raised when Template was already imported by host for - this session""" - pass - - -class TemplateLoadingFailed(Exception): - 
"""Error raised whend Template loader was unable to load the template""" - pass From 1da23985a9e5e8845933d9eb8d239c39aca0f1ac Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 17:16:07 +0200 Subject: [PATCH 1102/2550] unified LoadPlaceholderItem --- .../maya/api/workfile_template_builder.py | 40 +++++-------------- .../nuke/api/workfile_template_builder.py | 29 +------------- .../workfile/workfile_template_builder.py | 40 +++++++++++++++++-- 3 files changed, 47 insertions(+), 62 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 71e3e0ce4e..9163cf9a6f 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -7,7 +7,7 @@ from openpype.pipeline.workfile.workfile_template_builder import ( TemplateAlreadyImported, AbstractTemplateBuilder, PlaceholderPlugin, - PlaceholderItem, + LoadPlaceholderItem, PlaceholderLoadMixin, ) from openpype.tools.workfile_template_build import ( @@ -239,15 +239,10 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): cmds.hide(node) cmds.setAttr(node + ".hiddenInOutliner", True) + def load_succeed(self, placeholder, container): + self._parent_in_hierarhchy(placeholder, container) -class LoadPlaceholderItem(PlaceholderItem): - """Concrete implementation of PlaceholderItem for Maya load plugin.""" - - def __init__(self, *args, **kwargs): - super(LoadPlaceholderItem, self).__init__(*args, **kwargs) - self._failed_representations = [] - - def parent_in_hierarchy(self, container): + def _parent_in_hierarchy(self, placeholder, container): """Parent loaded container to placeholder's parent. ie : Set loaded content as placeholder's sibling @@ -272,43 +267,26 @@ class LoadPlaceholderItem(PlaceholderItem): elif not cmds.sets(root, q=True): return - if self.data["parent"]: - cmds.parent(nodes_to_parent, self.data["parent"]) + if placeholder.data["parent"]: + cmds.parent(nodes_to_parent, placeholder.data["parent"]) # Move loaded nodes to correct index in outliner hierarchy placeholder_form = cmds.xform( - self._scene_identifier, + placeholder.scene_identifier, q=True, matrix=True, worldSpace=True ) for node in set(nodes_to_parent): cmds.reorder(node, front=True) - cmds.reorder(node, relative=self.data["index"]) + cmds.reorder(node, relative=placeholder.data["index"]) cmds.xform(node, matrix=placeholder_form, ws=True) - holding_sets = cmds.listSets(object=self._scene_identifier) + holding_sets = cmds.listSets(object=placeholder.scene_identifier) if not holding_sets: return for holding_set in holding_sets: cmds.sets(roots, forceElement=holding_set) - def get_errors(self): - if not self._failed_representations: - return [] - message = ( - "Failed to load {} representations using Loader {}" - ).format( - len(self._failed_representations), - self.data["loader"] - ) - return [message] - - def load_failed(self, representation): - self._failed_representations.append(representation) - - def load_succeed(self, container): - self.parent_in_hierarchy(container) - def build_workfile_template(*args): builder = MayaTemplateBuilder(registered_host()) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index ba0d975496..709ee3b743 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -6,7 +6,7 @@ from openpype.pipeline import registered_host from 
openpype.pipeline.workfile.workfile_template_builder import ( AbstractTemplateBuilder, PlaceholderPlugin, - PlaceholderItem, + LoadPlaceholderItem, PlaceholderLoadMixin, ) from openpype.tools.workfile_template_build import ( @@ -177,7 +177,7 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): placeholder_data = self._parse_placeholder_node_data(node) # TODO do data validations and maybe updgrades if are invalid output.append( - NukeLoadPlaceholderItem(node_name, placeholder_data, self) + LoadPlaceholderItem(node_name, placeholder_data, self) ) return output @@ -535,31 +535,6 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): siblings_input.setInput(0, copy_output) -class NukeLoadPlaceholderItem(PlaceholderItem): - """Concrete implementation of PlaceholderItem for Maya load plugin.""" - - def __init__(self, *args, **kwargs): - super(NukeLoadPlaceholderItem, self).__init__(*args, **kwargs) - self._failed_representations = [] - - def get_errors(self): - if not self._failed_representations: - return [] - message = ( - "Failed to load {} representations using Loader {}" - ).format( - len(self._failed_representations), - self.data["loader"] - ) - return [message] - - def load_failed(self, representation): - self._failed_representations.append(representation) - - def load_succeed(self, container): - pass - - def build_workfile_template(*args): builder = NukeTemplateBuilder(registered_host()) builder.build_template() diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index f81849fbe4..582657c735 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1064,10 +1064,9 @@ class PlaceholderLoadMixin(object): For placeholder population is implemented 'populate_load_placeholder'. - Requires that PlaceholderItem has implemented methods: + PlaceholderItem can have implemented methods: - 'load_failed' - called when loading of one representation failed - 'load_succeed' - called when loading of one representation succeeded - - 'clean' - called when placeholder processing finished """ def get_load_plugin_options(self, options=None): @@ -1397,13 +1396,21 @@ class PlaceholderLoadMixin(object): except Exception: failed = True - placeholder.load_failed(representation) + self.load_failed(placeholder, representation) else: failed = False - placeholder.load_succeed(container) + self.load_succeed(placeholder, container) self.cleanup_placeholder(placeholder, failed) + def load_failed(self, placeholder, representation): + if hasattr(placeholder, "load_failed"): + placeholder.load_failed(representation) + + def load_succeed(self, placeholder, container): + if hasattr(placeholder, "load_succeed"): + placeholder.load_succeed(container) + def cleanup_placeholder(self, placeholder, failed): """Cleanup placeholder after load of single representation. @@ -1417,3 +1424,28 @@ class PlaceholderLoadMixin(object): """ pass + + +class LoadPlaceholderItem(PlaceholderItem): + """PlaceholderItem for plugin which is loading representations. + + Connected to 'PlaceholderLoadMixin'. 
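A short usage sketch of how the errors collected by this item might be surfaced after a build (see also the 'Todo' note on 'build_template'); treating 'placeholder_plugins' as an identifier-to-plugin mapping and calling 'collect_placeholders()' are assumptions of this sketch, while 'get_errors()' and 'scene_identifier' come from this changeset.

def report_placeholder_errors(builder):
    # Collect per-placeholder error messages so they can be shown to the
    # user, e.g. in a dialog, once population has finished.
    lines = []
    for plugin in builder.placeholder_plugins.values():
        for item in plugin.collect_placeholders():
            for message in item.get_errors():
                lines.append("{}: {}".format(item.scene_identifier, message))
    return "\n".join(lines)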
+ """ + + def __init__(self, *args, **kwargs): + super(LoadPlaceholderItem, self).__init__(*args, **kwargs) + self._failed_representations = [] + + def get_errors(self): + if not self._failed_representations: + return [] + message = ( + "Failed to load {} representations using Loader {}" + ).format( + len(self._failed_representations), + self.data["loader"] + ) + return [message] + + def load_failed(self, representation): + self._failed_representations.append(representation) From b151c04b00305c837f29dd4820c7e8a99c1a66b5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 17:45:40 +0200 Subject: [PATCH 1103/2550] removed unused variables --- openpype/tools/workfile_template_build/window.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/workfile_template_build/window.py b/openpype/tools/workfile_template_build/window.py index 2e531026cf..757ccc0b4a 100644 --- a/openpype/tools/workfile_template_build/window.py +++ b/openpype/tools/workfile_template_build/window.py @@ -205,7 +205,7 @@ class WorkfileBuildPlaceholderDialog(QtWidgets.QDialog): try: plugin.update_placeholder(self._update_item, options) self.accept() - except Exception as exc: + except Exception: self.log.warning("Something went wrong", exc_info=True) dialog = QtWidgets.QMessageBox(self) dialog.setWindowTitle("Something went wrong") @@ -221,7 +221,7 @@ class WorkfileBuildPlaceholderDialog(QtWidgets.QDialog): try: plugin.create_placeholder(options) self.accept() - except Exception as exc: + except Exception: self.log.warning("Something went wrong", exc_info=True) dialog = QtWidgets.QMessageBox(self) dialog.setWindowTitle("Something went wrong") From b5682af9ac880cc91adad91b66720f15d47e3463 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 17:46:27 +0200 Subject: [PATCH 1104/2550] fix variable usage --- openpype/tools/workfile_template_build/window.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/workfile_template_build/window.py b/openpype/tools/workfile_template_build/window.py index 757ccc0b4a..ea4e2fec5a 100644 --- a/openpype/tools/workfile_template_build/window.py +++ b/openpype/tools/workfile_template_build/window.py @@ -126,8 +126,8 @@ class WorkfileBuildPlaceholderDialog(QtWidgets.QDialog): self._last_selected_plugin = None self._plugins_combo.clear() for identifier, plugin in placeholder_plugins.items(): - label = plugin.label or plugin.identifier - self._plugins_combo.addItem(label, plugin.identifier) + label = plugin.label or identifier + self._plugins_combo.addItem(label, identifier) index = self._plugins_combo.findData(last_selected_plugin) if index < 0: From b3bb5f8612ccb623f5da34e4270e8db9bcdb8a7e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 23:09:17 +0200 Subject: [PATCH 1105/2550] Always increment current file - similar to Maya --- .../plugins/publish/increment_current_file.py | 29 +++------------ .../increment_current_file_deadline.py | 35 ------------------- 2 files changed, 4 insertions(+), 60 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index 5cb14d732a..c990f481d3 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -2,10 +2,9 @@ import pyblish.api from openpype.lib import version_up 
from openpype.pipeline import registered_host -from openpype.pipeline.publish import get_errored_plugins_from_context -class IncrementCurrentFile(pyblish.api.InstancePlugin): +class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. Saves the current scene with an increased version number. @@ -15,30 +14,10 @@ class IncrementCurrentFile(pyblish.api.InstancePlugin): label = "Increment current file" order = pyblish.api.IntegratorOrder + 9.0 hosts = ["houdini"] - families = ["colorbleed.usdrender", "redshift_rop"] - targets = ["local"] + families = ["workfile"] + optional = True - def process(self, instance): - - # This should be a ContextPlugin, but this is a workaround - # for a bug in pyblish to run once for a family: issue #250 - context = instance.context - key = "__hasRun{}".format(self.__class__.__name__) - if context.data.get(key, False): - return - else: - context.data[key] = True - - context = instance.context - errored_plugins = get_errored_plugins_from_context(context) - if any( - plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins - ): - raise RuntimeError( - "Skipping incrementing current file because " - "submission to deadline failed." - ) + def process(self, context): # Filename must not have changed since collecting host = registered_host() diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py deleted file mode 100644 index cb0d7e3680..0000000000 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py +++ /dev/null @@ -1,35 +0,0 @@ -import pyblish.api - -import hou -from openpype.lib import version_up -from openpype.pipeline.publish import get_errored_plugins_from_context - - -class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): - """Increment the current file. - - Saves the current scene with an increased version number. - - """ - - label = "Increment current file" - order = pyblish.api.IntegratorOrder + 9.0 - hosts = ["houdini"] - targets = ["deadline"] - - def process(self, context): - - errored_plugins = get_errored_plugins_from_context(context) - if any( - plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins - ): - raise RuntimeError( - "Skipping incrementing current file because " - "submission to deadline failed." 
- ) - - current_filepath = context.data["currentFile"] - new_filepath = version_up(current_filepath) - - hou.hipFile.save(file_name=new_filepath, save_to_recent_files=True) From 229d31bc1ca10d51ab2b562ed128623a8895d26b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 23:41:42 +0200 Subject: [PATCH 1106/2550] Collect global in/out as handles --- .../fusion/plugins/publish/collect_instances.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/fusion/plugins/publish/collect_instances.py b/openpype/hosts/fusion/plugins/publish/collect_instances.py index b2192d1dd9..b36e43cacd 100644 --- a/openpype/hosts/fusion/plugins/publish/collect_instances.py +++ b/openpype/hosts/fusion/plugins/publish/collect_instances.py @@ -4,19 +4,21 @@ import pyblish.api def get_comp_render_range(comp): - """Return comp's start and end render range.""" + """Return comp's start-end render range and global start-end range.""" comp_attrs = comp.GetAttrs() start = comp_attrs["COMPN_RenderStart"] end = comp_attrs["COMPN_RenderEnd"] + global_start = comp_attrs["COMPN_GlobalStart"] + global_end = comp_attrs["COMPN_GlobalEnd"] # Whenever render ranges are undefined fall back # to the comp's global start and end if start == -1000000000: - start = comp_attrs["COMPN_GlobalEnd"] + start = global_start if end == -1000000000: - end = comp_attrs["COMPN_GlobalStart"] + end = global_end - return start, end + return start, end, global_start, global_end class CollectInstances(pyblish.api.ContextPlugin): @@ -42,9 +44,11 @@ class CollectInstances(pyblish.api.ContextPlugin): tools = comp.GetToolList(False).values() savers = [tool for tool in tools if tool.ID == "Saver"] - start, end = get_comp_render_range(comp) + start, end, global_start, global_end = get_comp_render_range(comp) context.data["frameStart"] = int(start) context.data["frameEnd"] = int(end) + context.data["frameStartHandle"] = int(global_start) + context.data["frameEndHandle"] = int(global_end) for tool in savers: path = tool["Clip"][comp.TIME_UNDEFINED] @@ -78,6 +82,8 @@ class CollectInstances(pyblish.api.ContextPlugin): "label": label, "frameStart": context.data["frameStart"], "frameEnd": context.data["frameEnd"], + "frameStartHandle": context.data["frameStartHandle"], + "frameEndHandle": context.data["frameStartHandle"], "fps": context.data["fps"], "families": ["render", "review", "ftrack"], "family": "render", From 8d4d80c2258f2e6a3a7c799547b9940af74cfdb8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 23:56:48 +0200 Subject: [PATCH 1107/2550] Be more explicit about the to render frame range (include rendering of handles) --- .../hosts/fusion/plugins/publish/render_local.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/plugins/publish/render_local.py b/openpype/hosts/fusion/plugins/publish/render_local.py index 601c2ffccf..79e458b40a 100644 --- a/openpype/hosts/fusion/plugins/publish/render_local.py +++ b/openpype/hosts/fusion/plugins/publish/render_local.py @@ -20,6 +20,8 @@ class Fusionlocal(pyblish.api.InstancePlugin): def process(self, instance): + # This plug-in runs only once and thus assumes all instances + # currently will render the same frame range context = instance.context key = "__hasRun{}".format(self.__class__.__name__) if context.data.get(key, False): @@ -28,8 +30,8 @@ class Fusionlocal(pyblish.api.InstancePlugin): context.data[key] = True current_comp = context.data["currentComp"] - frame_start = 
current_comp.GetAttrs("COMPN_RenderStart") - frame_end = current_comp.GetAttrs("COMPN_RenderEnd") + frame_start = context.data["frameStartHandle"] + frame_end = context.data["frameEndHandle"] path = instance.data["path"] output_dir = instance.data["outputDir"] @@ -40,7 +42,11 @@ class Fusionlocal(pyblish.api.InstancePlugin): self.log.info("End frame: {}".format(frame_end)) with comp_lock_and_undo_chunk(current_comp): - result = current_comp.Render() + result = current_comp.Render({ + "Start": frame_start, + "End": frame_end, + "Wait": True + }) if "representations" not in instance.data: instance.data["representations"] = [] From e12de9b3b2bfd2d28cc8cbeb620b01babca54e6d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 00:02:14 +0200 Subject: [PATCH 1108/2550] Do not auto-add ftrack family - That should be left up to plug-ins in Ftrack module --- openpype/hosts/fusion/plugins/publish/collect_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/plugins/publish/collect_instances.py b/openpype/hosts/fusion/plugins/publish/collect_instances.py index b36e43cacd..fe60b83827 100644 --- a/openpype/hosts/fusion/plugins/publish/collect_instances.py +++ b/openpype/hosts/fusion/plugins/publish/collect_instances.py @@ -85,7 +85,7 @@ class CollectInstances(pyblish.api.ContextPlugin): "frameStartHandle": context.data["frameStartHandle"], "frameEndHandle": context.data["frameStartHandle"], "fps": context.data["fps"], - "families": ["render", "review", "ftrack"], + "families": ["render", "review"], "family": "render", "active": active, "publish": active # backwards compatibility From f2b6e954a1ddb4662835f8d786e34e70100645fc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 00:16:06 +0200 Subject: [PATCH 1109/2550] Avoid name conflict where `group_name != group_node` due to maya auto renaming new node --- openpype/hosts/maya/plugins/load/load_yeti_cache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py index 8435ba2493..abc0e6003c 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py @@ -73,8 +73,8 @@ class YetiCacheLoader(load.LoaderPlugin): c = colors.get(family) if c is not None: - cmds.setAttr(group_name + ".useOutlinerColor", 1) - cmds.setAttr(group_name + ".outlinerColor", + cmds.setAttr(group_node + ".useOutlinerColor", 1) + cmds.setAttr(group_node + ".outlinerColor", (float(c[0])/255), (float(c[1])/255), (float(c[2])/255) From 4624fb930ff580b1f33c34ec8d3426f7e6fafd4d Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:26:49 +0200 Subject: [PATCH 1110/2550] :recycle: minor fixes --- .../houdini/plugins/publish/validate_alembic_face_sets.py | 5 ++--- .../houdini/plugins/publish/validate_alembic_input_node.py | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index 7c1d068390..10681e4b72 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,7 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are 
disabled for extraction to pointcache. @@ -18,7 +17,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index aa572dc3bb..4355bc7921 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline import PublishValidationError From 2c59d6317932cd6040b9c77f316112922b850a79 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:27:28 +0200 Subject: [PATCH 1111/2550] :recycle: change vdb cache creator to new publisher --- .../plugins/create/create_vbd_cache.py | 38 +++++++++---------- .../publish/validate_vdb_output_node.py | 10 +++-- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 242c21fc72..1a5011745f 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,38 +1,36 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating VDB Caches.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateVDBCache(plugin.Creator): +class CreateVDBCache(plugin.HoudiniCreator): """OpenVDB from Geometry ROP""" - + identifier = "io.openpype.creators.houdini.vdbcache" name = "vbdcache" label = "VDB Cache" family = "vdbcache" icon = "cloud" - def __init__(self, *args, **kwargs): - super(CreateVDBCache, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "geometry"}) - # Set node type to create for output - self.data["node_type"] = "geometry" + instance = super(CreateVDBCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, + "sopoutput": "$HIP/pyblish/{}.$F4.vdb".format(subset_name), "initsim": True, "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"soppath": node.path()}) + if self.selected_nodes: + parms["soppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 9be2635a9e..a9f8b38e7e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" @@ -25,8 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" " of type VDB!" + raise PublishValidationError( + "Node connected to the output node is not" " of type VDB!", + title=self.label ) @classmethod From dff7c27562dedda5ce3a1daece04840121b8001a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:28:25 +0200 Subject: [PATCH 1112/2550] :bug: fix function call --- openpype/hosts/houdini/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 8180676ce8..28830bdc64 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -128,7 +128,7 @@ class HoudiniCreator(NewCreator): node_type = "geometry" instance_node = self._create_instance_node( - subset_name, "/out", node_type, pre_create_data) + subset_name, "/out", node_type) # wondering if we'll ever need more than one member here # in Houdini From 48546ef24f6c9b1db77e6094792ba2b2f1f88bc3 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 14 Sep 2022 04:18:24 +0000 Subject: [PATCH 1113/2550] [Automated] Bump version --- CHANGELOG.md | 32 ++++++++++++++++++-------------- openpype/version.py | 2 +- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 46bf56f5bd..7d6b620d58 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,22 @@ # Changelog +## [3.14.3-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...HEAD) + +**🚀 Enhancements** + +- Publisher: Add new publisher to host tools [\#3833](https://github.com/pypeclub/OpenPype/pull/3833) +- Maya: Workspace mel loaded from settings [\#3790](https://github.com/pypeclub/OpenPype/pull/3790) + +**🐛 Bug fixes** + +- Ftrack: Url validation does not require ftrackapp [\#3834](https://github.com/pypeclub/OpenPype/pull/3834) +- Maya+Ftrack: Change typo in family name `mayaascii` -\> `mayaAscii` [\#3820](https://github.com/pypeclub/OpenPype/pull/3820) + ## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) 
-[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.2-nightly.5...3.14.2) **🆕 New features** @@ -45,11 +59,10 @@ - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) - General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) -- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** @@ -72,7 +85,6 @@ - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) -- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) **🐛 Bug fixes** @@ -86,17 +98,17 @@ - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) - AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) - General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) - Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) @@ -118,14 +130,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) -**🚀 Enhancements** - -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) - 
-**🐛 Bug fixes** - -- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) - ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) diff --git a/openpype/version.py b/openpype/version.py index 8469b1712a..e8a65b04d2 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2" +__version__ = "3.14.3-nightly.1" From d6b7e666e8fa5342c69a0ed027774c4bc3804e28 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 14 Sep 2022 12:37:30 +0800 Subject: [PATCH 1114/2550] adding a Qt lockfile dialog for lockfile tasks --- openpype/hosts/maya/api/pipeline.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 3b84d91158..35d0026357 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -493,7 +493,7 @@ def check_lock_on_current_file(): # add lockfile dialog from Qt import QtWidgets top_level_widgets = {w.objectName(): w for w in - QtWidgets.QApplication.topLevelWidgets()} + QtWidgets.QApplication.topLevelWidgets()} parent = top_level_widgets.get("MayaWindow", None) workfile_dialog = WorkfileLockDialog(filepath, parent=parent) if not workfile_dialog.exec_(): @@ -502,6 +502,7 @@ def check_lock_on_current_file(): create_workfile_lock(filepath) + def on_before_close(): """Delete the lock file after user quitting the Maya Scene""" log.info("Closing Maya...") From b1ebef457c23a3e48e077cd63040e7b15b9828a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 14 Sep 2022 11:36:10 +0200 Subject: [PATCH 1115/2550] :sparkles: add script for python dependencies info --- tools/get_python_packages_info.py | 83 +++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 tools/get_python_packages_info.py diff --git a/tools/get_python_packages_info.py b/tools/get_python_packages_info.py new file mode 100644 index 0000000000..b4952840e6 --- /dev/null +++ b/tools/get_python_packages_info.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +"""Get version and license information on used Python packages. + +This is getting over all packages installed with Poetry and printing out +their name, version and available license information from PyPi in Markdown +table format. + +Usage: + ./.poetry/bin/poetry run python ./tools/get_python_packages_info.py + +""" + +import toml +import requests + + +packages = [] + +# define column headers +package_header = "Package" +version_header = "Version" +license_header = "License" + +name_col_width = len(package_header) +version_col_width = len(version_header) +license_col_width = len(license_header) + +# read lock file to get packages +with open("poetry.lock", "r") as fb: + lock_content = toml.load(fb) + + for package in lock_content["package"]: + # query pypi for license information + url = f"https://pypi.org/pypi/{package['name']}/json" + response = requests.get( + f"https://pypi.org/pypi/{package['name']}/json") + package_data = response.json() + version = package.get("version") or "N/A" + try: + package_license = package_data["info"].get("license") or "N/A" + except KeyError: + package_license = "N/A" + + if len(package_license) > 64: + package_license = f"{package_license[:32]}..." 
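+        # store the collected name, version and license as one table row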
+ packages.append( + ( + package["name"], + version, + package_license + ) + ) + + # update column width based on max string length + if len(package["name"]) > name_col_width: + name_col_width = len(package["name"]) + if len(version) > version_col_width: + version_col_width = len(version) + if len(package_license) > license_col_width: + license_col_width = len(package_license) + +# pad columns +name_col_width += 2 +version_col_width += 2 +license_col_width += 2 + +# print table header +print((f"|{package_header.center(name_col_width)}" + f"|{version_header.center(version_col_width)}" + f"|{license_header.center(license_col_width)}|")) + +print( + "|" + ("-" * len(package_header.center(name_col_width))) + + "|" + ("-" * len(version_header.center(version_col_width))) + + "|" + ("-" * len(license_header.center(license_col_width))) + "|") + +# print rest of the table +for package in packages: + print(( + f"|{package[0].center(name_col_width)}" + f"|{package[1].center(version_col_width)}" + f"|{package[2].center(license_col_width)}|" + )) From d5cb828edc611f9ed9f98b61ae5ae3591d797640 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:45:44 +0200 Subject: [PATCH 1116/2550] Move LoadedInSceneDelegate into Loader tool since it's specific to loader --- openpype/tools/loader/delegates.py | 28 ++++++++++++++++++++++++++++ openpype/tools/loader/widgets.py | 4 ++-- openpype/tools/utils/delegates.py | 27 --------------------------- 3 files changed, 30 insertions(+), 29 deletions(-) create mode 100644 openpype/tools/loader/delegates.py diff --git a/openpype/tools/loader/delegates.py b/openpype/tools/loader/delegates.py new file mode 100644 index 0000000000..e6663d48f1 --- /dev/null +++ b/openpype/tools/loader/delegates.py @@ -0,0 +1,28 @@ +from Qt import QtWidgets, QtGui, QtCore + + +class LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate): + """Delegate for Loaded in Scene state columns. + + Shows "yes" or "no" for True or False values + Colorizes green or dark grey based on True or False values + + """ + + def __init__(self, *args, **kwargs): + super(LoadedInSceneDelegate, self).__init__(*args, **kwargs) + self._colors = { + True: QtGui.QColor(80, 170, 80), + False: QtGui.QColor(90, 90, 90) + } + + def displayText(self, value, locale): + return "yes" if value else "no" + + def initStyleOption(self, option, index): + super(LoadedInSceneDelegate, self).initStyleOption(option, index) + + # Colorize based on value + value = index.data(QtCore.Qt.DisplayRole) + color = self._colors[bool(value)] + option.palette.setBrush(QtGui.QPalette.Text, color) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 615f28f04d..98522c48ce 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -37,8 +37,7 @@ from openpype.tools.utils import ( ) from openpype.tools.utils.delegates import ( VersionDelegate, - PrettyTimeDelegate, - LoadedInSceneDelegate + PrettyTimeDelegate ) from openpype.tools.utils.widgets import ( OptionalMenu, @@ -59,6 +58,7 @@ from .model import ( ITEM_ID_ROLE ) from . 
import lib +from .delegates import LoadedInSceneDelegate from openpype.tools.utils.constants import ( LOCAL_PROVIDER_ROLE, diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py index 3547251282..d6c2d69e76 100644 --- a/openpype/tools/utils/delegates.py +++ b/openpype/tools/utils/delegates.py @@ -291,30 +291,3 @@ class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate): def displayText(self, value, locale): if value is not None: return pretty_timestamp(value) - - -class LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate): - """Delegate for Loaded in Scene state columns. - - Shows "yes" or "no" for True or False values - Colorizes green or dark grey based on True or False values - - """ - - def __init__(self, *args, **kwargs): - super(LoadedInSceneDelegate, self).__init__(*args, **kwargs) - self._colors = { - True: QtGui.QColor(80, 170, 80), - False: QtGui.QColor(90, 90, 90) - } - - def displayText(self, value, locale): - return "yes" if value else "no" - - def initStyleOption(self, option, index): - super(LoadedInSceneDelegate, self).initStyleOption(option, index) - - # Colorize based on value - value = index.data(QtCore.Qt.DisplayRole) - color = self._colors[bool(value)] - option.palette.setBrush(QtGui.QPalette.Text, color) From d84b175efbc60c226609c6393dddedbf9960c7be Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:47:52 +0200 Subject: [PATCH 1117/2550] Support ILoadHost hosts Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/tools/loader/model.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 58d6787507..17af7bb2f7 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -569,6 +569,11 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): if self._host: time_since_refresh = time.time() - self._host_loaded_refresh_time if time_since_refresh > self._host_loaded_refresh_timeout: + if isinstance(self._host, ILoadHost): + containers = self._host.get_containers() + else: + containers = self._host.ls() + repre_ids = {con.get("representation") for con in self._host.ls()} self._loaded_representation_ids = repre_ids From a8909889c4662764042288245ef2d901d1218055 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:49:52 +0200 Subject: [PATCH 1118/2550] Fix refactored code --- openpype/tools/loader/model.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 17af7bb2f7..1c078325f3 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -25,6 +25,7 @@ from openpype.pipeline import ( from openpype.style import get_default_entity_icon_color from openpype.tools.utils.models import TreeModel, Item from openpype.tools.utils import lib +from openpype.host import ILoadHost from openpype.modules import ModulesManager from openpype.tools.utils.constants import ( @@ -573,9 +574,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): containers = self._host.get_containers() else: containers = self._host.ls() - - repre_ids = {con.get("representation") - for con in self._host.ls()} + + repre_ids = {con.get("representation") for con in containers} self._loaded_representation_ids = repre_ids self._host_loaded_refresh_time = time.time() From 1eae84fd49af6930eb7865451c17557bf1d4d8b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:58:10 +0200 Subject: [PATCH 
1119/2550] Switch columns --- openpype/tools/loader/model.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 1c078325f3..77a8669c46 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -138,8 +138,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration", "handles", "step", - "repre_info", - "loaded_in_scene" + "loaded_in_scene", + "repre_info" ] column_labels_mapping = { @@ -153,8 +153,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration": "Duration", "handles": "Handles", "step": "Step", - "repre_info": "Availability", - "loaded_in_scene": "In scene" + "loaded_in_scene": "In scene", + "repre_info": "Availability" } SortAscendingRole = QtCore.Qt.UserRole + 2 From 4b7ecac2bbc5fc0f33839c3b0efa0cf2ff304e4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= Date: Wed, 14 Sep 2022 12:31:13 +0200 Subject: [PATCH 1120/2550] Fix Format Document --- .../modules/kitsu/actions/launcher_show_in_kitsu.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index 0ac9c6e9b7..bca57ce4c6 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -44,7 +44,7 @@ class ShowInKitsu(LauncherAction): asset_zou_name = None asset_zou_id = None asset_zou_type = 'Assets' - zou_sub_type = ['AssetType','Sequence'] + zou_sub_type = ['AssetType', 'Sequence'] if asset_name: asset_zou_name = asset_name asset_fields = ["data.zou.id", "data.zou.type"] @@ -59,11 +59,10 @@ class ShowInKitsu(LauncherAction): if asset_zou_data: asset_zou_type = asset_zou_data["type"] - if not asset_zou_type in zou_sub_type: + if asset_zou_type not in zou_sub_type: asset_zou_id = asset_zou_data["id"] else: asset_zou_type = asset_name - if task_name: task_data = asset["data"]["tasks"][task_name] @@ -71,7 +70,7 @@ class ShowInKitsu(LauncherAction): if not task_zou_data: self.log.debug(f"No zou task data for task: {task_name}") task_zou_id = task_zou_data["id"] - + # Define URL url = self.get_url(project_id=project_zou_id, asset_name=asset_zou_name, @@ -92,8 +91,8 @@ class ShowInKitsu(LauncherAction): asset_type=None, task_id=None): - shots_url = ['Shots','Sequence','Shot'] - sub_type = ['AssetType','Sequence'] + shots_url = ['Shots', 'Sequence', 'Shot'] + sub_type = ['AssetType', 'Sequence'] kitsu_module = self.get_kitsu_module() # Get kitsu url with /api stripped @@ -103,7 +102,7 @@ class ShowInKitsu(LauncherAction): sub_url = f"/productions/{project_id}" asset_type_url = "Assets" - + # Add redirection url for shots_url list if asset_type in shots_url: asset_type_url = 'Shots' From c3588e2f9d337109aea3c2630fb12079b617d81d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 12:37:46 +0200 Subject: [PATCH 1121/2550] Make `loaded_in_scene` column 5 pixels wider + reorder to match with column order --- openpype/tools/loader/widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 98522c48ce..c028aa4174 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -170,8 +170,8 @@ class SubsetWidget(QtWidgets.QWidget): ("duration", 60), ("handles", 55), ("step", 10), - ("repre_info", 65), - ("loaded_in_scene", 20) + 
("loaded_in_scene", 25), + ("repre_info", 65) ) def __init__( From 8a8d9041c7700a5e42113fad5cfd9af8a2153897 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 14 Sep 2022 13:00:56 +0200 Subject: [PATCH 1122/2550] added option to mark instance as stored to cleanup changes --- openpype/pipeline/create/context.py | 37 +++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index eaaed39357..b74b343bbe 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -169,6 +169,9 @@ class AttributeValues: def reset_values(self): self._data = [] + def mark_stored(self): + self._origin_data = copy.deepcopy(self._data) + @property def attr_defs(self): """Pointer to attribute definitions.""" @@ -304,6 +307,9 @@ class PublishAttributes: for name in self._plugin_names_order: yield name + def mark_stored(self): + self._origin_data = copy.deepcopy(self._data) + def data_to_store(self): """Convert attribute values to "data to store".""" @@ -623,6 +629,25 @@ class CreatedInstance: changes[key] = (old_value, None) return changes + def mark_stored(self): + """Should be called when instance data are stored. + + Origin data are replaced by current data so changes are cleared. + """ + + orig_keys = set(self._orig_data.keys()) + for key, value in self._data.items(): + orig_keys.discard(key) + if key in ("creator_attributes", "publish_attributes"): + continue + self._orig_data[key] = copy.deepcopy(value) + + for key in orig_keys: + self._orig_data.pop(key) + + self.creator_attributes.mark_stored() + self.publish_attributes.mark_stored() + @property def creator_attributes(self): return self._data["creator_attributes"] @@ -636,6 +661,18 @@ class CreatedInstance: return self._data["publish_attributes"] def data_to_store(self): + """Collect data that contain json parsable types. + + It is possible to recreate the instance using these data. + + Todo: + We probably don't need OrderedDict. When data are loaded they + are not ordered anymore. + + Returns: + OrderedDict: Ordered dictionary with instance data. 
+ """ + output = collections.OrderedDict() for key, value in self._data.items(): if key in ("creator_attributes", "publish_attributes"): From 84d5de704bbce7986f0ee3c06ea5726f7692a2e2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 14 Sep 2022 13:01:12 +0200 Subject: [PATCH 1123/2550] fix 'reset_values' --- openpype/pipeline/create/context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index b74b343bbe..d6d7e3c29e 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -167,7 +167,7 @@ class AttributeValues: return self._data.pop(key, default) def reset_values(self): - self._data = [] + self._data = {} def mark_stored(self): self._origin_data = copy.deepcopy(self._data) From 5f321f1c2061d073c06ed102e164cef85545bbf0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 14 Sep 2022 13:02:37 +0200 Subject: [PATCH 1124/2550] traypublisher mark new instances as stored --- openpype/hosts/traypublisher/api/plugin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index a3eead51c8..cf98b4010e 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -104,6 +104,8 @@ class TrayPublishCreator(Creator): # Host implementation of storing metadata about instance HostContext.add_instance(new_instance.data_to_store()) + new_instance.mark_stored() + # Add instance to current context self._add_instance_to_context(new_instance) From 817886b234c00d6a9f2a9bd0902cf8a2a9cb9cb3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 13:39:10 +0200 Subject: [PATCH 1125/2550] Fix typo in logic --- openpype/lib/transcoding.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 60d5d3ed4a..51e34312f2 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -154,7 +154,7 @@ def convert_value_by_type_name(value_type, value, logger=None): elif parts_len == 4: divisor = 2 elif parts_len == 9: - divisor == 3 + divisor = 3 elif parts_len == 16: divisor = 4 else: From 5c3c4dcbb2acfe736cfc94e6360eb2eb52cd580b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 13:43:43 +0200 Subject: [PATCH 1126/2550] Fix same typo in duplicated code --- .../plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 9fca1b5391..b259e18a94 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -71,7 +71,7 @@ def convert_value_by_type_name(value_type, value): elif parts_len == 4: divisor = 2 elif parts_len == 9: - divisor == 3 + divisor = 3 elif parts_len == 16: divisor = 4 else: From 6186c63c599822bddaf4fc2c2a437831f00e62b6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 14 Sep 2022 13:50:01 +0200 Subject: [PATCH 1127/2550] added 'float2' type support --- openpype/lib/transcoding.py | 2 +- .../plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 60d5d3ed4a..71c12b3376 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -139,7 +139,7 @@ def convert_value_by_type_name(value_type, value, logger=None): return float(value) # Vectors will probably have more types - if value_type == "vec2f": + if value_type in ("vec2f", "float2"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 05899de5e1..691c642e82 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -56,7 +56,7 @@ def convert_value_by_type_name(value_type, value): return float(value) # Vectors will probably have more types - if value_type == "vec2f": + if value_type in ("vec2f", "float2"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 From e5b82d112373905cc61e2030e3939a09eba90ee1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 14 Sep 2022 13:50:21 +0200 Subject: [PATCH 1128/2550] lowered log level and modified messages on unknown value type --- openpype/lib/transcoding.py | 8 ++++---- .../OpenPypeTileAssembler/OpenPypeTileAssembler.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 71c12b3376..5b919b4111 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -204,8 +204,8 @@ def convert_value_by_type_name(value_type, value, logger=None): ) return output - logger.info(( - "MISSING IMPLEMENTATION:" + logger.debug(( + "Dev note (missing implementation):" " Unknown attrib type \"{}\". Value: {}" ).format(value_type, value)) return value @@ -263,8 +263,8 @@ def parse_oiio_xml_output(xml_string, logger=None): # - feel free to add more tags else: value = child.text - logger.info(( - "MISSING IMPLEMENTATION:" + logger.debug(( + "Dev note (missing implementation):" " Unknown tag \"{}\". Value \"{}\"" ).format(tag_name, value)) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 691c642e82..c5208590f2 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -127,7 +127,7 @@ def convert_value_by_type_name(value_type, value): return output print(( - "MISSING IMPLEMENTATION:" + "Dev note (missing implementation):" " Unknown attrib type \"{}\". Value: {}" ).format(value_type, value)) return value @@ -183,7 +183,7 @@ def parse_oiio_xml_output(xml_string): else: value = child.text print(( - "MISSING IMPLEMENTATION:" + "Dev note (missing implementation):" " Unknown tag \"{}\". 
Value \"{}\"" ).format(tag_name, value)) From 1c6b23b674eec3ed1fc1b1e0a68931a5661b71a0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 13:50:57 +0200 Subject: [PATCH 1129/2550] Fix `headsUpDisplay` key name Capture has a default setting named `headsUpDisplay` which is the long name for the setting `hud`. Thus when supplying `hud` as viewport option then `capture` will merge the key-values and thus will try to set both `headsUpDisplay` and `hud` value for the modelEditor which ends up ignoring `hud` and instead applying the `headsUpDisplay`. Thus, `hud` didn't do anything. --- openpype/settings/defaults/project_settings/maya.json | 2 +- .../schemas/projects_schema/schemas/schema_maya_capture.json | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 99ba4cdd5c..7759ac4e5e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -731,7 +731,7 @@ "grid": false, "hairSystems": true, "handles": false, - "hud": false, + "headsUpDisplay": false, "hulls": false, "ikHandles": false, "imagePlane": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 7a40f349cc..ab35fd391f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -441,8 +441,8 @@ }, { "type": "boolean", - "key": "hud", - "label": "hud" + "key": "headsUpDisplay", + "label": "headsUpDisplay" }, { "type": "boolean", From 2fb40a9db7c3fde1c1842958c597c07693bb17e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= <89812691+sebasti1a@users.noreply.github.com> Date: Wed, 14 Sep 2022 14:49:44 +0200 Subject: [PATCH 1130/2550] Update openpype/modules/kitsu/actions/launcher_show_in_kitsu.py Co-authored-by: Roy Nieterau --- openpype/modules/kitsu/actions/launcher_show_in_kitsu.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index bca57ce4c6..0ee95e773d 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -101,11 +101,7 @@ class ShowInKitsu(LauncherAction): kitsu_url = kitsu_url[:-len("/api")] sub_url = f"/productions/{project_id}" - asset_type_url = "Assets" - - # Add redirection url for shots_url list - if asset_type in shots_url: - asset_type_url = 'Shots' + asset_type_url = "Shots" if asset_type in shots_url else "Assets" if task_id: # Go to task page From 9a19da923c783e253c2f249842ed5c1409d2a5c3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 14 Sep 2022 20:49:48 +0800 Subject: [PATCH 1131/2550] adding a Qt lockfile dialog for lockfile tasks --- openpype/hosts/maya/api/pipeline.py | 41 ++++++++++++++++++------ openpype/tools/workfiles/files_widget.py | 4 +-- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 35d0026357..eb22eeeb3b 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -106,11 +106,13 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): 
register_event_callback("open", on_open) register_event_callback("new", on_new) register_event_callback("before.save", on_before_save) + register_event_callback("after.save", on_after_save) register_event_callback("before.close", on_before_close) register_event_callback("before.file.open", before_file_open) register_event_callback("taskChanged", on_task_changed) register_event_callback("workfile.open.before", before_workfile_open) register_event_callback("workfile.save.before", before_workfile_save) + register_event_callback("workfile.save.after", after_workfile_save) def open_workfile(self, filepath): return open_file(filepath) @@ -153,6 +155,10 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): OpenMaya.MSceneMessage.kBeforeSave, _on_scene_save ) + self._op_events[_after_scene_save] = OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kAfterSave, _after_scene_save + ) + self._op_events[_before_scene_save] = ( OpenMaya.MSceneMessage.addCheckCallback( OpenMaya.MSceneMessage.kBeforeSaveCheck, @@ -194,6 +200,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): self.log.info("Installed event handler _on_scene_save..") self.log.info("Installed event handler _before_scene_save..") + self.log.info("Insatall event handler _on_after_save..") self.log.info("Installed event handler _on_scene_new..") self.log.info("Installed event handler _on_maya_initialized..") self.log.info("Installed event handler _on_scene_open..") @@ -236,6 +243,8 @@ def _on_maya_initialized(*args): def _on_scene_new(*args): emit_event("new") +def _after_scene_save(*arg): + emit_event("after.save") def _on_scene_save(*args): emit_event("save") @@ -271,6 +280,7 @@ def _remove_workfile_lock(): if not handle_workfile_locks(): return filepath = current_file() + log.info("Removing lock on current file {}...".format(filepath)) if filepath: remove_workfile_lock(filepath) @@ -479,6 +489,13 @@ def on_before_save(): return lib.validate_fps() +def on_after_save(): + """Check if there is a lockfile after save""" + filepath = current_file() + if not is_workfile_locked(filepath): + create_workfile_lock(filepath) + + def check_lock_on_current_file(): """Check if there is a user opening the file""" @@ -491,14 +508,14 @@ def check_lock_on_current_file(): if is_workfile_locked(filepath): # add lockfile dialog - from Qt import QtWidgets - top_level_widgets = {w.objectName(): w for w in - QtWidgets.QApplication.topLevelWidgets()} - parent = top_level_widgets.get("MayaWindow", None) - workfile_dialog = WorkfileLockDialog(filepath, parent=parent) - if not workfile_dialog.exec_(): - cmds.file(new=True) - return + try: + workfile_dialog.close() + workfile_dialog.deleteLater() + except: + workfile_dialog = WorkfileLockDialog(filepath) + if not workfile_dialog.exec_(): + cmds.file(new=True) + return create_workfile_lock(filepath) @@ -514,7 +531,6 @@ def on_before_close(): def before_file_open(): """check lock file when the file changed""" - log.info("Removing lock on current file before scene open...") # delete the lock file _remove_workfile_lock() @@ -654,6 +670,13 @@ def before_workfile_save(event): create_workspace_mel(workdir_path, project_name) +def after_workfile_save(event): + workfile_name = event["filename"] + if workfile_name: + if not is_workfile_locked(workfile_name): + create_workfile_lock(workfile_name) + + class MayaDirmap(HostDirmap): def on_enable_dirmap(self): cmds.dirmap(en=True) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 93cc0b153b..7377d10171 100644 --- 
a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -469,9 +469,7 @@ class FilesWidget(QtWidgets.QWidget): host = self.host if self._is_workfile_locked(filepath): # add lockfile dialog - dialog = WorkfileLockDialog(filepath, parent=self) - if not dialog.exec_(): - return + WorkfileLockDialog(filepath) if isinstance(host, IWorkfileHost): has_unsaved_changes = host.workfile_has_unsaved_changes() From 9b34573361ef292fcb68e692efc74268ac165ca5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= <89812691+sebasti1a@users.noreply.github.com> Date: Wed, 14 Sep 2022 14:50:29 +0200 Subject: [PATCH 1132/2550] Update openpype/modules/kitsu/actions/launcher_show_in_kitsu.py Co-authored-by: Roy Nieterau --- openpype/modules/kitsu/actions/launcher_show_in_kitsu.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index 0ee95e773d..68da3e3a0e 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -39,7 +39,6 @@ class ShowInKitsu(LauncherAction): raise RuntimeError(f"Project {project_name} has no " f"connected ftrack id.") - asset_zou_data = None task_zou_id = None asset_zou_name = None asset_zou_id = None From 40e3dbb38fe91cdc07814707b56f2550138c7eac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= <89812691+sebasti1a@users.noreply.github.com> Date: Wed, 14 Sep 2022 14:50:42 +0200 Subject: [PATCH 1133/2550] Update openpype/modules/kitsu/actions/launcher_show_in_kitsu.py Co-authored-by: Roy Nieterau --- openpype/modules/kitsu/actions/launcher_show_in_kitsu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index 68da3e3a0e..575d1eecd0 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -39,10 +39,10 @@ class ShowInKitsu(LauncherAction): raise RuntimeError(f"Project {project_name} has no " f"connected ftrack id.") - task_zou_id = None asset_zou_name = None asset_zou_id = None asset_zou_type = 'Assets' + task_zou_id = None zou_sub_type = ['AssetType', 'Sequence'] if asset_name: asset_zou_name = asset_name From ab03df702a0f14c8d0dd94ed3c917c78fde9cb4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= <89812691+sebasti1a@users.noreply.github.com> Date: Wed, 14 Sep 2022 14:50:50 +0200 Subject: [PATCH 1134/2550] Update openpype/modules/kitsu/actions/launcher_show_in_kitsu.py Co-authored-by: Roy Nieterau --- openpype/modules/kitsu/actions/launcher_show_in_kitsu.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index 575d1eecd0..ab523876ed 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -90,8 +90,8 @@ class ShowInKitsu(LauncherAction): asset_type=None, task_id=None): - shots_url = ['Shots', 'Sequence', 'Shot'] - sub_type = ['AssetType', 'Sequence'] + shots_url = {'Shots', 'Sequence', 'Shot'} + sub_type = {'AssetType', 'Sequence'} kitsu_module = self.get_kitsu_module() # Get kitsu url with /api stripped From 775b34df06b6af95e87d031576a7bea7a4bd7ef5 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: 
Wed, 14 Sep 2022 20:53:22 +0800 Subject: [PATCH 1135/2550] adding a Qt lockfile dialog for lockfile tasks --- openpype/hosts/maya/api/pipeline.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index eb22eeeb3b..969680bdf5 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -155,8 +155,11 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): OpenMaya.MSceneMessage.kBeforeSave, _on_scene_save ) - self._op_events[_after_scene_save] = OpenMaya.MSceneMessage.addCallback( - OpenMaya.MSceneMessage.kAfterSave, _after_scene_save + self._op_events[_after_scene_save] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kAfterSave, + _after_scene_save + ) ) self._op_events[_before_scene_save] = ( @@ -243,9 +246,11 @@ def _on_maya_initialized(*args): def _on_scene_new(*args): emit_event("new") + def _after_scene_save(*arg): emit_event("after.save") + def _on_scene_save(*args): emit_event("save") @@ -508,14 +513,10 @@ def check_lock_on_current_file(): if is_workfile_locked(filepath): # add lockfile dialog - try: - workfile_dialog.close() - workfile_dialog.deleteLater() - except: - workfile_dialog = WorkfileLockDialog(filepath) - if not workfile_dialog.exec_(): - cmds.file(new=True) - return + workfile_dialog = WorkfileLockDialog(filepath) + if not workfile_dialog.exec_(): + cmds.file(new=True) + return create_workfile_lock(filepath) From 8e068308c6369f50d220a58a81288f1f57337365 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 15:30:46 +0200 Subject: [PATCH 1136/2550] Add Display Textures settings correctly, labelize the Show settings to clarify what they are --- .../schemas/schema_maya_capture.json | 77 ++++++++++--------- 1 file changed, 41 insertions(+), 36 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 7a40f349cc..ae6c428faf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -195,6 +195,11 @@ { "nolights": "No Lights"} ] }, + { + "type": "boolean", + "key": "displayTextures", + "label": "Display Textures" + }, { "type": "number", "key": "textureMaxResolution", @@ -217,11 +222,6 @@ "key": "shadows", "label": "Display Shadows" }, - { - "type": "boolean", - "key": "textures", - "label": "Display Textures" - }, { "type": "boolean", "key": "twoSidedLighting", @@ -372,67 +372,67 @@ { "type": "boolean", "key": "cameras", - "label": "cameras" + "label": "Cameras" }, { "type": "boolean", "key": "clipGhosts", - "label": "clipGhosts" + "label": "Clip Ghosts" }, { "type": "boolean", "key": "controlVertices", - "label": "controlVertices" + "label": "NURBS CVs" }, { "type": "boolean", "key": "deformers", - "label": "deformers" + "label": "Deformers" }, { "type": "boolean", "key": "dimensions", - "label": "dimensions" + "label": "Dimensions" }, { "type": "boolean", "key": "dynamicConstraints", - "label": "dynamicConstraints" + "label": "Dynamic Constraints" }, { "type": "boolean", "key": "dynamics", - "label": "dynamics" + "label": "Dynamics" }, { "type": "boolean", "key": "fluids", - "label": "fluids" + "label": "Fluids" }, { "type": "boolean", "key": "follicles", - "label": "follicles" + "label": "Follicles" }, { "type": "boolean", 
"key": "gpuCacheDisplayFilter", - "label": "gpuCacheDisplayFilter" + "label": "GPU Cache" }, { "type": "boolean", "key": "greasePencils", - "label": "greasePencils" + "label": "Grease Pencil" }, { "type": "boolean", "key": "grid", - "label": "grid" + "label": "Grid" }, { "type": "boolean", "key": "hairSystems", - "label": "hairSystems" + "label": "Hair Systems" }, { "type": "boolean", @@ -442,47 +442,47 @@ { "type": "boolean", "key": "hud", - "label": "hud" + "label": "HUD" }, { "type": "boolean", "key": "hulls", - "label": "hulls" + "label": "NURBS Hulls" }, { "type": "boolean", "key": "ikHandles", - "label": "ikHandles" + "label": "IK Handles" }, { "type": "boolean", "key": "imagePlane", - "label": "imagePlane" + "label": "Image Planes" }, { "type": "boolean", "key": "joints", - "label": "joints" + "label": "Joints" }, { "type": "boolean", "key": "lights", - "label": "lights" + "label": "Lights" }, { "type": "boolean", "key": "locators", - "label": "locators" + "label": "Locators" }, { "type": "boolean", "key": "manipulators", - "label": "manipulators" + "label": "Manipulators" }, { "type": "boolean", "key": "motionTrails", - "label": "motionTrails" + "label": "Motion Trails" }, { "type": "boolean", @@ -502,47 +502,52 @@ { "type": "boolean", "key": "nurbsCurves", - "label": "nurbsCurves" + "label": "NURBS Curves" }, { "type": "boolean", "key": "nurbsSurfaces", - "label": "nurbsSurfaces" + "label": "NURBS Surfaces" }, { "type": "boolean", "key": "particleInstancers", - "label": "particleInstancers" + "label": "Particle Instancers" }, { "type": "boolean", "key": "pivots", - "label": "pivots" + "label": "Pivots" }, { "type": "boolean", "key": "planes", - "label": "planes" + "label": "Planes" }, { "type": "boolean", "key": "pluginShapes", - "label": "pluginShapes" + "label": "Plugin Shapes" }, { "type": "boolean", "key": "polymeshes", - "label": "polymeshes" + "label": "Polygons" }, { "type": "boolean", "key": "strokes", - "label": "strokes" + "label": "Strokes" }, { "type": "boolean", "key": "subdivSurfaces", - "label": "subdivSurfaces" + "label": "Subdiv Surfaces" + }, + { + "type": "boolean", + "key": "textures", + "label": "Texture Placements" } ] }, From c6bd26485d191406e96288c7a4ea7e99f2364494 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 15:35:12 +0200 Subject: [PATCH 1137/2550] Sort a bit more by Label again so that NURBS options are together + fix label for handles --- .../schemas/schema_maya_capture.json | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index ae6c428faf..d2627c1e2a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -379,11 +379,6 @@ "key": "clipGhosts", "label": "Clip Ghosts" }, - { - "type": "boolean", - "key": "controlVertices", - "label": "NURBS CVs" - }, { "type": "boolean", "key": "deformers", @@ -437,18 +432,13 @@ { "type": "boolean", "key": "handles", - "label": "handles" + "label": "Handles" }, { "type": "boolean", "key": "hud", "label": "HUD" }, - { - "type": "boolean", - "key": "hulls", - "label": "NURBS Hulls" - }, { "type": "boolean", "key": "ikHandles", @@ -499,11 +489,21 @@ "key": "nRigids", "label": "nRigids" }, + { + "type": "boolean", + "key": "controlVertices", + "label": "NURBS CVs" + 
}, { "type": "boolean", "key": "nurbsCurves", "label": "NURBS Curves" }, + { + "type": "boolean", + "key": "hulls", + "label": "NURBS Hulls" + }, { "type": "boolean", "key": "nurbsSurfaces", From fd4648c9bd48bda91e1c259a5903f4f263668b78 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 15:47:43 +0200 Subject: [PATCH 1138/2550] Add label --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index d2627c1e2a..18e69e92c3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -369,6 +369,10 @@ { "type": "splitter" }, + { + "type": "label", + "label": "Show" + }, { "type": "boolean", "key": "cameras", From 522d1e2df837bda7611b9667c1afd127337d945f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:04:09 +0200 Subject: [PATCH 1139/2550] Labelize Camera options to match with Camera attributes in Attribute Editor --- .../schemas/schema_maya_capture.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 18e69e92c3..8c2a460871 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -564,47 +564,47 @@ { "type": "boolean", "key": "displayGateMask", - "label": "displayGateMask" + "label": "Display Gate Mask" }, { "type": "boolean", "key": "displayResolution", - "label": "displayResolution" + "label": "Display Resolution" }, { "type": "boolean", "key": "displayFilmGate", - "label": "displayFilmGate" + "label": "Display Film Gate" }, { "type": "boolean", "key": "displayFieldChart", - "label": "displayFieldChart" + "label": "Display Field Chart" }, { "type": "boolean", "key": "displaySafeAction", - "label": "displaySafeAction" + "label": "Display Safe Action" }, { "type": "boolean", "key": "displaySafeTitle", - "label": "displaySafeTitle" + "label": "Display Safe Title" }, { "type": "boolean", "key": "displayFilmPivot", - "label": "displayFilmPivot" + "label": "Display Film Pivot" }, { "type": "boolean", "key": "displayFilmOrigin", - "label": "displayFilmOrigin" + "label": "Display Film Origin" }, { "type": "number", "key": "overscan", - "label": "overscan", + "label": "Overscan", "decimal": 1, "minimum": 0, "maximum": 10 From 9b9bfdadb993c43c2f6d1232ef522b5326df2cce Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:05:10 +0200 Subject: [PATCH 1140/2550] Uppercase `percent` label like the surrounding labels --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 8c2a460871..32987e7423 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -157,7 +157,7 @@ { "type": 
"number", "key": "percent", - "label": "percent", + "label": "Percent", "decimal": 1, "minimum": 0, "maximum": 200 From f501dac15ff4d3f0e30b4db4af5caf3485a90c7a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:14:18 +0200 Subject: [PATCH 1141/2550] Fix default settings for new viewport options settings --- openpype/settings/defaults/project_settings/maya.json | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 99ba4cdd5c..8706ea995f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -693,10 +693,10 @@ "Viewport Options": { "override_viewport_options": true, "displayLights": "default", + "displayTextures": true, "textureMaxResolution": 1024, "renderDepthOfField": true, "shadows": true, - "textures": true, "twoSidedLighting": true, "lineAAEnable": true, "multiSample": 8, @@ -719,7 +719,6 @@ "motionBlurShutterOpenFraction": 0.2, "cameras": false, "clipGhosts": false, - "controlVertices": false, "deformers": false, "dimensions": false, "dynamicConstraints": false, @@ -732,7 +731,6 @@ "hairSystems": true, "handles": false, "hud": false, - "hulls": false, "ikHandles": false, "imagePlane": true, "joints": false, @@ -743,7 +741,9 @@ "nCloths": false, "nParticles": false, "nRigids": false, + "controlVertices": false, "nurbsCurves": false, + "hulls": false, "nurbsSurfaces": false, "particleInstancers": false, "pivots": false, @@ -751,7 +751,8 @@ "pluginShapes": false, "polymeshes": true, "strokes": false, - "subdivSurfaces": false + "subdivSurfaces": false, + "textures": false }, "Camera Options": { "displayGateMask": false, From b9c3c95c2642b19305a57064be74fa0cd6ef12ae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:16:03 +0200 Subject: [PATCH 1142/2550] Use `id` variable (cosmetics because it results in same key) --- openpype/hosts/maya/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 58e160cb2f..6a8447d6ad 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2483,7 +2483,7 @@ def load_capture_preset(data=None): # DISPLAY OPTIONS id = 'Display Options' disp_options = {} - for key in preset['Display Options']: + for key in preset[id]: if key.startswith('background'): disp_options[key] = preset['Display Options'][key] if len(disp_options[key]) == 4: From 95fef2c4b11e055be8d55bd464e075eb2f1d7415 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:50:32 +0200 Subject: [PATCH 1143/2550] Fix Width label --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 570e22aa60..ffa1e61e68 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -141,7 +141,7 @@ { "type": "number", "key": "width", - "label": " Width", + "label": "Width", "decimal": 0, "minimum": 0, "maximum": 99999 From 1e27e9b71ff18af0aa7d957be6db344f7030a2aa Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:50:50 +0200 Subject: [PATCH 1144/2550] 
Remove unused settings --- .../schemas/schema_maya_capture.json | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index ffa1e61e68..2e4d4d67ab 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -153,19 +153,6 @@ "decimal": 0, "minimum": 0, "maximum": 99999 - }, - { - "type": "number", - "key": "percent", - "label": "Percent", - "decimal": 1, - "minimum": 0, - "maximum": 200 - }, - { - "type": "text", - "key": "mode", - "label": "Mode" } ] }, From ee4b9056feae4f942624d6e9bf37bb64a875bfba Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:51:59 +0200 Subject: [PATCH 1145/2550] Fix incorrectly resolved merge conflict --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 2e4d4d67ab..e23dbbbc1d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -430,11 +430,6 @@ "key": "headsUpDisplay", "label": "HUD" }, - { - "type": "boolean", - "key": "hulls", - "label": "hulls" - }, { "type": "boolean", "key": "ikHandles", From e16f5df4d7a346ee5b1f7b3c79b146fe9ba3e958 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:53:04 +0200 Subject: [PATCH 1146/2550] Update defaults for the removed settings --- openpype/settings/defaults/project_settings/maya.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 8b0418f5c6..79e80aec2e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -686,9 +686,7 @@ }, "Resolution": { "width": 1920, - "height": 1080, - "percent": 1.0, - "mode": "Custom" + "height": 1080 }, "Viewport Options": { "override_viewport_options": true, From 730f451020cb438b6a57756e5713212ae6e2261f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:57:11 +0200 Subject: [PATCH 1147/2550] Revert "Fix Width label" This reverts commit 95fef2c4b11e055be8d55bd464e075eb2f1d7415. 
--- .../schemas/projects_schema/schemas/schema_maya_capture.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index e23dbbbc1d..c9904150fd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -141,7 +141,7 @@ { "type": "number", "key": "width", - "label": "Width", + "label": " Width", "decimal": 0, "minimum": 0, "maximum": 99999 From a0333c88aed89707eba5cbea154f4449639dac44 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 17:23:59 +0200 Subject: [PATCH 1148/2550] Remove unused PanZoom / pan_zoom settings --- .../settings/defaults/project_settings/maya.json | 3 --- .../projects_schema/schemas/schema_maya_capture.json | 12 ------------ 2 files changed, 15 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 79e80aec2e..8643297f02 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -678,9 +678,6 @@ "isolate_view": true, "off_screen": true }, - "PanZoom": { - "pan_zoom": true - }, "Renderer": { "rendererName": "vp2Renderer" }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index c9904150fd..62c33f55fc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -94,18 +94,6 @@ } ] }, - - { - "type": "dict", - "key": "PanZoom", - "children": [ - { - "type": "boolean", - "key": "pan_zoom", - "label": " Pan Zoom" - } - ] - }, { "type": "splitter" }, From 3703ec07bcc709fb69cc57b19a66dc449c88f425 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 14 Sep 2022 18:23:17 +0200 Subject: [PATCH 1149/2550] OP-3940 - introduced new Settings for CollectVersion for Photoshop --- .../defaults/project_settings/photoshop.json | 3 +++ .../schema_project_photoshop.json | 17 +++++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index 552c2c9cad..8ea36a3000 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -15,6 +15,9 @@ "CollectInstances": { "flatten_subset_template": "" }, + "CollectVersion": { + "sync_workfile_version": true + }, "ValidateContainers": { "enabled": true, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 7aa49c99a4..500c5d027b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -131,6 +131,23 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectVersion", + "label": "Collect Version", + "children": [ + { + "type": "label", + "label": "Synchronize version for image and review instances by workfile version." 
+ }, + { + "type": "boolean", + "key": "sync_workfile_version", + "label": "Synchronize version with workfile" + } + ] + }, { "type": "schema_template", "name": "template_publish_plugin", From d93a18fd89bf749de62413b9280a21dde871c319 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 14 Sep 2022 18:25:11 +0200 Subject: [PATCH 1150/2550] OP-3940 - added new collector for Photoshop Single point of control if image and review instances should have their version synchronized according to workfile version. --- .../hosts/photoshop/plugins/CollectVersion.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 openpype/hosts/photoshop/plugins/CollectVersion.py diff --git a/openpype/hosts/photoshop/plugins/CollectVersion.py b/openpype/hosts/photoshop/plugins/CollectVersion.py new file mode 100644 index 0000000000..46f48b20fb --- /dev/null +++ b/openpype/hosts/photoshop/plugins/CollectVersion.py @@ -0,0 +1,28 @@ +import pyblish.api + + +class CollectVersion(pyblish.api.InstancePlugin): + """Collect version for publishable instances. + + Used to synchronize version from workfile to all publishable instances: + - image (manually created or color coded) + - review + + Dev comment: + Explicit collector created to control this from single place and not from + 3 different. + """ + order = pyblish.api.CollectorOrder + 0.200 + label = 'Collect Version' + + hosts = ["photoshop"] + families = ["image", "review"] + + # controlled by Settings + sync_workfile_version = False + + def process(self, instance): + if self.sync_workfile_version: + workfile_version = instance.context.data["version"] + self.log.debug(f"Applying version {workfile_version}") + instance.data["version"] = workfile_version From d0b8437cfd86113c6ffa3a04a0959bc80f3ddfcc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 14 Sep 2022 18:32:08 +0200 Subject: [PATCH 1151/2550] OP-3940 - added optionality to collector There might be a reason when artist would like to skip this synchronization for specific workfile. 
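
For reference, a minimal sketch of how the new "sync_workfile_version" value is expected to reach the collector at publish time. OpenPype applies project settings onto publish plugins whose class name matches the settings key; the helper below is only an illustrative approximation of that mapping (the function name and call site are assumptions, not code from this patch):

    def apply_plugin_settings(plugin, project_settings, host_name="photoshop"):
        # Look up project_settings/photoshop/publish/CollectVersion and copy
        # every option onto the plugin class as an attribute.
        plugin_settings = (
            project_settings
            .get(host_name, {})
            .get("publish", {})
            .get(plugin.__name__, {})
        )
        for option, value in plugin_settings.items():
            setattr(plugin, option, value)

    # With the defaults introduced above this results in:
    #   CollectVersion.sync_workfile_version == True
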
--- openpype/hosts/photoshop/plugins/CollectVersion.py | 1 + openpype/settings/defaults/project_settings/photoshop.json | 1 + .../schemas/projects_schema/schema_project_photoshop.json | 5 +++++ 3 files changed, 7 insertions(+) diff --git a/openpype/hosts/photoshop/plugins/CollectVersion.py b/openpype/hosts/photoshop/plugins/CollectVersion.py index 46f48b20fb..bc7af580d7 100644 --- a/openpype/hosts/photoshop/plugins/CollectVersion.py +++ b/openpype/hosts/photoshop/plugins/CollectVersion.py @@ -19,6 +19,7 @@ class CollectVersion(pyblish.api.InstancePlugin): families = ["image", "review"] # controlled by Settings + optional = True sync_workfile_version = False def process(self, instance): diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index 8ea36a3000..43a460052a 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -16,6 +16,7 @@ "flatten_subset_template": "" }, "CollectVersion": { + "optional": true, "sync_workfile_version": true }, "ValidateContainers": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 500c5d027b..e8dad84859 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -137,6 +137,11 @@ "key": "CollectVersion", "label": "Collect Version", "children": [ + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, { "type": "label", "label": "Synchronize version for image and review instances by workfile version." 
From 790d350d7f0c9a90fae28a02e0bfea57d1746e4a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 14 Sep 2022 22:17:50 +0200 Subject: [PATCH 1152/2550] fix: retimed attributes integration --- .../publish/extract_subset_resources.py | 27 +++++++++++-------- .../publish/collect_otio_frame_ranges.py | 12 +++++++-- 2 files changed, 26 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 1d42330e23..0774c401c0 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -90,7 +90,7 @@ class ExtractSubsetResources(openpype.api.Extractor): handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) include_handles = instance.data.get("includeHandles") - retimed_handles = instance.data.get("retimedHandles") + not_retimed_handles = instance.data.get("notRetimedHandles") # get media source range with handles source_start_handles = instance.data["sourceStartH"] @@ -98,7 +98,15 @@ class ExtractSubsetResources(openpype.api.Extractor): # retime if needed if r_speed != 1.0: - if retimed_handles: + if not_retimed_handles: + # handles are not retimed + source_end_handles = ( + source_start_handles + + (r_source_dur - 1) + + handle_start + + handle_end + ) + else: # handles are retimed source_start_handles = ( instance.data["sourceStart"] - r_handle_start) @@ -108,20 +116,12 @@ class ExtractSubsetResources(openpype.api.Extractor): + r_handle_start + r_handle_end ) - else: - # handles are not retimed - source_end_handles = ( - source_start_handles - + (r_source_dur - 1) - + handle_start - + handle_end - ) # get frame range with handles for representation range frame_start_handle = frame_start - handle_start repre_frame_start = frame_start_handle if include_handles: - if r_speed == 1.0 or not retimed_handles: + if r_speed == 1.0 or not_retimed_handles: frame_start_handle = frame_start else: frame_start_handle = ( @@ -167,6 +167,11 @@ class ExtractSubsetResources(openpype.api.Extractor): - (r_handle_start + r_handle_end) ) }) + if not_retimed_handles: + instance.data["versionData"].update({ + "handleStart": handle_start, + "handleEnd": handle_end + }) self.log.debug("_ i_version_data: {}".format( instance.data["versionData"] )) diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index cfb0318950..bfd5320c25 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -10,6 +10,7 @@ import opentimelineio as otio import pyblish.api from pprint import pformat from openpype.pipeline.editorial import ( + get_media_range_with_retimes, otio_range_to_frame_range, otio_range_with_handles ) @@ -57,8 +58,15 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): # in case of retimed clip and frame range should not be retimed if workfile_source_duration: - frame_end = frame_start + otio.opentime.to_frames( - otio_src_range.duration, otio_src_range.duration.rate) - 1 + # get available range trimmed with processed retimes + retimed_attributes = get_media_range_with_retimes( + otio_clip, 0, 0) + self.log.debug( + ">> retimed_attributes: {}".format(retimed_attributes)) + media_in = int(retimed_attributes["mediaIn"]) + media_out = int(retimed_attributes["mediaOut"]) + frame_end = frame_start + (media_out - media_in) + 1 + self.log.debug(frame_end) 
data = { "frameStart": frame_start, From be0734684918130469a775fc67d21c8684620f5b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 14 Sep 2022 22:18:07 +0200 Subject: [PATCH 1153/2550] flame: settings for retimed attributes --- .../hosts/flame/plugins/create/create_shot_clip.py | 4 ++-- openpype/settings/defaults/project_settings/flame.json | 4 +++- .../schemas/projects_schema/schema_project_flame.json | 10 ++++++++++ 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/create/create_shot_clip.py b/openpype/hosts/flame/plugins/create/create_shot_clip.py index b03a39a7ca..7622ff217c 100644 --- a/openpype/hosts/flame/plugins/create/create_shot_clip.py +++ b/openpype/hosts/flame/plugins/create/create_shot_clip.py @@ -23,10 +23,10 @@ class CreateShotClip(opfapi.Creator): # nested dictionary (only one level allowed # for sections and dict) for _k, _v in v["value"].items(): - if presets.get(_k): + if presets.get(_k, None) is not None: gui_inputs[k][ "value"][_k]["value"] = presets[_k] - if presets.get(k): + if presets.get(_k, None) is not None: gui_inputs[k]["value"] = presets[k] # open widget for plugins inputs diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index bfdc58d9ee..c90193fe13 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -17,7 +17,9 @@ "workfileFrameStart": 1001, "handleStart": 5, "handleEnd": 5, - "includeHandles": false + "includeHandles": false, + "retimedHandles": true, + "retimedFramerange": true } }, "publish": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index ca62679b3d..5f05bef0e1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -128,6 +128,16 @@ "type": "boolean", "key": "includeHandles", "label": "Enable handles including" + }, + { + "type": "boolean", + "key": "retimedHandles", + "label": "Enable retimed handles" + }, + { + "type": "boolean", + "key": "retimedFramerange", + "label": "Enable retimed shot frameranges" } ] } From 9ab7647d55b89919043efee3e36852607a1348b9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Sep 2022 10:57:19 +0200 Subject: [PATCH 1154/2550] OP-3940 - fixed location and name --- .../plugins/{CollectVersion.py => publish/collect_version.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/hosts/photoshop/plugins/{CollectVersion.py => publish/collect_version.py} (100%) diff --git a/openpype/hosts/photoshop/plugins/CollectVersion.py b/openpype/hosts/photoshop/plugins/publish/collect_version.py similarity index 100% rename from openpype/hosts/photoshop/plugins/CollectVersion.py rename to openpype/hosts/photoshop/plugins/publish/collect_version.py From 7a5d20ffdb05347736475c86b87e6a782fb5d80f Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 15 Sep 2022 11:00:20 +0200 Subject: [PATCH 1155/2550] :bug: skip plugin if otioTimeline is missing --- openpype/plugins/publish/extract_otio_file.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/plugins/publish/extract_otio_file.py b/openpype/plugins/publish/extract_otio_file.py index c692205d81..1a6a82117d 100644 --- a/openpype/plugins/publish/extract_otio_file.py +++ b/openpype/plugins/publish/extract_otio_file.py 
@@ -16,6 +16,8 @@ class ExtractOTIOFile(publish.Extractor): hosts = ["resolve", "hiero", "traypublisher"] def process(self, instance): + if not instance.context.data.get("otioTimeline"): + return # create representation data if "representations" not in instance.data: instance.data["representations"] = [] From c97602341fbeb6ef40c46143619f56f693c36588 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 11:53:49 +0200 Subject: [PATCH 1156/2550] flame: fix creator preset detection --- openpype/hosts/flame/plugins/create/create_shot_clip.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/create/create_shot_clip.py b/openpype/hosts/flame/plugins/create/create_shot_clip.py index 7622ff217c..835201cd3b 100644 --- a/openpype/hosts/flame/plugins/create/create_shot_clip.py +++ b/openpype/hosts/flame/plugins/create/create_shot_clip.py @@ -26,7 +26,8 @@ class CreateShotClip(opfapi.Creator): if presets.get(_k, None) is not None: gui_inputs[k][ "value"][_k]["value"] = presets[_k] - if presets.get(_k, None) is not None: + + if presets.get(k, None) is not None: gui_inputs[k]["value"] = presets[k] # open widget for plugins inputs From fa65e20ff7f4ff843ca54ac97897f33567c89eee Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 12:09:59 +0200 Subject: [PATCH 1157/2550] Flame: open folder and files after project is created --- openpype/hosts/flame/hooks/pre_flame_setup.py | 35 ++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index ad2b0dc897..9c2ad709c7 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -22,6 +22,7 @@ class FlamePrelaunch(PreLaunchHook): in environment var FLAME_SCRIPT_DIR. """ app_groups = ["flame"] + permisisons = 0o777 wtc_script_path = os.path.join( opflame.HOST_DIR, "api", "scripts", "wiretap_com.py") @@ -38,6 +39,7 @@ class FlamePrelaunch(PreLaunchHook): """Hook entry method.""" project_doc = self.data["project_doc"] project_name = project_doc["name"] + volume_name = _env.get("FLAME_WIRETAP_VOLUME") # get image io project_anatomy = self.data["anatomy"] @@ -81,7 +83,7 @@ class FlamePrelaunch(PreLaunchHook): data_to_script = { # from settings "host_name": _env.get("FLAME_WIRETAP_HOSTNAME") or hostname, - "volume_name": _env.get("FLAME_WIRETAP_VOLUME"), + "volume_name": volume_name, "group_name": _env.get("FLAME_WIRETAP_GROUP"), "color_policy": str(imageio_flame["project"]["colourPolicy"]), @@ -99,8 +101,39 @@ class FlamePrelaunch(PreLaunchHook): app_arguments = self._get_launch_arguments(data_to_script) + # fix project data permission issue + self._fix_permissions(project_name, volume_name) + self.launch_context.launch_args.extend(app_arguments) + def _fix_permissions(self, project_name, volume_name): + """Work around for project data permissions + + Reported issue: when project is created locally on one machine, + it is impossible to migrate it to other machine. Autodesk Flame + is crating some unmanagable files which needs to be opened to 0o777. 
+ + Args: + project_name (str): project name + volume_name (str): studio volume + """ + dirs_to_modify = [ + "/usr/discreet/project/{}".format(project_name), + "/opt/Autodesk/clip/{}/{}.prj".format(volume_name, project_name), + "/usr/discreet/clip/{}/{}.prj".format(volume_name, project_name) + ] + + for dirtm in dirs_to_modify: + for root, dirs, files in os.walk(dirtm): + try: + for d in dirs: + os.chmod(os.path.join(root, d), self.permisisons) + for f in files: + os.chmod(os.path.join(root, f), self.permisisons) + except OSError as _E: + self.log.warning("Not able to open files: {}".format(_E)) + + def _get_flame_fps(self, fps_num): fps_table = { float(23.976): "23.976 fps", From d905740208168114b12ec1a4873601d61c9b1798 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Sep 2022 12:18:07 +0200 Subject: [PATCH 1158/2550] OP-3940 - collector cannot be optional --- openpype/settings/defaults/project_settings/photoshop.json | 1 - .../schemas/projects_schema/schema_project_photoshop.json | 5 ----- 2 files changed, 6 deletions(-) diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index 43a460052a..8ea36a3000 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -16,7 +16,6 @@ "flatten_subset_template": "" }, "CollectVersion": { - "optional": true, "sync_workfile_version": true }, "ValidateContainers": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index e8dad84859..500c5d027b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -137,11 +137,6 @@ "key": "CollectVersion", "label": "Collect Version", "children": [ - { - "type": "boolean", - "key": "optional", - "label": "Optional" - }, { "type": "label", "label": "Synchronize version for image and review instances by workfile version." 
From 18e03a8d28bc9e9fb0404c5f92475861e90f4f17 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Sep 2022 12:46:31 +0200 Subject: [PATCH 1159/2550] OP-3940 - collector cannot be optional --- openpype/hosts/photoshop/plugins/publish/collect_version.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_version.py b/openpype/hosts/photoshop/plugins/publish/collect_version.py index bc7af580d7..46f48b20fb 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_version.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_version.py @@ -19,7 +19,6 @@ class CollectVersion(pyblish.api.InstancePlugin): families = ["image", "review"] # controlled by Settings - optional = True sync_workfile_version = False def process(self, instance): From 8a21fdfcf25a3294b53e742dff84eb82950768e7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Sep 2022 12:48:35 +0200 Subject: [PATCH 1160/2550] OP-3682 - Hound --- common/openpype_common/distribution/addon_distribution.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py index e39ce66a0a..ac9c69deca 100644 --- a/common/openpype_common/distribution/addon_distribution.py +++ b/common/openpype_common/distribution/addon_distribution.py @@ -71,7 +71,7 @@ class AddonDownloader: Args: source (dict): {type:"http", "url":"https://} ...} destination (str): local folder to unzip - Retursn: + Returns: (str) local path to addon zip file """ pass @@ -235,4 +235,4 @@ def check_addons(server_endpoint, addon_folder, downloaders): def cli(*args): - raise NotImplemented + raise NotImplementedError From ed29c38cdcaa446bdfa0d0f8300e987d253e09f1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 13:11:42 +0200 Subject: [PATCH 1161/2550] flame: turn double negative logic to readable code --- .../publish/extract_subset_resources.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 0774c401c0..a8d3201896 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,7 +1,6 @@ import os import re import tempfile -from pprint import pformat from copy import deepcopy import pyblish.api @@ -90,7 +89,7 @@ class ExtractSubsetResources(openpype.api.Extractor): handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) include_handles = instance.data.get("includeHandles") - not_retimed_handles = instance.data.get("notRetimedHandles") + retimed_handles = instance.data.get("retimedHandles") # get media source range with handles source_start_handles = instance.data["sourceStartH"] @@ -98,15 +97,7 @@ class ExtractSubsetResources(openpype.api.Extractor): # retime if needed if r_speed != 1.0: - if not_retimed_handles: - # handles are not retimed - source_end_handles = ( - source_start_handles - + (r_source_dur - 1) - + handle_start - + handle_end - ) - else: + if retimed_handles: # handles are retimed source_start_handles = ( instance.data["sourceStart"] - r_handle_start) @@ -117,11 +108,20 @@ class ExtractSubsetResources(openpype.api.Extractor): + r_handle_end ) + else: + # handles are not retimed + source_end_handles = ( + source_start_handles + + (r_source_dur - 1) + + handle_start + + handle_end + ) + 
# get frame range with handles for representation range frame_start_handle = frame_start - handle_start repre_frame_start = frame_start_handle if include_handles: - if r_speed == 1.0 or not_retimed_handles: + if r_speed == 1.0 or not retimed_handles: frame_start_handle = frame_start else: frame_start_handle = ( From ecc6f3ae0e2e4a24cafde6cd13f4959403bc9ada Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 13:12:04 +0200 Subject: [PATCH 1162/2550] flame: fixing logic --- .../flame/plugins/publish/collect_timeline_instances.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index d6ff13b059..76d48dded2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -131,9 +131,8 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "fps": self.fps, "workfileFrameStart": workfile_start, "sourceFirstFrame": int(first_frame), - "notRetimedHandles": ( - not marker_data.get("retimedHandles")), - "notRetimedFramerange": ( + "retimedHandles": marker_data.get("retimedHandles"), + "shotDurationFromSource": ( not marker_data.get("retimedFramerange")), "path": file_path, "flameAddTasks": self.add_tasks, From b635749a7748880df74ea65927148e833c7d7e98 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 13:12:23 +0200 Subject: [PATCH 1163/2550] global: improving code readibility --- openpype/plugins/publish/collect_otio_frame_ranges.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index bfd5320c25..9a68b6e43d 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -30,7 +30,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): # get basic variables otio_clip = instance.data["otioClip"] workfile_start = instance.data["workfileFrameStart"] - workfile_source_duration = instance.data.get("notRetimedFramerange") + workfile_source_duration = instance.data.get("shotDurationFromSource") # get ranges otio_tl_range = otio_clip.range_in_parent() From 4c0f629e386794b26c8b48fbe30ea0b6599f752d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 13:13:54 +0200 Subject: [PATCH 1164/2550] flame: missing variable fix --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index a8d3201896..7adcd1453e 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -167,7 +167,7 @@ class ExtractSubsetResources(openpype.api.Extractor): - (r_handle_start + r_handle_end) ) }) - if not_retimed_handles: + if not retimed_handles: instance.data["versionData"].update({ "handleStart": handle_start, "handleEnd": handle_end From 9fdb71b814011dc9f94fa9243711660d3854bbfa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 15 Sep 2022 13:52:29 +0200 Subject: [PATCH 1165/2550] increasi size of publisher's main window --- openpype/tools/publisher/window.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 90a36b4f01..2a0e6e940a 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -30,8 +30,8 @@ from .widgets import ( class PublisherWindow(QtWidgets.QDialog): """Main window of publisher.""" - default_width = 1000 - default_height = 600 + default_width = 1200 + default_height = 700 def __init__(self, parent=None, reset_on_show=None): super(PublisherWindow, self).__init__(parent) From cfe997dac7a519500ec19834ead6963d7e01ebc8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 14:16:40 +0200 Subject: [PATCH 1166/2550] flame: fixing ls() --- openpype/hosts/flame/api/pipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/pipeline.py b/openpype/hosts/flame/api/pipeline.py index da44be1b15..324d13bc3f 100644 --- a/openpype/hosts/flame/api/pipeline.py +++ b/openpype/hosts/flame/api/pipeline.py @@ -90,8 +90,7 @@ def containerise(flame_clip_segment, def ls(): """List available containers. """ - # TODO: ls - pass + return [] def parse_container(tl_segment, validate=True): @@ -107,6 +106,7 @@ def update_container(tl_segment, data=None): # TODO: update_container pass + def on_pyblish_instance_toggled(instance, old_value, new_value): """Toggle node passthrough states on instance toggles.""" From dd0bbf2bb515441e730c7fea6e3f762ae1f186b2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 15:04:55 +0200 Subject: [PATCH 1167/2550] adding running version into issue template --- .github/ISSUE_TEMPLATE/bug_report.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 6ed6ae428c..d1e98409c5 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -6,6 +6,8 @@ labels: bug assignees: '' --- +**Running version** +openpype-v3.14.1-nightly.2 **Describe the bug** A clear and concise description of what the bug is. From ee154aca7f73df163a2e3fe361d87f2f7264ca6d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 15:06:27 +0200 Subject: [PATCH 1168/2550] issue: improving example string --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index d1e98409c5..96e768e420 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -7,7 +7,7 @@ assignees: '' --- **Running version** -openpype-v3.14.1-nightly.2 +[ex. 3.14.1-nightly.2] **Describe the bug** A clear and concise description of what the bug is. From b405299e92fe718993591892c71d59f76a604b5d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Sep 2022 15:33:57 +0200 Subject: [PATCH 1169/2550] OP-3940 - added collector for remote publishes In Webpublisher uploaded workfile name is not relieable, use last published + 1 instead. 
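
Taken together with the Photoshop "CollectVersion" collector added earlier in this series, the intended remote-publish flow is roughly the following (a simplified sketch for illustration only, not code from the patch; the function and argument names are made up):

    def simulate_remote_version_chain(last_published_workfile_version, instances):
        # CollectPublishedVersion (order +0.190, remotepublish target only):
        # derive the context version from the last published workfile version.
        context_version = last_published_workfile_version + 1

        # CollectVersion (order +0.200): copy the context version onto every
        # image/review instance when sync_workfile_version is enabled.
        for instance_data in instances:
            if instance_data.get("family") in ("image", "review"):
                instance_data["version"] = context_version
        return context_version
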
--- .../publish/collect_published_version.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_published_version.py diff --git a/openpype/hosts/photoshop/plugins/publish/collect_published_version.py b/openpype/hosts/photoshop/plugins/publish/collect_published_version.py new file mode 100644 index 0000000000..2502689e4b --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_published_version.py @@ -0,0 +1,55 @@ +"""Collects published version of workfile and increments it. + +For synchronization of published image and workfile version it is required +to store workfile version from workfile file name in context.data["version"]. +In remote publishing this name is unreliable (artist might not follow naming +convention etc.), last published workfile version for particular workfile +subset is used instead. + +This plugin runs only in remote publishing (eg. Webpublisher). + +Requires: + context.data["assetEntity"] + +Provides: + context["version"] - incremented latest published workfile version +""" + +import pyblish.api + +from openpype.client import get_last_version_by_subset_name + + +class CollectPublishedVersion(pyblish.api.ContextPlugin): + """Collects published version of workfile and increments it.""" + + order = pyblish.api.CollectorOrder + 0.190 + label = "Collect published version" + hosts = ["photoshop"] + targets = ["remotepublish"] + + def process(self, context): + workfile_subset_name = None + for instance in context: + if instance.data["family"] == "workfile": + workfile_subset_name = instance.data["subset"] + break + + if not workfile_subset_name: + self.log.warning("No workfile instance found, " + "synchronization of version will not work.") + return + + project_name = context.data["projectName"] + asset_doc = context.data["assetEntity"] + asset_id = asset_doc["_id"] + + version_doc = get_last_version_by_subset_name(project_name, + workfile_subset_name, + asset_id) + version_int = 1 + if version_doc: + version_int += int(version_doc["name"]) + + self.log.debug(f"Setting {version_int} to context.") + context.data["version"] = version_int From abee82f45e48205c2e995dda9f53d8e92370cfed Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Sep 2022 15:35:09 +0200 Subject: [PATCH 1170/2550] OP-3940 - fix wrong import for pype_commands --- openpype/pype_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 85561495fd..f65d969c53 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -187,7 +187,7 @@ class PypeCommands: (to choose validator for example) """ - from openpype.hosts.webpublisher.cli_functions import ( + from openpype.hosts.webpublisher.publish_functions import ( cli_publish_from_app ) From 3fff6647389d28b47fe4048a5a048d3fb420da91 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 16:13:01 +0200 Subject: [PATCH 1171/2550] Refactor `load_capture_preset` --- openpype/hosts/maya/api/lib.py | 204 +++++++++++---------------------- 1 file changed, 64 insertions(+), 140 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 97035ad3f2..3561c3f53f 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2462,28 +2462,16 @@ def load_capture_preset(data=None): import capture preset = data - options = dict() - # CODEC - id = 'Codec' - for key in preset[id]: - options[str(key)] = preset[id][key] - - # GENERIC - 
id = 'Generic' - for key in preset[id]: - options[str(key)] = preset[id][key] - - # RESOLUTION - id = 'Resolution' - options['height'] = preset[id]['height'] - options['width'] = preset[id]['width'] + # Straight key-value match from settings to capture arguments + for settings_key in ["Codec", "Generic", "Resolution"]: + for key, value in preset[settings_key].items(): + options[key] = value # DISPLAY OPTIONS - id = 'Display Options' disp_options = {} - for key in preset[id]: + for key in preset['Display Options']: if key.startswith('background'): disp_options[key] = preset['Display Options'][key] if len(disp_options[key]) == 4: @@ -2497,142 +2485,78 @@ def load_capture_preset(data=None): options['display_options'] = disp_options # VIEWPORT OPTIONS - temp_options = {} - id = 'Renderer' - for key in preset[id]: - temp_options[str(key)] = preset[id][key] + viewport_options = {} + viewport2_options = {} - temp_options2 = {} - id = 'Viewport Options' - for key in preset[id]: + for key, value in preset['Renderer'].items(): + viewport_options[key] = value + + # Viewport Options has a mixture of Viewport2 Options and Viewport Options + # to pass along to capture. So we'll need to differentiate between the two + VIEWPORT2_OPTIONS = { + "textureMaxResolution", + "renderDepthOfField", + "ssaoEnable", + "ssaoSamples", + "ssaoAmount", + "ssaoRadius", + "ssaoFilterRadius", + "hwFogStart", + "hwFogEnd", + "hwFogAlpha", + "hwFogFalloff", + "hwFogColorR", + "hwFogColorG", + "hwFogColorB", + "hwFogDensity", + "motionBlurEnable", + "motionBlurSampleCount", + "motionBlurShutterOpenFraction", + "lineAAEnable" + } + for key, value in preset['Viewport Options'].items(): + + # There are some keys we want to ignore + if key in {"override_viewport_options", "high_quality"}: + continue + + # First handle special cases where we do value conversion to + # separate option values if key == 'textureMaxResolution': - if preset[id][key] > 0: - temp_options2['textureMaxResolution'] = preset[id][key] - temp_options2['enableTextureMaxRes'] = True - temp_options2['textureMaxResMode'] = 1 + viewport2_options['textureMaxResolution'] = value + if value > 0: + viewport2_options['enableTextureMaxRes'] = True + viewport2_options['textureMaxResMode'] = 1 else: - temp_options2['textureMaxResolution'] = preset[id][key] - temp_options2['enableTextureMaxRes'] = False - temp_options2['textureMaxResMode'] = 0 + viewport2_options['enableTextureMaxRes'] = False + viewport2_options['textureMaxResMode'] = 0 - if key == 'multiSample': - if preset[id][key] > 0: - temp_options2['multiSampleEnable'] = True - temp_options2['multiSampleCount'] = preset[id][key] - else: - temp_options2['multiSampleEnable'] = False - temp_options2['multiSampleCount'] = preset[id][key] + elif key == 'multiSample': + viewport2_options['multiSampleEnable'] = value > 0 + viewport2_options['multiSampleCount'] = value - if key == 'renderDepthOfField': - temp_options2['renderDepthOfField'] = preset[id][key] + elif key == 'alphaCut': + viewport2_options['transparencyAlgorithm'] = 5 + viewport2_options['transparencyQuality'] = 1 - if key == 'ssaoEnable': - if preset[id][key] is True: - temp_options2['ssaoEnable'] = True - else: - temp_options2['ssaoEnable'] = False + elif key == 'hwFogFalloff': + # Settings enum value string to integer + viewport2_options['hwFogFalloff'] = int(value) - if key == 'ssaoSamples': - temp_options2['ssaoSamples'] = preset[id][key] - - if key == 'ssaoAmount': - temp_options2['ssaoAmount'] = preset[id][key] - - if key == 'ssaoRadius': - 
temp_options2['ssaoRadius'] = preset[id][key] - - if key == 'hwFogDensity': - temp_options2['hwFogDensity'] = preset[id][key] - - if key == 'ssaoFilterRadius': - temp_options2['ssaoFilterRadius'] = preset[id][key] - - if key == 'alphaCut': - temp_options2['transparencyAlgorithm'] = 5 - temp_options2['transparencyQuality'] = 1 - - if key == 'headsUpDisplay': - temp_options['headsUpDisplay'] = True - - if key == 'fogging': - temp_options['fogging'] = preset[id][key] or False - - if key == 'hwFogStart': - temp_options2['hwFogStart'] = preset[id][key] - - if key == 'hwFogEnd': - temp_options2['hwFogEnd'] = preset[id][key] - - if key == 'hwFogAlpha': - temp_options2['hwFogAlpha'] = preset[id][key] - - if key == 'hwFogFalloff': - temp_options2['hwFogFalloff'] = int(preset[id][key]) - - if key == 'hwFogColorR': - temp_options2['hwFogColorR'] = preset[id][key] - - if key == 'hwFogColorG': - temp_options2['hwFogColorG'] = preset[id][key] - - if key == 'hwFogColorB': - temp_options2['hwFogColorB'] = preset[id][key] - - if key == 'motionBlurEnable': - if preset[id][key] is True: - temp_options2['motionBlurEnable'] = True - else: - temp_options2['motionBlurEnable'] = False - - if key == 'motionBlurSampleCount': - temp_options2['motionBlurSampleCount'] = preset[id][key] - - if key == 'motionBlurShutterOpenFraction': - temp_options2['motionBlurShutterOpenFraction'] = preset[id][key] - - if key == 'lineAAEnable': - if preset[id][key] is True: - temp_options2['lineAAEnable'] = True - else: - temp_options2['lineAAEnable'] = False + # Then handle Viewport 2.0 Options + elif key in VIEWPORT2_OPTIONS: + viewport2_options[key] = value + # Then assume remainder is Viewport Options else: - temp_options[str(key)] = preset[id][key] + viewport_options[key] = value - for key in ['override_viewport_options', - 'high_quality', - 'alphaCut', - 'gpuCacheDisplayFilter', - 'multiSample', - 'ssaoEnable', - 'ssaoSamples', - 'ssaoAmount', - 'ssaoFilterRadius', - 'ssaoRadius', - 'hwFogStart', - 'hwFogEnd', - 'hwFogAlpha', - 'hwFogFalloff', - 'hwFogColorR', - 'hwFogColorG', - 'hwFogColorB', - 'hwFogDensity', - 'textureMaxResolution', - 'motionBlurEnable', - 'motionBlurSampleCount', - 'motionBlurShutterOpenFraction', - 'lineAAEnable', - 'renderDepthOfField' - ]: - temp_options.pop(key, None) - - options['viewport_options'] = temp_options - options['viewport2_options'] = temp_options2 + options['viewport_options'] = viewport_options + options['viewport2_options'] = viewport2_options # CAMERA OPTIONS - id = 'Camera Options' camera_options = {} - for key, value in preset[id].items(): + for key, value in preset['Camera Options'].items(): camera_options[key] = value options['camera_options'] = camera_options From d1d2d05ec6b8e62e6765b9e1b17ef6f7d9ba950d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 15 Sep 2022 16:15:14 +0200 Subject: [PATCH 1172/2550] flame: version frame start was wrong if handles included was off --- .../publish/extract_subset_resources.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 7adcd1453e..1b7e9b88b5 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -136,6 +136,9 @@ class ExtractSubsetResources(openpype.api.Extractor): source_duration_handles = ( source_end_handles - source_start_handles) + 1 + self.log.debug("_ 
source_duration_handles: {}".format( + source_duration_handles)) + # create staging dir path staging_dir = self.staging_dir(instance) @@ -159,18 +162,28 @@ class ExtractSubsetResources(openpype.api.Extractor): if version_data: instance.data["versionData"].update(version_data) + # version data start frame + vd_frame_start = frame_start + if include_handles: + vd_frame_start = frame_start_handle + if r_speed != 1.0: instance.data["versionData"].update({ - "frameStart": frame_start_handle, + "frameStart": vd_frame_start, "frameEnd": ( - (frame_start_handle + source_duration_handles - 1) + (vd_frame_start + source_duration_handles - 1) - (r_handle_start + r_handle_end) ) }) if not retimed_handles: instance.data["versionData"].update({ "handleStart": handle_start, - "handleEnd": handle_end + "handleEnd": handle_end, + "frameStart": vd_frame_start, + "frameEnd": ( + (vd_frame_start + source_duration_handles - 1) + - (handle_start + handle_end) + ) }) self.log.debug("_ i_version_data: {}".format( instance.data["versionData"] From 2560fc0081292b7862367101e895a7d3526045da Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 16:29:16 +0200 Subject: [PATCH 1173/2550] Simplify logic where we're taking values directly --- openpype/hosts/maya/api/lib.py | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 3561c3f53f..4d6f599d2b 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2460,14 +2460,20 @@ def bake_to_world_space(nodes, def load_capture_preset(data=None): import capture - preset = data + options = dict() + viewport_options = dict() + viewport2_options = dict() + camera_options = dict() # Straight key-value match from settings to capture arguments - for settings_key in ["Codec", "Generic", "Resolution"]: - for key, value in preset[settings_key].items(): - options[key] = value + options.update(preset["Codec"]) + options.update(preset["Generic"]) + options.update(preset["Resolution"]) + + camera_options.update(preset['Camera Options']) + viewport_options.update(preset["Renderer"]) # DISPLAY OPTIONS disp_options = {} @@ -2484,13 +2490,6 @@ def load_capture_preset(data=None): options['display_options'] = disp_options - # VIEWPORT OPTIONS - viewport_options = {} - viewport2_options = {} - - for key, value in preset['Renderer'].items(): - viewport_options[key] = value - # Viewport Options has a mixture of Viewport2 Options and Viewport Options # to pass along to capture. 
So we'll need to differentiate between the two VIEWPORT2_OPTIONS = { @@ -2553,11 +2552,6 @@ def load_capture_preset(data=None): options['viewport_options'] = viewport_options options['viewport2_options'] = viewport2_options - - # CAMERA OPTIONS - camera_options = {} - for key, value in preset['Camera Options'].items(): - camera_options[key] = value options['camera_options'] = camera_options # use active sound track From f99b31d13a7bafbfe3763978db9a10a1024f8d85 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 16:30:32 +0200 Subject: [PATCH 1174/2550] Don't remap input argument `preset` isn't necessarily more explicit than `data` and actually adds to confusion because it makes it feel like it's an actual capture preset but instead it converts OpenPype preset data into capture preset --- openpype/hosts/maya/api/lib.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 4d6f599d2b..7220f53e66 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2460,7 +2460,6 @@ def bake_to_world_space(nodes, def load_capture_preset(data=None): import capture - preset = data options = dict() viewport_options = dict() @@ -2468,18 +2467,18 @@ def load_capture_preset(data=None): camera_options = dict() # Straight key-value match from settings to capture arguments - options.update(preset["Codec"]) - options.update(preset["Generic"]) - options.update(preset["Resolution"]) + options.update(data["Codec"]) + options.update(data["Generic"]) + options.update(data["Resolution"]) - camera_options.update(preset['Camera Options']) - viewport_options.update(preset["Renderer"]) + camera_options.update(data['Camera Options']) + viewport_options.update(data["Renderer"]) # DISPLAY OPTIONS disp_options = {} - for key in preset['Display Options']: + for key in data['Display Options']: if key.startswith('background'): - disp_options[key] = preset['Display Options'][key] + disp_options[key] = data['Display Options'][key] if len(disp_options[key]) == 4: disp_options[key][0] = (float(disp_options[key][0])/255) disp_options[key][1] = (float(disp_options[key][1])/255) @@ -2513,7 +2512,7 @@ def load_capture_preset(data=None): "motionBlurShutterOpenFraction", "lineAAEnable" } - for key, value in preset['Viewport Options'].items(): + for key, value in data['Viewport Options'].items(): # There are some keys we want to ignore if key in {"override_viewport_options", "high_quality"}: From 1132d8d4f8ffa4b224308b5099424e9984d0822c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 16:34:11 +0200 Subject: [PATCH 1175/2550] Add docstring --- openpype/hosts/maya/api/lib.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 7220f53e66..dec698062a 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2459,6 +2459,19 @@ def bake_to_world_space(nodes, def load_capture_preset(data=None): + """Convert OpenPype Extract Playblast settings to `capture` arguments + + Input data is the settings from: + `project_settings/maya/publish/ExtractPlayblast/capture_preset` + + Args: + data (dict): Capture preset settings from OpenPype settings + + Returns: + dict: `capture.capture` compatible keyword arguments + + """ + import capture options = dict() From 598ec6e2dcb5664407945d081ae15adad6b4d8c6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 16:57:42 +0200 Subject: 
[PATCH 1176/2550] Improve readability of color conversion logic --- openpype/hosts/maya/api/lib.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index dec698062a..5b436a018d 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2489,14 +2489,17 @@ def load_capture_preset(data=None): # DISPLAY OPTIONS disp_options = {} - for key in data['Display Options']: + for key, value in data['Display Options'].items(): if key.startswith('background'): - disp_options[key] = data['Display Options'][key] - if len(disp_options[key]) == 4: - disp_options[key][0] = (float(disp_options[key][0])/255) - disp_options[key][1] = (float(disp_options[key][1])/255) - disp_options[key][2] = (float(disp_options[key][2])/255) - disp_options[key].pop() + # Convert background, backgroundTop, backgroundBottom colors + if len(value) == 4: + # Ignore alpha + convert RGB to float + value = [ + float(value[0]) / 255, + float(value[1]) / 255, + float(value[2]) / 255 + ] + disp_options[key] = value else: disp_options['displayGradient'] = True From 9403dc743a341e56c44b7afb2f9549d31a3216fa Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 15 Sep 2022 23:44:02 +0800 Subject: [PATCH 1177/2550] adding a Qt lockfile dialog for lockfile tasks --- openpype/hosts/maya/api/pipeline.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 969680bdf5..c13b47ef4a 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -112,7 +112,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): register_event_callback("taskChanged", on_task_changed) register_event_callback("workfile.open.before", before_workfile_open) register_event_callback("workfile.save.before", before_workfile_save) - register_event_callback("workfile.save.after", after_workfile_save) + register_event_callback("workfile.save.before", after_workfile_save) def open_workfile(self, filepath): return open_file(filepath) @@ -203,7 +203,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): self.log.info("Installed event handler _on_scene_save..") self.log.info("Installed event handler _before_scene_save..") - self.log.info("Insatall event handler _on_after_save..") + self.log.info("Installed event handler _on_after_save..") self.log.info("Installed event handler _on_scene_new..") self.log.info("Installed event handler _on_maya_initialized..") self.log.info("Installed event handler _on_scene_open..") @@ -496,9 +496,7 @@ def on_before_save(): def on_after_save(): """Check if there is a lockfile after save""" - filepath = current_file() - if not is_workfile_locked(filepath): - create_workfile_lock(filepath) + check_lock_on_current_file() def check_lock_on_current_file(): @@ -621,6 +619,7 @@ def on_new(): "from openpype.hosts.maya.api import lib;" "lib.add_render_layer_change_observer()") lib.set_context_settings() + _remove_workfile_lock() def on_task_changed(): @@ -660,12 +659,14 @@ def on_task_changed(): def before_workfile_open(): - _remove_workfile_lock() + if handle_workfile_locks(): + _remove_workfile_lock() def before_workfile_save(event): project_name = legacy_io.active_project() - _remove_workfile_lock() + if handle_workfile_locks(): + _remove_workfile_lock() workdir_path = event["workdir_path"] if workdir_path: create_workspace_mel(workdir_path, project_name) @@ -673,9 +674,10 @@ def 
before_workfile_save(event): def after_workfile_save(event): workfile_name = event["filename"] - if workfile_name: - if not is_workfile_locked(workfile_name): - create_workfile_lock(workfile_name) + if handle_workfile_locks(): + if workfile_name: + if not is_workfile_locked(workfile_name): + create_workfile_lock(workfile_name) class MayaDirmap(HostDirmap): From 0fb1b9be93de9fe690afc4b1ca6fca2b1f8ce2fd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Sep 2022 17:59:02 +0200 Subject: [PATCH 1178/2550] OP-3682 - updated AddonSource --- .../distribution/addon_distribution.py | 33 +++++++++++++++++-- .../tests/test_addon_distributtion.py | 8 ++--- 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py index ac9c69deca..be6faab3e6 100644 --- a/common/openpype_common/distribution/addon_distribution.py +++ b/common/openpype_common/distribution/addon_distribution.py @@ -32,21 +32,50 @@ class MultiPlatformPath(object): @attr.s class AddonSource(object): type = attr.ib() - url = attr.ib(default=None) + + +@attr.s +class LocalAddonSource(AddonSource): path = attr.ib(default=attr.Factory(MultiPlatformPath)) +@attr.s +class WebAddonSource(AddonSource): + url = attr.ib(default=None) + + @attr.s class AddonInfo(object): """Object matching json payload from Server""" name = attr.ib() version = attr.ib() - sources = attr.ib(default=attr.Factory(list), type=AddonSource) + sources = attr.ib(default=attr.Factory(list)) hash = attr.ib(default=None) description = attr.ib(default=None) license = attr.ib(default=None) authors = attr.ib(default=None) + @classmethod + def from_dict(cls, data): + sources = [] + for source in data.get("sources", []): + if source.get("type") == UrlType.FILESYSTEM.value: + source_addon = LocalAddonSource(type=source["type"], + path=source["path"]) + if source.get("type") == UrlType.HTTP.value: + source_addon = WebAddonSource(type=source["type"], + url=source["url"]) + + sources.append(source_addon) + + return cls(name=data.get("name"), + version=data.get("version"), + hash=data.get("hash"), + description=data.get("description"), + sources=sources, + license=data.get("license"), + authors=data.get("authors")) + class AddonDownloader: log = logging.getLogger(__name__) diff --git a/common/openpype_common/distribution/tests/test_addon_distributtion.py b/common/openpype_common/distribution/tests/test_addon_distributtion.py index 7dd27fd44f..faf4e01e22 100644 --- a/common/openpype_common/distribution/tests/test_addon_distributtion.py +++ b/common/openpype_common/distribution/tests/test_addon_distributtion.py @@ -75,7 +75,7 @@ def test_get_downloader(printer, addon_downloader): def test_addon_info(printer, sample_addon_info): valid_minimum = {"name": "openpype_slack", "version": "1.0.0"} - assert AddonInfo(**valid_minimum), "Missing required fields" + assert AddonInfo.from_dict(valid_minimum), "Missing required fields" assert AddonInfo(name=valid_minimum["name"], version=valid_minimum["version"]), \ "Missing required fields" @@ -84,7 +84,7 @@ def test_addon_info(printer, sample_addon_info): # TODO should be probably implemented assert AddonInfo(valid_minimum), "Wrong argument format" - addon = AddonInfo(**sample_addon_info) + addon = AddonInfo.from_dict(sample_addon_info) assert addon, "Should be created" assert addon.name == "openpype_slack", "Incorrect name" assert addon.version == "1.0.0", "Incorrect version" @@ -95,10 +95,10 @@ def test_addon_info(printer, 
sample_addon_info): addon_as_dict = attr.asdict(addon) assert addon_as_dict["name"], "Dict approach should work" - with pytest.raises(AttributeError): + with pytest.raises(TypeError): # TODO should be probably implemented as . not dict first_source = addon.sources[0] - assert first_source.type == "http", "Not implemented" + assert first_source["type"] == "http", "Not implemented" def test_update_addon_state(printer, sample_addon_info, From df370e5d3cd321b3a34a37b7247ccf356ae9e053 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 23:43:09 +0200 Subject: [PATCH 1179/2550] Refactor to match with API changes of OpenPype --- openpype/hosts/fusion/api/lib.py | 8 ++++---- openpype/hosts/fusion/api/menu.py | 5 ++--- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index db8dfb2795..19242da304 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -17,10 +17,10 @@ from openpype.pipeline import ( switch_container, legacy_io, ) +from openpype.pipeline.context_tools import get_current_project_asset + from .pipeline import get_current_comp, comp_lock_and_undo_chunk -from openpype.api import ( - get_asset -) + self = sys.modules[__name__] self._project = None @@ -70,7 +70,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True, **kwargs): def set_framerange(): - asset_doc = get_asset() + asset_doc = get_current_project_asset() start = asset_doc["data"]["frameStart"] end = asset_doc["data"]["frameEnd"] diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 823670b9cf..c5eb093247 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -2,9 +2,7 @@ import sys from Qt import QtWidgets, QtCore -from avalon import api from openpype.tools.utils import host_tools - from openpype.style import load_stylesheet from openpype.lib import register_event_callback from openpype.hosts.fusion.scripts import ( @@ -14,6 +12,7 @@ from openpype.hosts.fusion.scripts import ( from openpype.hosts.fusion.api import ( set_framerange ) +from openpype.pipeline import legacy_io from .pulse import FusionPulse @@ -129,7 +128,7 @@ class OpenPypeMenu(QtWidgets.QWidget): def on_task_changed(self): # Update current context label - label = api.Session["AVALON_ASSET"] + label = legacy_io.Session["AVALON_ASSET"] self.asset_label.setText(label) def register_callback(self, name, fn): From 618e6267b48b44dcc62615f33489e199a00a19bd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 23:43:42 +0200 Subject: [PATCH 1180/2550] Allow to minimize the menu so it doesn't always have to stay on top --- openpype/hosts/fusion/api/menu.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index c5eb093247..bba94053a2 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -44,6 +44,7 @@ class OpenPypeMenu(QtWidgets.QWidget): QtCore.Qt.Window | QtCore.Qt.CustomizeWindowHint | QtCore.Qt.WindowTitleHint + | QtCore.Qt.WindowMinimizeButtonHint | QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowStaysOnTopHint ) From 83fb00e0ffbbceb969aff2cf0865af08545784ca Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 23:54:28 +0200 Subject: [PATCH 1181/2550] Get start frame including handles --- .../fusion/plugins/load/load_sequence.py | 36 ++++++++++++++----- 1 file changed, 27 insertions(+), 9 deletions(-) diff --git 
a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index abd0f4e411..faac942c53 100644 --- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -149,9 +149,8 @@ class FusionLoadSequence(load.LoaderPlugin): tool["Clip"] = path # Set global in point to start frame (if in version.data) - start = context["version"]["data"].get("frameStart", None) - if start is not None: - loader_shift(tool, start, relative=False) + start = self._get_start(context["version"], tool) + loader_shift(tool, start, relative=False) imprint_container(tool, name=name, @@ -214,12 +213,7 @@ class FusionLoadSequence(load.LoaderPlugin): # Get start frame from version data project_name = legacy_io.active_project() version = get_version_by_id(project_name, representation["parent"]) - start = version["data"].get("frameStart") - if start is None: - self.log.warning("Missing start frame for updated version" - "assuming starts at frame 0 for: " - "{} ({})".format(tool.Name, representation)) - start = 0 + start = self._get_start(version, tool) with comp_lock_and_undo_chunk(comp, "Update Loader"): @@ -256,3 +250,27 @@ class FusionLoadSequence(load.LoaderPlugin): """Get first file in representation root""" files = sorted(os.listdir(root)) return os.path.join(root, files[0]) + + def _get_start(self, version_doc, tool): + """Return real start frame of published files (incl. handles)""" + data = version_doc["data"] + + # Get start frame directly with handle if it's in data + start = data.get("frameStartHandle") + if start is not None: + return start + + # Get frame start without handles + start = data.get("frameStart") + if start is None: + self.log.warning("Missing start frame for version " + "assuming starts at frame 0 for: " + "{}".format(tool.Name)) + return 0 + + # Use `handleStart` if the data is available + handle_start = data.get("handleStart") + if handle_start: + start -= handle_start + + return start From 0806534a278a6ae1a95f1f4f62ba17a86f89d5e0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 23:56:09 +0200 Subject: [PATCH 1182/2550] Return early if no need to shift --- openpype/hosts/fusion/plugins/load/load_sequence.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index faac942c53..1614704090 100644 --- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -101,6 +101,9 @@ def loader_shift(loader, frame, relative=True): else: shift = frame - old_in + if not shift: + return + # Shifting global in will try to automatically compensate for the change # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those # input values to "just shift" the clip From 5058de28881aee71f8681034b4caa18f6b7b0605 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 23:56:46 +0200 Subject: [PATCH 1183/2550] Fix return value --- openpype/hosts/fusion/plugins/load/load_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index 1614704090..6f44c61d1b 100644 --- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -102,7 +102,7 @@ def loader_shift(loader, frame, relative=True): shift = frame - old_in if not shift: - return + 
return 0 # Shifting global in will try to automatically compensate for the change # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those From df4d92d973c7d85561745dae167d744da6ac5e56 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 22:48:27 +0200 Subject: [PATCH 1184/2550] Hack in support for Fusion 18 (cherry picked from commit 5a75db9d7ba1f4df78e84f1315e8e8d9c9a357c0) --- openpype/hosts/fusion/hooks/pre_fusion_setup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index c78d433e5c..ec5889a88a 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -33,6 +33,13 @@ class FusionPrelaunch(PreLaunchHook): self.log.info(f"Setting {py36_var}: '{py36_dir}'...") self.launch_context.env[py36_var] = py36_dir + # TODO: Set this for EITHER Fu16-17 OR Fu18+, don't do both + # Fusion 18+ does not look in FUSION16_PYTHON36_HOME anymore + # but instead uses FUSION_PYTHON3_HOME and requires the Python to + # be available on PATH to work. So let's enforce that for now. + self.launch_context.env["FUSION_PYTHON3_HOME"] = py36_dir + self.launch_context.env["PATH"] += ";" + py36_dir + # Add our Fusion Master Prefs which is the only way to customize # Fusion to define where it can read custom scripts and tools from self.log.info(f"Setting OPENPYPE_FUSION: {HOST_DIR}") From 0e4a73f6808c6c22a7ffa45a97589c7990473829 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 15 Sep 2022 23:22:56 +0200 Subject: [PATCH 1185/2550] Fusion: Set OCIO project setting and set OCIO env var on launch (cherry picked from commit 8e9a7200d35ab7cba174855acfe03794936125cf) --- .../fusion/hooks/pre_fusion_ocio_hook.py | 40 +++++++++++++++++++ .../defaults/project_anatomy/imageio.json | 10 +++++ .../schemas/schema_anatomy_imageio.json | 29 ++++++++++++++ 3 files changed, 79 insertions(+) create mode 100644 openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py diff --git a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py new file mode 100644 index 0000000000..f7c7bc0b4c --- /dev/null +++ b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py @@ -0,0 +1,40 @@ +import os +import platform + +from openpype.lib import PreLaunchHook, ApplicationLaunchFailed + + +class FusionPreLaunchOCIO(PreLaunchHook): + """Set OCIO environment variable for Fusion""" + app_groups = ["fusion"] + + def execute(self): + """Hook entry method.""" + + # get image io + project_anatomy = self.data["anatomy"] + + # make sure anatomy settings are having flame key + imageio_fusion = project_anatomy["imageio"].get("fusion") + if not imageio_fusion: + raise ApplicationLaunchFailed(( + "Anatomy project settings are missing `fusion` key. 
" + "Please make sure you remove project overrides on " + "Anatomy ImageIO") + ) + + ocio = imageio_fusion.get("ocio") + enabled = ocio.get("enabled", False) + if not enabled: + return + + platform_key = platform.system().lower() + ocio_path = ocio["configFilePath"][platform_key] + if not ocio_path: + raise ApplicationLaunchFailed( + "Fusion OCIO is enabled in project settings but no OCIO config" + f"path is set for your current platform: {platform_key}" + ) + + self.log.info(f"Setting OCIO config path: {ocio_path}") + self.launch_context.env["OCIO"] = os.pathsep.join(ocio_path) diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index f0be8f95f4..9b5e8639b1 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -236,6 +236,16 @@ "viewTransform": "sRGB gamma" } }, + "fusion": { + "ocio": { + "enabled": false, + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + } + } + }, "flame": { "project": { "colourPolicy": "ACES 1.1", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index ef8c907dda..644463fece 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -427,6 +427,35 @@ } ] }, + + { + "key": "fusion", + "type": "dict", + "label": "Fusion", + "children": [ + { + "key": "ocio", + "type": "dict", + "label": "OCIO", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Set OCIO variable for Fusion" + }, + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + } + ] + } + ] + }, { "key": "flame", "type": "dict", From cd40fab99ff2627bf29aa2c9de7a884d7640ff7e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 00:01:41 +0200 Subject: [PATCH 1186/2550] Add Fusion 18 application to defaults --- .../defaults/system_settings/applications.json | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 30b0a5cbe3..4a3e0c1b94 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -730,6 +730,21 @@ "OPENPYPE_LOG_NO_COLORS": "Yes" }, "variants": { + "18": { + "executables": { + "windows": [ + "C:\\Program Files\\Blackmagic Design\\Fusion 18\\Fusion.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": {} + }, "17": { "executables": { "windows": [ From c5e7d8f93c620abbcc64a6fdcb7a6824558f57f7 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:33:20 +0200 Subject: [PATCH 1187/2550] :recycle: handle file saving --- openpype/hosts/houdini/api/pipeline.py | 7 +++++++ .../houdini/plugins/publish/increment_current_file.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b9246251a2..4ff24c8004 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py 
@@ -166,6 +166,13 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx = self._create_context_node() return lib.read(op_ctx) + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") + + hou.hipFile.save(file_name=dst_path, + save_to_recent_files=True) + def on_file_event_callback(event): if event == hou.hipFileEventType.AfterLoad: diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index c990f481d3..92ac9fbeca 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -27,4 +27,4 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): ), "Collected filename from current scene name." new_filepath = version_up(current_file) - host.save(new_filepath) + host.save_file(new_filepath) From 99bf89cafae2e94ec927d948811e60e5b15cfb44 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:34:02 +0200 Subject: [PATCH 1188/2550] :recycle: handle frame data --- openpype/hosts/houdini/api/lib.py | 27 +++++++++++++++++++ openpype/hosts/houdini/api/plugin.py | 2 +- .../houdini/plugins/publish/collect_frames.py | 2 ++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index f438944b09..d0a3068531 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -530,3 +530,30 @@ def get_template_from_value(key, value): raise TypeError("Unsupported type: %r" % type(value)) return parm + + +def get_frame_data(node): + """Get the frame data: start frame, end frame and steps. + + Args: + node(hou.Node) + + Returns: + dict: frame data for star, end and steps. 
+ + """ + data = {} + + if node.parm("trange") is None: + + return data + + if node.evalParm("trange") == 0: + self.log.debug("trange is 0") + return data + + data["frameStart"] = node.evalParm("f1") + data["frameEnd"] = node.evalParm("f2") + data["steps"] = node.evalParm("f3") + + return data \ No newline at end of file diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 28830bdc64..ee73745651 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read +from .lib import imprint, read, get_frame_data class OpenPypeCreatorError(CreatorError): diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index cad894cc3f..cd94635c29 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -25,6 +25,8 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): ropnode = instance.data["members"][0] + frame_data = lib.get_frame_data(ropnode) + instance.data.update(frame_data) start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) From bd8b2c7d70a13a85f89ab4f60489a8114e9cdf01 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:34:26 +0200 Subject: [PATCH 1189/2550] :recycle: arnold creator --- .../plugins/create/create_arnold_ass.py | 45 ++++++++++--------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index 72088e43b0..b3926b8cee 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -1,9 +1,12 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Arnold ASS files.""" from openpype.hosts.houdini.api import plugin -class CreateArnoldAss(plugin.Creator): +class CreateArnoldAss(plugin.HoudiniCreator): """Arnold .ass Archive""" + identifier = "io.openpype.creators.houdini.ass" label = "Arnold ASS" family = "ass" icon = "magic" @@ -12,42 +15,40 @@ class CreateArnoldAss(plugin.Creator): # Default extension: `.ass` or `.ass.gz` ext = ".ass" - def __init__(self, *args, **kwargs): - super(CreateArnoldAss, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "arnold"}) - self.data.update({"node_type": "arnold"}) + instance = super(CreateArnoldAss, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def process(self): - node = super(CreateArnoldAss, self).process() + instance_node = hou.node(instance.get("instance_node")) - basename = node.name() - node.setName(basename + "_ASS", unique_name=True) + basename = instance_node.name() + instance_node.setName(basename + "_ASS", unique_name=True) # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export - parm_template_group = node.parmTemplateGroup() + parm_template_group = instance_node.parmTemplateGroup() 
parm_template_group.hideFolder("Properties", True) - node.setParmTemplateGroup(parm_template_group) + instance_node.setParmTemplateGroup(parm_template_group) - filepath = '$HIP/pyblish/`chs("subset")`.$F4{}'.format(self.ext) + filepath = "$HIP/pyblish/{}.$F4{}".format(subset_name, self.ext) parms = { # Render frame range "trange": 1, - # Arnold ROP settings "ar_ass_file": filepath, - "ar_ass_export_enable": 1 + "ar_ass_export_enable": 1, + "filename": filepath } - node.setParms(parms) - # Lock the ASS export attribute - node.parm("ar_ass_export_enable").lock(True) - - # Lock some Avalon attributes - to_lock = ["family", "id"] + # Lock any parameters in this list + to_lock = ["ar_ass_export_enable", "family", "id"] for name in to_lock: - parm = node.parm(name) + parm = instance_node.parm(name) parm.lock(True) From 93b3b0403401075596e9951c06fc5414e7fa50a0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:34:42 +0200 Subject: [PATCH 1190/2550] :recycle: composite creator --- .../plugins/create/create_composite.py | 51 +++++++++---------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index e278708076..96d8ca9fd5 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -1,44 +1,43 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating composite sequences.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateCompositeSequence(plugin.Creator): +class CreateCompositeSequence(plugin.HoudiniCreator): """Composite ROP to Image Sequence""" + identifier = "io.openpype.creators.houdini.imagesequence" label = "Composite (Image Sequence)" family = "imagesequence" icon = "gears" - def __init__(self, *args, **kwargs): - super(CreateCompositeSequence, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou + from pprint import pformat - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "comp"}) - # Type of ROP node to create - self.data.update({"node_type": "comp"}) + instance = super(CreateCompositeSequence, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + self.log.info(pformat(instance)) + print(pformat(instance)) + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
+ filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) + parms = { + "copoutput": filepath + } - """ - parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} - - if self.nodes: - node = self.nodes[0] - parms.update({"coppath": node.path()}) - - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] for name in to_lock: - try: - parm = instance.parm(name) - parm.lock(True) - except AttributeError: - # missing lock pattern - self.log.debug( - "missing lock pattern {}".format(name)) + parm = instance_node.parm(name) + parm.lock(True) + From 839ded23bad51e9e949e960794baceaf4f4d9958 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 00:35:37 +0200 Subject: [PATCH 1191/2550] Make `handle_start` and `handle_end` more explicit arguments --- openpype/hosts/fusion/api/lib.py | 34 ++++++++++++++------------------ 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 19242da304..dcf205ff6a 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -25,7 +25,8 @@ self = sys.modules[__name__] self._project = None -def update_frame_range(start, end, comp=None, set_render_range=True, **kwargs): +def update_frame_range(start, end, comp=None, set_render_range=True, + handle_start=0, handle_end=0): """Set Fusion comp's start and end frame range Args: @@ -34,7 +35,8 @@ def update_frame_range(start, end, comp=None, set_render_range=True, **kwargs): comp (object, Optional): comp object from fusion set_render_range (bool, Optional): When True this will also set the composition's render start and end frame. - kwargs (dict): additional kwargs + handle_start (float, int, Optional): frame handles before start frame + handle_end (float, int, Optional): frame handles after end frame Returns: None @@ -44,20 +46,15 @@ def update_frame_range(start, end, comp=None, set_render_range=True, **kwargs): if not comp: comp = get_current_comp() + # Convert any potential none type to zero + handle_start = handle_start or 0 + handle_end = handle_end or 0 + attrs = { - "COMPN_GlobalStart": start, - "COMPN_GlobalEnd": end + "COMPN_GlobalStart": start - handle_start, + "COMPN_GlobalEnd": end + handle_end } - # exclude handles if any found in kwargs - if kwargs.get("handle_start"): - handle_start = kwargs.get("handle_start") - attrs["COMPN_GlobalStart"] = int(start - handle_start) - - if kwargs.get("handle_end"): - handle_end = kwargs.get("handle_end") - attrs["COMPN_GlobalEnd"] = int(end + handle_end) - # set frame range if set_render_range: attrs.update({ @@ -73,12 +70,11 @@ def set_framerange(): asset_doc = get_current_project_asset() start = asset_doc["data"]["frameStart"] end = asset_doc["data"]["frameEnd"] - - data = { - "handle_start": asset_doc["data"]["handleStart"], - "handle_end": asset_doc["data"]["handleEnd"] - } - update_frame_range(start, end, set_render_range=True, **data) + handle_start = asset_doc["data"]["handleStart"], + handle_end = asset_doc["data"]["handleEnd"], + update_frame_range(start, end, set_render_range=True, + handle_start=handle_start, + handle_end=handle_end) def get_additional_data(container): From bb6c8817608c7e6e0c5a5bc40b1652361efb1651 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 16 Sep 2022 11:15:25 +0200 Subject: [PATCH 1192/2550] OP-3940 - renamed sync_workfile_version to enabled --- .../hosts/photoshop/plugins/publish/collect_version.py | 10 +++------- 
.../settings/defaults/project_settings/photoshop.json | 2 +- .../projects_schema/schema_project_photoshop.json | 4 ++-- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_version.py b/openpype/hosts/photoshop/plugins/publish/collect_version.py index 46f48b20fb..aff9f13bfb 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_version.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_version.py @@ -18,11 +18,7 @@ class CollectVersion(pyblish.api.InstancePlugin): hosts = ["photoshop"] families = ["image", "review"] - # controlled by Settings - sync_workfile_version = False - def process(self, instance): - if self.sync_workfile_version: - workfile_version = instance.context.data["version"] - self.log.debug(f"Applying version {workfile_version}") - instance.data["version"] = workfile_version + workfile_version = instance.context.data["version"] + self.log.debug(f"Applying version {workfile_version}") + instance.data["version"] = workfile_version diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index 8ea36a3000..9d74df7cd5 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -16,7 +16,7 @@ "flatten_subset_template": "" }, "CollectVersion": { - "sync_workfile_version": true + "enabled": false }, "ValidateContainers": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 500c5d027b..e63e25d2c2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -143,8 +143,8 @@ }, { "type": "boolean", - "key": "sync_workfile_version", - "label": "Synchronize version with workfile" + "key": "enabled", + "label": "Enabled" } ] }, From 9c6d0b1d7e10a9d5d56832f8b0bb25952f25ad38 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 16 Sep 2022 11:20:52 +0200 Subject: [PATCH 1193/2550] OP-3682 - changed to relative import --- common/openpype_common/distribution/addon_distribution.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py index be6faab3e6..ad17a831d8 100644 --- a/common/openpype_common/distribution/addon_distribution.py +++ b/common/openpype_common/distribution/addon_distribution.py @@ -7,7 +7,7 @@ import requests import platform import shutil -from common.openpype_common.distribution.file_handler import RemoteFileHandler +from .file_handler import RemoteFileHandler class UrlType(Enum): From 52e1b563672d757a19b7ab0452f291189f10de6f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 16 Sep 2022 11:47:05 +0200 Subject: [PATCH 1194/2550] OP-3952 - added text filter on project name to Tray Publisher --- openpype/tools/traypublisher/window.py | 13 +++++++++++++ openpype/tools/utils/models.py | 4 +++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index cc33287091..d161afd37b 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -7,6 +7,7 @@ publishing plugins. 
""" from Qt import QtWidgets, QtCore +import qtawesome from openpype.pipeline import ( install_host, @@ -43,6 +44,7 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): projects_model = ProjectModel(dbcon) projects_proxy = ProjectSortFilterProxy() projects_proxy.setSourceModel(projects_model) + projects_proxy.setFilterKeyColumn(0) projects_view = QtWidgets.QListView(content_widget) projects_view.setObjectName("ChooseProjectView") @@ -59,10 +61,17 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): btns_layout.addWidget(cancel_btn, 0) btns_layout.addWidget(confirm_btn, 0) + txt_filter = QtWidgets.QLineEdit() + txt_filter.setPlaceholderText("Quick filter projects..") + txt_filter.setClearButtonEnabled(True) + txt_filter.addAction(qtawesome.icon("fa.filter", color="gray"), + QtWidgets.QLineEdit.LeadingPosition) + content_layout = QtWidgets.QVBoxLayout(content_widget) content_layout.setContentsMargins(0, 0, 0, 0) content_layout.setSpacing(20) content_layout.addWidget(header_label, 0) + content_layout.addWidget(txt_filter, 0) content_layout.addWidget(projects_view, 1) content_layout.addLayout(btns_layout, 0) @@ -79,11 +88,15 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): projects_view.doubleClicked.connect(self._on_double_click) confirm_btn.clicked.connect(self._on_confirm_click) cancel_btn.clicked.connect(self._on_cancel_click) + txt_filter.textChanged.connect( + lambda: projects_proxy.setFilterRegularExpression( + txt_filter.text())) self._projects_view = projects_view self._projects_model = projects_model self._cancel_btn = cancel_btn self._confirm_btn = confirm_btn + self._txt_filter = txt_filter self._publisher_window = publisher_window self._project_name = None diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 1faccef4dd..817d9c0944 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -356,10 +356,12 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): def filterAcceptsRow(self, source_row, source_parent): index = self.sourceModel().index(source_row, 0, source_parent) + string_pattern = self.filterRegularExpression().pattern() if self._filter_enabled: result = self._custom_index_filter(index) if result is not None: - return result + project_name = index.data(PROJECT_NAME_ROLE) + return string_pattern in project_name return super(ProjectSortFilterProxy, self).filterAcceptsRow( source_row, source_parent From ef33cda784b591e1faa899b9c4db9994146222fe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 16 Sep 2022 12:46:06 +0200 Subject: [PATCH 1195/2550] OP-3952 - used PlaceholderLineEdit Changed lambda to separate method as lamba is supposed to have some issue during QtDestroy --- openpype/tools/traypublisher/window.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index d161afd37b..6c17c66016 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -20,6 +20,7 @@ from openpype.tools.utils.models import ( ProjectModel, ProjectSortFilterProxy ) +from openpype.tools.utils import PlaceholderLineEdit class StandaloneOverlayWidget(QtWidgets.QFrame): @@ -61,7 +62,7 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): btns_layout.addWidget(cancel_btn, 0) btns_layout.addWidget(confirm_btn, 0) - txt_filter = QtWidgets.QLineEdit() + txt_filter = PlaceholderLineEdit(content_widget) txt_filter.setPlaceholderText("Quick filter projects..") 
txt_filter.setClearButtonEnabled(True) txt_filter.addAction(qtawesome.icon("fa.filter", color="gray"), @@ -88,12 +89,11 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): projects_view.doubleClicked.connect(self._on_double_click) confirm_btn.clicked.connect(self._on_confirm_click) cancel_btn.clicked.connect(self._on_cancel_click) - txt_filter.textChanged.connect( - lambda: projects_proxy.setFilterRegularExpression( - txt_filter.text())) + txt_filter.textChanged.connect(self._on_text_changed) self._projects_view = projects_view self._projects_model = projects_model + self._projects_proxy = projects_proxy self._cancel_btn = cancel_btn self._confirm_btn = confirm_btn self._txt_filter = txt_filter @@ -115,6 +115,10 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): def _on_cancel_click(self): self._set_project(self._project_name) + def _on_text_changed(self): + self._projects_proxy.setFilterRegularExpression( + self._txt_filter.text()) + def set_selected_project(self): index = self._projects_view.currentIndex() From eafae24e239f4130e3f6c4069b0d085218757614 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 16 Sep 2022 13:53:28 +0200 Subject: [PATCH 1196/2550] copy of workfile does not use 'copy' function but 'copyfile' --- openpype/tools/workfiles/files_widget.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 7377d10171..b7d31e4af4 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -578,7 +578,7 @@ class FilesWidget(QtWidgets.QWidget): src = self._get_selected_filepath() dst = os.path.join(self._workfiles_root, work_file) - shutil.copy(src, dst) + shutil.copyfile(src, dst) self.workfile_created.emit(dst) @@ -675,7 +675,7 @@ class FilesWidget(QtWidgets.QWidget): else: self.host.save_file(filepath) else: - shutil.copy(src_path, filepath) + shutil.copyfile(src_path, filepath) if isinstance(self.host, IWorkfileHost): self.host.open_workfile(filepath) else: From 210a3e05525913f7439b805c9671e38b7bae40db Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 16 Sep 2022 14:19:07 +0200 Subject: [PATCH 1197/2550] OP-3953 - added persisting of last selected project in Tray Publisher Last selected project is stored in .json in app folder - eg. next to unzipped version zip. --- openpype/tools/traypublisher/window.py | 32 ++++++++++++++++++++++++++ openpype/tools/utils/models.py | 13 +++++++++++ 2 files changed, 45 insertions(+) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index cc33287091..2d32b5d6bf 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -19,6 +19,25 @@ from openpype.tools.utils.models import ( ProjectModel, ProjectSortFilterProxy ) +import appdirs +from openpype.lib import JSONSettingRegistry + + +class TrayPublisherRegistry(JSONSettingRegistry): + """Class handling OpenPype general settings registry. + + Attributes: + vendor (str): Name used for path construction. + product (str): Additional name used for path construction. 
+ + """ + + def __init__(self): + self.vendor = "pypeclub" + self.product = "openpype" + name = "tray_publisher" + path = appdirs.user_data_dir(self.product, self.vendor) + super(TrayPublisherRegistry, self).__init__(name, path) class StandaloneOverlayWidget(QtWidgets.QFrame): @@ -90,6 +109,16 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): def showEvent(self, event): self._projects_model.refresh() + + setting_registry = TrayPublisherRegistry() + project_name = setting_registry.get_item("project_name") + if project_name: + index = self._projects_model.get_index(project_name) + if index: + mode = QtCore.QItemSelectionModel.Select | \ + QtCore.QItemSelectionModel.Rows + self._projects_view.selectionModel().select(index, mode) + self._cancel_btn.setVisible(self._project_name is not None) super(StandaloneOverlayWidget, self).showEvent(event) @@ -119,6 +148,9 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): self.setVisible(False) self.project_selected.emit(project_name) + setting_registry = TrayPublisherRegistry() + setting_registry.set_item("project_name", project_name) + class TrayPublishWindow(PublisherWindow): def __init__(self, *args, **kwargs): diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 1faccef4dd..2d917fcc49 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -330,6 +330,19 @@ class ProjectModel(QtGui.QStandardItemModel): if new_items: root_item.appendRows(new_items) + def get_index(self, project_name): + """ + Get index of 'project_name' value. + + Args: + project_name (str): + Returns: + (QModelIndex) + """ + val = self._items_by_name.get(project_name) + if val: + return self.indexFromItem(val) + class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): def __init__(self, *args, **kwargs): From f24925dfd28fe0e053fcdb25239341d6056b863e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 16 Sep 2022 14:21:52 +0200 Subject: [PATCH 1198/2550] resaved default settings to add missing values --- .../defaults/project_settings/blender.json | 32 +++++++++---------- .../defaults/project_settings/houdini.json | 3 +- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/openpype/settings/defaults/project_settings/blender.json b/openpype/settings/defaults/project_settings/blender.json index 2720e0286d..7acecfaae0 100644 --- a/openpype/settings/defaults/project_settings/blender.json +++ b/openpype/settings/defaults/project_settings/blender.json @@ -36,35 +36,35 @@ "layout" ] }, - "ExtractBlendAnimation": { - "enabled": true, - "optional": true, - "active": true - }, - "ExtractCamera": { - "enabled": true, - "optional": true, - "active": true - }, "ExtractFBX": { "enabled": true, "optional": true, "active": false }, - "ExtractAnimationFBX": { - "enabled": true, - "optional": true, - "active": false - }, "ExtractABC": { "enabled": true, "optional": true, "active": false }, + "ExtractBlendAnimation": { + "enabled": true, + "optional": true, + "active": true + }, + "ExtractAnimationFBX": { + "enabled": true, + "optional": true, + "active": false + }, + "ExtractCamera": { + "enabled": true, + "optional": true, + "active": true + }, "ExtractLayout": { "enabled": true, "optional": true, "active": false } } -} +} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index af0789ff8a..cdf829db57 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ 
b/openpype/settings/defaults/project_settings/houdini.json @@ -6,7 +6,8 @@ "windows": "", "darwin": "", "linux": "" - } + }, + "shelf_definition": [] } ], "create": { From 7331fd20691ba2a7047d98e7baa0e7c04ce750e6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 16 Sep 2022 14:28:04 +0200 Subject: [PATCH 1199/2550] formatting change Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/tools/traypublisher/window.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 2d32b5d6bf..56c5594638 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -115,8 +115,9 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): if project_name: index = self._projects_model.get_index(project_name) if index: - mode = QtCore.QItemSelectionModel.Select | \ - QtCore.QItemSelectionModel.Rows + mode = ( + QtCore.QItemSelectionModel.Select + | QtCore.QItemSelectionModel.Rows) self._projects_view.selectionModel().select(index, mode) self._cancel_btn.setVisible(self._project_name is not None) From ccab10b0d3fecc4e32711e5f1a783de433b54ce8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 14:43:13 +0200 Subject: [PATCH 1200/2550] Do not enforce maya/ folder --- .../publish/validate_rendersettings.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 08ecc0d149..4a67cb73e4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -16,10 +16,10 @@ from openpype.hosts.maya.api import lib class ValidateRenderSettings(pyblish.api.InstancePlugin): """Validates the global render settings - * File Name Prefix must start with: `maya/` + * File Name Prefix must start with: `` all other token are customizable but sane values for Arnold are: - `maya///_` + `//_` token is supported also, useful for multiple renderable cameras per render layer. 
@@ -58,12 +58,12 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): } ImagePrefixTokens = { - 'mentalray': 'maya///{aov_separator}', # noqa: E501 - 'arnold': 'maya///{aov_separator}', # noqa: E501 - 'redshift': 'maya///', - 'vray': 'maya///', + 'mentalray': '//{aov_separator}', # noqa: E501 + 'arnold': '//{aov_separator}', # noqa: E501 + 'redshift': '//', + 'vray': '//', 'renderman': '{aov_separator}..', - 'mayahardware2': 'maya///', + 'mayahardware2': '//', } _aov_chars = { @@ -74,7 +74,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): redshift_AOV_prefix = "/{aov_separator}" # noqa: E501 - renderman_dir_prefix = "maya//" + renderman_dir_prefix = "/" R_AOV_TOKEN = re.compile( r'%a||', re.IGNORECASE) @@ -84,8 +84,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): R_SCENE_TOKEN = re.compile(r'%s|', re.IGNORECASE) DEFAULT_PADDING = 4 - VRAY_PREFIX = "maya///" - DEFAULT_PREFIX = "maya///_" + VRAY_PREFIX = "//" + DEFAULT_PREFIX = "//_" def process(self, instance): @@ -116,7 +116,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): prefix = prefix.replace( "{aov_separator}", instance.data.get("aovSeparator", "_")) - required_prefix = "maya/" + required_prefix = "" default_prefix = cls.ImagePrefixTokens[renderer] if not anim_override: From 59cfca7508f493bb7afec85ded74bd8abd44e2e0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 14:44:19 +0200 Subject: [PATCH 1201/2550] Do not enforce maya/ folder --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 7cd2193086..21dd7f00c8 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -28,7 +28,7 @@ class RenderSettings(object): _image_prefixes = { 'vray': get_current_project_settings()["maya"]["RenderSettings"]["vray_renderer"]["image_prefix"], # noqa 'arnold': get_current_project_settings()["maya"]["RenderSettings"]["arnold_renderer"]["image_prefix"], # noqa - 'renderman': 'maya///{aov_separator}', + 'renderman': '//{aov_separator}', 'redshift': get_current_project_settings()["maya"]["RenderSettings"]["redshift_renderer"]["image_prefix"] # noqa } From 47d7b3044bff4827356ddde4f4c257ac87a9287c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 14:46:45 +0200 Subject: [PATCH 1202/2550] Remove maya/ from file prefixes in setting defaults --- openpype/settings/defaults/project_settings/maya.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 8643297f02..716e45a6e2 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -34,12 +34,12 @@ }, "RenderSettings": { "apply_render_settings": true, - "default_render_image_folder": "renders", + "default_render_image_folder": "renders/maya", "enable_all_lights": false, "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { - "image_prefix": "maya///_", + "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, "tiled": true, @@ -47,14 +47,14 @@ "additional_options": [] }, "vray_renderer": { - "image_prefix": "maya///", + "image_prefix": "//", "engine": "1", "image_format": "png", "aov_list": [], "additional_options": [] }, "redshift_renderer": { - "image_prefix": 
"maya///", + "image_prefix": "//", "primary_gi_engine": "0", "secondary_gi_engine": "0", "image_format": "iff", From ea5ae50982cc867f24c253e8c5dcb1df1612d30f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 14:47:28 +0200 Subject: [PATCH 1203/2550] Add maya/ folder by default into "images" file rule in workspace --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 716e45a6e2..a62d356162 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1,5 +1,5 @@ { - "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", + "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders/maya\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "ext_mapping": { "model": "ma", "mayaAscii": "ma", From b4b62ce7ba793dcd812ea6cc40adcdb79b687aac Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 14:48:56 +0200 Subject: [PATCH 1204/2550] Fix remainder of hardcoded maya/ folders in file prefixes --- openpype/hosts/maya/api/lib_renderproducts.py | 2 +- openpype/hosts/maya/plugins/publish/submit_maya_muster.py | 2 +- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 1e883ea43f..1ab771cfe6 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -80,7 +80,7 @@ IMAGE_PREFIXES = { "mayahardware2": "defaultRenderGlobals.imageFilePrefix" } -RENDERMAN_IMAGE_DIR = "maya//" +RENDERMAN_IMAGE_DIR = "/" def has_tokens(string, tokens): diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index c4250a20bd..01008b7756 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -118,7 +118,7 @@ def preview_fname(folder, scene, layer, padding, ext): """ # Following hardcoded "/_/" - output = "maya/{scene}/{layer}/{layer}.{number}.{ext}".format( + output = "{scene}/{layer}/{layer}.{number}.{ext}".format( scene=scene, layer=layer, number="#" * padding, diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 44f2b5b2b4..3e3e5c5b16 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -729,10 +729,10 @@ def _format_tiles( Example:: Image prefix 
is: - `maya///_` + `//_` Result for tile 0 for 4x4 will be: - `maya///_tile_1x1_4x4__` + `//_tile_1x1_4x4__` Calculating coordinates is tricky as in Job they are defined as top, left, bottom, right with zero being in top-left corner. But Assembler From 563515c0e63f41dbb2d2c01a7518017bf88e7602 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 16 Sep 2022 20:56:12 +0800 Subject: [PATCH 1205/2550] remove lockfile during publish --- openpype/hosts/maya/plugins/publish/save_scene.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/save_scene.py b/openpype/hosts/maya/plugins/publish/save_scene.py index 50a2f2112a..5a317f4b53 100644 --- a/openpype/hosts/maya/plugins/publish/save_scene.py +++ b/openpype/hosts/maya/plugins/publish/save_scene.py @@ -1,5 +1,9 @@ import pyblish.api - +from openpype.pipeline.workfile.lock_workfile import( + is_workfile_lock_enabled, + remove_workfile_lock +) +from openpype.pipeline import legacy_io class SaveCurrentScene(pyblish.api.ContextPlugin): """Save current scene @@ -23,5 +27,8 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): "are no modifications..") return + active_project = legacy_io.active_project() + if is_workfile_lock_enabled("maya", active_project): + remove_workfile_lock(current) self.log.info("Saving current file..") cmds.file(save=True, force=True) From c2d9ef859e7fbaa697e32ef869c69365e06b9e85 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 16 Sep 2022 21:00:03 +0800 Subject: [PATCH 1206/2550] remove lockfile during publish --- openpype/hosts/maya/plugins/publish/save_scene.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/save_scene.py b/openpype/hosts/maya/plugins/publish/save_scene.py index 5a317f4b53..99d486e545 100644 --- a/openpype/hosts/maya/plugins/publish/save_scene.py +++ b/openpype/hosts/maya/plugins/publish/save_scene.py @@ -1,4 +1,5 @@ import pyblish.api + from openpype.pipeline.workfile.lock_workfile import( is_workfile_lock_enabled, remove_workfile_lock @@ -28,6 +29,7 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): return active_project = legacy_io.active_project() + # remove lockfile before saving if is_workfile_lock_enabled("maya", active_project): remove_workfile_lock(current) self.log.info("Saving current file..") From 1049f22c4d5f43de8a5c81363ecf77ed10728baf Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 16 Sep 2022 21:01:45 +0800 Subject: [PATCH 1207/2550] remove lockfile during publish --- openpype/hosts/maya/plugins/publish/save_scene.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/save_scene.py b/openpype/hosts/maya/plugins/publish/save_scene.py index 99d486e545..33a297889a 100644 --- a/openpype/hosts/maya/plugins/publish/save_scene.py +++ b/openpype/hosts/maya/plugins/publish/save_scene.py @@ -1,6 +1,5 @@ import pyblish.api - -from openpype.pipeline.workfile.lock_workfile import( +from openpype.pipeline.workfile.lock_workfile import ( is_workfile_lock_enabled, remove_workfile_lock ) From 8fab407da2e203d8815d8c2005cd84285f316737 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 16 Sep 2022 21:12:22 +0800 Subject: [PATCH 1208/2550] remove lockfile during publish --- openpype/hosts/maya/plugins/publish/save_scene.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/save_scene.py b/openpype/hosts/maya/plugins/publish/save_scene.py index 33a297889a..15472dba96 100644 --- 
a/openpype/hosts/maya/plugins/publish/save_scene.py +++ b/openpype/hosts/maya/plugins/publish/save_scene.py @@ -26,10 +26,10 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): self.log.debug("Skipping file save as there " "are no modifications..") return - - active_project = legacy_io.active_project() + project_name = context.data["projectName"] + project_settings = context.data["project_settings"] # remove lockfile before saving - if is_workfile_lock_enabled("maya", active_project): + if is_workfile_lock_enabled("maya", project_name, project_settings): remove_workfile_lock(current) self.log.info("Saving current file..") cmds.file(save=True, force=True) From 42ad74b398c21aee19f5fa4d7f7710aaf4c4673d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 16 Sep 2022 21:13:14 +0800 Subject: [PATCH 1209/2550] remove lockfile during publish --- openpype/hosts/maya/plugins/publish/save_scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/save_scene.py b/openpype/hosts/maya/plugins/publish/save_scene.py index 15472dba96..45e62e7b44 100644 --- a/openpype/hosts/maya/plugins/publish/save_scene.py +++ b/openpype/hosts/maya/plugins/publish/save_scene.py @@ -3,7 +3,7 @@ from openpype.pipeline.workfile.lock_workfile import ( is_workfile_lock_enabled, remove_workfile_lock ) -from openpype.pipeline import legacy_io + class SaveCurrentScene(pyblish.api.ContextPlugin): """Save current scene From 508b17963d09bda307051d2483fd83753ca080b1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 15:41:16 +0200 Subject: [PATCH 1210/2550] Move Fusion OCIO settings out of anatomy into project settings --- .../defaults/project_anatomy/imageio.json | 10 ----- .../defaults/project_settings/fusion.json | 12 ++++++ .../schemas/projects_schema/schema_main.json | 4 ++ .../schema_project_fusion.json | 38 +++++++++++++++++++ .../schemas/schema_anatomy_imageio.json | 29 -------------- 5 files changed, 54 insertions(+), 39 deletions(-) create mode 100644 openpype/settings/defaults/project_settings/fusion.json create mode 100644 openpype/settings/entities/schemas/projects_schema/schema_project_fusion.json diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index 9b5e8639b1..f0be8f95f4 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -236,16 +236,6 @@ "viewTransform": "sRGB gamma" } }, - "fusion": { - "ocio": { - "enabled": false, - "configFilePath": { - "windows": [], - "darwin": [], - "linux": [] - } - } - }, "flame": { "project": { "colourPolicy": "ACES 1.1", diff --git a/openpype/settings/defaults/project_settings/fusion.json b/openpype/settings/defaults/project_settings/fusion.json new file mode 100644 index 0000000000..1b4c4c55b5 --- /dev/null +++ b/openpype/settings/defaults/project_settings/fusion.json @@ -0,0 +1,12 @@ +{ + "imageio": { + "ocio": { + "enabled": false, + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + } + } + } +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 80b1baad1b..0b9fbf7470 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -90,6 +90,10 @@ "type": "schema", "name": "schema_project_nuke" }, + { + "type": 
"schema", + "name": "schema_project_fusion" + }, { "type": "schema", "name": "schema_project_hiero" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_fusion.json b/openpype/settings/entities/schemas/projects_schema/schema_project_fusion.json new file mode 100644 index 0000000000..8f98a8173f --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_fusion.json @@ -0,0 +1,38 @@ +{ + "type": "dict", + "collapsible": true, + "key": "fusion", + "label": "Fusion", + "is_file": true, + "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "collapsible": true, + "children": [ + { + "key": "ocio", + "type": "dict", + "label": "OpenColorIO (OCIO)", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Set OCIO variable for Fusion" + }, + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + } + ] + } + ] + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 644463fece..ef8c907dda 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -427,35 +427,6 @@ } ] }, - - { - "key": "fusion", - "type": "dict", - "label": "Fusion", - "children": [ - { - "key": "ocio", - "type": "dict", - "label": "OCIO", - "collapsible": true, - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Set OCIO variable for Fusion" - }, - { - "type": "path", - "key": "configFilePath", - "label": "OCIO Config File Path", - "multiplatform": true, - "multipath": true - } - ] - } - ] - }, { "key": "flame", "type": "dict", From 7aa905898e6726e910c05be8db6514e5e656b8c1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 16 Sep 2022 15:43:11 +0200 Subject: [PATCH 1211/2550] Use new settings location --- openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py index f7c7bc0b4c..12fc640f5c 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py @@ -12,10 +12,10 @@ class FusionPreLaunchOCIO(PreLaunchHook): """Hook entry method.""" # get image io - project_anatomy = self.data["anatomy"] + project_settings = self.data["project_settings"] # make sure anatomy settings are having flame key - imageio_fusion = project_anatomy["imageio"].get("fusion") + imageio_fusion = project_settings.get("fusion", {}).get("imageio") if not imageio_fusion: raise ApplicationLaunchFailed(( "Anatomy project settings are missing `fusion` key. 
" From 7b8946e1298f8ec983d3247869e9ca7d3f3fbb9e Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Fri, 16 Sep 2022 17:51:53 +0200 Subject: [PATCH 1212/2550] get resolution from project --- .../modules/kitsu/utils/update_op_with_zou.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 4a064f6a16..4cd3ae957f 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -18,7 +18,7 @@ from openpype.client import ( create_project, ) from openpype.pipeline import AvalonMongoDB -from openpype.settings import get_project_settings +from openpype.settings import get_project_settings, get_anatomy_settings from openpype.modules.kitsu.utils.credentials import validate_credentials @@ -82,7 +82,7 @@ def update_op_assets( List[Dict[str, dict]]: List of (doc_id, update_dict) tuples """ project_name = project_doc["name"] - project_module_settings = get_project_settings(project_name)["kitsu"] + # project_module_settings = get_project_settings(project_name)["kitsu"] assets_with_update = [] for item in entities_list: @@ -230,7 +230,6 @@ def update_op_assets( }, ) ) - return assets_with_update @@ -263,13 +262,21 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: # Update Zou gazu.project.update_project(project) + project_attributes = get_anatomy_settings(project_name)['attributes'] + if "x" in project["resolution"]: + resolutionWidth = int(project["resolution"].split("x")[0]) + resolutionHeight = int(project["resolution"].split("x")[1]) + else: + resolutionWidth = project_attributes['resolutionWidth'] + resolutionHeight = project_attributes['resolutionHeight'] + # Update data project_data.update( { "code": project_code, "fps": float(project["fps"]), - "resolutionWidth": int(project["resolution"].split("x")[0]), - "resolutionHeight": int(project["resolution"].split("x")[1]), + "resolutionWidth": resolutionWidth, + "resolutionHeight": resolutionHeight, "zou_id": project["id"], } ) From 3161d3d8debb64bdd3c5705d7d4ab0c0c0a70cdc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 16 Sep 2022 19:13:31 +0200 Subject: [PATCH 1213/2550] use explicit float conversions for decimal calculations --- openpype/widgets/nice_checkbox.py | 48 ++++++++++++++++--------------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/openpype/widgets/nice_checkbox.py b/openpype/widgets/nice_checkbox.py index ccd079c0fb..56e6d2ac24 100644 --- a/openpype/widgets/nice_checkbox.py +++ b/openpype/widgets/nice_checkbox.py @@ -111,14 +111,14 @@ class NiceCheckbox(QtWidgets.QFrame): return QtCore.QSize(width, height) def get_width_hint_by_height(self, height): - return ( - height / self._base_size.height() - ) * self._base_size.width() + return int(( + float(height) / self._base_size.height() + ) * self._base_size.width()) def get_height_hint_by_width(self, width): - return ( - width / self._base_size.width() - ) * self._base_size.height() + return int(( + float(width) / self._base_size.width() + ) * self._base_size.height()) def setFixedHeight(self, *args, **kwargs): self._fixed_height_set = True @@ -321,7 +321,7 @@ class NiceCheckbox(QtWidgets.QFrame): bg_color = self.unchecked_bg_color else: - offset_ratio = self._current_step / self._steps + offset_ratio = float(self._current_step) / self._steps # Animation bg bg_color = self.steped_color( self.checked_bg_color, @@ -332,7 +332,8 @@ class 
NiceCheckbox(QtWidgets.QFrame): margins_ratio = self._checker_margins_divider if margins_ratio > 0: size_without_margins = int( - (frame_rect.height() / margins_ratio) * (margins_ratio - 2) + (float(frame_rect.height()) / margins_ratio) + * (margins_ratio - 2) ) size_without_margins -= size_without_margins % 2 margin_size_c = ceil( @@ -434,21 +435,21 @@ class NiceCheckbox(QtWidgets.QFrame): def _get_enabled_icon_path( self, painter, checker_rect, step=None, half_steps=None ): - fifteenth = checker_rect.height() / 15 + fifteenth = float(checker_rect.height()) / 15 # Left point p1 = QtCore.QPoint( - checker_rect.x() + (5 * fifteenth), - checker_rect.y() + (9 * fifteenth) + int(checker_rect.x() + (5 * fifteenth)), + int(checker_rect.y() + (9 * fifteenth)) ) # Middle bottom point p2 = QtCore.QPoint( checker_rect.center().x(), - checker_rect.y() + (11 * fifteenth) + int(checker_rect.y() + (11 * fifteenth)) ) # Top right point p3 = QtCore.QPoint( - checker_rect.x() + (10 * fifteenth), - checker_rect.y() + (5 * fifteenth) + int(checker_rect.x() + (10 * fifteenth)), + int(checker_rect.y() + (5 * fifteenth)) ) if step is not None: multiplier = (half_steps - step) @@ -458,16 +459,16 @@ class NiceCheckbox(QtWidgets.QFrame): p3c = p3 - checker_rect.center() p1o = QtCore.QPoint( - (p1c.x() / half_steps) * multiplier, - (p1c.y() / half_steps) * multiplier + int((float(p1c.x()) / half_steps) * multiplier), + int((float(p1c.y()) / half_steps) * multiplier) ) p2o = QtCore.QPoint( - (p2c.x() / half_steps) * multiplier, - (p2c.y() / half_steps) * multiplier + int((float(p2c.x()) / half_steps) * multiplier), + int((float(p2c.y()) / half_steps) * multiplier) ) p3o = QtCore.QPoint( - (p3c.x() / half_steps) * multiplier, - (p3c.y() / half_steps) * multiplier + int((float(p3c.x()) / half_steps) * multiplier), + int((float(p3c.y()) / half_steps) * multiplier) ) p1 -= p1o @@ -484,11 +485,12 @@ class NiceCheckbox(QtWidgets.QFrame): self, painter, checker_rect, step=None, half_steps=None ): center_point = QtCore.QPointF( - checker_rect.width() / 2, checker_rect.height() / 2 + float(checker_rect.width()) / 2, + float(checker_rect.height()) / 2 ) - offset = ( + offset = float(( (center_point + QtCore.QPointF(0, 0)) / 2 - ).x() / 4 * 5 + ).x()) / 4 * 5 if step is not None: diff = center_point.x() - offset diff_offset = (diff / half_steps) * (half_steps - step) From d1cc57b2e5aff77147ad8d9ba54e7bb3d4fe4b64 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 17 Sep 2022 04:11:42 +0000 Subject: [PATCH 1214/2550] [Automated] Bump version --- CHANGELOG.md | 45 +++++++++++++++++++++++---------------------- openpype/version.py | 2 +- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7d6b620d58..af347cadfe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,18 +1,39 @@ # Changelog -## [3.14.3-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.3-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...HEAD) **🚀 Enhancements** +- Github issues adding `running version` section [\#3864](https://github.com/pypeclub/OpenPype/pull/3864) +- Publisher: Increase size of main window [\#3862](https://github.com/pypeclub/OpenPype/pull/3862) +- Houdini: Increment current file on workfile publish [\#3840](https://github.com/pypeclub/OpenPype/pull/3840) - Publisher: Add new publisher to host tools [\#3833](https://github.com/pypeclub/OpenPype/pull/3833) +- General: lock task workfiles when they are working on 
[\#3810](https://github.com/pypeclub/OpenPype/pull/3810) - Maya: Workspace mel loaded from settings [\#3790](https://github.com/pypeclub/OpenPype/pull/3790) **🐛 Bug fixes** +- Settings: Add missing default settings [\#3870](https://github.com/pypeclub/OpenPype/pull/3870) +- General: Copy of workfile does not use 'copy' function but 'copyfile' [\#3869](https://github.com/pypeclub/OpenPype/pull/3869) +- Tray Publisher: skip plugin if otioTimeline is missing [\#3856](https://github.com/pypeclub/OpenPype/pull/3856) +- Maya: Extract Playblast fix textures + labelize viewport show settings [\#3852](https://github.com/pypeclub/OpenPype/pull/3852) - Ftrack: Url validation does not require ftrackapp [\#3834](https://github.com/pypeclub/OpenPype/pull/3834) - Maya+Ftrack: Change typo in family name `mayaascii` -\> `mayaAscii` [\#3820](https://github.com/pypeclub/OpenPype/pull/3820) +- Maya Deadline: Fix Tile Rendering by forcing integer pixel values [\#3758](https://github.com/pypeclub/OpenPype/pull/3758) + +**🔀 Refactored code** + +- Hiero: Use new Extractor location [\#3851](https://github.com/pypeclub/OpenPype/pull/3851) +- Maya: Remove old legacy \(ftrack\) plug-ins that are of no use anymore [\#3819](https://github.com/pypeclub/OpenPype/pull/3819) +- Nuke: Use new Extractor location [\#3799](https://github.com/pypeclub/OpenPype/pull/3799) +- Maya: Use new Extractor location [\#3775](https://github.com/pypeclub/OpenPype/pull/3775) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) + +**Merged pull requests:** + +- Remove lockfile during publish [\#3874](https://github.com/pypeclub/OpenPype/pull/3874) ## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) @@ -21,7 +42,6 @@ **🆕 New features** - Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) -- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) **🚀 Enhancements** @@ -57,12 +77,10 @@ - General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) -- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) - General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) -- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** @@ -74,17 +92,10 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.1-nightly.4...3.14.1) -### 📖 Documentation - -- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) - **🚀 Enhancements** - General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) - Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) -- General: Added helper getters to modules 
manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) -- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) -- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) **🐛 Bug fixes** @@ -94,10 +105,6 @@ - Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) - Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) - Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) -- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) -- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) -- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) -- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) **🔀 Refactored code** @@ -106,6 +113,7 @@ - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) +- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) - AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) - Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) @@ -114,17 +122,10 @@ - Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) - General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) - General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) -- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713) -- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) -- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) -- StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) -- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705) -- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702) **Merged pull requests:** - Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) -- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) diff --git a/openpype/version.py b/openpype/version.py index e8a65b04d2..a2335b696b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.3-nightly.1" +__version__ = "3.14.3-nightly.2" From ad4211656b7651b7eef42351aa13240add36a109 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 17 Sep 2022 10:52:36 +0200 Subject: [PATCH 1215/2550] Remove double setting of additional attributes for Arnold --- 
openpype/hosts/maya/api/lib_rendersettings.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 7cd2193086..67b66b8024 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -133,20 +133,7 @@ class RenderSettings(object): cmds.setAttr( "defaultArnoldDriver.mergeAOVs", multi_exr) - # Passes additional options in from the schema as a list - # but converts it to a dictionary because ftrack doesn't - # allow fullstops in custom attributes. Then checks for - # type of MtoA attribute passed to adjust the `setAttr` - # command accordingly. self._additional_attribs_setter(additional_options) - for item in additional_options: - attribute, value = item - if (cmds.getAttr(str(attribute), type=True)) == "long": - cmds.setAttr(str(attribute), int(value)) - elif (cmds.getAttr(str(attribute), type=True)) == "bool": - cmds.setAttr(str(attribute), int(value), type = "Boolean") # noqa - elif (cmds.getAttr(str(attribute), type=True)) == "string": - cmds.setAttr(str(attribute), str(value), type = "string") # noqa reset_frame_range() def _set_redshift_settings(self, width, height): From 2c8eaec2d7c93e53accfec87d828bf8648d38428 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 17 Sep 2022 11:04:12 +0200 Subject: [PATCH 1216/2550] Remove debug print statement --- openpype/hosts/maya/api/lib_rendersettings.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 67b66b8024..a62145e921 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -217,7 +217,6 @@ class RenderSettings(object): cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) def _additional_attribs_setter(self, additional_attribs): - print(additional_attribs) for item in additional_attribs: attribute, value = item if (cmds.getAttr(str(attribute), type=True)) == "long": From 0c71ec1d3840db418a4077efa1eddc57436f4d27 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 17 Sep 2022 13:18:04 +0200 Subject: [PATCH 1217/2550] Tweak readability, log error on unsupported attribute type --- openpype/hosts/maya/api/lib_rendersettings.py | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index a62145e921..0618420b2b 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -5,6 +5,7 @@ import maya.mel as mel import six import sys +from openpype.lib import Logger from openpype.api import ( get_project_settings, get_current_project_settings @@ -38,6 +39,8 @@ class RenderSettings(object): "underscore": "_" } + log = Logger.get_logger("RenderSettings") + @classmethod def get_image_prefix_attr(cls, renderer): return cls._image_prefix_nodes[renderer] @@ -219,9 +222,16 @@ class RenderSettings(object): def _additional_attribs_setter(self, additional_attribs): for item in additional_attribs: attribute, value = item - if (cmds.getAttr(str(attribute), type=True)) == "long": - cmds.setAttr(str(attribute), int(value)) - elif (cmds.getAttr(str(attribute), type=True)) == "bool": - cmds.setAttr(str(attribute), int(value)) # noqa - elif (cmds.getAttr(str(attribute), type=True)) == "string": - cmds.setAttr(str(attribute), str(value), type = "string") # noqa + 
attribute = str(attribute) # ensure str conversion from settings + attribute_type = cmds.getAttr(attribute, type=True) + if attribute_type in {"long", "bool"}: + cmds.setAttr(attribute, int(value)) + elif attribute_type == "string": + cmds.setAttr(attribute, str(value), type="string") + else: + self.log.error( + "Attribute {attribute} can not be set due to unsupported " + "type: {attribute_type}".format( + attribute=attribute, + attribute_type=attribute_type) + ) From 83922a87cd08630b549cb6f0220aaacfcb5623ec Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 17 Sep 2022 13:19:37 +0200 Subject: [PATCH 1218/2550] Add double attribute support (float) --- openpype/hosts/maya/api/lib_rendersettings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 0618420b2b..777a6ffbc9 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -228,6 +228,8 @@ class RenderSettings(object): cmds.setAttr(attribute, int(value)) elif attribute_type == "string": cmds.setAttr(attribute, str(value), type="string") + elif attribute_type in {"double", "doubleAngle", "doubleLinear"}: + cmds.setAttr(attribute, float(value)) else: self.log.error( "Attribute {attribute} can not be set due to unsupported " From b36e1ded7d24c74b0601d0c65567f4674d6bf212 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 17 Sep 2022 13:29:38 +0200 Subject: [PATCH 1219/2550] Set default image format for V-Ray+Redshift to exr instead of png and iff --- openpype/settings/defaults/project_settings/maya.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 8643297f02..76ef0a7338 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -49,7 +49,7 @@ "vray_renderer": { "image_prefix": "maya///", "engine": "1", - "image_format": "png", + "image_format": "exr", "aov_list": [], "additional_options": [] }, @@ -57,7 +57,7 @@ "image_prefix": "maya///", "primary_gi_engine": "0", "secondary_gi_engine": "0", - "image_format": "iff", + "image_format": "exr", "multilayer_exr": true, "force_combine": true, "aov_list": [], From b48ad01e4c6ada98a96f048d93615fa072190b86 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 10:48:44 +0200 Subject: [PATCH 1220/2550] Changed function name Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/tools/utils/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 2d917fcc49..d072ff297d 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -330,7 +330,7 @@ class ProjectModel(QtGui.QStandardItemModel): if new_items: root_item.appendRows(new_items) - def get_index(self, project_name): + def find_project(self, project_name): """ Get index of 'project_name' value. 
From ae7f7ebabbd69352fab23f7bc9757c90c80881b5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 10:54:23 +0200 Subject: [PATCH 1221/2550] OP-3953 - added missing mapping to proxy model --- openpype/tools/traypublisher/window.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 56c5594638..0c99a55998 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -113,7 +113,10 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): setting_registry = TrayPublisherRegistry() project_name = setting_registry.get_item("project_name") if project_name: - index = self._projects_model.get_index(project_name) + index = None + src_index = self._projects_model.find_project(project_name) + if src_index is not None: + index = self._projects_proxy.mapFromSource(src_index) if index: mode = ( QtCore.QItemSelectionModel.Select From 94aa11f1167e90719ef9426bfae78492b7491366 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 10:57:47 +0200 Subject: [PATCH 1222/2550] OP-3953 - added missing proxy model variable --- openpype/tools/traypublisher/window.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 0c99a55998..ca8c0758d6 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -101,6 +101,7 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): self._projects_view = projects_view self._projects_model = projects_model + self._projects_proxy = projects_proxy self._cancel_btn = cancel_btn self._confirm_btn = confirm_btn From f711b529b9693d73d08d696acba077bf45ba3f77 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 11:01:52 +0200 Subject: [PATCH 1223/2550] OP-3952 - safer resolving of search pattern --- openpype/tools/utils/models.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 817d9c0944..6663ca1b0a 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -361,7 +361,9 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): result = self._custom_index_filter(index) if result is not None: project_name = index.data(PROJECT_NAME_ROLE) - return string_pattern in project_name + if project_name is None: + return result + return string_pattern.lower() in project_name.lower() return super(ProjectSortFilterProxy, self).filterAcceptsRow( source_row, source_parent From 31beb5b6b15a4c7b110c21ebafc71eb3163b004b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 11:03:15 +0200 Subject: [PATCH 1224/2550] OP-3952 - sort projects after refresh --- openpype/tools/traypublisher/window.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 6c17c66016..b134e8ab86 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -103,6 +103,8 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): def showEvent(self, event): self._projects_model.refresh() + # Sort projects after refresh + self._projects_proxy.sort(0) self._cancel_btn.setVisible(self._project_name is not None) super(StandaloneOverlayWidget, self).showEvent(event) From a5b2d8c6bd8a6c6ee6ed636deb3d823d9103aabb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 11:04:47 +0200 Subject: [PATCH 
1225/2550] OP-3952 - set sorting as case insensitive --- openpype/tools/utils/models.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 6663ca1b0a..f31a56b2f4 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -335,6 +335,9 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): def __init__(self, *args, **kwargs): super(ProjectSortFilterProxy, self).__init__(*args, **kwargs) self._filter_enabled = True + # Disable case sensitivity + self.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) + self._filter_enabled = True def lessThan(self, left_index, right_index): if left_index.data(PROJECT_NAME_ROLE) is None: From 222c64b0c68f6dbde3ac8b6bdf33db51732449e4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 11:45:58 +0200 Subject: [PATCH 1226/2550] OP-3953 - handle missing registry json file --- openpype/tools/traypublisher/window.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index ca8c0758d6..f3ed01b151 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -112,7 +112,11 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): self._projects_model.refresh() setting_registry = TrayPublisherRegistry() - project_name = setting_registry.get_item("project_name") + try: + project_name = setting_registry.get_item("project_name") + except ValueError: + project_name = None + if project_name: index = None src_index = self._projects_model.find_project(project_name) From 091498e13017cc96d0638f34fb03b0f0caee59e8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 11:57:29 +0200 Subject: [PATCH 1227/2550] OP-3952 - remove duplicated field Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/tools/utils/models.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index f31a56b2f4..cff5238a69 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -337,7 +337,6 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): self._filter_enabled = True # Disable case sensitivity self.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) - self._filter_enabled = True def lessThan(self, left_index, right_index): if left_index.data(PROJECT_NAME_ROLE) is None: From 8dc0f6f011c17c455f62269768fe0c728f76631b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 19 Sep 2022 13:15:27 +0200 Subject: [PATCH 1228/2550] change return value of 'rename_filepaths_by_frame_start' when frame start is same as mark in --- openpype/hosts/tvpaint/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index c67ab1e4fb..bf47e725cb 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -648,7 +648,7 @@ def rename_filepaths_by_frame_start( """Change frames in filenames of finished images to new frame start.""" # Skip if source first frame is same as destination first frame if range_start == new_frame_start: - return + return {} # Calculate frame end new_frame_end = range_end + (new_frame_start - range_start) From 2dba044e48c5670fffe0562754dc108212d2e193 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 13:49:01 +0200 Subject: [PATCH 1229/2550] OP-3943 - added publish field to Settings for PS CollectReview Customer 
wants to configure if review should be published by default for Photoshop. --- .../defaults/project_settings/photoshop.json | 3 +++ .../projects_schema/schema_project_photoshop.json | 13 +++++++++++++ 2 files changed, 16 insertions(+) diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index 552c2c9cad..3477d185a6 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -15,6 +15,9 @@ "CollectInstances": { "flatten_subset_template": "" }, + "CollectReview": { + "publish": true + }, "ValidateContainers": { "enabled": true, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 7aa49c99a4..7294ba8608 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -131,6 +131,19 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectReview", + "label": "Collect Review", + "children": [ + { + "type": "boolean", + "key": "publish", + "label": "Publish review" + } + ] + }, { "type": "schema_template", "name": "template_publish_plugin", From 0d4549f7bf5af630eb1a6072471f0cd543e52ddb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 13:50:07 +0200 Subject: [PATCH 1230/2550] OP-3943 - added publish field to PS CollectReview Customer wants to configure if review should be published by default for Photoshop. Default could be set in Settings, artist might decide to override it for particular publish. --- openpype/hosts/photoshop/plugins/publish/collect_review.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 7f395b46d7..7e598a8250 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -25,6 +25,8 @@ class CollectReview(pyblish.api.ContextPlugin): hosts = ["photoshop"] order = pyblish.api.CollectorOrder + 0.1 + publish = True + def process(self, context): family = "review" subset = get_subset_name( @@ -45,5 +47,6 @@ class CollectReview(pyblish.api.ContextPlugin): "family": family, "families": [], "representations": [], - "asset": os.environ["AVALON_ASSET"] + "asset": os.environ["AVALON_ASSET"], + "publish": self.publish }) From bd3f6acb0275ee90b3107f8ccdf4b9e8a6e2770b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 14:35:44 +0200 Subject: [PATCH 1231/2550] Refactor `HOST_DIR` to `FUSION_HOST_DIR` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Jakub Ježek --- openpype/hosts/fusion/hooks/pre_fusion_setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index ec5889a88a..a0796de10a 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,6 +1,6 @@ import os from openpype.lib import PreLaunchHook, ApplicationLaunchFailed -from openpype.hosts.fusion import HOST_DIR +from openpype.hosts.fusion import FUSION_HOST_DIR class FusionPrelaunch(PreLaunchHook): @@ -42,8 
+42,8 @@ class FusionPrelaunch(PreLaunchHook): # Add our Fusion Master Prefs which is the only way to customize # Fusion to define where it can read custom scripts and tools from - self.log.info(f"Setting OPENPYPE_FUSION: {HOST_DIR}") - self.launch_context.env["OPENPYPE_FUSION"] = HOST_DIR + self.log.info(f"Setting OPENPYPE_FUSION: {FUSION_HOST_DIR}") + self.launch_context.env["OPENPYPE_FUSION"] = FUSION_HOST_DIR pref_var = "FUSION16_MasterPrefs" # used by both Fu16 and Fu17 prefs = os.path.join(HOST_DIR, "deploy", "fusion_shared.prefs") From daf29122e6f7be1b6cc4d3171423dc61764d726d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 14:36:36 +0200 Subject: [PATCH 1232/2550] Refactor `HOST_DIR` to `FUSION_HOST_DIR` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Jakub Ježek --- openpype/hosts/fusion/hooks/pre_fusion_setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index a0796de10a..0ba7e92bb1 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -46,6 +46,6 @@ class FusionPrelaunch(PreLaunchHook): self.launch_context.env["OPENPYPE_FUSION"] = FUSION_HOST_DIR pref_var = "FUSION16_MasterPrefs" # used by both Fu16 and Fu17 - prefs = os.path.join(HOST_DIR, "deploy", "fusion_shared.prefs") + prefs = os.path.join(FUSION_HOST_DIR, "deploy", "fusion_shared.prefs") self.log.info(f"Setting {pref_var}: {prefs}") self.launch_context.env[pref_var] = prefs From 5694cff114a3f1f3a42461d72bb03cff9521bb58 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 14:47:45 +0200 Subject: [PATCH 1233/2550] Use `FUSION_PYTHON3_HOME` instead of `FUSION16_PYTHON36_HOME` as input variable --- .../hosts/fusion/hooks/pre_fusion_setup.py | 56 +++++++++++-------- 1 file changed, 33 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index 0ba7e92bb1..d043d54322 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -4,48 +4,58 @@ from openpype.hosts.fusion import FUSION_HOST_DIR class FusionPrelaunch(PreLaunchHook): - """ - This hook will check if current workfile path has Fusion - project inside. + """Prepares OpenPype Fusion environment + + Requires FUSION_PYTHON3_HOME to be defined in the environment for Fusion + to point at a valid Python 3 build for Fusion. That is Python 3.3-3.10 + for Fusion 18 and Fusion 3.6 for Fusion 16 and 17. + + This also sets FUSION16_MasterPrefs to apply the fusion master prefs + as set in openpype/hosts/fusion/deploy/fusion_shared.prefs to enable + the OpenPype menu and force Python 3 over Python 2. 
+ """ app_groups = ["fusion"] def execute(self): - # making sure python 3.6 is installed at provided path - py36_var = "FUSION16_PYTHON36_HOME" - fusion_python36_home = self.launch_context.env.get(py36_var, "") + # making sure python 3 is installed at provided path + # Py 3.3-3.10 for Fusion 18+ or Py 3.6 for Fu 16-17 + py3_var = "FUSION_PYTHON3_HOME" + fusion_python3_home = self.launch_context.env.get(py3_var, "") - self.log.info(f"Looking for Python 3.6 in: {fusion_python36_home}") - for path in fusion_python36_home.split(os.pathsep): + self.log.info(f"Looking for Python 3 in: {fusion_python3_home}") + for path in fusion_python3_home.split(os.pathsep): # Allow defining multiple paths to allow "fallback" to other # path. But make to set only a single path as final variable. - py36_dir = os.path.normpath(path) - if os.path.isdir(py36_dir): + py3_dir = os.path.normpath(path) + if os.path.isdir(py3_dir): break else: raise ApplicationLaunchFailed( - "Python 3.6 is not installed at the provided path.\n" - "Either make sure the environments in fusion settings has" - " 'PYTHON36' set corectly or make sure Python 3.6 is installed" - f" in the given path.\n\nPYTHON36: {fusion_python36_home}" + "Python 3 is not installed at the provided path.\n" + "Make sure the environment in fusion settings has " + "'FUSION_PYTHON3_HOME' set correctly and make sure " + "Python 3 is installed in the given path." + f"\n\nPYTHON36: {fusion_python3_home}" ) - self.log.info(f"Setting {py36_var}: '{py36_dir}'...") - self.launch_context.env[py36_var] = py36_dir + self.log.info(f"Setting {py3_var}: '{py3_dir}'...") + self.launch_context.env[py3_var] = py3_dir - # TODO: Set this for EITHER Fu16-17 OR Fu18+, don't do both - # Fusion 18+ does not look in FUSION16_PYTHON36_HOME anymore - # but instead uses FUSION_PYTHON3_HOME and requires the Python to - # be available on PATH to work. So let's enforce that for now. 
- self.launch_context.env["FUSION_PYTHON3_HOME"] = py36_dir - self.launch_context.env["PATH"] += ";" + py36_dir + # Fusion 18+ requires FUSION_PYTHON3_HOME to also be on PATH + self.launch_context.env["PATH"] += ";" + py3_dir + + # Fusion 16 and 17 use FUSION16_PYTHON36_HOME instead of + # FUSION_PYTHON3_HOME and will only work with a Python 3.6 version + # TODO: Detect Fusion version to only set for specific Fusion build + self.launch_context.env["FUSION16_PYTHON36_HOME"] = py3_dir # Add our Fusion Master Prefs which is the only way to customize # Fusion to define where it can read custom scripts and tools from self.log.info(f"Setting OPENPYPE_FUSION: {FUSION_HOST_DIR}") self.launch_context.env["OPENPYPE_FUSION"] = FUSION_HOST_DIR - pref_var = "FUSION16_MasterPrefs" # used by both Fu16 and Fu17 + pref_var = "FUSION16_MasterPrefs" # used by Fusion 16, 17 and 18 prefs = os.path.join(FUSION_HOST_DIR, "deploy", "fusion_shared.prefs") self.log.info(f"Setting {pref_var}: {prefs}") self.launch_context.env[pref_var] = prefs From c965b35549ebcde4b88380f1320376f52781a946 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 15:55:10 +0200 Subject: [PATCH 1234/2550] Fix typo --- openpype/hosts/fusion/api/lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index dcf205ff6a..314acb7e78 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -70,8 +70,8 @@ def set_framerange(): asset_doc = get_current_project_asset() start = asset_doc["data"]["frameStart"] end = asset_doc["data"]["frameEnd"] - handle_start = asset_doc["data"]["handleStart"], - handle_end = asset_doc["data"]["handleEnd"], + handle_start = asset_doc["data"]["handleStart"] + handle_end = asset_doc["data"]["handleEnd"] update_frame_range(start, end, set_render_range=True, handle_start=handle_start, handle_end=handle_end) From 4154f76830f2bb96abccf8090ebf9b43c943ff6c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 18:03:06 +0200 Subject: [PATCH 1235/2550] Implement Set Asset Resolution --- openpype/hosts/fusion/api/lib.py | 15 +++++++++++++++ openpype/hosts/fusion/api/menu.py | 6 ++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 314acb7e78..b7e72e4c7a 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -77,6 +77,21 @@ def set_framerange(): handle_end=handle_end) +def set_resolution(): + """Set Comp's defaults""" + asset_doc = get_current_project_asset() + width = asset_doc["data"]["resolutionWidth"] + height = asset_doc["data"]["resolutionHeight"] + comp = get_current_comp() + + print("Setting comp frame format resolution to {}x{}".format(width, + height)) + comp.SetPrefs({ + "Comp.FrameFormat.Width": width, + "Comp.FrameFormat.Height": height, + }) + + def get_additional_data(container): """Get Fusion related data for the container diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index bba94053a2..84f680a918 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -9,8 +9,9 @@ from openpype.hosts.fusion.scripts import ( set_rendermode, duplicate_with_inputs ) -from openpype.hosts.fusion.api import ( - set_framerange +from openpype.hosts.fusion.api.lib import ( + set_framerange, + set_resolution ) from openpype.pipeline import legacy_io @@ -185,6 +186,7 @@ class 
OpenPypeMenu(QtWidgets.QWidget): def on_set_resolution_clicked(self): print("Clicked Reset Resolution") + set_resolution() def on_set_framerange_clicked(self): print("Clicked Reset Framerange") From 830ccf0438947f6c2891611293e0f26b76874411 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 18:04:58 +0200 Subject: [PATCH 1236/2550] Clarify function names --- openpype/hosts/fusion/api/__init__.py | 4 ++-- openpype/hosts/fusion/api/lib.py | 4 ++-- openpype/hosts/fusion/api/menu.py | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/fusion/api/__init__.py b/openpype/hosts/fusion/api/__init__.py index 78afabdb45..e5a7dac8c8 100644 --- a/openpype/hosts/fusion/api/__init__.py +++ b/openpype/hosts/fusion/api/__init__.py @@ -24,7 +24,7 @@ from .lib import ( maintained_selection, get_additional_data, update_frame_range, - set_framerange + set_asset_framerange ) from .menu import launch_openpype_menu @@ -54,7 +54,7 @@ __all__ = [ "maintained_selection", "get_additional_data", "update_frame_range", - "set_framerange", + "set_asset_framerange", # menu "launch_openpype_menu", diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index b7e72e4c7a..85afc11e1c 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -66,7 +66,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True, comp.SetAttrs(attrs) -def set_framerange(): +def set_asset_framerange(): asset_doc = get_current_project_asset() start = asset_doc["data"]["frameStart"] end = asset_doc["data"]["frameEnd"] @@ -77,7 +77,7 @@ def set_framerange(): handle_end=handle_end) -def set_resolution(): +def set_asset_resolution(): """Set Comp's defaults""" asset_doc = get_current_project_asset() width = asset_doc["data"]["resolutionWidth"] diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 84f680a918..4819fd6c7c 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -10,8 +10,8 @@ from openpype.hosts.fusion.scripts import ( duplicate_with_inputs ) from openpype.hosts.fusion.api.lib import ( - set_framerange, - set_resolution + set_asset_framerange, + set_asset_resolution ) from openpype.pipeline import legacy_io @@ -186,11 +186,11 @@ class OpenPypeMenu(QtWidgets.QWidget): def on_set_resolution_clicked(self): print("Clicked Reset Resolution") - set_resolution() + set_asset_resolution() def on_set_framerange_clicked(self): print("Clicked Reset Framerange") - set_framerange() + set_asset_framerange() def launch_openpype_menu(): From 863265fb41cbb7aec7111833658fd21c4f38af23 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 18:06:22 +0200 Subject: [PATCH 1237/2550] Set `OPENPYPE_LOG_NO_COLORS` in addon --- openpype/hosts/fusion/addon.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py index e257005061..1913cc2e30 100644 --- a/openpype/hosts/fusion/addon.py +++ b/openpype/hosts/fusion/addon.py @@ -19,5 +19,14 @@ class FusionAddon(OpenPypeModule, IHostAddon): os.path.join(FUSION_HOST_DIR, "hooks") ] + def add_implementation_envs(self, env, _app): + # Set default values if are not already set via settings + defaults = { + "OPENPYPE_LOG_NO_COLORS": "Yes" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + def get_workfile_extensions(self): return [".comp"] From df330e1002420a51b87117526f5c7a910f152279 Mon Sep 17 00:00:00 2001 From: Roy Nieterau 
Date: Mon, 19 Sep 2022 18:07:24 +0200 Subject: [PATCH 1238/2550] Update defaults for Fusion environment --- .../system_settings/applications.json | 21 ++----------------- 1 file changed, 2 insertions(+), 19 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 4a3e0c1b94..c37c3d299e 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -706,28 +706,11 @@ "icon": "{}/app_icons/fusion.png", "host_name": "fusion", "environment": { - "FUSION_UTILITY_SCRIPTS_SOURCE_DIR": [], - "FUSION_UTILITY_SCRIPTS_DIR": { - "windows": "{PROGRAMDATA}/Blackmagic Design/Fusion/Scripts/Comp", - "darwin": "/Library/Application Support/Blackmagic Design/Fusion/Scripts/Comp", - "linux": "/opt/Fusion/Scripts/Comp" - }, - "PYTHON36": { + "FUSION_PYTHON3_HOME": { "windows": "{LOCALAPPDATA}/Programs/Python/Python36", "darwin": "~/Library/Python/3.6/bin", "linux": "/opt/Python/3.6/bin" - }, - "PYTHONPATH": [ - "{PYTHON36}/Lib/site-packages", - "{VIRTUAL_ENV}/Lib/site-packages", - "{PYTHONPATH}" - ], - "PATH": [ - "{PYTHON36}", - "{PYTHON36}/Scripts", - "{PATH}" - ], - "OPENPYPE_LOG_NO_COLORS": "Yes" + } }, "variants": { "18": { From c725c6affbcf474a7b9f9430e3d26933898ebb1d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 19 Sep 2022 18:12:41 +0200 Subject: [PATCH 1239/2550] Tweak docstrings --- openpype/hosts/fusion/api/lib.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 85afc11e1c..91a74ac848 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -67,6 +67,7 @@ def update_frame_range(start, end, comp=None, set_render_range=True, def set_asset_framerange(): + """Set Comp's frame range based on current asset""" asset_doc = get_current_project_asset() start = asset_doc["data"]["frameStart"] end = asset_doc["data"]["frameEnd"] @@ -78,7 +79,7 @@ def set_asset_framerange(): def set_asset_resolution(): - """Set Comp's defaults""" + """Set Comp's resolution width x height default based on current asset""" asset_doc = get_current_project_asset() width = asset_doc["data"]["resolutionWidth"] height = asset_doc["data"]["resolutionHeight"] From 56dcb25dc056b892af0939236d18abda6967ee96 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Sep 2022 18:23:08 +0200 Subject: [PATCH 1240/2550] Updated args to maketx Better logging. --- .../maya/plugins/publish/extract_look.py | 54 ++++++++----------- 1 file changed, 23 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 91b0da75c6..dd705324b9 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -13,7 +13,7 @@ from maya import cmds # noqa import pyblish.api -from openpype.lib import source_hash +from openpype.lib import source_hash, run_subprocess from openpype.pipeline import legacy_io, publish from openpype.hosts.maya.api import lib @@ -68,7 +68,7 @@ def find_paths_by_hash(texture_hash): return legacy_io.distinct(key, {"type": "version"}) -def maketx(source, destination, *args): +def maketx(source, destination, args, logger): """Make `.tx` using `maketx` with some default settings. 
The settings are based on default as used in Arnold's @@ -79,7 +79,8 @@ def maketx(source, destination, *args): Args: source (str): Path to source file. destination (str): Writing destination path. - *args: Additional arguments for `maketx`. + args: Additional arguments for `maketx`. + logger Returns: str: Output of `maketx` command. @@ -94,7 +95,7 @@ def maketx(source, destination, *args): "OIIO tool not found in {}".format(maketx_path)) raise AssertionError("OIIO tool not found") - cmd = [ + subprocess_args = [ maketx_path, "-v", # verbose "-u", # update mode @@ -103,27 +104,20 @@ def maketx(source, destination, *args): "--checknan", # use oiio-optimized settings for tile-size, planarconfig, metadata "--oiio", - "--filter lanczos3", - escape_space(source) + "--filter", "lanczos3", + source ] - cmd.extend(args) - cmd.extend(["-o", escape_space(destination)]) + subprocess_args.extend(args) + subprocess_args.extend(["-o", destination]) - cmd = " ".join(cmd) + cmd = " ".join(subprocess_args) + logger.debug(cmd) - CREATE_NO_WINDOW = 0x08000000 # noqa - kwargs = dict(args=cmd, stderr=subprocess.STDOUT) - - if sys.platform == "win32": - kwargs["creationflags"] = CREATE_NO_WINDOW try: - out = subprocess.check_output(**kwargs) - except subprocess.CalledProcessError as exc: - print(exc) - import traceback - - traceback.print_exc() + out = run_subprocess(subprocess_args) + except Exception: + logger.error("Maketx converion failed", exc_info=True) raise return out @@ -524,15 +518,17 @@ class ExtractLook(publish.Extractor): if do_maketx and ext != ".tx": # Produce .tx file in staging if source file is not .tx converted = os.path.join(staging, "resources", fname + ".tx") - + additional_args = [ + "--sattrib", + "sourceHash", + texture_hash + ] if linearize: self.log.info("tx: converting sRGB -> linear") - colorconvert = "--colorconvert sRGB linear" - else: - colorconvert = "" + additional_args.extend(["--colorconvert", "sRGB", "linear"]) config_path = get_ocio_config_path("nuke-default") - color_config = "--colorconfig {0}".format(config_path) + additional_args.extend(["--colorconfig", config_path]) # Ensure folder exists if not os.path.exists(os.path.dirname(converted)): os.makedirs(os.path.dirname(converted)) @@ -541,12 +537,8 @@ class ExtractLook(publish.Extractor): maketx( filepath, converted, - # Include `source-hash` as string metadata - "--sattrib", - "sourceHash", - escape_space(texture_hash), - colorconvert, - color_config + additional_args, + self.log ) return converted, COPY, texture_hash From b3bedc7ce72bfaceb0f7072200b101bbef45b5b7 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Mon, 19 Sep 2022 18:37:17 +0200 Subject: [PATCH 1241/2550] remove comment --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 4cd3ae957f..fb6e9bacae 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -82,7 +82,7 @@ def update_op_assets( List[Dict[str, dict]]: List of (doc_id, update_dict) tuples """ project_name = project_doc["name"] - # project_module_settings = get_project_settings(project_name)["kitsu"] + project_module_settings = get_project_settings(project_name)["kitsu"] assets_with_update = [] for item in entities_list: From 9a9c29c70c893401cbec02259181937662f28c4e Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Mon, 19 Sep 2022 18:48:27 +0200 
Subject: [PATCH 1242/2550] remove unecessary code --- .../modules/kitsu/utils/update_op_with_zou.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index fb6e9bacae..d4ced9dab2 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -18,7 +18,7 @@ from openpype.client import ( create_project, ) from openpype.pipeline import AvalonMongoDB -from openpype.settings import get_project_settings, get_anatomy_settings +from openpype.settings import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials @@ -262,25 +262,20 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: # Update Zou gazu.project.update_project(project) - project_attributes = get_anatomy_settings(project_name)['attributes'] - if "x" in project["resolution"]: - resolutionWidth = int(project["resolution"].split("x")[0]) - resolutionHeight = int(project["resolution"].split("x")[1]) - else: - resolutionWidth = project_attributes['resolutionWidth'] - resolutionHeight = project_attributes['resolutionHeight'] - # Update data project_data.update( { "code": project_code, "fps": float(project["fps"]), - "resolutionWidth": resolutionWidth, - "resolutionHeight": resolutionHeight, "zou_id": project["id"], } ) + proj_res = project["resolution"] + if "x" in proj_res: + project_data['resolutionWidth'] = int(proj_res.split("x")[0]) + project_data['resolutionHeight'] = int(proj_res.split("x")[1]) + return UpdateOne( {"_id": project_doc["_id"]}, { From 1f035a1eee956d56b55e86dd97f5090fdab81894 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 01:24:24 +0200 Subject: [PATCH 1243/2550] Store link to menu so we can get access to it elsewhere --- openpype/hosts/fusion/api/menu.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 4819fd6c7c..cf3dea8ec3 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -17,6 +17,9 @@ from openpype.pipeline import legacy_io from .pulse import FusionPulse +self = sys.modules[__name__] +self.menu = None + class Spacer(QtWidgets.QWidget): def __init__(self, height, *args, **kwargs): @@ -202,6 +205,7 @@ def launch_openpype_menu(): pype_menu.setStyleSheet(stylesheet) pype_menu.show() + self.menu = pype_menu result = app.exec_() print("Shutting down..") From 23b6a35266ef14f8bccbe8edfc1551ce63a0f0b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 01:26:20 +0200 Subject: [PATCH 1244/2550] Add after open callback to show popup about outdated containers --- openpype/hosts/fusion/api/pipeline.py | 39 ++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 7f9a57dc0f..b5d461e7f0 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -8,7 +8,10 @@ import contextlib import pyblish.api -from openpype.lib import Logger +from openpype.lib import ( + Logger, + register_event_callback +) from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -18,6 +21,7 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.load import any_outdated_containers from 
openpype.hosts.fusion import FUSION_HOST_DIR log = Logger.get_logger(__name__) @@ -77,6 +81,11 @@ def install(): "instanceToggled", on_pyblish_instance_toggled ) + # Fusion integration currently does not attach to direct callbacks of + # the application. So we use workfile callbacks to allow similar behavior + # on save and open + register_event_callback("workfile.open.after", on_after_open) + def uninstall(): """Uninstall all that was installed @@ -125,6 +134,34 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): tool.SetAttrs({"TOOLB_PassThrough": passthrough}) +def on_after_open(_event): + + if any_outdated_containers(): + log.warning("Scene has outdated content.") + + # Find OpenPype menu to attach to + from . import menu + + comp = get_current_comp() + + def _on_show_scene_inventory(): + comp.CurrentFrame.ActivateFrame() # ensure that comp is active + host_tools.show_scene_inventory() + + from openpype.widgets import popup + from openpype.style import load_stylesheet + dialog = popup.Popup(parent=menu.menu) + dialog.setWindowTitle("Fusion comp has outdated content") + dialog.setMessage("There are outdated containers in " + "your Fusion comp.") + dialog.on_clicked.connect(_on_show_scene_inventory) + + dialog.show() + dialog.raise_() + dialog.activateWindow() + dialog.setStyleSheet(load_stylesheet()) + + def ls(): """List containers from active Fusion scene From 30ee358fc1e7b127981fcf4c1e897e7dc65a0de7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 01:29:01 +0200 Subject: [PATCH 1245/2550] Move `get_current_comp` and `comp_lock_and_undo_chunk` to `lib` + fix import of host tools --- openpype/hosts/fusion/api/__init__.py | 14 ++++++-------- openpype/hosts/fusion/api/lib.py | 20 ++++++++++++++++++-- openpype/hosts/fusion/api/pipeline.py | 22 ++++++---------------- openpype/hosts/fusion/api/workio.py | 2 +- 4 files changed, 31 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/fusion/api/__init__.py b/openpype/hosts/fusion/api/__init__.py index e5a7dac8c8..45ed4e12a3 100644 --- a/openpype/hosts/fusion/api/__init__.py +++ b/openpype/hosts/fusion/api/__init__.py @@ -5,10 +5,7 @@ from .pipeline import ( ls, imprint_container, - parse_container, - - get_current_comp, - comp_lock_and_undo_chunk + parse_container ) from .workio import ( @@ -24,7 +21,9 @@ from .lib import ( maintained_selection, get_additional_data, update_frame_range, - set_asset_framerange + set_asset_framerange, + get_current_comp, + comp_lock_and_undo_chunk ) from .menu import launch_openpype_menu @@ -39,9 +38,6 @@ __all__ = [ "imprint_container", "parse_container", - "get_current_comp", - "comp_lock_and_undo_chunk", - # workio "open_file", "save_file", @@ -55,6 +51,8 @@ __all__ = [ "get_additional_data", "update_frame_range", "set_asset_framerange", + "get_current_comp", + "comp_lock_and_undo_chunk", # menu "launch_openpype_menu", diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 91a74ac848..295ba33711 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -19,8 +19,6 @@ from openpype.pipeline import ( ) from openpype.pipeline.context_tools import get_current_project_asset -from .pipeline import get_current_comp, comp_lock_and_undo_chunk - self = sys.modules[__name__] self._project = None @@ -232,3 +230,21 @@ def get_frame_path(path): padding = 4 # default Fusion padding return filename, padding, ext + + +def get_current_comp(): + """Hack to get current comp in this session""" + fusion = 
getattr(sys.modules["__main__"], "fusion", None) + return fusion.CurrentComp if fusion else None + + +@contextlib.contextmanager +def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"): + """Lock comp and open an undo chunk during the context""" + try: + comp.Lock() + comp.StartUndo(undo_queue_name) + yield + finally: + comp.Unlock() + comp.EndUndo() \ No newline at end of file diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index b5d461e7f0..82cae427ff 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -23,6 +23,12 @@ from openpype.pipeline import ( ) from openpype.pipeline.load import any_outdated_containers from openpype.hosts.fusion import FUSION_HOST_DIR +from openpype.tools.utils import host_tools + +from .lib import ( + get_current_comp, + comp_lock_and_undo_chunk +) log = Logger.get_logger(__name__) @@ -247,19 +253,3 @@ def parse_container(tool): return container -def get_current_comp(): - """Hack to get current comp in this session""" - fusion = getattr(sys.modules["__main__"], "fusion", None) - return fusion.CurrentComp if fusion else None - - -@contextlib.contextmanager -def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"): - """Lock comp and open an undo chunk during the context""" - try: - comp.Lock() - comp.StartUndo(undo_queue_name) - yield - finally: - comp.Unlock() - comp.EndUndo() diff --git a/openpype/hosts/fusion/api/workio.py b/openpype/hosts/fusion/api/workio.py index 89752d3e6d..939b2ff4be 100644 --- a/openpype/hosts/fusion/api/workio.py +++ b/openpype/hosts/fusion/api/workio.py @@ -2,7 +2,7 @@ import sys import os -from .pipeline import get_current_comp +from .lib import get_current_comp def file_extensions(): From 9dda39a3e722e8a888f94de06072830cf241252d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 01:29:24 +0200 Subject: [PATCH 1246/2550] Remove unused imports --- openpype/hosts/fusion/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 82cae427ff..6d4a20ccee 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -2,9 +2,7 @@ Basic avalon integration """ import os -import sys import logging -import contextlib import pyblish.api From 15c3b068285fe2021d35822c32ad9d5b85e8574e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 01:36:31 +0200 Subject: [PATCH 1247/2550] Add validate comp prefs on scene before save and after scene open --- openpype/hosts/fusion/api/lib.py | 44 +++++++++++++++++++++++++++ openpype/hosts/fusion/api/pipeline.py | 9 +++++- 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 295ba33711..f10809a7e1 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -5,6 +5,7 @@ import contextlib from Qt import QtGui +from openpype.lib import Logger from openpype.client import ( get_asset_by_name, get_subset_by_name, @@ -91,6 +92,49 @@ def set_asset_resolution(): }) +def validate_comp_prefs(): + """Validate current comp defaults with asset settings. + + Validates fps, resolutionWidth, resolutionHeight, aspectRatio. + + This does *not* validate frameStart, frameEnd, handleStart and handleEnd. 
+ """ + + log = Logger.get_logger("validate_comp_prefs") + + fields = [ + "data.fps", + "data.resolutionWidth", + "data.resolutionHeight", + "data.pixelAspect" + ] + asset_data = get_current_project_asset(fields=fields)["data"] + + comp = get_current_comp() + comp_frame_format_prefs = comp.GetPrefs("Comp.FrameFormat") + + # Pixel aspect ratio in Fusion is set as AspectX and AspectY so we convert + # the data to something that is more sensible to Fusion + asset_data["pixelAspectX"] = asset_data.pop("pixelAspect") + asset_data["pixelAspectY"] = 1.0 + + for key, comp_key, label in [ + ("fps", "Rate", "FPS"), + ("resolutionWidth", "Width", "Resolution Width"), + ("resolutionHeight", "Height", "Resolution Height"), + ("pixelAspectX", "AspectX", "Pixel Aspect Ratio X"), + ("pixelAspectY", "AspectY", "Pixel Aspect Ratio Y") + ]: + value = asset_data[key] + current_value = comp_frame_format_prefs.get(comp_key) + if value != current_value: + # todo: Actually show dialog to user instead of just logging + log.warning( + "Invalid pref {}: {} (should be: {})".format(comp_key, + current_value, + value)) + + def get_additional_data(container): """Get Fusion related data for the container diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 6d4a20ccee..c2c39291c6 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -25,7 +25,8 @@ from openpype.tools.utils import host_tools from .lib import ( get_current_comp, - comp_lock_and_undo_chunk + comp_lock_and_undo_chunk, + validate_comp_prefs ) log = Logger.get_logger(__name__) @@ -88,6 +89,7 @@ def install(): # Fusion integration currently does not attach to direct callbacks of # the application. So we use workfile callbacks to allow similar behavior # on save and open + register_event_callback("workfile.save.before", on_before_save) register_event_callback("workfile.open.after", on_after_open) @@ -138,7 +140,12 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): tool.SetAttrs({"TOOLB_PassThrough": passthrough}) +def on_before_save(_event): + validate_comp_prefs() + + def on_after_open(_event): + validate_comp_prefs() if any_outdated_containers(): log.warning("Scene has outdated content.") From fb940c5da099dd56cba7cbfedf9143e29e659b4a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 01:39:42 +0200 Subject: [PATCH 1248/2550] Don't error if "show" is clicked but comp is closed already --- openpype/hosts/fusion/api/pipeline.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index c2c39291c6..bc7f90a97c 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -156,7 +156,12 @@ def on_after_open(_event): comp = get_current_comp() def _on_show_scene_inventory(): - comp.CurrentFrame.ActivateFrame() # ensure that comp is active + # ensure that comp is active + frame = comp.CurrentFrame + if not frame: + print("Comp is closed, skipping show scene inventory") + return + frame.ActivateFrame() # raise comp window host_tools.show_scene_inventory() from openpype.widgets import popup From 1013a293519aed5470f1eda4e2ba4f280d718dba Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 01:41:04 +0200 Subject: [PATCH 1249/2550] Remove double space --- openpype/hosts/fusion/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/pipeline.py 
b/openpype/hosts/fusion/api/pipeline.py index bc7f90a97c..083aa50027 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -167,7 +167,7 @@ def on_after_open(_event): from openpype.widgets import popup from openpype.style import load_stylesheet dialog = popup.Popup(parent=menu.menu) - dialog.setWindowTitle("Fusion comp has outdated content") + dialog.setWindowTitle("Fusion comp has outdated content") dialog.setMessage("There are outdated containers in " "your Fusion comp.") dialog.on_clicked.connect(_on_show_scene_inventory) From 2df5d871f16338a425870649a0df59d189acdaf4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 02:07:30 +0200 Subject: [PATCH 1250/2550] Remove before save callback since it runs BEFORE the task change making it unusable for these particular validations --- openpype/hosts/fusion/api/pipeline.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 083aa50027..76b365e29f 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -89,7 +89,6 @@ def install(): # Fusion integration currently does not attach to direct callbacks of # the application. So we use workfile callbacks to allow similar behavior # on save and open - register_event_callback("workfile.save.before", on_before_save) register_event_callback("workfile.open.after", on_after_open) @@ -140,10 +139,6 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): tool.SetAttrs({"TOOLB_PassThrough": passthrough}) -def on_before_save(_event): - validate_comp_prefs() - - def on_after_open(_event): validate_comp_prefs() @@ -171,7 +166,6 @@ def on_after_open(_event): dialog.setMessage("There are outdated containers in " "your Fusion comp.") dialog.on_clicked.connect(_on_show_scene_inventory) - dialog.show() dialog.raise_() dialog.activateWindow() From 4c884ed2bc0649a5a6a08772fedd48ce06c1bda9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 02:08:09 +0200 Subject: [PATCH 1251/2550] Add a pop-up about invalid comp configuration --- openpype/hosts/fusion/api/lib.py | 58 +++++++++++++++++++++++++++----- 1 file changed, 49 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index f10809a7e1..a95312b938 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -103,12 +103,14 @@ def validate_comp_prefs(): log = Logger.get_logger("validate_comp_prefs") fields = [ + "name", "data.fps", "data.resolutionWidth", "data.resolutionHeight", "data.pixelAspect" ] - asset_data = get_current_project_asset(fields=fields)["data"] + asset_doc = get_current_project_asset(fields=fields) + asset_data = asset_doc["data"] comp = get_current_comp() comp_frame_format_prefs = comp.GetPrefs("Comp.FrameFormat") @@ -118,21 +120,59 @@ def validate_comp_prefs(): asset_data["pixelAspectX"] = asset_data.pop("pixelAspect") asset_data["pixelAspectY"] = 1.0 - for key, comp_key, label in [ + validations = [ ("fps", "Rate", "FPS"), ("resolutionWidth", "Width", "Resolution Width"), ("resolutionHeight", "Height", "Resolution Height"), ("pixelAspectX", "AspectX", "Pixel Aspect Ratio X"), ("pixelAspectY", "AspectY", "Pixel Aspect Ratio Y") - ]: - value = asset_data[key] - current_value = comp_frame_format_prefs.get(comp_key) - if value != current_value: + ] + + invalid = [] + for key, comp_key, label in validations: + asset_value = asset_data[key] + comp_value = 
comp_frame_format_prefs.get(comp_key) + if asset_value != comp_value: # todo: Actually show dialog to user instead of just logging log.warning( - "Invalid pref {}: {} (should be: {})".format(comp_key, - current_value, - value)) + "Comp {pref} {value} does not match asset " + "'{asset_name}' {pref} {asset_value}".format( + pref=label, + value=comp_value, + asset_name=asset_doc["name"], + asset_value=asset_value) + ) + + invalid_msg = "{} {} should be {}".format(label, + comp_value, + asset_value) + invalid.append(invalid_msg) + + if invalid: + + def _on_repair(): + attributes = dict() + for key, comp_key, _label in validations: + value = asset_data[key] + comp_key_full = "Comp.FrameFormat.{}".format(comp_key) + attributes[comp_key_full] = value + comp.SetPrefs(attributes) + + from . import menu + from openpype.widgets import popup + from openpype.style import load_stylesheet + dialog = popup.Popup(parent=menu.menu) + dialog.setWindowTitle("Fusion comp has invalid configuration") + + msg = "Comp preferences mismatches '{}'".format(asset_doc["name"]) + msg += "\n" + "\n".join(invalid) + dialog.setMessage(msg) + dialog.setButtonText("Repair") + dialog.on_clicked.connect(_on_repair) + dialog.show() + dialog.raise_() + dialog.activateWindow() + dialog.setStyleSheet(load_stylesheet()) def get_additional_data(container): From 794e8a9b8504ccf99d0795d54b7ab9ec3feb6a78 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 08:35:23 +0200 Subject: [PATCH 1252/2550] Shush hound --- openpype/hosts/fusion/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index a95312b938..a7472d239c 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -331,4 +331,4 @@ def comp_lock_and_undo_chunk(comp, undo_queue_name="Script CMD"): yield finally: comp.Unlock() - comp.EndUndo() \ No newline at end of file + comp.EndUndo() From 316ba2294fdf0a7f3fef17ea9c4665c8e1f2a573 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Sep 2022 10:15:11 +0200 Subject: [PATCH 1253/2550] hide drop label if it's not allowed to add anything --- openpype/widgets/attribute_defs/files_widget.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index d29aa1b607..e4d0a481c8 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -138,11 +138,13 @@ class DropEmpty(QtWidgets.QWidget): allowed_items = [item + "s" for item in allowed_items] if not allowed_items: + self._drop_label_widget.setVisible(False) self._items_label_widget.setText( "It is not allowed to add anything here!" 
) return + self._drop_label_widget.setVisible(True) items_label = "Multiple " if self._single_item: items_label = "Single " From 1aad016cb8f64baf419f4e9363cd81b3b2b449ad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Sep 2022 10:16:06 +0200 Subject: [PATCH 1254/2550] hide remove button if there are not file items --- openpype/widgets/attribute_defs/files_widget.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index e4d0a481c8..af15cfa859 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -594,6 +594,13 @@ class FilesView(QtWidgets.QListView): self._remove_btn.setVisible(not multivalue) + def update_remove_btn_visibility(self): + model = self.model() + visible = False + if model: + visible = model.rowCount() > 0 + self._remove_btn.setVisible(visible) + def has_selected_item_ids(self): """Is any index selected.""" for index in self.selectionModel().selectedIndexes(): @@ -657,6 +664,7 @@ class FilesView(QtWidgets.QListView): def showEvent(self, event): super(FilesView, self).showEvent(event) self._update_remove_btn() + self.update_remove_btn_visibility() class FilesWidget(QtWidgets.QFrame): @@ -968,3 +976,4 @@ class FilesWidget(QtWidgets.QFrame): files_exists = self._files_proxy_model.rowCount() > 0 self._files_view.setVisible(files_exists) self._empty_widget.setVisible(not files_exists) + self._files_view.update_remove_btn_visibility() From aa56f615819bc55f05736400a46c52d19d446134 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Sep 2022 10:16:46 +0200 Subject: [PATCH 1255/2550] update visibility on add and remove --- openpype/widgets/attribute_defs/files_widget.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index af15cfa859..99498f9fa9 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -784,6 +784,8 @@ class FilesWidget(QtWidgets.QFrame): if not self._in_set_value: self.value_changed.emit() + self._update_visibility() + def _on_rows_removed(self, parent_index, start_row, end_row): available_item_ids = set() for row in range(self._files_proxy_model.rowCount()): @@ -803,6 +805,7 @@ class FilesWidget(QtWidgets.QFrame): if not self._in_set_value: self.value_changed.emit() + self._update_visibility() def _on_split_request(self): if self._multivalue: @@ -966,11 +969,9 @@ class FilesWidget(QtWidgets.QFrame): def _add_filepaths(self, filepaths): self._files_model.add_filepaths(filepaths) - self._update_visibility() def _remove_item_by_ids(self, item_ids): self._files_model.remove_item_by_ids(item_ids) - self._update_visibility() def _update_visibility(self): files_exists = self._files_proxy_model.rowCount() > 0 From b78796f21c5ea31edd318361d614279da7a30af1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Sep 2022 10:17:40 +0200 Subject: [PATCH 1256/2550] stack drop widget and files view to handle both size hints --- .../widgets/attribute_defs/files_widget.py | 42 ++++++------------- 1 file changed, 12 insertions(+), 30 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 99498f9fa9..cf931ccbdc 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -683,12 +683,13 @@ class 
FilesWidget(QtWidgets.QFrame): files_proxy_model.setSourceModel(files_model) files_view = FilesView(self) files_view.setModel(files_proxy_model) - files_view.setVisible(False) - layout = QtWidgets.QHBoxLayout(self) + layout = QtWidgets.QStackedLayout(self) layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(empty_widget, 1) - layout.addWidget(files_view, 1) + layout.setStackingMode(layout.StackAll) + layout.addWidget(empty_widget) + layout.addWidget(files_view) + layout.setCurrentWidget(empty_widget) files_proxy_model.rowsInserted.connect(self._on_rows_inserted) files_proxy_model.rowsRemoved.connect(self._on_rows_removed) @@ -708,6 +709,8 @@ class FilesWidget(QtWidgets.QFrame): self._widgets_by_id = {} + self._layout = layout + def _set_multivalue(self, multivalue): if self._multivalue == multivalue: return @@ -849,29 +852,6 @@ class FilesWidget(QtWidgets.QFrame): menu.popup(pos) - def sizeHint(self): - # Get size hints of widget and visible widgets - result = super(FilesWidget, self).sizeHint() - if not self._files_view.isVisible(): - not_visible_hint = self._files_view.sizeHint() - else: - not_visible_hint = self._empty_widget.sizeHint() - - # Get margins of this widget - margins = self.layout().contentsMargins() - - # Change size hint based on result of maximum size hint of widgets - result.setWidth(max( - result.width(), - not_visible_hint.width() + margins.left() + margins.right() - )) - result.setHeight(max( - result.height(), - not_visible_hint.height() + margins.top() + margins.bottom() - )) - - return result - def dragEnterEvent(self, event): if self._multivalue: return @@ -903,7 +883,6 @@ class FilesWidget(QtWidgets.QFrame): mime_data = event.mimeData() if mime_data.hasUrls(): event.accept() - # event.setDropAction(QtCore.Qt.CopyAction) filepaths = [] for url in mime_data.urls(): filepath = url.toLocalFile() @@ -975,6 +954,9 @@ class FilesWidget(QtWidgets.QFrame): def _update_visibility(self): files_exists = self._files_proxy_model.rowCount() > 0 - self._files_view.setVisible(files_exists) - self._empty_widget.setVisible(not files_exists) + if files_exists: + current_widget = self._files_view + else: + current_widget = self._empty_widget + self._layout.setCurrentWidget(current_widget) self._files_view.update_remove_btn_visibility() From 7f5b192ac4ff7fcdabeab7d9ba85a47898865e57 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Sep 2022 10:18:09 +0200 Subject: [PATCH 1257/2550] handle storing of inserted and removed items --- .../widgets/attribute_defs/files_widget.py | 32 ++++++++++++++++--- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index cf931ccbdc..7a02a1b26b 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -237,10 +237,28 @@ class FilesModel(QtGui.QStandardItemModel): self._filenames_by_dirpath = collections.defaultdict(set) self._items_by_dirpath = collections.defaultdict(list) + self.rowsAboutToBeRemoved.connect(self._on_about_to_be_removed) + self.rowsInserted.connect(self._on_insert) + @property def id(self): return self._id + def _on_about_to_be_removed(self, parent_index, start, end): + # Make sure items are removed from cache + for row in range(start, end + 1): + index = self.index(row, 0, parent_index) + item_id = index.data(ITEM_ID_ROLE) + if item_id is not None: + self._items_by_id.pop(item_id, None) + + def _on_insert(self, parent_index, start, end): + for row in 
range(start, end + 1): + index = self.index(start, end, parent_index) + item_id = index.data(ITEM_ID_ROLE) + if item_id not in self._items_by_id: + self._items_by_id[item_id] = self.item(row) + def set_multivalue(self, multivalue): """Disable filtering.""" @@ -354,6 +372,10 @@ class FilesModel(QtGui.QStandardItemModel): src_item_id = index.data(ITEM_ID_ROLE) src_item = self._items_by_id.get(src_item_id) + src_row = None + if src_item: + src_row = src_item.row() + # Take out items that should be moved items = [] for item_id in item_ids: @@ -367,10 +389,12 @@ class FilesModel(QtGui.QStandardItemModel): return False # Calculate row where items should be inserted - if src_item: - src_row = src_item.row() - else: - src_row = root.rowCount() + row_count = root.rowCount() + if src_row is None: + src_row = row_count + + if src_row > row_count: + src_row = row_count root.insertRow(src_row, items) return True From 0f0a5fa294548d761d11eedd2c562b99fc19b3f4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Sep 2022 10:28:37 +0200 Subject: [PATCH 1258/2550] added some docstrings --- openpype/widgets/attribute_defs/files_widget.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 7a02a1b26b..259cb774b0 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -245,7 +245,13 @@ class FilesModel(QtGui.QStandardItemModel): return self._id def _on_about_to_be_removed(self, parent_index, start, end): - # Make sure items are removed from cache + """Make sure that removed items are removed from items mapping. + + Connected with '_on_insert'. When user drag item and drop it to same + view the item is actually removed and creted again but it happens in + inner calls of Qt. + """ + for row in range(start, end + 1): index = self.index(row, 0, parent_index) item_id = index.data(ITEM_ID_ROLE) @@ -253,6 +259,13 @@ class FilesModel(QtGui.QStandardItemModel): self._items_by_id.pop(item_id, None) def _on_insert(self, parent_index, start, end): + """Make sure new added items are stored in items mapping. + + Connected to '_on_about_to_be_removed'. Some items are not created + using '_create_item' but are recreated using Qt. So the item is not in + mapping and if it would it would not lead to same item pointer. + """ + for row in range(start, end + 1): index = self.index(start, end, parent_index) item_id = index.data(ITEM_ID_ROLE) From c43b952357c9c69798adcd3fb04fc2d6dbc85100 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 11:15:39 +0200 Subject: [PATCH 1259/2550] Updated docstring Co-authored-by: Roy Nieterau --- openpype/hosts/maya/plugins/publish/extract_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index dd705324b9..403b4ee6bc 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -79,8 +79,8 @@ def maketx(source, destination, args, logger): Args: source (str): Path to source file. destination (str): Writing destination path. - args: Additional arguments for `maketx`. - logger + args (list): Additional arguments for `maketx`. + logger (logging.Logger): Logger to log messages to. Returns: str: Output of `maketx` command. 
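For context on the `maketx` wrapper whose docstring PATCH 1259 corrects: below is a minimal sketch of such a wrapper, assuming it simply shells out to OpenImageIO's `maketx` command-line tool. This is not the repository's actual extract_look.py code; the CLI flags and file names are illustrative assumptions, only the signature follows the patched docstring.

import logging
import subprocess


def maketx(source, destination, args, logger):
    """Convert a texture to .tx via OIIO's maketx CLI (sketch only).

    'args' is a list of extra CLI arguments and 'logger' is a
    logging.Logger, matching the signature documented in the patch.
    """
    cmd = ["maketx", "-v", "-o", destination] + list(args) + [source]
    logger.debug("Running: %s", " ".join(cmd))
    # Capture stdout and stderr together so the full tool output can be
    # returned as a string, per the 'Returns' section of the docstring.
    output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    return output.decode("utf-8")


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    # Illustrative paths; requires the 'maketx' binary to be on PATH.
    print(maketx("diffuse.png", "diffuse.tx", ["--oiio"],
                 logging.getLogger("maketx")))
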
From cefe853cc6c932771ffd9f2325c783942fbff3da Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 11:29:23 +0200 Subject: [PATCH 1260/2550] OP-3943 - changed label to Active Active label matches configuration for optional plugins. Collector cannot be optional, but Active matches to "Instance is collected but won't get published by default." --- .../schemas/projects_schema/schema_project_photoshop.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 7294ba8608..26b38fa2c6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -140,7 +140,7 @@ { "type": "boolean", "key": "publish", - "label": "Publish review" + "label": "Active" } ] }, From ec4bcc474b7a3c3701ae45c8008536d0fc3d7992 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 12:25:48 +0200 Subject: [PATCH 1261/2550] :recycle: replace exceptions and asserts in validators --- .../plugins/publish/validate_bypass.py | 12 +++++---- .../publish/validate_cop_output_node.py | 19 ++++++++----- .../publish/validate_file_extension.py | 11 +++++--- .../validate_houdini_license_category.py | 10 ++++--- .../publish/validate_mkpaths_toggled.py | 13 ++++----- .../plugins/publish/validate_no_errors.py | 9 ++++--- .../publish/validate_remote_publish.py | 27 ++++++++++++------- .../validate_remote_publish_enabled.py | 11 +++++--- .../publish/validate_sop_output_node.py | 9 ++++--- .../validate_usd_layer_path_backslashes.py | 8 +++--- .../publish/validate_usd_model_and_shade.py | 6 +++-- .../publish/validate_usd_output_node.py | 9 ++++--- .../validate_usd_render_product_names.py | 7 +++-- .../plugins/publish/validate_usd_setdress.py | 7 +++-- .../validate_usd_shade_model_exists.py | 9 ++++--- .../publish/validate_usd_shade_workspace.py | 23 +++++++++------- 16 files changed, 121 insertions(+), 69 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 1b441b8da9..59ab2d2b1b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateBypassed(pyblish.api.InstancePlugin): @@ -11,7 +12,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder - 0.1 + order = pyblish.api.ValidatorOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" @@ -26,9 +27,10 @@ class ValidateBypassed(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: rop = invalid[0] - raise RuntimeError( - "ROP node %s is set to bypass, publishing cannot continue.." 
- % rop.path() + raise PublishValidationError( + ("ROP node {} is set to bypass, publishing cannot " + "continue.".format(rop.path())), + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 86ddc2adf2..2e99e5fb41 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError + class ValidateCopOutputNode(pyblish.api.InstancePlugin): """Validate the instance COP Output Node. @@ -20,9 +23,10 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. " + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod @@ -54,7 +58,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Cop2", ( - "Output node %s is not of category Cop2. This is a bug.." - % output_node.path() - ) + if output_node.type().category().name() != "Cop2": + raise PublishValidationError( + ("Output node %s is not of category Cop2. " + "This is a bug...").format(output_node.path()), + title=cls.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index f050a41b88..5211cdb919 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import os import pyblish.api from openpype.hosts.houdini.api import lib +from openpype.pipeline import PublishValidationError class ValidateFileExtension(pyblish.api.InstancePlugin): @@ -29,8 +31,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "ROP node has incorrect " "file extension: %s" % invalid + raise PublishValidationError( + "ROP node has incorrect file extension: {}".format(invalid), + title=self.label ) @classmethod @@ -53,7 +56,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): for family in families: extension = cls.family_extensions.get(family, None) if extension is None: - raise RuntimeError("Unsupported family: %s" % family) + raise PublishValidationError( + "Unsupported family: {}".format(family), + title=cls.label) if output_extension != extension: return [node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index f5f03aa844..f1c52f22c1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): @@ -24,7 +26,7 @@ class 
ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): license = hou.licenseCategory() if license != hou.licenseCategoryType.Commercial: - raise RuntimeError( - "USD Publishing requires a full Commercial " - "license. You are on: %s" % license - ) + raise PublishValidationError( + ("USD Publishing requires a full Commercial " + "license. You are on: {}").format(license), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index be6a798a95..9d1f92a101 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Create Intermediate Directories Checked" @@ -14,10 +15,10 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Found ROP node with Create Intermediate " - "Directories turned off: %s" % invalid - ) + raise PublishValidationError( + ("Found ROP node with Create Intermediate " + "Directories turned off: {}".format(invalid)), + title=self.label) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 77e7cc9ff7..fd396ad8c9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError def cook_in_range(node, start, end): @@ -28,7 +29,7 @@ def get_errors(node): class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] label = "Validate no errors" @@ -62,4 +63,6 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): errors = get_errors(node) if errors: self.log.error(errors) - raise RuntimeError("Node has errors: %s" % node.path()) + raise PublishValidationError( + "Node has errors: {}".format(node.path()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 0ab182c584..7349022681 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -1,7 +1,9 @@ +# -*-coding: utf-8 -*- import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError import hou @@ -27,17 +29,24 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): # We ensure it's a shell node and that it has the pre-render script # set correctly. 
Plus the shell script it will trigger should be # completely empty (doing nothing) - assert node.type().name() == "shell", "Must be shell ROP node" - assert node.parm("command").eval() == "", "Must have no command" - assert not node.parm("shellexec").eval(), "Must not execute in shell" - assert ( - node.parm("prerender").eval() == cmd - ), "REMOTE_PUBLISH node does not have correct prerender script." - assert ( - node.parm("lprerender").eval() == "python" - ), "REMOTE_PUBLISH node prerender script type not set to 'python'" + if node.type().name() != "shell": + self.raise_error("Must be shell ROP node") + if node.parm("command").eval() != "": + self.raise_error("Must have no command") + if node.parm("shellexec").eval(): + self.raise_error("Must not execute in shell") + if node.parm("prerender").eval() != cmd: + self.raise_error(("REMOTE_PUBLISH node does not have " + "correct prerender script.")) + if node.parm("lprerender").eval() != "python": + self.raise_error(("REMOTE_PUBLISH node prerender script " + "type not set to 'python'")) @classmethod def repair(cls, context): """(Re)create the node if it fails to pass validation.""" lib.create_remote_publish_node(force=True) + + def raise_error(self, message): + self.log.error(message) + raise PublishValidationError(message, title=self.label) \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index afc8df7528..8ec62f4e85 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @@ -18,10 +20,12 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=self.label) if node.isBypassed(): - raise RuntimeError("REMOTE_PUBLISH must not be bypassed.") + raise PublishValidationError( + "REMOTE_PUBLISH must not be bypassed.", title=self.label) @classmethod def repair(cls, context): @@ -29,7 +33,8 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=cls.label) cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH") node.bypass(False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index c18ad7a1b7..a1a96120e2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -58,10 +58,11 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." 
- % output_node.path() - ) + if output_node.type().category().name() != "Sop": + raise PublishValidationError( + ("Output node {} is not of category Sop. " + "This is a bug.").format(output_node.path()), + title=cls.label) # Ensure the node is cooked and succeeds to cook so we can correctly # check for its geometry data. diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 95cad82085..3e593a9508 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): @@ -44,7 +46,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): invalid.append(layer) if invalid: - raise RuntimeError( + raise PublishValidationError(( "Loaded layers have backslashes. " - "This is invalid for HUSK USD rendering." - ) + "This is invalid for HUSK USD rendering."), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index bdb7c05319..3ca0fd0298 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -1,7 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib - +from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux @@ -55,7 +56,8 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): if invalid: prim_paths = sorted([str(prim.GetPath()) for prim in invalid]) - raise RuntimeError("Found invalid primitives: %s" % prim_paths) + raise PublishValidationError( + "Found invalid primitives: {}".format(prim_paths)) class ValidateUsdShade(ValidateUsdModel): diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 0c38ccd4be..9a4d292778 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateUSDOutputNode(pyblish.api.InstancePlugin): @@ -20,9 +22,10 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. 
" + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py index 36336a03ae..02c44ab94e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- +import os import pyblish.api -import os +from openpype.pipeline import PublishValidationError class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): @@ -28,4 +30,5 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): if invalid: for message in invalid: self.log.error(message) - raise RuntimeError("USD Render Paths are invalid.") + raise PublishValidationError( + "USD Render Paths are invalid.", title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 835cd5977a..89ae8b8ad9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUsdSetDress(pyblish.api.InstancePlugin): @@ -47,8 +49,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): invalid.append(node) if invalid: - raise RuntimeError( + raise PublishValidationError(( "SetDress contains local geometry. " "This is not allowed, it must be an assembly " - "of referenced assets." + "of referenced assets."), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index f08c7c72c5..c4f118ac3b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import re import pyblish.api @@ -5,6 +6,7 @@ import pyblish.api from openpype.client import get_subset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -32,7 +34,8 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): project_name, model_subset, asset_doc["_id"], fields=["_id"] ) if not subset_doc: - raise RuntimeError( - "USD Model subset not found: " - "%s (%s)" % (model_subset, asset_name) + raise PublishValidationError( + ("USD Model subset not found: " + "{} ({})").format(model_subset, asset_name), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 2781756272..2ff2702061 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError import hou @@ -12,7 +13,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): """ - order = 
ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] families = ["usdShade"] label = "USD Shade Workspace" @@ -39,13 +40,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): if node_type != other_node_type: continue - # Get highest version + # Get the highest version highest = max(highest, other_version) if version != highest: - raise RuntimeError( - "Shading Workspace is not the latest version." - " Found %s. Latest is %s." % (version, highest) + raise PublishValidationError( + ("Shading Workspace is not the latest version." + " Found {}. Latest is {}.").format(version, highest), + title=self.label ) # There were some issues with the editable node not having the right @@ -56,8 +58,9 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): ) rop_value = rop.parm("lopoutput").rawValue() if rop_value != value: - raise RuntimeError( - "Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values." + raise PublishValidationError( + ("Shading Workspace has invalid 'lopoutput'" + " parameter value. The Shading Workspace" + " needs to be reset to its default values."), + title=self.label ) From c8466855d7d65fc76874a62115ef74739fa23318 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 13:07:13 +0200 Subject: [PATCH 1262/2550] OP-3938 - refactor - extracted two methods --- .../plugins/publish/extract_review.py | 96 ++++++++++++------- 1 file changed, 61 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index e5fee311f8..64ec18710c 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -84,6 +84,67 @@ class ExtractReview(publish.Extractor): source_files_pattern = self._check_and_resize(processed_img_names, source_files_pattern, staging_dir) + self._generate_thumbnail(ffmpeg_path, instance, source_files_pattern, + staging_dir) + + no_of_frames = len(img_list) + self._generate_mov(ffmpeg_path, instance, fps, no_of_frames, + source_files_pattern, staging_dir) + + self.log.info(f"Extracted {instance} to {staging_dir}") + + def _generate_mov(self, ffmpeg_path, instance, fps, no_of_frames, + source_files_pattern, staging_dir): + """Generates .mov to upload to Ftrack. + + Args: + ffmpeg_path (str): path to ffmpeg + instance (Pyblish Instance) + fps (str) + no_of_frames (int): + source_files_pattern (str): name of source file + staging_dir (str): temporary location to store thumbnail + Updates: + instance - adds representation portion + """ + # Generate mov. + mov_path = os.path.join(staging_dir, "review.mov") + self.log.info(f"Generate mov review: {mov_path}") + args = [ + ffmpeg_path, + "-y", + "-i", source_files_pattern, + "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2", + "-vframes", str(no_of_frames), + mov_path + ] + self.log.debug("mov args:: {}".format(args)) + output = run_subprocess(args) + self.log.debug(output) + instance.data["representations"].append({ + "name": "mov", + "ext": "mov", + "files": os.path.basename(mov_path), + "stagingDir": staging_dir, + "frameStart": 1, + "frameEnd": no_of_frames, + "fps": fps, + "preview": True, + "tags": self.mov_options['tags'] + }) + + def _generate_thumbnail(self, ffmpeg_path, instance, source_files_pattern, + staging_dir): + """Generates scaled down thumbnail and adds it as representation. 
+ + Args: + ffmpeg_path (str): path to ffmpeg + instance (Pyblish Instance) + source_files_pattern (str): name of source file + staging_dir (str): temporary location to store thumbnail + Updates: + instance - adds representation portion + """ # Generate thumbnail thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg") self.log.info(f"Generate thumbnail {thumbnail_path}") @@ -97,7 +158,6 @@ class ExtractReview(publish.Extractor): ] self.log.debug("thumbnail args:: {}".format(args)) output = run_subprocess(args) - instance.data["representations"].append({ "name": "thumbnail", "ext": "jpg", @@ -106,40 +166,6 @@ class ExtractReview(publish.Extractor): "tags": ["thumbnail"] }) - # Generate mov. - mov_path = os.path.join(staging_dir, "review.mov") - self.log.info(f"Generate mov review: {mov_path}") - img_number = len(img_list) - args = [ - ffmpeg_path, - "-y", - "-i", source_files_pattern, - "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2", - "-vframes", str(img_number), - mov_path - ] - self.log.debug("mov args:: {}".format(args)) - output = run_subprocess(args) - self.log.debug(output) - instance.data["representations"].append({ - "name": "mov", - "ext": "mov", - "files": os.path.basename(mov_path), - "stagingDir": staging_dir, - "frameStart": 1, - "frameEnd": img_number, - "fps": fps, - "preview": True, - "tags": self.mov_options['tags'] - }) - - # Required for extract_review plugin (L222 onwards). - instance.data["frameStart"] = 1 - instance.data["frameEnd"] = img_number - instance.data["fps"] = 25 - - self.log.info(f"Extracted {instance} to {staging_dir}") - def _check_and_resize(self, processed_img_names, source_files_pattern, staging_dir): """Check if saved image could be used in ffmpeg. From 967af51fdf500134e5526616cfbe7b295cd363c7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 13:25:32 +0200 Subject: [PATCH 1263/2550] OP-3938 - create .mov only if multiple frames --- openpype/hosts/photoshop/plugins/publish/extract_review.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 64ec18710c..323fa2b4dd 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -87,9 +87,10 @@ class ExtractReview(publish.Extractor): self._generate_thumbnail(ffmpeg_path, instance, source_files_pattern, staging_dir) - no_of_frames = len(img_list) - self._generate_mov(ffmpeg_path, instance, fps, no_of_frames, - source_files_pattern, staging_dir) + no_of_frames = len(processed_img_names) + if no_of_frames > 1: + self._generate_mov(ffmpeg_path, instance, fps, no_of_frames, + source_files_pattern, staging_dir) self.log.info(f"Extracted {instance} to {staging_dir}") From 59ee65e2a94807c4ec91cd9a04c5c3b7280808ba Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 13:26:22 +0200 Subject: [PATCH 1264/2550] OP-3938 - refactor - removed obsolete function --- .../photoshop/plugins/publish/extract_review.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 323fa2b4dd..aaa6995f27 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -195,22 +195,6 @@ class ExtractReview(publish.Extractor): return source_files_pattern - def 
_get_image_path_from_instances(self, instance): - img_list = [] - - for instance in sorted(instance.context): - if instance.data["family"] != "image": - continue - - for rep in instance.data["representations"]: - img_path = os.path.join( - rep["stagingDir"], - rep["files"] - ) - img_list.append(img_path) - - return img_list - def _copy_image_to_staging_dir(self, staging_dir, img_list): copy_files = [] for i, img_src in enumerate(img_list): From c12005d14e4714afca54bca43534abaf5c02e669 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 13:27:45 +0200 Subject: [PATCH 1265/2550] OP-3938 - refactor - removed obsolete function --- .../photoshop/plugins/publish/extract_review.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index aaa6995f27..a16315996c 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -195,20 +195,6 @@ class ExtractReview(publish.Extractor): return source_files_pattern - def _copy_image_to_staging_dir(self, staging_dir, img_list): - copy_files = [] - for i, img_src in enumerate(img_list): - img_filename = self.output_seq_filename % i - img_dst = os.path.join(staging_dir, img_filename) - - self.log.debug( - "Copying file .. {} -> {}".format(img_src, img_dst) - ) - shutil.copy(img_src, img_dst) - copy_files.append(img_filename) - - return copy_files - def _get_layers_from_image_instances(self, instance): layers = [] for image_instance in instance.context: From c56b98d56814a851fce31d1fbd5f648536530f68 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 14:32:20 +0200 Subject: [PATCH 1266/2550] OP-3938 - refactor - renamed methods --- .../plugins/publish/extract_review.py | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index a16315996c..566e723457 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -49,7 +49,7 @@ class ExtractReview(publish.Extractor): if self.make_image_sequence and len(layers) > 1: self.log.info("Extract layers to image sequence.") - img_list = self._saves_sequences_layers(staging_dir, layers) + img_list = self._save_sequence_images(staging_dir, layers) instance.data["representations"].append({ "name": "jpg", @@ -64,7 +64,7 @@ class ExtractReview(publish.Extractor): processed_img_names = img_list else: self.log.info("Extract layers to flatten image.") - img_list = self._saves_flattened_layers(staging_dir, layers) + img_list = self._save_flatten_image(staging_dir, layers) instance.data["representations"].append({ "name": "jpg", @@ -196,6 +196,11 @@ class ExtractReview(publish.Extractor): return source_files_pattern def _get_layers_from_image_instances(self, instance): + """Collect all layers from 'instance'. + + Returns: + (list) of PSItem + """ layers = [] for image_instance in instance.context: if image_instance.data["family"] != "image": @@ -207,7 +212,12 @@ class ExtractReview(publish.Extractor): return sorted(layers) - def _saves_flattened_layers(self, staging_dir, layers): + def _save_flatten_image(self, staging_dir, layers): + """Creates flat image from 'layers' into 'staging_dir'. 
+ + Returns: + (str): path to new image + """ img_filename = self.output_seq_filename % 0 output_image_path = os.path.join(staging_dir, img_filename) stub = photoshop.stub() @@ -221,7 +231,13 @@ class ExtractReview(publish.Extractor): return img_filename - def _saves_sequences_layers(self, staging_dir, layers): + def _save_sequence_images(self, staging_dir, layers): + """Creates separate flat images from 'layers' into 'staging_dir'. + + Used as source for multi frames .mov to review at once. + Returns: + (list): paths to new images + """ stub = photoshop.stub() list_img_filename = [] From 08ac24080f863e904b4ddec4b53a9c9f502f9685 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 15:02:04 +0200 Subject: [PATCH 1267/2550] :recycle: convert creators --- .../plugins/create/create_redshift_proxy.py | 40 +++++++------- .../plugins/create/create_redshift_rop.py | 54 +++++++++---------- .../houdini/plugins/create/create_usd.py | 38 ++++++------- .../plugins/create/create_usdrender.py | 37 ++++++------- 4 files changed, 85 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index da4d80bf2b..d4bfe9d253 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -1,18 +1,20 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Redshift proxies.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftProxy(plugin.Creator): +class CreateRedshiftProxy(plugin.HoudiniCreator): """Redshift Proxy""" - + identifier = "io.openpype.creators.houdini.redshiftproxy" label = "Redshift Proxy" family = "redshiftproxy" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateRedshiftProxy, self).__init__(*args, **kwargs) - + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) # Redshift provides a `Redshift_Proxy_Output` node type which shows # a limited set of parameters by default and is set to extract a @@ -21,28 +23,26 @@ class CreateRedshiftProxy(plugin.Creator): # why this happens. # TODO: Somehow enforce so that it only shows the original limited # attributes of the Redshift_Proxy_Output node type - self.data.update({"node_type": "Redshift_Proxy_Output"}) + instance_data.update({"node_type": "Redshift_Proxy_Output"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateRedshiftProxy, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. 
+ instance_node = hou.node(instance.get("instance_node")) - """ parms = { - "RS_archive_file": '$HIP/pyblish/`chs("subset")`.$F4.rs', + "RS_archive_file": '$HIP/pyblish/`{}.$F4.rs'.format(subset_name), } - if self.nodes: - node = self.nodes[0] - path = node.path() - parms["RS_archive_sopPath"] = path + if self.selected_nodes: + parms["RS_archive_sopPath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] + to_lock = ["family", "id", "prim_to_detail_pattern"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 6949ca169b..2bb8325623 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -1,41 +1,40 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin to create Redshift ROP.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftROP(plugin.Creator): +class CreateRedshiftROP(plugin.HoudiniCreator): """Redshift ROP""" - + identifier = "io.openpype.creators.houdini.redshift_rop" label = "Redshift ROP" family = "redshift_rop" icon = "magic" defaults = ["master"] - def __init__(self, *args, **kwargs): - super(CreateRedshiftROP, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa + + instance_data.pop("active", None) + instance_data.update({"node_type": "Redshift_ROP"}) + # Add chunk size attribute + instance_data["chunkSize"] = 10 # Clear the family prefix from the subset - subset = self.data["subset"] + subset = subset_name subset_no_prefix = subset[len(self.family):] subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:] - self.data["subset"] = subset_no_prefix + subset_name = subset_no_prefix - # Add chunk size attribute - self.data["chunkSize"] = 10 + instance = super(CreateRedshiftROP, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_node = hou.node(instance.get("instance_node")) - self.data.update({"node_type": "Redshift_ROP"}) - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ - basename = instance.name() - instance.setName(basename + "_ROP", unique_name=True) + basename = instance_node.name() + instance_node.setName(basename + "_ROP", unique_name=True) # Also create the linked Redshift IPR Rop try: @@ -43,11 +42,12 @@ class CreateRedshiftROP(plugin.Creator): "Redshift_IPR", node_name=basename + "_IPR" ) except hou.OperationFailed: - raise Exception(("Cannot create Redshift node. Is Redshift " - "installed and enabled?")) + raise plugin.OpenPypeCreatorError( + ("Cannot create Redshift node. 
Is Redshift " + "installed and enabled?")) # Move it to directly under the Redshift ROP - ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) + ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1)) # Set the linked rop to the Redshift ROP ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance)) @@ -61,10 +61,10 @@ class CreateRedshiftROP(plugin.Creator): "RS_outputMultilayerMode": 0, # no multi-layered exr "RS_outputBeautyAOVSuffix": "beauty", } - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 5bcb7840c0..8502a4e5e9 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -1,39 +1,39 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating USDs.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSD(plugin.Creator): +class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" - + identifier = "io.openpype.creators.houdini.usd" label = "USD (experimental)" family = "usd" icon = "gears" enabled = False - def __init__(self, *args, **kwargs): - super(CreateUSD, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usd"}) - self.data.update({"node_type": "usd"}) + instance = super(CreateUSD, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- - """ parms = { - "lopoutput": "$HIP/pyblish/%s.usd" % self.name, + "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name), "enableoutputprocessor_simplerelativepaths": False, } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = [ @@ -43,5 +43,5 @@ class CreateUSD(plugin.Creator): "id", ] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index cb3fe3f02b..e5c61d2984 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -1,42 +1,43 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin for creating USD renders.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSDRender(plugin.Creator): +class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" - + identifier = "io.openpype.creators.houdini.usdrender" label = "USD Render (experimental)" family = "usdrender" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateUSDRender, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - self.parent = hou.node("/stage") + instance_data["parent"] = hou.node("/stage") # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usdrender"}) - self.data.update({"node_type": "usdrender"}) + instance = super(CreateUSDRender, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. - """ parms = { # Render frame range "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) - instance.setParms(parms) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) From 7e65bdd096b7ee3c9ae927374d2c5c89270cd9b3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 20 Sep 2022 16:39:00 +0200 Subject: [PATCH 1268/2550] OP-3923 - fix issue in Hero hardlinks Disk must be NTFS format or it will throw "WinError 1", which matches to EINVAL. Still raise different errors. 
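The fallback described above boils down to this pattern (a minimal
standalone sketch, not the plugin code itself; "make_hardlink_or_copy" is a
made-up helper name):

    import errno
    import os
    import shutil

    def make_hardlink_or_copy(src_path, dst_path):
        try:
            os.link(src_path, dst_path)
            return
        except OSError as exc:
            # EXDEV - cross drive path, EINVAL - filesystem without
            # hardlink support ("WinError 1" as described above)
            if exc.errno not in (errno.EXDEV, errno.EINVAL):
                raise
        shutil.copy(src_path, dst_path)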
--- openpype/plugins/publish/integrate_hero_version.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 96d768e1c1..c0760a5471 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -577,8 +577,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): return except OSError as exc: - # re-raise exception if different than cross drive path - if exc.errno != errno.EXDEV: + # re-raise exception if different than + # EXDEV - cross drive path + # EINVAL - wrong format, must be NTFS + self.log.debug("Hardlink failed with errno:'{}'".format(exc.errno)) + if exc.errno not in [errno.EXDEV, errno.EINVAL]: raise shutil.copy(src_path, dst_path) From b0f7db52c84e40fa7fadfeb2fd180e2874fe950c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Sep 2022 17:09:58 +0200 Subject: [PATCH 1269/2550] make sure the output is always the same --- openpype/hosts/tvpaint/lib.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index bf47e725cb..95653b6ecb 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -646,9 +646,6 @@ def rename_filepaths_by_frame_start( filepaths_by_frame, range_start, range_end, new_frame_start ): """Change frames in filenames of finished images to new frame start.""" - # Skip if source first frame is same as destination first frame - if range_start == new_frame_start: - return {} # Calculate frame end new_frame_end = range_end + (new_frame_start - range_start) @@ -669,14 +666,17 @@ def rename_filepaths_by_frame_start( source_range = range(range_start, range_end + 1) output_range = range(new_frame_start, new_frame_end + 1) + # Skip if source first frame is same as destination first frame new_dst_filepaths = {} for src_frame, dst_frame in zip(source_range, output_range): - src_filepath = filepaths_by_frame[src_frame] - src_dirpath = os.path.dirname(src_filepath) + src_filepath = os.path.normpath(filepaths_by_frame[src_frame]) + dirpath, src_filename = os.path.split(src_filepath) dst_filename = filename_template.format(frame=dst_frame) - dst_filepath = os.path.join(src_dirpath, dst_filename) + dst_filepath = os.path.join(dirpath, dst_filename) - os.rename(src_filepath, dst_filepath) + if src_filename != dst_filename: + os.rename(src_filepath, dst_filepath) new_dst_filepaths[dst_frame] = dst_filepath + return new_dst_filepaths From 71caefe44915f9618e276812408d29ebd4ca5a51 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:06:28 +0200 Subject: [PATCH 1270/2550] :recycle: refactor parameter locking --- openpype/hosts/houdini/api/plugin.py | 15 +++++++++++++++ .../houdini/plugins/create/create_arnold_ass.py | 4 +--- .../houdini/plugins/create/create_composite.py | 11 ++--------- .../houdini/plugins/create/create_pointcache.py | 4 +--- .../plugins/create/create_redshift_proxy.py | 4 +--- .../houdini/plugins/create/create_redshift_rop.py | 4 +--- .../hosts/houdini/plugins/create/create_usd.py | 4 +--- .../houdini/plugins/create/create_usdrender.py | 4 +--- 8 files changed, 23 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ee73745651..5c52cb416b 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -150,6 +150,21 @@ class 
HoudiniCreator(NewCreator): OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + def lock_parameters(self, node, parameters): + """Lock list of specified parameters on the node. + + Args: + node (hou.Node): Houdini node to lock parameters on. + parameters (list of str): List of parameter names. + + """ + for name in parameters: + try: + parm = node.parm(name) + parm.lock(True) + except AttributeError: + self.log.debug("missing lock pattern {}".format(name)) + def collect_instances(self): for instance in list_instances(creator_id=self.identifier): created_instance = CreatedInstance.from_existing( diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index b3926b8cee..a48658ab99 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -49,6 +49,4 @@ class CreateArnoldAss(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["ar_ass_export_enable", "family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 96d8ca9fd5..1a9c56571a 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -13,8 +13,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): icon = "gears" def create(self, subset_name, instance_data, pre_create_data): - import hou - from pprint import pformat + import hou # noqa instance_data.pop("active", None) instance_data.update({"node_type": "comp"}) @@ -24,10 +23,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - self.log.info(pformat(instance)) - print(pformat(instance)) instance_node = hou.node(instance.get("instance_node")) - filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) parms = { "copoutput": filepath @@ -37,7 +33,4 @@ class CreateCompositeSequence(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) - + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 239f3ce50b..124936d285 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -41,6 +41,4 @@ class CreatePointCache(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index d4bfe9d253..8b6a68437b 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -43,6 +43,4 @@ class CreateRedshiftProxy(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id", "prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git 
a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 2bb8325623..2cbe9bfda1 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -65,6 +65,4 @@ class CreateRedshiftROP(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 8502a4e5e9..51ed8237c5 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -42,6 +42,4 @@ class CreateUSD(plugin.HoudiniCreator): "family", "id", ] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index e5c61d2984..f78f0bed50 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -38,6 +38,4 @@ class CreateUSDRender(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) From df2f68db9798bddffb8ee8fcfcf08764dffc44e9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:06:56 +0200 Subject: [PATCH 1271/2550] :recycle: move splitext to lib --- openpype/hosts/houdini/api/lib.py | 23 ++++++++++++++++++- .../houdini/plugins/publish/collect_frames.py | 21 +++++++---------- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index d0a3068531..8d6f666eb7 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- import sys +import os import uuid import logging from contextlib import contextmanager @@ -556,4 +557,24 @@ def get_frame_data(node): data["frameEnd"] = node.evalParm("f2") data["steps"] = node.evalParm("f3") - return data \ No newline at end of file + return data + + +def splitext(name, allowed_multidot_extensions): + # type: (str, list) -> tuple + """Split file name to name and extension. + + Args: + name (str): File name to split. + allowed_multidot_extensions (list of str): List of allowed multidot + extensions. + + Returns: + tuple: Name and extension. 
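    Example of the intended behaviour (illustrative sketch, file names
    are hypothetical):

        >>> splitext("sphere.0001.ass.gz", [".ass.gz"])
        ('sphere.0001', '.ass.gz')
        >>> splitext("sphere.0001.bgeo.sc", [".ass.gz"])
        ('sphere.0001.bgeo', '.sc')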
+ """ + + for ext in allowed_multidot_extensions: + if name.endswith(ext): + return name[:-len(ext)], ext + + return os.path.splitext(name) diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index cd94635c29..9108432384 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -1,19 +1,13 @@ +# -*- coding: utf-8 -*- +"""Collector plugin for frames data on ROP instances.""" import os import re -import hou +import hou # noqa import pyblish.api from openpype.hosts.houdini.api import lib -def splitext(name, allowed_multidot_extensions): - - for ext in allowed_multidot_extensions: - if name.endswith(ext): - return name[:-len(ext)], ext - - return os.path.splitext(name) - class CollectFrames(pyblish.api.InstancePlugin): """Collect all frames which would be saved from the ROP nodes""" @@ -40,13 +34,13 @@ class CollectFrames(pyblish.api.InstancePlugin): self.log.warning("Using current frame: {}".format(hou.frame())) output = output_parm.eval() - _, ext = splitext(output, + _, ext = lib.splitext(output, allowed_multidot_extensions=[".ass.gz"]) file_name = os.path.basename(output) result = file_name # Get the filename pattern match from the output - # path so we can compute all frames that would + # path, so we can compute all frames that would # come out from rendering the ROP node if there # is a frame pattern in the name pattern = r"\w+\.(\d+)" + re.escape(ext) @@ -65,8 +59,9 @@ class CollectFrames(pyblish.api.InstancePlugin): # for a custom frame list. So this should be refactored. instance.data.update({"frames": result}) - def create_file_list(self, match, start_frame, end_frame): - """Collect files based on frame range and regex.match + @staticmethod + def create_file_list(match, start_frame, end_frame): + """Collect files based on frame range and `regex.match` Args: match(re.match): match object From d59861a6539dd69e51180245ab6ce2164343aaab Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:07:21 +0200 Subject: [PATCH 1272/2550] :bug: update representation creation --- .../plugins/publish/extract_composite.py | 26 +++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index eb77a91d62..4c91d51efd 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -3,7 +3,7 @@ import os import pyblish.api import openpype.api -from openpype.hosts.houdini.api.lib import render_rop +from openpype.hosts.houdini.api.lib import render_rop, splitext class ExtractComposite(openpype.api.Extractor): @@ -28,8 +28,24 @@ class ExtractComposite(openpype.api.Extractor): render_rop(ropnode) - if "files" not in instance.data: - instance.data["files"] = [] + output = instance.data["frames"] + _, ext = splitext(output[0], []) + ext = ext.lstrip(".") - frames = instance.data["frames"] - instance.data["files"].append(frames) + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": ext, + "ext": ext, + "files": output, + "stagingDir": staging_dir, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + } + + from pprint import pformat + + self.log.info(pformat(representation)) + + 
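        # Illustration only (hypothetical values): for a two frame EXR
        # sequence the representation built above could expand to roughly:
        #     {
        #         "name": "exr",
        #         "ext": "exr",
        #         "files": ["comp.1001.exr", "comp.1002.exr"],
        #         "stagingDir": "/path/to/staging",
        #         "frameStart": 1001,
        #         "frameEnd": 1002,
        #     }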
instance.data["representations"].append(representation) \ No newline at end of file From 64ef00b564eaccc0cad74e4c59e318724f4a8315 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 16:01:26 +0200 Subject: [PATCH 1273/2550] Set OpenPype icon for menu (cherry picked from commit c902d7b8130ae21b1808e54643b1de59a54f5c43) --- openpype/hosts/fusion/api/menu.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index cf3dea8ec3..ba0e267b14 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -1,6 +1,6 @@ import sys -from Qt import QtWidgets, QtCore +from Qt import QtWidgets, QtCore, QtGui from openpype.tools.utils import host_tools from openpype.style import load_stylesheet @@ -14,6 +14,7 @@ from openpype.hosts.fusion.api.lib import ( set_asset_resolution ) from openpype.pipeline import legacy_io +from openpype.resources import get_openpype_icon_filepath from .pulse import FusionPulse @@ -44,6 +45,10 @@ class OpenPypeMenu(QtWidgets.QWidget): self.setObjectName("OpenPypeMenu") + icon_path = get_openpype_icon_filepath() + icon = QtGui.QIcon(icon_path) + self.setWindowIcon(icon) + self.setWindowFlags( QtCore.Qt.Window | QtCore.Qt.CustomizeWindowHint From a8d90473b8e9bf331a3ff740095bc843c17afb2a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 19:42:21 +0200 Subject: [PATCH 1274/2550] Remove redundant Spacer widget and use `QVBoxLayout.addSpacing` --- openpype/hosts/fusion/api/menu.py | 27 +++++---------------------- 1 file changed, 5 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index ba0e267b14..949b905705 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -22,23 +22,6 @@ self = sys.modules[__name__] self.menu = None -class Spacer(QtWidgets.QWidget): - def __init__(self, height, *args, **kwargs): - super(Spacer, self).__init__(*args, **kwargs) - - self.setFixedHeight(height) - - real_spacer = QtWidgets.QWidget(self) - real_spacer.setObjectName("Spacer") - real_spacer.setFixedHeight(height) - - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(real_spacer) - - self.setLayout(layout) - - class OpenPypeMenu(QtWidgets.QWidget): def __init__(self, *args, **kwargs): super(OpenPypeMenu, self).__init__(*args, **kwargs) @@ -86,28 +69,28 @@ class OpenPypeMenu(QtWidgets.QWidget): layout.addWidget(asset_label) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) layout.addWidget(workfiles_btn) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) layout.addWidget(create_btn) layout.addWidget(load_btn) layout.addWidget(publish_btn) layout.addWidget(manager_btn) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) layout.addWidget(libload_btn) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) layout.addWidget(set_framerange_btn) layout.addWidget(set_resolution_btn) layout.addWidget(rendermode_btn) - layout.addWidget(Spacer(15, self)) + layout.addSpacing(20) layout.addWidget(duplicate_with_inputs_btn) From 1ce7e697ec2b64f135113e8e3c8832c2ada33972 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 19:43:56 +0200 Subject: [PATCH 1275/2550] Remove "Clicked {button}" print statements - UIs have pretty much no delays so no need to print --- openpype/hosts/fusion/api/menu.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git 
a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 949b905705..7a6293807f 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -138,31 +138,24 @@ class OpenPypeMenu(QtWidgets.QWidget): self._callbacks[:] = [] def on_workfile_clicked(self): - print("Clicked Workfile") host_tools.show_workfiles() def on_create_clicked(self): - print("Clicked Create") host_tools.show_creator() def on_publish_clicked(self): - print("Clicked Publish") host_tools.show_publish() def on_load_clicked(self): - print("Clicked Load") host_tools.show_loader(use_context=True) def on_manager_clicked(self): - print("Clicked Manager") host_tools.show_scene_inventory() def on_libload_clicked(self): - print("Clicked Library") host_tools.show_library_loader() def on_rendermode_clicked(self): - print("Clicked Set Render Mode") if self.render_mode_widget is None: window = set_rendermode.SetRenderMode() window.setStyleSheet(load_stylesheet()) @@ -172,15 +165,12 @@ class OpenPypeMenu(QtWidgets.QWidget): self.render_mode_widget.show() def on_duplicate_with_inputs_clicked(self): - print("Clicked Duplicate with input connections") duplicate_with_inputs.duplicate_with_input_connections() def on_set_resolution_clicked(self): - print("Clicked Reset Resolution") set_asset_resolution() def on_set_framerange_clicked(self): - print("Clicked Reset Framerange") set_asset_framerange() From 15610328be6f94ed00aaaa166062438bc8bda3f6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 19:46:13 +0200 Subject: [PATCH 1276/2550] Get current comp once and as early as possible --- openpype/hosts/fusion/api/lib.py | 6 ++++-- openpype/hosts/fusion/api/pipeline.py | 5 ++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index a7472d239c..956f3557ad 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -92,7 +92,7 @@ def set_asset_resolution(): }) -def validate_comp_prefs(): +def validate_comp_prefs(comp=None): """Validate current comp defaults with asset settings. Validates fps, resolutionWidth, resolutionHeight, aspectRatio. @@ -100,6 +100,9 @@ def validate_comp_prefs(): This does *not* validate frameStart, frameEnd, handleStart and handleEnd. """ + if comp is None: + comp = get_current_comp() + log = Logger.get_logger("validate_comp_prefs") fields = [ @@ -112,7 +115,6 @@ def validate_comp_prefs(): asset_doc = get_current_project_asset(fields=fields) asset_data = asset_doc["data"] - comp = get_current_comp() comp_frame_format_prefs = comp.GetPrefs("Comp.FrameFormat") # Pixel aspect ratio in Fusion is set as AspectX and AspectY so we convert diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 76b365e29f..7933f160dc 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -140,7 +140,8 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): def on_after_open(_event): - validate_comp_prefs() + comp = get_current_comp() + validate_comp_prefs(comp) if any_outdated_containers(): log.warning("Scene has outdated content.") @@ -148,8 +149,6 @@ def on_after_open(_event): # Find OpenPype menu to attach to from . 
import menu - comp = get_current_comp() - def _on_show_scene_inventory(): # ensure that comp is active frame = comp.CurrentFrame From 6bcb9dd2471cf39dc21f2ea71e05cdfe4fea650c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 20:01:53 +0200 Subject: [PATCH 1277/2550] Remove `get_additional_data` OpenPype manager/containers don't use it so it's redundant in the code base. --- openpype/hosts/fusion/api/__init__.py | 2 -- openpype/hosts/fusion/api/lib.py | 20 -------------------- 2 files changed, 22 deletions(-) diff --git a/openpype/hosts/fusion/api/__init__.py b/openpype/hosts/fusion/api/__init__.py index 45ed4e12a3..ed70dbca50 100644 --- a/openpype/hosts/fusion/api/__init__.py +++ b/openpype/hosts/fusion/api/__init__.py @@ -19,7 +19,6 @@ from .workio import ( from .lib import ( maintained_selection, - get_additional_data, update_frame_range, set_asset_framerange, get_current_comp, @@ -48,7 +47,6 @@ __all__ = [ # lib "maintained_selection", - "get_additional_data", "update_frame_range", "set_asset_framerange", "get_current_comp", diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 956f3557ad..4ef44dbb61 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -177,26 +177,6 @@ def validate_comp_prefs(comp=None): dialog.setStyleSheet(load_stylesheet()) -def get_additional_data(container): - """Get Fusion related data for the container - - Args: - container(dict): the container found by the ls() function - - Returns: - dict - """ - - tool = container["_tool"] - tile_color = tool.TileColor - if tile_color is None: - return {} - - return {"color": QtGui.QColor.fromRgbF(tile_color["R"], - tile_color["G"], - tile_color["B"])} - - def switch_item(container, asset_name=None, subset_name=None, From 3c134ec011ae2b8ffecda43f1281b77a59d591bc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 20:05:15 +0200 Subject: [PATCH 1278/2550] Remove redundant saying "installed" even though it's midway during install + fix comment --- openpype/hosts/fusion/api/pipeline.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 7933f160dc..260c7d9e60 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -60,7 +60,7 @@ def install(): """ # Remove all handlers associated with the root logger object, because - # that one sometimes logs as "warnings" incorrectly. + # that one always logs as "warnings" incorrectly. 
for handler in logging.root.handlers[:]: logging.root.removeHandler(handler) @@ -72,8 +72,6 @@ def install(): logger.addHandler(handler) logger.setLevel(logging.DEBUG) - log.info("openpype.hosts.fusion installed") - pyblish.api.register_host("fusion") pyblish.api.register_plugin_path(PUBLISH_PATH) log.info("Registering Fusion plug-ins..") From 0b525fa3658c926be033afda4462b4769b1aa025 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 20:17:33 +0200 Subject: [PATCH 1279/2550] Cleanup Create EXR saver logic --- .../fusion/plugins/create/create_exr_saver.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/fusion/plugins/create/create_exr_saver.py b/openpype/hosts/fusion/plugins/create/create_exr_saver.py index 8bab5ee9b1..6d93fe710a 100644 --- a/openpype/hosts/fusion/plugins/create/create_exr_saver.py +++ b/openpype/hosts/fusion/plugins/create/create_exr_saver.py @@ -1,6 +1,9 @@ import os -from openpype.pipeline import LegacyCreator +from openpype.pipeline import ( + LegacyCreator, + legacy_io +) from openpype.hosts.fusion.api import ( get_current_comp, comp_lock_and_undo_chunk @@ -21,12 +24,9 @@ class CreateOpenEXRSaver(LegacyCreator): comp = get_current_comp() - # todo: improve method of getting current environment - # todo: pref avalon.Session over os.environ + workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"]) - workdir = os.path.normpath(os.environ["AVALON_WORKDIR"]) - - filename = "{}..tiff".format(self.name) + filename = "{}..exr".format(self.name) filepath = os.path.join(workdir, "render", filename) with comp_lock_and_undo_chunk(comp): @@ -39,10 +39,10 @@ class CreateOpenEXRSaver(LegacyCreator): saver["Clip"] = filepath saver["OutputFormat"] = file_format - # # # Set standard TIFF settings + # Check file format settings are available if saver[file_format] is None: - raise RuntimeError("File format is not set to TiffFormat, " - "this is a bug") + raise RuntimeError("File format is not set to {}, " + "this is a bug".format(file_format)) # Set file format attributes saver[file_format]["Depth"] = 1 # int8 | int16 | float32 | other From b62d12779a67a266a3109af0433eb234397200a0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 20 Sep 2022 20:17:52 +0200 Subject: [PATCH 1280/2550] Clean up docstring --- openpype/hosts/fusion/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 260c7d9e60..c92d072ef7 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -48,7 +48,7 @@ class CompLogHandler(logging.Handler): def install(): - """Install fusion-specific functionality of avalon-core. + """Install fusion-specific functionality of OpenPype. This is where you install menus and register families, data and loaders into fusion. 
From c693af79cf374efb04b9d113f747e02baaf95ed1 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 21 Sep 2022 04:19:54 +0000 Subject: [PATCH 1281/2550] [Automated] Bump version --- CHANGELOG.md | 26 +++++++++++--------------- openpype/version.py | 2 +- 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af347cadfe..f868e6ed6e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,13 +1,18 @@ # Changelog -## [3.14.3-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.3-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...HEAD) **🚀 Enhancements** +- Maya: better logging in Maketx [\#3886](https://github.com/pypeclub/OpenPype/pull/3886) +- TrayPublisher: added persisting of last selected project [\#3871](https://github.com/pypeclub/OpenPype/pull/3871) +- TrayPublisher: added text filter on project name to Tray Publisher [\#3867](https://github.com/pypeclub/OpenPype/pull/3867) - Github issues adding `running version` section [\#3864](https://github.com/pypeclub/OpenPype/pull/3864) - Publisher: Increase size of main window [\#3862](https://github.com/pypeclub/OpenPype/pull/3862) +- Photoshop: synchronize image version with workfile [\#3854](https://github.com/pypeclub/OpenPype/pull/3854) +- General: Simple script for getting license information about used packages [\#3843](https://github.com/pypeclub/OpenPype/pull/3843) - Houdini: Increment current file on workfile publish [\#3840](https://github.com/pypeclub/OpenPype/pull/3840) - Publisher: Add new publisher to host tools [\#3833](https://github.com/pypeclub/OpenPype/pull/3833) - General: lock task workfiles when they are working on [\#3810](https://github.com/pypeclub/OpenPype/pull/3810) @@ -28,11 +33,10 @@ - Hiero: Use new Extractor location [\#3851](https://github.com/pypeclub/OpenPype/pull/3851) - Maya: Remove old legacy \(ftrack\) plug-ins that are of no use anymore [\#3819](https://github.com/pypeclub/OpenPype/pull/3819) - Nuke: Use new Extractor location [\#3799](https://github.com/pypeclub/OpenPype/pull/3799) -- Maya: Use new Extractor location [\#3775](https://github.com/pypeclub/OpenPype/pull/3775) -- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) **Merged pull requests:** +- Maya: RenderSettings set default image format for V-Ray+Redshift to exr [\#3879](https://github.com/pypeclub/OpenPype/pull/3879) - Remove lockfile during publish [\#3874](https://github.com/pypeclub/OpenPype/pull/3874) ## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) @@ -51,7 +55,6 @@ - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) - Kitsu: Drop 'entities root' setting. 
[\#3739](https://github.com/pypeclub/OpenPype/pull/3739) -- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) **🐛 Bug fixes** @@ -71,16 +74,18 @@ - Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) - Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) - AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) +- Maya: Use new Extractor location [\#3775](https://github.com/pypeclub/OpenPype/pull/3775) - General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) - General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) - General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) - General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) +- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** @@ -95,7 +100,6 @@ **🚀 Enhancements** - General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) -- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) **🐛 Bug fixes** @@ -103,29 +107,21 @@ - General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) - General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) - Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) -- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) -- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) -- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) - AfterEffects: Move AE functions from general lib 
[\#3730](https://github.com/pypeclub/OpenPype/pull/3730) - Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) - General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) - Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) -- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) -- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) - -**Merged pull requests:** - -- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) diff --git a/openpype/version.py b/openpype/version.py index a2335b696b..26b145f1db 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.3-nightly.2" +__version__ = "3.14.3-nightly.3" From dc920e1a035d1140bf34491810ab79ffb12b5604 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 21 Sep 2022 13:15:18 +0200 Subject: [PATCH 1282/2550] Update openpype/hosts/flame/hooks/pre_flame_setup.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/flame/hooks/pre_flame_setup.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index 9c2ad709c7..218fecfd2c 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -126,12 +126,14 @@ class FlamePrelaunch(PreLaunchHook): for dirtm in dirs_to_modify: for root, dirs, files in os.walk(dirtm): try: - for d in dirs: - os.chmod(os.path.join(root, d), self.permisisons) - for f in files: - os.chmod(os.path.join(root, f), self.permisisons) - except OSError as _E: - self.log.warning("Not able to open files: {}".format(_E)) + for name in set(dirs) | set(files): + path = os.path.join(root, name) + st = os.stat(path) + if oct(st.st_mode) != self.permissions: + os.chmod(path, self.permisisons) + + except OSError as exc: + self.log.warning("Not able to open files: {}".format(exc)) def _get_flame_fps(self, fps_num): From 1ec87dde38e844de4820f5aac44dd54f0b79a373 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 21 Sep 2022 13:28:08 +0200 Subject: [PATCH 1283/2550] typo --- openpype/hosts/flame/hooks/pre_flame_setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index 218fecfd2c..2dc11d94ae 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -129,7 +129,7 @@ class FlamePrelaunch(PreLaunchHook): for name in set(dirs) | set(files): path = os.path.join(root, name) st = os.stat(path) - if oct(st.st_mode) != self.permissions: + if oct(st.st_mode) != self.permisisons: os.chmod(path, self.permisisons) except OSError as exc: From f7033e716e2b078be8aff7b9cbc1dfc2d539589d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 21 Sep 2022 13:29:32 +0200 Subject: [PATCH 1284/2550] typo finally correct spelling --- openpype/hosts/flame/hooks/pre_flame_setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index 2dc11d94ae..0173eb8e3b 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -22,7 +22,7 @@ class FlamePrelaunch(PreLaunchHook): in environment var FLAME_SCRIPT_DIR. """ app_groups = ["flame"] - permisisons = 0o777 + permissions = 0o777 wtc_script_path = os.path.join( opflame.HOST_DIR, "api", "scripts", "wiretap_com.py") @@ -129,8 +129,8 @@ class FlamePrelaunch(PreLaunchHook): for name in set(dirs) | set(files): path = os.path.join(root, name) st = os.stat(path) - if oct(st.st_mode) != self.permisisons: - os.chmod(path, self.permisisons) + if oct(st.st_mode) != self.permissions: + os.chmod(path, self.permissions) except OSError as exc: self.log.warning("Not able to open files: {}".format(exc)) From 73e05fba071d4e43ea8cd71e0682c0bd16f48b75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 21 Sep 2022 14:10:56 +0200 Subject: [PATCH 1285/2550] Update openpype/hosts/flame/plugins/create/create_shot_clip.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/flame/plugins/create/create_shot_clip.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/create/create_shot_clip.py b/openpype/hosts/flame/plugins/create/create_shot_clip.py index 835201cd3b..a16e8d394f 100644 --- a/openpype/hosts/flame/plugins/create/create_shot_clip.py +++ b/openpype/hosts/flame/plugins/create/create_shot_clip.py @@ -23,7 +23,7 @@ class CreateShotClip(opfapi.Creator): # nested dictionary (only one level allowed # for sections and dict) for _k, _v in v["value"].items(): - if presets.get(_k, None) is not None: + if presets.get(_k) is not None: gui_inputs[k][ "value"][_k]["value"] = presets[_k] From 764c844db8daa8465bbe4c8ff85006cec51039d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 21 Sep 2022 14:11:04 +0200 Subject: [PATCH 1286/2550] Update openpype/hosts/flame/plugins/create/create_shot_clip.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/flame/plugins/create/create_shot_clip.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/create/create_shot_clip.py b/openpype/hosts/flame/plugins/create/create_shot_clip.py index a16e8d394f..4fb041a4b2 100644 --- a/openpype/hosts/flame/plugins/create/create_shot_clip.py +++ b/openpype/hosts/flame/plugins/create/create_shot_clip.py @@ -27,7 +27,7 @@ class CreateShotClip(opfapi.Creator): gui_inputs[k][ "value"][_k]["value"] = presets[_k] - if presets.get(k, None) is not None: + if presets.get(k) is not None: gui_inputs[k]["value"] = presets[k] # open widget for plugins inputs From a317c8f5fd3766eddf11cc414643eff5fc975def Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 21 Sep 2022 15:21:10 +0200 Subject: [PATCH 1287/2550] OP-3943 - fix broken Settins schema --- .../schemas/projects_schema/schema_project_photoshop.json | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 6668406336..b768db30ee 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -145,6 +145,7 @@ ] }, { + "type": "dict", "key": 
"CollectVersion", "label": "Collect Version", "children": [ From 86a7eb91e15be069193755b1bd4392f6486ccafe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 21 Sep 2022 15:23:27 +0200 Subject: [PATCH 1288/2550] Fix - updated missed import after refactor Error would occur in Webpublisher. --- openpype/pype_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index f65d969c53..d08a812c61 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -218,7 +218,7 @@ class PypeCommands: RuntimeError: When there is no path to process. """ - from openpype.hosts.webpublisher.cli_functions import ( + from openpype.hosts.webpublisher.publish_functions import ( cli_publish ) From 3a935c968c97bd19695ae3888c9904a961397d04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 21 Sep 2022 18:36:23 +0200 Subject: [PATCH 1289/2550] :rotating_light: cosmetic changes --- openpype/hosts/houdini/api/lib.py | 3 +++ openpype/hosts/houdini/api/pipeline.py | 7 ++++--- openpype/hosts/houdini/api/plugin.py | 5 +++-- .../houdini/plugins/create/create_alembic_camera.py | 6 ++++-- .../hosts/houdini/plugins/create/create_arnold_ass.py | 4 +++- openpype/hosts/houdini/plugins/create/create_hda.py | 9 ++++----- .../hosts/houdini/plugins/publish/extract_composite.py | 2 +- .../houdini/plugins/publish/increment_current_file.py | 6 +++--- .../hosts/houdini/plugins/publish/validate_camera_rop.py | 2 +- .../houdini/plugins/publish/validate_remote_publish.py | 2 +- 10 files changed, 27 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 8d6f666eb7..3426040d65 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -20,6 +20,7 @@ self._parent = None log = logging.getLogger(__name__) JSON_PREFIX = "JSON:::" + def get_asset_fps(): """Return current asset fps.""" return get_current_project_asset()["data"].get("fps") @@ -418,6 +419,8 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects data = {} + if not node: + return data for parameter in node.spareParms(): value = parameter.eval() # test if value is json encoded dict diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 4ff24c8004..d64479fc14 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -91,10 +91,11 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): def save_workfile(self, dst_path=None): # Force forwards slashes to avoid segfault - filepath = dst_path.replace("\\", "/") - hou.hipFile.save(file_name=filepath, + if dst_path: + dst_path = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=dst_path, save_to_recent_files=True) - return filepath + return dst_path def open_workfile(self, filepath): # Force forwards slashes to avoid segfault diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 5c52cb416b..897696533f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read, get_frame_data +from .lib import imprint, read class OpenPypeCreatorError(CreatorError): @@ -96,8 +96,9 @@ class Creator(LegacyCreator): class HoudiniCreator(NewCreator): selected_nodes = [] + 
@staticmethod def _create_instance_node( - self, node_name, parent, + node_name, parent, node_type="geometry"): # type: (str, str, str) -> hou.Node """Create node representing instance. diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 483c4205a8..183ab28b26 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -5,7 +5,7 @@ from openpype.pipeline import CreatedInstance class CreateAlembicCamera(plugin.HoudiniCreator): - """Single baked camera from Alembic ROP""" + """Single baked camera from Alembic ROP.""" identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" @@ -40,5 +40,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator): # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. - instance_node.parm("use_sop_path").lock(True) + to_lock = ["use_sop_path"] + self.lock_parameters(instance_node, to_lock) + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index a48658ab99..40b253d1aa 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -24,7 +24,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance = super(CreateArnoldAss, self).create( subset_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) # type: plugin.CreatedInstance instance_node = hou.node(instance.get("instance_node")) @@ -47,6 +47,8 @@ class CreateArnoldAss(plugin.HoudiniCreator): "filename": filepath } + instance_node.setParms(parms) + # Lock any parameters in this list to_lock = ["ar_ass_export_enable", "family", "id"] self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 67e338b1b3..67c05b1634 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -5,7 +5,7 @@ from openpype.client import ( get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import (lib, plugin) +from openpype.hosts.houdini.api import plugin class CreateHDA(plugin.HoudiniCreator): @@ -36,6 +36,8 @@ class CreateHDA(plugin.HoudiniCreator): def _create_instance_node( self, node_name, parent, node_type="geometry"): + import hou + parent_node = hou.node("/obj") if self.selected_nodes: # if we have `use selection` enabled, and we have some @@ -70,15 +72,12 @@ class CreateHDA(plugin.HoudiniCreator): hda_node.setName(node_name) return hda_node - def create(self, subset_name, instance_data, pre_create_data): - import hou - instance_data.pop("active", None) instance = super(CreateHDA, self).create( subset_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) # type: plugin.CreatedInstance return instance diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 4c91d51efd..8dbfd3e08c 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -48,4 +48,4 @@ class ExtractComposite(openpype.api.Extractor): self.log.info(pformat(representation)) - 
instance.data["representations"].append(representation) \ No newline at end of file + instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index 92ac9fbeca..16d9ef9aec 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -2,7 +2,7 @@ import pyblish.api from openpype.lib import version_up from openpype.pipeline import registered_host - +from openpype.hosts.houdini.api import HoudiniHost class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. @@ -20,11 +20,11 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): def process(self, context): # Filename must not have changed since collecting - host = registered_host() + host = registered_host() # type: HoudiniHost current_file = host.current_file() assert ( context.data["currentFile"] == current_file ), "Collected filename from current scene name." new_filepath = version_up(current_file) - host.save_file(new_filepath) + host.save_workfile(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index 18fed7fbc4..41b5273e6a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -56,5 +56,5 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): if camera.type().name() != "cam": raise PublishValidationError( ("Object set in Alembic ROP is not a camera: " - "{} (type: {})").format(camera, camera.type().name()), + "{} (type: {})").format(camera, camera.type().name()), title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 7349022681..4e8e5fc0e8 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -49,4 +49,4 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): def raise_error(self, message): self.log.error(message) - raise PublishValidationError(message, title=self.label) \ No newline at end of file + raise PublishValidationError(message, title=self.label) From 8742acaef94ab0a2695e4637d66fccd783742bea Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 21 Sep 2022 23:56:10 +0200 Subject: [PATCH 1290/2550] Match logic of global thumbnail extractor to avoid overwriting source files --- .../maya/plugins/publish/extract_thumbnail.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 854301ea48..712159c2be 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -1,5 +1,6 @@ import os import glob +import tempfile import capture @@ -81,9 +82,17 @@ class ExtractThumbnail(publish.Extractor): elif asset_width and asset_height: preset['width'] = asset_width preset['height'] = asset_height - stagingDir = self.staging_dir(instance) + + # Create temp directory for thumbnail + # - this is to avoid "override" of source file + dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_") + self.log.debug( + "Create temp directory {} for 
thumbnail".format(dst_staging)
+        )
+        # Store new staging to cleanup paths
+        instance.context.data["cleanupFullPaths"].append(dst_staging)
         filename = "{0}".format(instance.name)
-        path = os.path.join(stagingDir, filename)
+        path = os.path.join(dst_staging, filename)

         self.log.info("Outputting images to %s" % path)

@@ -137,7 +146,7 @@
             'name': 'thumbnail',
             'ext': 'jpg',
             'files': thumbnail,
-            "stagingDir": stagingDir,
+            "stagingDir": dst_staging,
             "thumbnail": True
         }
         instance.data["representations"].append(representation)

From bae5a0799b1e220f1b6b6f7ab3deb8ba3a422331 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 22 Sep 2022 10:37:06 +0200
Subject: [PATCH 1291/2550] fix import of RepairAction

---
 .../hosts/houdini/plugins/publish/collect_remote_publish.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py
index c635a53074..d56d389be0 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py
@@ -1,7 +1,7 @@
 import pyblish.api
-import openpype.api
 import hou

+from openpype.pipeline.publish import RepairAction
 from openpype.hosts.houdini.api import lib


@@ -13,7 +13,7 @@ class CollectRemotePublishSettings(pyblish.api.ContextPlugin):
     hosts = ["houdini"]
     targets = ["deadline"]
     label = "Remote Publish Submission Settings"
-    actions = [openpype.api.RepairAction]
+    actions = [RepairAction]

     def process(self, context):


From c35dde88ee2ce3e5522010fd44838722d7bbd2c8 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 22 Sep 2022 10:49:01 +0200
Subject: [PATCH 1292/2550] fix kwarg in hiero representations query

---
 openpype/hosts/hiero/api/lib.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py
index e288cea2b1..7c27f2ebdc 100644
--- a/openpype/hosts/hiero/api/lib.py
+++ b/openpype/hosts/hiero/api/lib.py
@@ -1089,7 +1089,7 @@ def check_inventory_versions(track_items=None):
     # Find representations based on found containers
     repre_docs = get_representations(
         project_name,
-        repre_ids=repre_ids,
+        representation_ids=repre_ids,
         fields=["_id", "parent"]
     )
     # Store representations by id and collect version ids

From d1f77b7383029a3269583803258adb946d44a8a2 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 22 Sep 2022 10:57:32 +0200
Subject: [PATCH 1293/2550] multichannel openclip create

---
 openpype/hosts/flame/api/plugin.py | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py
index 145b1f0921..a76e5ccc84 100644
--- a/openpype/hosts/flame/api/plugin.py
+++ b/openpype/hosts/flame/api/plugin.py
@@ -751,17 +751,18 @@ class OpenClipSolver(flib.MediaInfoFile):
         self.log.info("Building new openClip")
         self.log.debug(">> self.clip_data: {}".format(self.clip_data))

-        # clip data comming from MediaInfoFile
-        tmp_xml_feeds = self.clip_data.find('tracks/track/feeds')
-        tmp_xml_feeds.set('currentVersion', self.feed_version_name)
-        for tmp_feed in tmp_xml_feeds:
-            tmp_feed.set('vuid', self.feed_version_name)
+        for tmp_xml_track in self.clip_data.iter("track"):
+            tmp_xml_feeds = tmp_xml_track.find('feeds')
+            tmp_xml_feeds.set('currentVersion', self.feed_version_name)

-        # add colorspace if any is set
-        if self.feed_colorspace:
-            
self._add_colorspace(tmp_feed, self.feed_colorspace) + for tmp_feed in tmp_xml_track.iter("feed"): + tmp_feed.set('vuid', self.feed_version_name) - self._clear_handler(tmp_feed) + # add colorspace if any is set + if self.feed_colorspace: + self._add_colorspace(tmp_feed, self.feed_colorspace) + + self._clear_handler(tmp_feed) tmp_xml_versions_obj = self.clip_data.find('versions') tmp_xml_versions_obj.set('currentVersion', self.feed_version_name) @@ -812,6 +813,7 @@ class OpenClipSolver(flib.MediaInfoFile): feed_added = False if not self._feed_exists(out_xml, new_path): + tmp_xml_feed.set('vuid', self.feed_version_name) # Append new temp file feed to .clip source out xml out_track = out_xml.find("tracks/track") From 2502cb8f59fe4ed4a1e3a9c7a9a05db6d141035b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 22 Sep 2022 11:08:15 +0200 Subject: [PATCH 1294/2550] added 'lifetime_data' to instance object --- openpype/pipeline/create/context.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index eaaed39357..070e0fb2c2 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -404,6 +404,9 @@ class CreatedInstance: # Instance members may have actions on them self._members = [] + # Data that can be used for lifetime of object + self._lifetime_data = {} + # Create a copy of passed data to avoid changing them on the fly data = copy.deepcopy(data or {}) # Store original value of passed data @@ -596,6 +599,26 @@ class CreatedInstance: return self + @property + def lifetime_data(self): + """Data stored for lifetime of instance object. + + These data are not stored to scene and will be lost on object + deletion. + + Can be used to store objects. In some host implementations is not + possible to reference to object in scene with some unique identifier + (e.g. node in Fusion.). In that case it is handy to store the object + here. Should be used that way only if instance data are stored on the + node itself. + + Returns: + Dict[str, Any]: Dictionary object where you can store data related + to instance for lifetime of instance object. + """ + + return self._lifetime_data + def changes(self): """Calculate and return changes.""" From 5b26d07624b1369a280a99c8211ad5d5dddbf2bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 22 Sep 2022 11:17:23 +0200 Subject: [PATCH 1295/2550] added 'apply_settings' method to creators so they don't have to override '__init__' --- openpype/pipeline/create/creator_plugins.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index bf2fdd2c5f..5b0532c60a 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -81,6 +81,13 @@ class BaseCreator: # - we may use UI inside processing this attribute should be checked self.headless = headless + self.apply_settings(project_settings, system_settings) + + def apply_settings(self, project_settings, system_settings): + """Method called on initialization of plugin to apply settings.""" + + pass + @property def identifier(self): """Identifier of creator (must be unique). 
From 316a8efeb1f85908b6f07c61dfc7f830898f9bba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 22 Sep 2022 11:17:28 +0200 Subject: [PATCH 1296/2550] changed imports --- openpype/pipeline/create/context.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 070e0fb2c2..9b7b6f8903 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -7,6 +7,10 @@ from uuid import uuid4 from contextlib import contextmanager from openpype.client import get_assets +from openpype.settings import ( + get_system_settings, + get_project_settings +) from openpype.host import INewPublisher from openpype.pipeline import legacy_io from openpype.pipeline.mongodb import ( @@ -20,11 +24,6 @@ from .creator_plugins import ( discover_creator_plugins, ) -from openpype.api import ( - get_system_settings, - get_project_settings -) - UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) @@ -402,6 +401,7 @@ class CreatedInstance: self.creator = creator # Instance members may have actions on them + # TODO implement members logic self._members = [] # Data that can be used for lifetime of object From 09d7617c7344add9c9035cf4fd39615fcb9fbec0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 22 Sep 2022 11:19:33 +0200 Subject: [PATCH 1297/2550] renamed 'INewPublisher' to 'IPublishHost' --- openpype/host/__init__.py | 2 ++ openpype/host/interfaces.py | 10 +++++++--- openpype/hosts/traypublisher/api/pipeline.py | 4 ++-- openpype/pipeline/create/context.py | 4 ++-- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/openpype/host/__init__.py b/openpype/host/__init__.py index 519888fce3..da1237c739 100644 --- a/openpype/host/__init__.py +++ b/openpype/host/__init__.py @@ -5,6 +5,7 @@ from .host import ( from .interfaces import ( IWorkfileHost, ILoadHost, + IPublishHost, INewPublisher, ) @@ -16,6 +17,7 @@ __all__ = ( "IWorkfileHost", "ILoadHost", + "IPublishHost", "INewPublisher", "HostDirmap", diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py index cbf12b0d13..e9008262c8 100644 --- a/openpype/host/interfaces.py +++ b/openpype/host/interfaces.py @@ -282,7 +282,7 @@ class IWorkfileHost: return self.workfile_has_unsaved_changes() -class INewPublisher: +class IPublishHost: """Functions related to new creation system in new publisher. New publisher is not storing information only about each created instance @@ -306,7 +306,7 @@ class INewPublisher: workflow. """ - if isinstance(host, INewPublisher): + if isinstance(host, IPublishHost): return [] required = [ @@ -330,7 +330,7 @@ class INewPublisher: MissingMethodsError: If there are missing methods on host implementation. 
""" - missing = INewPublisher.get_missing_publish_methods(host) + missing = IPublishHost.get_missing_publish_methods(host) if missing: raise MissingMethodsError(host, missing) @@ -368,3 +368,7 @@ class INewPublisher: """ pass + + +class INewPublisher(IPublishHost): + pass diff --git a/openpype/hosts/traypublisher/api/pipeline.py b/openpype/hosts/traypublisher/api/pipeline.py index 2d9db7801e..0a8ddaa343 100644 --- a/openpype/hosts/traypublisher/api/pipeline.py +++ b/openpype/hosts/traypublisher/api/pipeline.py @@ -9,7 +9,7 @@ from openpype.pipeline import ( register_creator_plugin_path, legacy_io, ) -from openpype.host import HostBase, INewPublisher +from openpype.host import HostBase, IPublishHost ROOT_DIR = os.path.dirname(os.path.dirname( @@ -19,7 +19,7 @@ PUBLISH_PATH = os.path.join(ROOT_DIR, "plugins", "publish") CREATE_PATH = os.path.join(ROOT_DIR, "plugins", "create") -class TrayPublisherHost(HostBase, INewPublisher): +class TrayPublisherHost(HostBase, IPublishHost): name = "traypublisher" def install(self): diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 9b7b6f8903..a1b11d08c5 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -11,7 +11,7 @@ from openpype.settings import ( get_system_settings, get_project_settings ) -from openpype.host import INewPublisher +from openpype.host import IPublishHost from openpype.pipeline import legacy_io from openpype.pipeline.mongodb import ( AvalonMongoDB, @@ -794,7 +794,7 @@ class CreateContext: """ missing = set( - INewPublisher.get_missing_publish_methods(host) + IPublishHost.get_missing_publish_methods(host) ) return missing From 6ca895906d1eafff6f8157f8f18aa42294086e6b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 22 Sep 2022 11:26:33 +0200 Subject: [PATCH 1298/2550] added docstring to 'INewPublisher' --- openpype/host/interfaces.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py index e9008262c8..cfd089a0ad 100644 --- a/openpype/host/interfaces.py +++ b/openpype/host/interfaces.py @@ -371,4 +371,14 @@ class IPublishHost: class INewPublisher(IPublishHost): + """Legacy interface replaced by 'IPublishHost'. + + Deprecated: + 'INewPublisher' is replaced by 'IPublishHost' please change your + imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. Deprecated since 3.14.* will be + removed in 3.15.* + """ + pass From c4127208d24d58b048773f16465d6ad208f3a35e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 22 Sep 2022 11:29:01 +0200 Subject: [PATCH 1299/2550] Fix typo in ContainersFilterResult namedtuple `not_foud` -> `not_found` --- openpype/pipeline/load/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 83b904e4a7..363120600c 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -37,7 +37,7 @@ log = logging.getLogger(__name__) ContainersFilterResult = collections.namedtuple( "ContainersFilterResult", - ["latest", "outdated", "not_foud", "invalid"] + ["latest", "outdated", "not_found", "invalid"] ) @@ -808,7 +808,7 @@ def filter_containers(containers, project_name): Categories are 'latest', 'outdated', 'invalid' and 'not_found'. 
The 'lastest' containers are from last version, 'outdated' are not, - 'invalid' are invalid containers (invalid content) and 'not_foud' has + 'invalid' are invalid containers (invalid content) and 'not_found' has some missing entity in database. Args: From 46c2a354f65569b9a7944d8120b41cb3e7536f4f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 22 Sep 2022 11:29:12 +0200 Subject: [PATCH 1300/2550] publish instance has access to lifetime data --- openpype/plugins/publish/collect_from_create_context.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 9236c698ed..b5e3225c34 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -25,7 +25,9 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): for created_instance in create_context.instances: instance_data = created_instance.data_to_store() if instance_data["active"]: - self.create_instance(context, instance_data) + self.create_instance( + context, instance_data, created_instance.lifetime_data + ) # Update global data to context context.data.update(create_context.context_data_to_store()) @@ -37,7 +39,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): legacy_io.Session[key] = value os.environ[key] = value - def create_instance(self, context, in_data): + def create_instance(self, context, in_data, lifetime_data): subset = in_data["subset"] # If instance data already contain families then use it instance_families = in_data.get("families") or [] @@ -56,5 +58,8 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): for key, value in in_data.items(): if key not in instance.data: instance.data[key] = value + + instance.data["lifetimeData"] = lifetime_data + self.log.info("collected instance: {}".format(instance.data)) self.log.info("parsing data: {}".format(in_data)) From 806865e7d7d45639cb5c7948a697490b21680d0a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 22 Sep 2022 11:42:15 +0200 Subject: [PATCH 1301/2550] refactor 'check_inventory_versions' to use 'filter_containers' from load utils --- openpype/hosts/hiero/api/lib.py | 71 ++++++++------------------------- 1 file changed, 16 insertions(+), 55 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 7c27f2ebdc..895e95e0c0 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -13,14 +13,10 @@ import hiero from Qt import QtWidgets -from openpype.client import ( - get_project, - get_versions, - get_last_versions, - get_representations, -) +from openpype.client import get_project from openpype.settings import get_anatomy_settings from openpype.pipeline import legacy_io, Anatomy +from openpype.pipeline.load import filter_containers from openpype.lib import Logger from . import tags @@ -1055,6 +1051,10 @@ def sync_clip_name_to_data_asset(track_items_list): print("asset was changed in clip: {}".format(ti_name)) +def set_track_color(track_item, color): + track_item.source().binItem().setColor(color) + + def check_inventory_versions(track_items=None): """ Actual version color idetifier of Loaded containers @@ -1066,68 +1066,29 @@ def check_inventory_versions(track_items=None): """ from . 
import parse_container - track_item = track_items or get_track_items() + track_items = track_items or get_track_items() # presets clip_color_last = "green" clip_color = "red" - item_with_repre_id = [] - repre_ids = set() + containers = [] # Find all containers and collect it's node and representation ids - for track_item in track_item: + for track_item in track_items: container = parse_container(track_item) if container: - repre_id = container["representation"] - repre_ids.add(repre_id) - item_with_repre_id.append((track_item, repre_id)) + containers.append(container) # Skip if nothing was found - if not repre_ids: + if not containers: return project_name = legacy_io.active_project() - # Find representations based on found containers - repre_docs = get_representations( - project_name, - representation_ids=repre_ids, - fields=["_id", "parent"] - ) - # Store representations by id and collect version ids - repre_docs_by_id = {} - version_ids = set() - for repre_doc in repre_docs: - # Use stringed representation id to match value in containers - repre_id = str(repre_doc["_id"]) - repre_docs_by_id[repre_id] = repre_doc - version_ids.add(repre_doc["parent"]) + filter_result = filter_containers(containers, project_name) + for container in filter_result.latest: + set_track_color(container["_track_item"], clip_color) - version_docs = get_versions( - project_name, version_ids, fields=["_id", "name", "parent"] - ) - # Store versions by id and collect subset ids - version_docs_by_id = {} - subset_ids = set() - for version_doc in version_docs: - version_docs_by_id[version_doc["_id"]] = version_doc - subset_ids.add(version_doc["parent"]) - - # Query last versions based on subset ids - last_versions_by_subset_id = get_last_versions( - project_name, subset_ids=subset_ids, fields=["_id", "parent"] - ) - - for item in item_with_repre_id: - # Some python versions of nuke can't unfold tuple in for loop - track_item, repre_id = item - - repre_doc = repre_docs_by_id[repre_id] - version_doc = version_docs_by_id[repre_doc["parent"]] - last_version_doc = last_versions_by_subset_id[version_doc["parent"]] - # Check if last version is same as current version - if version_doc["_id"] == last_version_doc["_id"]: - track_item.source().binItem().setColor(clip_color_last) - else: - track_item.source().binItem().setColor(clip_color) + for container in filter_result.outdated: + set_track_color(container["_track_item"], clip_color_last) def selection_changed_timeline(event): From 15874c660f823d3c54750820c4c92d78f7ef8313 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Sep 2022 11:46:15 +0200 Subject: [PATCH 1302/2550] OP-3938 - do not integrate thumbnail Storing thumbnail representation in the DB doesn't make sense. There will be eventually pre-integrator that could allow this with profiles usage. 
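A minimal sketch of the intent, assuming the publish pipeline skips
representations tagged "delete" when writing to the database and removes
their staged files afterwards (the filter below is an illustration, not the
integrator's actual code; file names are placeholders):

    # representation produced by the extractor (matches the diff below)
    thumbnail_repre = {
        "name": "thumbnail",
        "ext": "jpg",
        "files": "thumbnail.jpg",
        "stagingDir": staging_dir,
        "tags": ["thumbnail", "delete"],
    }

    # hypothetical integration filter - only untagged representations
    # become representation documents
    to_integrate = [
        repre for repre in instance.data["representations"]
        if "delete" not in (repre.get("tags") or [])
    ]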
--- openpype/hosts/photoshop/plugins/publish/extract_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 566e723457..0f7dbd3f12 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -164,7 +164,7 @@ class ExtractReview(publish.Extractor): "ext": "jpg", "files": os.path.basename(thumbnail_path), "stagingDir": staging_dir, - "tags": ["thumbnail"] + "tags": ["thumbnail", "delete"] }) def _check_and_resize(self, processed_img_names, source_files_pattern, From fb121844300e904038d33ad4b34989a6244b6a6c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Sep 2022 12:17:40 +0200 Subject: [PATCH 1303/2550] updating multichannel openclip --- openpype/hosts/flame/api/plugin.py | 105 +++++++++++++++++++---------- 1 file changed, 68 insertions(+), 37 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index a76e5ccc84..9f09fa13ce 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -775,6 +775,15 @@ class OpenClipSolver(flib.MediaInfoFile): self.write_clip_data_to_file(self.out_file, self.clip_data) + def _get_xml_track_obj_by_uid(self, xml_data, uid): + # loop all tracks of input xml data + for xml_track in xml_data.iter("track"): + track_uid = xml_track.get("uid") + + # get matching uids + if uid == track_uid: + return xml_track + def _update_open_clip(self): self.log.info("Updating openClip ..") @@ -784,53 +793,75 @@ class OpenClipSolver(flib.MediaInfoFile): self.log.debug(">> out_xml: {}".format(out_xml)) self.log.debug(">> self.clip_data: {}".format(self.clip_data)) - # Get new feed from tmp file - tmp_xml_feed = self.clip_data.find('tracks/track/feeds/feed') + # loop tmp tracks + updated_any = [] + for tmp_xml_track in self.clip_data.iter("track"): + # get tmp track uid + tmp_track_uid = tmp_xml_track.get("uid") + # get out data track by uid + out_track_element = self._get_xml_track_obj_by_uid( + out_xml, tmp_track_uid) - self._clear_handler(tmp_xml_feed) + # loop tmp feeds + for tmp_xml_feed in tmp_xml_track.iter("feed"): + new_path_obj = tmp_xml_feed.find( + "spans/span/path") + new_path = new_path_obj.text - # update fps from MediaInfoFile class - if self.fps: - tmp_feed_fps_obj = tmp_xml_feed.find( - "startTimecode/rate") - tmp_feed_fps_obj.text = str(self.fps) + # check if feed path already exists in track's feeds + if ( + out_track_element + and not self._feed_exists(out_track_element, new_path) + ): + continue - # update start_frame from MediaInfoFile class - if self.start_frame: - tmp_feed_nb_ticks_obj = tmp_xml_feed.find( - "startTimecode/nbTicks") - tmp_feed_nb_ticks_obj.text = str(self.start_frame) + # rename versions on feeds + tmp_xml_feed.set('vuid', self.feed_version_name) + self._clear_handler(tmp_xml_feed) - # update drop_mode from MediaInfoFile class - if self.drop_mode: - tmp_feed_drop_mode_obj = tmp_xml_feed.find( - "startTimecode/dropMode") - tmp_feed_drop_mode_obj.text = str(self.drop_mode) + # update fps from MediaInfoFile class + if self.fps: + tmp_feed_fps_obj = tmp_xml_feed.find( + "startTimecode/rate") + tmp_feed_fps_obj.text = str(self.fps) - new_path_obj = tmp_xml_feed.find( - "spans/span/path") - new_path = new_path_obj.text + # update start_frame from MediaInfoFile class + if self.start_frame: + tmp_feed_nb_ticks_obj = tmp_xml_feed.find( + 
"startTimecode/nbTicks") + tmp_feed_nb_ticks_obj.text = str(self.start_frame) - feed_added = False - if not self._feed_exists(out_xml, new_path): + # update drop_mode from MediaInfoFile class + if self.drop_mode: + tmp_feed_drop_mode_obj = tmp_xml_feed.find( + "startTimecode/dropMode") + tmp_feed_drop_mode_obj.text = str(self.drop_mode) - tmp_xml_feed.set('vuid', self.feed_version_name) - # Append new temp file feed to .clip source out xml - out_track = out_xml.find("tracks/track") - # add colorspace if any is set - if self.feed_colorspace: - self._add_colorspace(tmp_xml_feed, self.feed_colorspace) + # add colorspace if any is set + if self.feed_colorspace: + self._add_colorspace(tmp_xml_feed, self.feed_colorspace) - out_feeds = out_track.find('feeds') - out_feeds.set('currentVersion', self.feed_version_name) - out_feeds.append(tmp_xml_feed) + # then append/update feed to correct track in output + if out_track_element: + # update already present track + out_feeds = out_track_element.find('feeds') + out_feeds.set('currentVersion', self.feed_version_name) + out_feeds.append(tmp_xml_feed) - self.log.info( - "Appending new feed: {}".format( - self.feed_version_name)) - feed_added = True + self.log.info( + "Appending new feed: {}".format( + self.feed_version_name)) + else: + # create new track as it doesnt exists yet + # set current version to feeds on tmp + tmp_xml_feeds = tmp_xml_track.find('feeds') + tmp_xml_feeds.set('currentVersion', self.feed_version_name) + out_tracks = out_xml.find("tracks") + out_tracks.append(tmp_xml_track) - if feed_added: + updated_any.append(True) + + if any(updated_any): # Append vUID to versions out_xml_versions_obj = out_xml.find('versions') out_xml_versions_obj.set( From 1bc7fbf1e11bd3dad66b8fb841dbbbac1a86d5fe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Sep 2022 12:28:03 +0200 Subject: [PATCH 1304/2550] OP-3938 - added outputName to thumbnail representation In case of integrating thumbnail, 'outputName' value will be used in templeate as {output} placeholder. Without it integrated thumbnail would overwrite integrated review high res file. 
--- openpype/hosts/photoshop/plugins/publish/extract_review.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 0f7dbd3f12..d84e709c06 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -162,6 +162,7 @@ class ExtractReview(publish.Extractor): instance.data["representations"].append({ "name": "thumbnail", "ext": "jpg", + "outputName": "thumb", "files": os.path.basename(thumbnail_path), "stagingDir": staging_dir, "tags": ["thumbnail", "delete"] From fa3409d30258ba9dcbaec48cec0995e494560778 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Sep 2022 12:37:49 +0200 Subject: [PATCH 1305/2550] fix logging in plugin --- openpype/hosts/flame/api/lib.py | 6 +++--- openpype/hosts/flame/api/plugin.py | 8 ++++++-- openpype/hosts/flame/plugins/load/load_clip.py | 6 +++--- openpype/hosts/flame/plugins/load/load_clip_batch.py | 5 ++--- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 94c46fe937..b7f7b24e51 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -766,11 +766,11 @@ class MediaInfoFile(object): _drop_mode = None _file_pattern = None - def __init__(self, path, **kwargs): + def __init__(self, path, logger=None): # replace log if any - if kwargs.get("logger"): - self.log = kwargs["logger"] + if logger: + self.log = logger # test if `dl_get_media_info` paht exists self._validate_media_script_path() diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 9f09fa13ce..205acf51b0 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -694,16 +694,20 @@ class OpenClipSolver(flib.MediaInfoFile): log = log - def __init__(self, openclip_file_path, feed_data): + def __init__(self, openclip_file_path, feed_data, logger=None): self.out_file = openclip_file_path + # replace log if any + if logger: + self.log = logger + # new feed variables: feed_path = feed_data.pop("path") # initialize parent class super(OpenClipSolver, self).__init__( feed_path, - **feed_data + logger=logger ) # get other metadata diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index b12f2f9690..0843dde76a 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -4,6 +4,7 @@ from pprint import pformat import openpype.hosts.flame.api as opfapi from openpype.lib import StringTemplate + class LoadClip(opfapi.ClipLoader): """Load a subset to timeline as clip @@ -60,8 +61,6 @@ class LoadClip(opfapi.ClipLoader): "path": self.fname.replace("\\", "/"), "colorspace": colorspace, "version": "v{:0>3}".format(version_name), - "logger": self.log - } self.log.debug(pformat( loading_context @@ -69,7 +68,8 @@ class LoadClip(opfapi.ClipLoader): self.log.debug(openclip_path) # make openpype clip file - opfapi.OpenClipSolver(openclip_path, loading_context).make() + opfapi.OpenClipSolver( + openclip_path, loading_context, logger=self.log).make() # prepare Reel group in actual desktop opc = self._get_clip( diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index fb4a3dc6e9..3b049b861b 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ 
b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -64,8 +64,6 @@ class LoadClipBatch(opfapi.ClipLoader): "path": self.fname.replace("\\", "/"), "colorspace": colorspace, "version": "v{:0>3}".format(version_name), - "logger": self.log - } self.log.debug(pformat( loading_context @@ -73,7 +71,8 @@ class LoadClipBatch(opfapi.ClipLoader): self.log.debug(openclip_path) # make openpype clip file - opfapi.OpenClipSolver(openclip_path, loading_context).make() + opfapi.OpenClipSolver( + openclip_path, loading_context, logger=self.log).make() # prepare Reel group in actual desktop opc = self._get_clip( From 3ac038760645bb76c5c60c473b7aa4dc6880e292 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Sep 2022 12:45:46 +0200 Subject: [PATCH 1306/2550] fix future warning add logging from lib --- openpype/hosts/flame/api/plugin.py | 2 +- openpype/hosts/flame/plugins/load/load_clip_batch.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 205acf51b0..59c8dab631 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -814,7 +814,7 @@ class OpenClipSolver(flib.MediaInfoFile): # check if feed path already exists in track's feeds if ( - out_track_element + out_track_element is not None and not self._feed_exists(out_track_element, new_path) ): continue diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 3b049b861b..c17e060f5b 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -2,7 +2,7 @@ import os import flame from pprint import pformat import openpype.hosts.flame.api as opfapi -from openpype.lib import StringTemplate +from openpype.lib import StringTemplate, Logger class LoadClipBatch(opfapi.ClipLoader): @@ -24,6 +24,8 @@ class LoadClipBatch(opfapi.ClipLoader): reel_name = "OP_LoadedReel" clip_name_template = "{asset}_{subset}<_{output}>" + log = Logger.get_logger(__file__) + def load(self, context, name, namespace, options): # get flame objects From 7e35fdcdedf583c8a58ff9cbb93c9067189149dd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Sep 2022 12:52:08 +0200 Subject: [PATCH 1307/2550] abstracting logger --- openpype/hosts/flame/api/plugin.py | 1 + openpype/hosts/flame/plugins/load/load_clip_batch.py | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 59c8dab631..a3f2f5f2fc 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -678,6 +678,7 @@ class ClipLoader(LoaderPlugin): `update` logic. 
""" + log = log options = [ qargparse.Boolean( diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index c17e060f5b..3b049b861b 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -2,7 +2,7 @@ import os import flame from pprint import pformat import openpype.hosts.flame.api as opfapi -from openpype.lib import StringTemplate, Logger +from openpype.lib import StringTemplate class LoadClipBatch(opfapi.ClipLoader): @@ -24,8 +24,6 @@ class LoadClipBatch(opfapi.ClipLoader): reel_name = "OP_LoadedReel" clip_name_template = "{asset}_{subset}<_{output}>" - log = Logger.get_logger(__file__) - def load(self, context, name, namespace, options): # get flame objects From 7063d21450607bf07bd430af06fef517044338a3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Sep 2022 12:58:20 +0200 Subject: [PATCH 1308/2550] adding logging --- openpype/hosts/flame/api/plugin.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index a3f2f5f2fc..044e86b17f 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -784,6 +784,8 @@ class OpenClipSolver(flib.MediaInfoFile): # loop all tracks of input xml data for xml_track in xml_data.iter("track"): track_uid = xml_track.get("uid") + self.log.debug( + ">> track_uid:uid: {}:{}".format(track_uid, uid)) # get matching uids if uid == track_uid: @@ -803,9 +805,13 @@ class OpenClipSolver(flib.MediaInfoFile): for tmp_xml_track in self.clip_data.iter("track"): # get tmp track uid tmp_track_uid = tmp_xml_track.get("uid") + self.log.debug(">> tmp_track_uid: {}".format(tmp_track_uid)) + # get out data track by uid out_track_element = self._get_xml_track_obj_by_uid( out_xml, tmp_track_uid) + self.log.debug( + ">> out_track_element: {}".format(out_track_element)) # loop tmp feeds for tmp_xml_feed in tmp_xml_track.iter("feed"): @@ -848,6 +854,7 @@ class OpenClipSolver(flib.MediaInfoFile): # then append/update feed to correct track in output if out_track_element: + self.log.debug("updating track element ..") # update already present track out_feeds = out_track_element.find('feeds') out_feeds.set('currentVersion', self.feed_version_name) @@ -857,6 +864,7 @@ class OpenClipSolver(flib.MediaInfoFile): "Appending new feed: {}".format( self.feed_version_name)) else: + self.log.debug("adding new track element ..") # create new track as it doesnt exists yet # set current version to feeds on tmp tmp_xml_feeds = tmp_xml_track.find('feeds') From ca7300a95fd1db17ef6656c6884be7157627c8f8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Sep 2022 13:41:01 +0200 Subject: [PATCH 1309/2550] fixing logic --- openpype/hosts/flame/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 044e86b17f..6ad33f16a3 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -822,7 +822,7 @@ class OpenClipSolver(flib.MediaInfoFile): # check if feed path already exists in track's feeds if ( out_track_element is not None - and not self._feed_exists(out_track_element, new_path) + and self._feed_exists(out_track_element, new_path) ): continue From 8d9c3f2b5e217fda216f3af3901736ee1ec81e40 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Sep 2022 14:51:29 +0200 Subject: [PATCH 1310/2550] OP-3938 - extracted image 
and video extensions to lib.transcoding

Extracted to lib for better reusability.
---
 openpype/hosts/traypublisher/api/plugin.py | 22 ++--------------------
 openpype/lib/transcoding.py                | 22 ++++++++++++++++++++++
 2 files changed, 24 insertions(+), 20 deletions(-)

diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py
index a3eead51c8..3bf1638651 100644
--- a/openpype/hosts/traypublisher/api/plugin.py
+++ b/openpype/hosts/traypublisher/api/plugin.py
@@ -11,27 +11,9 @@ from .pipeline import (
     remove_instances,
     HostContext,
 )
+from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
+

-IMAGE_EXTENSIONS = [
-    ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal",
-    ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits",
-    ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer",
-    ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2",
-    ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr",
-    ".jpeg", ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd",
-    ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf",
-    ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras",
-    ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep",
-    ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf",
-    ".xpm", ".xwd"
-]
-VIDEO_EXTENSIONS = [
-    ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b",
-    ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v",
-    ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg",
-    ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb",
-    ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv"
-]
 REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS


diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py
index 51e34312f2..ce556b1d50 100644
--- a/openpype/lib/transcoding.py
+++ b/openpype/lib/transcoding.py
@@ -42,6 +42,28 @@ XML_CHAR_REF_REGEX_HEX = re.compile(r"&#x?[0-9a-fA-F]+;")
 # Regex to parse array attributes
 ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$")

+IMAGE_EXTENSIONS = [
+    ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal",
+    ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits",
+    ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer",
+    ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2",
+    ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr",
+    ".jpeg", ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd",
+    ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf",
+    ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras",
+    ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep",
+    ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf",
+    ".xpm", ".xwd"
+]
+
+VIDEO_EXTENSIONS = [
+    ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b",
+    ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v",
+    ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg",
+    ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb",
+    ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv"
+]
+

 def get_transcode_temp_directory():
     """Creates temporary folder for transcoding.

From 57cc55b32dd597bb48a4d7f3a603bd8445e92e66 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Thu, 22 Sep 2022 15:22:58 +0200
Subject: [PATCH 1311/2550] OP-3938 - added metadata method for image
 representation

Different metadata are needed for video or image repre.
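A minimal sketch of the two payload shapes, following the code in the diff
below (values are placeholders; the video keys reflect what ftrack review
components typically expect and are an assumption here):

    # image component metadata ("ftrackreview-image")
    image_meta = {"width": 1920, "height": 1080, "format": "image"}

    # video component metadata ("ftrackreview-mp4")
    video_meta = {"frameIn": 0, "frameOut": 100, "frameRate": 25.0}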
--- .../publish/integrate_ftrack_instances.py | 108 ++++++++++++------ 1 file changed, 74 insertions(+), 34 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 5ff75e7060..78b9d56a1b 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -9,7 +9,7 @@ from openpype.lib.transcoding import ( convert_ffprobe_fps_to_float, ) from openpype.lib.profiles_filtering import filter_profiles - +from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS class IntegrateFtrackInstance(pyblish.api.InstancePlugin): """Collect ftrack component data (not integrate yet). @@ -121,6 +121,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): review_representations = [] thumbnail_representations = [] other_representations = [] + has_movie_review = False for repre in instance_repres: self.log.debug("Representation {}".format(repre)) repre_tags = repre.get("tags") or [] @@ -129,6 +130,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): elif "ftrackreview" in repre_tags: review_representations.append(repre) + if repre["ext"] in VIDEO_EXTENSIONS: + has_movie_review = True else: other_representations.append(repre) @@ -177,34 +180,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): component_list.append(thumbnail_item) if first_thumbnail_component is not None: - width = first_thumbnail_component_repre.get("width") - height = first_thumbnail_component_repre.get("height") - if not width or not height: - component_path = first_thumbnail_component["component_path"] - streams = [] - try: - streams = get_ffprobe_streams(component_path) - except Exception: - self.log.debug(( - "Failed to retrieve information about intput {}" - ).format(component_path)) + metadata = self._prepare_image_component_metadata( + first_thumbnail_component_repre, + first_thumbnail_component["component_path"] + ) - for stream in streams: - if "width" in stream and "height" in stream: - width = stream["width"] - height = stream["height"] - break - - if width and height: + if metadata: component_data = first_thumbnail_component["component_data"] component_data["name"] = "ftrackreview-image" - component_data["metadata"] = { - "ftr_meta": json.dumps({ - "width": width, - "height": height, - "format": "image" - }) - } + component_data["metadata"] = metadata # Create review components # Change asset name of each new component for review @@ -213,6 +197,11 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): extended_asset_name = "" multiple_reviewable = len(review_representations) > 1 for repre in review_representations: + if repre["ext"] in IMAGE_EXTENSIONS and has_movie_review: + self.log.debug("Movie repre has priority " + "from {}".format(repre)) + continue + repre_path = self._get_repre_path(instance, repre, False) if not repre_path: self.log.warning( @@ -261,12 +250,22 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Change location review_item["component_path"] = repre_path # Change component data - review_item["component_data"] = { - # Default component name is "main". 
- "name": "ftrackreview-mp4", - "metadata": self._prepare_component_metadata( + + if repre["ext"] in VIDEO_EXTENSIONS: + review_type = "ftrackreview-mp4" + metadata = self._prepare_video_component_metadata( instance, repre, repre_path, True ) + else: + review_type = "ftrackreview-image" + metadata = self._prepare_image_component_metadata( + repre, repre_path + ) + + review_item["component_data"] = { + # Default component name is "main". + "name": review_type, + "metadata": metadata } if is_first_review_repre: @@ -422,7 +421,18 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): return matching_profile["status"] or None def _prepare_component_metadata( - self, instance, repre, component_path, is_review + self, instance, repre, component_path, is_review=None + ): + if repre["ext"] in VIDEO_EXTENSIONS: + return self._prepare_video_component_metadata(instance, repre, + component_path, + is_review) + else: + return self._prepare_image_component_metadata(repre, + component_path) + + def _prepare_video_component_metadata( + self, instance, repre, component_path, is_review=None ): metadata = {} if "openpype_version" in self.additional_metadata_keys: @@ -435,8 +445,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): streams = get_ffprobe_streams(component_path) except Exception: self.log.debug(( - "Failed to retrieve information about intput {}" - ).format(component_path)) + "Failed to retrieve information about input {}" + ).format(component_path)) # Find video streams video_streams = [ @@ -482,7 +492,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): except ValueError: self.log.warning(( "Could not convert ffprobe fps to float \"{}\"" - ).format(input_framerate)) + ).format(input_framerate)) continue stream_width = tmp_width @@ -554,3 +564,33 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "frameRate": float(fps) }) return metadata + + def _prepare_image_component_metadata(self, repre, component_path): + width = repre.get("width") + height = repre.get("height") + if not width or not height: + streams = [] + try: + streams = get_ffprobe_streams(component_path) + except Exception: + self.log.debug(( + "Failed to retrieve information about intput {}" + ).format(component_path)) + + for stream in streams: + if "width" in stream and "height" in stream: + width = stream["width"] + height = stream["height"] + break + + metadata = {} + if width and height: + metadata = { + "ftr_meta": json.dumps({ + "width": width, + "height": height, + "format": "image" + }) + } + + return metadata From 50477a4543bf611f2e991c06bb1cedc1206f8127 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Sep 2022 19:26:36 +0200 Subject: [PATCH 1312/2550] OP-3938 - do not create component for not integrated thumbnails --- .../publish/integrate_ftrack_instances.py | 34 +++++++++++++------ 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 78b9d56a1b..231a3a7816 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -11,6 +11,7 @@ from openpype.lib.transcoding import ( from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS + class IntegrateFtrackInstance(pyblish.api.InstancePlugin): """Collect ftrack component data (not integrate yet). 
@@ -130,7 +131,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): elif "ftrackreview" in repre_tags: review_representations.append(repre) - if repre["ext"] in VIDEO_EXTENSIONS: + if self._is_repre_video(repre): has_movie_review = True else: @@ -150,6 +151,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): first_thumbnail_component = None first_thumbnail_component_repre = None for repre in thumbnail_representations: + if review_representations and not has_movie_review: + break repre_path = self._get_repre_path(instance, repre, False) if not repre_path: self.log.warning( @@ -166,7 +169,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): thumbnail_item["thumbnail"] = True # Create copy of item before setting location - src_components_to_add.append(copy.deepcopy(thumbnail_item)) + if "delete" not in repre["tags"]: + src_components_to_add.append(copy.deepcopy(thumbnail_item)) # Create copy of first thumbnail if first_thumbnail_component is None: first_thumbnail_component_repre = repre @@ -187,9 +191,13 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): if metadata: component_data = first_thumbnail_component["component_data"] - component_data["name"] = "ftrackreview-image" component_data["metadata"] = metadata + if review_representations: + component_data["name"] = "thumbnail" + else: + component_data["name"] = "ftrackreview-image" + # Create review components # Change asset name of each new component for review is_first_review_repre = True @@ -197,7 +205,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): extended_asset_name = "" multiple_reviewable = len(review_representations) > 1 for repre in review_representations: - if repre["ext"] in IMAGE_EXTENSIONS and has_movie_review: + if not self._is_repre_video(repre) and has_movie_review: self.log.debug("Movie repre has priority " "from {}".format(repre)) continue @@ -251,20 +259,21 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): review_item["component_path"] = repre_path # Change component data - if repre["ext"] in VIDEO_EXTENSIONS: - review_type = "ftrackreview-mp4" + if self._is_repre_video(repre): + component_name = "ftrackreview-mp4" metadata = self._prepare_video_component_metadata( instance, repre, repre_path, True ) else: - review_type = "ftrackreview-image" + component_name = "ftrackreview-image" metadata = self._prepare_image_component_metadata( repre, repre_path ) + review_item["thumbnail"] = True review_item["component_data"] = { # Default component name is "main". 
- "name": review_type, + "name": component_name, "metadata": metadata } @@ -275,7 +284,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): not_first_components.append(review_item) # Create copy of item before setting location - src_components_to_add.append(copy.deepcopy(review_item)) + if "delete" not in repre["tags"]: + src_components_to_add.append(copy.deepcopy(review_item)) # Set location review_item["component_location_name"] = ( @@ -423,7 +433,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): def _prepare_component_metadata( self, instance, repre, component_path, is_review=None ): - if repre["ext"] in VIDEO_EXTENSIONS: + if self._is_repre_video(repre): return self._prepare_video_component_metadata(instance, repre, component_path, is_review) @@ -594,3 +604,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): } return metadata + + def _is_repre_video(self, repre): + repre_ext = ".{}".format(repre["ext"]) + return repre_ext in VIDEO_EXTENSIONS From 98c1e58d40d4b83639fe73ca2e2e3c93563ce3b0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 11:38:29 +0200 Subject: [PATCH 1313/2550] removed hero version filtering --- openpype/client/entity_links.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/client/entity_links.py b/openpype/client/entity_links.py index 66214f469c..dea3654f8e 100644 --- a/openpype/client/entity_links.py +++ b/openpype/client/entity_links.py @@ -132,7 +132,9 @@ def get_linked_representation_id( match = { "_id": version_id, - "type": {"$in": ["version", "hero_version"]} + # Links are not stored to hero versions at this moment so filter + # is limited to just versions + "type": "version" } graph_lookup = { From 1b0bc2eab648803a54e4a111933d837269f9293d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 11:38:35 +0200 Subject: [PATCH 1314/2550] safe data access --- openpype/client/entity_links.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/client/entity_links.py b/openpype/client/entity_links.py index dea3654f8e..ac92597e66 100644 --- a/openpype/client/entity_links.py +++ b/openpype/client/entity_links.py @@ -189,7 +189,7 @@ def _process_referenced_pipeline_result(result, link_type): referenced_version_ids = set() correctly_linked_ids = set() for item in result: - input_links = item["data"].get("inputLinks") + input_links = item.get("data", {}).get("inputLinks") if not input_links: continue @@ -205,7 +205,7 @@ def _process_referenced_pipeline_result(result, link_type): continue for output in sorted(outputs_recursive, key=lambda o: o["depth"]): - output_links = output["data"].get("inputLinks") + output_links = output.get("data", {}).get("inputLinks") if not output_links: continue From 54d5724d6ac1ad7478e4df61de10e33833c1dba9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 23 Sep 2022 11:59:06 +0200 Subject: [PATCH 1315/2550] Fix log formatting (global file format and aov file format were previously swapped) --- openpype/hosts/maya/plugins/publish/validate_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 7f0985f69b..bfb72c7012 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -201,7 +201,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): labels = 
get_redshift_image_format_labels() cls.log.error( "AOV file format {} does not match global file format " - "{}".format(labels[default_ext], labels[aov_ext]) + "{}".format(labels[aov_ext], labels[default_ext]) ) invalid = True From da7d4cb1d7d792ae4c2398d64bdd3e1d9036d81c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Sep 2022 12:57:17 +0200 Subject: [PATCH 1316/2550] OP-3682 - updates to match to v4 payload Parsing should match payload from localhost:5000/api/addons?details=1 --- .../distribution/addon_distribution.py | 29 ++++- .../tests/test_addon_distributtion.py | 104 +++++++++++++----- 2 files changed, 98 insertions(+), 35 deletions(-) diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py index ad17a831d8..fec8cb762b 100644 --- a/common/openpype_common/distribution/addon_distribution.py +++ b/common/openpype_common/distribution/addon_distribution.py @@ -44,12 +44,18 @@ class WebAddonSource(AddonSource): url = attr.ib(default=None) +@attr.s +class VersionData(object): + version_data = attr.ib(default=None) + + @attr.s class AddonInfo(object): """Object matching json payload from Server""" name = attr.ib() version = attr.ib() - sources = attr.ib(default=attr.Factory(list)) + title = attr.ib(default=None) + sources = attr.ib(default=attr.Factory(dict)) hash = attr.ib(default=None) description = attr.ib(default=None) license = attr.ib(default=None) @@ -58,7 +64,16 @@ class AddonInfo(object): @classmethod def from_dict(cls, data): sources = [] - for source in data.get("sources", []): + + production_version = data.get("productionVersion") + if not production_version: + return + + # server payload contains info about all versions + # active addon must have 'productionVersion' and matching version info + version_data = data.get("versions", {})[production_version] + + for source in version_data.get("clientSourceInfo", []): if source.get("type") == UrlType.FILESYSTEM.value: source_addon = LocalAddonSource(type=source["type"], path=source["path"]) @@ -69,10 +84,11 @@ class AddonInfo(object): sources.append(source_addon) return cls(name=data.get("name"), - version=data.get("version"), + version=production_version, + sources=sources, hash=data.get("hash"), description=data.get("description"), - sources=sources, + title=data.get("title"), license=data.get("license"), authors=data.get("authors")) @@ -228,8 +244,9 @@ def update_addon_state(addon_infos, destination_folder, factory, for source in addon.sources: download_states[full_name] = UpdateState.FAILED.value try: - downloader = factory.get_downloader(source["type"]) - zip_file_path = downloader.download(source, addon_dest) + downloader = factory.get_downloader(source.type) + zip_file_path = downloader.download(attr.asdict(source), + addon_dest) downloader.check_hash(zip_file_path, addon.hash) downloader.unzip(zip_file_path, addon_dest) download_states[full_name] = UpdateState.UPDATED.value diff --git a/common/openpype_common/distribution/tests/test_addon_distributtion.py b/common/openpype_common/distribution/tests/test_addon_distributtion.py index faf4e01e22..46bcd276cd 100644 --- a/common/openpype_common/distribution/tests/test_addon_distributtion.py +++ b/common/openpype_common/distribution/tests/test_addon_distributtion.py @@ -35,23 +35,50 @@ def temp_folder(): @pytest.fixture def sample_addon_info(): addon_info = { - "name": "openpype_slack", - "version": "1.0.0", - "sources": [ - { - "type": "http", - "url": 
"https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" # noqa - }, - { - "type": "filesystem", - "path": { - "windows": ["P:/sources/some_file.zip", "W:/sources/some_file.zip"], # noqa - "linux": ["/mnt/srv/sources/some_file.zip"], - "darwin": ["/Volumes/srv/sources/some_file.zip"] - } + "versions": { + "1.0.0": { + "clientPyproject": { + "tool": { + "poetry": { + "dependencies": { + "nxtools": "^1.6", + "orjson": "^3.6.7", + "typer": "^0.4.1", + "email-validator": "^1.1.3", + "python": "^3.10", + "fastapi": "^0.73.0" + } + } + } + }, + "hasSettings": True, + "clientSourceInfo": [ + { + "type": "http", + "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" # noqa + }, + { + "type": "filesystem", + "path": { + "windows": ["P:/sources/some_file.zip", + "W:/sources/some_file.zip"], # noqa + "linux": ["/mnt/srv/sources/some_file.zip"], + "darwin": ["/Volumes/srv/sources/some_file.zip"] + } + } + ], + "frontendScopes": { + "project": { + "sidebar": "hierarchy" + } + } } - ], - "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" # noqa + }, + "description": "", + "title": "Slack addon", + "name": "openpype_slack", + "productionVersion": "1.0.0", + "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" # noqa } yield addon_info @@ -73,16 +100,39 @@ def test_get_downloader(printer, addon_downloader): def test_addon_info(printer, sample_addon_info): - valid_minimum = {"name": "openpype_slack", "version": "1.0.0"} + """Tests parsing of expected payload from v4 server into AadonInfo.""" + valid_minimum = { + "name": "openpype_slack", + "productionVersion": "1.0.0", + "versions": { + "1.0.0": { + "clientSourceInfo": [ + { + "type": "filesystem", + "path": { + "windows": [ + "P:/sources/some_file.zip", + "W:/sources/some_file.zip"], + "linux": [ + "/mnt/srv/sources/some_file.zip"], + "darwin": [ + "/Volumes/srv/sources/some_file.zip"] # noqa + } + } + ] + } + } + } assert AddonInfo.from_dict(valid_minimum), "Missing required fields" - assert AddonInfo(name=valid_minimum["name"], - version=valid_minimum["version"]), \ - "Missing required fields" - with pytest.raises(TypeError): - # TODO should be probably implemented - assert AddonInfo(valid_minimum), "Wrong argument format" + valid_minimum["versions"].pop("1.0.0") + with pytest.raises(KeyError): + assert not AddonInfo.from_dict(valid_minimum), "Must fail without version data" # noqa + + valid_minimum.pop("productionVersion") + assert not AddonInfo.from_dict( + valid_minimum), "none if not productionVersion" # noqa addon = AddonInfo.from_dict(sample_addon_info) assert addon, "Should be created" @@ -95,15 +145,11 @@ def test_addon_info(printer, sample_addon_info): addon_as_dict = attr.asdict(addon) assert addon_as_dict["name"], "Dict approach should work" - with pytest.raises(TypeError): - # TODO should be probably implemented as . 
not dict - first_source = addon.sources[0] - assert first_source["type"] == "http", "Not implemented" - def test_update_addon_state(printer, sample_addon_info, temp_folder, addon_downloader): - addon_info = AddonInfo(**sample_addon_info) + """Tests possible cases of addon update.""" + addon_info = AddonInfo.from_dict(sample_addon_info) orig_hash = addon_info.hash addon_info.hash = "brokenhash" From c659dcfce6393972aa0443f03f67950b1e9fdc45 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Sep 2022 13:05:50 +0200 Subject: [PATCH 1317/2550] OP-3682 - extracted AddonInfo to separate file Parsing of v4 payload info will be required in Dependencies tool also. --- .../distribution/addon_distribution.py | 78 +----------------- .../distribution/addon_info.py | 80 +++++++++++++++++++ .../tests/test_addon_distributtion.py | 2 +- 3 files changed, 82 insertions(+), 78 deletions(-) create mode 100644 common/openpype_common/distribution/addon_info.py diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py index fec8cb762b..5e48639dec 100644 --- a/common/openpype_common/distribution/addon_distribution.py +++ b/common/openpype_common/distribution/addon_distribution.py @@ -8,12 +8,7 @@ import platform import shutil from .file_handler import RemoteFileHandler - - -class UrlType(Enum): - HTTP = "http" - GIT = "git" - FILESYSTEM = "filesystem" +from .addon_info import AddonInfo class UpdateState(Enum): @@ -22,77 +17,6 @@ class UpdateState(Enum): FAILED = "failed" -@attr.s -class MultiPlatformPath(object): - windows = attr.ib(default=None) - linux = attr.ib(default=None) - darwin = attr.ib(default=None) - - -@attr.s -class AddonSource(object): - type = attr.ib() - - -@attr.s -class LocalAddonSource(AddonSource): - path = attr.ib(default=attr.Factory(MultiPlatformPath)) - - -@attr.s -class WebAddonSource(AddonSource): - url = attr.ib(default=None) - - -@attr.s -class VersionData(object): - version_data = attr.ib(default=None) - - -@attr.s -class AddonInfo(object): - """Object matching json payload from Server""" - name = attr.ib() - version = attr.ib() - title = attr.ib(default=None) - sources = attr.ib(default=attr.Factory(dict)) - hash = attr.ib(default=None) - description = attr.ib(default=None) - license = attr.ib(default=None) - authors = attr.ib(default=None) - - @classmethod - def from_dict(cls, data): - sources = [] - - production_version = data.get("productionVersion") - if not production_version: - return - - # server payload contains info about all versions - # active addon must have 'productionVersion' and matching version info - version_data = data.get("versions", {})[production_version] - - for source in version_data.get("clientSourceInfo", []): - if source.get("type") == UrlType.FILESYSTEM.value: - source_addon = LocalAddonSource(type=source["type"], - path=source["path"]) - if source.get("type") == UrlType.HTTP.value: - source_addon = WebAddonSource(type=source["type"], - url=source["url"]) - - sources.append(source_addon) - - return cls(name=data.get("name"), - version=production_version, - sources=sources, - hash=data.get("hash"), - description=data.get("description"), - title=data.get("title"), - license=data.get("license"), - authors=data.get("authors")) - - class AddonDownloader: log = logging.getLogger(__name__) diff --git a/common/openpype_common/distribution/addon_info.py b/common/openpype_common/distribution/addon_info.py new file mode 100644 index 0000000000..00ece11f3b --- /dev/null +++ 
b/common/openpype_common/distribution/addon_info.py @@ -0,0 +1,80 @@ +import attr +from enum import Enum + + +class UrlType(Enum): + HTTP = "http" + GIT = "git" + FILESYSTEM = "filesystem" + + +@attr.s +class MultiPlatformPath(object): + windows = attr.ib(default=None) + linux = attr.ib(default=None) + darwin = attr.ib(default=None) + + +@attr.s +class AddonSource(object): + type = attr.ib() + + +@attr.s +class LocalAddonSource(AddonSource): + path = attr.ib(default=attr.Factory(MultiPlatformPath)) + + +@attr.s +class WebAddonSource(AddonSource): + url = attr.ib(default=None) + + +@attr.s +class VersionData(object): + version_data = attr.ib(default=None) + + +@attr.s +class AddonInfo(object): + """Object matching json payload from Server""" + name = attr.ib() + version = attr.ib() + title = attr.ib(default=None) + sources = attr.ib(default=attr.Factory(dict)) + hash = attr.ib(default=None) + description = attr.ib(default=None) + license = attr.ib(default=None) + authors = attr.ib(default=None) + + @classmethod + def from_dict(cls, data): + sources = [] + + production_version = data.get("productionVersion") + if not production_version: + return + + # server payload contains info about all versions + # active addon must have 'productionVersion' and matching version info + version_data = data.get("versions", {})[production_version] + + for source in version_data.get("clientSourceInfo", []): + if source.get("type") == UrlType.FILESYSTEM.value: + source_addon = LocalAddonSource(type=source["type"], + path=source["path"]) + if source.get("type") == UrlType.HTTP.value: + source_addon = WebAddonSource(type=source["type"], + url=source["url"]) + + sources.append(source_addon) + + return cls(name=data.get("name"), + version=production_version, + sources=sources, + hash=data.get("hash"), + description=data.get("description"), + title=data.get("title"), + license=data.get("license"), + authors=data.get("authors")) + diff --git a/common/openpype_common/distribution/tests/test_addon_distributtion.py b/common/openpype_common/distribution/tests/test_addon_distributtion.py index 46bcd276cd..765ea0596a 100644 --- a/common/openpype_common/distribution/tests/test_addon_distributtion.py +++ b/common/openpype_common/distribution/tests/test_addon_distributtion.py @@ -4,13 +4,13 @@ import tempfile from common.openpype_common.distribution.addon_distribution import ( AddonDownloader, - UrlType, OSAddonDownloader, HTTPAddonDownloader, AddonInfo, update_addon_state, UpdateState ) +from common.openpype_common.distribution.addon_info import UrlType @pytest.fixture From a25b951607239984719acb550506571025682069 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Sep 2022 13:12:04 +0200 Subject: [PATCH 1318/2550] making the code more appealing --- .../publish/extract_subset_resources.py | 54 +++++++++---------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 1b7e9b88b5..5482af973c 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -79,10 +79,10 @@ class ExtractSubsetResources(openpype.api.Extractor): retimed_data = self._get_retimed_attributes(instance) # get individual keys - r_handle_start = retimed_data["handle_start"] - r_handle_end = retimed_data["handle_end"] - r_source_dur = retimed_data["source_duration"] - r_speed = retimed_data["speed"] + 
retimed_handle_start = retimed_data["handle_start"] + retimed_handle_end = retimed_data["handle_end"] + retimed_source_duration = retimed_data["source_duration"] + retimed_speed = retimed_data["speed"] # get handles value - take only the max from both handle_start = instance.data["handleStart"] @@ -96,23 +96,23 @@ class ExtractSubsetResources(openpype.api.Extractor): source_end_handles = instance.data["sourceEndH"] # retime if needed - if r_speed != 1.0: + if retimed_speed != 1.0: if retimed_handles: # handles are retimed source_start_handles = ( - instance.data["sourceStart"] - r_handle_start) + instance.data["sourceStart"] - retimed_handle_start) source_end_handles = ( source_start_handles - + (r_source_dur - 1) - + r_handle_start - + r_handle_end + + (retimed_source_duration - 1) + + retimed_handle_start + + retimed_handle_end ) else: # handles are not retimed source_end_handles = ( source_start_handles - + (r_source_dur - 1) + + (retimed_source_duration - 1) + handle_start + handle_end ) @@ -121,11 +121,11 @@ class ExtractSubsetResources(openpype.api.Extractor): frame_start_handle = frame_start - handle_start repre_frame_start = frame_start_handle if include_handles: - if r_speed == 1.0 or not retimed_handles: + if retimed_speed == 1.0 or not retimed_handles: frame_start_handle = frame_start else: frame_start_handle = ( - frame_start - handle_start) + r_handle_start + frame_start - handle_start) + retimed_handle_start self.log.debug("_ frame_start_handle: {}".format( frame_start_handle)) @@ -163,29 +163,29 @@ class ExtractSubsetResources(openpype.api.Extractor): instance.data["versionData"].update(version_data) # version data start frame - vd_frame_start = frame_start + version_frame_start = frame_start if include_handles: - vd_frame_start = frame_start_handle - - if r_speed != 1.0: - instance.data["versionData"].update({ - "frameStart": vd_frame_start, - "frameEnd": ( - (vd_frame_start + source_duration_handles - 1) - - (r_handle_start + r_handle_end) - ) - }) - if not retimed_handles: + version_frame_start = frame_start_handle + if retimed_speed != 1.0: + if retimed_handles: + instance.data["versionData"].update({ + "frameStart": version_frame_start, + "frameEnd": ( + (version_frame_start + source_duration_handles - 1) + - (retimed_handle_start + retimed_handle_end) + ) + }) + else: instance.data["versionData"].update({ "handleStart": handle_start, "handleEnd": handle_end, - "frameStart": vd_frame_start, + "frameStart": version_frame_start, "frameEnd": ( - (vd_frame_start + source_duration_handles - 1) + (version_frame_start + source_duration_handles - 1) - (handle_start + handle_end) ) }) - self.log.debug("_ i_version_data: {}".format( + self.log.debug("_ version_data: {}".format( instance.data["versionData"] )) From 68ef0e35066c6a2eaff185665d6fd30749a164b5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Sep 2022 13:23:18 +0200 Subject: [PATCH 1319/2550] making code more appealing --- openpype/hosts/flame/api/plugin.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 6ad33f16a3..1a26e96c79 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -801,7 +801,7 @@ class OpenClipSolver(flib.MediaInfoFile): self.log.debug(">> self.clip_data: {}".format(self.clip_data)) # loop tmp tracks - updated_any = [] + updated_any = False for tmp_xml_track in self.clip_data.iter("track"): # get tmp track uid tmp_track_uid = 
tmp_xml_track.get("uid") @@ -831,25 +831,25 @@ class OpenClipSolver(flib.MediaInfoFile): self._clear_handler(tmp_xml_feed) # update fps from MediaInfoFile class - if self.fps: + if self.fps is not None: tmp_feed_fps_obj = tmp_xml_feed.find( "startTimecode/rate") tmp_feed_fps_obj.text = str(self.fps) # update start_frame from MediaInfoFile class - if self.start_frame: + if self.start_frame is not None: tmp_feed_nb_ticks_obj = tmp_xml_feed.find( "startTimecode/nbTicks") tmp_feed_nb_ticks_obj.text = str(self.start_frame) # update drop_mode from MediaInfoFile class - if self.drop_mode: + if self.drop_mode is not None: tmp_feed_drop_mode_obj = tmp_xml_feed.find( "startTimecode/dropMode") tmp_feed_drop_mode_obj.text = str(self.drop_mode) # add colorspace if any is set - if self.feed_colorspace: + if self.feed_colorspace is not None: self._add_colorspace(tmp_xml_feed, self.feed_colorspace) # then append/update feed to correct track in output @@ -872,9 +872,9 @@ class OpenClipSolver(flib.MediaInfoFile): out_tracks = out_xml.find("tracks") out_tracks.append(tmp_xml_track) - updated_any.append(True) + updated_any = True - if any(updated_any): + if updated_any: # Append vUID to versions out_xml_versions_obj = out_xml.find('versions') out_xml_versions_obj.set( From 00995475d9c9471dda3923287a8295f64687d4ce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 13:23:32 +0200 Subject: [PATCH 1320/2550] added function to get default values of attribute definitions --- openpype/lib/attribute_definitions.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index cbd53d1f07..37446f01f8 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -30,6 +30,28 @@ def get_attributes_keys(attribute_definitions): return keys +def get_default_values(attribute_definitions): + """Receive default values for attribute definitions. + + Args: + attribute_definitions (List[AbtractAttrDef]): Attribute definitions for + which default values should be collected. + + Returns: + Dict[str, Any]: Default values for passet attribute definitions. + """ + + output = {} + if not attribute_definitions: + return output + + for attr_def in attribute_definitions: + # Skip UI definitions + if not isinstance(attr_def, UIDef): + output[attr_def.key] = attr_def.default + return output + + class AbstractAttrDefMeta(ABCMeta): """Meta class to validate existence of 'key' attribute. 
From 2db3aa131cc0920ff6050c8bca55997cd3c50f21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 13:24:17 +0200 Subject: [PATCH 1321/2550] fix typo --- openpype/hosts/nuke/api/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 709ee3b743..7317f6726b 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -55,7 +55,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder): class NukePlaceholderPlugin(PlaceholderPlugin): - noce_color = 4278190335 + node_color = 4278190335 def _collect_scene_placeholders(self): # Cache placeholder data to shared data From d6c7509150d695b7c7e5a60d8bc13889e71b01af Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 13:27:28 +0200 Subject: [PATCH 1322/2550] fix 'get_load_plugin_options' in maya --- openpype/hosts/maya/api/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 9163cf9a6f..be5dc3db61 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -212,7 +212,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): self.populate_load_placeholder(placeholder, repre_ids) def get_placeholder_options(self, options=None): - return self.get_load_plugin_options(self, options) + return self.get_load_plugin_options(options) def cleanup_placeholder(self, placeholder): """Hide placeholder, parent them to root From 654b5744de733ebebd045a0b2614a8d40e7c41bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 14:06:39 +0200 Subject: [PATCH 1323/2550] fix arguments for 'cleanup_placeholder' --- openpype/hosts/maya/api/workfile_template_builder.py | 2 +- openpype/hosts/nuke/api/workfile_template_builder.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index be5dc3db61..ef043ed0f4 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -214,7 +214,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): def get_placeholder_options(self, options=None): return self.get_load_plugin_options(options) - def cleanup_placeholder(self, placeholder): + def cleanup_placeholder(self, placeholder, failed): """Hide placeholder, parent them to root add them to placeholder set and register placeholder's parent to keep placeholder info available for future use diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 7317f6726b..7a2e442e32 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -192,7 +192,7 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): def get_placeholder_options(self, options=None): return self.get_load_plugin_options(options) - def cleanup_placeholder(self, placeholder): + def cleanup_placeholder(self, placeholder, failed): # deselect all selected nodes placeholder_node = nuke.toNode(placeholder.scene_identifier) From 9c1ee5b79c90fd3db25857661a39a21e97b9d5ad Mon Sep 17 
00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 17:11:40 +0200 Subject: [PATCH 1324/2550] renamed 'lifetime_data' to 'transient_data' --- openpype/pipeline/create/context.py | 6 +++--- openpype/plugins/publish/collect_from_create_context.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index a1b11d08c5..a7e43cb2f2 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -405,7 +405,7 @@ class CreatedInstance: self._members = [] # Data that can be used for lifetime of object - self._lifetime_data = {} + self._transient_data = {} # Create a copy of passed data to avoid changing them on the fly data = copy.deepcopy(data or {}) @@ -600,7 +600,7 @@ class CreatedInstance: return self @property - def lifetime_data(self): + def transient_data(self): """Data stored for lifetime of instance object. These data are not stored to scene and will be lost on object @@ -617,7 +617,7 @@ class CreatedInstance: to instance for lifetime of instance object. """ - return self._lifetime_data + return self._transient_data def changes(self): """Calculate and return changes.""" diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index b5e3225c34..fc0f97b187 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -26,7 +26,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): instance_data = created_instance.data_to_store() if instance_data["active"]: self.create_instance( - context, instance_data, created_instance.lifetime_data + context, instance_data, created_instance.transient_data ) # Update global data to context @@ -39,7 +39,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): legacy_io.Session[key] = value os.environ[key] = value - def create_instance(self, context, in_data, lifetime_data): + def create_instance(self, context, in_data, transient_data): subset = in_data["subset"] # If instance data already contain families then use it instance_families = in_data.get("families") or [] @@ -59,7 +59,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): if key not in instance.data: instance.data[key] = value - instance.data["lifetimeData"] = lifetime_data + instance.data["transientData"] = transient_data self.log.info("collected instance: {}".format(instance.data)) self.log.info("parsing data: {}".format(in_data)) From e97b6ce01f511b1cf240cb8640b871de3d79dc4e Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 23 Sep 2022 16:18:08 +0100 Subject: [PATCH 1325/2550] Fixed path resolving not finding the workfile in certain conditions In case the workfile only contains the project name, the workfile is not found because while the regex matches, the match doesn't have any group, and so it throws an exception. 
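To make the failure mode concrete before the diff: a workfile template that carries no version token still produces a regex that matches the file name, but the resulting match object has no capture groups, so the existing int(match.group(1)) call raised. The snippet below is only an illustration with a hypothetical pattern and file name, not code from the repository; the actual fix follows in the diff and simply keeps such filenames without trying to read a version number from them.

# Illustration of the regex-without-groups case (hypothetical pattern and file name).
import re

pattern = re.compile(r"^myproject\.nk$")   # built from a template with no (\d+) version group
match = pattern.match("myproject.nk")
print(match is not None)    # True  - the workfile name matches
print(match.groups())       # ()    - but there is no group to read a version from
try:
    int(match.group(1))     # this is the call that used to raise
except IndexError as exc:
    print("no such group:", exc)
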
--- openpype/pipeline/workfile/path_resolving.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py index ed1d1d793e..4e4d3ca1c0 100644 --- a/openpype/pipeline/workfile/path_resolving.py +++ b/openpype/pipeline/workfile/path_resolving.py @@ -265,6 +265,10 @@ def get_last_workfile_with_version( if not match: continue + if not match.groups(): + output_filenames.append(filename) + continue + file_version = int(match.group(1)) if version is None or file_version > version: output_filenames[:] = [] From 548a37e4e1baf7f8cd9c4af61d0172caeec0f3ba Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 23 Sep 2022 16:18:52 +0100 Subject: [PATCH 1326/2550] Added setting to remove unmatched assets --- openpype/settings/defaults/project_settings/unreal.json | 1 + .../schemas/projects_schema/schema_project_unreal.json | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/openpype/settings/defaults/project_settings/unreal.json b/openpype/settings/defaults/project_settings/unreal.json index c5f5cdf719..391e2415a5 100644 --- a/openpype/settings/defaults/project_settings/unreal.json +++ b/openpype/settings/defaults/project_settings/unreal.json @@ -1,5 +1,6 @@ { "level_sequences_for_layouts": false, + "delete_unmatched_assets": false, "project_setup": { "dev_mode": true } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json index d26b5c1ccf..09e5791ac4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json @@ -10,6 +10,11 @@ "key": "level_sequences_for_layouts", "label": "Generate level sequences when loading layouts" }, + { + "type": "boolean", + "key": "delete_unmatched_assets", + "label": "Delete assets that are not matched" + }, { "type": "dict", "collapsible": true, From b42fb6aedb10934f5836d89e1734e32669671350 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 23 Sep 2022 16:24:21 +0100 Subject: [PATCH 1327/2550] Fixed problem with transformations from FBX files --- .../plugins/load/load_layout_existing.py | 52 ++++++++----------- 1 file changed, 22 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout_existing.py b/openpype/hosts/unreal/plugins/load/load_layout_existing.py index 8cd1950f7e..9ab27d0cef 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout_existing.py +++ b/openpype/hosts/unreal/plugins/load/load_layout_existing.py @@ -80,30 +80,22 @@ class ExistingLayoutLoader(plugin.Loader): raise NotImplementedError( f"Unreal version {ue_major} not supported") - @staticmethod - def _transform_from_basis(transform, basis, conversion): - """Transform a transform from a basis to a new basis.""" - # Get the basis matrix - basis_matrix = unreal.Matrix( - basis[0], - basis[1], - basis[2], - basis[3] - ) - transform_matrix = unreal.Matrix( - transform[0], - transform[1], - transform[2], - transform[3] - ) - - new_transform = ( - basis_matrix.get_inverse() * transform_matrix * basis_matrix) - - return conversion.inverse() * new_transform.transform() - def _get_transform(self, ext, import_data, lasset): conversion = unreal.Matrix.IDENTITY.transform() + fbx_tuning = unreal.Matrix.IDENTITY.transform() + + basis = unreal.Matrix( + lasset.get('basis')[0], + lasset.get('basis')[1], + lasset.get('basis')[2], + lasset.get('basis')[3] + 
).transform() + transform = unreal.Matrix( + lasset.get('transform_matrix')[0], + lasset.get('transform_matrix')[1], + lasset.get('transform_matrix')[2], + lasset.get('transform_matrix')[3] + ).transform() # Check for the conversion settings. We cannot access # the alembic conversion settings, so we assume that @@ -111,11 +103,15 @@ class ExistingLayoutLoader(plugin.Loader): if ext == '.fbx': loc = import_data.import_translation rot = import_data.import_rotation.to_vector() - scale = import_data.import_scale + scale = import_data.import_uniform_scale conversion = unreal.Transform( location=[loc.x, loc.y, loc.z], rotation=[rot.x, rot.y, rot.z], - scale=[scale, scale, scale] + scale=[-scale, scale, scale] + ) + fbx_tuning = unreal.Transform( + rotation=[180.0, 0.0, 90.0], + scale=[1.0, 1.0, 1.0] ) elif ext == '.abc': # This is the standard conversion settings for @@ -126,12 +122,8 @@ class ExistingLayoutLoader(plugin.Loader): scale=[1.0, -1.0, 1.0] ) - transform = self._transform_from_basis( - lasset.get('transform_matrix'), - lasset.get('basis'), - conversion - ) - return transform + new_transform = (basis.inverse() * transform * basis) + return fbx_tuning * conversion.inverse() * new_transform def _spawn_actor(self, obj, lasset): actor = EditorLevelLibrary.spawn_actor_from_object( From a3eb15387108938f8536ed4b336e66c893e595aa Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 23 Sep 2022 16:34:05 +0100 Subject: [PATCH 1328/2550] Checks settings to determine if deleting or not unmatched assets --- .../plugins/load/load_layout_existing.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout_existing.py b/openpype/hosts/unreal/plugins/load/load_layout_existing.py index 9ab27d0cef..3ce99f8ef6 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout_existing.py +++ b/openpype/hosts/unreal/plugins/load/load_layout_existing.py @@ -15,6 +15,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, legacy_io, ) +from openpype.api import get_current_project_settings from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as upipeline @@ -147,7 +148,7 @@ class ExistingLayoutLoader(plugin.Loader): name = "" if family == 'rig': name = "SkeletalMeshFBXLoader" - elif family == 'model': + elif family == 'model' or family == 'staticMesh': name = "StaticMeshFBXLoader" elif family == 'camera': name = "CameraLoader" @@ -200,7 +201,8 @@ class ExistingLayoutLoader(plugin.Loader): loader = self._get_abc_loader(loaders, family) if not loader: - raise AssertionError(f"No valid loader found for {representation}") + self.log.error(f"No valid loader found for {representation}") + return [] # This option is necessary to avoid importing the assets with a # different conversion compared to the other assets. For ABC files, @@ -220,6 +222,9 @@ class ExistingLayoutLoader(plugin.Loader): return assets def _process(self, lib_path): + data = get_current_project_settings() + delete_unmatched = data["unreal"]["delete_unmatched_assets"] + ar = unreal.AssetRegistryHelpers.get_asset_registry() actors = EditorLevelLibrary.get_all_level_actors() @@ -264,16 +269,18 @@ class ExistingLayoutLoader(plugin.Loader): # Get the original path of the file from which the asset has # been imported. 
- actor.set_actor_label(lasset.get('instance_name')) smc = actor.get_editor_property('static_mesh_component') mesh = smc.get_editor_property('static_mesh') import_data = mesh.get_editor_property('asset_import_data') filename = import_data.get_first_filename() path = Path(filename) - if path.name not in repr_data.get('data').get('path'): + if (not path.name or + path.name not in repr_data.get('data').get('path')): continue + actor.set_actor_label(lasset.get('instance_name')) + mesh_path = Path(mesh.get_path_name()).parent.as_posix() # Create the container for the asset. @@ -352,7 +359,9 @@ class ExistingLayoutLoader(plugin.Loader): if not actor.get_class().get_name() == 'StaticMeshActor': continue if actor not in actors_matched: - EditorLevelLibrary.destroy_actor(actor) + self.log.warning(f"Actor {actor.get_name()} not matched.") + if delete_unmatched: + EditorLevelLibrary.destroy_actor(actor) return containers From 38387fc8997e579c1d4c8281f3810abc48059ce8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 18:46:12 +0200 Subject: [PATCH 1329/2550] removed 'abstract_collect_render' --- openpype/lib/abstract_collect_render.py | 33 ------------------------- 1 file changed, 33 deletions(-) delete mode 100644 openpype/lib/abstract_collect_render.py diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py deleted file mode 100644 index e4ff87aa0f..0000000000 --- a/openpype/lib/abstract_collect_render.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -"""Content was moved to 'openpype.pipeline.publish.abstract_collect_render'. - -Please change your imports as soon as possible. - -File will be probably removed in OpenPype 3.14.* -""" - -import warnings -from openpype.pipeline.publish import AbstractCollectRender, RenderInstance - - -class CollectRenderDeprecated(DeprecationWarning): - pass - - -warnings.simplefilter("always", CollectRenderDeprecated) -warnings.warn( - ( - "Content of 'abstract_collect_render' was moved." - "\nUsing deprecated source of 'abstract_collect_render'. Content was" - " move to 'openpype.pipeline.publish.abstract_collect_render'." - " Please change your imports as soon as possible." - ), - category=CollectRenderDeprecated, - stacklevel=4 -) - - -__all__ = ( - "AbstractCollectRender", - "RenderInstance" -) From 97f368c3a5377e0972ba56bba99b90dc092be2bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 18:46:23 +0200 Subject: [PATCH 1330/2550] removed 'abstract_expected_files' --- openpype/lib/abstract_expected_files.py | 32 ------------------------- 1 file changed, 32 deletions(-) delete mode 100644 openpype/lib/abstract_expected_files.py diff --git a/openpype/lib/abstract_expected_files.py b/openpype/lib/abstract_expected_files.py deleted file mode 100644 index f24d844fe5..0000000000 --- a/openpype/lib/abstract_expected_files.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -"""Content was moved to 'openpype.pipeline.publish.abstract_expected_files'. - -Please change your imports as soon as possible. - -File will be probably removed in OpenPype 3.14.* -""" - -import warnings -from openpype.pipeline.publish import ExpectedFiles - - -class ExpectedFilesDeprecated(DeprecationWarning): - pass - - -warnings.simplefilter("always", ExpectedFilesDeprecated) -warnings.warn( - ( - "Content of 'abstract_expected_files' was moved." - "\nUsing deprecated source of 'abstract_expected_files'. Content was" - " move to 'openpype.pipeline.publish.abstract_expected_files'." 
- " Please change your imports as soon as possible." - ), - category=ExpectedFilesDeprecated, - stacklevel=4 -) - - -__all__ = ( - "ExpectedFiles", -) From e21334463398df7eb2e199dc61c4f5eef85325d0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 18:46:33 +0200 Subject: [PATCH 1331/2550] removed 'abstract_metaplugins' --- openpype/lib/abstract_metaplugins.py | 35 ---------------------------- 1 file changed, 35 deletions(-) delete mode 100644 openpype/lib/abstract_metaplugins.py diff --git a/openpype/lib/abstract_metaplugins.py b/openpype/lib/abstract_metaplugins.py deleted file mode 100644 index 346b5d86b3..0000000000 --- a/openpype/lib/abstract_metaplugins.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Content was moved to 'openpype.pipeline.publish.publish_plugins'. - -Please change your imports as soon as possible. - -File will be probably removed in OpenPype 3.14.* -""" - -import warnings -from openpype.pipeline.publish import ( - AbstractMetaInstancePlugin, - AbstractMetaContextPlugin -) - - -class MetaPluginsDeprecated(DeprecationWarning): - pass - - -warnings.simplefilter("always", MetaPluginsDeprecated) -warnings.warn( - ( - "Content of 'abstract_metaplugins' was moved." - "\nUsing deprecated source of 'abstract_metaplugins'. Content was" - " moved to 'openpype.pipeline.publish.publish_plugins'." - " Please change your imports as soon as possible." - ), - category=MetaPluginsDeprecated, - stacklevel=4 -) - - -__all__ = ( - "AbstractMetaInstancePlugin", - "AbstractMetaContextPlugin", -) From 5f32cdc7a53e7ec3aa5dcf4df1fe0ddfc1d8ab43 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 18:49:34 +0200 Subject: [PATCH 1332/2550] removed config --- openpype/api.py | 2 -- openpype/lib/config.py | 41 ----------------------------------------- 2 files changed, 43 deletions(-) delete mode 100644 openpype/lib/config.py diff --git a/openpype/api.py b/openpype/api.py index 0466eb7f78..b60cd21d2b 100644 --- a/openpype/api.py +++ b/openpype/api.py @@ -11,7 +11,6 @@ from .lib import ( PypeLogger, Logger, Anatomy, - config, execute, run_subprocess, version_up, @@ -72,7 +71,6 @@ __all__ = [ "PypeLogger", "Logger", "Anatomy", - "config", "execute", "get_default_components", "ApplicationManager", diff --git a/openpype/lib/config.py b/openpype/lib/config.py deleted file mode 100644 index 26822649e4..0000000000 --- a/openpype/lib/config.py +++ /dev/null @@ -1,41 +0,0 @@ -import warnings -import functools - - -class ConfigDeprecatedWarning(DeprecationWarning): - pass - - -def deprecated(func): - """Mark functions as deprecated. - - It will result in a warning being emitted when the function is used. - """ - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.simplefilter("always", ConfigDeprecatedWarning) - warnings.warn( - ( - "Deprecated import of function '{}'." - " Class was moved to 'openpype.lib.dateutils.{}'." - " Please change your imports." 
- ).format(func.__name__), - category=ConfigDeprecatedWarning - ) - return func(*args, **kwargs) - return new_func - - -@deprecated -def get_datetime_data(datetime_obj=None): - from .dateutils import get_datetime_data - - return get_datetime_data(datetime_obj) - - -@deprecated -def get_formatted_current_time(): - from .dateutils import get_formatted_current_time - - return get_formatted_current_time() From c06ce65885f819475aa36b625f0f0d05f3de0b21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 23 Sep 2022 18:54:23 +0200 Subject: [PATCH 1333/2550] removed 'editorial' --- openpype/lib/__init__.py | 23 --------- openpype/lib/editorial.py | 102 -------------------------------------- 2 files changed, 125 deletions(-) delete mode 100644 openpype/lib/editorial.py diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 17aafc3e8b..a64b7c2911 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -203,19 +203,6 @@ from .path_tools import ( get_project_basic_paths, ) -from .editorial import ( - is_overlapping_otio_ranges, - otio_range_to_frame_range, - otio_range_with_handles, - get_media_range_with_retimes, - convert_to_padded_path, - trim_media_range, - range_from_frames, - frames_to_secons, - frames_to_timecode, - make_sequence_collection -) - from .openpype_version import ( op_version_control_available, get_openpype_version, @@ -383,16 +370,6 @@ __all__ = [ "validate_mongo_connection", "OpenPypeMongoConnection", - "is_overlapping_otio_ranges", - "otio_range_with_handles", - "convert_to_padded_path", - "otio_range_to_frame_range", - "get_media_range_with_retimes", - "trim_media_range", - "range_from_frames", - "frames_to_secons", - "frames_to_timecode", - "make_sequence_collection", "create_project_folders", "create_workdir_extra_folders", "get_project_basic_paths", diff --git a/openpype/lib/editorial.py b/openpype/lib/editorial.py deleted file mode 100644 index 49220b4f15..0000000000 --- a/openpype/lib/editorial.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Code related to editorial utility functions was moved -to 'openpype.pipeline.editorial' please change your imports as soon as -possible. File will be probably removed in OpenPype 3.14.* -""" - -import warnings -import functools - - -class EditorialDeprecatedWarning(DeprecationWarning): - pass - - -def editorial_deprecated(func): - """Mark functions as deprecated. - - It will result in a warning being emitted when the function is used. - """ - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.simplefilter("always", EditorialDeprecatedWarning) - warnings.warn( - ( - "Call to deprecated function '{}'." - " Function was moved to 'openpype.pipeline.editorial'." 
- ).format(func.__name__), - category=EditorialDeprecatedWarning, - stacklevel=2 - ) - return func(*args, **kwargs) - return new_func - - -@editorial_deprecated -def otio_range_to_frame_range(*args, **kwargs): - from openpype.pipeline.editorial import otio_range_to_frame_range - - return otio_range_to_frame_range(*args, **kwargs) - - -@editorial_deprecated -def otio_range_with_handles(*args, **kwargs): - from openpype.pipeline.editorial import otio_range_with_handles - - return otio_range_with_handles(*args, **kwargs) - - -@editorial_deprecated -def is_overlapping_otio_ranges(*args, **kwargs): - from openpype.pipeline.editorial import is_overlapping_otio_ranges - - return is_overlapping_otio_ranges(*args, **kwargs) - - -@editorial_deprecated -def convert_to_padded_path(*args, **kwargs): - from openpype.pipeline.editorial import convert_to_padded_path - - return convert_to_padded_path(*args, **kwargs) - - -@editorial_deprecated -def trim_media_range(*args, **kwargs): - from openpype.pipeline.editorial import trim_media_range - - return trim_media_range(*args, **kwargs) - - -@editorial_deprecated -def range_from_frames(*args, **kwargs): - from openpype.pipeline.editorial import range_from_frames - - return range_from_frames(*args, **kwargs) - - -@editorial_deprecated -def frames_to_secons(*args, **kwargs): - from openpype.pipeline.editorial import frames_to_seconds - - return frames_to_seconds(*args, **kwargs) - - -@editorial_deprecated -def frames_to_timecode(*args, **kwargs): - from openpype.pipeline.editorial import frames_to_timecode - - return frames_to_timecode(*args, **kwargs) - - -@editorial_deprecated -def make_sequence_collection(*args, **kwargs): - from openpype.pipeline.editorial import make_sequence_collection - - return make_sequence_collection(*args, **kwargs) - - -@editorial_deprecated -def get_media_range_with_retimes(*args, **kwargs): - from openpype.pipeline.editorial import get_media_range_with_retimes - - return get_media_range_with_retimes(*args, **kwargs) From b6d035ad6958562e2f8c521614f51d513f0e9406 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Sep 2022 19:48:59 +0200 Subject: [PATCH 1334/2550] OP-3938 - added ftrackreview tag to jpeg options When jpg is created instead of .mov, it must have same tags to get to Ftrack --- openpype/settings/defaults/project_settings/photoshop.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index 552c2c9cad..be3f30bf48 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -34,7 +34,10 @@ "make_image_sequence": false, "max_downscale_size": 8192, "jpg_options": { - "tags": [] + "tags": [ + "review", + "ftrackreview" + ] }, "mov_options": { "tags": [ From 233a0c00403290b5e96392fb549815336cb12e41 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Sep 2022 19:55:12 +0200 Subject: [PATCH 1335/2550] OP-3938 - Hound --- .../plugins/publish/extract_review.py | 5 ++--- .../publish/integrate_ftrack_instances.py | 20 +++++++++---------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index d84e709c06..01022ce0b2 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -120,8 +120,7 @@ class 
ExtractReview(publish.Extractor): mov_path ] self.log.debug("mov args:: {}".format(args)) - output = run_subprocess(args) - self.log.debug(output) + _output = run_subprocess(args) instance.data["representations"].append({ "name": "mov", "ext": "mov", @@ -158,7 +157,7 @@ class ExtractReview(publish.Extractor): thumbnail_path ] self.log.debug("thumbnail args:: {}".format(args)) - output = run_subprocess(args) + _output = run_subprocess(args) instance.data["representations"].append({ "name": "thumbnail", "ext": "jpg", diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 231a3a7816..7cc3d7389b 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -9,7 +9,7 @@ from openpype.lib.transcoding import ( convert_ffprobe_fps_to_float, ) from openpype.lib.profiles_filtering import filter_profiles -from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS +from openpype.lib.transcoding import VIDEO_EXTENSIONS class IntegrateFtrackInstance(pyblish.api.InstancePlugin): @@ -454,9 +454,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): try: streams = get_ffprobe_streams(component_path) except Exception: - self.log.debug(( - "Failed to retrieve information about input {}" - ).format(component_path)) + self.log.debug( + "Failed to retrieve information about " + "input {}".format(component_path)) # Find video streams video_streams = [ @@ -500,9 +500,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): input_framerate ) except ValueError: - self.log.warning(( - "Could not convert ffprobe fps to float \"{}\"" - ).format(input_framerate)) + self.log.warning( + "Could not convert ffprobe " + "fps to float \"{}\"".format(input_framerate)) continue stream_width = tmp_width @@ -583,9 +583,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): try: streams = get_ffprobe_streams(component_path) except Exception: - self.log.debug(( - "Failed to retrieve information about intput {}" - ).format(component_path)) + self.log.debug( + "Failed to retrieve information " + "about input {}".format(component_path)) for stream in streams: if "width" in stream and "height" in stream: From 25ac0dd9a44d569e23e15bd17e700f1c7dc9c560 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 23 Sep 2022 21:00:46 +0200 Subject: [PATCH 1336/2550] for hero versions use standard version for representation links --- openpype/client/entity_links.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/client/entity_links.py b/openpype/client/entity_links.py index ac92597e66..e42ac58aff 100644 --- a/openpype/client/entity_links.py +++ b/openpype/client/entity_links.py @@ -2,6 +2,7 @@ from .mongo import get_project_connection from .entities import ( get_assets, get_asset_by_id, + get_version_by_id, get_representation_by_id, convert_id, ) @@ -127,6 +128,12 @@ def get_linked_representation_id( if not version_id: return [] + version_doc = get_version_by_id( + project_name, version_id, fields=["type", "version_id"] + ) + if version_doc["type"] == "hero_version": + version_id = version_doc["version_id"] + if max_depth is None: max_depth = 0 From ae5ec70d0538b4c659cdfa1eff6f0a89e62bf887 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 23 Sep 2022 21:37:47 +0200 Subject: [PATCH 1337/2550] Move `imageio` settings from project anatomy to project settings - Note: There is no backwards 
compatibility implemented --- openpype/hosts/flame/hooks/pre_flame_setup.py | 13 +- .../fusion/hooks/pre_fusion_ocio_hook.py | 7 +- openpype/hosts/hiero/api/lib.py | 8 +- openpype/hosts/maya/api/lib.py | 4 +- openpype/hosts/nuke/api/lib.py | 2 +- openpype/hosts/nuke/plugins/load/load_clip.py | 2 +- .../defaults/project_anatomy/imageio.json | 258 --------- .../defaults/project_settings/flame.json | 19 + .../defaults/project_settings/hiero.json | 25 + .../defaults/project_settings/maya.json | 22 + .../defaults/project_settings/nuke.json | 190 +++++++ .../schemas/projects_schema/schema_main.json | 4 - .../projects_schema/schema_project_flame.json | 63 +++ .../projects_schema/schema_project_hiero.json | 110 ++++ .../projects_schema/schema_project_maya.json | 70 +++ .../projects_schema/schema_project_nuke.json | 248 +++++++++ .../schemas/schema_anatomy_imageio.json | 493 ------------------ openpype/settings/lib.py | 17 - 18 files changed, 763 insertions(+), 792 deletions(-) delete mode 100644 openpype/settings/defaults/project_anatomy/imageio.json delete mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index 0173eb8e3b..8f2edf59a6 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -42,17 +42,16 @@ class FlamePrelaunch(PreLaunchHook): volume_name = _env.get("FLAME_WIRETAP_VOLUME") # get image io - project_anatomy = self.data["anatomy"] + project_settings = self.data["project_settings"] # make sure anatomy settings are having flame key - if not project_anatomy["imageio"].get("flame"): - raise ApplicationLaunchFailed(( - "Anatomy project settings are missing `flame` key. " - "Please make sure you remove project overides on " - "Anatomy Image io") + if not project_settings["flame"].get("imageio"): + raise ApplicationLaunchFailed( + "Project settings are missing `flame/imageio` key. " + "Please make sure to update project settings." ) - imageio_flame = project_anatomy["imageio"]["flame"] + imageio_flame = project_settings["flame"]["imageio"] # get user name and host name user_name = get_openpype_username() diff --git a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py index 12fc640f5c..83cd070924 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py @@ -17,10 +17,9 @@ class FusionPreLaunchOCIO(PreLaunchHook): # make sure anatomy settings are having flame key imageio_fusion = project_settings.get("fusion", {}).get("imageio") if not imageio_fusion: - raise ApplicationLaunchFailed(( - "Anatomy project settings are missing `fusion` key. " - "Please make sure you remove project overrides on " - "Anatomy ImageIO") + raise ApplicationLaunchFailed( + "Project settings are missing `fusion/imageio` key. " + "Please make sure you update your project settings. 
" ) ocio = imageio_fusion.get("ocio") diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 895e95e0c0..e5d35945af 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -14,7 +14,7 @@ import hiero from Qt import QtWidgets from openpype.client import get_project -from openpype.settings import get_anatomy_settings +from openpype.settings import get_project_settings from openpype.pipeline import legacy_io, Anatomy from openpype.pipeline.load import filter_containers from openpype.lib import Logger @@ -878,8 +878,7 @@ def apply_colorspace_project(): project.close() # get presets for hiero - imageio = get_anatomy_settings( - project_name)["imageio"].get("hiero", None) + imageio = get_project_settings(project_name)["hiero"]["imageio"] presets = imageio.get("workfile") # save the workfile as subversion "comment:_colorspaceChange" @@ -932,8 +931,7 @@ def apply_colorspace_clips(): clips = project.clips() # get presets for hiero - imageio = get_anatomy_settings( - project_name)["imageio"].get("hiero", None) + imageio = get_project_settings(project_name)["hiero"]["imageio"] from pprint import pprint presets = imageio.get("regexInputs", {}).get("inputs", {}) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 6a8447d6ad..789dec31fa 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -23,7 +23,7 @@ from openpype.client import ( get_last_versions, get_representation_by_name ) -from openpype.api import get_anatomy_settings +from openpype.api import get_project_settings from openpype.pipeline import ( legacy_io, discover_loader_plugins, @@ -3159,7 +3159,7 @@ def set_colorspace(): """Set Colorspace from project configuration """ project_name = os.getenv("AVALON_PROJECT") - imageio = get_anatomy_settings(project_name)["imageio"]["maya"] + imageio = get_project_settings(project_name)["maya"]["imageio"] # Maya 2022+ introduces new OCIO v2 color management settings that # can override the old color managenement preferences. 
OpenPype has diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e55fdbfcb2..6297da884c 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -563,7 +563,7 @@ def get_node_path(path, padding=4): def get_nuke_imageio_settings(): - return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] + return get_project_settings(Context.project_name)["nuke"]["imageio"] def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 346773b5af..654ea367c8 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -425,7 +425,7 @@ class LoadClip(plugin.NukeLoader): colorspace = repre_data.get("colorspace") colorspace = colorspace or version_data.get("colorspace") - # colorspace from `project_anatomy/imageio/nuke/regexInputs` + # colorspace from `project_settings/nuke/imageio/regexInputs` iio_colorspace = get_imageio_input_colorspace(path) # Set colorspace defined in version data diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json deleted file mode 100644 index f0be8f95f4..0000000000 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ /dev/null @@ -1,258 +0,0 @@ -{ - "hiero": { - "workfile": { - "ocioConfigName": "nuke-default", - "ocioconfigpath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "workingSpace": "linear", - "sixteenBitLut": "sRGB", - "eightBitLut": "sRGB", - "floatLut": "linear", - "logLut": "Cineon", - "viewerLut": "sRGB", - "thumbnailLut": "sRGB" - }, - "regexInputs": { - "inputs": [ - { - "regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)", - "colorspace": "sRGB" - } - ] - } - }, - "nuke": { - "viewer": { - "viewerProcess": "sRGB" - }, - "baking": { - "viewerProcess": "rec709" - }, - "workfile": { - "colorManagement": "Nuke", - "OCIO_config": "nuke-default", - "customOCIOConfigPath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "workingSpaceLUT": "linear", - "monitorLut": "sRGB", - "int8Lut": "sRGB", - "int16Lut": "sRGB", - "logLut": "Cineon", - "floatLut": "linear" - }, - "nodes": { - "requiredNodes": [ - { - "plugins": [ - "CreateWriteRender" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": true - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 186, - 35, - 35, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "linear" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - }, - { - "plugins": [ - "CreateWritePrerender" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": true - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 171, - 171, - 10, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - 
"name": "colorspace", - "value": "linear" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - }, - { - "plugins": [ - "CreateWriteStill" - ], - "nukeNodeClass": "Write", - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "tiff" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit" - }, - { - "type": "text", - "name": "compression", - "value": "Deflate" - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 56, - 162, - 7, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "text", - "name": "colorspace", - "value": "sRGB" - }, - { - "type": "bool", - "name": "create_directories", - "value": true - } - ] - } - ], - "overrideNodes": [] - }, - "regexInputs": { - "inputs": [ - { - "regex": "(beauty).*(?=.exr)", - "colorspace": "linear" - } - ] - } - }, - "maya": { - "colorManagementPreference_v2": { - "enabled": true, - "configFilePath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "renderSpace": "ACEScg", - "displayName": "sRGB", - "viewName": "ACES 1.0 SDR-video" - }, - "colorManagementPreference": { - "configFilePath": { - "windows": [], - "darwin": [], - "linux": [] - }, - "renderSpace": "scene-linear Rec 709/sRGB", - "viewTransform": "sRGB gamma" - } - }, - "flame": { - "project": { - "colourPolicy": "ACES 1.1", - "frameDepth": "16-bit fp", - "fieldDominance": "PROGRESSIVE" - }, - "profilesMapping": { - "inputs": [ - { - "flameName": "ACEScg", - "ocioName": "ACES - ACEScg" - }, - { - "flameName": "Rec.709 video", - "ocioName": "Output - Rec.709" - } - ] - } - } -} \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index c90193fe13..0f3080ad64 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -1,4 +1,23 @@ { + "imageio": { + "project": { + "colourPolicy": "ACES 1.1", + "frameDepth": "16-bit fp", + "fieldDominance": "PROGRESSIVE" + }, + "profilesMapping": { + "inputs": [ + { + "flameName": "ACEScg", + "ocioName": "ACES - ACEScg" + }, + { + "flameName": "Rec.709 video", + "ocioName": "Output - Rec.709" + } + ] + } + }, "create": { "CreateShotClip": { "hierarchy": "{folder}/{sequence}", diff --git a/openpype/settings/defaults/project_settings/hiero.json b/openpype/settings/defaults/project_settings/hiero.json index e9e7199330..d2ba697305 100644 --- a/openpype/settings/defaults/project_settings/hiero.json +++ b/openpype/settings/defaults/project_settings/hiero.json @@ -1,4 +1,29 @@ { + "imageio": { + "workfile": { + "ocioConfigName": "nuke-default", + "ocioconfigpath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "workingSpace": "linear", + "sixteenBitLut": "sRGB", + "eightBitLut": "sRGB", + "floatLut": "linear", + "logLut": "Cineon", + "viewerLut": "sRGB", + "thumbnailLut": "sRGB" + }, + "regexInputs": { + "inputs": [ + { + "regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)", + "colorspace": "sRGB" + } + ] + } + }, "create": { "CreateShotClip": { "hierarchy": "{folder}/{sequence}", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 76ef0a7338..c8a32d6bdf 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1,4 +1,26 @@ { + "imageio": { + "colorManagementPreference_v2": { + "enabled": true, + "configFilePath": { + 
"windows": [], + "darwin": [], + "linux": [] + }, + "renderSpace": "ACEScg", + "displayName": "sRGB", + "viewName": "ACES 1.0 SDR-video" + }, + "colorManagementPreference": { + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "renderSpace": "scene-linear Rec 709/sRGB", + "viewTransform": "sRGB gamma" + } + }, "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "ext_mapping": { "model": "ma", diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index c3eda2cbb4..e0feb06eb6 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -8,6 +8,196 @@ "build_workfile": "ctrl+alt+b" } }, + "imageio": { + "viewer": { + "viewerProcess": "sRGB" + }, + "baking": { + "viewerProcess": "rec709" + }, + "workfile": { + "colorManagement": "Nuke", + "OCIO_config": "nuke-default", + "customOCIOConfigPath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "workingSpaceLUT": "linear", + "monitorLut": "sRGB", + "int8Lut": "sRGB", + "int16Lut": "sRGB", + "logLut": "Cineon", + "floatLut": "linear" + }, + "nodes": { + "requiredNodes": [ + { + "plugins": [ + "CreateWriteRender" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "exr" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit half" + }, + { + "type": "text", + "name": "compression", + "value": "Zip (1 scanline)" + }, + { + "type": "bool", + "name": "autocrop", + "value": true + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 186, + 35, + 35, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "linear" + }, + { + "type": "bool", + "name": "create_directories", + "value": true + } + ] + }, + { + "plugins": [ + "CreateWritePrerender" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "exr" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit half" + }, + { + "type": "text", + "name": "compression", + "value": "Zip (1 scanline)" + }, + { + "type": "bool", + "name": "autocrop", + "value": true + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 171, + 171, + 10, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "linear" + }, + { + "type": "bool", + "name": "create_directories", + "value": true + } + ] + }, + { + "plugins": [ + "CreateWriteStill" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "tiff" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit" + }, + { + "type": "text", + "name": "compression", + "value": "Deflate" + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 56, + 162, + 7, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "sRGB" + }, + { + "type": "bool", + "name": 
"create_directories", + "value": true + } + ] + } + ], + "overrideNodes": [] + }, + "regexInputs": { + "inputs": [ + { + "regex": "(beauty).*(?=.exr)", + "colorspace": "linear" + } + ] + } + }, "nuke-dirmap": { "enabled": false, "paths": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 0b9fbf7470..0f4afc54ce 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -43,10 +43,6 @@ } ] } - }, - { - "type": "schema", - "name": "schema_anatomy_imageio" } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 5f05bef0e1..73664300aa 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -5,6 +5,69 @@ "label": "Flame", "is_file": true, "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "is_group": true, + "children": [ + { + "key": "project", + "type": "dict", + "label": "Project", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "text", + "key": "colourPolicy", + "label": "Colour Policy (name or path)" + }, + { + "type": "text", + "key": "frameDepth", + "label": "Image Depth" + }, + { + "type": "text", + "key": "fieldDominance", + "label": "Field Dominance" + } + ] + } + ] + }, + { + "key": "profilesMapping", + "type": "dict", + "label": "Profile names mapping", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "flameName", + "label": "Flame name" + }, + { + "type": "text", + "key": "ocioName", + "label": "OCIO name" + } + ] + } + } + ] + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json b/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json index 3108d2197e..9e18522def 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json @@ -5,6 +5,116 @@ "label": "Hiero", "is_file": true, "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "is_group": true, + "collapsible": true, + "children": [ + { + "key": "workfile", + "type": "dict", + "label": "Workfile", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "enum", + "key": "ocioConfigName", + "label": "OpenColorIO Config", + "enum_items": [ + { + "nuke-default": "nuke-default" + }, + { + "aces_1.0.3": "aces_1.0.3" + }, + { + "aces_1.1": "aces_1.1" + }, + { + "custom": "custom" + } + ] + }, + { + "type": "path", + "key": "ocioconfigpath", + "label": "Custom OCIO path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "workingSpace", + "label": "Working Space" + }, + { + "type": "text", + "key": "sixteenBitLut", + "label": "16 Bit Files" + }, + { + "type": "text", + "key": "eightBitLut", + "label": "8 Bit Files" + }, + { + "type": "text", + "key": "floatLut", + "label": "Floating Point Files" + }, + { + "type": "text", + "key": "logLut", + "label": "Log Files" + 
}, + { + "type": "text", + "key": "viewerLut", + "label": "Viewer" + }, + { + "type": "text", + "key": "thumbnailLut", + "label": "Thumbnails" + } + ] + } + ] + }, + { + "key": "regexInputs", + "type": "dict", + "label": "Colorspace on Inputs by regex detection", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "regex", + "label": "Regex" + }, + { + "type": "text", + "key": "colorspace", + "label": "Colorspace" + } + ] + } + } + ] + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index d7a2b086d9..b2d79797a3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -5,6 +5,76 @@ "label": "Maya", "is_file": true, "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "collapsible": true, + "is_group": true, + "children": [ + { + "key": "colorManagementPreference_v2", + "type": "dict", + "label": "Color Management Preference v2 (Maya 2022+)", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Use Color Management Preference v2" + }, + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "renderSpace", + "label": "Rendering Space" + }, + { + "type": "text", + "key": "displayName", + "label": "Display" + }, + { + "type": "text", + "key": "viewName", + "label": "View" + } + ] + }, + { + "key": "colorManagementPreference", + "type": "dict", + "label": "Color Management Preference (legacy)", + "collapsible": true, + "children": [ + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "renderSpace", + "label": "Rendering Space" + }, + { + "type": "text", + "key": "viewTransform", + "label": "Viewer Transform" + } + ] + } + ] + }, { "type": "text", "multiline" : true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 7cf82b9e69..ff341fb919 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -46,6 +46,254 @@ } ] }, + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "collapsible": true, + "is_group": true, + "children": [ + { + "key": "viewer", + "type": "dict", + "label": "Viewer", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "baking", + "type": "dict", + "label": "Extract-review baking profile", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "workfile", + "type": "dict", + "label": "Workfile", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "enum", + "key": "colorManagement", + "label": "color management", + "enum_items": [ + { + "Nuke": "Nuke" + }, + { + "OCIO": 
"OCIO" + } + ] + }, + { + "type": "enum", + "key": "OCIO_config", + "label": "OpenColorIO Config", + "enum_items": [ + { + "nuke-default": "nuke-default" + }, + { + "spi-vfx": "spi-vfx" + }, + { + "spi-anim": "spi-anim" + }, + { + "aces_0.1.1": "aces_0.1.1" + }, + { + "aces_0.7.1": "aces_0.7.1" + }, + { + "aces_1.0.1": "aces_1.0.1" + }, + { + "aces_1.0.3": "aces_1.0.3" + }, + { + "aces_1.1": "aces_1.1" + }, + { + "aces_1.2": "aces_1.2" + }, + { + "custom": "custom" + } + ] + }, + { + "type": "path", + "key": "customOCIOConfigPath", + "label": "Custom OCIO config path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "workingSpaceLUT", + "label": "Working Space" + }, + { + "type": "text", + "key": "monitorLut", + "label": "monitor" + }, + { + "type": "text", + "key": "int8Lut", + "label": "8-bit files" + }, + { + "type": "text", + "key": "int16Lut", + "label": "16-bit files" + }, + { + "type": "text", + "key": "logLut", + "label": "log files" + }, + { + "type": "text", + "key": "floatLut", + "label": "float files" + } + ] + } + ] + }, + { + "key": "nodes", + "type": "dict", + "label": "Nodes", + "collapsible": true, + "children": [ + { + "key": "requiredNodes", + "type": "list", + "label": "Plugin required", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs", + "key": "knobs" + } + ] + } + + ] + } + }, + { + "type": "splitter" + }, + { + "type": "list", + "key": "overrideNodes", + "label": "Plugin's node overrides", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "key": "subsets", + "label": "Subsets", + "type": "list", + "object_type": "text" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs overrides", + "key": "knobs" + } + ] + } + ] + } + } + ] + }, + { + "key": "regexInputs", + "type": "dict", + "label": "Colorspace on Inputs by regex detection", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "regex", + "label": "Regex" + }, + { + "type": "text", + "key": "colorspace", + "label": "Colorspace" + } + ] + } + } + ] + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json deleted file mode 100644 index ef8c907dda..0000000000 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ /dev/null @@ -1,493 +0,0 @@ -{ - "type": "dict", - "key": "imageio", - "label": "Color Management and Output Formats", - "is_file": true, - "is_group": true, - "children": [ - { - "key": "hiero", - "type": "dict", - "label": "Hiero", - "children": [ - { - "key": "workfile", - "type": "dict", - "label": "Workfile", - "collapsible": false, - "children": [ - { - "type": "form", - "children": [ - { - "type": "enum", - 
"key": "ocioConfigName", - "label": "OpenColorIO Config", - "enum_items": [ - { - "nuke-default": "nuke-default" - }, - { - "aces_1.0.3": "aces_1.0.3" - }, - { - "aces_1.1": "aces_1.1" - }, - { - "custom": "custom" - } - ] - }, - { - "type": "path", - "key": "ocioconfigpath", - "label": "Custom OCIO path", - "multiplatform": true, - "multipath": true - }, - { - "type": "text", - "key": "workingSpace", - "label": "Working Space" - }, - { - "type": "text", - "key": "sixteenBitLut", - "label": "16 Bit Files" - }, - { - "type": "text", - "key": "eightBitLut", - "label": "8 Bit Files" - }, - { - "type": "text", - "key": "floatLut", - "label": "Floating Point Files" - }, - { - "type": "text", - "key": "logLut", - "label": "Log Files" - }, - { - "type": "text", - "key": "viewerLut", - "label": "Viewer" - }, - { - "type": "text", - "key": "thumbnailLut", - "label": "Thumbnails" - } - ] - } - ] - }, - { - "key": "regexInputs", - "type": "dict", - "label": "Colorspace on Inputs by regex detection", - "collapsible": true, - "children": [ - { - "type": "list", - "key": "inputs", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "regex", - "label": "Regex" - }, - { - "type": "text", - "key": "colorspace", - "label": "Colorspace" - } - ] - } - } - ] - } - ] - }, - { - "key": "nuke", - "type": "dict", - "label": "Nuke", - "children": [ - { - "key": "viewer", - "type": "dict", - "label": "Viewer", - "collapsible": false, - "children": [ - { - "type": "text", - "key": "viewerProcess", - "label": "Viewer Process" - } - ] - }, - { - "key": "baking", - "type": "dict", - "label": "Extract-review baking profile", - "collapsible": false, - "children": [ - { - "type": "text", - "key": "viewerProcess", - "label": "Viewer Process" - } - ] - }, - { - "key": "workfile", - "type": "dict", - "label": "Workfile", - "collapsible": false, - "children": [ - { - "type": "form", - "children": [ - { - "type": "enum", - "key": "colorManagement", - "label": "color management", - "enum_items": [ - { - "Nuke": "Nuke" - }, - { - "OCIO": "OCIO" - } - ] - }, - { - "type": "enum", - "key": "OCIO_config", - "label": "OpenColorIO Config", - "enum_items": [ - { - "nuke-default": "nuke-default" - }, - { - "spi-vfx": "spi-vfx" - }, - { - "spi-anim": "spi-anim" - }, - { - "aces_0.1.1": "aces_0.1.1" - }, - { - "aces_0.7.1": "aces_0.7.1" - }, - { - "aces_1.0.1": "aces_1.0.1" - }, - { - "aces_1.0.3": "aces_1.0.3" - }, - { - "aces_1.1": "aces_1.1" - }, - { - "aces_1.2": "aces_1.2" - }, - { - "custom": "custom" - } - ] - }, - { - "type": "path", - "key": "customOCIOConfigPath", - "label": "Custom OCIO config path", - "multiplatform": true, - "multipath": true - }, - { - "type": "text", - "key": "workingSpaceLUT", - "label": "Working Space" - }, - { - "type": "text", - "key": "monitorLut", - "label": "monitor" - }, - { - "type": "text", - "key": "int8Lut", - "label": "8-bit files" - }, - { - "type": "text", - "key": "int16Lut", - "label": "16-bit files" - }, - { - "type": "text", - "key": "logLut", - "label": "log files" - }, - { - "type": "text", - "key": "floatLut", - "label": "float files" - } - ] - } - ] - }, - { - "key": "nodes", - "type": "dict", - "label": "Nodes", - "collapsible": true, - "children": [ - { - "key": "requiredNodes", - "type": "list", - "label": "Plugin required", - "object_type": { - "type": "dict", - "children": [ - { - "type": "list", - "key": "plugins", - "label": "Used in plugins", - "object_type": { - "type": "text", - "key": "pluginClass" - } - }, - { - "type": "text", - "key": 
"nukeNodeClass", - "label": "Nuke Node Class" - }, - { - "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Knobs", - "key": "knobs" - } - ] - } - - ] - } - }, - { - "type": "splitter" - }, - { - "type": "list", - "key": "overrideNodes", - "label": "Plugin's node overrides", - "object_type": { - "type": "dict", - "children": [ - { - "type": "list", - "key": "plugins", - "label": "Used in plugins", - "object_type": { - "type": "text", - "key": "pluginClass" - } - }, - { - "type": "text", - "key": "nukeNodeClass", - "label": "Nuke Node Class" - }, - { - "key": "subsets", - "label": "Subsets", - "type": "list", - "object_type": "text" - }, - { - "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Knobs overrides", - "key": "knobs" - } - ] - } - ] - } - } - ] - }, - { - "key": "regexInputs", - "type": "dict", - "label": "Colorspace on Inputs by regex detection", - "collapsible": true, - "children": [ - { - "type": "list", - "key": "inputs", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "regex", - "label": "Regex" - }, - { - "type": "text", - "key": "colorspace", - "label": "Colorspace" - } - ] - } - } - ] - } - ] - }, - { - "key": "maya", - "type": "dict", - "label": "Maya", - "children": [ - { - "key": "colorManagementPreference_v2", - "type": "dict", - "label": "Color Management Preference v2 (Maya 2022+)", - "collapsible": true, - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Use Color Management Preference v2" - }, - { - "type": "path", - "key": "configFilePath", - "label": "OCIO Config File Path", - "multiplatform": true, - "multipath": true - }, - { - "type": "text", - "key": "renderSpace", - "label": "Rendering Space" - }, - { - "type": "text", - "key": "displayName", - "label": "Display" - }, - { - "type": "text", - "key": "viewName", - "label": "View" - } - ] - }, - { - "key": "colorManagementPreference", - "type": "dict", - "label": "Color Management Preference (legacy)", - "collapsible": true, - "children": [ - { - "type": "path", - "key": "configFilePath", - "label": "OCIO Config File Path", - "multiplatform": true, - "multipath": true - }, - { - "type": "text", - "key": "renderSpace", - "label": "Rendering Space" - }, - { - "type": "text", - "key": "viewTransform", - "label": "Viewer Transform" - } - ] - } - ] - }, - { - "key": "flame", - "type": "dict", - "label": "Flame & Flare", - "children": [ - { - "key": "project", - "type": "dict", - "label": "Project", - "collapsible": false, - "children": [ - { - "type": "form", - "children": [ - { - "type": "text", - "key": "colourPolicy", - "label": "Colour Policy (name or path)" - }, - { - "type": "text", - "key": "frameDepth", - "label": "Image Depth" - }, - { - "type": "text", - "key": "fieldDominance", - "label": "Field Dominance" - } - ] - } - ] - }, - { - "key": "profilesMapping", - "type": "dict", - "label": "Profile names mapping", - "collapsible": true, - "children": [ - { - "type": "list", - "key": "inputs", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "flameName", - "label": "Flame name" - }, - { - "type": "text", - "key": "ocioName", - "label": "OCIO name" - } - ] - } - } - ] - } - ] - } - ] -} diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 5eaddf6e6e..3112400dbf 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -316,22 +316,6 @@ def 
_system_settings_backwards_compatible_conversion(studio_overrides): } -def _project_anatomy_backwards_compatible_conversion(project_anatomy): - # Backwards compatibility of node settings in Nuke 3.9.x - 3.10.0 - # - source PR - https://github.com/pypeclub/OpenPype/pull/3143 - value = project_anatomy - for key in ("imageio", "nuke", "nodes", "requiredNodes"): - if key not in value: - return - value = value[key] - - for item in value: - for node in item.get("knobs") or []: - if "type" in node: - break - node["type"] = "__legacy__" - - @require_handler def get_studio_system_settings_overrides(return_version=False): output = _SETTINGS_HANDLER.get_studio_system_settings_overrides( @@ -368,7 +352,6 @@ def get_project_settings_overrides(project_name, return_version=False): @require_handler def get_project_anatomy_overrides(project_name): output = _SETTINGS_HANDLER.get_project_anatomy_overrides(project_name) - _project_anatomy_backwards_compatible_conversion(output) return output From 843b52cbdb0433a3284a4f894749d2f8503043f7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 23 Sep 2022 21:39:39 +0200 Subject: [PATCH 1338/2550] Remove imageio from config-2.0 --- schema/config-2.0.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/schema/config-2.0.json b/schema/config-2.0.json index 54b226711a..c20f0a3f46 100644 --- a/schema/config-2.0.json +++ b/schema/config-2.0.json @@ -23,9 +23,6 @@ "roots": { "type": "object" }, - "imageio": { - "type": "object" - }, "tasks": { "type": "object", "items": { From 50b850ec17e37b720b21d1c80e87320465d3db05 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 24 Sep 2022 04:19:03 +0000 Subject: [PATCH 1339/2550] [Automated] Bump version --- CHANGELOG.md | 31 ++++++++++++++----------------- openpype/version.py | 2 +- 2 files changed, 15 insertions(+), 18 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f868e6ed6e..24e02acc6f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,17 +1,20 @@ # Changelog -## [3.14.3-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.3-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...HEAD) **🚀 Enhancements** - Maya: better logging in Maketx [\#3886](https://github.com/pypeclub/OpenPype/pull/3886) +- Photoshop: review can be turned off [\#3885](https://github.com/pypeclub/OpenPype/pull/3885) - TrayPublisher: added persisting of last selected project [\#3871](https://github.com/pypeclub/OpenPype/pull/3871) - TrayPublisher: added text filter on project name to Tray Publisher [\#3867](https://github.com/pypeclub/OpenPype/pull/3867) - Github issues adding `running version` section [\#3864](https://github.com/pypeclub/OpenPype/pull/3864) - Publisher: Increase size of main window [\#3862](https://github.com/pypeclub/OpenPype/pull/3862) +- Flame: make migratable projects after creation [\#3860](https://github.com/pypeclub/OpenPype/pull/3860) - Photoshop: synchronize image version with workfile [\#3854](https://github.com/pypeclub/OpenPype/pull/3854) +- General: Transcoding handle float2 attr type [\#3849](https://github.com/pypeclub/OpenPype/pull/3849) - General: Simple script for getting license information about used packages [\#3843](https://github.com/pypeclub/OpenPype/pull/3843) - Houdini: Increment current file on workfile publish [\#3840](https://github.com/pypeclub/OpenPype/pull/3840) - Publisher: Add new publisher to host tools [\#3833](https://github.com/pypeclub/OpenPype/pull/3833) @@ -20,9 +23,15 @@ **🐛 Bug fixes** +- 
Flame: loading multilayer exr to batch/reel is working [\#3901](https://github.com/pypeclub/OpenPype/pull/3901) +- Hiero: Fix inventory check on launch [\#3895](https://github.com/pypeclub/OpenPype/pull/3895) +- WebPublisher: Fix import after refactor [\#3891](https://github.com/pypeclub/OpenPype/pull/3891) +- TVPaint: Fix renaming of rendered files [\#3882](https://github.com/pypeclub/OpenPype/pull/3882) +- Publisher: Nice checkbox visible in Python 2 [\#3877](https://github.com/pypeclub/OpenPype/pull/3877) - Settings: Add missing default settings [\#3870](https://github.com/pypeclub/OpenPype/pull/3870) - General: Copy of workfile does not use 'copy' function but 'copyfile' [\#3869](https://github.com/pypeclub/OpenPype/pull/3869) - Tray Publisher: skip plugin if otioTimeline is missing [\#3856](https://github.com/pypeclub/OpenPype/pull/3856) +- Flame: retimed attributes are integrated with settings [\#3855](https://github.com/pypeclub/OpenPype/pull/3855) - Maya: Extract Playblast fix textures + labelize viewport show settings [\#3852](https://github.com/pypeclub/OpenPype/pull/3852) - Ftrack: Url validation does not require ftrackapp [\#3834](https://github.com/pypeclub/OpenPype/pull/3834) - Maya+Ftrack: Change typo in family name `mayaascii` -\> `mayaAscii` [\#3820](https://github.com/pypeclub/OpenPype/pull/3820) @@ -30,9 +39,12 @@ **🔀 Refactored code** +- Houdini: Use new Extractor location [\#3894](https://github.com/pypeclub/OpenPype/pull/3894) +- Harmony: Use new Extractor location [\#3893](https://github.com/pypeclub/OpenPype/pull/3893) - Hiero: Use new Extractor location [\#3851](https://github.com/pypeclub/OpenPype/pull/3851) - Maya: Remove old legacy \(ftrack\) plug-ins that are of no use anymore [\#3819](https://github.com/pypeclub/OpenPype/pull/3819) - Nuke: Use new Extractor location [\#3799](https://github.com/pypeclub/OpenPype/pull/3799) +- Maya: Use new Extractor location [\#3775](https://github.com/pypeclub/OpenPype/pull/3775) **Merged pull requests:** @@ -54,7 +66,6 @@ - General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) -- Kitsu: Drop 'entities root' setting. 
[\#3739](https://github.com/pypeclub/OpenPype/pull/3739) **🐛 Bug fixes** @@ -74,7 +85,6 @@ - Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) - Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) - AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) -- Maya: Use new Extractor location [\#3775](https://github.com/pypeclub/OpenPype/pull/3775) - General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) - General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) - General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) @@ -83,10 +93,7 @@ - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) - General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) -- Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) -- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) -- Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) **Merged pull requests:** @@ -106,22 +113,12 @@ - Maya: Fix typo in getPanel argument `with\_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753) - General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) - General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) -- Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) -- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) -- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) -- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) -- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) -- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) -- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) -- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) -- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) diff --git a/openpype/version.py b/openpype/version.py index 26b145f1db..fd6e894fe2 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package 
declaring Pype version.""" -__version__ = "3.14.3-nightly.3" +__version__ = "3.14.3-nightly.4" From 2e3e799f34955a31a946b939411cdb477eb33da6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 24 Sep 2022 11:33:03 +0200 Subject: [PATCH 1340/2550] Fix PublishIconButton drawing disabled icon with the color specified - Previously the color to draw was ignored when button was disabled because default color was applied to the disabled state --- openpype/tools/publisher/widgets/widgets.py | 40 +++++++-------------- 1 file changed, 12 insertions(+), 28 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index aa7e3be687..1b081cc4a1 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -125,28 +125,19 @@ class PublishIconBtn(IconButton): def __init__(self, pixmap_path, *args, **kwargs): super(PublishIconBtn, self).__init__(*args, **kwargs) - loaded_image = QtGui.QImage(pixmap_path) + icon = self.generate_icon(pixmap_path, + enabled_color=QtCore.Qt.white, + disabled_color=QtGui.QColor("#5b6779")) + self.setIcon(icon) - pixmap = self.paint_image_with_color(loaded_image, QtCore.Qt.white) - - self._base_image = loaded_image - self._enabled_icon = QtGui.QIcon(pixmap) - self._disabled_icon = None - - self.setIcon(self._enabled_icon) - - def get_enabled_icon(self): - """Enabled icon.""" - return self._enabled_icon - - def get_disabled_icon(self): - """Disabled icon.""" - if self._disabled_icon is None: - pixmap = self.paint_image_with_color( - self._base_image, QtCore.Qt.gray - ) - self._disabled_icon = QtGui.QIcon(pixmap) - return self._disabled_icon + def generate_icon(self, pixmap_path, enabled_color, disabled_color): + icon = QtGui.QIcon() + image = QtGui.QImage(pixmap_path) + enabled_pixmap = self.paint_image_with_color(image, enabled_color) + icon.addPixmap(enabled_pixmap, icon.Normal) + disabled_pixmap = self.paint_image_with_color(image, disabled_color) + icon.addPixmap(disabled_pixmap, icon.Disabled) + return icon @staticmethod def paint_image_with_color(image, color): @@ -187,13 +178,6 @@ class PublishIconBtn(IconButton): return pixmap - def setEnabled(self, enabled): - super(PublishIconBtn, self).setEnabled(enabled) - if self.isEnabled(): - self.setIcon(self.get_enabled_icon()) - else: - self.setIcon(self.get_disabled_icon()) - class ResetBtn(PublishIconBtn): """Publish reset button.""" From 4a98d8de3916db4aa34a362e1cd89b97f73101f7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 24 Sep 2022 12:03:35 +0200 Subject: [PATCH 1341/2550] Revert removal of anatomy imageio settings - This way project save will not delete the old settings --- .../defaults/project_anatomy/imageio.json | 258 +++++++++ .../schemas/projects_schema/schema_main.json | 4 + .../schemas/schema_anatomy_imageio.json | 493 ++++++++++++++++++ 3 files changed, 755 insertions(+) create mode 100644 openpype/settings/defaults/project_anatomy/imageio.json create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json new file mode 100644 index 0000000000..f0be8f95f4 --- /dev/null +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -0,0 +1,258 @@ +{ + "hiero": { + "workfile": { + "ocioConfigName": "nuke-default", + "ocioconfigpath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "workingSpace": "linear", + 
"sixteenBitLut": "sRGB", + "eightBitLut": "sRGB", + "floatLut": "linear", + "logLut": "Cineon", + "viewerLut": "sRGB", + "thumbnailLut": "sRGB" + }, + "regexInputs": { + "inputs": [ + { + "regex": "[^-a-zA-Z0-9](plateRef).*(?=mp4)", + "colorspace": "sRGB" + } + ] + } + }, + "nuke": { + "viewer": { + "viewerProcess": "sRGB" + }, + "baking": { + "viewerProcess": "rec709" + }, + "workfile": { + "colorManagement": "Nuke", + "OCIO_config": "nuke-default", + "customOCIOConfigPath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "workingSpaceLUT": "linear", + "monitorLut": "sRGB", + "int8Lut": "sRGB", + "int16Lut": "sRGB", + "logLut": "Cineon", + "floatLut": "linear" + }, + "nodes": { + "requiredNodes": [ + { + "plugins": [ + "CreateWriteRender" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "exr" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit half" + }, + { + "type": "text", + "name": "compression", + "value": "Zip (1 scanline)" + }, + { + "type": "bool", + "name": "autocrop", + "value": true + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 186, + 35, + 35, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "linear" + }, + { + "type": "bool", + "name": "create_directories", + "value": true + } + ] + }, + { + "plugins": [ + "CreateWritePrerender" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "exr" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit half" + }, + { + "type": "text", + "name": "compression", + "value": "Zip (1 scanline)" + }, + { + "type": "bool", + "name": "autocrop", + "value": true + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 171, + 171, + 10, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "linear" + }, + { + "type": "bool", + "name": "create_directories", + "value": true + } + ] + }, + { + "plugins": [ + "CreateWriteStill" + ], + "nukeNodeClass": "Write", + "knobs": [ + { + "type": "text", + "name": "file_type", + "value": "tiff" + }, + { + "type": "text", + "name": "datatype", + "value": "16 bit" + }, + { + "type": "text", + "name": "compression", + "value": "Deflate" + }, + { + "type": "color_gui", + "name": "tile_color", + "value": [ + 56, + 162, + 7, + 255 + ] + }, + { + "type": "text", + "name": "channels", + "value": "rgb" + }, + { + "type": "text", + "name": "colorspace", + "value": "sRGB" + }, + { + "type": "bool", + "name": "create_directories", + "value": true + } + ] + } + ], + "overrideNodes": [] + }, + "regexInputs": { + "inputs": [ + { + "regex": "(beauty).*(?=.exr)", + "colorspace": "linear" + } + ] + } + }, + "maya": { + "colorManagementPreference_v2": { + "enabled": true, + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "renderSpace": "ACEScg", + "displayName": "sRGB", + "viewName": "ACES 1.0 SDR-video" + }, + "colorManagementPreference": { + "configFilePath": { + "windows": [], + "darwin": [], + "linux": [] + }, + "renderSpace": "scene-linear Rec 709/sRGB", + "viewTransform": "sRGB gamma" + } + }, + "flame": { + "project": { + "colourPolicy": "ACES 1.1", + "frameDepth": "16-bit fp", + "fieldDominance": "PROGRESSIVE" + }, + "profilesMapping": { + "inputs": [ + { + "flameName": "ACEScg", + "ocioName": "ACES - ACEScg" + }, + { + "flameName": "Rec.709 video", + 
"ocioName": "Output - Rec.709" + } + ] + } + } +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 0f4afc54ce..0b9fbf7470 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -43,6 +43,10 @@ } ] } + }, + { + "type": "schema", + "name": "schema_anatomy_imageio" } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json new file mode 100644 index 0000000000..ef8c907dda --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -0,0 +1,493 @@ +{ + "type": "dict", + "key": "imageio", + "label": "Color Management and Output Formats", + "is_file": true, + "is_group": true, + "children": [ + { + "key": "hiero", + "type": "dict", + "label": "Hiero", + "children": [ + { + "key": "workfile", + "type": "dict", + "label": "Workfile", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "enum", + "key": "ocioConfigName", + "label": "OpenColorIO Config", + "enum_items": [ + { + "nuke-default": "nuke-default" + }, + { + "aces_1.0.3": "aces_1.0.3" + }, + { + "aces_1.1": "aces_1.1" + }, + { + "custom": "custom" + } + ] + }, + { + "type": "path", + "key": "ocioconfigpath", + "label": "Custom OCIO path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "workingSpace", + "label": "Working Space" + }, + { + "type": "text", + "key": "sixteenBitLut", + "label": "16 Bit Files" + }, + { + "type": "text", + "key": "eightBitLut", + "label": "8 Bit Files" + }, + { + "type": "text", + "key": "floatLut", + "label": "Floating Point Files" + }, + { + "type": "text", + "key": "logLut", + "label": "Log Files" + }, + { + "type": "text", + "key": "viewerLut", + "label": "Viewer" + }, + { + "type": "text", + "key": "thumbnailLut", + "label": "Thumbnails" + } + ] + } + ] + }, + { + "key": "regexInputs", + "type": "dict", + "label": "Colorspace on Inputs by regex detection", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "regex", + "label": "Regex" + }, + { + "type": "text", + "key": "colorspace", + "label": "Colorspace" + } + ] + } + } + ] + } + ] + }, + { + "key": "nuke", + "type": "dict", + "label": "Nuke", + "children": [ + { + "key": "viewer", + "type": "dict", + "label": "Viewer", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "baking", + "type": "dict", + "label": "Extract-review baking profile", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "workfile", + "type": "dict", + "label": "Workfile", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "enum", + "key": "colorManagement", + "label": "color management", + "enum_items": [ + { + "Nuke": "Nuke" + }, + { + "OCIO": "OCIO" + } + ] + }, + { + "type": "enum", + "key": "OCIO_config", + "label": "OpenColorIO Config", + "enum_items": [ + { + "nuke-default": "nuke-default" + }, + { + "spi-vfx": "spi-vfx" + }, + { + "spi-anim": "spi-anim" + }, + 
{ + "aces_0.1.1": "aces_0.1.1" + }, + { + "aces_0.7.1": "aces_0.7.1" + }, + { + "aces_1.0.1": "aces_1.0.1" + }, + { + "aces_1.0.3": "aces_1.0.3" + }, + { + "aces_1.1": "aces_1.1" + }, + { + "aces_1.2": "aces_1.2" + }, + { + "custom": "custom" + } + ] + }, + { + "type": "path", + "key": "customOCIOConfigPath", + "label": "Custom OCIO config path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "workingSpaceLUT", + "label": "Working Space" + }, + { + "type": "text", + "key": "monitorLut", + "label": "monitor" + }, + { + "type": "text", + "key": "int8Lut", + "label": "8-bit files" + }, + { + "type": "text", + "key": "int16Lut", + "label": "16-bit files" + }, + { + "type": "text", + "key": "logLut", + "label": "log files" + }, + { + "type": "text", + "key": "floatLut", + "label": "float files" + } + ] + } + ] + }, + { + "key": "nodes", + "type": "dict", + "label": "Nodes", + "collapsible": true, + "children": [ + { + "key": "requiredNodes", + "type": "list", + "label": "Plugin required", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs", + "key": "knobs" + } + ] + } + + ] + } + }, + { + "type": "splitter" + }, + { + "type": "list", + "key": "overrideNodes", + "label": "Plugin's node overrides", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "key": "subsets", + "label": "Subsets", + "type": "list", + "object_type": "text" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs overrides", + "key": "knobs" + } + ] + } + ] + } + } + ] + }, + { + "key": "regexInputs", + "type": "dict", + "label": "Colorspace on Inputs by regex detection", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "regex", + "label": "Regex" + }, + { + "type": "text", + "key": "colorspace", + "label": "Colorspace" + } + ] + } + } + ] + } + ] + }, + { + "key": "maya", + "type": "dict", + "label": "Maya", + "children": [ + { + "key": "colorManagementPreference_v2", + "type": "dict", + "label": "Color Management Preference v2 (Maya 2022+)", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Use Color Management Preference v2" + }, + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "renderSpace", + "label": "Rendering Space" + }, + { + "type": "text", + "key": "displayName", + "label": "Display" + }, + { + "type": "text", + "key": "viewName", + "label": "View" + } + ] + }, + { + "key": "colorManagementPreference", + "type": "dict", + "label": "Color Management Preference (legacy)", + "collapsible": true, + "children": [ + { + "type": "path", + "key": "configFilePath", + "label": "OCIO Config File Path", + "multiplatform": true, + "multipath": true + }, + { + "type": 
"text", + "key": "renderSpace", + "label": "Rendering Space" + }, + { + "type": "text", + "key": "viewTransform", + "label": "Viewer Transform" + } + ] + } + ] + }, + { + "key": "flame", + "type": "dict", + "label": "Flame & Flare", + "children": [ + { + "key": "project", + "type": "dict", + "label": "Project", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "text", + "key": "colourPolicy", + "label": "Colour Policy (name or path)" + }, + { + "type": "text", + "key": "frameDepth", + "label": "Image Depth" + }, + { + "type": "text", + "key": "fieldDominance", + "label": "Field Dominance" + } + ] + } + ] + }, + { + "key": "profilesMapping", + "type": "dict", + "label": "Profile names mapping", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "flameName", + "label": "Flame name" + }, + { + "type": "text", + "key": "ocioName", + "label": "OCIO name" + } + ] + } + } + ] + } + ] + } + ] +} From 9ff7d5665304ccf16533a2a3e17f272cc0ab3697 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 24 Sep 2022 12:04:03 +0200 Subject: [PATCH 1342/2550] Revert imageio removal from project anatomy in config schema --- schema/config-2.0.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schema/config-2.0.json b/schema/config-2.0.json index c20f0a3f46..54b226711a 100644 --- a/schema/config-2.0.json +++ b/schema/config-2.0.json @@ -23,6 +23,9 @@ "roots": { "type": "object" }, + "imageio": { + "type": "object" + }, "tasks": { "type": "object", "items": { From 92371d54fa30b53ca43c630062e4ace522137a82 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 24 Sep 2022 12:09:52 +0200 Subject: [PATCH 1343/2550] Add deprecation labels --- .../projects_schema/schemas/schema_anatomy_imageio.json | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index ef8c907dda..93b6adae6b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -1,10 +1,14 @@ { "type": "dict", "key": "imageio", - "label": "Color Management and Output Formats", + "label": "Color Management and Output Formats (Deprecated)", "is_file": true, "is_group": true, "children": [ + { + "type": "label", + "label": "These settings are deprecated and have moved to: project_settings/{app}/imageio.
    You can right-click to copy each host's values here and paste them into that host's new settings as needed.
    Changing these values here will not do anything." + }, { "key": "hiero", "type": "dict", From d6949754a1e6bff33d85df0a2012d15dbf825214 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 11:07:17 +0200 Subject: [PATCH 1344/2550] Use colors from style/data.json --- openpype/tools/publisher/widgets/widgets.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 1b081cc4a1..d1fa71343c 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -16,6 +16,7 @@ from openpype.tools.utils import ( BaseClickableFrame, set_style_property, ) +from openpype.style import get_objected_colors from openpype.pipeline.create import ( SUBSET_NAME_ALLOWED_SYMBOLS, TaskNotSetError, @@ -125,9 +126,11 @@ class PublishIconBtn(IconButton): def __init__(self, pixmap_path, *args, **kwargs): super(PublishIconBtn, self).__init__(*args, **kwargs) - icon = self.generate_icon(pixmap_path, - enabled_color=QtCore.Qt.white, - disabled_color=QtGui.QColor("#5b6779")) + colors = get_objected_colors() + icon = self.generate_icon( + pixmap_path, + enabled_color=colors["font"].get_qcolor(), + disabled_color=colors["font-disabled"].get_qcolor()) self.setIcon(icon) def generate_icon(self, pixmap_path, enabled_color, disabled_color): From 6237c4ae8204f044da371594814f34eefd1e91f3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 11:07:34 +0200 Subject: [PATCH 1345/2550] Update "font-disabled" color --- openpype/style/data.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/style/data.json b/openpype/style/data.json index 15d9472e3e..adda49de23 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -20,7 +20,7 @@ "color": { "font": "#D3D8DE", "font-hover": "#F0F2F5", - "font-disabled": "#99A3B2", + "font-disabled": "#5b6779", "font-view-selection": "#ffffff", "font-view-hover": "#F0F2F5", From 65f31c445c5088ef6e02be9d9304b46aadc10402 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 11:26:36 +0200 Subject: [PATCH 1346/2550] Cache `get_objected_colors` function --- openpype/style/__init__.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/style/__init__.py b/openpype/style/__init__.py index b2a1a4ce6c..ca6183b62e 100644 --- a/openpype/style/__init__.py +++ b/openpype/style/__init__.py @@ -19,6 +19,8 @@ class _Cache: disabled_entity_icon_color = None deprecated_entity_font_color = None + objected_colors = None + def get_style_image_path(image_name): # All filenames are lowered @@ -81,10 +83,15 @@ def get_objected_colors(): Returns: dict: Parsed color objects by keys in data. 
""" + if _Cache.objected_colors is not None: + return _Cache.objected_colors + colors_data = get_colors_data() output = {} for key, value in colors_data.items(): output[key] = _convert_color_values_to_objects(value) + + _Cache.objected_colors = output return output From 0fd0e307ae6fed41505e38d2cbd27bfe72a5cf32 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 12:15:02 +0200 Subject: [PATCH 1347/2550] Cache colors data --- openpype/style/__init__.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/style/__init__.py b/openpype/style/__init__.py index ca6183b62e..b34e3f97b0 100644 --- a/openpype/style/__init__.py +++ b/openpype/style/__init__.py @@ -19,6 +19,7 @@ class _Cache: disabled_entity_icon_color = None deprecated_entity_font_color = None + colors_data = None objected_colors = None @@ -48,8 +49,13 @@ def _get_colors_raw_data(): def get_colors_data(): """Only color data from stylesheet data.""" + if _Cache.colors_data is not None: + return _Cache.colors_data + data = _get_colors_raw_data() - return data.get("color") or {} + color_data = data.get("color") or {} + _Cache.colors_data = color_data + return color_data def _convert_color_values_to_objects(value): From e2fd32d8106d507a326f24fad2ee7aab1d52fdb8 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Mon, 26 Sep 2022 13:43:22 +0200 Subject: [PATCH 1348/2550] use regex and logger --- .../modules/kitsu/utils/update_op_with_zou.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index d4ced9dab2..8d81983ae2 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -21,6 +21,9 @@ from openpype.pipeline import AvalonMongoDB from openpype.settings import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials +from openpype.lib import Logger + +log = Logger.get_logger(__name__) # Accepted namin pattern for OP naming_pattern = re.compile("^[a-zA-Z0-9_.]*$") @@ -247,7 +250,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_name = project["name"] project_doc = get_project(project_name) if not project_doc: - print(f"Creating project '{project_name}'") + log.info(f"Creating project '{project_name}'") project_doc = create_project(project_name, project_name) # Project data and tasks @@ -271,10 +274,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: } ) - proj_res = project["resolution"] - if "x" in proj_res: - project_data['resolutionWidth'] = int(proj_res.split("x")[0]) - project_data['resolutionHeight'] = int(proj_res.split("x")[1]) + match_res = re.match(r"(\d+)x(\d+)", project["resolution"]) + if match_res: + project_data['resolutionWidth'] = match_res.group(1) + project_data['resolutionHeight'] = match_res.group(2) + else: + log.warning(f"\'{project['resolution']}\' does not match the expected "\ + "format for the resolution, for example: 1920x1080") return UpdateOne( {"_id": project_doc["_id"]}, @@ -336,7 +342,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): if not project: project = gazu.project.get_project_by_name(project["name"]) - print(f"Synchronizing {project['name']}...") + log.info(f"Synchronizing {project['name']}...") # Get all assets from zou all_assets = gazu.asset.all_assets_for_project(project) From 9f591b2605ae5892b5bbebe91123e1942399b76f Mon Sep 
17 00:00:00 2001 From: "clement.hector" Date: Mon, 26 Sep 2022 13:49:42 +0200 Subject: [PATCH 1349/2550] fix linter --- openpype/modules/kitsu/utils/update_op_with_zou.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 8d81983ae2..7a54ed20bb 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -279,8 +279,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_data['resolutionWidth'] = match_res.group(1) project_data['resolutionHeight'] = match_res.group(2) else: - log.warning(f"\'{project['resolution']}\' does not match the expected "\ - "format for the resolution, for example: 1920x1080") + log.warning(f"\'{project['resolution']}\' does not match the " + "expected format for the resolution, for example: 1920x1080") return UpdateOne( {"_id": project_doc["_id"]}, From a0bd78027cc1aacd7ba0ed93029c91ddd5d52233 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Mon, 26 Sep 2022 13:51:44 +0200 Subject: [PATCH 1350/2550] fix linter --- openpype/modules/kitsu/utils/update_op_with_zou.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 7a54ed20bb..94b26c2019 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -279,8 +279,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_data['resolutionWidth'] = match_res.group(1) project_data['resolutionHeight'] = match_res.group(2) else: - log.warning(f"\'{project['resolution']}\' does not match the " - "expected format for the resolution, for example: 1920x1080") + log.warning(f"\'{project['resolution']}\' does not match the expected" + " format for the resolution, for example: 1920x1080") return UpdateOne( {"_id": project_doc["_id"]}, From 02765e95c1a89882ff86778ecc4c98449dcc84ec Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 14:20:17 +0200 Subject: [PATCH 1351/2550] Continue instead of return to allow other valid configs to still be set --- openpype/hosts/houdini/api/shelves.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 248d99105c..e179a5fde7 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -51,7 +51,7 @@ def generate_shelves(): log.warning( "No name found in shelf set definition." ) - return + continue shelf_set = get_or_create_shelf_set(shelf_set_name) @@ -63,7 +63,7 @@ def generate_shelves(): shelf_set_name ) ) - return + continue for shelf_definition in shelves_definition: shelf_name = shelf_definition.get('shelf_name') @@ -71,7 +71,7 @@ def generate_shelves(): log.warning( "No name found in shelf definition." 
) - return + continue shelf = get_or_create_shelf(shelf_name) @@ -81,7 +81,7 @@ def generate_shelves(): shelf_name ) ) - return + continue mandatory_attributes = {'name', 'script'} for tool_definition in shelf_definition.get('tools_list'): @@ -98,7 +98,7 @@ the script path of the tool.") tool = get_or_create_tool(tool_definition, shelf) if not tool: - return + continue # Add the tool to the shelf if not already in it if tool not in shelf.tools(): From 98d7de5103a1dae8bf7977f6c50b368b138369ca Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 14:21:00 +0200 Subject: [PATCH 1352/2550] Do not create Shelf Set if no shelf definition --- openpype/hosts/houdini/api/shelves.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index e179a5fde7..b78e461c66 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -53,10 +53,7 @@ def generate_shelves(): ) continue - shelf_set = get_or_create_shelf_set(shelf_set_name) - shelves_definition = shelf_set_config.get('shelf_definition') - if not shelves_definition: log.debug( "No shelf definition found for shelf set named '{}'".format( @@ -65,6 +62,7 @@ def generate_shelves(): ) continue + shelf_set = get_or_create_shelf_set(shelf_set_name) for shelf_definition in shelves_definition: shelf_name = shelf_definition.get('shelf_name') if not shelf_name: From 0c08dd17e43746a72c21b28f359b43c48a02cefd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 14:22:36 +0200 Subject: [PATCH 1353/2550] Remove default empty "OpenPype Shelves" shelf set. If empty, it'd just spew warnings and remain redundant --- .../settings/defaults/project_settings/houdini.json | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index cdf829db57..1517983569 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -1,15 +1,5 @@ { - "shelves": [ - { - "shelf_set_name": "OpenPype Shelves", - "shelf_set_source_path": { - "windows": "", - "darwin": "", - "linux": "" - }, - "shelf_definition": [] - } - ], + "shelves": [], "create": { "CreateArnoldAss": { "enabled": true, From 855a1e4eb074c4d42603c3bb5a7720bfaea5b0b8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 14:25:31 +0200 Subject: [PATCH 1354/2550] Use `next` to directly stop on finding first match --- openpype/hosts/houdini/api/shelves.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index b78e461c66..eacd0a267f 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -119,12 +119,10 @@ def get_or_create_shelf_set(shelf_set_label): """ all_shelves_sets = hou.shelves.shelfSets().values() - shelf_sets = [ - shelf for shelf in all_shelves_sets if shelf.label() == shelf_set_label - ] - - if shelf_sets: - return shelf_sets[0] + shelf_set = next((shelf for shelf in all_shelves_sets if + shelf.label() == shelf_set_label), None) + if shelf_set: + return shelf_set[0] shelf_set_name = shelf_set_label.replace(' ', '_').lower() new_shelf_set = hou.shelves.newShelfSet( From 7bd1d6c6b0362d660babfd3c29d23aea61660e7d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 14:27:43 +0200 
Subject: [PATCH 1355/2550] Fix typo --- openpype/hosts/houdini/api/shelves.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index eacd0a267f..a1bcac3b30 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -122,7 +122,7 @@ def get_or_create_shelf_set(shelf_set_label): shelf_set = next((shelf for shelf in all_shelves_sets if shelf.label() == shelf_set_label), None) if shelf_set: - return shelf_set[0] + return shelf_set shelf_set_name = shelf_set_label.replace(' ', '_').lower() new_shelf_set = hou.shelves.newShelfSet( From 4f5769550455c3545490a007b51948876885a3d9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 14:28:47 +0200 Subject: [PATCH 1356/2550] Use `next` to return on first match --- openpype/hosts/houdini/api/shelves.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index a1bcac3b30..254e2278c2 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -144,10 +144,9 @@ def get_or_create_shelf(shelf_label): """ all_shelves = hou.shelves.shelves().values() - shelf = [s for s in all_shelves if s.label() == shelf_label] - + shelf = next((s for s in all_shelves if s.label() == shelf_label), None) if shelf: - return shelf[0] + return shelf shelf_name = shelf_label.replace(' ', '_').lower() new_shelf = hou.shelves.newShelf( From d6a0f641920dad649701234e434abc21a7e88495 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 14:29:43 +0200 Subject: [PATCH 1357/2550] Use `next` to return on first match --- openpype/hosts/houdini/api/shelves.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 254e2278c2..5ece24fb56 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -170,15 +170,15 @@ def get_or_create_tool(tool_definition, shelf): existing_tools = shelf.tools() tool_label = tool_definition.get('label') - existing_tool = [ - tool for tool in existing_tools if tool.label() == tool_label - ] - + existing_tool = next( + (tool for tool in existing_tools if tool.label() == tool_label), + None + ) if existing_tool: tool_definition.pop('name', None) tool_definition.pop('label', None) - existing_tool[0].setData(**tool_definition) - return existing_tool[0] + existing_tool.setData(**tool_definition) + return existing_tool tool_name = tool_label.replace(' ', '_').lower() From 5c0b5b148b1cad1a4525eac8bb697ebec4057ee9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Hector?= Date: Mon, 26 Sep 2022 15:06:44 +0200 Subject: [PATCH 1358/2550] make resolution int var Co-authored-by: Roy Nieterau --- openpype/modules/kitsu/utils/update_op_with_zou.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 94b26c2019..10e80b3c89 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -276,8 +276,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: match_res = re.match(r"(\d+)x(\d+)", project["resolution"]) if match_res: - project_data['resolutionWidth'] = match_res.group(1) - project_data['resolutionHeight'] = 
match_res.group(2) + project_data['resolutionWidth'] = int(match_res.group(1)) + project_data['resolutionHeight'] = int(match_res.group(2)) else: log.warning(f"\'{project['resolution']}\' does not match the expected" " format for the resolution, for example: 1920x1080") From 4e8ae52a275af9b61f7ebcf7becf500e5cfa208f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 15:16:00 +0200 Subject: [PATCH 1359/2550] Do no raise error but log error if filepath does not exist --- openpype/hosts/houdini/api/shelves.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 5ece24fb56..b118b5e36d 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -34,14 +34,12 @@ def generate_shelves(): for shelf_set_config in shelves_set_config: shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') - - if shelf_set_filepath[current_os]: - if not os.path.isfile(shelf_set_filepath[current_os]): - raise FileNotFoundError( - "This path doesn't exist - {}".format( - shelf_set_filepath[current_os] - ) - ) + shelf_set_os_filepath = shelf_set_filepath[current_os] + if shelf_set_os_filepath: + if not os.path.isfile(shelf_set_os_filepath): + log.error("Shelf path doesn't exist - " + "{}".format(shelf_set_os_filepath)) + continue hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) continue From a93a09b47a0d091b9d51ac5e3113495d76970a88 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 15:17:37 +0200 Subject: [PATCH 1360/2550] Re-use variable --- openpype/hosts/houdini/api/shelves.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index b118b5e36d..1482af4301 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -41,7 +41,7 @@ def generate_shelves(): "{}".format(shelf_set_os_filepath)) continue - hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) + hou.shelves.newShelfSet(file_path=shelf_set_os_filepath) continue shelf_set_name = shelf_set_config.get('shelf_set_name') From a27a996878bb7939bbe39ce4eb837b9d4a10d0e4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 15:19:35 +0200 Subject: [PATCH 1361/2550] Remove FileNotFound error definitions --- openpype/hosts/houdini/api/shelves.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 1482af4301..b9f36bd1d3 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,7 +1,6 @@ import os import logging import platform -import six from openpype.settings import get_project_settings @@ -9,16 +8,10 @@ import hou log = logging.getLogger("openpype.hosts.houdini.shelves") -if six.PY2: - FileNotFoundError = IOError - def generate_shelves(): """This function generates complete shelves from shelf set to tools in Houdini from openpype project settings houdini shelf definition. 
- - Raises: - FileNotFoundError: Raised when the shelf set filepath does not exist """ current_os = platform.system().lower() From 00785b89cfe18b7807d78a3de06825e29caf23a3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 15:20:46 +0200 Subject: [PATCH 1362/2550] Tweak cosmetics --- openpype/hosts/houdini/api/shelves.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index b9f36bd1d3..f395bd8ef6 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -80,8 +80,8 @@ def generate_shelves(): tool_definition[key] for key in mandatory_attributes ): log.warning( - "You need to specify at least the name and \ -the script path of the tool.") + "You need to specify at least the name and the " + "script path of the tool.") continue tool = get_or_create_tool(tool_definition, shelf) From 64929258c87c41415220384465bacae7eec3c1dc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 15:22:27 +0200 Subject: [PATCH 1363/2550] Cosmetics --- openpype/hosts/houdini/api/shelves.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index f395bd8ef6..3ccab964cd 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -20,9 +20,7 @@ def generate_shelves(): shelves_set_config = project_settings["houdini"]["shelves"] if not shelves_set_config: - log.debug( - "No custom shelves found in project settings." - ) + log.debug("No custom shelves found in project settings.") return for shelf_set_config in shelves_set_config: @@ -39,9 +37,7 @@ def generate_shelves(): shelf_set_name = shelf_set_config.get('shelf_set_name') if not shelf_set_name: - log.warning( - "No name found in shelf set definition." - ) + log.warning("No name found in shelf set definition.") continue shelves_definition = shelf_set_config.get('shelf_definition') @@ -57,9 +53,7 @@ def generate_shelves(): for shelf_definition in shelves_definition: shelf_name = shelf_definition.get('shelf_name') if not shelf_name: - log.warning( - "No name found in shelf definition." 
- ) + log.warning("No name found in shelf definition.") continue shelf = get_or_create_shelf(shelf_name) @@ -175,9 +169,7 @@ def get_or_create_tool(tool_definition, shelf): if not os.path.exists(tool_definition['script']): log.warning( - "This path doesn't exist - {}".format( - tool_definition['script'] - ) + "This path doesn't exist - {}".format(tool_definition['script']) ) return From 6815eb4e80608804262e713f9ff53d74d5937b0e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 15:28:12 +0200 Subject: [PATCH 1364/2550] Fix variable name `sat` -> `sat_str` `sat` is actually undefined in the else statement --- openpype/style/color_defs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/style/color_defs.py b/openpype/style/color_defs.py index 0f4e145ca0..bd3ccb3ccf 100644 --- a/openpype/style/color_defs.py +++ b/openpype/style/color_defs.py @@ -296,7 +296,7 @@ class HSLColor: if "%" in sat_str: sat = float(sat_str.rstrip("%")) / 100 else: - sat = float(sat) + sat = float(sat_str) if "%" in light_str: light = float(light_str.rstrip("%")) / 100 @@ -350,7 +350,7 @@ class HSLAColor: if "%" in sat_str: sat = float(sat_str.rstrip("%")) / 100 else: - sat = float(sat) + sat = float(sat_str) if "%" in light_str: light = float(light_str.rstrip("%")) / 100 From 3af46fb9277bcbb9e4dc7a1502519921c62a0f47 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 26 Sep 2022 15:30:54 +0200 Subject: [PATCH 1365/2550] Fix example --- openpype/style/color_defs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/style/color_defs.py b/openpype/style/color_defs.py index bd3ccb3ccf..f1eab38c24 100644 --- a/openpype/style/color_defs.py +++ b/openpype/style/color_defs.py @@ -337,8 +337,8 @@ class HSLAColor: as float (0-1 range). 
Examples: - "hsl(27, 0.7, 0.3)" - "hsl(27, 70%, 30%)" + "hsla(27, 0.7, 0.3, 0.5)" + "hsla(27, 70%, 30%, 0.5)" """ def __init__(self, value): modified_color = value.lower().strip() From 9c229fa21381f57f018aaac1fecd0beef857004a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 27 Sep 2022 15:34:10 +0200 Subject: [PATCH 1366/2550] Remove "saveWindowPref" property --- openpype/tools/sceneinventory/window.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py index 578f47d1c0..8bac1beb30 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -40,8 +40,6 @@ class SceneInventoryWindow(QtWidgets.QDialog): project_name = os.getenv("AVALON_PROJECT") or "" self.setWindowTitle("Scene Inventory 1.0 - {}".format(project_name)) self.setObjectName("SceneInventory") - # Maya only property - self.setProperty("saveWindowPref", True) self.resize(1100, 480) From 92791eb6b6f4bb58655ccedc8c173bdaf34db8f5 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 28 Sep 2022 04:16:10 +0000 Subject: [PATCH 1367/2550] [Automated] Bump version --- CHANGELOG.md | 18 +++++++----------- openpype/version.py | 2 +- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 24e02acc6f..8af555adf2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,12 @@ # Changelog -## [3.14.3-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.3-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...HEAD) **🚀 Enhancements** +- Publisher: Enhancement proposals [\#3897](https://github.com/pypeclub/OpenPype/pull/3897) - Maya: better logging in Maketx [\#3886](https://github.com/pypeclub/OpenPype/pull/3886) - Photoshop: review can be turned off [\#3885](https://github.com/pypeclub/OpenPype/pull/3885) - TrayPublisher: added persisting of last selected project [\#3871](https://github.com/pypeclub/OpenPype/pull/3871) @@ -17,9 +18,8 @@ - General: Transcoding handle float2 attr type [\#3849](https://github.com/pypeclub/OpenPype/pull/3849) - General: Simple script for getting license information about used packages [\#3843](https://github.com/pypeclub/OpenPype/pull/3843) - Houdini: Increment current file on workfile publish [\#3840](https://github.com/pypeclub/OpenPype/pull/3840) -- Publisher: Add new publisher to host tools [\#3833](https://github.com/pypeclub/OpenPype/pull/3833) +- General: Workfile template build enhancements [\#3838](https://github.com/pypeclub/OpenPype/pull/3838) - General: lock task workfiles when they are working on [\#3810](https://github.com/pypeclub/OpenPype/pull/3810) -- Maya: Workspace mel loaded from settings [\#3790](https://github.com/pypeclub/OpenPype/pull/3790) **🐛 Bug fixes** @@ -33,12 +33,13 @@ - Tray Publisher: skip plugin if otioTimeline is missing [\#3856](https://github.com/pypeclub/OpenPype/pull/3856) - Flame: retimed attributes are integrated with settings [\#3855](https://github.com/pypeclub/OpenPype/pull/3855) - Maya: Extract Playblast fix textures + labelize viewport show settings [\#3852](https://github.com/pypeclub/OpenPype/pull/3852) -- Ftrack: Url validation does not require ftrackapp [\#3834](https://github.com/pypeclub/OpenPype/pull/3834) -- Maya+Ftrack: Change typo in family name `mayaascii` -\> `mayaAscii` [\#3820](https://github.com/pypeclub/OpenPype/pull/3820) - Maya Deadline: Fix Tile Rendering by forcing integer pixel values 
[\#3758](https://github.com/pypeclub/OpenPype/pull/3758) **🔀 Refactored code** +- Resolve: Use new Extractor location [\#3918](https://github.com/pypeclub/OpenPype/pull/3918) +- Unreal: Use new Extractor location [\#3917](https://github.com/pypeclub/OpenPype/pull/3917) +- Flame: Use new Extractor location [\#3916](https://github.com/pypeclub/OpenPype/pull/3916) - Houdini: Use new Extractor location [\#3894](https://github.com/pypeclub/OpenPype/pull/3894) - Harmony: Use new Extractor location [\#3893](https://github.com/pypeclub/OpenPype/pull/3893) - Hiero: Use new Extractor location [\#3851](https://github.com/pypeclub/OpenPype/pull/3851) @@ -48,6 +49,7 @@ **Merged pull requests:** +- Maya: Fix Scene Inventory possibly starting off-screen due to maya preferences [\#3923](https://github.com/pypeclub/OpenPype/pull/3923) - Maya: RenderSettings set default image format for V-Ray+Redshift to exr [\#3879](https://github.com/pypeclub/OpenPype/pull/3879) - Remove lockfile during publish [\#3874](https://github.com/pypeclub/OpenPype/pull/3874) @@ -92,8 +94,6 @@ - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) - General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) -- General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) **Merged pull requests:** @@ -111,14 +111,10 @@ **🐛 Bug fixes** - Maya: Fix typo in getPanel argument `with\_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753) -- General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) -- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) -- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) -- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) diff --git a/openpype/version.py b/openpype/version.py index fd6e894fe2..18ff49ffbf 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.3-nightly.4" +__version__ = "3.14.3-nightly.5" From affd54bd1ae6603fb0e59fffc3c39c2b08c85e41 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:45:47 +0800 Subject: [PATCH 1368/2550] write color sets --- openpype/hosts/maya/plugins/create/create_rig.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_rig.py b/openpype/hosts/maya/plugins/create/create_rig.py index 3b0ee1e22a..8032e5fbbd 100644 --- a/openpype/hosts/maya/plugins/create/create_rig.py +++ b/openpype/hosts/maya/plugins/create/create_rig.py @@ -13,18 +13,12 @@ class CreateRig(plugin.Creator): label = "Rig" family = "rig" icon = "wheelchair" - write_color_sets = False - write_face_sets = False - - def __init__(self, *args, **kwargs): - super(CreateRig, self).__init__(*args, **kwargs) - self.data["writeColorSets"] = self.write_color_sets - self.data["writeFaceSets"] = 
self.write_face_sets def process(self): with lib.undo_chunk(): instance = super(CreateRig, self).process() + self.log.info("Creating Rig instance set up ...") controls = cmds.sets(name="controls_SET", empty=True) pointcache = cmds.sets(name="out_SET", empty=True) From 1423b8ba69869dbaa774e966f706b70eab7066dd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 13:27:45 +0200 Subject: [PATCH 1369/2550] removed unused 'openpype.api' imports in maya validators --- .../hosts/maya/plugins/publish/validate_animation_content.py | 1 - .../publish/validate_animation_out_set_related_node_ids.py | 1 - .../hosts/maya/plugins/publish/validate_assembly_namespaces.py | 1 - .../hosts/maya/plugins/publish/validate_assembly_transforms.py | 1 - .../hosts/maya/plugins/publish/validate_camera_attributes.py | 1 - .../hosts/maya/plugins/publish/validate_camera_contents.py | 1 - openpype/hosts/maya/plugins/publish/validate_color_sets.py | 1 - openpype/hosts/maya/plugins/publish/validate_cycle_error.py | 1 - .../maya/plugins/publish/validate_instance_has_members.py | 1 - openpype/hosts/maya/plugins/publish/validate_look_contents.py | 1 - .../maya/plugins/publish/validate_look_id_reference_edits.py | 1 - .../hosts/maya/plugins/publish/validate_look_members_unique.py | 1 - .../maya/plugins/publish/validate_look_no_default_shaders.py | 1 - openpype/hosts/maya/plugins/publish/validate_look_sets.py | 1 - .../hosts/maya/plugins/publish/validate_look_shading_group.py | 1 - .../hosts/maya/plugins/publish/validate_look_single_shader.py | 1 - .../maya/plugins/publish/validate_mesh_arnold_attributes.py | 1 - openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py | 1 - .../hosts/maya/plugins/publish/validate_mesh_lamina_faces.py | 1 - openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py | 1 - .../maya/plugins/publish/validate_mesh_no_negative_scale.py | 1 - .../hosts/maya/plugins/publish/validate_mesh_non_manifold.py | 1 - .../hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py | 1 - .../maya/plugins/publish/validate_mesh_normals_unlocked.py | 1 - .../maya/plugins/publish/validate_mesh_overlapping_uvs.py | 1 - .../maya/plugins/publish/validate_mesh_shader_connections.py | 1 - .../hosts/maya/plugins/publish/validate_mesh_single_uv_set.py | 1 - .../hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py | 1 - .../maya/plugins/publish/validate_mesh_vertices_have_edges.py | 1 - openpype/hosts/maya/plugins/publish/validate_model_content.py | 1 - openpype/hosts/maya/plugins/publish/validate_model_name.py | 1 - .../hosts/maya/plugins/publish/validate_mvlook_contents.py | 1 - openpype/hosts/maya/plugins/publish/validate_no_animation.py | 1 - .../hosts/maya/plugins/publish/validate_no_default_camera.py | 1 - openpype/hosts/maya/plugins/publish/validate_no_namespace.py | 1 - .../hosts/maya/plugins/publish/validate_no_null_transforms.py | 1 - .../hosts/maya/plugins/publish/validate_no_unknown_nodes.py | 1 - openpype/hosts/maya/plugins/publish/validate_node_ids.py | 2 +- .../maya/plugins/publish/validate_node_ids_deformed_shapes.py | 1 - .../maya/plugins/publish/validate_node_ids_in_database.py | 1 - .../hosts/maya/plugins/publish/validate_node_ids_related.py | 1 - .../hosts/maya/plugins/publish/validate_node_ids_unique.py | 1 - .../hosts/maya/plugins/publish/validate_node_no_ghosting.py | 2 +- .../maya/plugins/publish/validate_render_no_default_cameras.py | 2 +- .../maya/plugins/publish/validate_render_single_camera.py | 1 - .../publish/validate_rig_controllers_arnold_attributes.py | 1 - 
.../hosts/maya/plugins/publish/validate_rig_joints_hidden.py | 2 +- .../maya/plugins/publish/validate_rig_out_set_node_ids.py | 2 +- openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py | 1 - openpype/hosts/maya/plugins/publish/validate_shader_name.py | 2 +- .../hosts/maya/plugins/publish/validate_shape_default_names.py | 2 +- .../hosts/maya/plugins/publish/validate_shape_render_stats.py | 1 - openpype/hosts/maya/plugins/publish/validate_shape_zero.py | 2 +- .../maya/plugins/publish/validate_skinCluster_deformer_set.py | 2 +- openpype/hosts/maya/plugins/publish/validate_step_size.py | 2 +- .../maya/plugins/publish/validate_transform_naming_suffix.py | 2 +- openpype/hosts/maya/plugins/publish/validate_transform_zero.py | 2 +- .../maya/plugins/publish/validate_unreal_mesh_triangulated.py | 3 ++- .../maya/plugins/publish/validate_unreal_staticmesh_naming.py | 2 +- openpype/hosts/maya/plugins/publish/validate_visible_only.py | 1 - .../hosts/maya/plugins/publish/validate_vrayproxy_members.py | 1 - .../plugins/publish/validate_yeti_rig_input_in_instance.py | 2 +- 62 files changed, 16 insertions(+), 62 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_content.py b/openpype/hosts/maya/plugins/publish/validate_animation_content.py index 6f7a6b905a..9dbb09a046 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_content.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_content.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index aa27633402..649913fff6 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -1,7 +1,6 @@ import maya.cmds as cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py b/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py index a9ea5a6d15..229da63c42 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_namespaces.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py index fb25b617be..3f2c59b95b 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api from maya import cmds diff --git a/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py b/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py index 19c1179e52..bd1529e252 100644 --- a/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_camera_attributes.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import 
ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py index f846319807..1ce8026fc2 100644 --- a/openpype/hosts/maya/plugins/publish/validate_camera_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_camera_contents.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_color_sets.py b/openpype/hosts/maya/plugins/publish/validate_color_sets.py index cab9d6ebab..905417bafa 100644 --- a/openpype/hosts/maya/plugins/publish/validate_color_sets.py +++ b/openpype/hosts/maya/plugins/publish/validate_color_sets.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_cycle_error.py b/openpype/hosts/maya/plugins/publish/validate_cycle_error.py index d3b8316d94..210ee4127c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_cycle_error.py +++ b/openpype/hosts/maya/plugins/publish/validate_cycle_error.py @@ -2,7 +2,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import maintained_selection from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index bf92ac5099..4870f27bff 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_look_contents.py b/openpype/hosts/maya/plugins/publish/validate_look_contents.py index d9819b05d5..53501d11e5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_contents.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py index f223c1a42b..a266a0fd74 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_id_reference_edits.py @@ -2,7 +2,6 @@ from collections import defaultdict from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py b/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py index 210fcb174d..f81e511ff3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py @@ -1,7 +1,6 @@ from collections import defaultdict import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import 
ValidatePipelineOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py b/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py index 95f8fa20d0..db6aadae8d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_no_default_shaders.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_look_sets.py b/openpype/hosts/maya/plugins/publish/validate_look_sets.py index 3a60b771f4..8434ddde04 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_sets.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_sets.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py index 7d043eddb8..9b57b06ee7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py b/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py index 51e1232bb7..788e440d12 100644 --- a/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py +++ b/openpype/hosts/maya/plugins/publish/validate_look_single_shader.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py index abfe1213a0..c1c0636b9e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_arnold_attributes.py @@ -1,7 +1,6 @@ import pymel.core as pc from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.lib import maintained_selection from openpype.pipeline.publish import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py index 4d2885d6e2..36a0da7a59 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py @@ -3,7 +3,6 @@ import re from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateMeshOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py b/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py index e7a73c21b0..4427c6eece 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import 
openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateMeshOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py b/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py index 24d6188ec8..5b67db3307 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py index 18ceccaa28..664e2b5772 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateMeshOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py b/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py index e75a132d50..d7711da722 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_non_manifold.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateMeshOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py index 8c03b54971..0ef2716559 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ValidateMeshOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py index 7d88161058..c8892a8e59 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py @@ -2,7 +2,6 @@ from maya import cmds import maya.api.OpenMaya as om2 import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py index dde3e4fead..be7324a68f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api import openpype.hosts.maya.api.action import math import maya.api.OpenMaya as om diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py index 9621fd5aa8..2a0abe975c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_shader_connections.py @@ -1,7 +1,6 @@ 
from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py index 3fb09356d3..6ca8c06ba5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index 2711682f76..40ddb916ca 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py index 350a5f4789..1e6d290ae7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py @@ -3,7 +3,6 @@ import re from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_model_content.py b/openpype/hosts/maya/plugins/publish/validate_model_content.py index 0557858639..723346a285 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_content.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_content.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 99a4b2654e..2dec9ba267 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -5,7 +5,6 @@ import re from maya import cmds import pyblish.api -import openpype.api from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.maya.api.action diff --git a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py index 62f360cd86..67fc1616c2 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py @@ -1,6 +1,5 @@ import os import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_no_animation.py b/openpype/hosts/maya/plugins/publish/validate_no_animation.py index 177de1468d..2e7cafe4ab 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_animation.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_animation.py @@ 
-1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py b/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py index d4ddb28070..1a5773e6a7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_default_camera.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py index 95caa1007f..01c77e5b2e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_namespace.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_namespace.py @@ -2,7 +2,6 @@ import pymel.core as pm import maya.cmds as cmds import pyblish.api -import openpype.api from openpype.pipeline.publish import ( RepairAction, ValidateContentsOrder, diff --git a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py index f31fd09c95..b430c2b63c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_null_transforms.py @@ -1,7 +1,6 @@ import maya.cmds as cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py b/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py index 20fe34f2fd..2cfdc28128 100644 --- a/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py +++ b/openpype/hosts/maya/plugins/publish/validate_no_unknown_nodes.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_node_ids.py index 877ba0e781..796f4c8d76 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api + from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py index 1fe4a34e07..68c47f3a96 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py index a5b1215f30..b2f28fd4e5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -1,6 
+1,5 @@ import pyblish.api -import openpype.api from openpype.client import get_assets from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidatePipelineOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py index a7595d7392..f901dc58c4 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py index 5ff18358e2..f7a5e6e292 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_unique.py @@ -1,7 +1,6 @@ from collections import defaultdict import pyblish.api -import openpype.api from openpype.pipeline.publish import ValidatePipelineOrder import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib diff --git a/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py b/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py index 2f22d6da1e..0f608dab2c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_no_ghosting.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py b/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py index da35f42291..67ece75af8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_no_default_cameras.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index fc41b1cf5b..f7ce8873f9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -3,7 +3,6 @@ import re import pyblish.api from maya import cmds -import openpype.api import openpype.hosts.maya.api.action from openpype.hosts.maya.api.render_settings import RenderSettings from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py index 3d486cf7a4..55b2ebd6d8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api from openpype.pipeline.publish import ( ValidateContentsOrder, diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py index 86967d7502..d5bf7fd1cf 
100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_joints_hidden.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index 70128ac493..03ba381f8d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -1,7 +1,7 @@ import maya.cmds as cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py index f075f42ff2..f3ed1a36ef 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_output_ids.py @@ -2,7 +2,6 @@ import pymel.core as pc import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( RepairAction, diff --git a/openpype/hosts/maya/plugins/publish/validate_shader_name.py b/openpype/hosts/maya/plugins/publish/validate_shader_name.py index 522b42fd00..b3e51f011d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shader_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_shader_name.py @@ -2,7 +2,7 @@ import re from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py index 25bd3442a3..651c6bcec9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_default_names.py @@ -3,7 +3,7 @@ import re from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ( ValidateContentsOrder, diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py index 0980d6b4b6..f58c0aaf81 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_render_stats.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api from maya import cmds diff --git a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py index 9e30735d40..7a7e9a0aee 100644 --- a/openpype/hosts/maya/plugins/publish/validate_shape_zero.py +++ b/openpype/hosts/maya/plugins/publish/validate_shape_zero.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib from openpype.pipeline.publish import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py b/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py index 86ff914cb0..b45d2b120a 100644 --- 
a/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py +++ b/openpype/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_step_size.py b/openpype/hosts/maya/plugins/publish/validate_step_size.py index 552a936966..294458f63c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_step_size.py +++ b/openpype/hosts/maya/plugins/publish/validate_step_size.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py index 64faf9ecb6..4615e2ec07 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py @@ -3,7 +3,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_zero.py b/openpype/hosts/maya/plugins/publish/validate_transform_zero.py index 9e232f6023..da569195e8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_zero.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py index 1ed3e5531c..4211e76a73 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py @@ -2,8 +2,9 @@ from maya import cmds import pyblish.api -import openpype.api + from openpype.pipeline.publish import ValidateMeshOrder +import openpype.hosts.maya.api.action class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin): diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index a4bb54f5af..1425190b82 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -3,7 +3,7 @@ import re import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline import legacy_io from openpype.settings import get_project_settings diff --git a/openpype/hosts/maya/plugins/publish/validate_visible_only.py b/openpype/hosts/maya/plugins/publish/validate_visible_only.py index f326b91796..faf634f258 100644 --- a/openpype/hosts/maya/plugins/publish/validate_visible_only.py +++ b/openpype/hosts/maya/plugins/publish/validate_visible_only.py @@ -1,6 +1,5 @@ import pyblish.api -import openpype.api from openpype.hosts.maya.api.lib import iter_visible_nodes_in_range import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git 
a/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py b/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py index b94e5cbbed..855a96e6b9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_vrayproxy_members.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api from maya import cmds diff --git a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py index 0fe89634f5..ebef44774d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py +++ b/openpype/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py @@ -1,7 +1,7 @@ from maya import cmds import pyblish.api -import openpype.api + import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder From 79e6de15b56ac9c3b2c60a1278b09958df1d67e3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 14:49:52 +0200 Subject: [PATCH 1370/2550] import Logger from 'openpype.lib' instead of 'openpype.api' --- openpype/hosts/aftereffects/api/launch_logic.py | 3 +-- openpype/hosts/aftereffects/api/pipeline.py | 10 ++++------ openpype/hosts/blender/api/lib.py | 2 +- openpype/hosts/blender/api/pipeline.py | 2 +- openpype/hosts/celaction/api/cli.py | 3 +-- openpype/hosts/flame/api/lib.py | 9 +++++---- openpype/hosts/flame/api/pipeline.py | 2 +- openpype/hosts/flame/api/plugin.py | 3 ++- openpype/hosts/flame/api/render_utils.py | 2 +- openpype/hosts/flame/api/utils.py | 2 +- openpype/hosts/hiero/api/menu.py | 2 +- openpype/hosts/hiero/api/tags.py | 2 +- openpype/hosts/hiero/api/workio.py | 2 +- openpype/hosts/nuke/api/gizmo_menu.py | 2 +- openpype/hosts/nuke/api/pipeline.py | 3 +-- .../hosts/nuke/plugins/inventory/repair_old_loaders.py | 2 +- openpype/hosts/nuke/startup/menu.py | 2 +- openpype/hosts/photoshop/api/launch_logic.py | 2 +- openpype/hosts/photoshop/api/pipeline.py | 3 +-- openpype/hosts/resolve/api/workio.py | 2 +- .../plugins/create/create_from_settings.py | 3 ++- openpype/modules/ftrack/scripts/sub_event_status.py | 2 +- openpype/modules/ftrack/scripts/sub_event_storer.py | 2 +- openpype/modules/log_viewer/log_view_module.py | 1 - openpype/modules/sync_server/providers/local_drive.py | 2 +- openpype/pipeline/create/creator_plugins.py | 5 ++--- openpype/pipeline/plugin_discover.py | 2 +- openpype/tools/launcher/actions.py | 3 ++- openpype/tools/settings/local_settings/window.py | 3 +-- .../standalonepublish/widgets/widget_components.py | 3 ++- openpype/tools/stdout_broker/app.py | 2 +- openpype/tools/utils/host_tools.py | 4 ++-- openpype/tools/utils/lib.py | 7 ++----- 33 files changed, 46 insertions(+), 53 deletions(-) diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py index 30a3e1f1c3..9c8513fe8c 100644 --- a/openpype/hosts/aftereffects/api/launch_logic.py +++ b/openpype/hosts/aftereffects/api/launch_logic.py @@ -12,6 +12,7 @@ from wsrpc_aiohttp import ( from Qt import QtCore +from openpype.lib import Logger from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.tools.adobe_webserver.app import WebServerTool @@ -84,8 +85,6 @@ class ProcessLauncher(QtCore.QObject): @property def log(self): if self._log is None: - from openpype.api import Logger - self._log = Logger.get_logger("{}-launcher".format( self.route_name)) return self._log diff --git 
a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index c13c22ced5..7026fe3f05 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -4,8 +4,7 @@ from Qt import QtWidgets import pyblish.api -from openpype import lib -from openpype.api import Logger +from openpype.lib import Logger, register_event_callback from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -16,9 +15,8 @@ from openpype.pipeline import ( ) from openpype.pipeline.load import any_outdated_containers import openpype.hosts.aftereffects -from openpype.lib import register_event_callback -from .launch_logic import get_stub +from .launch_logic import get_stub, ConnectionNotEstablishedYet log = Logger.get_logger(__name__) @@ -111,7 +109,7 @@ def ls(): """ try: stub = get_stub() # only after AfterEffects is up - except lib.ConnectionNotEstablishedYet: + except ConnectionNotEstablishedYet: print("Not connected yet, ignoring") return @@ -284,7 +282,7 @@ def _get_stub(): """ try: stub = get_stub() # only after Photoshop is up - except lib.ConnectionNotEstablishedYet: + except ConnectionNotEstablishedYet: print("Not connected yet, ignoring") return diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py index 9cd1ace821..05912885f7 100644 --- a/openpype/hosts/blender/api/lib.py +++ b/openpype/hosts/blender/api/lib.py @@ -6,7 +6,7 @@ from typing import Dict, List, Union import bpy import addon_utils -from openpype.api import Logger +from openpype.lib import Logger from . import pipeline diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index ea405b028e..c2aee1e653 100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -20,8 +20,8 @@ from openpype.pipeline import ( deregister_creator_plugin_path, AVALON_CONTAINER_ID, ) -from openpype.api import Logger from openpype.lib import ( + Logger, register_event_callback, emit_event ) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index eb91def090..88fc11cafb 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -6,9 +6,8 @@ import argparse import pyblish.api import pyblish.util -from openpype.api import Logger -import openpype import openpype.hosts.celaction +from openpype.lib import Logger from openpype.hosts.celaction import api as celaction from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index b7f7b24e51..6aca5c5ce6 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -12,6 +12,9 @@ import xml.etree.cElementTree as cET from copy import deepcopy, copy from xml.etree import ElementTree as ET from pprint import pformat + +from openpype.lib import Logger, run_subprocess + from .constants import ( MARKER_COLOR, MARKER_DURATION, @@ -20,9 +23,7 @@ from .constants import ( MARKER_PUBLISH_DEFAULT ) -import openpype.api as openpype - -log = openpype.Logger.get_logger(__name__) +log = Logger.get_logger(__name__) FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]") @@ -1016,7 +1017,7 @@ class MediaInfoFile(object): try: # execute creation of clip xml template data - openpype.run_subprocess(cmd_args) + run_subprocess(cmd_args) except TypeError as error: raise TypeError( "Error creating `{}` due: {}".format(fpath, error)) diff 
--git a/openpype/hosts/flame/api/pipeline.py b/openpype/hosts/flame/api/pipeline.py index 324d13bc3f..3a23389961 100644 --- a/openpype/hosts/flame/api/pipeline.py +++ b/openpype/hosts/flame/api/pipeline.py @@ -5,7 +5,7 @@ import os import contextlib from pyblish import api as pyblish -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 1a26e96c79..4bbdc79621 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -9,13 +9,14 @@ from Qt import QtCore, QtWidgets import openpype.api as openpype import qargparse from openpype import style +from openpype.lib import Logger from openpype.pipeline import LegacyCreator, LoaderPlugin from . import constants from . import lib as flib from . import pipeline as fpipeline -log = openpype.Logger.get_logger(__name__) +log = Logger.get_logger(__name__) class CreatorWidget(QtWidgets.QDialog): diff --git a/openpype/hosts/flame/api/render_utils.py b/openpype/hosts/flame/api/render_utils.py index a29d6be695..7e50c2b23e 100644 --- a/openpype/hosts/flame/api/render_utils.py +++ b/openpype/hosts/flame/api/render_utils.py @@ -1,6 +1,6 @@ import os from xml.etree import ElementTree as ET -from openpype.api import Logger +from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/flame/api/utils.py b/openpype/hosts/flame/api/utils.py index 2dfdfa8f48..fb8bdee42d 100644 --- a/openpype/hosts/flame/api/utils.py +++ b/openpype/hosts/flame/api/utils.py @@ -4,7 +4,7 @@ Flame utils for syncing scripts import os import shutil -from openpype.api import Logger +from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py index 541a1f1f92..2a7560c6ba 100644 --- a/openpype/hosts/hiero/api/menu.py +++ b/openpype/hosts/hiero/api/menu.py @@ -4,7 +4,7 @@ import sys import hiero.core from hiero.ui import findMenuAction -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index 10df96fa53..fac26da03a 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -3,7 +3,7 @@ import os import hiero from openpype.client import get_project, get_assets -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import legacy_io log = Logger.get_logger(__name__) diff --git a/openpype/hosts/hiero/api/workio.py b/openpype/hosts/hiero/api/workio.py index 762e22804f..040fd1435a 100644 --- a/openpype/hosts/hiero/api/workio.py +++ b/openpype/hosts/hiero/api/workio.py @@ -1,7 +1,7 @@ import os import hiero -from openpype.api import Logger +from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/nuke/api/gizmo_menu.py b/openpype/hosts/nuke/api/gizmo_menu.py index 0f1a3e03fc..9edfc62e3b 100644 --- a/openpype/hosts/nuke/api/gizmo_menu.py +++ b/openpype/hosts/nuke/api/gizmo_menu.py @@ -2,7 +2,7 @@ import os import re import nuke -from openpype.api import Logger +from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c6ccfaeb3a..b347fc0d09 100644 --- a/openpype/hosts/nuke/api/pipeline.py 
+++ b/openpype/hosts/nuke/api/pipeline.py @@ -8,10 +8,9 @@ import pyblish.api import openpype from openpype.api import ( - Logger, get_current_project_settings ) -from openpype.lib import register_event_callback +from openpype.lib import register_event_callback, Logger from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, diff --git a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py index c04c939a8d..764499ff0c 100644 --- a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py +++ b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py @@ -1,4 +1,4 @@ -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import InventoryAction from openpype.hosts.nuke.api.lib import set_avalon_knob_data diff --git a/openpype/hosts/nuke/startup/menu.py b/openpype/hosts/nuke/startup/menu.py index 1461d41385..5e29121e9b 100644 --- a/openpype/hosts/nuke/startup/menu.py +++ b/openpype/hosts/nuke/startup/menu.py @@ -1,7 +1,7 @@ import nuke import os -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import install_host from openpype.hosts.nuke import api from openpype.hosts.nuke.api.lib import ( diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py index 0bbb19523d..1f0203dca6 100644 --- a/openpype/hosts/photoshop/api/launch_logic.py +++ b/openpype/hosts/photoshop/api/launch_logic.py @@ -10,7 +10,7 @@ from wsrpc_aiohttp import ( from Qt import QtCore -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.tools.adobe_webserver.app import WebServerTool diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index f660096630..9f6fc0983c 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -3,8 +3,7 @@ from Qt import QtWidgets import pyblish.api -from openpype.api import Logger -from openpype.lib import register_event_callback +from openpype.lib import register_event_callback, Logger from openpype.pipeline import ( legacy_io, register_loader_plugin_path, diff --git a/openpype/hosts/resolve/api/workio.py b/openpype/hosts/resolve/api/workio.py index 5a742ecf7e..5ce73eea53 100644 --- a/openpype/hosts/resolve/api/workio.py +++ b/openpype/hosts/resolve/api/workio.py @@ -1,7 +1,7 @@ """Host API required Work Files tool""" import os -from openpype.api import Logger +from openpype.lib import Logger from .lib import ( get_project_manager, get_current_project, diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py index 41c1c29bb0..5d80c20309 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -1,5 +1,6 @@ import os -from openpype.api import get_project_settings, Logger +from openpype.lib import Logger +from openpype.api import get_project_settings log = Logger.get_logger(__name__) diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index 3163642e3f..6c7ecb8351 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -15,8 +15,8 @@ from 
openpype_modules.ftrack.ftrack_server.lib import ( TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT ) -from openpype.api import Logger from openpype.lib import ( + Logger, is_current_version_studio_latest, is_running_from_build, get_expected_version, diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/ftrack/scripts/sub_event_storer.py index 204cce89e8..a7e77951af 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/ftrack/scripts/sub_event_storer.py @@ -17,10 +17,10 @@ from openpype_modules.ftrack.ftrack_server.lib import ( ) from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import ( + Logger, get_openpype_version, get_build_version ) -from openpype.api import Logger log = Logger.get_logger("Event storer") subprocess_started = datetime.datetime.now() diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index 14be6b392e..da1628b71f 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -1,4 +1,3 @@ -from openpype.api import Logger from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 01bc891d08..8f55dc529b 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -4,7 +4,7 @@ import shutil import threading import time -from openpype.api import Logger +from openpype.lib import Logger from openpype.pipeline import Anatomy from .abstract_provider import AbstractProvider diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 5b0532c60a..945a97a99c 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -9,7 +9,7 @@ from abc import ( import six from openpype.settings import get_system_settings, get_project_settings -from .subset_name import get_subset_name +from openpype.lib import Logger from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -18,6 +18,7 @@ from openpype.pipeline.plugin_discover import ( deregister_plugin_path ) +from .subset_name import get_subset_name from .legacy_create import LegacyCreator @@ -143,8 +144,6 @@ class BaseCreator: """ if self._log is None: - from openpype.api import Logger - self._log = Logger.get_logger(self.__class__.__name__) return self._log diff --git a/openpype/pipeline/plugin_discover.py b/openpype/pipeline/plugin_discover.py index 004e530b1c..7edd9ac290 100644 --- a/openpype/pipeline/plugin_discover.py +++ b/openpype/pipeline/plugin_discover.py @@ -2,7 +2,7 @@ import os import inspect import traceback -from openpype.api import Logger +from openpype.lib import Logger from openpype.lib.python_module_tools import ( modules_from_path, classes_from_module, diff --git a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py index 546bda1c34..b954110da4 100644 --- a/openpype/tools/launcher/actions.py +++ b/openpype/tools/launcher/actions.py @@ -4,8 +4,9 @@ from Qt import QtWidgets, QtGui from openpype import PLUGINS_DIR from openpype import style -from openpype.api import Logger, resources +from openpype.api import resources from openpype.lib import ( + Logger, ApplictionExecutableNotFound, ApplicationLaunchFailed ) diff --git 
a/openpype/tools/settings/local_settings/window.py b/openpype/tools/settings/local_settings/window.py index 6a2db3fff5..761b978ab4 100644 --- a/openpype/tools/settings/local_settings/window.py +++ b/openpype/tools/settings/local_settings/window.py @@ -1,4 +1,3 @@ -import logging from Qt import QtWidgets, QtGui from openpype import style @@ -7,10 +6,10 @@ from openpype.settings.lib import ( get_local_settings, save_local_settings ) +from openpype.lib import Logger from openpype.tools.settings import CHILD_OFFSET from openpype.tools.utils import MessageOverlayObject from openpype.api import ( - Logger, SystemSettings, ProjectSettings ) diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py index b3280089c3..237e1da583 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components.py +++ b/openpype/tools/standalonepublish/widgets/widget_components.py @@ -6,9 +6,10 @@ import string from Qt import QtWidgets, QtCore -from openpype.api import execute, Logger from openpype.pipeline import legacy_io from openpype.lib import ( + execute, + Logger, get_openpype_execute_args, apply_project_environments_value ) diff --git a/openpype/tools/stdout_broker/app.py b/openpype/tools/stdout_broker/app.py index a42d93dab4..f8dc2111aa 100644 --- a/openpype/tools/stdout_broker/app.py +++ b/openpype/tools/stdout_broker/app.py @@ -6,8 +6,8 @@ import websocket import json from datetime import datetime +from openpype.lib import Logger from openpype_modules.webserver.host_console_listener import MsgAction -from openpype.api import Logger log = Logger.get_logger(__name__) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index d2f05d3302..552ce0d432 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -7,6 +7,7 @@ import os import pyblish.api from openpype.host import IWorkfileHost, ILoadHost +from openpype.lib import Logger from openpype.pipeline import ( registered_host, legacy_io, @@ -23,6 +24,7 @@ class HostToolsHelper: Class may also contain tools that are available only for one or few hosts. 
""" + def __init__(self, parent=None): self._log = None # Global parent for all tools (may and may not be set) @@ -42,8 +44,6 @@ class HostToolsHelper: @property def log(self): if self._log is None: - from openpype.api import Logger - self._log = Logger.get_logger(self.__class__.__name__) return self._log diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 97b680b77e..caf568f0c2 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -16,11 +16,8 @@ from openpype.style import ( get_objected_colors, ) from openpype.resources import get_image_path -from openpype.lib import filter_profiles -from openpype.api import ( - get_project_settings, - Logger -) +from openpype.lib import filter_profiles, Logger +from openpype.api import get_project_settings from openpype.pipeline import registered_host log = Logger.get_logger(__name__) From 073a38726e3450409a9fd2bc7e6d789583c379e3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 15:00:49 +0200 Subject: [PATCH 1371/2550] footer widget is not part of subset widget --- openpype/tools/publisher/window.py | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 2a0e6e940a..3b504655d9 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -142,6 +142,9 @@ class PublisherWindow(QtWidgets.QDialog): subset_content_layout.addWidget(subset_attributes_wrap, 7) # Footer + footer_widget = QtWidgets.QWidget(self) + footer_bottom_widget = QtWidgets.QWidget(footer_widget) + comment_input = PlaceholderLineEdit(subset_frame) comment_input.setObjectName("PublishCommentInput") comment_input.setPlaceholderText( @@ -153,13 +156,17 @@ class PublisherWindow(QtWidgets.QDialog): validate_btn = ValidateBtn(subset_frame) publish_btn = PublishBtn(subset_frame) - footer_layout = QtWidgets.QHBoxLayout() - footer_layout.setContentsMargins(0, 0, 0, 0) - footer_layout.addWidget(comment_input, 1) - footer_layout.addWidget(reset_btn, 0) - footer_layout.addWidget(stop_btn, 0) - footer_layout.addWidget(validate_btn, 0) - footer_layout.addWidget(publish_btn, 0) + footer_bottom_layout = QtWidgets.QHBoxLayout(footer_bottom_widget) + footer_bottom_layout.setContentsMargins(0, 0, 0, 0) + footer_bottom_layout.addStretch(1) + footer_bottom_layout.addWidget(reset_btn, 0) + footer_bottom_layout.addWidget(stop_btn, 0) + footer_bottom_layout.addWidget(validate_btn, 0) + footer_bottom_layout.addWidget(publish_btn, 0) + + footer_layout = QtWidgets.QVBoxLayout(footer_widget) + footer_layout.addWidget(comment_input, 0) + footer_layout.addWidget(footer_bottom_widget, 0) # Subset frame layout subset_layout = QtWidgets.QVBoxLayout(subset_frame) @@ -167,10 +174,9 @@ class PublisherWindow(QtWidgets.QDialog): marings.setLeft(marings.left() * 2) marings.setRight(marings.right() * 2) marings.setTop(marings.top() * 2) - marings.setBottom(marings.bottom() * 2) + marings.setBottom(0) subset_layout.setContentsMargins(marings) subset_layout.addWidget(subset_content_widget, 1) - subset_layout.addLayout(footer_layout, 0) # Create publish frame publish_frame = PublishFrame(controller, content_stacked_widget) @@ -192,6 +198,7 @@ class PublisherWindow(QtWidgets.QDialog): main_layout.addWidget(header_widget, 0) main_layout.addWidget(line_widget, 0) main_layout.addWidget(content_stacked_widget, 1) + main_layout.addWidget(footer_widget, 0) creator_window = CreateDialog(controller, parent=self) From 
46ea4561f31cc96537e3820c858385baa7e1b30b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 15:49:31 +0200 Subject: [PATCH 1372/2550] implemented tabs widget --- openpype/style/data.json | 1 + openpype/style/style.css | 23 +++++ openpype/tools/publisher/widgets/__init__.py | 6 ++ .../tools/publisher/widgets/tabs_widget.py | 83 +++++++++++++++++++ 4 files changed, 113 insertions(+) create mode 100644 openpype/tools/publisher/widgets/tabs_widget.py diff --git a/openpype/style/data.json b/openpype/style/data.json index adda49de23..b75aa98508 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -91,6 +91,7 @@ "error": "#AA5050", "success": "#458056", "warning": "#ffc671", + "tab-bg": "#16191d", "list-view-group": { "bg": "#434a56", "bg-hover": "rgba(168, 175, 189, 0.3)", diff --git a/openpype/style/style.css b/openpype/style/style.css index 72d12a9230..ab23dd621f 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -856,6 +856,29 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { } /* New Create/Publish UI */ +PublisherTabsWidget { + background: {color:publisher:tab-bg}; +} + +PublisherTabBtn { + border-radius: 0px; + background: {color:bg-inputs}; + font-size: 9pt; + font-weight: regular; + padding: 0.5em 1em 0.5em 1em; +} + +PublisherTabBtn:hover { + background: {color:bg-buttons}; +} + +PublisherTabBtn[active="1"] { + background: {color:bg}; +} +PublisherTabBtn[active="1"]:hover { + background: {color:bg}; +} + #CreatorDetailedDescription { padding-left: 5px; padding-right: 5px; diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index 55afc349ff..a09e1353ec 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -33,6 +33,10 @@ from .list_view_widgets import ( InstanceListView ) +from .tabs_widget import ( + PublisherTabsWidget +) + __all__ = ( "get_icon_path", @@ -57,4 +61,6 @@ __all__ = ( "InstanceCardView", "InstanceListView", + + "PublisherTabsWidget", ) diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py new file mode 100644 index 0000000000..0e92a6fd8d --- /dev/null +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -0,0 +1,83 @@ +from Qt import QtWidgets, QtCore +from openpype.tools.utils import set_style_property + + +class PublisherTabBtn(QtWidgets.QPushButton): + tab_clicked = QtCore.Signal(str) + + def __init__(self, identifier, label, parent): + super(PublisherTabBtn, self).__init__(label, parent) + self._identifier = identifier + self._active = False + + self.clicked.connect(self._on_click) + + def _on_click(self): + self.tab_clicked.emit(self.identifier) + + @property + def identifier(self): + return self._identifier + + def activate(self): + if self._active: + return + self._active = True + set_style_property(self, "active", "1") + + def deactivate(self): + if not self._active: + return + self._active = False + set_style_property(self, "active", "") + + +class PublisherTabsWidget(QtWidgets.QFrame): + tab_changed = QtCore.Signal(str, str) + + def __init__(self, parent=None): + super(PublisherTabsWidget, self).__init__(parent) + + btns_widget = QtWidgets.QWidget(self) + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.setContentsMargins(0, 0, 0, 0) + btns_layout.setSpacing(0) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(btns_widget, 0) + layout.addStretch(1) + + 
self._btns_layout = btns_layout + + self._current_button = None + self._buttons_by_identifier = {} + + def add_tab(self, label, identifier): + button = PublisherTabBtn(identifier, label, self) + button.tab_clicked.connect(self._on_tab_click) + self._btns_layout.addWidget(button, 0) + self._buttons_by_identifier[identifier] = button + + if self._current_button is None: + self.set_current_tab(identifier) + + def set_current_tab(self, identifier): + if identifier == self._current_button: + return + + new_btn = self._buttons_by_identifier.get(identifier) + if new_btn is None: + return + + old_identifier = self._current_button + old_btn = self._buttons_by_identifier.get(old_identifier) + self._current_button = identifier + + if old_btn is not None: + old_btn.deactivate() + new_btn.activate() + self.tab_changed.emit(old_identifier, identifier) + + def _on_tab_click(self, identifier): + self.set_current_tab(identifier) From ae62357d9873fec68e55fbfc545c8b9a61409667 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 16:00:22 +0200 Subject: [PATCH 1373/2550] added tab to main window --- openpype/tools/publisher/window.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 3b504655d9..d02fe704ee 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -15,6 +15,8 @@ from .widgets import ( SubsetAttributesWidget, InstanceCardView, InstanceListView, + PublisherTabsWidget, + CreateDialog, StopBtn, @@ -78,9 +80,11 @@ class PublisherWindow(QtWidgets.QDialog): header_layout.addWidget(icon_label, 0) header_layout.addWidget(context_label, 1) - line_widget = QtWidgets.QWidget(self) - line_widget.setObjectName("Separator") - line_widget.setMinimumHeight(2) + tabs_widget = PublisherTabsWidget(self) + tabs_widget.add_tab("Create", "create") + tabs_widget.add_tab("Publish", "publish") + tabs_widget.add_tab("Report", "report") + tabs_widget.add_tab("Details", "details") # Content content_stacked_widget = QtWidgets.QWidget(self) @@ -196,12 +200,14 @@ class PublisherWindow(QtWidgets.QDialog): main_layout.setContentsMargins(0, 0, 0, 0) main_layout.setSpacing(0) main_layout.addWidget(header_widget, 0) - main_layout.addWidget(line_widget, 0) + main_layout.addWidget(tabs_widget, 0) main_layout.addWidget(content_stacked_widget, 1) main_layout.addWidget(footer_widget, 0) creator_window = CreateDialog(controller, parent=self) + tabs_widget.tab_changed.connect(self._on_tab_change) + create_btn.clicked.connect(self._on_create_clicked) delete_btn.clicked.connect(self._on_delete_clicked) change_view_btn.clicked.connect(self._on_change_view_clicked) @@ -318,6 +324,9 @@ class PublisherWindow(QtWidgets.QDialog): self._on_subset_change() + def _on_tab_change(self, prev_tab, new_tab): + print(prev_tab, new_tab) + def _on_create_clicked(self): self.creator_window.show() From 711f55204b008628d4e68f1a190f81d362c256dd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 16:14:41 +0200 Subject: [PATCH 1374/2550] Implement Alembic and FBX mesh loader --- openpype/hosts/fusion/api/pipeline.py | 2 +- .../hosts/fusion/plugins/load/load_alembic.py | 70 ++++++++++++++++++ .../hosts/fusion/plugins/load/load_fbx.py | 71 +++++++++++++++++++ 3 files changed, 142 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/fusion/plugins/load/load_alembic.py create mode 100644 openpype/hosts/fusion/plugins/load/load_fbx.py diff --git a/openpype/hosts/fusion/api/pipeline.py 
b/openpype/hosts/fusion/api/pipeline.py index c92d072ef7..eba55f755a 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -182,7 +182,7 @@ def ls(): """ comp = get_current_comp() - tools = comp.GetToolList(False, "Loader").values() + tools = comp.GetToolList(False).values() for tool in tools: container = parse_container(tool) diff --git a/openpype/hosts/fusion/plugins/load/load_alembic.py b/openpype/hosts/fusion/plugins/load/load_alembic.py new file mode 100644 index 0000000000..f8b8c2cb0a --- /dev/null +++ b/openpype/hosts/fusion/plugins/load/load_alembic.py @@ -0,0 +1,70 @@ +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.hosts.fusion.api import ( + imprint_container, + get_current_comp, + comp_lock_and_undo_chunk +) + + +class FusionLoadAlembicMesh(load.LoaderPlugin): + """Load Alembic mesh into Fusion""" + + families = ["pointcache", "model"] + representations = ["abc"] + + label = "Load alembic mesh" + order = -10 + icon = "code-fork" + color = "orange" + + tool_type = "SurfaceAlembicMesh" + + def load(self, context, name, namespace, data): + # Fallback to asset name when namespace is None + if namespace is None: + namespace = context['asset']['name'] + + # Create the Loader with the filename path set + comp = get_current_comp() + with comp_lock_and_undo_chunk(comp, "Create tool"): + + path = self.fname + + args = (-32768, -32768) + tool = comp.AddTool(self.tool_type, *args) + tool["Filename"] = path + + imprint_container(tool, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__) + + def switch(self, container, representation): + self.update(container, representation) + + def update(self, container, representation): + """Update Alembic path""" + + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + path = get_representation_path(representation) + + with comp_lock_and_undo_chunk(comp, "Update tool"): + tool["Filename"] = path + + # Update the imprinted representation + tool.SetData("avalon.representation", str(representation["_id"])) + + def remove(self, container): + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + with comp_lock_and_undo_chunk(comp, "Remove tool"): + tool.Delete() diff --git a/openpype/hosts/fusion/plugins/load/load_fbx.py b/openpype/hosts/fusion/plugins/load/load_fbx.py new file mode 100644 index 0000000000..70fe82ffef --- /dev/null +++ b/openpype/hosts/fusion/plugins/load/load_fbx.py @@ -0,0 +1,71 @@ + +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.hosts.fusion.api import ( + imprint_container, + get_current_comp, + comp_lock_and_undo_chunk +) + + +class FusionLoadFBXMesh(load.LoaderPlugin): + """Load FBX mesh into Fusion""" + + families = ["*"] + representations = ["fbx"] + + label = "Load FBX mesh" + order = -10 + icon = "code-fork" + color = "orange" + + tool_type = "SurfaceFBXMesh" + + def load(self, context, name, namespace, data): + # Fallback to asset name when namespace is None + if namespace is None: + namespace = context['asset']['name'] + + # Create the Loader with the filename path set + comp = get_current_comp() + with comp_lock_and_undo_chunk(comp, "Create tool"): + + path = self.fname + + args = (-32768, -32768) + tool = comp.AddTool(self.tool_type, *args) + tool["ImportFile"] = path + + imprint_container(tool, + name=name, + namespace=namespace, + context=context, + 
loader=self.__class__.__name__) + + def switch(self, container, representation): + self.update(container, representation) + + def update(self, container, representation): + """Update path""" + + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + path = get_representation_path(representation) + + with comp_lock_and_undo_chunk(comp, "Update tool"): + tool["ImportFile"] = path + + # Update the imprinted representation + tool.SetData("avalon.representation", str(representation["_id"])) + + def remove(self, container): + tool = container["_tool"] + assert tool.ID == self.tool_type, f"Must be {self.tool_type}" + comp = tool.Comp() + + with comp_lock_and_undo_chunk(comp, "Remove tool"): + tool.Delete() From 6d6348f28a5f95817a426f42200dac2b825ff1b0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 16:24:10 +0200 Subject: [PATCH 1375/2550] Fix logging handler to still print logs correctly when original "comp" is closed --- openpype/hosts/fusion/api/pipeline.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index c92d072ef7..4ddc8b0411 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -39,12 +39,13 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class CompLogHandler(logging.Handler): +class FusionLogHandler(logging.Handler): + # Keep a reference to fusion's Print function (Remote Object) + _print = getattr(sys.modules["__main__"], "fusion").Print + def emit(self, record): entry = self.format(record) - comp = get_current_comp() - if comp: - comp.Print(entry) + self._print(entry) def install(): @@ -67,7 +68,7 @@ def install(): # Attach default logging handler that prints to active comp logger = logging.getLogger() formatter = logging.Formatter(fmt="%(message)s\n") - handler = CompLogHandler() + handler = FusionLogHandler() handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.DEBUG) From 6120d3b0fe09321ce21822d748527ff5ed785a55 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 16:24:44 +0200 Subject: [PATCH 1376/2550] Remove unused import --- openpype/hosts/fusion/api/lib.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 4ef44dbb61..a55d25829e 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -3,8 +3,6 @@ import sys import re import contextlib -from Qt import QtGui - from openpype.lib import Logger from openpype.client import ( get_asset_by_name, From 0ebb6bd321f0c9bedb2095458a49c903bcab216a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 16:25:04 +0200 Subject: [PATCH 1377/2550] Fix missing import --- openpype/hosts/fusion/api/pipeline.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 4ddc8b0411..3efaad91fc 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -2,6 +2,7 @@ Basic avalon integration """ import os +import sys import logging import pyblish.api From e9110d518d062ad34168b6abc59a9e9b9cf9e9b4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 16:27:32 +0200 Subject: [PATCH 1378/2550] Add FusionEventHandler with background QThread --- openpype/hosts/fusion/api/menu.py | 5 + 
openpype/hosts/fusion/api/pipeline.py | 137 ++++++++++++++++++++++++-- 2 files changed, 135 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 7a6293807f..39126935e6 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -16,6 +16,7 @@ from openpype.hosts.fusion.api.lib import ( from openpype.pipeline import legacy_io from openpype.resources import get_openpype_icon_filepath +from .pipeline import FusionEventHandler from .pulse import FusionPulse self = sys.modules[__name__] @@ -119,6 +120,10 @@ class OpenPypeMenu(QtWidgets.QWidget): self._pulse = FusionPulse(parent=self) self._pulse.start() + # Detect Fusion events as OpenPype events + self._event_handler = FusionEventHandler(parent=self) + self._event_handler.start() + def on_task_changed(self): # Update current context label label = legacy_io.Session["AVALON_ASSET"] diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 3efaad91fc..2043fa290f 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -6,10 +6,12 @@ import sys import logging import pyblish.api +from Qt import QtCore from openpype.lib import ( Logger, - register_event_callback + register_event_callback, + emit_event ) from openpype.pipeline import ( register_loader_plugin_path, @@ -86,10 +88,10 @@ def install(): "instanceToggled", on_pyblish_instance_toggled ) - # Fusion integration currently does not attach to direct callbacks of - # the application. So we use workfile callbacks to allow similar behavior - # on save and open - register_event_callback("workfile.open.after", on_after_open) + # Register events + register_event_callback("open", on_after_open) + register_event_callback("save", on_save) + register_event_callback("new", on_new) def uninstall(): @@ -139,8 +141,18 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): tool.SetAttrs({"TOOLB_PassThrough": passthrough}) -def on_after_open(_event): - comp = get_current_comp() +def on_new(event): + comp = event["Rets"]["comp"] + validate_comp_prefs(comp) + + +def on_save(event): + comp = event["sender"] + validate_comp_prefs(comp) + + +def on_after_open(event): + comp = event["sender"] validate_comp_prefs(comp) if any_outdated_containers(): @@ -256,3 +268,114 @@ def parse_container(tool): return container +class FusionEventThread(QtCore.QThread): + """QThread which will periodically ping Fusion app for any events. 
+ + The fusion.UIManager must be set up to be notified of events before they'll + be reported by this thread, for example: + fusion.UIManager.AddNotify("Comp_Save", None) + + """ + + on_event = QtCore.Signal(dict) + + def run(self): + + app = getattr(sys.modules["__main__"], "app", None) + if app is None: + # No Fusion app found + return + + # As optimization store the GetEvent method directly because every + # getattr of UIManager.GetEvent tries to resolve the Remote Function + # through the PyRemoteObject + get_event = app.UIManager.GetEvent + delay = int(os.environ.get("OPENPYPE_FUSION_CALLBACK_INTERVAL", 1000)) + while True: + if self.isInterruptionRequested(): + return + + # Process all events that have been queued up until now + while True: + event = get_event(False) + if not event: + break + self.on_event.emit(event) + + # Wait some time before processing events again + # to not keep blocking the UI + self.msleep(delay) + + +class FusionEventHandler(QtCore.QObject): + """Emits OpenPype events based on Fusion events captured in a QThread. + + This will emit the following OpenPype events based on Fusion actions: + save: Comp_Save, Comp_SaveAs + open: Comp_Opened + new: Comp_New + + To use this you can attach it to you Qt UI so it runs in the background. + E.g. + >>> handler = FusionEventHandler(parent=window) + >>> handler.start() + + + """ + ACTION_IDS = [ + "Comp_Save", + "Comp_SaveAs", + "Comp_New", + "Comp_Opened" + ] + + def __init__(self, parent=None): + super(FusionEventHandler, self).__init__(parent=parent) + + # Set up Fusion event callbacks + fusion = getattr(sys.modules["__main__"], "fusion", None) + ui = fusion.UIManager + + # Add notifications for the ones we want to listen to + notifiers = [] + for action_id in self.ACTION_IDS: + notifier = ui.AddNotify(action_id, None) + notifiers.append(notifier) + + # TODO: Not entirely sure whether these must be kept to avoid + # garbage collection + self._notifiers = notifiers + + self._event_thread = FusionEventThread(parent=self) + self._event_thread.on_event.connect(self._on_event) + + def start(self): + self._event_thread.start() + + def stop(self): + self._event_thread.stop() + + def _on_event(self, event): + """Handle Fusion events to emit OpenPype events""" + if not event: + return + + what = event["what"] + + # Comp Save + if what in {"Comp_Save", "Comp_SaveAs"}: + if not event["Rets"].get("success"): + # If the Save action is cancelled it will still emit an + # event but with "success": False so we ignore those cases + return + # Comp was saved + emit_event("save", data=event) + return + + # Comp New + elif what in {"Comp_New"}: + emit_event("new", data=event) + + # Comp Opened + elif what in {"Comp_Opened"}: + emit_event("open", data=event) From fa256ad2a8ac153b24e81e156e6612c8538d4e65 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 16:28:32 +0200 Subject: [PATCH 1379/2550] Force repair on new comp without asking the user --- openpype/hosts/fusion/api/lib.py | 28 ++++++++++++++++----------- openpype/hosts/fusion/api/pipeline.py | 2 +- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index a55d25829e..a33e5cf289 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -90,7 +90,7 @@ def set_asset_resolution(): }) -def validate_comp_prefs(comp=None): +def validate_comp_prefs(comp=None, force_repair=False): """Validate current comp defaults with asset settings. 
Validates fps, resolutionWidth, resolutionHeight, aspectRatio. @@ -133,21 +133,22 @@ def validate_comp_prefs(comp=None): asset_value = asset_data[key] comp_value = comp_frame_format_prefs.get(comp_key) if asset_value != comp_value: - # todo: Actually show dialog to user instead of just logging - log.warning( - "Comp {pref} {value} does not match asset " - "'{asset_name}' {pref} {asset_value}".format( - pref=label, - value=comp_value, - asset_name=asset_doc["name"], - asset_value=asset_value) - ) - invalid_msg = "{} {} should be {}".format(label, comp_value, asset_value) invalid.append(invalid_msg) + if not force_repair: + # Do not log warning if we force repair anyway + log.warning( + "Comp {pref} {value} does not match asset " + "'{asset_name}' {pref} {asset_value}".format( + pref=label, + value=comp_value, + asset_name=asset_doc["name"], + asset_value=asset_value) + ) + if invalid: def _on_repair(): @@ -158,6 +159,11 @@ def validate_comp_prefs(comp=None): attributes[comp_key_full] = value comp.SetPrefs(attributes) + if force_repair: + log.info("Applying default Comp preferences..") + _on_repair() + return + from . import menu from openpype.widgets import popup from openpype.style import load_stylesheet diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 2043fa290f..79928c0d96 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -143,7 +143,7 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): def on_new(event): comp = event["Rets"]["comp"] - validate_comp_prefs(comp) + validate_comp_prefs(comp, force_repair=True) def on_save(event): From 7a0ce610b1472332719ec19e3ca98badbf8ab8c6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 16:41:12 +0200 Subject: [PATCH 1380/2550] moved overview widget from window --- openpype/tools/publisher/widgets/__init__.py | 43 +-- .../publisher/widgets/overview_widget.py | 246 +++++++++++++ openpype/tools/publisher/window.py | 340 ++++-------------- openpype/tools/traypublisher/window.py | 2 +- 4 files changed, 328 insertions(+), 303 deletions(-) create mode 100644 openpype/tools/publisher/widgets/overview_widget.py diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index a09e1353ec..869f7adf9b 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -3,64 +3,31 @@ from .icons import ( get_pixmap, get_icon ) -from .border_label_widget import ( - BorderedLabelWidget -) from .widgets import ( - SubsetAttributesWidget, - StopBtn, ResetBtn, ValidateBtn, PublishBtn, - - CreateInstanceBtn, - RemoveInstanceBtn, - ChangeViewBtn ) -from .publish_widget import ( - PublishFrame -) -from .create_dialog import ( - CreateDialog -) - -from .card_view_widgets import ( - InstanceCardView -) - -from .list_view_widgets import ( - InstanceListView -) - -from .tabs_widget import ( - PublisherTabsWidget -) - +from .publish_widget import PublishFrame +from .create_dialog import CreateDialog +from .tabs_widget import PublisherTabsWidget +from .overview_widget import CreateOverviewWidget __all__ = ( "get_icon_path", "get_pixmap", "get_icon", - "SubsetAttributesWidget", - "BorderedLabelWidget", - "StopBtn", "ResetBtn", "ValidateBtn", "PublishBtn", - "CreateInstanceBtn", - "RemoveInstanceBtn", - "ChangeViewBtn", - "PublishFrame", "CreateDialog", - "InstanceCardView", - "InstanceListView", - "PublisherTabsWidget", + "CreateOverviewWidget", ) diff --git 
a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py new file mode 100644 index 0000000000..abdd98ff7c --- /dev/null +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -0,0 +1,246 @@ +from Qt import QtWidgets, QtCore + +from .border_label_widget import BorderedLabelWidget + +from .card_view_widgets import InstanceCardView +from .list_view_widgets import InstanceListView +from .widgets import ( + SubsetAttributesWidget, + CreateInstanceBtn, + RemoveInstanceBtn, + ChangeViewBtn +) + + +class CreateOverviewWidget(QtWidgets.QFrame): + active_changed = QtCore.Signal() + instance_context_changed = QtCore.Signal() + create_requested = QtCore.Signal() + + def __init__(self, controller, parent): + super(CreateOverviewWidget, self).__init__(parent) + + self._controller = controller + self._refreshing_instances = False + + subset_views_widget = BorderedLabelWidget( + "Subsets to publish", self + ) + + subset_view_cards = InstanceCardView(controller, subset_views_widget) + subset_list_view = InstanceListView(controller, subset_views_widget) + + subset_views_layout = QtWidgets.QStackedLayout() + subset_views_layout.addWidget(subset_view_cards) + subset_views_layout.addWidget(subset_list_view) + + # Buttons at the bottom of subset view + create_btn = CreateInstanceBtn(self) + delete_btn = RemoveInstanceBtn(self) + change_view_btn = ChangeViewBtn(self) + + # Subset details widget + subset_attributes_wrap = BorderedLabelWidget( + "Publish options", self + ) + subset_attributes_widget = SubsetAttributesWidget( + controller, subset_attributes_wrap + ) + subset_attributes_wrap.set_center_widget(subset_attributes_widget) + + # Layout of buttons at the bottom of subset view + subset_view_btns_layout = QtWidgets.QHBoxLayout() + subset_view_btns_layout.setContentsMargins(0, 5, 0, 0) + subset_view_btns_layout.addWidget(create_btn) + subset_view_btns_layout.addSpacing(5) + subset_view_btns_layout.addWidget(delete_btn) + subset_view_btns_layout.addStretch(1) + subset_view_btns_layout.addWidget(change_view_btn) + + # Layout of view and buttons + # - widget 'subset_view_widget' is necessary + # - only layout won't be resized automatically to minimum size hint + # on child resize request! 
+ subset_view_widget = QtWidgets.QWidget(subset_views_widget) + subset_view_layout = QtWidgets.QVBoxLayout(subset_view_widget) + subset_view_layout.setContentsMargins(0, 0, 0, 0) + subset_view_layout.addLayout(subset_views_layout, 1) + subset_view_layout.addLayout(subset_view_btns_layout, 0) + + subset_views_widget.set_center_widget(subset_view_widget) + + # Whole subset layout with attributes and details + subset_content_widget = QtWidgets.QWidget(self) + subset_content_layout = QtWidgets.QHBoxLayout(subset_content_widget) + subset_content_layout.setContentsMargins(0, 0, 0, 0) + subset_content_layout.addWidget(subset_views_widget, 3) + subset_content_layout.addWidget(subset_attributes_wrap, 7) + + # Subset frame layout + main_layout = QtWidgets.QVBoxLayout(self) + marings = main_layout.contentsMargins() + marings.setLeft(marings.left() * 2) + marings.setRight(marings.right() * 2) + marings.setTop(marings.top() * 2) + marings.setBottom(0) + main_layout.setContentsMargins(marings) + main_layout.addWidget(subset_content_widget, 1) + + create_btn.clicked.connect(self._on_create_clicked) + delete_btn.clicked.connect(self._on_delete_clicked) + change_view_btn.clicked.connect(self._on_change_view_clicked) + + # Selection changed + subset_list_view.selection_changed.connect( + self._on_subset_change + ) + subset_view_cards.selection_changed.connect( + self._on_subset_change + ) + # Active instances changed + subset_list_view.active_changed.connect( + self._on_active_changed + ) + subset_view_cards.active_changed.connect( + self._on_active_changed + ) + # Instance context has changed + subset_attributes_widget.instance_context_changed.connect( + self._on_instance_context_change + ) + + controller.add_publish_reset_callback(self._on_publish_reset) + controller.add_instances_refresh_callback(self._on_instances_refresh) + + self.subset_content_widget = subset_content_widget + + self.subset_view_cards = subset_view_cards + self.subset_list_view = subset_list_view + self.subset_views_layout = subset_views_layout + + self.delete_btn = delete_btn + + self.subset_attributes_widget = subset_attributes_widget + + def _on_create_clicked(self): + """Pass signal to parent widget which should care about changing state. + + We don't change anything here until the parent will care about it. + """ + + self.create_requested.emit() + + def _on_delete_clicked(self): + instances, _ = self.get_selected_items() + + # Ask user if he really wants to remove instances + dialog = QtWidgets.QMessageBox(self) + dialog.setIcon(QtWidgets.QMessageBox.Question) + dialog.setWindowTitle("Are you sure?") + if len(instances) > 1: + msg = ( + "Do you really want to remove {} instances?" + ).format(len(instances)) + else: + msg = ( + "Do you really want to remove the instance?" 
+ ) + dialog.setText(msg) + dialog.setStandardButtons( + QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel + ) + dialog.setDefaultButton(QtWidgets.QMessageBox.Ok) + dialog.setEscapeButton(QtWidgets.QMessageBox.Cancel) + dialog.exec_() + # Skip if OK was not clicked + if dialog.result() == QtWidgets.QMessageBox.Ok: + self._controller.remove_instances(instances) + + def _on_change_view_clicked(self): + self._change_view_type() + + def _on_subset_change(self, *_args): + # Ignore changes if in middle of refreshing + if self._refreshing_instances: + return + + instances, context_selected = self.get_selected_items() + + # Disable delete button if nothing is selected + self.delete_btn.setEnabled(len(instances) > 0) + + self.subset_attributes_widget.set_current_instances( + instances, context_selected + ) + + def _on_active_changed(self): + if self._refreshing_instances: + return + self.active_changed.emit() + + def _on_instance_context_change(self): + current_idx = self.subset_views_layout.currentIndex() + for idx in range(self.subset_views_layout.count()): + if idx == current_idx: + continue + widget = self.subset_views_layout.widget(idx) + if widget.refreshed: + widget.set_refreshed(False) + + current_widget = self.subset_views_layout.widget(current_idx) + current_widget.refresh_instance_states() + + self.instance_context_changed.emit() + + def get_selected_items(self): + view = self.subset_views_layout.currentWidget() + return view.get_selected_items() + + def _change_view_type(self): + idx = self.subset_views_layout.currentIndex() + new_idx = (idx + 1) % self.subset_views_layout.count() + self.subset_views_layout.setCurrentIndex(new_idx) + + new_view = self.subset_views_layout.currentWidget() + if not new_view.refreshed: + new_view.refresh() + new_view.set_refreshed(True) + else: + new_view.refresh_instance_states() + + self._on_subset_change() + + def _refresh_instances(self): + if self._refreshing_instances: + return + + self._refreshing_instances = True + + for idx in range(self.subset_views_layout.count()): + widget = self.subset_views_layout.widget(idx) + widget.set_refreshed(False) + + view = self.subset_views_layout.currentWidget() + view.refresh() + view.set_refreshed(True) + + self._refreshing_instances = False + + # Force to change instance and refresh details + self._on_subset_change() + + def _on_publish_reset(self): + """Context in controller has been refreshed.""" + + self.subset_content_widget.setEnabled(self._controller.host_is_valid) + + def _on_instances_refresh(self): + """Controller refreshed instances.""" + + self._refresh_instances() + + # Give a change to process Resize Request + QtWidgets.QApplication.processEvents() + # Trigger update geometry of + widget = self.subset_views_layout.currentWidget() + widget.updateGeometry() diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index d02fe704ee..8df9f9bbf5 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -10,11 +10,9 @@ from openpype.tools.utils import ( ) from .control import PublisherController from .widgets import ( - BorderedLabelWidget, + CreateOverviewWidget, PublishFrame, - SubsetAttributesWidget, - InstanceCardView, - InstanceListView, + PublisherTabsWidget, CreateDialog, @@ -23,10 +21,6 @@ from .widgets import ( ResetBtn, ValidateBtn, PublishBtn, - - CreateInstanceBtn, - RemoveInstanceBtn, - ChangeViewBtn ) @@ -62,7 +56,6 @@ class PublisherWindow(QtWidgets.QDialog): self._reset_on_show = reset_on_show self._first_show = True - 
self._refreshing_instances = False controller = PublisherController() @@ -78,8 +71,10 @@ class PublisherWindow(QtWidgets.QDialog): header_layout.setContentsMargins(15, 15, 15, 15) header_layout.setSpacing(15) header_layout.addWidget(icon_label, 0) - header_layout.addWidget(context_label, 1) + header_layout.addWidget(context_label, 0) + header_layout.addStretch(1) + # Tabs widget under header tabs_widget = PublisherTabsWidget(self) tabs_widget.add_tab("Create", "create") tabs_widget.add_tab("Publish", "publish") @@ -89,76 +84,24 @@ class PublisherWindow(QtWidgets.QDialog): # Content content_stacked_widget = QtWidgets.QWidget(self) - # Subset widget - subset_frame = QtWidgets.QFrame(content_stacked_widget) - - subset_views_widget = BorderedLabelWidget( - "Subsets to publish", subset_frame + create_overview_widget = CreateOverviewWidget( + controller, content_stacked_widget ) - subset_view_cards = InstanceCardView(controller, subset_views_widget) - subset_list_view = InstanceListView(controller, subset_views_widget) - - subset_views_layout = QtWidgets.QStackedLayout() - subset_views_layout.addWidget(subset_view_cards) - subset_views_layout.addWidget(subset_list_view) - - # Buttons at the bottom of subset view - create_btn = CreateInstanceBtn(subset_frame) - delete_btn = RemoveInstanceBtn(subset_frame) - change_view_btn = ChangeViewBtn(subset_frame) - - # Subset details widget - subset_attributes_wrap = BorderedLabelWidget( - "Publish options", subset_frame - ) - subset_attributes_widget = SubsetAttributesWidget( - controller, subset_attributes_wrap - ) - subset_attributes_wrap.set_center_widget(subset_attributes_widget) - - # Layout of buttons at the bottom of subset view - subset_view_btns_layout = QtWidgets.QHBoxLayout() - subset_view_btns_layout.setContentsMargins(0, 5, 0, 0) - subset_view_btns_layout.addWidget(create_btn) - subset_view_btns_layout.addSpacing(5) - subset_view_btns_layout.addWidget(delete_btn) - subset_view_btns_layout.addStretch(1) - subset_view_btns_layout.addWidget(change_view_btn) - - # Layout of view and buttons - # - widget 'subset_view_widget' is necessary - # - only layout won't be resized automatically to minimum size hint - # on child resize request! 
- subset_view_widget = QtWidgets.QWidget(subset_views_widget) - subset_view_layout = QtWidgets.QVBoxLayout(subset_view_widget) - subset_view_layout.setContentsMargins(0, 0, 0, 0) - subset_view_layout.addLayout(subset_views_layout, 1) - subset_view_layout.addLayout(subset_view_btns_layout, 0) - - subset_views_widget.set_center_widget(subset_view_widget) - - # Whole subset layout with attributes and details - subset_content_widget = QtWidgets.QWidget(subset_frame) - subset_content_layout = QtWidgets.QHBoxLayout(subset_content_widget) - subset_content_layout.setContentsMargins(0, 0, 0, 0) - subset_content_layout.addWidget(subset_views_widget, 3) - subset_content_layout.addWidget(subset_attributes_wrap, 7) - # Footer footer_widget = QtWidgets.QWidget(self) footer_bottom_widget = QtWidgets.QWidget(footer_widget) - comment_input = PlaceholderLineEdit(subset_frame) + comment_input = PlaceholderLineEdit(footer_widget) comment_input.setObjectName("PublishCommentInput") comment_input.setPlaceholderText( "Attach a comment to your publish" ) - reset_btn = ResetBtn(subset_frame) - stop_btn = StopBtn(subset_frame) - validate_btn = ValidateBtn(subset_frame) - publish_btn = PublishBtn(subset_frame) + reset_btn = ResetBtn(footer_widget) + stop_btn = StopBtn(footer_widget) + validate_btn = ValidateBtn(footer_widget) + publish_btn = PublishBtn(footer_widget) footer_bottom_layout = QtWidgets.QHBoxLayout(footer_bottom_widget) footer_bottom_layout.setContentsMargins(0, 0, 0, 0) @@ -172,16 +115,6 @@ class PublisherWindow(QtWidgets.QDialog): footer_layout.addWidget(comment_input, 0) footer_layout.addWidget(footer_bottom_widget, 0) - # Subset frame layout - subset_layout = QtWidgets.QVBoxLayout(subset_frame) - marings = subset_layout.contentsMargins() - marings.setLeft(marings.left() * 2) - marings.setRight(marings.right() * 2) - marings.setTop(marings.top() * 2) - marings.setBottom(0) - subset_layout.setContentsMargins(marings) - subset_layout.addWidget(subset_content_widget, 1) - # Create publish frame publish_frame = PublishFrame(controller, content_stacked_widget) @@ -192,7 +125,7 @@ class PublisherWindow(QtWidgets.QDialog): content_stacked_layout.setStackingMode( QtWidgets.QStackedLayout.StackAll ) - content_stacked_layout.addWidget(subset_frame) + content_stacked_layout.addWidget(create_overview_widget) content_stacked_layout.addWidget(publish_frame) # Add main frame to this window @@ -207,37 +140,22 @@ class PublisherWindow(QtWidgets.QDialog): creator_window = CreateDialog(controller, parent=self) tabs_widget.tab_changed.connect(self._on_tab_change) - - create_btn.clicked.connect(self._on_create_clicked) - delete_btn.clicked.connect(self._on_delete_clicked) - change_view_btn.clicked.connect(self._on_change_view_clicked) + create_overview_widget.active_changed.connect( + self._on_context_or_active_change + ) + create_overview_widget.instance_context_changed.connect( + self._on_context_or_active_change + ) + create_overview_widget.create_requested.connect( + self._on_create_request + ) reset_btn.clicked.connect(self._on_reset_clicked) stop_btn.clicked.connect(self._on_stop_clicked) validate_btn.clicked.connect(self._on_validate_clicked) publish_btn.clicked.connect(self._on_publish_clicked) - # Selection changed - subset_list_view.selection_changed.connect( - self._on_subset_change - ) - subset_view_cards.selection_changed.connect( - self._on_subset_change - ) - # Active instances changed - subset_list_view.active_changed.connect( - self._on_active_changed - ) - subset_view_cards.active_changed.connect( - 
self._on_active_changed - ) - # Instance context has changed - subset_attributes_widget.instance_context_changed.connect( - self._on_instance_context_change - ) - controller.add_instances_refresh_callback(self._on_instances_refresh) - controller.add_publish_reset_callback(self._on_publish_reset) controller.add_publish_started_callback(self._on_publish_start) controller.add_publish_validated_callback(self._on_publish_validated) @@ -246,22 +164,15 @@ class PublisherWindow(QtWidgets.QDialog): # Store header for TrayPublisher self._header_layout = header_layout + self._tabs_widget = tabs_widget + self._content_stacked_widget = content_stacked_widget self.content_stacked_layout = content_stacked_layout + self._create_overview_widget = create_overview_widget self.publish_frame = publish_frame - self.subset_frame = subset_frame - self.subset_content_widget = subset_content_widget self.context_label = context_label - self.subset_view_cards = subset_view_cards - self.subset_list_view = subset_list_view - self.subset_views_layout = subset_views_layout - - self.delete_btn = delete_btn - - self.subset_attributes_widget = subset_attributes_widget - self.comment_input = comment_input self.stop_btn = stop_btn @@ -269,10 +180,14 @@ class PublisherWindow(QtWidgets.QDialog): self.validate_btn = validate_btn self.publish_btn = publish_btn - self.controller = controller + self._controller = controller self.creator_window = creator_window + @property + def controller(self): + return self._controller + def showEvent(self, event): super(PublisherWindow, self).showEvent(event) if self._first_show: @@ -283,88 +198,33 @@ class PublisherWindow(QtWidgets.QDialog): self.reset() def closeEvent(self, event): - self.controller.save_changes() + self._controller.save_changes() super(PublisherWindow, self).closeEvent(event) def reset(self): - self.controller.reset() + self._controller.reset() def set_context_label(self, label): self.context_label.setText(label) - def get_selected_items(self): - view = self.subset_views_layout.currentWidget() - return view.get_selected_items() - - def _on_instance_context_change(self): - current_idx = self.subset_views_layout.currentIndex() - for idx in range(self.subset_views_layout.count()): - if idx == current_idx: - continue - widget = self.subset_views_layout.widget(idx) - if widget.refreshed: - widget.set_refreshed(False) - - current_widget = self.subset_views_layout.widget(current_idx) - current_widget.refresh_instance_states() - - self._validate_create_instances() - - def _change_view_type(self): - idx = self.subset_views_layout.currentIndex() - new_idx = (idx + 1) % self.subset_views_layout.count() - self.subset_views_layout.setCurrentIndex(new_idx) - - new_view = self.subset_views_layout.currentWidget() - if not new_view.refreshed: - new_view.refresh() - new_view.set_refreshed(True) - else: - new_view.refresh_instance_states() - - self._on_subset_change() - def _on_tab_change(self, prev_tab, new_tab): print(prev_tab, new_tab) - def _on_create_clicked(self): - self.creator_window.show() + def _on_context_or_active_change(self): + self._validate_create_instances() - def _on_delete_clicked(self): - instances, _ = self.get_selected_items() + def _on_create_request(self): + self._go_to_create_tab() - # Ask user if he really wants to remove instances - dialog = QtWidgets.QMessageBox(self) - dialog.setIcon(QtWidgets.QMessageBox.Question) - dialog.setWindowTitle("Are you sure?") - if len(instances) > 1: - msg = ( - "Do you really want to remove {} instances?" 
- ).format(len(instances)) - else: - msg = ( - "Do you really want to remove the instance?" - ) - dialog.setText(msg) - dialog.setStandardButtons( - QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel - ) - dialog.setDefaultButton(QtWidgets.QMessageBox.Ok) - dialog.setEscapeButton(QtWidgets.QMessageBox.Cancel) - dialog.exec_() - # Skip if OK was not clicked - if dialog.result() == QtWidgets.QMessageBox.Ok: - self.controller.remove_instances(instances) - - def _on_change_view_clicked(self): - self._change_view_type() + def _go_to_create_tab(self): + self._tabs_widget.set_current_tab("create") def _set_publish_visibility(self, visible): if visible: widget = self.publish_frame publish_frame_visible = True else: - widget = self.subset_frame + widget = self._create_overview_widget publish_frame_visible = False self.content_stacked_layout.setCurrentWidget(widget) self._set_publish_frame_visible(publish_frame_visible) @@ -381,79 +241,27 @@ class PublisherWindow(QtWidgets.QDialog): self.creator_window.close() def _on_reset_clicked(self): - self.controller.reset() + self._controller.reset() def _on_stop_clicked(self): - self.controller.stop_publish() + self._controller.stop_publish() def _set_publish_comment(self): - if self.controller.publish_comment_is_set: + if self._controller.publish_comment_is_set: return comment = self.comment_input.text() - self.controller.set_comment(comment) + self._controller.set_comment(comment) def _on_validate_clicked(self): self._set_publish_comment() self._set_publish_visibility(True) - self.controller.validate() + self._controller.validate() def _on_publish_clicked(self): self._set_publish_comment() self._set_publish_visibility(True) - self.controller.publish() - - def _refresh_instances(self): - if self._refreshing_instances: - return - - self._refreshing_instances = True - - for idx in range(self.subset_views_layout.count()): - widget = self.subset_views_layout.widget(idx) - widget.set_refreshed(False) - - view = self.subset_views_layout.currentWidget() - view.refresh() - view.set_refreshed(True) - - self._refreshing_instances = False - - # Force to change instance and refresh details - self._on_subset_change() - - def _on_instances_refresh(self): - self._refresh_instances() - - self._validate_create_instances() - - context_title = self.controller.get_context_title() - self.set_context_label(context_title) - - # Give a change to process Resize Request - QtWidgets.QApplication.processEvents() - # Trigger update geometry of - widget = self.subset_views_layout.currentWidget() - widget.updateGeometry() - - def _on_subset_change(self, *_args): - # Ignore changes if in middle of refreshing - if self._refreshing_instances: - return - - instances, context_selected = self.get_selected_items() - - # Disable delete button if nothing is selected - self.delete_btn.setEnabled(len(instances) > 0) - - self.subset_attributes_widget.set_current_instances( - instances, context_selected - ) - - def _on_active_changed(self): - if self._refreshing_instances: - return - self._validate_create_instances() + self._controller.publish() def _set_footer_enabled(self, enabled): self.comment_input.setEnabled(enabled) @@ -467,30 +275,9 @@ class PublisherWindow(QtWidgets.QDialog): self.validate_btn.setEnabled(enabled) self.publish_btn.setEnabled(enabled) - def _validate_create_instances(self): - if not self.controller.host_is_valid: - self._set_footer_enabled(True) - return - - all_valid = None - for instance in self.controller.instances: - if not instance["active"]: - continue - - if not 
instance.has_valid_context: - all_valid = False - break - - if all_valid is None: - all_valid = True - - self._set_footer_enabled(bool(all_valid)) - def _on_publish_reset(self): self._set_publish_visibility(False) - self.subset_content_widget.setEnabled(self.controller.host_is_valid) - self._set_footer_enabled(False) def _on_publish_start(self): @@ -505,19 +292,44 @@ class PublisherWindow(QtWidgets.QDialog): def _on_publish_stop(self): self.reset_btn.setEnabled(True) self.stop_btn.setEnabled(False) - validate_enabled = not self.controller.publish_has_crashed - publish_enabled = not self.controller.publish_has_crashed + validate_enabled = not self._controller.publish_has_crashed + publish_enabled = not self._controller.publish_has_crashed if validate_enabled: - validate_enabled = not self.controller.publish_has_validated + validate_enabled = not self._controller.publish_has_validated if publish_enabled: if ( - self.controller.publish_has_validated - and self.controller.publish_has_validation_errors + self._controller.publish_has_validated + and self._controller.publish_has_validation_errors ): publish_enabled = False else: - publish_enabled = not self.controller.publish_has_finished + publish_enabled = not self._controller.publish_has_finished self.validate_btn.setEnabled(validate_enabled) self.publish_btn.setEnabled(publish_enabled) + + def _validate_create_instances(self): + if not self._controller.host_is_valid: + self._set_footer_enabled(True) + return + + all_valid = None + for instance in self._controller.instances: + if not instance["active"]: + continue + + if not instance.has_valid_context: + all_valid = False + break + + if all_valid is None: + all_valid = True + + self._set_footer_enabled(bool(all_valid)) + + def _on_instances_refresh(self): + self._validate_create_instances() + + context_title = self.controller.get_context_title() + self.set_context_label(context_title) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 930c27ca9c..128c0fef11 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -244,7 +244,7 @@ class TrayPublishWindow(PublisherWindow): self.reset() if not self.controller.instances: - self._on_create_clicked() + self._go_to_create_tab() def _on_tray_publish_save(self): self.controller.save_changes() From 323995369000e194575999a0b8460e412a3aee68 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 16:45:21 +0200 Subject: [PATCH 1381/2550] Optimize Fusion pulse --- openpype/hosts/fusion/api/pulse.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/pulse.py b/openpype/hosts/fusion/api/pulse.py index 5b61f3bd63..eb7ef3785d 100644 --- a/openpype/hosts/fusion/api/pulse.py +++ b/openpype/hosts/fusion/api/pulse.py @@ -19,9 +19,12 @@ class PulseThread(QtCore.QThread): while True: if self.isInterruptionRequested(): return - try: - app.Test() - except Exception: + + # We don't need to call Test because PyRemoteObject of the app + # will actually fail to even resolve the Test function if it has + # gone down. So we can actually already just check by confirming + # the method is still getting resolved. 
(Optimization) + if app.Test is None: self.no_response.emit() self.msleep(interval) From 05372493a3fcb21903ddc12a310ec258b6c9e20c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 17:45:35 +0200 Subject: [PATCH 1382/2550] added create widget to overview widget --- .../tools/publisher/widgets/assets_widget.py | 10 +- .../tools/publisher/widgets/create_widget.py | 986 ++++++++++++++++++ .../publisher/widgets/overview_widget.py | 74 +- .../tools/publisher/widgets/tasks_widget.py | 6 +- 4 files changed, 1045 insertions(+), 31 deletions(-) create mode 100644 openpype/tools/publisher/widgets/create_widget.py diff --git a/openpype/tools/publisher/widgets/assets_widget.py b/openpype/tools/publisher/widgets/assets_widget.py index 46fdcc6526..7a77c9e898 100644 --- a/openpype/tools/publisher/widgets/assets_widget.py +++ b/openpype/tools/publisher/widgets/assets_widget.py @@ -13,13 +13,13 @@ from openpype.tools.utils.assets_widget import ( ) -class CreateDialogAssetsWidget(SingleSelectAssetsWidget): +class CreateWidgetAssetsWidget(SingleSelectAssetsWidget): current_context_required = QtCore.Signal() header_height_changed = QtCore.Signal(int) def __init__(self, controller, parent): self._controller = controller - super(CreateDialogAssetsWidget, self).__init__(None, parent) + super(CreateWidgetAssetsWidget, self).__init__(None, parent) self.set_refresh_btn_visibility(False) self.set_current_asset_btn_visibility(False) @@ -42,11 +42,11 @@ class CreateDialogAssetsWidget(SingleSelectAssetsWidget): self.header_height_changed.emit(height) def resizeEvent(self, event): - super(CreateDialogAssetsWidget, self).resizeEvent(event) + super(CreateWidgetAssetsWidget, self).resizeEvent(event) self._check_header_height() def showEvent(self, event): - super(CreateDialogAssetsWidget, self).showEvent(event) + super(CreateWidgetAssetsWidget, self).showEvent(event) self._check_header_height() def _on_current_asset_click(self): @@ -63,7 +63,7 @@ class CreateDialogAssetsWidget(SingleSelectAssetsWidget): self.select_asset(self._last_selection) def _select_indexes(self, *args, **kwargs): - super(CreateDialogAssetsWidget, self)._select_indexes(*args, **kwargs) + super(CreateWidgetAssetsWidget, self)._select_indexes(*args, **kwargs) if self._enabled: return self._last_selection = self.get_selected_asset_id() diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py new file mode 100644 index 0000000000..a0b3db0409 --- /dev/null +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -0,0 +1,986 @@ +import sys +import re +import traceback +import copy + +import qtawesome +try: + import commonmark +except Exception: + commonmark = None +from Qt import QtWidgets, QtCore, QtGui + +from openpype.client import get_asset_by_name, get_subsets +from openpype.pipeline.create import ( + CreatorError, + SUBSET_NAME_ALLOWED_SYMBOLS, + TaskNotSetError, +) +from openpype.tools.utils import ( + ErrorMessageBox, + MessageOverlayObject, + ClickableFrame, +) + +from .widgets import IconValuePixmapLabel +from .assets_widget import CreateWidgetAssetsWidget +from .tasks_widget import CreateWidgetTasksWidget +from .precreate_widget import PreCreateWidget +from ..constants import ( + VARIANT_TOOLTIP, + CREATOR_IDENTIFIER_ROLE, + FAMILY_ROLE +) + +SEPARATORS = ("---separator---", "---") + + +class VariantInputsWidget(QtWidgets.QWidget): + resized = QtCore.Signal() + + def resizeEvent(self, event): + super(VariantInputsWidget, self).resizeEvent(event) + self.resized.emit() + + 
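+# --- Illustrative aside (editor's sketch, not part of the original patch) ---
+# VariantInputsWidget above only re-emits Qt's resizeEvent as a `resized`
+# signal. CreateWidget further below connects that signal to
+# `_on_variant_widget_resize` so the variant hints button always matches the
+# height of the variant line edit. A minimal standalone equivalent of that
+# hookup could look like:
+#
+#     from Qt import QtWidgets
+#
+#     app = QtWidgets.QApplication([])
+#     variant_widget = VariantInputsWidget()
+#     variant_input = QtWidgets.QLineEdit(variant_widget)
+#     hints_btn = QtWidgets.QToolButton(variant_widget)
+#     # Re-apply the line edit's height to the button on every resize,
+#     # same idea as CreateWidget._on_variant_widget_resize.
+#     variant_widget.resized.connect(
+#         lambda: hints_btn.setFixedHeight(variant_input.height())
+#     )
+#     variant_widget.show()
+#     app.exec_()
+# ----------------------------------------------------------------------------
+
+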
+class CreateErrorMessageBox(ErrorMessageBox): + def __init__( + self, + creator_label, + subset_name, + asset_name, + exc_msg, + formatted_traceback, + parent + ): + self._creator_label = creator_label + self._subset_name = subset_name + self._asset_name = asset_name + self._exc_msg = exc_msg + self._formatted_traceback = formatted_traceback + super(CreateErrorMessageBox, self).__init__("Creation failed", parent) + + def _create_top_widget(self, parent_widget): + label_widget = QtWidgets.QLabel(parent_widget) + label_widget.setText( + "Failed to create" + ) + return label_widget + + def _get_report_data(self): + report_message = ( + "{creator}: Failed to create Subset: \"{subset}\"" + " in Asset: \"{asset}\"" + "\n\nError: {message}" + ).format( + creator=self._creator_label, + subset=self._subset_name, + asset=self._asset_name, + message=self._exc_msg, + ) + if self._formatted_traceback: + report_message += "\n\n{}".format(self._formatted_traceback) + return [report_message] + + def _create_content(self, content_layout): + item_name_template = ( + "Creator: {}
    " + "Subset: {}
    " + "Asset: {}
    " + ) + exc_msg_template = "{}" + + line = self._create_line() + content_layout.addWidget(line) + + item_name_widget = QtWidgets.QLabel(self) + item_name_widget.setText( + item_name_template.format( + self._creator_label, self._subset_name, self._asset_name + ) + ) + content_layout.addWidget(item_name_widget) + + message_label_widget = QtWidgets.QLabel(self) + message_label_widget.setText( + exc_msg_template.format(self.convert_text_for_html(self._exc_msg)) + ) + content_layout.addWidget(message_label_widget) + + if self._formatted_traceback: + line_widget = self._create_line() + tb_widget = self._create_traceback_widget( + self._formatted_traceback + ) + content_layout.addWidget(line_widget) + content_layout.addWidget(tb_widget) + + +# TODO add creator identifier/label to details +class CreatorShortDescWidget(QtWidgets.QWidget): + height_changed = QtCore.Signal(int) + + def __init__(self, parent=None): + super(CreatorShortDescWidget, self).__init__(parent=parent) + + # --- Short description widget --- + icon_widget = IconValuePixmapLabel(None, self) + icon_widget.setObjectName("FamilyIconLabel") + + # --- Short description inputs --- + short_desc_input_widget = QtWidgets.QWidget(self) + + family_label = QtWidgets.QLabel(short_desc_input_widget) + family_label.setAlignment( + QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft + ) + + description_label = QtWidgets.QLabel(short_desc_input_widget) + description_label.setAlignment( + QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft + ) + + short_desc_input_layout = QtWidgets.QVBoxLayout( + short_desc_input_widget + ) + short_desc_input_layout.setSpacing(0) + short_desc_input_layout.addWidget(family_label) + short_desc_input_layout.addWidget(description_label) + # -------------------------------- + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(icon_widget, 0) + layout.addWidget(short_desc_input_widget, 1) + # -------------------------------- + + self._icon_widget = icon_widget + self._family_label = family_label + self._description_label = description_label + + self._last_height = None + + def _check_height_change(self): + height = self.height() + if height != self._last_height: + self._last_height = height + self.height_changed.emit(height) + + def showEvent(self, event): + super(CreatorShortDescWidget, self).showEvent(event) + self._check_height_change() + + def resizeEvent(self, event): + super(CreatorShortDescWidget, self).resizeEvent(event) + self._check_height_change() + + def set_plugin(self, plugin=None): + if not plugin: + self._icon_widget.set_icon_def(None) + self._family_label.setText("") + self._description_label.setText("") + return + + plugin_icon = plugin.get_icon() + description = plugin.get_description() or "" + + self._icon_widget.set_icon_def(plugin_icon) + self._family_label.setText("{}".format(plugin.family)) + self._family_label.setTextInteractionFlags(QtCore.Qt.NoTextInteraction) + self._description_label.setText(description) + + +class HelpButton(ClickableFrame): + resized = QtCore.Signal(int) + question_mark_icon_name = "fa.question" + help_icon_name = "fa.question-circle" + hide_icon_name = "fa.angle-left" + + def __init__(self, *args, **kwargs): + super(HelpButton, self).__init__(*args, **kwargs) + self.setObjectName("CreateDialogHelpButton") + + question_mark_label = QtWidgets.QLabel(self) + help_widget = QtWidgets.QWidget(self) + + help_question = QtWidgets.QLabel(help_widget) + help_label = QtWidgets.QLabel("Help", help_widget) + hide_icon = QtWidgets.QLabel(help_widget) + 
+ help_layout = QtWidgets.QHBoxLayout(help_widget) + help_layout.setContentsMargins(0, 0, 5, 0) + help_layout.addWidget(help_question, 0) + help_layout.addWidget(help_label, 0) + help_layout.addStretch(1) + help_layout.addWidget(hide_icon, 0) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(0) + layout.addWidget(question_mark_label, 0) + layout.addWidget(help_widget, 1) + + help_widget.setVisible(False) + + self._question_mark_label = question_mark_label + self._help_widget = help_widget + self._help_question = help_question + self._hide_icon = hide_icon + + self._expanded = None + self.set_expanded() + + def set_expanded(self, expanded=None): + if self._expanded is expanded: + if expanded is not None: + return + expanded = False + self._expanded = expanded + self._help_widget.setVisible(expanded) + self._update_content() + + def _update_content(self): + width = self.get_icon_width() + if self._expanded: + question_mark_pix = QtGui.QPixmap(width, width) + question_mark_pix.fill(QtCore.Qt.transparent) + + else: + question_mark_icon = qtawesome.icon( + self.question_mark_icon_name, color=QtCore.Qt.white + ) + question_mark_pix = question_mark_icon.pixmap(width, width) + + hide_icon = qtawesome.icon( + self.hide_icon_name, color=QtCore.Qt.white + ) + help_question_icon = qtawesome.icon( + self.help_icon_name, color=QtCore.Qt.white + ) + self._question_mark_label.setPixmap(question_mark_pix) + self._question_mark_label.setMaximumWidth(width) + self._hide_icon.setPixmap(hide_icon.pixmap(width, width)) + self._help_question.setPixmap(help_question_icon.pixmap(width, width)) + + def get_icon_width(self): + metrics = self.fontMetrics() + return metrics.height() + + def set_pos_and_size(self, pos_x, pos_y, width, height): + update_icon = self.height() != height + self.move(pos_x, pos_y) + self.resize(width, height) + + if update_icon: + self._update_content() + self.updateGeometry() + + def showEvent(self, event): + super(HelpButton, self).showEvent(event) + self.resized.emit(self.height()) + + def resizeEvent(self, event): + super(HelpButton, self).resizeEvent(event) + self.resized.emit(self.height()) + + +class CreateWidget(QtWidgets.QWidget): + def __init__(self, controller, parent=None): + super(CreateWidget, self).__init__(parent) + + self.setWindowTitle("Create new instance") + + self.controller = controller + + self._asset_name = self.dbcon.Session.get("AVALON_ASSET") + self._task_name = self.dbcon.Session.get("AVALON_TASK") + + self._asset_doc = None + self._subset_names = None + self._selected_creator = None + + self._prereq_available = False + + self._message_dialog = None + + name_pattern = "^[{}]*$".format(SUBSET_NAME_ALLOWED_SYMBOLS) + self._name_pattern = name_pattern + self._compiled_name_pattern = re.compile(name_pattern) + + overlay_object = MessageOverlayObject(self) + + context_widget = QtWidgets.QWidget(self) + + assets_widget = CreateWidgetAssetsWidget(controller, context_widget) + tasks_widget = CreateWidgetTasksWidget(controller, context_widget) + + context_layout = QtWidgets.QVBoxLayout(context_widget) + context_layout.setContentsMargins(0, 0, 0, 0) + context_layout.setSpacing(0) + context_layout.addWidget(assets_widget, 2) + context_layout.addWidget(tasks_widget, 1) + + # --- Creators view --- + creators_header_widget = QtWidgets.QWidget(self) + header_label_widget = QtWidgets.QLabel( + "Choose family:", creators_header_widget + ) + creators_header_layout = QtWidgets.QHBoxLayout(creators_header_widget) + 
creators_header_layout.setContentsMargins(0, 0, 0, 0) + creators_header_layout.addWidget(header_label_widget, 1) + + creators_view = QtWidgets.QListView(self) + creators_model = QtGui.QStandardItemModel() + creators_sort_model = QtCore.QSortFilterProxyModel() + creators_sort_model.setSourceModel(creators_model) + creators_view.setModel(creators_sort_model) + + variant_widget = VariantInputsWidget(self) + + variant_input = QtWidgets.QLineEdit(variant_widget) + variant_input.setObjectName("VariantInput") + variant_input.setToolTip(VARIANT_TOOLTIP) + + variant_hints_btn = QtWidgets.QToolButton(variant_widget) + variant_hints_btn.setArrowType(QtCore.Qt.DownArrow) + variant_hints_btn.setIconSize(QtCore.QSize(12, 12)) + + variant_hints_menu = QtWidgets.QMenu(variant_widget) + variant_hints_group = QtWidgets.QActionGroup(variant_hints_menu) + + variant_layout = QtWidgets.QHBoxLayout(variant_widget) + variant_layout.setContentsMargins(0, 0, 0, 0) + variant_layout.setSpacing(0) + variant_layout.addWidget(variant_input, 1) + variant_layout.addWidget(variant_hints_btn, 0, QtCore.Qt.AlignVCenter) + + subset_name_input = QtWidgets.QLineEdit(self) + subset_name_input.setEnabled(False) + + form_layout = QtWidgets.QFormLayout() + form_layout.addRow("Variant:", variant_widget) + form_layout.addRow("Subset:", subset_name_input) + + mid_widget = QtWidgets.QWidget(self) + mid_layout = QtWidgets.QVBoxLayout(mid_widget) + mid_layout.setContentsMargins(0, 0, 0, 0) + mid_layout.addWidget(creators_header_widget, 0) + mid_layout.addWidget(creators_view, 1) + mid_layout.addLayout(form_layout, 0) + # ------------ + + # --- Creator short info and attr defs --- + creator_attrs_widget = QtWidgets.QWidget(self) + + creator_short_desc_widget = CreatorShortDescWidget( + creator_attrs_widget + ) + + attr_separator_widget = QtWidgets.QWidget(self) + attr_separator_widget.setObjectName("Separator") + attr_separator_widget.setMinimumHeight(1) + attr_separator_widget.setMaximumHeight(1) + + # Precreate attributes widget + pre_create_widget = PreCreateWidget(creator_attrs_widget) + + # Create button + create_btn_wrapper = QtWidgets.QWidget(creator_attrs_widget) + create_btn = QtWidgets.QPushButton("Create", create_btn_wrapper) + create_btn.setEnabled(False) + + create_btn_wrap_layout = QtWidgets.QHBoxLayout(create_btn_wrapper) + create_btn_wrap_layout.setContentsMargins(0, 0, 0, 0) + create_btn_wrap_layout.addStretch(1) + create_btn_wrap_layout.addWidget(create_btn, 0) + + creator_attrs_layout = QtWidgets.QVBoxLayout(creator_attrs_widget) + creator_attrs_layout.setContentsMargins(0, 0, 0, 0) + creator_attrs_layout.addWidget(creator_short_desc_widget, 0) + creator_attrs_layout.addWidget(attr_separator_widget, 0) + creator_attrs_layout.addWidget(pre_create_widget, 1) + creator_attrs_layout.addWidget(create_btn_wrapper, 0) + # ------------------------------------- + + # --- Detailed information about creator --- + # Detailed description of creator + detail_description_widget = QtWidgets.QWidget(self) + + detail_placoholder_widget = QtWidgets.QWidget( + detail_description_widget + ) + detail_placoholder_widget.setAttribute( + QtCore.Qt.WA_TranslucentBackground + ) + + detail_description_input = QtWidgets.QTextEdit( + detail_description_widget + ) + detail_description_input.setObjectName("CreatorDetailedDescription") + detail_description_input.setTextInteractionFlags( + QtCore.Qt.TextBrowserInteraction + ) + + detail_description_layout = QtWidgets.QVBoxLayout( + detail_description_widget + ) + 
detail_description_layout.setContentsMargins(0, 0, 0, 0) + detail_description_layout.setSpacing(0) + detail_description_layout.addWidget(detail_placoholder_widget, 0) + detail_description_layout.addWidget(detail_description_input, 1) + + detail_description_widget.setVisible(False) + + # ------------------------------------------- + splitter_widget = QtWidgets.QSplitter(self) + splitter_widget.addWidget(context_widget) + splitter_widget.addWidget(mid_widget) + splitter_widget.addWidget(creator_attrs_widget) + splitter_widget.addWidget(detail_description_widget) + splitter_widget.setStretchFactor(0, 1) + splitter_widget.setStretchFactor(1, 1) + splitter_widget.setStretchFactor(2, 1) + splitter_widget.setStretchFactor(3, 1) + + layout = QtWidgets.QHBoxLayout(self) + layout.addWidget(splitter_widget, 1) + + prereq_timer = QtCore.QTimer() + prereq_timer.setInterval(50) + prereq_timer.setSingleShot(True) + + prereq_timer.timeout.connect(self._invalidate_prereq) + + assets_widget.header_height_changed.connect( + self._on_asset_filter_height_change + ) + + create_btn.clicked.connect(self._on_create) + variant_widget.resized.connect(self._on_variant_widget_resize) + variant_input.returnPressed.connect(self._on_create) + variant_input.textChanged.connect(self._on_variant_change) + creators_view.selectionModel().currentChanged.connect( + self._on_creator_item_change + ) + variant_hints_btn.clicked.connect(self._on_variant_btn_click) + variant_hints_menu.triggered.connect(self._on_variant_action) + assets_widget.selection_changed.connect(self._on_asset_change) + assets_widget.current_context_required.connect( + self._on_current_session_context_request + ) + tasks_widget.task_changed.connect(self._on_task_change) + creator_short_desc_widget.height_changed.connect( + self._on_description_height_change + ) + + controller.add_plugins_refresh_callback(self._on_plugins_refresh) + + self._overlay_object = overlay_object + + self._splitter_widget = splitter_widget + + self._context_widget = context_widget + self._assets_widget = assets_widget + self._tasks_widget = tasks_widget + + self.subset_name_input = subset_name_input + + self.variant_input = variant_input + self.variant_hints_btn = variant_hints_btn + self.variant_hints_menu = variant_hints_menu + self.variant_hints_group = variant_hints_group + + self._creators_header_widget = creators_header_widget + self._creators_model = creators_model + self._creators_sort_model = creators_sort_model + self._creators_view = creators_view + self._create_btn = create_btn + + self._creator_short_desc_widget = creator_short_desc_widget + self._pre_create_widget = pre_create_widget + self._attr_separator_widget = attr_separator_widget + + self._detail_placoholder_widget = detail_placoholder_widget + self._detail_description_widget = detail_description_widget + self._detail_description_input = detail_description_input + + self._prereq_timer = prereq_timer + self._first_show = True + + def _emit_message(self, message): + self._overlay_object.add_message(message) + + def _context_change_is_enabled(self): + return self._context_widget.isEnabled() + + def _get_asset_name(self): + asset_name = None + if self._context_change_is_enabled(): + asset_name = self._assets_widget.get_selected_asset_name() + + if asset_name is None: + asset_name = self._asset_name + return asset_name + + def _get_task_name(self): + task_name = None + if self._context_change_is_enabled(): + # Don't use selection of task if asset is not set + asset_name = self._assets_widget.get_selected_asset_name() + 
if asset_name: + task_name = self._tasks_widget.get_selected_task_name() + + if not task_name: + task_name = self._task_name + return task_name + + @property + def dbcon(self): + return self.controller.dbcon + + def _set_context_enabled(self, enabled): + self._assets_widget.set_enabled(enabled) + self._tasks_widget.set_enabled(enabled) + check_prereq = self._context_widget.isEnabled() != enabled + self._context_widget.setEnabled(enabled) + if check_prereq: + self._invalidate_prereq() + + def refresh(self): + # Get context before refresh to keep selection of asset and + # task widgets + asset_name = self._get_asset_name() + task_name = self._get_task_name() + + self._prereq_available = False + + # Disable context widget so refresh of asset will use context asset + # name + self._set_context_enabled(False) + + self._assets_widget.refresh() + + # Refresh data before update of creators + self._refresh_asset() + # Then refresh creators which may trigger callbacks using refreshed + # data + self._refresh_creators() + + self._assets_widget.set_current_asset_name(self._asset_name) + self._assets_widget.select_asset_by_name(asset_name) + self._tasks_widget.set_asset_name(asset_name) + self._tasks_widget.select_task_name(task_name) + + self._invalidate_prereq_deffered() + + def _invalidate_prereq_deffered(self): + self._prereq_timer.start() + + def _on_asset_filter_height_change(self, height): + self._creators_header_widget.setMinimumHeight(height) + self._creators_header_widget.setMaximumHeight(height) + + def _invalidate_prereq(self): + prereq_available = True + creator_btn_tooltips = [] + + available_creators = self._creators_model.rowCount() > 0 + if available_creators != self._creators_view.isEnabled(): + self._creators_view.setEnabled(available_creators) + + if not available_creators: + prereq_available = False + creator_btn_tooltips.append("Creator is not selected") + + if self._context_change_is_enabled() and self._asset_doc is None: + # QUESTION how to handle invalid asset? 
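+            # (When context change is enabled, `self._asset_doc` is None if
+            #  `_refresh_asset` could not find the selected asset; creation
+            #  is then blocked and the "Context is not selected" tooltip
+            #  below explains why.)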
+ prereq_available = False + creator_btn_tooltips.append("Context is not selected") + + if prereq_available != self._prereq_available: + self._prereq_available = prereq_available + + self._create_btn.setEnabled(prereq_available) + + self.variant_input.setEnabled(prereq_available) + self.variant_hints_btn.setEnabled(prereq_available) + + tooltip = "" + if creator_btn_tooltips: + tooltip = "\n".join(creator_btn_tooltips) + self._create_btn.setToolTip(tooltip) + + self._on_variant_change() + + def _refresh_asset(self): + asset_name = self._get_asset_name() + + # Skip if asset did not change + if self._asset_doc and self._asset_doc["name"] == asset_name: + return + + # Make sure `_asset_doc` and `_subset_names` variables are reset + self._asset_doc = None + self._subset_names = None + if asset_name is None: + return + + project_name = self.dbcon.active_project() + asset_doc = get_asset_by_name(project_name, asset_name) + self._asset_doc = asset_doc + + if asset_doc: + asset_id = asset_doc["_id"] + subset_docs = get_subsets( + project_name, asset_ids=[asset_id], fields=["name"] + ) + self._subset_names = { + subset_doc["name"] + for subset_doc in subset_docs + } + + if not asset_doc: + self.subset_name_input.setText("< Asset is not set >") + + def _refresh_creators(self): + # Refresh creators and add their families to list + existing_items = {} + old_creators = set() + for row in range(self._creators_model.rowCount()): + item = self._creators_model.item(row, 0) + identifier = item.data(CREATOR_IDENTIFIER_ROLE) + existing_items[identifier] = item + old_creators.add(identifier) + + # Add new families + new_creators = set() + for identifier, creator in self.controller.manual_creators.items(): + # TODO add details about creator + new_creators.add(identifier) + if identifier in existing_items: + item = existing_items[identifier] + else: + item = QtGui.QStandardItem() + item.setFlags( + QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable + ) + self._creators_model.appendRow(item) + + label = creator.label or identifier + item.setData(label, QtCore.Qt.DisplayRole) + item.setData(identifier, CREATOR_IDENTIFIER_ROLE) + item.setData(creator.family, FAMILY_ROLE) + + # Remove families that are no more available + for identifier in (old_creators - new_creators): + item = existing_items[identifier] + self._creators_model.takeRow(item.row()) + + if self._creators_model.rowCount() < 1: + return + + self._creators_sort_model.sort(0) + # Make sure there is a selection + indexes = self._creators_view.selectedIndexes() + if not indexes: + index = self._creators_sort_model.index(0, 0) + self._creators_view.setCurrentIndex(index) + else: + index = indexes[0] + + identifier = index.data(CREATOR_IDENTIFIER_ROLE) + + self._set_creator_by_identifier(identifier) + + def _on_plugins_refresh(self): + # Trigger refresh only if is visible + self.refresh() + + def _on_asset_change(self): + self._refresh_asset() + + asset_name = self._assets_widget.get_selected_asset_name() + self._tasks_widget.set_asset_name(asset_name) + if self._context_change_is_enabled(): + self._invalidate_prereq_deffered() + + def _on_task_change(self): + if self._context_change_is_enabled(): + self._invalidate_prereq_deffered() + + def _on_current_session_context_request(self): + self._assets_widget.set_current_session_asset() + if self._task_name: + self._tasks_widget.select_task_name(self._task_name) + + def _on_description_height_change(self): + # Use separator's 'y' position as height + height = self._attr_separator_widget.y() + 
self._detail_placoholder_widget.setMinimumHeight(height) + self._detail_placoholder_widget.setMaximumHeight(height) + + def _on_creator_item_change(self, new_index, _old_index): + identifier = None + if new_index.isValid(): + identifier = new_index.data(CREATOR_IDENTIFIER_ROLE) + self._set_creator_by_identifier(identifier) + + def _set_creator_detailed_text(self, creator): + if not creator: + self._detail_description_input.setPlainText("") + return + detailed_description = creator.get_detail_description() or "" + if commonmark: + html = commonmark.commonmark(detailed_description) + self._detail_description_input.setHtml(html) + else: + self._detail_description_input.setMarkdown(detailed_description) + + def _set_creator_by_identifier(self, identifier): + creator = self.controller.manual_creators.get(identifier) + self._set_creator(creator) + + def _set_creator(self, creator): + self._creator_short_desc_widget.set_plugin(creator) + self._set_creator_detailed_text(creator) + self._pre_create_widget.set_plugin(creator) + + self._selected_creator = creator + + if not creator: + self._set_context_enabled(False) + return + + if ( + creator.create_allow_context_change + != self._context_change_is_enabled() + ): + self._set_context_enabled(creator.create_allow_context_change) + self._refresh_asset() + + default_variants = creator.get_default_variants() + if not default_variants: + default_variants = ["Main"] + + default_variant = creator.get_default_variant() + if not default_variant: + default_variant = default_variants[0] + + for action in tuple(self.variant_hints_menu.actions()): + self.variant_hints_menu.removeAction(action) + action.deleteLater() + + for variant in default_variants: + if variant in SEPARATORS: + self.variant_hints_menu.addSeparator() + elif variant: + self.variant_hints_menu.addAction(variant) + + variant_text = default_variant or "Main" + # Make sure subset name is updated to new plugin + if variant_text == self.variant_input.text(): + self._on_variant_change() + else: + self.variant_input.setText(variant_text) + + def _on_variant_widget_resize(self): + self.variant_hints_btn.setFixedHeight(self.variant_input.height()) + + def _on_variant_btn_click(self): + pos = self.variant_hints_btn.rect().bottomLeft() + point = self.variant_hints_btn.mapToGlobal(pos) + self.variant_hints_menu.popup(point) + + def _on_variant_action(self, action): + value = action.text() + if self.variant_input.text() != value: + self.variant_input.setText(value) + + def _on_variant_change(self, variant_value=None): + if not self._prereq_available: + return + + # This should probably never happen? 
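+        # (A creator can still be unselected when no creators are available
+        #  at all; in that case only clear the subset name preview and stop.)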
+ if not self._selected_creator: + if self.subset_name_input.text(): + self.subset_name_input.setText("") + return + + if variant_value is None: + variant_value = self.variant_input.text() + + if not self._compiled_name_pattern.match(variant_value): + self._create_btn.setEnabled(False) + self._set_variant_state_property("invalid") + self.subset_name_input.setText("< Invalid variant >") + return + + if not self._context_change_is_enabled(): + self._create_btn.setEnabled(True) + self._set_variant_state_property("") + self.subset_name_input.setText("< Valid variant >") + return + + project_name = self.controller.project_name + task_name = self._get_task_name() + + asset_doc = copy.deepcopy(self._asset_doc) + # Calculate subset name with Creator plugin + try: + subset_name = self._selected_creator.get_subset_name( + variant_value, task_name, asset_doc, project_name + ) + except TaskNotSetError: + self._create_btn.setEnabled(False) + self._set_variant_state_property("invalid") + self.subset_name_input.setText("< Missing task >") + return + + self.subset_name_input.setText(subset_name) + + self._create_btn.setEnabled(True) + self._validate_subset_name(subset_name, variant_value) + + def _validate_subset_name(self, subset_name, variant_value): + # Get all subsets of the current asset + if self._subset_names: + existing_subset_names = set(self._subset_names) + else: + existing_subset_names = set() + existing_subset_names_low = set( + _name.lower() + for _name in existing_subset_names + ) + + # Replace + compare_regex = re.compile(re.sub( + variant_value, "(.+)", subset_name, flags=re.IGNORECASE + )) + variant_hints = set() + if variant_value: + for _name in existing_subset_names: + _result = compare_regex.search(_name) + if _result: + variant_hints |= set(_result.groups()) + + # Remove previous hints from menu + for action in tuple(self.variant_hints_group.actions()): + self.variant_hints_group.removeAction(action) + self.variant_hints_menu.removeAction(action) + action.deleteLater() + + # Add separator if there are hints and menu already has actions + if variant_hints and self.variant_hints_menu.actions(): + self.variant_hints_menu.addSeparator() + + # Add hints to actions + for variant_hint in variant_hints: + action = self.variant_hints_menu.addAction(variant_hint) + self.variant_hints_group.addAction(action) + + # Indicate subset existence + if not variant_value: + property_value = "empty" + + elif subset_name.lower() in existing_subset_names_low: + # validate existence of subset name with lowered text + # - "renderMain" vs. 
"rendermain" mean same path item for + # windows + property_value = "exists" + else: + property_value = "new" + + self._set_variant_state_property(property_value) + + variant_is_valid = variant_value.strip() != "" + if variant_is_valid != self._create_btn.isEnabled(): + self._create_btn.setEnabled(variant_is_valid) + + def _set_variant_state_property(self, state): + current_value = self.variant_input.property("state") + if current_value != state: + self.variant_input.setProperty("state", state) + self.variant_input.style().polish(self.variant_input) + + def _on_first_show(self): + width = self.width() + part = int(width / 7) + self._splitter_widget.setSizes( + [part * 2, part * 2, width - (part * 4)] + ) + + def showEvent(self, event): + super(CreateWidget, self).showEvent(event) + if self._first_show: + self._first_show = False + self._on_first_show() + + def _on_create(self): + indexes = self._creators_view.selectedIndexes() + if not indexes or len(indexes) > 1: + return + + if not self._create_btn.isEnabled(): + return + + index = indexes[0] + creator_label = index.data(QtCore.Qt.DisplayRole) + creator_identifier = index.data(CREATOR_IDENTIFIER_ROLE) + family = index.data(FAMILY_ROLE) + variant = self.variant_input.text() + # Care about subset name only if context change is enabled + subset_name = None + asset_name = None + task_name = None + if self._context_change_is_enabled(): + subset_name = self.subset_name_input.text() + asset_name = self._get_asset_name() + task_name = self._get_task_name() + + pre_create_data = self._pre_create_widget.current_value() + # Where to define these data? + # - what data show be stored? + instance_data = { + "asset": asset_name, + "task": task_name, + "variant": variant, + "family": family + } + + error_msg = None + formatted_traceback = None + try: + self.controller.create( + creator_identifier, + subset_name, + instance_data, + pre_create_data + ) + + except CreatorError as exc: + error_msg = str(exc) + + # Use bare except because some hosts raise their exceptions that + # do not inherit from python's `BaseException` + except: + exc_type, exc_value, exc_traceback = sys.exc_info() + formatted_traceback = "".join(traceback.format_exception( + exc_type, exc_value, exc_traceback + )) + error_msg = str(exc_value) + + if error_msg is None: + self._set_creator(self._selected_creator) + self._emit_message("Creation finished...") + else: + box = CreateErrorMessageBox( + creator_label, + subset_name, + asset_name, + error_msg, + formatted_traceback, + parent=self + ) + box.show() + # Store dialog so is not garbage collected before is shown + self._message_dialog = box diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index abdd98ff7c..ddc976d458 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -10,6 +10,7 @@ from .widgets import ( RemoveInstanceBtn, ChangeViewBtn ) +from .create_widget import CreateWidget class CreateOverviewWidget(QtWidgets.QFrame): @@ -20,9 +21,13 @@ class CreateOverviewWidget(QtWidgets.QFrame): def __init__(self, controller, parent): super(CreateOverviewWidget, self).__init__(parent) - self._controller = controller self._refreshing_instances = False + self._controller = controller + create_widget = CreateWidget(controller, self) + + # --- Created Subsets/Instances --- + # Common widget for creation and overview subset_views_widget = BorderedLabelWidget( "Subsets to publish", self ) @@ -39,6 +44,7 
@@ class CreateOverviewWidget(QtWidgets.QFrame): delete_btn = RemoveInstanceBtn(self) change_view_btn = ChangeViewBtn(self) + # --- Overview --- # Subset details widget subset_attributes_wrap = BorderedLabelWidget( "Publish options", self @@ -73,6 +79,7 @@ class CreateOverviewWidget(QtWidgets.QFrame): subset_content_widget = QtWidgets.QWidget(self) subset_content_layout = QtWidgets.QHBoxLayout(subset_content_widget) subset_content_layout.setContentsMargins(0, 0, 0, 0) + subset_content_layout.addWidget(create_widget, 7) subset_content_layout.addWidget(subset_views_widget, 3) subset_content_layout.addWidget(subset_attributes_wrap, 7) @@ -86,6 +93,7 @@ class CreateOverviewWidget(QtWidgets.QFrame): main_layout.setContentsMargins(marings) main_layout.addWidget(subset_content_widget, 1) + # --- Calbacks for instances/subsets view --- create_btn.clicked.connect(self._on_create_clicked) delete_btn.clicked.connect(self._on_delete_clicked) change_view_btn.clicked.connect(self._on_change_view_clicked) @@ -109,18 +117,38 @@ class CreateOverviewWidget(QtWidgets.QFrame): self._on_instance_context_change ) + # --- Controller callbacks --- controller.add_publish_reset_callback(self._on_publish_reset) controller.add_instances_refresh_callback(self._on_instances_refresh) - self.subset_content_widget = subset_content_widget + self._subset_content_widget = subset_content_widget - self.subset_view_cards = subset_view_cards - self.subset_list_view = subset_list_view - self.subset_views_layout = subset_views_layout + self._subset_view_cards = subset_view_cards + self._subset_list_view = subset_list_view + self._subset_views_layout = subset_views_layout - self.delete_btn = delete_btn + self._delete_btn = delete_btn - self.subset_attributes_widget = subset_attributes_widget + self._subset_attributes_widget = subset_attributes_widget + self._create_widget = create_widget + self._subset_attributes_wrap = subset_attributes_wrap + + # Start in create mode + self._current_state = "create" + subset_attributes_wrap.setVisible(False) + + def set_state(self, old_state, new_state): + if new_state == self._current_state: + return + + self._current_state = new_state + + self._create_widget.setVisible( + self._current_state == "create" + ) + self._subset_attributes_wrap.setVisible( + self._current_state == "publish" + ) def _on_create_clicked(self): """Pass signal to parent widget which should care about changing state. 
@@ -167,9 +195,9 @@ class CreateOverviewWidget(QtWidgets.QFrame): instances, context_selected = self.get_selected_items() # Disable delete button if nothing is selected - self.delete_btn.setEnabled(len(instances) > 0) + self._delete_btn.setEnabled(len(instances) > 0) - self.subset_attributes_widget.set_current_instances( + self._subset_attributes_widget.set_current_instances( instances, context_selected ) @@ -179,29 +207,29 @@ class CreateOverviewWidget(QtWidgets.QFrame): self.active_changed.emit() def _on_instance_context_change(self): - current_idx = self.subset_views_layout.currentIndex() - for idx in range(self.subset_views_layout.count()): + current_idx = self._subset_views_layout.currentIndex() + for idx in range(self._subset_views_layout.count()): if idx == current_idx: continue - widget = self.subset_views_layout.widget(idx) + widget = self._subset_views_layout.widget(idx) if widget.refreshed: widget.set_refreshed(False) - current_widget = self.subset_views_layout.widget(current_idx) + current_widget = self._subset_views_layout.widget(current_idx) current_widget.refresh_instance_states() self.instance_context_changed.emit() def get_selected_items(self): - view = self.subset_views_layout.currentWidget() + view = self._subset_views_layout.currentWidget() return view.get_selected_items() def _change_view_type(self): - idx = self.subset_views_layout.currentIndex() - new_idx = (idx + 1) % self.subset_views_layout.count() - self.subset_views_layout.setCurrentIndex(new_idx) + idx = self._subset_views_layout.currentIndex() + new_idx = (idx + 1) % self._subset_views_layout.count() + self._subset_views_layout.setCurrentIndex(new_idx) - new_view = self.subset_views_layout.currentWidget() + new_view = self._subset_views_layout.currentWidget() if not new_view.refreshed: new_view.refresh() new_view.set_refreshed(True) @@ -216,11 +244,11 @@ class CreateOverviewWidget(QtWidgets.QFrame): self._refreshing_instances = True - for idx in range(self.subset_views_layout.count()): - widget = self.subset_views_layout.widget(idx) + for idx in range(self._subset_views_layout.count()): + widget = self._subset_views_layout.widget(idx) widget.set_refreshed(False) - view = self.subset_views_layout.currentWidget() + view = self._subset_views_layout.currentWidget() view.refresh() view.set_refreshed(True) @@ -232,7 +260,7 @@ class CreateOverviewWidget(QtWidgets.QFrame): def _on_publish_reset(self): """Context in controller has been refreshed.""" - self.subset_content_widget.setEnabled(self._controller.host_is_valid) + self._subset_content_widget.setEnabled(self._controller.host_is_valid) def _on_instances_refresh(self): """Controller refreshed instances.""" @@ -242,5 +270,5 @@ class CreateOverviewWidget(QtWidgets.QFrame): # Give a change to process Resize Request QtWidgets.QApplication.processEvents() # Trigger update geometry of - widget = self.subset_views_layout.currentWidget() + widget = self._subset_views_layout.currentWidget() widget.updateGeometry() diff --git a/openpype/tools/publisher/widgets/tasks_widget.py b/openpype/tools/publisher/widgets/tasks_widget.py index aa239f6334..f31fffb9ea 100644 --- a/openpype/tools/publisher/widgets/tasks_widget.py +++ b/openpype/tools/publisher/widgets/tasks_widget.py @@ -141,10 +141,10 @@ class TasksModel(QtGui.QStandardItemModel): return super(TasksModel, self).headerData(section, orientation, role) -class CreateDialogTasksWidget(TasksWidget): +class CreateWidgetTasksWidget(TasksWidget): def __init__(self, controller, parent): self._controller = controller - 
super(CreateDialogTasksWidget, self).__init__(None, parent) + super(CreateWidgetTasksWidget, self).__init__(None, parent) self._enabled = None @@ -164,7 +164,7 @@ class CreateDialogTasksWidget(TasksWidget): self.task_changed.emit() def select_task_name(self, task_name): - super(CreateDialogTasksWidget, self).select_task_name(task_name) + super(CreateWidgetTasksWidget, self).select_task_name(task_name) if not self._enabled: current = self.get_selected_task_name() if current: From b6312fe3692850dc1a821e9c79d83425078d5ba7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 17:45:43 +0200 Subject: [PATCH 1383/2550] changed style of disable button --- openpype/style/style.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/style/style.css b/openpype/style/style.css index ab23dd621f..1d112fa575 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -868,6 +868,10 @@ PublisherTabBtn { padding: 0.5em 1em 0.5em 1em; } +PublisherTabBtn:disabled { + background: {color:bg-inputs}; +} + PublisherTabBtn:hover { background: {color:bg-buttons}; } From 22dab5ddefb2908d3eefcbd8a46cbb77a204ed4c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 17:46:20 +0200 Subject: [PATCH 1384/2550] removed created dialog --- openpype/tools/publisher/widgets/__init__.py | 3 - .../tools/publisher/widgets/create_dialog.py | 1222 ----------------- .../tools/publisher/widgets/tabs_widget.py | 9 + openpype/tools/publisher/window.py | 36 +- 4 files changed, 22 insertions(+), 1248 deletions(-) delete mode 100644 openpype/tools/publisher/widgets/create_dialog.py diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index 869f7adf9b..1d0ed0633b 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -10,7 +10,6 @@ from .widgets import ( PublishBtn, ) from .publish_widget import PublishFrame -from .create_dialog import CreateDialog from .tabs_widget import PublisherTabsWidget from .overview_widget import CreateOverviewWidget @@ -26,8 +25,6 @@ __all__ = ( "PublishFrame", - "CreateDialog", - "PublisherTabsWidget", "CreateOverviewWidget", ) diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py deleted file mode 100644 index 173df7d5c8..0000000000 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ /dev/null @@ -1,1222 +0,0 @@ -import sys -import re -import traceback -import copy - -import qtawesome -try: - import commonmark -except Exception: - commonmark = None -from Qt import QtWidgets, QtCore, QtGui - -from openpype.client import get_asset_by_name, get_subsets -from openpype.pipeline.create import ( - CreatorError, - SUBSET_NAME_ALLOWED_SYMBOLS, - TaskNotSetError, -) -from openpype.tools.utils import ( - ErrorMessageBox, - MessageOverlayObject, - ClickableFrame, -) - -from .widgets import IconValuePixmapLabel -from .assets_widget import CreateDialogAssetsWidget -from .tasks_widget import CreateDialogTasksWidget -from .precreate_widget import PreCreateWidget -from ..constants import ( - VARIANT_TOOLTIP, - CREATOR_IDENTIFIER_ROLE, - FAMILY_ROLE -) - -SEPARATORS = ("---separator---", "---") - - -class VariantInputsWidget(QtWidgets.QWidget): - resized = QtCore.Signal() - - def resizeEvent(self, event): - super(VariantInputsWidget, self).resizeEvent(event) - self.resized.emit() - - -class CreateErrorMessageBox(ErrorMessageBox): - def __init__( - self, - creator_label, - subset_name, - asset_name, - 
exc_msg, - formatted_traceback, - parent - ): - self._creator_label = creator_label - self._subset_name = subset_name - self._asset_name = asset_name - self._exc_msg = exc_msg - self._formatted_traceback = formatted_traceback - super(CreateErrorMessageBox, self).__init__("Creation failed", parent) - - def _create_top_widget(self, parent_widget): - label_widget = QtWidgets.QLabel(parent_widget) - label_widget.setText( - "Failed to create" - ) - return label_widget - - def _get_report_data(self): - report_message = ( - "{creator}: Failed to create Subset: \"{subset}\"" - " in Asset: \"{asset}\"" - "\n\nError: {message}" - ).format( - creator=self._creator_label, - subset=self._subset_name, - asset=self._asset_name, - message=self._exc_msg, - ) - if self._formatted_traceback: - report_message += "\n\n{}".format(self._formatted_traceback) - return [report_message] - - def _create_content(self, content_layout): - item_name_template = ( - "Creator: {}
    " - "Subset: {}
    " - "Asset: {}
    " - ) - exc_msg_template = "{}" - - line = self._create_line() - content_layout.addWidget(line) - - item_name_widget = QtWidgets.QLabel(self) - item_name_widget.setText( - item_name_template.format( - self._creator_label, self._subset_name, self._asset_name - ) - ) - content_layout.addWidget(item_name_widget) - - message_label_widget = QtWidgets.QLabel(self) - message_label_widget.setText( - exc_msg_template.format(self.convert_text_for_html(self._exc_msg)) - ) - content_layout.addWidget(message_label_widget) - - if self._formatted_traceback: - line_widget = self._create_line() - tb_widget = self._create_traceback_widget( - self._formatted_traceback - ) - content_layout.addWidget(line_widget) - content_layout.addWidget(tb_widget) - - -# TODO add creator identifier/label to details -class CreatorShortDescWidget(QtWidgets.QWidget): - height_changed = QtCore.Signal(int) - - def __init__(self, parent=None): - super(CreatorShortDescWidget, self).__init__(parent=parent) - - # --- Short description widget --- - icon_widget = IconValuePixmapLabel(None, self) - icon_widget.setObjectName("FamilyIconLabel") - - # --- Short description inputs --- - short_desc_input_widget = QtWidgets.QWidget(self) - - family_label = QtWidgets.QLabel(short_desc_input_widget) - family_label.setAlignment( - QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft - ) - - description_label = QtWidgets.QLabel(short_desc_input_widget) - description_label.setAlignment( - QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft - ) - - short_desc_input_layout = QtWidgets.QVBoxLayout( - short_desc_input_widget - ) - short_desc_input_layout.setSpacing(0) - short_desc_input_layout.addWidget(family_label) - short_desc_input_layout.addWidget(description_label) - # -------------------------------- - - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(icon_widget, 0) - layout.addWidget(short_desc_input_widget, 1) - # -------------------------------- - - self._icon_widget = icon_widget - self._family_label = family_label - self._description_label = description_label - - self._last_height = None - - def _check_height_change(self): - height = self.height() - if height != self._last_height: - self._last_height = height - self.height_changed.emit(height) - - def showEvent(self, event): - super(CreatorShortDescWidget, self).showEvent(event) - self._check_height_change() - - def resizeEvent(self, event): - super(CreatorShortDescWidget, self).resizeEvent(event) - self._check_height_change() - - def set_plugin(self, plugin=None): - if not plugin: - self._icon_widget.set_icon_def(None) - self._family_label.setText("") - self._description_label.setText("") - return - - plugin_icon = plugin.get_icon() - description = plugin.get_description() or "" - - self._icon_widget.set_icon_def(plugin_icon) - self._family_label.setText("{}".format(plugin.family)) - self._family_label.setTextInteractionFlags(QtCore.Qt.NoTextInteraction) - self._description_label.setText(description) - - -class HelpButton(ClickableFrame): - resized = QtCore.Signal(int) - question_mark_icon_name = "fa.question" - help_icon_name = "fa.question-circle" - hide_icon_name = "fa.angle-left" - - def __init__(self, *args, **kwargs): - super(HelpButton, self).__init__(*args, **kwargs) - self.setObjectName("CreateDialogHelpButton") - - question_mark_label = QtWidgets.QLabel(self) - help_widget = QtWidgets.QWidget(self) - - help_question = QtWidgets.QLabel(help_widget) - help_label = QtWidgets.QLabel("Help", help_widget) - hide_icon = QtWidgets.QLabel(help_widget) - 
- help_layout = QtWidgets.QHBoxLayout(help_widget) - help_layout.setContentsMargins(0, 0, 5, 0) - help_layout.addWidget(help_question, 0) - help_layout.addWidget(help_label, 0) - help_layout.addStretch(1) - help_layout.addWidget(hide_icon, 0) - - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.setSpacing(0) - layout.addWidget(question_mark_label, 0) - layout.addWidget(help_widget, 1) - - help_widget.setVisible(False) - - self._question_mark_label = question_mark_label - self._help_widget = help_widget - self._help_question = help_question - self._hide_icon = hide_icon - - self._expanded = None - self.set_expanded() - - def set_expanded(self, expanded=None): - if self._expanded is expanded: - if expanded is not None: - return - expanded = False - self._expanded = expanded - self._help_widget.setVisible(expanded) - self._update_content() - - def _update_content(self): - width = self.get_icon_width() - if self._expanded: - question_mark_pix = QtGui.QPixmap(width, width) - question_mark_pix.fill(QtCore.Qt.transparent) - - else: - question_mark_icon = qtawesome.icon( - self.question_mark_icon_name, color=QtCore.Qt.white - ) - question_mark_pix = question_mark_icon.pixmap(width, width) - - hide_icon = qtawesome.icon( - self.hide_icon_name, color=QtCore.Qt.white - ) - help_question_icon = qtawesome.icon( - self.help_icon_name, color=QtCore.Qt.white - ) - self._question_mark_label.setPixmap(question_mark_pix) - self._question_mark_label.setMaximumWidth(width) - self._hide_icon.setPixmap(hide_icon.pixmap(width, width)) - self._help_question.setPixmap(help_question_icon.pixmap(width, width)) - - def get_icon_width(self): - metrics = self.fontMetrics() - return metrics.height() - - def set_pos_and_size(self, pos_x, pos_y, width, height): - update_icon = self.height() != height - self.move(pos_x, pos_y) - self.resize(width, height) - - if update_icon: - self._update_content() - self.updateGeometry() - - def showEvent(self, event): - super(HelpButton, self).showEvent(event) - self.resized.emit(self.height()) - - def resizeEvent(self, event): - super(HelpButton, self).resizeEvent(event) - self.resized.emit(self.height()) - - -class CreateDialog(QtWidgets.QDialog): - default_size = (1000, 560) - - def __init__( - self, controller, asset_name=None, task_name=None, parent=None - ): - super(CreateDialog, self).__init__(parent) - - self.setWindowTitle("Create new instance") - - self.controller = controller - - if asset_name is None: - asset_name = self.dbcon.Session.get("AVALON_ASSET") - - if task_name is None: - task_name = self.dbcon.Session.get("AVALON_TASK") - - self._asset_name = asset_name - self._task_name = task_name - - self._last_pos = None - self._asset_doc = None - self._subset_names = None - self._selected_creator = None - - self._prereq_available = False - - self._message_dialog = None - - name_pattern = "^[{}]*$".format(SUBSET_NAME_ALLOWED_SYMBOLS) - self._name_pattern = name_pattern - self._compiled_name_pattern = re.compile(name_pattern) - - overlay_object = MessageOverlayObject(self) - - context_widget = QtWidgets.QWidget(self) - - assets_widget = CreateDialogAssetsWidget(controller, context_widget) - tasks_widget = CreateDialogTasksWidget(controller, context_widget) - - context_layout = QtWidgets.QVBoxLayout(context_widget) - context_layout.setContentsMargins(0, 0, 0, 0) - context_layout.setSpacing(0) - context_layout.addWidget(assets_widget, 2) - context_layout.addWidget(tasks_widget, 1) - - # --- Creators view --- - creators_header_widget = 
QtWidgets.QWidget(self) - header_label_widget = QtWidgets.QLabel( - "Choose family:", creators_header_widget - ) - creators_header_layout = QtWidgets.QHBoxLayout(creators_header_widget) - creators_header_layout.setContentsMargins(0, 0, 0, 0) - creators_header_layout.addWidget(header_label_widget, 1) - - creators_view = QtWidgets.QListView(self) - creators_model = QtGui.QStandardItemModel() - creators_sort_model = QtCore.QSortFilterProxyModel() - creators_sort_model.setSourceModel(creators_model) - creators_view.setModel(creators_sort_model) - - variant_widget = VariantInputsWidget(self) - - variant_input = QtWidgets.QLineEdit(variant_widget) - variant_input.setObjectName("VariantInput") - variant_input.setToolTip(VARIANT_TOOLTIP) - - variant_hints_btn = QtWidgets.QToolButton(variant_widget) - variant_hints_btn.setArrowType(QtCore.Qt.DownArrow) - variant_hints_btn.setIconSize(QtCore.QSize(12, 12)) - - variant_hints_menu = QtWidgets.QMenu(variant_widget) - variant_hints_group = QtWidgets.QActionGroup(variant_hints_menu) - - variant_layout = QtWidgets.QHBoxLayout(variant_widget) - variant_layout.setContentsMargins(0, 0, 0, 0) - variant_layout.setSpacing(0) - variant_layout.addWidget(variant_input, 1) - variant_layout.addWidget(variant_hints_btn, 0, QtCore.Qt.AlignVCenter) - - subset_name_input = QtWidgets.QLineEdit(self) - subset_name_input.setEnabled(False) - - form_layout = QtWidgets.QFormLayout() - form_layout.addRow("Variant:", variant_widget) - form_layout.addRow("Subset:", subset_name_input) - - mid_widget = QtWidgets.QWidget(self) - mid_layout = QtWidgets.QVBoxLayout(mid_widget) - mid_layout.setContentsMargins(0, 0, 0, 0) - mid_layout.addWidget(creators_header_widget, 0) - mid_layout.addWidget(creators_view, 1) - mid_layout.addLayout(form_layout, 0) - # ------------ - - # --- Creator short info and attr defs --- - creator_attrs_widget = QtWidgets.QWidget(self) - - creator_short_desc_widget = CreatorShortDescWidget( - creator_attrs_widget - ) - - attr_separator_widget = QtWidgets.QWidget(self) - attr_separator_widget.setObjectName("Separator") - attr_separator_widget.setMinimumHeight(1) - attr_separator_widget.setMaximumHeight(1) - - # Precreate attributes widget - pre_create_widget = PreCreateWidget(creator_attrs_widget) - - # Create button - create_btn_wrapper = QtWidgets.QWidget(creator_attrs_widget) - create_btn = QtWidgets.QPushButton("Create", create_btn_wrapper) - create_btn.setEnabled(False) - - create_btn_wrap_layout = QtWidgets.QHBoxLayout(create_btn_wrapper) - create_btn_wrap_layout.setContentsMargins(0, 0, 0, 0) - create_btn_wrap_layout.addStretch(1) - create_btn_wrap_layout.addWidget(create_btn, 0) - - creator_attrs_layout = QtWidgets.QVBoxLayout(creator_attrs_widget) - creator_attrs_layout.setContentsMargins(0, 0, 0, 0) - creator_attrs_layout.addWidget(creator_short_desc_widget, 0) - creator_attrs_layout.addWidget(attr_separator_widget, 0) - creator_attrs_layout.addWidget(pre_create_widget, 1) - creator_attrs_layout.addWidget(create_btn_wrapper, 0) - # ------------------------------------- - - # --- Detailed information about creator --- - # Detailed description of creator - detail_description_widget = QtWidgets.QWidget(self) - - detail_placoholder_widget = QtWidgets.QWidget( - detail_description_widget - ) - detail_placoholder_widget.setAttribute( - QtCore.Qt.WA_TranslucentBackground - ) - - detail_description_input = QtWidgets.QTextEdit( - detail_description_widget - ) - detail_description_input.setObjectName("CreatorDetailedDescription") - 
detail_description_input.setTextInteractionFlags( - QtCore.Qt.TextBrowserInteraction - ) - - detail_description_layout = QtWidgets.QVBoxLayout( - detail_description_widget - ) - detail_description_layout.setContentsMargins(0, 0, 0, 0) - detail_description_layout.setSpacing(0) - detail_description_layout.addWidget(detail_placoholder_widget, 0) - detail_description_layout.addWidget(detail_description_input, 1) - - detail_description_widget.setVisible(False) - - # ------------------------------------------- - splitter_widget = QtWidgets.QSplitter(self) - splitter_widget.addWidget(context_widget) - splitter_widget.addWidget(mid_widget) - splitter_widget.addWidget(creator_attrs_widget) - splitter_widget.addWidget(detail_description_widget) - splitter_widget.setStretchFactor(0, 1) - splitter_widget.setStretchFactor(1, 1) - splitter_widget.setStretchFactor(2, 1) - splitter_widget.setStretchFactor(3, 1) - - layout = QtWidgets.QHBoxLayout(self) - layout.addWidget(splitter_widget, 1) - - # Floating help button - # - Create this button as last to be fully visible - help_btn = HelpButton(self) - - prereq_timer = QtCore.QTimer() - prereq_timer.setInterval(50) - prereq_timer.setSingleShot(True) - - desc_width_anim_timer = QtCore.QTimer() - desc_width_anim_timer.setInterval(10) - - prereq_timer.timeout.connect(self._invalidate_prereq) - - desc_width_anim_timer.timeout.connect(self._on_desc_animation) - - help_btn.clicked.connect(self._on_help_btn) - help_btn.resized.connect(self._on_help_btn_resize) - - assets_widget.header_height_changed.connect( - self._on_asset_filter_height_change - ) - - create_btn.clicked.connect(self._on_create) - variant_widget.resized.connect(self._on_variant_widget_resize) - variant_input.returnPressed.connect(self._on_create) - variant_input.textChanged.connect(self._on_variant_change) - creators_view.selectionModel().currentChanged.connect( - self._on_creator_item_change - ) - variant_hints_btn.clicked.connect(self._on_variant_btn_click) - variant_hints_menu.triggered.connect(self._on_variant_action) - assets_widget.selection_changed.connect(self._on_asset_change) - assets_widget.current_context_required.connect( - self._on_current_session_context_request - ) - tasks_widget.task_changed.connect(self._on_task_change) - creator_short_desc_widget.height_changed.connect( - self._on_description_height_change - ) - splitter_widget.splitterMoved.connect(self._on_splitter_move) - - controller.add_plugins_refresh_callback(self._on_plugins_refresh) - - self._overlay_object = overlay_object - - self._splitter_widget = splitter_widget - - self._context_widget = context_widget - self._assets_widget = assets_widget - self._tasks_widget = tasks_widget - - self.subset_name_input = subset_name_input - - self.variant_input = variant_input - self.variant_hints_btn = variant_hints_btn - self.variant_hints_menu = variant_hints_menu - self.variant_hints_group = variant_hints_group - - self._creators_header_widget = creators_header_widget - self._creators_model = creators_model - self._creators_sort_model = creators_sort_model - self._creators_view = creators_view - self._create_btn = create_btn - - self._creator_short_desc_widget = creator_short_desc_widget - self._pre_create_widget = pre_create_widget - self._attr_separator_widget = attr_separator_widget - - self._detail_placoholder_widget = detail_placoholder_widget - self._detail_description_widget = detail_description_widget - self._detail_description_input = detail_description_input - self._help_btn = help_btn - - self._prereq_timer = 
prereq_timer - self._first_show = True - - # Description animation - self._description_size_policy = detail_description_widget.sizePolicy() - self._desc_width_anim_timer = desc_width_anim_timer - self._desc_widget_step = 0 - self._last_description_width = None - self._last_full_width = 0 - self._expected_description_width = 0 - self._last_desc_max_width = None - self._other_widgets_widths = [] - - def _emit_message(self, message): - self._overlay_object.add_message(message) - - def _context_change_is_enabled(self): - return self._context_widget.isEnabled() - - def _get_asset_name(self): - asset_name = None - if self._context_change_is_enabled(): - asset_name = self._assets_widget.get_selected_asset_name() - - if asset_name is None: - asset_name = self._asset_name - return asset_name - - def _get_task_name(self): - task_name = None - if self._context_change_is_enabled(): - # Don't use selection of task if asset is not set - asset_name = self._assets_widget.get_selected_asset_name() - if asset_name: - task_name = self._tasks_widget.get_selected_task_name() - - if not task_name: - task_name = self._task_name - return task_name - - @property - def dbcon(self): - return self.controller.dbcon - - def _set_context_enabled(self, enabled): - self._assets_widget.set_enabled(enabled) - self._tasks_widget.set_enabled(enabled) - check_prereq = self._context_widget.isEnabled() != enabled - self._context_widget.setEnabled(enabled) - if check_prereq: - self._invalidate_prereq() - - def refresh(self): - # Get context before refresh to keep selection of asset and - # task widgets - asset_name = self._get_asset_name() - task_name = self._get_task_name() - - self._prereq_available = False - - # Disable context widget so refresh of asset will use context asset - # name - self._set_context_enabled(False) - - self._assets_widget.refresh() - - # Refresh data before update of creators - self._refresh_asset() - # Then refresh creators which may trigger callbacks using refreshed - # data - self._refresh_creators() - - self._assets_widget.set_current_asset_name(self._asset_name) - self._assets_widget.select_asset_by_name(asset_name) - self._tasks_widget.set_asset_name(asset_name) - self._tasks_widget.select_task_name(task_name) - - self._invalidate_prereq_deffered() - - def _invalidate_prereq_deffered(self): - self._prereq_timer.start() - - def _on_asset_filter_height_change(self, height): - self._creators_header_widget.setMinimumHeight(height) - self._creators_header_widget.setMaximumHeight(height) - - def _invalidate_prereq(self): - prereq_available = True - creator_btn_tooltips = [] - - available_creators = self._creators_model.rowCount() > 0 - if available_creators != self._creators_view.isEnabled(): - self._creators_view.setEnabled(available_creators) - - if not available_creators: - prereq_available = False - creator_btn_tooltips.append("Creator is not selected") - - if self._context_change_is_enabled() and self._asset_doc is None: - # QUESTION how to handle invalid asset? 
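The dialog above defers its prerequisite checks through a single-shot QTimer: `_invalidate_prereq_deffered()` only restarts `_prereq_timer`, so a burst of asset or task changes collapses into a single `_invalidate_prereq()` call once the 50 ms interval elapses. A minimal sketch of that debounce pattern, assuming the `Qt` binding shim used throughout this repository is importable; the `PrereqWatcher` class is illustrative and not part of the codebase:

    from Qt import QtCore, QtWidgets


    class PrereqWatcher(QtWidgets.QWidget):
        """Coalesce rapid context changes into one revalidation."""

        def __init__(self, parent=None):
            super(PrereqWatcher, self).__init__(parent)

            # Single-shot timer: restarting it before it fires postpones the
            # timeout, so many requests in a row trigger only one validation.
            timer = QtCore.QTimer(self)
            timer.setInterval(50)
            timer.setSingleShot(True)
            timer.timeout.connect(self._invalidate_prereq)
            self._prereq_timer = timer

        def invalidate_prereq_deferred(self):
            self._prereq_timer.start()

        def _invalidate_prereq(self):
            print("prerequisites validated once")


    if __name__ == "__main__":
        app = QtWidgets.QApplication([])
        watcher = PrereqWatcher()
        for _ in range(5):
            watcher.invalidate_prereq_deferred()
        QtCore.QTimer.singleShot(200, app.quit)
        app.exec_()

Restarting an already running single-shot timer simply postpones its timeout, which is what makes the coalescing work.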
- prereq_available = False - creator_btn_tooltips.append("Context is not selected") - - if prereq_available != self._prereq_available: - self._prereq_available = prereq_available - - self._create_btn.setEnabled(prereq_available) - - self.variant_input.setEnabled(prereq_available) - self.variant_hints_btn.setEnabled(prereq_available) - - tooltip = "" - if creator_btn_tooltips: - tooltip = "\n".join(creator_btn_tooltips) - self._create_btn.setToolTip(tooltip) - - self._on_variant_change() - - def _refresh_asset(self): - asset_name = self._get_asset_name() - - # Skip if asset did not change - if self._asset_doc and self._asset_doc["name"] == asset_name: - return - - # Make sure `_asset_doc` and `_subset_names` variables are reset - self._asset_doc = None - self._subset_names = None - if asset_name is None: - return - - project_name = self.dbcon.active_project() - asset_doc = get_asset_by_name(project_name, asset_name) - self._asset_doc = asset_doc - - if asset_doc: - asset_id = asset_doc["_id"] - subset_docs = get_subsets( - project_name, asset_ids=[asset_id], fields=["name"] - ) - self._subset_names = { - subset_doc["name"] - for subset_doc in subset_docs - } - - if not asset_doc: - self.subset_name_input.setText("< Asset is not set >") - - def _refresh_creators(self): - # Refresh creators and add their families to list - existing_items = {} - old_creators = set() - for row in range(self._creators_model.rowCount()): - item = self._creators_model.item(row, 0) - identifier = item.data(CREATOR_IDENTIFIER_ROLE) - existing_items[identifier] = item - old_creators.add(identifier) - - # Add new families - new_creators = set() - for identifier, creator in self.controller.manual_creators.items(): - # TODO add details about creator - new_creators.add(identifier) - if identifier in existing_items: - item = existing_items[identifier] - else: - item = QtGui.QStandardItem() - item.setFlags( - QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable - ) - self._creators_model.appendRow(item) - - label = creator.label or identifier - item.setData(label, QtCore.Qt.DisplayRole) - item.setData(identifier, CREATOR_IDENTIFIER_ROLE) - item.setData(creator.family, FAMILY_ROLE) - - # Remove families that are no more available - for identifier in (old_creators - new_creators): - item = existing_items[identifier] - self._creators_model.takeRow(item.row()) - - if self._creators_model.rowCount() < 1: - return - - self._creators_sort_model.sort(0) - # Make sure there is a selection - indexes = self._creators_view.selectedIndexes() - if not indexes: - index = self._creators_sort_model.index(0, 0) - self._creators_view.setCurrentIndex(index) - else: - index = indexes[0] - - identifier = index.data(CREATOR_IDENTIFIER_ROLE) - - self._set_creator_by_identifier(identifier) - - def _on_plugins_refresh(self): - # Trigger refresh only if is visible - if self.isVisible(): - self.refresh() - - def _on_asset_change(self): - self._refresh_asset() - - asset_name = self._assets_widget.get_selected_asset_name() - self._tasks_widget.set_asset_name(asset_name) - if self._context_change_is_enabled(): - self._invalidate_prereq_deffered() - - def _on_task_change(self): - if self._context_change_is_enabled(): - self._invalidate_prereq_deffered() - - def _on_current_session_context_request(self): - self._assets_widget.set_current_session_asset() - if self._task_name: - self._tasks_widget.select_task_name(self._task_name) - - def _on_description_height_change(self): - # Use separator's 'y' position as height - height = 
self._attr_separator_widget.y() - self._detail_placoholder_widget.setMinimumHeight(height) - self._detail_placoholder_widget.setMaximumHeight(height) - - def _on_creator_item_change(self, new_index, _old_index): - identifier = None - if new_index.isValid(): - identifier = new_index.data(CREATOR_IDENTIFIER_ROLE) - self._set_creator_by_identifier(identifier) - - def _update_help_btn(self): - short_desc_rect = self._creator_short_desc_widget.rect() - - # point = short_desc_rect.topRight() - point = short_desc_rect.center() - mapped_point = self._creator_short_desc_widget.mapTo(self, point) - # pos_y = mapped_point.y() - center_pos_y = mapped_point.y() - icon_width = self._help_btn.get_icon_width() - - _height = int(icon_width * 2.5) - height = min(_height, short_desc_rect.height()) - pos_y = center_pos_y - int(height / 2) - - pos_x = self.width() - icon_width - if self._detail_placoholder_widget.isVisible(): - pos_x -= ( - self._detail_placoholder_widget.width() - + self._splitter_widget.handle(3).width() - ) - - width = self.width() - pos_x - - self._help_btn.set_pos_and_size( - max(0, pos_x), max(0, pos_y), - width, height - ) - - def _on_help_btn_resize(self, height): - if self._creator_short_desc_widget.height() != height: - self._update_help_btn() - - def _on_splitter_move(self, *args): - self._update_help_btn() - - def _on_help_btn(self): - if self._desc_width_anim_timer.isActive(): - return - - final_size = self.size() - cur_sizes = self._splitter_widget.sizes() - - if self._desc_widget_step == 0: - now_visible = self._detail_description_widget.isVisible() - else: - now_visible = self._desc_widget_step > 0 - - sizes = [] - for idx, value in enumerate(cur_sizes): - if idx < 3: - sizes.append(value) - - self._last_full_width = final_size.width() - self._other_widgets_widths = list(sizes) - - if now_visible: - cur_desc_width = self._detail_description_widget.width() - if cur_desc_width < 1: - cur_desc_width = 2 - step_size = int(cur_desc_width / 5) - if step_size < 1: - step_size = 1 - - step_size *= -1 - expected_width = 0 - desc_width = cur_desc_width - 1 - width = final_size.width() - 1 - min_max = desc_width - self._last_description_width = cur_desc_width - - else: - self._detail_description_widget.setVisible(True) - handle = self._splitter_widget.handle(3) - desc_width = handle.sizeHint().width() - if self._last_description_width: - expected_width = self._last_description_width - else: - hint = self._detail_description_widget.sizeHint() - expected_width = hint.width() - - width = final_size.width() + desc_width - step_size = int(expected_width / 5) - if step_size < 1: - step_size = 1 - min_max = 0 - - if self._last_desc_max_width is None: - self._last_desc_max_width = ( - self._detail_description_widget.maximumWidth() - ) - self._detail_description_widget.setMinimumWidth(min_max) - self._detail_description_widget.setMaximumWidth(min_max) - self._expected_description_width = expected_width - self._desc_widget_step = step_size - - self._desc_width_anim_timer.start() - - sizes.append(desc_width) - - final_size.setWidth(width) - - self._splitter_widget.setSizes(sizes) - self.resize(final_size) - - self._help_btn.set_expanded(not now_visible) - - def _on_desc_animation(self): - current_width = self._detail_description_widget.width() - - desc_width = None - last_step = False - growing = self._desc_widget_step > 0 - - # Growing - if growing: - if current_width < self._expected_description_width: - desc_width = current_width + self._desc_widget_step - if desc_width >= 
self._expected_description_width: - desc_width = self._expected_description_width - last_step = True - - # Decreasing - elif self._desc_widget_step < 0: - if current_width > self._expected_description_width: - desc_width = current_width + self._desc_widget_step - if desc_width <= self._expected_description_width: - desc_width = self._expected_description_width - last_step = True - - if desc_width is None: - self._desc_widget_step = 0 - self._desc_width_anim_timer.stop() - return - - if last_step and not growing: - self._detail_description_widget.setVisible(False) - QtWidgets.QApplication.processEvents() - - width = self._last_full_width - handle_width = self._splitter_widget.handle(3).width() - if growing: - width += (handle_width + desc_width) - else: - width -= self._last_description_width - if last_step: - width -= handle_width - else: - width += desc_width - - if not last_step or growing: - self._detail_description_widget.setMaximumWidth(desc_width) - self._detail_description_widget.setMinimumWidth(desc_width) - - window_size = self.size() - window_size.setWidth(width) - self.resize(window_size) - if not last_step: - return - - self._desc_widget_step = 0 - self._desc_width_anim_timer.stop() - - if not growing: - return - - self._detail_description_widget.setMinimumWidth(0) - self._detail_description_widget.setMaximumWidth( - self._last_desc_max_width - ) - self._detail_description_widget.setSizePolicy( - self._description_size_policy - ) - - sizes = list(self._other_widgets_widths) - sizes.append(desc_width) - self._splitter_widget.setSizes(sizes) - - def _set_creator_detailed_text(self, creator): - if not creator: - self._detail_description_input.setPlainText("") - return - detailed_description = creator.get_detail_description() or "" - if commonmark: - html = commonmark.commonmark(detailed_description) - self._detail_description_input.setHtml(html) - else: - self._detail_description_input.setMarkdown(detailed_description) - - def _set_creator_by_identifier(self, identifier): - creator = self.controller.manual_creators.get(identifier) - self._set_creator(creator) - - def _set_creator(self, creator): - self._creator_short_desc_widget.set_plugin(creator) - self._set_creator_detailed_text(creator) - self._pre_create_widget.set_plugin(creator) - - self._selected_creator = creator - - if not creator: - self._set_context_enabled(False) - return - - if ( - creator.create_allow_context_change - != self._context_change_is_enabled() - ): - self._set_context_enabled(creator.create_allow_context_change) - self._refresh_asset() - - default_variants = creator.get_default_variants() - if not default_variants: - default_variants = ["Main"] - - default_variant = creator.get_default_variant() - if not default_variant: - default_variant = default_variants[0] - - for action in tuple(self.variant_hints_menu.actions()): - self.variant_hints_menu.removeAction(action) - action.deleteLater() - - for variant in default_variants: - if variant in SEPARATORS: - self.variant_hints_menu.addSeparator() - elif variant: - self.variant_hints_menu.addAction(variant) - - variant_text = default_variant or "Main" - # Make sure subset name is updated to new plugin - if variant_text == self.variant_input.text(): - self._on_variant_change() - else: - self.variant_input.setText(variant_text) - - def _on_variant_widget_resize(self): - self.variant_hints_btn.setFixedHeight(self.variant_input.height()) - - def _on_variant_btn_click(self): - pos = self.variant_hints_btn.rect().bottomLeft() - point = 
self.variant_hints_btn.mapToGlobal(pos) - self.variant_hints_menu.popup(point) - - def _on_variant_action(self, action): - value = action.text() - if self.variant_input.text() != value: - self.variant_input.setText(value) - - def _on_variant_change(self, variant_value=None): - if not self._prereq_available: - return - - # This should probably never happen? - if not self._selected_creator: - if self.subset_name_input.text(): - self.subset_name_input.setText("") - return - - if variant_value is None: - variant_value = self.variant_input.text() - - if not self._compiled_name_pattern.match(variant_value): - self._create_btn.setEnabled(False) - self._set_variant_state_property("invalid") - self.subset_name_input.setText("< Invalid variant >") - return - - if not self._context_change_is_enabled(): - self._create_btn.setEnabled(True) - self._set_variant_state_property("") - self.subset_name_input.setText("< Valid variant >") - return - - project_name = self.controller.project_name - task_name = self._get_task_name() - - asset_doc = copy.deepcopy(self._asset_doc) - # Calculate subset name with Creator plugin - try: - subset_name = self._selected_creator.get_subset_name( - variant_value, task_name, asset_doc, project_name - ) - except TaskNotSetError: - self._create_btn.setEnabled(False) - self._set_variant_state_property("invalid") - self.subset_name_input.setText("< Missing task >") - return - - self.subset_name_input.setText(subset_name) - - self._create_btn.setEnabled(True) - self._validate_subset_name(subset_name, variant_value) - - def _validate_subset_name(self, subset_name, variant_value): - # Get all subsets of the current asset - if self._subset_names: - existing_subset_names = set(self._subset_names) - else: - existing_subset_names = set() - existing_subset_names_low = set( - _name.lower() - for _name in existing_subset_names - ) - - # Replace - compare_regex = re.compile(re.sub( - variant_value, "(.+)", subset_name, flags=re.IGNORECASE - )) - variant_hints = set() - if variant_value: - for _name in existing_subset_names: - _result = compare_regex.search(_name) - if _result: - variant_hints |= set(_result.groups()) - - # Remove previous hints from menu - for action in tuple(self.variant_hints_group.actions()): - self.variant_hints_group.removeAction(action) - self.variant_hints_menu.removeAction(action) - action.deleteLater() - - # Add separator if there are hints and menu already has actions - if variant_hints and self.variant_hints_menu.actions(): - self.variant_hints_menu.addSeparator() - - # Add hints to actions - for variant_hint in variant_hints: - action = self.variant_hints_menu.addAction(variant_hint) - self.variant_hints_group.addAction(action) - - # Indicate subset existence - if not variant_value: - property_value = "empty" - - elif subset_name.lower() in existing_subset_names_low: - # validate existence of subset name with lowered text - # - "renderMain" vs. 
"rendermain" mean same path item for - # windows - property_value = "exists" - else: - property_value = "new" - - self._set_variant_state_property(property_value) - - variant_is_valid = variant_value.strip() != "" - if variant_is_valid != self._create_btn.isEnabled(): - self._create_btn.setEnabled(variant_is_valid) - - def _set_variant_state_property(self, state): - current_value = self.variant_input.property("state") - if current_value != state: - self.variant_input.setProperty("state", state) - self.variant_input.style().polish(self.variant_input) - - def _on_first_show(self): - center = self.rect().center() - - width, height = self.default_size - self.resize(width, height) - part = int(width / 7) - self._splitter_widget.setSizes( - [part * 2, part * 2, width - (part * 4)] - ) - - new_pos = self.mapToGlobal(center) - new_pos.setX(new_pos.x() - int(self.width() / 2)) - new_pos.setY(new_pos.y() - int(self.height() / 2)) - self.move(new_pos) - - def moveEvent(self, event): - super(CreateDialog, self).moveEvent(event) - self._last_pos = self.pos() - - def showEvent(self, event): - super(CreateDialog, self).showEvent(event) - if self._first_show: - self._first_show = False - self._on_first_show() - - if self._last_pos is not None: - self.move(self._last_pos) - - self._update_help_btn() - - self.refresh() - - def resizeEvent(self, event): - super(CreateDialog, self).resizeEvent(event) - self._update_help_btn() - - def _on_create(self): - indexes = self._creators_view.selectedIndexes() - if not indexes or len(indexes) > 1: - return - - if not self._create_btn.isEnabled(): - return - - index = indexes[0] - creator_label = index.data(QtCore.Qt.DisplayRole) - creator_identifier = index.data(CREATOR_IDENTIFIER_ROLE) - family = index.data(FAMILY_ROLE) - variant = self.variant_input.text() - # Care about subset name only if context change is enabled - subset_name = None - asset_name = None - task_name = None - if self._context_change_is_enabled(): - subset_name = self.subset_name_input.text() - asset_name = self._get_asset_name() - task_name = self._get_task_name() - - pre_create_data = self._pre_create_widget.current_value() - # Where to define these data? - # - what data show be stored? 
- instance_data = { - "asset": asset_name, - "task": task_name, - "variant": variant, - "family": family - } - - error_msg = None - formatted_traceback = None - try: - self.controller.create( - creator_identifier, - subset_name, - instance_data, - pre_create_data - ) - - except CreatorError as exc: - error_msg = str(exc) - - # Use bare except because some hosts raise their exceptions that - # do not inherit from python's `BaseException` - except: - exc_type, exc_value, exc_traceback = sys.exc_info() - formatted_traceback = "".join(traceback.format_exception( - exc_type, exc_value, exc_traceback - )) - error_msg = str(exc_value) - - if error_msg is None: - self._set_creator(self._selected_creator) - self._emit_message("Creation finished...") - else: - box = CreateErrorMessageBox( - creator_label, - subset_name, - asset_name, - error_msg, - formatted_traceback, - parent=self - ) - box.show() - # Store dialog so is not garbage collected before is shown - self._message_dialog = box diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py index 0e92a6fd8d..bf3ef9eec0 100644 --- a/openpype/tools/publisher/widgets/tabs_widget.py +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -53,6 +53,11 @@ class PublisherTabsWidget(QtWidgets.QFrame): self._current_button = None self._buttons_by_identifier = {} + def is_current_tab(self, identifier): + if isinstance(identifier, PublisherTabBtn): + identifier = identifier.identifier + return self._current_button == identifier + def add_tab(self, label, identifier): button = PublisherTabBtn(identifier, label, self) button.tab_clicked.connect(self._on_tab_click) @@ -61,8 +66,12 @@ class PublisherTabsWidget(QtWidgets.QFrame): if self._current_button is None: self.set_current_tab(identifier) + return button def set_current_tab(self, identifier): + if isinstance(identifier, PublisherTabBtn): + identifier = identifier.identifier + if identifier == self._current_button: return diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 8df9f9bbf5..5fd558a1b5 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -15,8 +15,6 @@ from .widgets import ( PublisherTabsWidget, - CreateDialog, - StopBtn, ResetBtn, ValidateBtn, @@ -76,7 +74,7 @@ class PublisherWindow(QtWidgets.QDialog): # Tabs widget under header tabs_widget = PublisherTabsWidget(self) - tabs_widget.add_tab("Create", "create") + create_tab = tabs_widget.add_tab("Create", "create") tabs_widget.add_tab("Publish", "publish") tabs_widget.add_tab("Report", "report") tabs_widget.add_tab("Details", "details") @@ -137,8 +135,6 @@ class PublisherWindow(QtWidgets.QDialog): main_layout.addWidget(content_stacked_widget, 1) main_layout.addWidget(footer_widget, 0) - creator_window = CreateDialog(controller, parent=self) - tabs_widget.tab_changed.connect(self._on_tab_change) create_overview_widget.active_changed.connect( self._on_context_or_active_change @@ -165,6 +161,7 @@ class PublisherWindow(QtWidgets.QDialog): self._header_layout = header_layout self._tabs_widget = tabs_widget + self._create_tab = create_tab self._content_stacked_widget = content_stacked_widget self.content_stacked_layout = content_stacked_layout @@ -182,8 +179,6 @@ class PublisherWindow(QtWidgets.QDialog): self._controller = controller - self.creator_window = creator_window - @property def controller(self): return self._controller @@ -208,7 +203,10 @@ class PublisherWindow(QtWidgets.QDialog): self.context_label.setText(label) 
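The `_validate_subset_name()` logic removed above derives variant hints by swapping the typed variant out of the subset name for a capture group and matching the result against the asset's existing subset names. A standalone sketch of that idea; `collect_variant_hints` and the sample names are illustrative, not existing helpers:

    import re


    def collect_variant_hints(subset_name, variant_value, existing_names):
        """Offer variants already used by sibling subsets as hints.

        The variant part of the would-be subset name is replaced with a
        capture group and matched against existing subset names, so
        'renderMain' next to an existing 'renderHero' yields 'Hero'.
        """
        if not variant_value:
            return set()

        # e.g. "renderMain" with variant "Main" becomes pattern "render(.+)"
        pattern = re.sub(
            variant_value, "(.+)", subset_name, flags=re.IGNORECASE
        )
        compare_regex = re.compile(pattern)

        hints = set()
        for name in existing_names:
            match = compare_regex.search(name)
            if match:
                hints.update(match.groups())
        return hints


    existing = {"renderMain", "renderHero", "modelMain"}
    print(collect_variant_hints("renderMain", "Main", existing))
    # A set containing 'Main' and 'Hero'; 'modelMain' does not match.

As in the removed code, the variant is used directly as a regular expression pattern, so a variant containing regex metacharacters would need `re.escape` first.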
def _on_tab_change(self, prev_tab, new_tab): - print(prev_tab, new_tab) + if new_tab in ("create", "publish"): + self._create_overview_widget.set_state(prev_tab, new_tab) + + # TODO handle rest of conditions def _on_context_or_active_change(self): self._validate_create_instances() @@ -222,23 +220,9 @@ class PublisherWindow(QtWidgets.QDialog): def _set_publish_visibility(self, visible): if visible: widget = self.publish_frame - publish_frame_visible = True else: widget = self._create_overview_widget - publish_frame_visible = False self.content_stacked_layout.setCurrentWidget(widget) - self._set_publish_frame_visible(publish_frame_visible) - - def _set_publish_frame_visible(self, publish_frame_visible): - """Publish frame visibility has changed. - - Also used in TrayPublisher to be able handle start/end of publish - widget overlay. - """ - - # Hide creator dialog if visible - if publish_frame_visible and self.creator_window.isVisible(): - self.creator_window.close() def _on_reset_clicked(self): self._controller.reset() @@ -264,7 +248,6 @@ class PublisherWindow(QtWidgets.QDialog): self._controller.publish() def _set_footer_enabled(self, enabled): - self.comment_input.setEnabled(enabled) self.reset_btn.setEnabled(True) if enabled: self.stop_btn.setEnabled(False) @@ -276,6 +259,8 @@ class PublisherWindow(QtWidgets.QDialog): self.publish_btn.setEnabled(enabled) def _on_publish_reset(self): + self._create_tab.setEnabled(True) + self.comment_input.setVisible(True) self._set_publish_visibility(False) self._set_footer_enabled(False) @@ -286,6 +271,11 @@ class PublisherWindow(QtWidgets.QDialog): self.validate_btn.setEnabled(False) self.publish_btn.setEnabled(False) + self.comment_input.setVisible(False) + self._create_tab.setEnabled(False) + if self._tabs_widget.is_current_tab(self._create_tab): + self._tabs_widget.set_current_tab("publish") + def _on_publish_validated(self): self.validate_btn.setEnabled(False) From 88ace97c941def6b9189cc4bc9e8bd6792d173bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 17:57:16 +0200 Subject: [PATCH 1385/2550] fixed project selection in tray publisher --- openpype/tools/traypublisher/window.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 128c0fef11..97edb9ab06 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -137,11 +137,14 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): src_index = self._projects_model.find_project(project_name) if src_index is not None: index = self._projects_proxy.mapFromSource(src_index) - if index: - mode = ( - QtCore.QItemSelectionModel.Select - | QtCore.QItemSelectionModel.Rows) - self._projects_view.selectionModel().select(index, mode) + + if index is not None: + selection_model = self._projects_view.selectionModel() + selection_model.select( + index, + QtCore.QItemSelectionModel.SelectCurrent + ) + self._projects_view.setCurrentIndex(index) self._cancel_btn.setVisible(self._project_name is not None) super(StandaloneOverlayWidget, self).showEvent(event) @@ -239,15 +242,15 @@ class TrayPublishWindow(PublisherWindow): def _on_project_select(self, project_name): # TODO register project specific plugin paths - self.controller.save_changes() - self.controller.reset_project_data_cache() + self._controller.save_changes() + self._controller.reset_project_data_cache() self.reset() - if not self.controller.instances: + if not 
self._controller.instances: self._go_to_create_tab() def _on_tray_publish_save(self): - self.controller.save_changes() + self._controller.save_changes() print("NOT YET IMPLEMENTED") From 5857c01442dc81daa71e71f78725a682c450f27e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 18:20:08 +0200 Subject: [PATCH 1386/2550] changed PublishReportViewerWidget to frame --- openpype/tools/publisher/publish_report_viewer/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/publish_report_viewer/widgets.py b/openpype/tools/publisher/publish_report_viewer/widgets.py index 61eb814a56..dc82448495 100644 --- a/openpype/tools/publisher/publish_report_viewer/widgets.py +++ b/openpype/tools/publisher/publish_report_viewer/widgets.py @@ -331,7 +331,7 @@ class DetailsPopup(QtWidgets.QDialog): self.closed.emit() -class PublishReportViewerWidget(QtWidgets.QWidget): +class PublishReportViewerWidget(QtWidgets.QFrame): def __init__(self, parent=None): super(PublishReportViewerWidget, self).__init__(parent) From 55e4aa3c76d397e59c909021e4eb642f6e73821a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 18:20:33 +0200 Subject: [PATCH 1387/2550] added details to window --- openpype/tools/publisher/widgets/__init__.py | 1 + .../tools/publisher/widgets/create_widget.py | 1 + .../publisher/widgets/overview_widget.py | 9 +- openpype/tools/publisher/window.py | 128 +++++++++++------- 4 files changed, 86 insertions(+), 53 deletions(-) diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index 1d0ed0633b..f8e3c4b19b 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -13,6 +13,7 @@ from .publish_widget import PublishFrame from .tabs_widget import PublisherTabsWidget from .overview_widget import CreateOverviewWidget + __all__ = ( "get_icon_path", "get_pixmap", diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index a0b3db0409..733dbf18ca 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -443,6 +443,7 @@ class CreateWidget(QtWidgets.QWidget): splitter_widget.setStretchFactor(3, 1) layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(splitter_widget, 1) prereq_timer = QtCore.QTimer() diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index ddc976d458..7afe02116f 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -85,12 +85,7 @@ class CreateOverviewWidget(QtWidgets.QFrame): # Subset frame layout main_layout = QtWidgets.QVBoxLayout(self) - marings = main_layout.contentsMargins() - marings.setLeft(marings.left() * 2) - marings.setRight(marings.right() * 2) - marings.setTop(marings.top() * 2) - marings.setBottom(0) - main_layout.setContentsMargins(marings) + main_layout.setContentsMargins(0, 0, 0, 0) main_layout.addWidget(subset_content_widget, 1) # --- Calbacks for instances/subsets view --- @@ -137,7 +132,7 @@ class CreateOverviewWidget(QtWidgets.QFrame): self._current_state = "create" subset_attributes_wrap.setVisible(False) - def set_state(self, old_state, new_state): + def set_state(self, new_state, animate): if new_state == self._current_state: return diff --git a/openpype/tools/publisher/window.py 
b/openpype/tools/publisher/window.py index 5fd558a1b5..4aa02ff2d5 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -8,6 +8,7 @@ from openpype.tools.utils import ( PlaceholderLineEdit, PixmapLabel ) +from .publish_report_viewer import PublishReportViewerWidget from .control import PublisherController from .widgets import ( CreateOverviewWidget, @@ -79,13 +80,6 @@ class PublisherWindow(QtWidgets.QDialog): tabs_widget.add_tab("Report", "report") tabs_widget.add_tab("Details", "details") - # Content - content_stacked_widget = QtWidgets.QWidget(self) - - create_overview_widget = CreateOverviewWidget( - controller, content_stacked_widget - ) - # Footer footer_widget = QtWidgets.QWidget(self) footer_bottom_widget = QtWidgets.QWidget(footer_widget) @@ -113,17 +107,45 @@ class PublisherWindow(QtWidgets.QDialog): footer_layout.addWidget(comment_input, 0) footer_layout.addWidget(footer_bottom_widget, 0) + # Content + # - wrap stacked widget under one more widget to be able propagate + # margins (QStackedLayout can't have margins) + content_widget = QtWidgets.QWidget(self) + + content_stacked_widget = QtWidgets.QWidget(content_widget) + + content_layout = QtWidgets.QVBoxLayout(content_widget) + marings = content_layout.contentsMargins() + marings.setLeft(marings.left() * 2) + marings.setRight(marings.right() * 2) + marings.setTop(marings.top() * 2) + marings.setBottom(0) + content_layout.setContentsMargins(marings) + content_layout.addWidget(content_stacked_widget, 1) + + # Overview - create and attributes part + overview_widget = CreateOverviewWidget( + controller, content_stacked_widget + ) + + # Details - Publish details + publish_details_widget = PublishReportViewerWidget( + content_stacked_widget + ) + # Create publish frame publish_frame = PublishFrame(controller, content_stacked_widget) content_stacked_layout = QtWidgets.QStackedLayout( content_stacked_widget ) + content_stacked_layout.setContentsMargins(0, 0, 0, 0) content_stacked_layout.setStackingMode( QtWidgets.QStackedLayout.StackAll ) - content_stacked_layout.addWidget(create_overview_widget) + content_stacked_layout.addWidget(overview_widget) + content_stacked_layout.addWidget(publish_details_widget) content_stacked_layout.addWidget(publish_frame) # Add main frame to this window @@ -132,17 +154,17 @@ class PublisherWindow(QtWidgets.QDialog): main_layout.setSpacing(0) main_layout.addWidget(header_widget, 0) main_layout.addWidget(tabs_widget, 0) - main_layout.addWidget(content_stacked_widget, 1) + main_layout.addWidget(content_widget, 1) main_layout.addWidget(footer_widget, 0) tabs_widget.tab_changed.connect(self._on_tab_change) - create_overview_widget.active_changed.connect( + overview_widget.active_changed.connect( self._on_context_or_active_change ) - create_overview_widget.instance_context_changed.connect( + overview_widget.instance_context_changed.connect( self._on_context_or_active_change ) - create_overview_widget.create_requested.connect( + overview_widget.create_requested.connect( self._on_create_request ) @@ -164,18 +186,20 @@ class PublisherWindow(QtWidgets.QDialog): self._create_tab = create_tab self._content_stacked_widget = content_stacked_widget - self.content_stacked_layout = content_stacked_layout - self._create_overview_widget = create_overview_widget - self.publish_frame = publish_frame + self._content_stacked_layout = content_stacked_layout - self.context_label = context_label + self._overview_widget = overview_widget + self._publish_details_widget = publish_details_widget + 
self._publish_frame = publish_frame - self.comment_input = comment_input + self._context_label = context_label - self.stop_btn = stop_btn - self.reset_btn = reset_btn - self.validate_btn = validate_btn - self.publish_btn = publish_btn + self._comment_input = comment_input + + self._stop_btn = stop_btn + self._reset_btn = reset_btn + self._validate_btn = validate_btn + self._publish_btn = publish_btn self._controller = controller @@ -200,11 +224,23 @@ class PublisherWindow(QtWidgets.QDialog): self._controller.reset() def set_context_label(self, label): - self.context_label.setText(label) + self._context_label.setText(label) - def _on_tab_change(self, prev_tab, new_tab): + def _on_tab_change(self, old_tab, new_tab): if new_tab in ("create", "publish"): - self._create_overview_widget.set_state(prev_tab, new_tab) + animate = True + if old_tab not in ("create", "publish"): + animate = False + self._content_stacked_layout.setCurrentWidget( + self._overview_widget + ) + self._overview_widget.set_state(new_tab, animate) + + elif new_tab == "details": + self._content_stacked_layout.setCurrentWidget( + self._publish_details_widget + ) + # TODO handle rest of conditions @@ -219,10 +255,10 @@ class PublisherWindow(QtWidgets.QDialog): def _set_publish_visibility(self, visible): if visible: - widget = self.publish_frame + widget = self._publish_frame else: - widget = self._create_overview_widget - self.content_stacked_layout.setCurrentWidget(widget) + widget = self._overview_widget + self._content_stacked_layout.setCurrentWidget(widget) def _on_reset_clicked(self): self._controller.reset() @@ -234,7 +270,7 @@ class PublisherWindow(QtWidgets.QDialog): if self._controller.publish_comment_is_set: return - comment = self.comment_input.text() + comment = self._comment_input.text() self._controller.set_comment(comment) def _on_validate_clicked(self): @@ -248,40 +284,40 @@ class PublisherWindow(QtWidgets.QDialog): self._controller.publish() def _set_footer_enabled(self, enabled): - self.reset_btn.setEnabled(True) + self._reset_btn.setEnabled(True) if enabled: - self.stop_btn.setEnabled(False) - self.validate_btn.setEnabled(True) - self.publish_btn.setEnabled(True) + self._stop_btn.setEnabled(False) + self._validate_btn.setEnabled(True) + self._publish_btn.setEnabled(True) else: - self.stop_btn.setEnabled(enabled) - self.validate_btn.setEnabled(enabled) - self.publish_btn.setEnabled(enabled) + self._stop_btn.setEnabled(enabled) + self._validate_btn.setEnabled(enabled) + self._publish_btn.setEnabled(enabled) def _on_publish_reset(self): self._create_tab.setEnabled(True) - self.comment_input.setVisible(True) + self._comment_input.setVisible(True) self._set_publish_visibility(False) self._set_footer_enabled(False) def _on_publish_start(self): - self.reset_btn.setEnabled(False) - self.stop_btn.setEnabled(True) - self.validate_btn.setEnabled(False) - self.publish_btn.setEnabled(False) + self._reset_btn.setEnabled(False) + self._stop_btn.setEnabled(True) + self._validate_btn.setEnabled(False) + self._publish_btn.setEnabled(False) - self.comment_input.setVisible(False) + self._comment_input.setVisible(False) self._create_tab.setEnabled(False) if self._tabs_widget.is_current_tab(self._create_tab): self._tabs_widget.set_current_tab("publish") def _on_publish_validated(self): - self.validate_btn.setEnabled(False) + self._validate_btn.setEnabled(False) def _on_publish_stop(self): - self.reset_btn.setEnabled(True) - self.stop_btn.setEnabled(False) + self._reset_btn.setEnabled(True) + self._stop_btn.setEnabled(False) 
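The window above wraps the stacked widget in an extra container because, as the patch comment notes, QStackedLayout cannot carry contents margins itself, and the layout uses the StackAll mode so the publish frame can sit on top of the overview page while `setCurrentWidget` only decides which page is raised. A self-contained sketch of that overlay arrangement, assuming the `Qt` shim; all widget names here are placeholders:

    from Qt import QtCore, QtWidgets

    app = QtWidgets.QApplication([])

    # The outer wrapper carries the window margins, since the stacked
    # layout itself cannot have them.
    wrap = QtWidgets.QWidget()
    wrap_layout = QtWidgets.QVBoxLayout(wrap)
    wrap_layout.setContentsMargins(20, 20, 20, 0)

    stack_host = QtWidgets.QWidget(wrap)
    wrap_layout.addWidget(stack_host, 1)

    stacked = QtWidgets.QStackedLayout(stack_host)
    # StackAll keeps every page stacked on top of each other, so one page
    # can act as an overlay; setCurrentWidget decides which one is raised.
    stacked.setStackingMode(QtWidgets.QStackedLayout.StackAll)

    overview = QtWidgets.QLabel("overview page")
    overlay = QtWidgets.QLabel("publish overlay")
    stacked.addWidget(overview)
    stacked.addWidget(overlay)

    stacked.setCurrentWidget(overlay)
    # Drop back to the overview page after a moment.
    QtCore.QTimer.singleShot(1500, lambda: stacked.setCurrentWidget(overview))

    wrap.resize(400, 300)
    wrap.show()
    app.exec_()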
validate_enabled = not self._controller.publish_has_crashed publish_enabled = not self._controller.publish_has_crashed if validate_enabled: @@ -296,8 +332,8 @@ class PublisherWindow(QtWidgets.QDialog): else: publish_enabled = not self._controller.publish_has_finished - self.validate_btn.setEnabled(validate_enabled) - self.publish_btn.setEnabled(publish_enabled) + self._validate_btn.setEnabled(validate_enabled) + self._publish_btn.setEnabled(publish_enabled) def _validate_create_instances(self): if not self._controller.host_is_valid: From bd9a987f7ba9de82a75f3d6b8e3d0cfbcd1b872c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 18:33:22 +0200 Subject: [PATCH 1388/2550] update details in certain situations --- openpype/tools/publisher/widgets/tabs_widget.py | 15 +++++++++------ openpype/tools/publisher/window.py | 16 ++++++++++++++-- 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py index bf3ef9eec0..84638a002c 100644 --- a/openpype/tools/publisher/widgets/tabs_widget.py +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -50,13 +50,13 @@ class PublisherTabsWidget(QtWidgets.QFrame): self._btns_layout = btns_layout - self._current_button = None + self._current_identifier = None self._buttons_by_identifier = {} def is_current_tab(self, identifier): if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier - return self._current_button == identifier + return self._current_identifier == identifier def add_tab(self, label, identifier): button = PublisherTabBtn(identifier, label, self) @@ -64,7 +64,7 @@ class PublisherTabsWidget(QtWidgets.QFrame): self._btns_layout.addWidget(button, 0) self._buttons_by_identifier[identifier] = button - if self._current_button is None: + if self._current_identifier is None: self.set_current_tab(identifier) return button @@ -72,21 +72,24 @@ class PublisherTabsWidget(QtWidgets.QFrame): if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier - if identifier == self._current_button: + if identifier == self._current_identifier: return new_btn = self._buttons_by_identifier.get(identifier) if new_btn is None: return - old_identifier = self._current_button + old_identifier = self._current_identifier old_btn = self._buttons_by_identifier.get(old_identifier) - self._current_button = identifier + self._current_identifier = identifier if old_btn is not None: old_btn.deactivate() new_btn.activate() self.tab_changed.emit(old_identifier, identifier) + def current_tab(self): + return self._current_identifier + def _on_tab_click(self, identifier): self.set_current_tab(identifier) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 4aa02ff2d5..95c639a56c 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -226,7 +226,17 @@ class PublisherWindow(QtWidgets.QDialog): def set_context_label(self, label): self._context_label.setText(label) + def _update_publish_details_widget(self, force=False): + if not force and self._tabs_widget.current_tab() != "details": + return + + report_data = self.controller.get_publish_report() + self._publish_details_widget.set_report_data(report_data) + def _on_tab_change(self, old_tab, new_tab): + if old_tab == "details": + self._publish_details_widget.close_details_popup() + if new_tab in ("create", "publish"): animate = True if old_tab not in ("create", "publish"): @@ -240,7 +250,7 @@ class 
PublisherWindow(QtWidgets.QDialog): self._content_stacked_layout.setCurrentWidget( self._publish_details_widget ) - + self._update_publish_details_widget() # TODO handle rest of conditions @@ -298,8 +308,8 @@ class PublisherWindow(QtWidgets.QDialog): self._create_tab.setEnabled(True) self._comment_input.setVisible(True) self._set_publish_visibility(False) - self._set_footer_enabled(False) + self._update_publish_details_widget() def _on_publish_start(self): self._reset_btn.setEnabled(False) @@ -334,6 +344,7 @@ class PublisherWindow(QtWidgets.QDialog): self._validate_btn.setEnabled(validate_enabled) self._publish_btn.setEnabled(publish_enabled) + self._update_publish_details_widget() def _validate_create_instances(self): if not self._controller.host_is_valid: @@ -359,3 +370,4 @@ class PublisherWindow(QtWidgets.QDialog): context_title = self.controller.get_context_title() self.set_context_label(context_title) + self._update_publish_details_widget() From acb167c0d4b48afce082f6d76dad2ecf264b0096 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 18:45:48 +0200 Subject: [PATCH 1389/2550] added report page to window --- openpype/tools/publisher/widgets/__init__.py | 6 ++++-- .../publisher/widgets/overview_widget.py | 4 ++-- .../publisher/widgets/validations_widget.py | 5 ++--- openpype/tools/publisher/window.py | 19 ++++++++++++++++--- 4 files changed, 24 insertions(+), 10 deletions(-) diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index f8e3c4b19b..81bb77ce89 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -11,7 +11,8 @@ from .widgets import ( ) from .publish_widget import PublishFrame from .tabs_widget import PublisherTabsWidget -from .overview_widget import CreateOverviewWidget +from .overview_widget import OverviewWidget +from .validations_widget import ValidationsWidget __all__ = ( @@ -27,5 +28,6 @@ __all__ = ( "PublishFrame", "PublisherTabsWidget", - "CreateOverviewWidget", + "OverviewWidget", + "ValidationsWidget", ) diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 7afe02116f..90527234a7 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -13,13 +13,13 @@ from .widgets import ( from .create_widget import CreateWidget -class CreateOverviewWidget(QtWidgets.QFrame): +class OverviewWidget(QtWidgets.QFrame): active_changed = QtCore.Signal() instance_context_changed = QtCore.Signal() create_requested = QtCore.Signal() def __init__(self, controller, parent): - super(CreateOverviewWidget, self).__init__(parent) + super(OverviewWidget, self).__init__(parent) self._refreshing_instances = False self._controller = controller diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index e7ab4ecf5a..b70cd81878 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -400,7 +400,7 @@ class VerticallScrollArea(QtWidgets.QScrollArea): return super(VerticallScrollArea, self).eventFilter(obj, event) -class ValidationsWidget(QtWidgets.QWidget): +class ValidationsWidget(QtWidgets.QFrame): """Widgets showing validation error. This widget is shown if validation error/s happened during validation part. 
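`PublishReportViewerWidget` and `ValidationsWidget` are both changed from QWidget to QFrame subclasses, presumably because a plain QWidget subclass does not paint stylesheet backgrounds unless `Qt.WA_StyledBackground` is set (or a paintEvent is provided), whereas a QFrame subclass paints them out of the box. A small sketch of that difference, assuming the `Qt` shim; the panel classes are illustrative:

    from Qt import QtCore, QtWidgets


    class PlainPanel(QtWidgets.QWidget):
        pass


    class FramedPanel(QtWidgets.QFrame):
        pass


    app = QtWidgets.QApplication([])

    window = QtWidgets.QWidget()
    layout = QtWidgets.QHBoxLayout(window)
    for cls in (PlainPanel, FramedPanel):
        panel = cls(window)
        # Only the QFrame subclass paints this background on its own; the
        # plain QWidget subclass would additionally need
        # panel.setAttribute(QtCore.Qt.WA_StyledBackground, True).
        panel.setStyleSheet("background-color: #36393f;")
        panel.setMinimumSize(160, 120)
        layout.addWidget(panel)

    window.show()
    app.exec_()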
@@ -418,11 +418,10 @@ class ValidationsWidget(QtWidgets.QWidget): │ Publish buttons │ └───────────────────────────────┘ """ + def __init__(self, controller, parent): super(ValidationsWidget, self).__init__(parent) - self.setAttribute(QtCore.Qt.WA_TranslucentBackground) - errors_scroll = VerticallScrollArea(self) errors_scroll.setWidgetResizable(True) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 95c639a56c..568735f4ae 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -11,7 +11,8 @@ from openpype.tools.utils import ( from .publish_report_viewer import PublishReportViewerWidget from .control import PublisherController from .widgets import ( - CreateOverviewWidget, + OverviewWidget, + ValidationsWidget, PublishFrame, PublisherTabsWidget, @@ -124,10 +125,12 @@ class PublisherWindow(QtWidgets.QDialog): content_layout.addWidget(content_stacked_widget, 1) # Overview - create and attributes part - overview_widget = CreateOverviewWidget( + overview_widget = OverviewWidget( controller, content_stacked_widget ) + report_widget = ValidationsWidget(controller, parent) + # Details - Publish details publish_details_widget = PublishReportViewerWidget( content_stacked_widget @@ -145,6 +148,7 @@ class PublisherWindow(QtWidgets.QDialog): QtWidgets.QStackedLayout.StackAll ) content_stacked_layout.addWidget(overview_widget) + content_stacked_layout.addWidget(report_widget) content_stacked_layout.addWidget(publish_details_widget) content_stacked_layout.addWidget(publish_frame) @@ -189,6 +193,7 @@ class PublisherWindow(QtWidgets.QDialog): self._content_stacked_layout = content_stacked_layout self._overview_widget = overview_widget + self._report_widget = report_widget self._publish_details_widget = publish_details_widget self._publish_frame = publish_frame @@ -252,7 +257,10 @@ class PublisherWindow(QtWidgets.QDialog): ) self._update_publish_details_widget() - # TODO handle rest of conditions + elif new_tab == "report": + self._content_stacked_layout.setCurrentWidget( + self._report_widget + ) def _on_context_or_active_change(self): self._validate_create_instances() @@ -319,6 +327,8 @@ class PublisherWindow(QtWidgets.QDialog): self._comment_input.setVisible(False) self._create_tab.setEnabled(False) + + self._report_widget.clear() if self._tabs_widget.is_current_tab(self._create_tab): self._tabs_widget.set_current_tab("publish") @@ -346,6 +356,9 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_btn.setEnabled(publish_enabled) self._update_publish_details_widget() + validation_errors = self._controller.get_validation_errors() + self._report_widget.set_errors(validation_errors) + def _validate_create_instances(self): if not self._controller.host_is_valid: self._set_footer_enabled(True) From 3f4d59b54667b9c0fbda928173793ea31e599e9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 28 Sep 2022 18:45:58 +0200 Subject: [PATCH 1390/2550] removed report and validation from publish frame --- .../tools/publisher/widgets/publish_widget.py | 41 ++----------------- 1 file changed, 3 insertions(+), 38 deletions(-) diff --git a/openpype/tools/publisher/widgets/publish_widget.py b/openpype/tools/publisher/widgets/publish_widget.py index b32b5381d1..ea23f9c42c 100644 --- a/openpype/tools/publisher/widgets/publish_widget.py +++ b/openpype/tools/publisher/widgets/publish_widget.py @@ -6,8 +6,6 @@ from Qt import QtWidgets, QtCore, QtGui from openpype.pipeline import KnownPublishError -from .validations_widget import ValidationsWidget 
-from ..publish_report_viewer import PublishReportViewerWidget from .widgets import ( StopBtn, ResetBtn, @@ -101,9 +99,6 @@ class PublishFrame(QtWidgets.QFrame): self.setObjectName("PublishFrame") - # Widget showing validation errors. Their details and action callbacks. - validation_errors_widget = ValidationsWidget(controller, self) - # Bottom part of widget where process and callback buttons are showed # - QFrame used to be able set background using stylesheets easily # and not override all children widgets style @@ -203,17 +198,15 @@ class PublishFrame(QtWidgets.QFrame): publish_widget = QtWidgets.QWidget(self) publish_widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) publish_layout = QtWidgets.QVBoxLayout(publish_widget) - publish_layout.addWidget(validation_errors_widget, 1) + publish_layout.addStretch(1) publish_layout.addWidget(info_frame, 0) details_widget = QtWidgets.QWidget(self) - report_view = PublishReportViewerWidget(details_widget) close_report_btn = QtWidgets.QPushButton(details_widget) close_report_icon = self._get_report_close_icon() close_report_btn.setIcon(close_report_icon) details_layout = QtWidgets.QVBoxLayout(details_widget) - details_layout.addWidget(report_view) details_layout.addWidget(close_report_btn) main_layout = QtWidgets.QStackedLayout(self) @@ -224,8 +217,6 @@ class PublishFrame(QtWidgets.QFrame): main_layout.setCurrentWidget(publish_widget) - show_details_btn.clicked.connect(self._on_show_details) - copy_report_btn.clicked.connect(self._on_copy_report) export_report_btn.clicked.connect(self._on_export_report) @@ -234,8 +225,6 @@ class PublishFrame(QtWidgets.QFrame): validate_btn.clicked.connect(self._on_validate_clicked) publish_btn.clicked.connect(self._on_publish_clicked) - close_report_btn.clicked.connect(self._on_close_report_clicked) - controller.add_publish_reset_callback(self._on_publish_reset) controller.add_publish_started_callback(self._on_publish_start) controller.add_publish_validated_callback(self._on_publish_validated) @@ -249,8 +238,6 @@ class PublishFrame(QtWidgets.QFrame): self._info_frame = info_frame self._publish_widget = publish_widget - self._validation_errors_widget = validation_errors_widget - self._main_layout = main_layout self._main_label = main_label @@ -269,7 +256,6 @@ class PublishFrame(QtWidgets.QFrame): self._publish_btn = publish_btn self._details_widget = details_widget - self._report_view = report_view def _get_report_close_icon(self): size = 100 @@ -314,8 +300,6 @@ class PublishFrame(QtWidgets.QFrame): self._progress_widget.setMaximum(self.controller.publish_max_progress) def _on_publish_start(self): - self._validation_errors_widget.clear() - self._set_success_property(-1) self._change_bg_property() self._set_progress_visibility(True) @@ -388,7 +372,7 @@ class PublishFrame(QtWidgets.QFrame): elif validation_errors: self._set_progress_visibility(False) self._change_bg_property(1) - self._set_validation_errors(validation_errors) + self._set_validation_errors() elif self.controller.publish_has_finished: self._set_finished() @@ -421,14 +405,12 @@ class PublishFrame(QtWidgets.QFrame): self._message_label_bottom.setText("") self._set_success_property(0) - def _set_validation_errors(self, validation_errors): + def _set_validation_errors(self): self._main_label.setText("Your publish didn't pass studio validations") self._message_label_top.setText("") self._message_label_bottom.setText("Check results above please") self._set_success_property(2) - self._validation_errors_widget.set_errors(validation_errors) - def 
_set_finished(self): self._main_label.setText("Finished") self._message_label_top.setText("") @@ -489,23 +471,6 @@ class PublishFrame(QtWidgets.QFrame): with open(full_path, "w") as file_stream: json.dump(logs, file_stream) - def _on_show_details(self): - self._change_bg_property(2) - self._main_layout.setCurrentWidget(self._details_widget) - report_data = self.controller.get_publish_report() - self._report_view.set_report_data(report_data) - - def _on_close_report_clicked(self): - self._report_view.close_details_popup() - if self.controller.get_publish_crash_error(): - self._change_bg_property() - - elif self.controller.get_validation_errors(): - self._change_bg_property(1) - else: - self._change_bg_property(2) - self._main_layout.setCurrentWidget(self._publish_widget) - def _on_reset_clicked(self): self.controller.reset() From 15105b36d94a7a5d1615652bfc5ef66b67571083 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 20:03:58 +0200 Subject: [PATCH 1391/2550] Always return deep copy of the cache --- openpype/style/__init__.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/openpype/style/__init__.py b/openpype/style/__init__.py index b34e3f97b0..4411af5451 100644 --- a/openpype/style/__init__.py +++ b/openpype/style/__init__.py @@ -1,4 +1,5 @@ import os +import copy import json import collections import six @@ -49,13 +50,11 @@ def _get_colors_raw_data(): def get_colors_data(): """Only color data from stylesheet data.""" - if _Cache.colors_data is not None: - return _Cache.colors_data - - data = _get_colors_raw_data() - color_data = data.get("color") or {} - _Cache.colors_data = color_data - return color_data + if _Cache.colors_data is None: + data = _get_colors_raw_data() + color_data = data.get("color") or {} + _Cache.colors_data = color_data + return copy.deepcopy(_Cache.colors_data) def _convert_color_values_to_objects(value): @@ -89,16 +88,15 @@ def get_objected_colors(): Returns: dict: Parsed color objects by keys in data. 
""" - if _Cache.objected_colors is not None: - return _Cache.objected_colors + if _Cache.objected_colors is None: + colors_data = get_colors_data() + output = {} + for key, value in colors_data.items(): + output[key] = _convert_color_values_to_objects(value) - colors_data = get_colors_data() - output = {} - for key, value in colors_data.items(): - output[key] = _convert_color_values_to_objects(value) + _Cache.objected_colors = output - _Cache.objected_colors = output - return output + return copy.deepcopy(_Cache.objected_colors) def _load_stylesheet(): From 24d9e5017d2a63abbf7e81f4d6dca53a99c5fa10 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 20:27:01 +0200 Subject: [PATCH 1392/2550] Optimize use of cache by allowing to copy only the part of the data you need --- openpype/style/__init__.py | 23 ++++++++++++++++--- .../publisher/widgets/border_label_widget.py | 3 +-- .../publisher/widgets/list_view_widgets.py | 3 +-- openpype/tools/settings/settings/widgets.py | 5 ++-- openpype/tools/tray/pype_tray.py | 3 +-- openpype/tools/utils/assets_widget.py | 2 +- openpype/tools/utils/lib.py | 4 +--- openpype/tools/utils/overlay_messages.py | 3 +-- openpype/tools/utils/widgets.py | 2 +- openpype/widgets/nice_checkbox.py | 3 +-- 10 files changed, 30 insertions(+), 21 deletions(-) diff --git a/openpype/style/__init__.py b/openpype/style/__init__.py index 4411af5451..473fb42bb5 100644 --- a/openpype/style/__init__.py +++ b/openpype/style/__init__.py @@ -82,11 +82,25 @@ def _convert_color_values_to_objects(value): return parse_color(value) -def get_objected_colors(): +def get_objected_colors(*keys): """Colors parsed from stylesheet data into color definitions. + You can pass multiple arguments to get a key from the data dict's colors. + Because this functions returns a deep copy of the cached data this allows + a much smaller dataset to be copied and thus result in a faster function. + It is however a micro-optimization in the area of 0.001s and smaller. + + For example: + >>> get_colors_data() # copy of full colors dict + >>> get_colors_data("font") + >>> get_colors_data("loader", "asset-view") + + Args: + *keys: Each key argument will return a key nested deeper in the + objected colors data. + Returns: - dict: Parsed color objects by keys in data. + Any: Parsed color objects by keys in data. 
""" if _Cache.objected_colors is None: colors_data = get_colors_data() @@ -96,7 +110,10 @@ def get_objected_colors(): _Cache.objected_colors = output - return copy.deepcopy(_Cache.objected_colors) + output = _Cache.objected_colors + for key in keys: + output = output[key] + return copy.deepcopy(output) def _load_stylesheet(): diff --git a/openpype/tools/publisher/widgets/border_label_widget.py b/openpype/tools/publisher/widgets/border_label_widget.py index 696a9050b8..8e09dd817e 100644 --- a/openpype/tools/publisher/widgets/border_label_widget.py +++ b/openpype/tools/publisher/widgets/border_label_widget.py @@ -158,8 +158,7 @@ class BorderedLabelWidget(QtWidgets.QFrame): """ def __init__(self, label, parent): super(BorderedLabelWidget, self).__init__(parent) - colors_data = get_objected_colors() - color_value = colors_data.get("border") + color_value = get_objected_colors("border") color = None if color_value: color = color_value.get_qcolor() diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 6e31ba635b..32b923c5d6 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -54,8 +54,7 @@ class ListItemDelegate(QtWidgets.QStyledItemDelegate): def __init__(self, parent): super(ListItemDelegate, self).__init__(parent) - colors_data = get_objected_colors() - group_color_info = colors_data["publisher"]["list-view-group"] + colors_data = get_objected_colors("publisher", "list-view-group") self._group_colors = { key: value.get_qcolor() diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 1a4a6877b0..722717df89 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -323,7 +323,7 @@ class SettingsToolBtn(ImageButton): @classmethod def _get_icon_type(cls, btn_type): if btn_type not in cls._cached_icons: - settings_colors = get_objected_colors()["settings"] + settings_colors = get_objected_colors("settings") normal_color = settings_colors["image-btn"].get_qcolor() hover_color = settings_colors["image-btn-hover"].get_qcolor() disabled_color = settings_colors["image-btn-disabled"].get_qcolor() @@ -789,8 +789,7 @@ class ProjectModel(QtGui.QStandardItemModel): self._items_by_name = {} self._versions_by_project = {} - colors = get_objected_colors() - font_color = colors["font"].get_qcolor() + font_color = get_objected_colors("font").get_qcolor() font_color.setAlpha(67) self._version_font_color = font_color self._current_version = get_openpype_version() diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 348573a191..8a24b3eaa6 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -144,8 +144,7 @@ class VersionUpdateDialog(QtWidgets.QDialog): "gifts.png" ) src_image = QtGui.QImage(image_path) - colors = style.get_objected_colors() - color_value = colors["font"] + color_value = style.get_objected_colors("font") return paint_image_with_color( src_image, diff --git a/openpype/tools/utils/assets_widget.py b/openpype/tools/utils/assets_widget.py index 772946e9e1..2a1fb4567c 100644 --- a/openpype/tools/utils/assets_widget.py +++ b/openpype/tools/utils/assets_widget.py @@ -114,7 +114,7 @@ class UnderlinesAssetDelegate(QtWidgets.QItemDelegate): def __init__(self, *args, **kwargs): super(UnderlinesAssetDelegate, self).__init__(*args, **kwargs) - asset_view_colors = 
get_objected_colors()["loader"]["asset-view"] + asset_view_colors = get_objected_colors("loader", "asset-view") self._selected_color = ( asset_view_colors["selected"].get_qcolor() ) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 97b680b77e..fe7dda454b 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -822,8 +822,6 @@ def get_warning_pixmap(color=None): src_image_path = get_image_path("warning.png") src_image = QtGui.QImage(src_image_path) if color is None: - colors = get_objected_colors() - color_value = colors["delete-btn-bg"] - color = color_value.get_qcolor() + color = get_objected_colors("delete-btn-bg").get_qcolor() return paint_image_with_color(src_image, color) diff --git a/openpype/tools/utils/overlay_messages.py b/openpype/tools/utils/overlay_messages.py index 62de2cf272..cbcbb15621 100644 --- a/openpype/tools/utils/overlay_messages.py +++ b/openpype/tools/utils/overlay_messages.py @@ -14,8 +14,7 @@ class CloseButton(QtWidgets.QFrame): def __init__(self, parent): super(CloseButton, self).__init__(parent) - colors = get_objected_colors() - close_btn_color = colors["overlay-messages"]["close-btn"] + close_btn_color = get_objected_colors("overlay-messages", "close-btn") self._color = close_btn_color.get_qcolor() self._mouse_pressed = False policy = QtWidgets.QSizePolicy( diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index df0d349822..c8133b3359 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -40,7 +40,7 @@ class PlaceholderLineEdit(QtWidgets.QLineEdit): # Change placeholder palette color if hasattr(QtGui.QPalette, "PlaceholderText"): filter_palette = self.palette() - color_obj = get_objected_colors()["font"] + color_obj = get_objected_colors("font") color = color_obj.get_qcolor() color.setAlpha(67) filter_palette.setColor( diff --git a/openpype/widgets/nice_checkbox.py b/openpype/widgets/nice_checkbox.py index 56e6d2ac24..334a5d197b 100644 --- a/openpype/widgets/nice_checkbox.py +++ b/openpype/widgets/nice_checkbox.py @@ -66,8 +66,7 @@ class NiceCheckbox(QtWidgets.QFrame): if cls._checked_bg_color is not None: return - colors_data = get_objected_colors() - colors_info = colors_data["nice-checkbox"] + colors_info = get_objected_colors("nice-checkbox") cls._checked_bg_color = colors_info["bg-checked"].get_qcolor() cls._unchecked_bg_color = colors_info["bg-unchecked"].get_qcolor() From e66dbfd3f25249941c462d443c8d1dfb54b2c445 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 28 Sep 2022 20:28:20 +0200 Subject: [PATCH 1393/2550] Fix refactor --- openpype/tools/publisher/widgets/list_view_widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 32b923c5d6..a701181e5b 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -54,7 +54,7 @@ class ListItemDelegate(QtWidgets.QStyledItemDelegate): def __init__(self, parent): super(ListItemDelegate, self).__init__(parent) - colors_data = get_objected_colors("publisher", "list-view-group") + group_color_info = get_objected_colors("publisher", "list-view-group") self._group_colors = { key: value.get_qcolor() From 387a43b09ef5ab3c5441d20d4d04f24f69051274 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 29 Sep 2022 10:29:19 +0200 Subject: [PATCH 1394/2550] fix import of render settings --- 
.../hosts/maya/plugins/publish/validate_render_single_camera.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py index f7ce8873f9..77322fefd5 100644 --- a/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py +++ b/openpype/hosts/maya/plugins/publish/validate_render_single_camera.py @@ -4,7 +4,7 @@ import pyblish.api from maya import cmds import openpype.hosts.maya.api.action -from openpype.hosts.maya.api.render_settings import RenderSettings +from openpype.hosts.maya.api.lib_rendersettings import RenderSettings from openpype.pipeline.publish import ValidateContentsOrder From 0c86e157f98544a5f57b80b0b59450f51ed5a49a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 29 Sep 2022 12:37:55 +0200 Subject: [PATCH 1395/2550] disable attributes after publish start --- openpype/tools/publisher/widgets/overview_widget.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 90527234a7..99b9d88007 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -38,6 +38,7 @@ class OverviewWidget(QtWidgets.QFrame): subset_views_layout = QtWidgets.QStackedLayout() subset_views_layout.addWidget(subset_view_cards) subset_views_layout.addWidget(subset_list_view) + subset_views_layout.setCurrentWidget(subset_view_cards) # Buttons at the bottom of subset view create_btn = CreateInstanceBtn(self) @@ -113,6 +114,7 @@ class OverviewWidget(QtWidgets.QFrame): ) # --- Controller callbacks --- + controller.add_publish_started_callback(self._on_publish_start) controller.add_publish_reset_callback(self._on_publish_reset) controller.add_instances_refresh_callback(self._on_instances_refresh) @@ -252,9 +254,15 @@ class OverviewWidget(QtWidgets.QFrame): # Force to change instance and refresh details self._on_subset_change() + def _on_publish_start(self): + """Publish started.""" + + self._subset_attributes_wrap.setEnabled(False) + def _on_publish_reset(self): """Context in controller has been refreshed.""" + self._subset_attributes_wrap.setEnabled(True) self._subset_content_widget.setEnabled(self._controller.host_is_valid) def _on_instances_refresh(self): From a2a686cfb86156b4bb12ed8256e572867a3e3a53 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 29 Sep 2022 12:38:50 +0200 Subject: [PATCH 1396/2550] added base of validation labels showed in different stages of publishing --- .../publisher/widgets/validations_widget.py | 136 ++++++++++++++++-- 1 file changed, 124 insertions(+), 12 deletions(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index b70cd81878..b21cb3bb1b 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -414,15 +414,76 @@ class ValidationsWidget(QtWidgets.QFrame): │ │ Error detail │ │ │ │ │ │ │ │ │ │ - ├──────┴────────────────┴───────┤ - │ Publish buttons │ - └───────────────────────────────┘ + └──────┴────────────────┴───────┘ """ def __init__(self, controller, parent): super(ValidationsWidget, self).__init__(parent) - errors_scroll = VerticallScrollArea(self) + # Before publishing + before_publish_widget = QtWidgets.QWidget(self) + before_publish_label = QtWidgets.QLabel( + "Nothing to report 
until you run publish", + before_publish_widget + ) + before_publish_label.setAlignment(QtCore.Qt.AlignCenter) + before_publish_layout = QtWidgets.QHBoxLayout(before_publish_widget) + before_publish_layout.setContentsMargins(0, 0, 0, 0) + before_publish_layout.addWidget( + before_publish_label, 1, QtCore.Qt.AlignCenter + ) + + # After success publishing + publish_started_widget = QtWidgets.QWidget(self) + publish_started_label = QtWidgets.QLabel( + "Publishing run smoothly", + publish_started_widget + ) + publish_started_label.setAlignment(QtCore.Qt.AlignCenter) + publish_started_layout = QtWidgets.QHBoxLayout( + publish_started_widget + ) + publish_started_layout.setContentsMargins(0, 0, 0, 0) + publish_started_layout.addWidget( + publish_started_label, 1, QtCore.Qt.AlignCenter + ) + + # After success publishing + publish_stop_ok_widget = QtWidgets.QWidget(self) + publish_stop_ok_label = QtWidgets.QLabel( + "Publishing finished successfully", + publish_stop_ok_widget + ) + publish_stop_ok_label.setAlignment(QtCore.Qt.AlignCenter) + publish_stop_ok_layout = QtWidgets.QHBoxLayout( + publish_stop_ok_widget + ) + publish_stop_ok_layout.setContentsMargins(0, 0, 0, 0) + publish_stop_ok_layout.addWidget( + publish_stop_ok_label, 1, QtCore.Qt.AlignCenter + ) + + # After failed publishing (not with validation error) + publish_stop_fail_widget = QtWidgets.QWidget(self) + publish_stop_fail_label = QtWidgets.QLabel( + "This is not your fault", + publish_stop_fail_widget + ) + publish_stop_fail_label.setAlignment(QtCore.Qt.AlignCenter) + publish_stop_fail_layout = QtWidgets.QHBoxLayout( + publish_stop_fail_widget + ) + publish_stop_fail_layout.setContentsMargins(0, 0, 0, 0) + publish_stop_fail_layout.addWidget( + publish_stop_fail_label, 1, QtCore.Qt.AlignCenter + ) + + # Validation errors + validations_widget = QtWidgets.QWidget(self) + + content_widget = QtWidgets.QWidget(validations_widget) + + errors_scroll = VerticallScrollArea(content_widget) errors_scroll.setWidgetResizable(True) errors_widget = QtWidgets.QWidget(errors_scroll) @@ -432,35 +493,58 @@ class ValidationsWidget(QtWidgets.QFrame): errors_scroll.setWidget(errors_widget) - error_details_frame = QtWidgets.QFrame(self) + error_details_frame = QtWidgets.QFrame(content_widget) error_details_input = QtWidgets.QTextEdit(error_details_frame) error_details_input.setObjectName("InfoText") error_details_input.setTextInteractionFlags( QtCore.Qt.TextBrowserInteraction ) - actions_widget = ValidateActionsWidget(controller, self) + actions_widget = ValidateActionsWidget(controller, content_widget) actions_widget.setMinimumWidth(140) error_details_layout = QtWidgets.QHBoxLayout(error_details_frame) error_details_layout.addWidget(error_details_input, 1) error_details_layout.addWidget(actions_widget, 0) - content_layout = QtWidgets.QHBoxLayout() + content_layout = QtWidgets.QHBoxLayout(content_widget) content_layout.setSpacing(0) content_layout.setContentsMargins(0, 0, 0, 0) content_layout.addWidget(errors_scroll, 0) content_layout.addWidget(error_details_frame, 1) - top_label = QtWidgets.QLabel("Publish validation report", self) + top_label = QtWidgets.QLabel( + "Publish validation report", content_widget + ) top_label.setObjectName("PublishInfoMainLabel") top_label.setAlignment(QtCore.Qt.AlignCenter) - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(top_label) - layout.addLayout(content_layout) + validation_layout = QtWidgets.QVBoxLayout(validations_widget) + validation_layout.setContentsMargins(0, 0, 
0, 0) + validation_layout.addWidget(top_label, 0) + validation_layout.addWidget(content_widget, 1) + + main_layout = QtWidgets.QStackedLayout(self) + main_layout.addWidget(before_publish_widget) + main_layout.addWidget(publish_started_widget) + main_layout.addWidget(publish_stop_ok_widget) + main_layout.addWidget(publish_stop_fail_widget) + main_layout.addWidget(validations_widget) + + main_layout.setCurrentWidget(before_publish_widget) + + controller.add_publish_started_callback(self._on_publish_start) + controller.add_publish_reset_callback(self._on_publish_reset) + controller.add_publish_stopped_callback(self._on_publish_stop) + + self._main_layout = main_layout + + self._before_publish_widget = before_publish_widget + self._publish_started_widget = publish_started_widget + self._publish_stop_ok_widget = publish_stop_ok_widget + self._publish_stop_fail_widget = publish_stop_fail_widget + self._validations_widget = validations_widget self._top_label = top_label self._errors_widget = errors_widget @@ -473,6 +557,8 @@ class ValidationsWidget(QtWidgets.QFrame): self._error_info = {} self._previous_select = None + self._controller = controller + def clear(self): """Delete all dynamic widgets and hide all wrappers.""" self._title_widgets = {} @@ -536,6 +622,32 @@ class ValidationsWidget(QtWidgets.QFrame): self.updateGeometry() + def _set_current_widget(self, widget): + self._main_layout.setCurrentWidget(widget) + + def _on_publish_start(self): + self._set_current_widget(self._publish_started_widget) + + def _on_publish_reset(self): + self._set_current_widget(self._before_publish_widget) + + def _on_publish_stop(self): + if self._controller.publish_has_crashed: + self._set_current_widget(self._publish_stop_fail_widget) + return + + if self._controller.publish_has_validation_errors: + validation_errors = self._controller.get_validation_errors() + self._set_current_widget(self._validations_widget) + self.set_errors(validation_errors) + return + + if self._contoller.publish_has_finished: + self._set_current_widget(self._publish_stop_ok_widget) + return + + self._set_current_widget(self._publish_started_widget) + def _on_select(self, index): if self._previous_select: if self._previous_select.index == index: From e725246c31acb948c0075aff4c33a4cac7ebe921 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 29 Sep 2022 13:49:39 +0200 Subject: [PATCH 1397/2550] publish frame is showed in a different way --- openpype/style/style.css | 10 - .../tools/publisher/widgets/publish_widget.py | 188 ++++++------------ openpype/tools/publisher/widgets/widgets.py | 43 ++-- openpype/tools/publisher/window.py | 141 ++++++++++--- 4 files changed, 194 insertions(+), 188 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 1d112fa575..9dbc6b2adc 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -971,16 +971,6 @@ VariantInputsWidget QToolButton { color: {color:publisher:error}; } -#PublishFrame { - background: rgba(0, 0, 0, 127); -} -#PublishFrame[state="1"] { - background: rgb(22, 25, 29); -} -#PublishFrame[state="2"] { - background: {color:bg}; -} - #PublishInfoFrame { background: {color:bg}; border: 2px solid black; diff --git a/openpype/tools/publisher/widgets/publish_widget.py b/openpype/tools/publisher/widgets/publish_widget.py index ea23f9c42c..8a8c25e1b6 100644 --- a/openpype/tools/publisher/widgets/publish_widget.py +++ b/openpype/tools/publisher/widgets/publish_widget.py @@ -2,7 +2,7 @@ import os import json import time -from Qt import QtWidgets, QtCore, 
QtGui +from Qt import QtWidgets, QtCore from openpype.pipeline import KnownPublishError @@ -11,9 +11,7 @@ from .widgets import ( ResetBtn, ValidateBtn, PublishBtn, - CopyPublishReportBtn, - SavePublishReportBtn, - ShowPublishReportBtn + PublishReportBtn, ) @@ -66,7 +64,7 @@ class ActionsButton(QtWidgets.QToolButton): self._set_action(action) -class PublishFrame(QtWidgets.QFrame): +class PublishFrame(QtWidgets.QWidget): """Frame showed during publishing. Shows all information related to publishing. Contains validation error @@ -77,65 +75,49 @@ class PublishFrame(QtWidgets.QFrame): only when publishing process is stopped and must be manually triggered to change into that layer. - +------------------------------------------------------------------------+ - | | - | | - | | - | < Validation error widget > | - | | - | | - | | - | | +------------------------------------------------------------------------+ | < Main label > | | < Label top > | | (#### 10% ) | | | - | Report:

[... GIT binary patch data omitted ...]

From f4bcbe33b86f06d1bf8729b50af017be00da8308 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Wed, 5 Oct 2022 13:27:58 +0200
Subject: [PATCH 1462/2550] change validate button

---
 .../publisher/widgets/images/validate.png | Bin 16232 -> 7688 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/openpype/tools/publisher/widgets/images/validate.png b/openpype/tools/publisher/widgets/images/validate.png
index d3cfa0b75def5efc6e60158c8d6be6768a9c5086..c8472e9d3133522f99cf34ff994331e03c701ecd 100644
GIT binary patch
literal 7688
[... base85-encoded PNG data omitted ...]

Date: Wed, 5 Oct 2022 13:50:27 +0200
Subject: [PATCH 1463/2550] OP-4181 - changed legacy way to update database

Operations used instead.
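For context on "Operations used instead": the integrator used to collect raw pymongo InsertOne/ReplaceOne objects and flush them through legacy_io.database[project_name].bulk_write(). The patch below routes the same writes through OperationsSession. A minimal sketch of the new pattern, assuming the documents and update data are already prepared; the helper name store_docs is illustrative, only the OperationsSession calls mirror the diff:

from openpype.client.operations import OperationsSession


def store_docs(project_name, doc_to_create, doc_to_update, update_data):
    # Previously pymongo InsertOne/ReplaceOne objects were appended to a
    # "bulk_writes" list and sent in one bulk_write call. The session keeps
    # the same "don't touch the database until everything is O.K." behaviour:
    # nothing is written until commit() is called.
    op_session = OperationsSession()
    op_session.create_entity(
        project_name, doc_to_create["type"], doc_to_create
    )
    op_session.update_entity(
        project_name,
        doc_to_update["type"],
        doc_to_update["_id"],
        update_data
    )
    op_session.commit()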
--- openpype/client/operations.py | 43 +++++++ .../plugins/publish/integrate_hero_version.py | 115 +++++++++--------- 2 files changed, 98 insertions(+), 60 deletions(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 48e8645726..1f2727599c 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -23,6 +23,7 @@ CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" CURRENT_VERSION_SCHEMA = "openpype:version-3.0" +CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0" CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0" @@ -162,6 +163,34 @@ def new_version_doc(version, subset_id, data=None, entity_id=None): } +def new_hero_version_doc(version_id, parent_id, data=None, entity_id=None): + """Create skeleton data of hero version document. + + Args: + version_id (ObjectId): Is considered as unique identifier of version + under subset. + parent_id (Union[str, ObjectId]): Id of parent subset. + data (Dict[str, Any]): Version document data. + entity_id (Union[str, ObjectId]): Predefined id of document. New id is + created if not passed. + + Returns: + Dict[str, Any]: Skeleton of version document. + """ + + if data is None: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "schema": CURRENT_HERO_VERSION_SCHEMA, + "type": "hero_version", + "version_id": version_id, + "parent": parent_id, + "data": data + } + + def new_representation_doc( name, version_id, context, data=None, entity_id=None ): @@ -293,6 +322,20 @@ def prepare_version_update_data(old_doc, new_doc, replace=True): return _prepare_update_data(old_doc, new_doc, replace) +def prepare_hero_version_update_data(old_doc, new_doc, replace=True): + """Compare two hero version documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + def prepare_representation_update_data(old_doc, new_doc, replace=True): """Compare two representation documents and prepare update data. 
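The two helpers added above are meant to be combined with OperationsSession the way the plugin change below does: build a skeleton hero version document, diff it against an existing one, and register either a create or an update. A condensed sketch, with the wrapper function upsert_hero_version being hypothetical:

from openpype.client.operations import (
    OperationsSession,
    new_hero_version_doc,
    prepare_hero_version_update_data,
)


def upsert_hero_version(project_name, src_version, old_hero_version=None):
    # Skeleton hero version document pointing at the source version.
    new_doc = new_hero_version_doc(
        src_version["_id"], src_version["parent"]
    )

    op_session = OperationsSession()
    if old_hero_version is None:
        # First hero version for the subset.
        op_session.create_entity(project_name, new_doc["type"], new_doc)
    else:
        # Keep the existing id and write only the changed keys.
        new_doc["_id"] = old_hero_version["_id"]
        update_data = prepare_hero_version_update_data(
            old_hero_version, new_doc
        )
        op_session.update_entity(
            project_name,
            new_doc["type"],
            old_hero_version["_id"],
            update_data
        )

    op_session.commit()
    return new_doc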
diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index c0760a5471..26327ccc97 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -4,8 +4,6 @@ import clique import errno import shutil -from bson.objectid import ObjectId -from pymongo import InsertOne, ReplaceOne import pyblish.api from openpype.client import ( @@ -14,10 +12,16 @@ from openpype.client import ( get_archived_representations, get_representations, ) +from openpype.client.operations import ( + OperationsSession, + _create_or_convert_to_mongo_id, + new_hero_version_doc, + prepare_hero_version_update_data, + prepare_representation_update_data, +) from openpype.lib import create_hard_link from openpype.pipeline import ( - schema, - legacy_io, + schema ) from openpype.pipeline.publish import get_publish_template_name @@ -187,35 +191,29 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): repre["name"].lower(): repre for repre in old_repres } - if old_version: - new_version_id = old_version["_id"] - else: - new_version_id = ObjectId() + op_session = OperationsSession() - new_hero_version = { - "_id": new_version_id, - "version_id": src_version_entity["_id"], - "parent": src_version_entity["parent"], - "type": "hero_version", - "schema": "openpype:hero_version-1.0" - } - schema.validate(new_hero_version) - - # Don't make changes in database until everything is O.K. - bulk_writes = [] + new_hero_version = new_hero_version_doc( + src_version_entity["_id"], + src_version_entity["parent"] + ) if old_version: self.log.debug("Replacing old hero version.") - bulk_writes.append( - ReplaceOne( - {"_id": new_hero_version["_id"]}, - new_hero_version - ) + new_hero_version["_id"] = old_version["_id"] + update_data = prepare_hero_version_update_data( + old_version, new_hero_version + ) + op_session.update_entity( + project_name, + new_hero_version["type"], + old_version["_id"], + update_data ) else: self.log.debug("Creating first hero version.") - bulk_writes.append( - InsertOne(new_hero_version) + op_session.create_entity( + project_name, new_hero_version["type"], new_hero_version ) # Separate old representations into `to replace` and `to delete` @@ -235,7 +233,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repres = list(get_archived_representations( project_name, # Check what is type of archived representation - version_ids=[new_version_id] + version_ids=[new_hero_version["_id"]] )) archived_repres_by_name = {} for repre in archived_repres: @@ -382,12 +380,15 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # Replace current representation if repre_name_low in old_repres_to_replace: old_repre = old_repres_to_replace.pop(repre_name_low) + repre["_id"] = old_repre["_id"] - bulk_writes.append( - ReplaceOne( - {"_id": old_repre["_id"]}, - repre - ) + update_data = prepare_representation_update_data( + old_repre, repre) + op_session.update_entity( + project_name, + "representation", + old_repre["_id"], + update_data ) # Unarchive representation @@ -395,21 +396,21 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repre = archived_repres_by_name.pop( repre_name_low ) - old_id = archived_repre["old_id"] - repre["_id"] = old_id - bulk_writes.append( - ReplaceOne( - {"old_id": old_id}, - repre - ) + repre["_id"] = archived_repre["old_id"] + update_data = prepare_representation_update_data( + archived_repre, repre) + op_session.update_entity( + project_name, + 
"representation", + archived_repre["_id"], + update_data ) # Create representation else: - repre["_id"] = ObjectId() - bulk_writes.append( - InsertOne(repre) - ) + repre["_id"] = _create_or_convert_to_mongo_id(None) + op_session.create_entity(project_name, "representation", + repre) self.path_checks = [] @@ -430,28 +431,22 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repre = archived_repres_by_name.pop( repre_name_low ) - repre["old_id"] = repre["_id"] - repre["_id"] = archived_repre["_id"] - repre["type"] = archived_repre["type"] - bulk_writes.append( - ReplaceOne( - {"_id": archived_repre["_id"]}, - repre - ) - ) + changes["old_id"] = repre["_id"] + changes["_id"] = archived_repre["_id"] + changes["type"] = archived_repre["type"] + op_session.update_entity(project_name, + archived_repre["type"], + archived_repre["_id"], + changes) else: repre["old_id"] = repre["_id"] - repre["_id"] = ObjectId() + repre["_id"] = _create_or_convert_to_mongo_id(None) repre["type"] = "archived_representation" - bulk_writes.append( - InsertOne(repre) - ) + op_session.create_entity(project_name, "representation", + repre) - if bulk_writes: - legacy_io.database[project_name].bulk_write( - bulk_writes - ) + op_session.commit() # Remove backuped previous hero if ( From 5c401fb17ff893420137bfbd872997fea954d857 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 5 Oct 2022 14:13:52 +0200 Subject: [PATCH 1464/2550] OP-4181 - Hound --- openpype/plugins/publish/integrate_hero_version.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 26327ccc97..adc629352e 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -432,9 +432,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): repre_name_low ) - changes["old_id"] = repre["_id"] - changes["_id"] = archived_repre["_id"] - changes["type"] = archived_repre["type"] + changes = {"old_id": repre["_id"], + "_id": archived_repre["_id"], + "type": archived_repre["type"]} op_session.update_entity(project_name, archived_repre["type"], archived_repre["_id"], From 7d2a6bfab7b37320331c787c883bd43f780ca7db Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 5 Oct 2022 14:56:39 +0200 Subject: [PATCH 1465/2550] Fix frame number recognition - Previously 1005 would fail due to a "5" being present. This would only be noticable if the start frame used for detection included a digit that was not 0, 1, 2, 3 or 4. 
--- openpype/hosts/maya/plugins/load/load_yeti_cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py index 8435ba2493..8d15ed23c4 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py @@ -250,7 +250,7 @@ class YetiCacheLoader(load.LoaderPlugin): """ name = node_name.replace(":", "_") - pattern = r"^({name})(\.[0-4]+)?(\.fur)$".format(name=re.escape(name)) + pattern = r"^({name})(\.[0-9]+)?(\.fur)$".format(name=re.escape(name)) files = [fname for fname in os.listdir(root) if re.match(pattern, fname)] From 614069542b203e9120f012cb90b6be6fc52bf9d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 5 Oct 2022 16:26:08 +0200 Subject: [PATCH 1466/2550] :bug: fix regression of renderman deadline hack --- .../plugins/publish/submit_maya_deadline.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 44f2b5b2b4..4d6068f3c0 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -475,6 +475,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): layer_metadata = render_products.layer_data layer_prefix = layer_metadata.filePrefix + plugin_info = copy.deepcopy(self.plugin_info) + plugin_info.update({ + # Output directory and filename + "OutputFilePath": data["dirname"].replace("\\", "/"), + "OutputFilePrefix": layer_prefix, + }) + # This hack is here because of how Deadline handles Renderman version. # it considers everything with `renderman` set as version older than # Renderman 22, and so if we are using renderman > 21 we need to set @@ -491,12 +498,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if int(rman_version.split(".")[0]) > 22: renderer = "renderman22" - plugin_info = copy.deepcopy(self.plugin_info) - plugin_info.update({ - # Output directory and filename - "OutputFilePath": data["dirname"].replace("\\", "/"), - "OutputFilePrefix": layer_prefix, - }) + plugin_info["Renderer"] = renderer return job_info, plugin_info From 870e3394514e562740148b246812841262a13656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 5 Oct 2022 16:37:16 +0200 Subject: [PATCH 1467/2550] :bug: set default value for render setup option RenderSetupIncludeLights must be either set to 1 or 0 or not set at all --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 44f2b5b2b4..5021c0796b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -190,7 +190,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], Renderer=instance.data["renderer"], - RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights"), # noqa + # Set it to default Maya behaviour if it cannot be determined + # from instance (but it should be, by the Collector). 
+ RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights", 1), # noqa ProjectPath=context.data["workspaceDir"], UsingRenderLayers=True, ) From f42fc0a7df1522c347a29dc2df04a304bc98c992 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Oct 2022 16:43:25 +0200 Subject: [PATCH 1468/2550] removed unused imports --- openpype/tools/publisher/control.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index cd326412a5..621a9855a2 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1,16 +1,9 @@ import os import copy -import inspect import logging import traceback import collections -import weakref -try: - from weakref import WeakMethod -except Exception: - from openpype.lib.python_2_comp import WeakMethod - import pyblish.api from openpype.client import get_assets From 7737003fbc17428f90941944b2ca332cebd1bb29 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Oct 2022 16:49:50 +0200 Subject: [PATCH 1469/2550] removed unused attribute 'dbcon' --- openpype/tools/publisher/control.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 621a9855a2..481fb5981b 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -436,11 +436,6 @@ class PublisherController: return self.host.get_current_context()["task_name"] - @property - def dbcon(self): - """Pointer to AvalonMongoDB in creator context.""" - return self.create_context.dbcon - @property def instances(self): """Current instances in create context.""" From 8dc8b7386ca6ee0c34d03e053905da942ad1f7e0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Oct 2022 17:10:37 +0200 Subject: [PATCH 1470/2550] don't bother with AvalonMongoDB in traypublisher --- openpype/tools/traypublisher/window.py | 20 ++++++-------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 6fcee79775..b1ff3c7383 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -10,23 +10,19 @@ import platform from Qt import QtWidgets, QtCore import qtawesome +import appdirs -from openpype.pipeline import ( - install_host, - AvalonMongoDB, -) +from openpype.lib import JSONSettingRegistry +from openpype.pipeline import install_host from openpype.hosts.traypublisher.api import TrayPublisherHost -from openpype.tools.publisher import PublisherWindow +from openpype.tools.publisher.window import PublisherWindow +from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.constants import PROJECT_NAME_ROLE from openpype.tools.utils.models import ( ProjectModel, ProjectSortFilterProxy ) -from openpype.tools.utils import PlaceholderLineEdit -import appdirs -from openpype.lib import JSONSettingRegistry - class TrayPublisherRegistry(JSONSettingRegistry): """Class handling OpenPype general settings registry. 
@@ -57,14 +53,10 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): content_widget = QtWidgets.QWidget(middle_frame) - # Create db connection for projects model - dbcon = AvalonMongoDB() - dbcon.install() - header_label = QtWidgets.QLabel("Choose project", content_widget) header_label.setObjectName("ChooseProjectLabel") # Create project models and view - projects_model = ProjectModel(dbcon) + projects_model = ProjectModel() projects_proxy = ProjectSortFilterProxy() projects_proxy.setSourceModel(projects_model) projects_proxy.setFilterKeyColumn(0) From ff2453c70d04bf38762736d9f7168d159e91379c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Oct 2022 17:22:04 +0200 Subject: [PATCH 1471/2550] PublisherController can be imported without import of Qt --- openpype/tools/publisher/__init__.py | 7 -- openpype/tools/publisher/control.py | 75 +++------------------- openpype/tools/publisher/control_qt.py | 88 ++++++++++++++++++++++++++ openpype/tools/publisher/window.py | 7 +- 4 files changed, 102 insertions(+), 75 deletions(-) create mode 100644 openpype/tools/publisher/control_qt.py diff --git a/openpype/tools/publisher/__init__.py b/openpype/tools/publisher/__init__.py index a7b597eece..e69de29bb2 100644 --- a/openpype/tools/publisher/__init__.py +++ b/openpype/tools/publisher/__init__.py @@ -1,7 +0,0 @@ -from .app import show -from .window import PublisherWindow - -__all__ = ( - "show", - "PublisherWindow" -) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 481fb5981b..af0556afc5 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -14,14 +14,13 @@ from openpype.pipeline import ( ) from openpype.pipeline.create import CreateContext -from Qt import QtCore - # Define constant for plugin orders offset PLUGIN_ORDER_OFFSET = 0.5 class MainThreadItem: """Callback with args and kwargs.""" + def __init__(self, callback, *args, **kwargs): self.callback = callback self.args = args @@ -31,64 +30,9 @@ class MainThreadItem: self.callback(*self.args, **self.kwargs) -class MainThreadProcess(QtCore.QObject): - """Qt based main thread process executor. - - Has timer which controls each 50ms if there is new item to process. - - This approach gives ability to update UI meanwhile plugin is in progress. - """ - - count_timeout = 2 - - def __init__(self): - super(MainThreadProcess, self).__init__() - self._items_to_process = collections.deque() - - timer = QtCore.QTimer() - timer.setInterval(0) - - timer.timeout.connect(self._execute) - - self._timer = timer - self._switch_counter = self.count_timeout - - def process(self, func, *args, **kwargs): - item = MainThreadItem(func, *args, **kwargs) - self.add_item(item) - - def add_item(self, item): - self._items_to_process.append(item) - - def _execute(self): - if not self._items_to_process: - return - - if self._switch_counter > 0: - self._switch_counter -= 1 - return - - self._switch_counter = self.count_timeout - - item = self._items_to_process.popleft() - item.process() - - def start(self): - if not self._timer.isActive(): - self._timer.start() - - def stop(self): - if self._timer.isActive(): - self._timer.stop() - - def clear(self): - if self._timer.isActive(): - self._timer.stop() - self._items_to_process = collections.deque() - - class AssetDocsCache: """Cache asset documents for creation part.""" + projection = { "_id": True, "name": True, @@ -133,6 +77,7 @@ class PublishReport: Report keeps current state of publishing and currently processed plugin. 
""" + def __init__(self, controller): self.controller = controller self._publish_discover_result = None @@ -341,7 +286,7 @@ class PublishReport: return output -class PublisherController: +class PublisherController(object): """Middleware between UI, CreateContext and publish Context. Handle both creation and publishing parts. @@ -394,8 +339,6 @@ class PublisherController: pyblish.api.ValidatorOrder + PLUGIN_ORDER_OFFSET ) - # Qt based main thread processor - self._main_thread_processor = MainThreadProcess() # Plugin iterator self._main_thread_iter = None @@ -744,7 +687,7 @@ class PublisherController: self._publish_up_validation = False self._publish_finished = False self._publish_comment_is_set = False - self._main_thread_processor.clear() + self._main_thread_iter = self._publish_iterator() self._publish_context = pyblish.api.Context() # Make sure "comment" is set on publish context @@ -792,13 +735,12 @@ class PublisherController: self._publish_is_running = True self._emit_event("publish.process.started") - self._main_thread_processor.start() + self._publish_next_process() def _stop_publish(self): """Stop or pause publishing.""" self._publish_is_running = False - self._main_thread_processor.stop() self._emit_event("publish.process.stopped") @@ -837,7 +779,10 @@ class PublisherController: else: item = next(self._main_thread_iter) - self._main_thread_processor.add_item(item) + self._process_main_thread_item(item) + + def _process_main_thread_item(self, item): + item() def _publish_iterator(self): """Main logic center of publishing. diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py new file mode 100644 index 0000000000..add7c4c7e4 --- /dev/null +++ b/openpype/tools/publisher/control_qt.py @@ -0,0 +1,88 @@ +import collections + +from Qt import QtCore + +from .control import MainThreadItem, PublisherController + + +class MainThreadProcess(QtCore.QObject): + """Qt based main thread process executor. + + Has timer which controls each 50ms if there is new item to process. + + This approach gives ability to update UI meanwhile plugin is in progress. 
+ """ + + count_timeout = 2 + + def __init__(self): + super(MainThreadProcess, self).__init__() + self._items_to_process = collections.deque() + + timer = QtCore.QTimer() + timer.setInterval(0) + + timer.timeout.connect(self._execute) + + self._timer = timer + self._switch_counter = self.count_timeout + + def process(self, func, *args, **kwargs): + item = MainThreadItem(func, *args, **kwargs) + self.add_item(item) + + def add_item(self, item): + self._items_to_process.append(item) + + def _execute(self): + if not self._items_to_process: + return + + if self._switch_counter > 0: + self._switch_counter -= 1 + return + + self._switch_counter = self.count_timeout + + item = self._items_to_process.popleft() + item.process() + + def start(self): + if not self._timer.isActive(): + self._timer.start() + + def stop(self): + if self._timer.isActive(): + self._timer.stop() + + def clear(self): + if self._timer.isActive(): + self._timer.stop() + self._items_to_process = collections.deque() + + +class QtPublisherController(PublisherController): + def __init__(self, *args, **kwargs): + self._main_thread_processor = MainThreadProcess() + + super(QtPublisherController, self).__init__(*args, **kwargs) + + self._event_system.add_callback( + "publish.process.started", self._qt_on_publish_start + ) + self._event_system.add_callback( + "publish.process.stopped", self._qt_on_publish_stop + ) + + def _reset_publish(self): + super(QtPublisherController, self)._reset_publish() + self._main_thread_processor.clear() + + def _process_main_thread_item(self, item): + self._main_thread_processor.add_item(item) + + def _qt_on_publish_start(self): + self._main_thread_processor.start() + + def _qt_on_publish_stop(self): + self._main_thread_processor.stop() diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index aa5f08eed4..699cf6f1f9 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -11,7 +11,7 @@ from openpype.tools.utils import ( ) from .publish_report_viewer import PublishReportViewerWidget -from .control import PublisherController +from .control_qt import QtPublisherController from .widgets import ( OverviewWidget, ValidationsWidget, @@ -34,7 +34,7 @@ class PublisherWindow(QtWidgets.QDialog): default_width = 1300 default_height = 800 - def __init__(self, parent=None, reset_on_show=None): + def __init__(self, parent=None, controller=None, reset_on_show=None): super(PublisherWindow, self).__init__(parent) self.setWindowTitle("OpenPype publisher") @@ -59,7 +59,8 @@ class PublisherWindow(QtWidgets.QDialog): | on_top_flag ) - controller = PublisherController() + if controller is None: + controller = QtPublisherController() help_dialog = HelpDialog(controller, self) From a00dafb4b6cfa9fa5c1035ac320fbb4c429a45e3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 5 Oct 2022 17:30:47 +0200 Subject: [PATCH 1472/2550] change few attributes to private --- openpype/tools/publisher/control.py | 63 ++++++++++++++++------------- 1 file changed, 35 insertions(+), 28 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index af0556afc5..9abcc620a8 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -296,16 +296,17 @@ class PublisherController(object): headless (bool): Headless publishing. ATM not implemented or used. 
""" + _log = None + def __init__(self, dbcon=None, headless=False): - self.log = logging.getLogger("PublisherController") - self.host = registered_host() - self.headless = headless + self._host = registered_host() + self._headless = headless # Inner event system of controller self._event_system = EventSystem() - self.create_context = CreateContext( - self.host, dbcon, headless=headless, reset=False + self._create_context = CreateContext( + self._host, dbcon, headless=headless, reset=False ) # pyblish.api.Context @@ -349,6 +350,12 @@ class PublisherController(object): # Cacher of avalon documents self._asset_docs_cache = AssetDocsCache(self) + @property + def log(self): + if self._log is None: + self._log = logging.getLogger("PublisherController") + return self._log + @property def project_name(self): """Current project context defined by host. @@ -357,7 +364,7 @@ class PublisherController(object): str: Project name. """ - return self.host.get_current_context()["project_name"] + return self._host.get_current_context()["project_name"] @property def current_asset_name(self): @@ -367,7 +374,7 @@ class PublisherController(object): Union[str, None]: Asset name or None if asset is not set. """ - return self.host.get_current_context()["asset_name"] + return self._host.get_current_context()["asset_name"] @property def current_task_name(self): @@ -377,37 +384,37 @@ class PublisherController(object): Union[str, None]: Task name or None if task is not set. """ - return self.host.get_current_context()["task_name"] + return self._host.get_current_context()["task_name"] @property def instances(self): """Current instances in create context.""" - return self.create_context.instances + return self._create_context.instances @property def creators(self): """All creators loaded in create context.""" - return self.create_context.creators + return self._create_context.creators @property def manual_creators(self): """Creators that can be shown in create dialog.""" - return self.create_context.manual_creators + return self._create_context.manual_creators @property def host_is_valid(self): """Host is valid for creation.""" - return self.create_context.host_is_valid + return self._create_context.host_is_valid @property def publish_plugins(self): """Publish plugins.""" - return self.create_context.publish_plugins + return self._create_context.publish_plugins @property def plugins_with_defs(self): """Publish plugins with possible attribute definitions.""" - return self.create_context.plugins_with_defs + return self._create_context.plugins_with_defs @property def event_system(self): @@ -445,8 +452,8 @@ class PublisherController(object): def get_context_title(self): """Get context title for artist shown at the top of main window.""" context_title = None - if hasattr(self.host, "get_context_title"): - context_title = self.host.get_context_title() + if hasattr(self._host, "get_context_title"): + context_title = self._host.get_context_title() if context_title is None: context_title = os.environ.get("AVALON_APP_NAME") @@ -486,7 +493,7 @@ class PublisherController(object): self.save_changes() # Reset avalon context - self.create_context.reset_avalon_context() + self._create_context.reset_avalon_context() self._reset_plugins() # Publish part must be reset after plugins @@ -502,7 +509,7 @@ class PublisherController(object): self._resetting_plugins = True - self.create_context.reset_plugins() + self._create_context.reset_plugins() self._resetting_plugins = False @@ -515,10 +522,10 @@ class PublisherController(object): 
self._resetting_instances = True - self.create_context.reset_context_data() - with self.create_context.bulk_instances_collection(): - self.create_context.reset_instances() - self.create_context.execute_autocreators() + self._create_context.reset_context_data() + with self._create_context.bulk_instances_collection(): + self._create_context.reset_instances() + self._create_context.execute_autocreators() self._resetting_instances = False @@ -567,7 +574,7 @@ class PublisherController(object): """ _tmp_items = [] if include_context: - _tmp_items.append(self.create_context) + _tmp_items.append(self._create_context) for instance in instances: _tmp_items.append(instance) @@ -626,8 +633,8 @@ class PublisherController(object): def save_changes(self): """Save changes happened during creation.""" - if self.create_context.host_is_valid: - self.create_context.save_changes() + if self._create_context.host_is_valid: + self._create_context.save_changes() def remove_instances(self, instances): """""" @@ -635,7 +642,7 @@ class PublisherController(object): # reset is not required and save changes too. self.save_changes() - self.create_context.remove_instances(instances) + self._create_context.remove_instances(instances) self._emit_event("instances.refresh.finished") @@ -696,9 +703,9 @@ class PublisherController(object): # - must not be used for changing CreatedInstances during publishing! # QUESTION # - pop the key after first collector using it would be safest option? - self._publish_context.data["create_context"] = self.create_context + self._publish_context.data["create_context"] = self._create_context - self._publish_report.reset(self._publish_context, self.create_context) + self._publish_report.reset(self._publish_context, self._create_context) self._publish_validation_errors = [] self._publish_current_plugin_validation_errors = None self._publish_error = None From 9040047fdd75ad4f11c9a95036f095c3a3373d7b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 10:56:06 +0200 Subject: [PATCH 1473/2550] fix context validation title selection and sizes --- .../tools/publisher/widgets/validations_widget.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index fd9410df98..4fa5ed4902 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -79,6 +79,7 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): help_text_by_instance_id = {} context_validation = False + items = [] if ( not error_info or (len(error_info) == 1 and error_info[0][0] is None) @@ -87,8 +88,10 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): toggle_instance_btn.setArrowType(QtCore.Qt.NoArrow) description = self._prepare_description(error_info[0][1]) help_text_by_instance_id[None] = description + # Add fake item to have minimum size hint of view widget + items.append(QtGui.QStandardItem("Context")) + else: - items = [] for instance, exception in error_info: label = instance.data.get("label") or instance.data.get("name") item = QtGui.QStandardItem(label) @@ -101,7 +104,9 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): description = self._prepare_description(exception) help_text_by_instance_id[instance.id] = description - instances_model.invisibleRootItem().appendRows(items) + if items: + root_item = instances_model.invisibleRootItem() + root_item.appendRows(items) instances_view = 
ValidationErrorInstanceList(self) instances_view.setModel(instances_model) @@ -177,7 +182,6 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): """Mark this widget as selected on click.""" self.set_selected(True) - self._set_expanded(True) def current_desctiption_text(self): if self._context_validation: @@ -225,6 +229,7 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): self._change_style_property(selected) if selected: self.selected.emit(self._index) + self._set_expanded(True) def _on_toggle_btn_click(self): """Show/hide instances list.""" @@ -238,6 +243,9 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): elif expanded is self._expanded: return + if expanded and self._context_validation: + return + self._expanded = expanded self._view_widget.setVisible(expanded) if expanded: From baa8643f7d5a04a87ea98ba1a69f2e4525f5e1ca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 11:05:42 +0200 Subject: [PATCH 1474/2550] hide set current asset button if asset is not set --- openpype/tools/publisher/widgets/assets_widget.py | 9 ++++----- openpype/tools/publisher/widgets/create_widget.py | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/tools/publisher/widgets/assets_widget.py b/openpype/tools/publisher/widgets/assets_widget.py index 7a77c9e898..39bf3886ea 100644 --- a/openpype/tools/publisher/widgets/assets_widget.py +++ b/openpype/tools/publisher/widgets/assets_widget.py @@ -24,7 +24,6 @@ class CreateWidgetAssetsWidget(SingleSelectAssetsWidget): self.set_refresh_btn_visibility(False) self.set_current_asset_btn_visibility(False) - self._current_asset_name = None self._last_selection = None self._enabled = None @@ -69,13 +68,13 @@ class CreateWidgetAssetsWidget(SingleSelectAssetsWidget): self._last_selection = self.get_selected_asset_id() self._clear_selection() - def set_current_asset_name(self, asset_name): - self._current_asset_name = asset_name + def update_current_asset(self): # Hide set current asset if there is no one - self.set_current_asset_btn_visibility(asset_name is not None) + asset_name = self._get_current_session_asset() + self.set_current_asset_btn_visibility(bool(asset_name)) def _get_current_session_asset(self): - return self._current_asset_name + return self._controller.current_asset_name def _create_source_model(self): return AssetsHierarchyModel(self._controller) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index fb4d1dd718..4c9fa63d24 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -422,7 +422,7 @@ class CreateWidget(QtWidgets.QWidget): # data self._refresh_creators() - self._assets_widget.set_current_asset_name(self.current_asset_name) + self._assets_widget.update_current_asset() self._assets_widget.select_asset_by_name(asset_name) self._tasks_widget.set_asset_name(asset_name) self._tasks_widget.select_task_name(task_name) From 284b82a649ff15cd30f523488ae6f71fb39f8866 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 11:43:08 +0200 Subject: [PATCH 1475/2550] Fix - missed sync published version of workfile with workfile If Collect Version is enabled, everything published from workfile should carry its version number. 
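For illustration (this sketch is not part of the patch and is not the full
plugin), the collector behaves roughly as follows once `workfile` is added to
its families; the final assignment is assumed from the surrounding context of
the hunk below rather than shown by it:

    import pyblish.api

    class CollectVersionSketch(pyblish.api.InstancePlugin):
        """Condensed sketch of CollectVersion after this change."""

        order = pyblish.api.CollectorOrder + 0.200
        label = "Collect Version"
        hosts = ["photoshop"]
        families = ["image", "review", "workfile"]  # "workfile" newly included

        def process(self, instance):
            workfile_version = instance.context.data["version"]
            # Assumed propagation step - the hunk below only shows the start
            # of process(), so treat this line as illustrative.
            instance.data["version"] = workfile_version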
--- openpype/hosts/photoshop/plugins/publish/collect_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_version.py b/openpype/hosts/photoshop/plugins/publish/collect_version.py index aff9f13bfb..dbfa1fdbec 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_version.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_version.py @@ -16,7 +16,7 @@ class CollectVersion(pyblish.api.InstancePlugin): label = 'Collect Version' hosts = ["photoshop"] - families = ["image", "review"] + families = ["image", "review", "workfile"] def process(self, instance): workfile_version = instance.context.data["version"] From 277f116033d8aa78610245dd5dba626f948b063f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 11:51:45 +0200 Subject: [PATCH 1476/2550] Added bit of documentation --- openpype/hosts/photoshop/plugins/publish/collect_version.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_version.py b/openpype/hosts/photoshop/plugins/publish/collect_version.py index dbfa1fdbec..cda71d8643 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_version.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_version.py @@ -7,10 +7,15 @@ class CollectVersion(pyblish.api.InstancePlugin): Used to synchronize version from workfile to all publishable instances: - image (manually created or color coded) - review + - workfile Dev comment: Explicit collector created to control this from single place and not from 3 different. + + Workfile set here explicitly as version might to be forced from latest + 1 + because of Webpublisher. + (This plugin must run after CollectPublishedVersion!) """ order = pyblish.api.CollectorOrder + 0.200 label = 'Collect Version' From 640971a49bf2398a5216b0083d018cf794cc4bcf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 13:40:37 +0200 Subject: [PATCH 1477/2550] OP-4181 - modified signature of new_hero_version_doc --- openpype/client/operations.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 1f2727599c..fd639c34a7 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -163,13 +163,13 @@ def new_version_doc(version, subset_id, data=None, entity_id=None): } -def new_hero_version_doc(version_id, parent_id, data=None, entity_id=None): +def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None): """Create skeleton data of hero version document. Args: version_id (ObjectId): Is considered as unique identifier of version under subset. - parent_id (Union[str, ObjectId]): Id of parent subset. + subset_id (Union[str, ObjectId]): Id of parent subset. data (Dict[str, Any]): Version document data. entity_id (Union[str, ObjectId]): Predefined id of document. New id is created if not passed. 
@@ -186,7 +186,7 @@ def new_hero_version_doc(version_id, parent_id, data=None, entity_id=None): "schema": CURRENT_HERO_VERSION_SCHEMA, "type": "hero_version", "version_id": version_id, - "parent": parent_id, + "parent": subset_id, "data": data } From 92cd6b60dfb750e562a1aba61020b4b5c077d083 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 13:47:22 +0200 Subject: [PATCH 1478/2550] added abstract controller for UI --- openpype/tools/publisher/control.py | 236 +++++++++++++++++++++++++++- 1 file changed, 235 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 9abcc620a8..09f6555d69 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -3,7 +3,9 @@ import copy import logging import traceback import collections +from abc import ABCMeta, abstractmethod, abstractproperty +import six import pyblish.api from openpype.client import get_assets @@ -286,7 +288,239 @@ class PublishReport: return output -class PublisherController(object): + + +@six.add_metaclass(ABCMeta) +class AbstractPublisherController(object): + """Publisher tool controller. + + Define what must be implemented to be able use Publisher functionality. + + Goal is to have "data driven" controller that can be used to control UI + running in different process. That lead to some "" + """ + + _log = None + _event_system = None + + @property + def log(self): + """Controller's logger object. + + Returns: + logging.Logger: Logger object that can be used for logging. + """ + + if self._log is None: + self._log = logging.getLogget(self.__class__.__name__) + return self._log + + @property + def event_system(self): + """Inner event system for publisher controller. + + Event system is autocreated. + + Known topics: + "show.detailed.help" - Detailed help requested (UI related). + "show.card.message" - Show card message request (UI related). + "instances.refresh.finished" - Instances are refreshed. + "plugins.refresh.finished" - Plugins refreshed. + "publish.reset.finished" - Controller reset finished. + "publish.process.started" - Publishing started. Can be started from + paused state. + "publish.process.validated" - Publishing passed validation. + "publish.process.stopped" - Publishing stopped/paused process. + "publish.process.plugin.changed" - Plugin state has changed. + "publish.process.instance.changed" - Instance state has changed. + + Returns: + EventSystem: Event system which can trigger callbacks for topics. + """ + + if self._event_system is None: + self._event_system = EventSystem() + return self._event_system + + @abstractproperty + def project_name(self): + """Current context project name. + + Returns: + str: Name of project. + """ + + pass + + @abstractproperty + def current_asset_name(self): + """Current context asset name. + + Returns: + Union[str, None]: Name of asset. + """ + + pass + + @abstractproperty + def current_task_name(self): + """Current context task name. + + Returns: + Union[str, None]: Name of task. + """ + + pass + + @abstractproperty + def instances(self): + """Collected/created instances. + + Returns: + List[CreatedInstance]: List of created instances. + """ + + pass + + @abstractmethod + def get_manual_creators_base_info(self): + """Creators that can be selected and triggered by artist. + + Returns: + List[CreatorBaseInfo]: Base information about creator plugin. 
+ """ + + pass + + @abstractmethod + def get_context_title(self): + """Get context title for artist shown at the top of main window. + + Returns: + Union[str, None]: Context title for window or None. In case of None + a warning is displayed (not nice for artists). + """ + + pass + + @abstractmethod + def get_asset_docs(self): + pass + + @abstractmethod + def get_asset_hierarchy(self): + pass + + @abstractmethod + def get_task_names_by_asset_names(self, asset_names): + pass + + @abstractmethod + def reset(self): + pass + + @abstractmethod + def emit_card_message(self, message): + pass + + @abstractmethod + def get_creator_attribute_definitions(self, instances): + pass + + @abstractmethod + def get_publish_attribute_definitions(self, instances, include_context): + pass + + @abstractmethod + def get_icon_for_family(self, family): + pass + + @abstractmethod + def create( + self, creator_identifier, subset_name, instance_data, options + ): + pass + + def save_changes(self): + """Save changes happened during creation.""" + + pass + + def remove_instances(self, instances): + """Remove list of instances.""" + + pass + + @abstractproperty + def publish_has_finished(self): + pass + + @abstractproperty + def publish_is_running(self): + pass + + @abstractproperty + def publish_has_validated(self): + pass + + @abstractproperty + def publish_has_crashed(self): + pass + + @abstractproperty + def publish_has_validation_errors(self): + pass + + @abstractproperty + def publish_max_progress(self): + pass + + @abstractproperty + def publish_progress(self): + pass + + @abstractproperty + def publish_comment_is_set(self): + pass + + @abstractmethod + def get_publish_crash_error(self): + pass + + @abstractmethod + def get_publish_report(self): + pass + + @abstractmethod + def get_validation_errors(self): + pass + + @abstractmethod + def set_comment(self, comment): + pass + + @abstractmethod + def publish(self): + pass + + @abstractmethod + def validate(self): + pass + + @abstractmethod + def stop_publish(self): + pass + + @abstractmethod + def run_action(self, plugin, action): + pass + + @abstractmethod + def reset_project_data_cache(self): + pass + + +class PublisherController(AbstractPublisherController): """Middleware between UI, CreateContext and publish Context. Handle both creation and publishing parts. From 6397db6e7956703de0776a90cb090d6f70bcabd7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 15:14:11 +0200 Subject: [PATCH 1479/2550] removed 'plugins_with_defs' attribute --- openpype/tools/publisher/control.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 09f6555d69..a5a7539369 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -645,11 +645,6 @@ class PublisherController(AbstractPublisherController): """Publish plugins.""" return self._create_context.publish_plugins - @property - def plugins_with_defs(self): - """Publish plugins with possible attribute definitions.""" - return self._create_context.plugins_with_defs - @property def event_system(self): """Inner event system for publisher controller. 
@@ -838,7 +833,7 @@ class PublisherController(AbstractPublisherController): attr_values.append((item, value)) output = [] - for plugin in self.plugins_with_defs: + for plugin in self._create_context.plugins_with_defs: plugin_name = plugin.__name__ if plugin_name not in all_defs_by_plugin_name: continue From 80103e60e8ffb431ab0696ce5e396096f5d0faeb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 15:14:59 +0200 Subject: [PATCH 1480/2550] changed 'creators' attribute to '_creators' --- openpype/tools/publisher/control.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index a5a7539369..c2816757d4 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -626,8 +626,9 @@ class PublisherController(AbstractPublisherController): return self._create_context.instances @property - def creators(self): + def _creators(self): """All creators loaded in create context.""" + return self._create_context.creators @property @@ -846,7 +847,7 @@ class PublisherController(AbstractPublisherController): def get_icon_for_family(self, family): """TODO rename to get creator icon.""" - creator = self.creators.get(family) + creator = self._creators.get(family) if creator is not None: return creator.get_icon() return None @@ -855,7 +856,7 @@ class PublisherController(AbstractPublisherController): self, creator_identifier, subset_name, instance_data, options ): """Trigger creation and refresh of instances in UI.""" - creator = self.creators[creator_identifier] + creator = self._creators[creator_identifier] creator.create(subset_name, instance_data, options) self._emit_event("instances.refresh.finished") From 71cca8e74288284135484183ae24647acdfa5dea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 15:15:46 +0200 Subject: [PATCH 1481/2550] changed 'publish_plugins' attribute to '_publish_plugins' --- openpype/tools/publisher/control.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index c2816757d4..6a73989ae8 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -642,7 +642,7 @@ class PublisherController(AbstractPublisherController): return self._create_context.host_is_valid @property - def publish_plugins(self): + def _publish_plugins(self): """Publish plugins.""" return self._create_context.publish_plugins @@ -681,6 +681,7 @@ class PublisherController(AbstractPublisherController): def get_context_title(self): """Get context title for artist shown at the top of main window.""" + context_title = None if hasattr(self._host, "get_context_title"): context_title = self._host.get_context_title() @@ -913,7 +914,7 @@ class PublisherController(AbstractPublisherController): return self._publish_error def get_publish_report(self): - return self._publish_report.get_report(self.publish_plugins) + return self._publish_report.get_report(self._publish_plugins) def get_validation_errors(self): return self._publish_validation_errors @@ -940,7 +941,7 @@ class PublisherController(AbstractPublisherController): self._publish_current_plugin_validation_errors = None self._publish_error = None - self._publish_max_progress = len(self.publish_plugins) + self._publish_max_progress = len(self._publish_plugins) self._publish_progress = 0 self._emit_event("publish.reset.finished") @@ -1034,7 +1035,7 @@ class 
PublisherController(AbstractPublisherController): QUESTION: Does validate button still make sense? """ - for idx, plugin in enumerate(self.publish_plugins): + for idx, plugin in enumerate(self._publish_plugins): self._publish_progress = idx # Reset current plugin validations error From c232e812396cd00681b6badd9a49f63931d96b44 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 15:17:11 +0200 Subject: [PATCH 1482/2550] removed doubled event system --- openpype/tools/publisher/control.py | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 6a73989ae8..57098f8734 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -536,9 +536,6 @@ class PublisherController(AbstractPublisherController): self._host = registered_host() self._headless = headless - # Inner event system of controller - self._event_system = EventSystem() - self._create_context = CreateContext( self._host, dbcon, headless=headless, reset=False ) @@ -646,29 +643,6 @@ class PublisherController(AbstractPublisherController): """Publish plugins.""" return self._create_context.publish_plugins - @property - def event_system(self): - """Inner event system for publisher controller. - - Known topics: - "show.detailed.help" - Detailed help requested (UI related). - "show.card.message" - Show card message request (UI related). - "instances.refresh.finished" - Instances are refreshed. - "plugins.refresh.finished" - Plugins refreshed. - "publish.reset.finished" - Controller reset finished. - "publish.process.started" - Publishing started. Can be started from - paused state. - "publish.process.validated" - Publishing passed validation. - "publish.process.stopped" - Publishing stopped/paused process. - "publish.process.plugin.changed" - Plugin state has changed. - "publish.process.instance.changed" - Instance state has changed. - - Returns: - EventSystem: Event system which can trigger callbacks for topics. 
- """ - - return self._event_system - def _emit_event(self, topic, data=None): if data is None: data = {} From 618137cf586bb4a15a7681464d35aec2c5bcf61c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 16:30:08 +0200 Subject: [PATCH 1483/2550] added root environments to launch environments --- openpype/lib/applications.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index e249ae4f1c..990dc7495a 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1403,6 +1403,7 @@ def get_app_environments_for_context( "env": env }) + data["env"].update(anatomy.root_environments()) prepare_app_environments(data, env_group, modules_manager) prepare_context_environments(data, env_group, modules_manager) From 2a2326971a677dc4bad6f87cfd05b9a03cf49574 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 6 Oct 2022 16:30:41 +0200 Subject: [PATCH 1484/2550] :sparkles: add validator for RenderSetupIncludeLights --- .../plugins/publish/submit_maya_deadline.py | 23 +++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 5021c0796b..e232571122 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -36,6 +36,17 @@ from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo +def _validate_deadline_bool_value(instance, attribute, value): + if not isinstance(value, (str, bool)): + raise TypeError( + "Attribute {} must be str or bool.".format(attribute)) + if value not in {"1", "0", True, False}: + raise ValueError( + ("Value of {} must be one of " + "'0', '1', True, False").format(attribute) + ) + + @attr.s class MayaPluginInfo: SceneFile = attr.ib(default=None) # Input @@ -46,7 +57,8 @@ class MayaPluginInfo: RenderLayer = attr.ib(default=None) # Render only this layer Renderer = attr.ib(default=None) ProjectPath = attr.ib(default=None) # Resolve relative references - RenderSetupIncludeLights = attr.ib(default=None) # Include all lights flag + RenderSetupIncludeLights = attr.ib( + default="1", validator=_validate_deadline_bool_value) # Include all lights flag @attr.s @@ -185,14 +197,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context + # Set it to default Maya behaviour if it cannot be determined + # from instance (but it should be, by the Collector). Also + rs_include_lights = instance.data.get("renderSetupIncludeLights", "1") + if rs_include_lights not in {"1", "0", True, False}: + rs_include_lights = "1" plugin_info = MayaPluginInfo( SceneFile=self.scene_path, Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], Renderer=instance.data["renderer"], - # Set it to default Maya behaviour if it cannot be determined - # from instance (but it should be, by the Collector). 
- RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights", 1), # noqa + RenderSetupIncludeLights=rs_include_lights, # noqa ProjectPath=context.data["workspaceDir"], UsingRenderLayers=True, ) From 87584b5f4976db087d04e738f0fdfec7fd56f773 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 6 Oct 2022 16:34:19 +0200 Subject: [PATCH 1485/2550] :rotating_light: fix hound :dog: --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index e232571122..3f0905c586 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -57,8 +57,9 @@ class MayaPluginInfo: RenderLayer = attr.ib(default=None) # Render only this layer Renderer = attr.ib(default=None) ProjectPath = attr.ib(default=None) # Resolve relative references + # Include all lights flag RenderSetupIncludeLights = attr.ib( - default="1", validator=_validate_deadline_bool_value) # Include all lights flag + default="1", validator=_validate_deadline_bool_value) @attr.s From 99e9c2d14f90fa1e7782169c8f67b36240426153 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 16:47:09 +0200 Subject: [PATCH 1486/2550] pass instance to get_subset_name on update of existing instance subset name --- openpype/pipeline/create/creator_plugins.py | 13 ++++++++++++- openpype/tools/publisher/widgets/widgets.py | 6 +++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 945a97a99c..4e77146838 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -257,7 +257,13 @@ class BaseCreator: return {} def get_subset_name( - self, variant, task_name, asset_doc, project_name, host_name=None + self, + variant, + task_name, + asset_doc, + project_name, + host_name=None, + instance=None ): """Return subset name for passed context. @@ -271,12 +277,17 @@ class BaseCreator: Asset document is not used yet but is required if would like to use task type in subset templates. + Method is also called on subset name update. In that case origin + instance is passed in. + Args: variant(str): Subset name variant. In most of cases user input. task_name(str): For which task subset is created. asset_doc(dict): Asset document for which subset is created. project_name(str): Project name. host_name(str): Which host creates subset. + instance(str|None): Object of 'CreatedInstance' for which is + subset name updated. Passed only on subset name update. 
""" dynamic_data = self.get_dynamic_data( diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d1fa71343c..7fdceff68f 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1080,7 +1080,11 @@ class GlobalAttrsWidget(QtWidgets.QWidget): try: new_subset_name = instance.creator.get_subset_name( - new_variant_value, new_task_name, asset_doc, project_name + new_variant_value, + new_task_name, + asset_doc, + project_name, + instance=instance ) except TaskNotSetError: invalid_tasks = True From c722c81a08f2218dc3e8cb6a6f6ef34635b8beae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 6 Oct 2022 16:49:04 +0200 Subject: [PATCH 1487/2550] Fix error on `param=None` for HDA file references --- .../hosts/houdini/plugins/publish/validate_workfile_paths.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 79b3e894e5..0bd78ff38a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -35,6 +35,9 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): def get_invalid(cls): invalid = [] for param, _ in hou.fileReferences(): + if param is None: + continue + # skip nodes we are not interested in if param.node().type().name() not in cls.node_types: continue From a7077a0abd65c4df0506e5d22be502ee03dc2c40 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 16:53:17 +0200 Subject: [PATCH 1488/2550] pass the instance to 'get_dynamic_data' too --- openpype/pipeline/create/creator_plugins.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 4e77146838..05ba8902aa 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -246,7 +246,7 @@ class BaseCreator: return self.icon def get_dynamic_data( - self, variant, task_name, asset_doc, project_name, host_name + self, variant, task_name, asset_doc, project_name, host_name, instance ): """Dynamic data for subset name filling. 
@@ -291,7 +291,7 @@ class BaseCreator: """ dynamic_data = self.get_dynamic_data( - variant, task_name, asset_doc, project_name, host_name + variant, task_name, asset_doc, project_name, host_name, instance ) return get_subset_name( From a7150bd6f1c9494734f03265eecbe86ff284d882 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 17:28:30 +0200 Subject: [PATCH 1489/2550] OP-4181 - clean up after review comments --- igniter/GPUCache/data_0 | Bin 0 -> 8192 bytes igniter/GPUCache/data_1 | Bin 0 -> 270336 bytes igniter/GPUCache/data_2 | Bin 0 -> 8192 bytes igniter/GPUCache/data_3 | Bin 0 -> 8192 bytes igniter/GPUCache/index | Bin 0 -> 262512 bytes openpype/hooks/pre_python2_prelaunch.py | 35 + openpype/hosts/photoshop/tests/expr.py | 51 + openpype/lib/token | 1 + .../event_handlers_user/action_edl_create.py | 275 ++++ openpype/pipeline/temp_anatomy.py | 1330 +++++++++++++++++ .../plugins/publish/integrate_hero_version.py | 9 +- .../_process_referenced_pipeline_result.json | 92 ++ tests/unit/test_unzip.py | 11 + vendor/configs/OpenColorIO-Configs | 1 + vendor/instance.json | 1133 ++++++++++++++ vendor/response.json | 1 + vendor/temp.json | 46 + 17 files changed, 2982 insertions(+), 3 deletions(-) create mode 100644 igniter/GPUCache/data_0 create mode 100644 igniter/GPUCache/data_1 create mode 100644 igniter/GPUCache/data_2 create mode 100644 igniter/GPUCache/data_3 create mode 100644 igniter/GPUCache/index create mode 100644 openpype/hooks/pre_python2_prelaunch.py create mode 100644 openpype/hosts/photoshop/tests/expr.py create mode 100644 openpype/lib/token create mode 100644 openpype/modules/ftrack/event_handlers_user/action_edl_create.py create mode 100644 openpype/pipeline/temp_anatomy.py create mode 100644 tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json create mode 100644 tests/unit/test_unzip.py create mode 160000 vendor/configs/OpenColorIO-Configs create mode 100644 vendor/instance.json create mode 100644 vendor/response.json create mode 100644 vendor/temp.json diff --git a/igniter/GPUCache/data_0 b/igniter/GPUCache/data_0 new file mode 100644 index 0000000000000000000000000000000000000000..d76fb77e93ac8a536b5dbade616d63abd00626c5 GIT binary patch literal 8192 zcmeIuK?wjL5Jka{7-jo+5O1auw}mk8@B+*}b0s6M>Kg$91PBlyK!5-N0t5&UAV7cs W0RjXF5FkK+009C72oNCfo4^Gh&;oe? 
literal 0
HcmV?d00001

diff --git a/igniter/GPUCache/data_1 b/igniter/GPUCache/data_1
new file mode 100644
index 0000000000..212f73166781160e472f8e76c3b9998b3775ecb7
GIT binary patch
literal 270336
[base85-encoded binary payload omitted]

literal 0
HcmV?d00001

diff --git a/igniter/GPUCache/data_2 b/igniter/GPUCache/data_2
new file mode 100644
index 0000000000..c7e2eb9adcfb2d3313ec85f5c28cedda950a3f9b
GIT binary patch
literal 8192
[base85-encoded binary payload omitted]

literal 0
HcmV?d00001

diff --git a/igniter/GPUCache/data_3 b/igniter/GPUCache/data_3
new file mode 100644
index 0000000000..5eec97358cf550862fd343fc9a73c159d4c0ab10
GIT binary patch
literal 8192
[base85-encoded binary payload omitted]

literal 0
HcmV?d00001

diff --git a/igniter/GPUCache/index b/igniter/GPUCache/index
new file mode 100644
index 0000000000..b2998cfef1a6457e5cfe9dc37e029bdbe0a7f778
GIT binary patch
literal 262512
[base85-encoded binary payload omitted]
file mode 100644 index 0000000000..7ac139ae63 --- /dev/null +++ b/openpype/modules/ftrack/event_handlers_user/action_edl_create.py @@ -0,0 +1,275 @@ +import os +import subprocess +import tempfile +import shutil +import json +import sys + +import opentimelineio as otio +import ftrack_api +import requests + +from openpype_modules.ftrack.lib import BaseAction + + +def download_file(url, path): + with open(path, "wb") as f: + print("\nDownloading %s" % path) + response = requests.get(url, stream=True) + total_length = response.headers.get('content-length') + + if total_length is None: + f.write(response.content) + else: + dl = 0 + total_length = int(total_length) + for data in response.iter_content(chunk_size=4096): + dl += len(data) + f.write(data) + done = int(50 * dl / total_length) + sys.stdout.write("\r[%s%s]" % ('=' * done, ' ' * (50-done))) + sys.stdout.flush() + + +class ExportEditorialAction(BaseAction): + '''Export Editorial action''' + + label = "Export Editorial" + variant = None + identifier = "export-editorial" + description = None + component_name_order = ["exr", "mov", "ftrackreview-mp4_src"] + + def export_editorial(self, entity, output_path): + session = ftrack_api.Session() + unmanaged_location = session.query( + "Location where name is \"ftrack.unmanaged\"" + ).one() + temp_path = tempfile.mkdtemp() + + files = {} + for obj in entity["review_session_objects"]: + data = {} + parent_name = obj["asset_version"]["asset"]["parent"]["name"] + component_query = "Component where version_id is \"{}\"" + component_query += " and name is \"{}\"" + for name in self.component_name_order: + try: + component = session.query( + component_query.format( + obj["asset_version"]["id"], name + ) + ).one() + path = unmanaged_location.get_filesystem_path(component) + data["path"] = path.replace("\\", "/") + break + except ftrack_api.exception.NoResultFoundError: + pass + + # Download online review if not local path found. + if "path" not in data: + component = session.query( + component_query.format( + obj["asset_version"]["id"], "ftrackreview-mp4" + ) + ).one() + location = component["component_locations"][0] + component_url = location["location"].get_url(component) + asset_name = obj["asset_version"]["asset"]["name"] + version = obj["asset_version"]["version"] + filename = "{}_{}_v{:03d}.mp4".format( + parent_name, asset_name, version + ) + filepath = os.path.join( + output_path, "downloads", filename + ).replace("\\", "/") + + if not os.path.exists(os.path.dirname(filepath)): + os.makedirs(os.path.dirname(filepath)) + + download_file(component_url, filepath) + data["path"] = filepath + + # Get frame duration and framerate. + query = "Component where version_id is \"{}\"" + query += " and name is \"ftrackreview-mp4\"" + component = session.query( + query.format(obj["asset_version"]["id"]) + ).one() + metadata = json.loads(component["metadata"]["ftr_meta"]) + data["framerate"] = metadata["frameRate"] + data["frames"] = metadata["frameOut"] - metadata["frameIn"] + + # Find audio if it exists. 
+ query = "Asset where parent.id is \"{}\"" + query += " and type.name is \"Audio\"" + asset = session.query( + query.format(obj["asset_version"]["asset"]["parent"]["id"]) + ) + if asset: + asset_version = asset[0]["versions"][-1] + query = "Component where version_id is \"{}\"" + query += " and name is \"{}\"" + comp = session.query( + query.format(asset_version["id"], "wav") + ).one() + src = unmanaged_location.get_filesystem_path(comp) + dst = os.path.join(temp_path, parent_name + ".wav") + shutil.copy(src, dst) + + # Collect data. + files[parent_name] = data + + clips = [] + for name, data in files.items(): + self.log.info("Processing {} with {}".format(name, data)) + f = data["path"] + range = otio.opentime.TimeRange( + start_time=otio.opentime.RationalTime(0, data["framerate"]), + duration=otio.opentime.RationalTime( + data["frames"], data["framerate"] + ) + ) + + media_reference = otio.schema.ExternalReference( + available_range=range, + target_url=f"file://{f}" + ) + + clip = otio.schema.Clip( + name=name, + media_reference=media_reference, + source_range=range + ) + clips.append(clip) + + # path = os.path.join(temp_path, name + ".wav").replace("\\", "/") + # if not os.path.exists(path): + # args = ["ffmpeg", "-y", "-i", f, path] + # self.log.info(subprocess.list2cmdline(args)) + # subprocess.call(args) + + timeline = otio.schema.timeline_from_clips(clips) + otio.adapters.write_to_file( + timeline, os.path.join(output_path, entity["name"] + ".xml") + ) + + data = "" + for f in os.listdir(temp_path): + f = f.replace("\\", "/") + data += f"file '{f}'\n" + + path = os.path.join(temp_path, "temp.txt") + with open(path, "w") as f: + f.write(data) + + args = [ + "ffmpeg", "-y", "-f", "concat", "-safe", "0", + "-i", os.path.basename(path), + os.path.join(output_path, entity["name"] + ".wav") + ] + self.log.info(subprocess.list2cmdline(args)) + subprocess.call(args, cwd=temp_path) + + shutil.rmtree(temp_path) + + def discover(self, session, entities, event): + '''Return true if we can handle the selected entities. + *session* is a `ftrack_api.Session` instance + *entities* is a list of tuples each containing the entity type and the + entity id. + If the entity is a hierarchical you will always get the entity + type TypedContext, once retrieved through a get operation you + will have the "real" entity type ie. example Shot, Sequence + or Asset Build. + *event* the unmodified original event + ''' + if len(entities) == 1: + if entities[0].entity_type == "ReviewSession": + return True + + return False + + def launch(self, session, entities, event): + '''Callback method for the custom action. + return either a bool ( True if successful or False if the action + failed ) or a dictionary with they keys `message` and `success`, the + message should be a string and will be displayed as feedback to the + user, success should be a bool, True if successful or False if the + action failed. + *session* is a `ftrack_api.Session` instance + *entities* is a list of tuples each containing the entity type and the + entity id. + If the entity is a hierarchical you will always get the entity + type TypedContext, once retrieved through a get operation you + will have the "real" entity type ie. example Shot, Sequence + or Asset Build. 
+ *event* the unmodified original event + ''' + if 'values' in event['data']: + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + job = session.create( + 'Job', + { + 'user': user, + 'status': 'running', + 'data': json.dumps({ + 'description': 'Export Editorial.' + }) + } + ) + session.commit() + + try: + output_path = event["data"]["values"]["output_path"] + + if not os.path.exists(output_path): + os.makedirs(output_path) + + self.export_editorial(entities[0], output_path) + + job['status'] = 'done' + session.commit() + except Exception: + session.rollback() + job["status"] = "failed" + session.commit() + self.log.error( + "Exporting editorial failed ({})", exc_info=True + ) + + return { + 'success': True, + 'message': 'Action completed successfully' + } + + items = [ + { + 'label': 'Output folder:', + 'type': 'text', + 'value': '', + 'name': 'output_path' + } + + ] + return { + 'success': True, + 'message': "", + 'items': items + } + + +def register(session): + '''Register action. Called when used as an event plugin.''' + + ExportEditorialAction(session).register() + + +if __name__ == "__main__": + session = ftrack_api.Session() + action = ExportEditorialAction(session) + id = "bfe0477c-d5a8-49d8-88b9-6d44d2e48fd9" + review_session = session.get("ReviewSession", id) + path = r"c:/projects" + action.export_editorial(review_session, path) \ No newline at end of file diff --git a/openpype/pipeline/temp_anatomy.py b/openpype/pipeline/temp_anatomy.py new file mode 100644 index 0000000000..27a9370928 --- /dev/null +++ b/openpype/pipeline/temp_anatomy.py @@ -0,0 +1,1330 @@ +import os +import re +import copy +import platform +import collections +import numbers + +import six +import time + +from openpype.settings.lib import ( + get_anatomy_settings, + get_project_settings, + get_default_project_settings, + get_local_settings +) + +from openpype.client import get_project +from openpype.lib.path_templates import ( + TemplateUnsolved, + TemplateResult, + TemplatesDict, + FormatObject, +) +from openpype.lib.log import Logger +from openpype.lib import get_local_site_id + +log = Logger.get_logger(__name__) + + +class ProjectNotSet(Exception): + """Exception raised when is created Anatomy without project name.""" + + +class RootCombinationError(Exception): + """This exception is raised when templates has combined root types.""" + + def __init__(self, roots): + joined_roots = ", ".join( + ["\"{}\"".format(_root) for _root in roots] + ) + # TODO better error message + msg = ( + "Combination of root with and" + " without root name in AnatomyTemplates. {}" + ).format(joined_roots) + + super(RootCombinationError, self).__init__(msg) + + +class BaseAnatomy(object): + """Anatomy module helps to keep project settings. + + Wraps key project specifications, AnatomyTemplates and Roots. 
+ """ + + def __init__(self, project_doc, local_settings): + project_name = project_doc["name"] + self.project_name = project_name + + self._data = self._prepare_anatomy_data( + project_doc, local_settings + ) + self._templates_obj = AnatomyTemplates(self) + self._roots_obj = Roots(self) + + root_key_regex = re.compile(r"{(root?[^}]+)}") + root_name_regex = re.compile(r"root\[([^]]+)\]") + + # Anatomy used as dictionary + # - implemented only getters returning copy + def __getitem__(self, key): + return copy.deepcopy(self._data[key]) + + def get(self, key, default=None): + return copy.deepcopy(self._data).get(key, default) + + def keys(self): + return copy.deepcopy(self._data).keys() + + def values(self): + return copy.deepcopy(self._data).values() + + def items(self): + return copy.deepcopy(self._data).items() + + @staticmethod + def _prepare_anatomy_data(anatomy_data): + """Prepare anatomy data for further processing. + + Method added to replace `{task}` with `{task[name]}` in templates. + """ + templates_data = anatomy_data.get("templates") + if templates_data: + # Replace `{task}` with `{task[name]}` in templates + value_queue = collections.deque() + value_queue.append(templates_data) + while value_queue: + item = value_queue.popleft() + if not isinstance(item, dict): + continue + + for key in tuple(item.keys()): + value = item[key] + if isinstance(value, dict): + value_queue.append(value) + + elif isinstance(value, six.string_types): + item[key] = value.replace("{task}", "{task[name]}") + return anatomy_data + + def reset(self): + """Reset values of cached data in templates and roots objects.""" + self._data = self._prepare_anatomy_data( + get_anatomy_settings(self.project_name, self._site_name) + ) + self.templates_obj.reset() + self.roots_obj.reset() + + @property + def templates(self): + """Wrap property `templates` of Anatomy's AnatomyTemplates instance.""" + return self._templates_obj.templates + + @property + def templates_obj(self): + """Return `AnatomyTemplates` object of current Anatomy instance.""" + return self._templates_obj + + def format(self, *args, **kwargs): + """Wrap `format` method of Anatomy's `templates_obj`.""" + return self._templates_obj.format(*args, **kwargs) + + def format_all(self, *args, **kwargs): + """Wrap `format_all` method of Anatomy's `templates_obj`.""" + return self._templates_obj.format_all(*args, **kwargs) + + @property + def roots(self): + """Wrap `roots` property of Anatomy's `roots_obj`.""" + return self._roots_obj.roots + + @property + def roots_obj(self): + """Return `Roots` object of current Anatomy instance.""" + return self._roots_obj + + def root_environments(self): + """Return OPENPYPE_ROOT_* environments for current project in dict.""" + return self._roots_obj.root_environments() + + def root_environmets_fill_data(self, template=None): + """Environment variable values in dictionary for rootless path. + + Args: + template (str): Template for environment variable key fill. + By default is set to `"${}"`. 
+ """ + return self.roots_obj.root_environmets_fill_data(template) + + def find_root_template_from_path(self, *args, **kwargs): + """Wrapper for Roots `find_root_template_from_path`.""" + return self.roots_obj.find_root_template_from_path(*args, **kwargs) + + def path_remapper(self, *args, **kwargs): + """Wrapper for Roots `path_remapper`.""" + return self.roots_obj.path_remapper(*args, **kwargs) + + def all_root_paths(self): + """Wrapper for Roots `all_root_paths`.""" + return self.roots_obj.all_root_paths() + + def set_root_environments(self): + """Set OPENPYPE_ROOT_* environments for current project.""" + self._roots_obj.set_root_environments() + + def root_names(self): + """Return root names for current project.""" + return self.root_names_from_templates(self.templates) + + def _root_keys_from_templates(self, data): + """Extract root key from templates in data. + + Args: + data (dict): Data that may contain templates as string. + + Return: + set: Set of all root names from templates as strings. + + Output example: `{"root[work]", "root[publish]"}` + """ + + output = set() + if isinstance(data, dict): + for value in data.values(): + for root in self._root_keys_from_templates(value): + output.add(root) + + elif isinstance(data, str): + for group in re.findall(self.root_key_regex, data): + output.add(group) + + return output + + def root_value_for_template(self, template): + """Returns value of root key from template.""" + root_templates = [] + for group in re.findall(self.root_key_regex, template): + root_templates.append("{" + group + "}") + + if not root_templates: + return None + + return root_templates[0].format(**{"root": self.roots}) + + def root_names_from_templates(self, templates): + """Extract root names form anatomy templates. + + Returns None if values in templates contain only "{root}". + Empty list is returned if there is no "root" in templates. + Else returns all root names from templates in list. + + RootCombinationError is raised when templates contain both root types, + basic "{root}" and with root name specification "{root[work]}". + + Args: + templates (dict): Anatomy templates where roots are not filled. + + Return: + list/None: List of all root names from templates as strings when + multiroot setup is used, otherwise None is returned. + """ + roots = list(self._root_keys_from_templates(templates)) + # Return empty list if no roots found in templates + if not roots: + return roots + + # Raise exception when root keys have roots with and without root name. + # Invalid output example: ["root", "root[project]", "root[render]"] + if len(roots) > 1 and "root" in roots: + raise RootCombinationError(roots) + + # Return None if "root" without root name in templates + if len(roots) == 1 and roots[0] == "root": + return None + + names = set() + for root in roots: + for group in re.findall(self.root_name_regex, root): + names.add(group) + return list(names) + + def fill_root(self, template_path): + """Fill template path where is only "root" key unfilled. + + Args: + template_path (str): Path with "root" key in. + Example path: "{root}/projects/MyProject/Shot01/Lighting/..." + + Return: + str: formatted path + """ + # NOTE does not care if there are different keys than "root" + return template_path.format(**{"root": self.roots}) + + @classmethod + def fill_root_with_path(cls, rootless_path, root_path): + """Fill path without filled "root" key with passed path. 
+ + This is helper to fill root with different directory path than anatomy + has defined no matter if is single or multiroot. + + Output path is same as input path if `rootless_path` does not contain + unfilled root key. + + Args: + rootless_path (str): Path without filled "root" key. Example: + "{root[work]}/MyProject/..." + root_path (str): What should replace root key in `rootless_path`. + + Returns: + str: Path with filled root. + """ + output = str(rootless_path) + for group in re.findall(cls.root_key_regex, rootless_path): + replacement = "{" + group + "}" + output = output.replace(replacement, root_path) + + return output + + def replace_root_with_env_key(self, filepath, template=None): + """Replace root of path with environment key. + + # Example: + ## Project with roots: + ``` + { + "nas": { + "windows": P:/projects", + ... + } + ... + } + ``` + + ## Entered filepath + "P:/projects/project/asset/task/animation_v001.ma" + + ## Entered template + "<{}>" + + ## Output + "/project/asset/task/animation_v001.ma" + + Args: + filepath (str): Full file path where root should be replaced. + template (str): Optional template for environment key. Must + have one index format key. + Default value if not entered: "${}" + + Returns: + str: Path where root is replaced with environment root key. + + Raise: + ValueError: When project's roots were not found in entered path. + """ + success, rootless_path = self.find_root_template_from_path(filepath) + if not success: + raise ValueError( + "{}: Project's roots were not found in path: {}".format( + self.project_name, filepath + ) + ) + + data = self.root_environmets_fill_data(template) + return rootless_path.format(**data) + + +class Anatomy(BaseAnatomy): + _project_cache = {} + + def __init__(self, project_name=None, site_name=None): + if not project_name: + project_name = os.environ.get("AVALON_PROJECT") + + if not project_name: + raise ProjectNotSet(( + "Implementation bug: Project name is not set. Anatomy requires" + " to load data for specific project." 
+ )) + + self._site_name = site_name + project_info = self.get_project_data_and_cache(project_name, site_name) + + super(Anatomy, self).__init__( + project_info["project_doc"], + project_info["local_settings"] + ) + + @classmethod + def get_project_data_and_cache(cls, project_name, site_name): + project_info = cls._project_cache.get(project_name) + if project_info is not None: + if time.time() - project_info["start"] > 10: + cls._project_cache.pop(project_name) + project_info = None + + if project_info is None: + if site_name is None: + if project_name: + project_settings = get_project_settings(project_name) + else: + project_settings = get_default_project_settings() + site_name = ( + project_settings["global"] + ["sync_server"] + ["config"] + ["active_site"] + ) + if site_name == "local": + site_name = get_local_site_id() + + project_info = { + "project_doc": get_project(project_name), + "local_settings": get_local_settings(site_name), + "site_name": site_name, + "start": time.time() + } + cls._project_cache[project_name] = project_info + + return project_info + + def reset(self): + """Reset values of cached data in templates and roots objects.""" + self._data = self._prepare_anatomy_data( + get_anatomy_settings(self.project_name, self._site_name) + ) + self.templates_obj.reset() + self.roots_obj.reset() + + +class AnatomyTemplateUnsolved(TemplateUnsolved): + """Exception for unsolved template when strict is set to True.""" + + msg = "Anatomy template \"{0}\" is unsolved.{1}{2}" + + +class AnatomyTemplateResult(TemplateResult): + rootless = None + + def __new__(cls, result, rootless_path): + new_obj = super(AnatomyTemplateResult, cls).__new__( + cls, + str(result), + result.template, + result.solved, + result.used_values, + result.missing_keys, + result.invalid_types + ) + new_obj.rootless = rootless_path + return new_obj + + def validate(self): + if not self.solved: + raise AnatomyTemplateUnsolved( + self.template, + self.missing_keys, + self.invalid_types + ) + + def copy(self): + tmp = TemplateResult( + str(self), + self.template, + self.solved, + self.used_values, + self.missing_keys, + self.invalid_types + ) + return self.__class__(tmp, self.rootless) + + def normalized(self): + """Convert to normalized path.""" + + tmp = TemplateResult( + os.path.normpath(self), + self.template, + self.solved, + self.used_values, + self.missing_keys, + self.invalid_types + ) + return self.__class__(tmp, self.rootless) + + +class AnatomyTemplates(TemplatesDict): + inner_key_pattern = re.compile(r"(\{@.*?[^{}0]*\})") + inner_key_name_pattern = re.compile(r"\{@(.*?[^{}0]*)\}") + + def __init__(self, anatomy): + super(AnatomyTemplates, self).__init__() + self.anatomy = anatomy + self.loaded_project = None + + def __getitem__(self, key): + return self.templates[key] + + def get(self, key, default=None): + return self.templates.get(key, default) + + def reset(self): + self._raw_templates = None + self._templates = None + self._objected_templates = None + + @property + def project_name(self): + return self.anatomy.project_name + + @property + def roots(self): + return self.anatomy.roots + + @property + def templates(self): + self._validate_discovery() + return self._templates + + @property + def objected_templates(self): + self._validate_discovery() + return self._objected_templates + + def _validate_discovery(self): + if self.project_name != self.loaded_project: + self.reset() + + if self._templates is None: + self._discover() + self.loaded_project = self.project_name + + def _format_value(self, value, 
data): + if isinstance(value, RootItem): + return self._solve_dict(value, data) + + result = super(AnatomyTemplates, self)._format_value(value, data) + if isinstance(result, TemplateResult): + rootless_path = self._rootless_path(result, data) + result = AnatomyTemplateResult(result, rootless_path) + return result + + def set_templates(self, templates): + if not templates: + self.reset() + return + + self._raw_templates = copy.deepcopy(templates) + templates = copy.deepcopy(templates) + v_queue = collections.deque() + v_queue.append(templates) + while v_queue: + item = v_queue.popleft() + if not isinstance(item, dict): + continue + + for key in tuple(item.keys()): + value = item[key] + if isinstance(value, dict): + v_queue.append(value) + + elif ( + isinstance(value, six.string_types) + and "{task}" in value + ): + item[key] = value.replace("{task}", "{task[name]}") + + solved_templates = self.solve_template_inner_links(templates) + self._templates = solved_templates + self._objected_templates = self.create_ojected_templates( + solved_templates + ) + + def default_templates(self): + """Return default templates data with solved inner keys.""" + return self.solve_template_inner_links( + self.anatomy["templates"] + ) + + def _discover(self): + """ Loads anatomy templates from yaml. + Default templates are loaded if project is not set or project does + not have set it's own. + TODO: create templates if not exist. + + Returns: + TemplatesResultDict: Contain templates data for current project of + default templates. + """ + + if self.project_name is None: + # QUESTION create project specific if not found? + raise AssertionError(( + "Project \"{0}\" does not have his own templates." + " Trying to use default." + ).format(self.project_name)) + + self.set_templates(self.anatomy["templates"]) + + @classmethod + def replace_inner_keys(cls, matches, value, key_values, key): + """Replacement of inner keys in template values.""" + for match in matches: + anatomy_sub_keys = ( + cls.inner_key_name_pattern.findall(match) + ) + if key in anatomy_sub_keys: + raise ValueError(( + "Unsolvable recursion in inner keys, " + "key: \"{}\" is in his own value." + " Can't determine source, please check Anatomy templates." + ).format(key)) + + for anatomy_sub_key in anatomy_sub_keys: + replace_value = key_values.get(anatomy_sub_key) + if replace_value is None: + raise KeyError(( + "Anatomy templates can't be filled." + " Anatomy key `{0}` has" + " invalid inner key `{1}`." + ).format(key, anatomy_sub_key)) + + if not ( + isinstance(replace_value, numbers.Number) + or isinstance(replace_value, six.string_types) + ): + raise ValueError(( + "Anatomy templates can't be filled." + " Anatomy key `{0}` has" + " invalid inner key `{1}`" + " with value `{2}`." + ).format(key, anatomy_sub_key, str(replace_value))) + + value = value.replace(match, str(replace_value)) + + return value + + @classmethod + def prepare_inner_keys(cls, key_values): + """Check values of inner keys. + + Check if inner key exist in template group and has valid value. + It is also required to avoid infinite loop with unsolvable recursion + when first inner key's value refers to second inner key's value where + first is used. 
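replace_inner_keys and prepare_inner_keys above resolve "{@key}" references by substituting values from the same template group until no markers remain. A simplified, flat-dictionary sketch of that fixed-point substitution (the real methods also handle nested groups and raise on self-referencing keys):

import re

inner_key_pattern = re.compile(r"\{@([^{}]+)\}")

def solve_inner_keys(key_values):
    # Keep substituting "{@key}" markers until a full pass changes nothing.
    changed = True
    while changed:
        changed = False
        for key, value in key_values.items():
            new_value = inner_key_pattern.sub(
                lambda match: str(key_values[match.group(1)]), value
            )
            if new_value != value:
                key_values[key] = new_value
                changed = True
    return key_values

print(solve_inner_keys({
    "version": "v{version:0>3}",
    "file": "{asset}_{subset}_{@version}.{ext}",
}))
# {'version': 'v{version:0>3}', 'file': '{asset}_{subset}_v{version:0>3}.{ext}'}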
+ """ + keys_to_solve = set(key_values.keys()) + while True: + found = False + for key in tuple(keys_to_solve): + value = key_values[key] + + if isinstance(value, six.string_types): + matches = cls.inner_key_pattern.findall(value) + if not matches: + keys_to_solve.remove(key) + continue + + found = True + key_values[key] = cls.replace_inner_keys( + matches, value, key_values, key + ) + continue + + elif not isinstance(value, dict): + keys_to_solve.remove(key) + continue + + subdict_found = False + for _key, _value in tuple(value.items()): + matches = cls.inner_key_pattern.findall(_value) + if not matches: + continue + + subdict_found = True + found = True + key_values[key][_key] = cls.replace_inner_keys( + matches, _value, key_values, + "{}.{}".format(key, _key) + ) + + if not subdict_found: + keys_to_solve.remove(key) + + if not found: + break + + return key_values + + @classmethod + def solve_template_inner_links(cls, templates): + """Solve templates inner keys identified by "{@*}". + + Process is split into 2 parts. + First is collecting all global keys (keys in top hierarchy where value + is not dictionary). All global keys are set for all group keys (keys + in top hierarchy where value is dictionary). Value of a key is not + overridden in group if already contain value for the key. + + In second part all keys with "at" symbol in value are replaced with + value of the key afterward "at" symbol from the group. + + Args: + templates (dict): Raw templates data. + + Example: + templates:: + key_1: "value_1", + key_2: "{@key_1}/{filling_key}" + + group_1: + key_3: "value_3/{@key_2}" + + group_2: + key_2": "value_2" + key_4": "value_4/{@key_2}" + + output:: + key_1: "value_1" + key_2: "value_1/{filling_key}" + + group_1: { + key_1: "value_1" + key_2: "value_1/{filling_key}" + key_3: "value_3/value_1/{filling_key}" + + group_2: { + key_1: "value_1" + key_2: "value_2" + key_4: "value_3/value_2" + """ + default_key_values = templates.pop("defaults", {}) + for key, value in tuple(templates.items()): + if isinstance(value, dict): + continue + default_key_values[key] = templates.pop(key) + + # Pop "others" key before before expected keys are processed + other_templates = templates.pop("others") or {} + + keys_by_subkey = {} + for sub_key, sub_value in templates.items(): + key_values = {} + key_values.update(default_key_values) + key_values.update(sub_value) + keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) + + for sub_key, sub_value in other_templates.items(): + if sub_key in keys_by_subkey: + log.warning(( + "Key \"{}\" is duplicated in others. Skipping." 
+ ).format(sub_key)) + continue + + key_values = {} + key_values.update(default_key_values) + key_values.update(sub_value) + keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) + + default_keys_by_subkeys = cls.prepare_inner_keys(default_key_values) + + for key, value in default_keys_by_subkeys.items(): + keys_by_subkey[key] = value + + return keys_by_subkey + + def _dict_to_subkeys_list(self, subdict, pre_keys=None): + if pre_keys is None: + pre_keys = [] + output = [] + for key in subdict: + value = subdict[key] + result = list(pre_keys) + result.append(key) + if isinstance(value, dict): + for item in self._dict_to_subkeys_list(value, result): + output.append(item) + else: + output.append(result) + return output + + def _keys_to_dicts(self, key_list, value): + if not key_list: + return None + if len(key_list) == 1: + return {key_list[0]: value} + return {key_list[0]: self._keys_to_dicts(key_list[1:], value)} + + def _rootless_path(self, result, final_data): + used_values = result.used_values + missing_keys = result.missing_keys + template = result.template + invalid_types = result.invalid_types + if ( + "root" not in used_values + or "root" in missing_keys + or "{root" not in template + ): + return + + for invalid_type in invalid_types: + if "root" in invalid_type: + return + + root_keys = self._dict_to_subkeys_list({"root": used_values["root"]}) + if not root_keys: + return + + output = str(result) + for used_root_keys in root_keys: + if not used_root_keys: + continue + + used_value = used_values + root_key = None + for key in used_root_keys: + used_value = used_value[key] + if root_key is None: + root_key = key + else: + root_key += "[{}]".format(key) + + root_key = "{" + root_key + "}" + output = output.replace(str(used_value), root_key) + + return output + + def format(self, data, strict=True): + copy_data = copy.deepcopy(data) + roots = self.roots + if roots: + copy_data["root"] = roots + result = super(AnatomyTemplates, self).format(copy_data) + result.strict = strict + return result + + def format_all(self, in_data, only_keys=True): + """ Solves templates based on entered data. + + Args: + data (dict): Containing keys to be filled into template. + + Returns: + TemplatesResultDict: Output `TemplateResult` have `strict` + attribute set to False so accessing unfilled keys in templates + won't raise any exceptions. + """ + return self.format(in_data, strict=False) + + +class RootItem(FormatObject): + """Represents one item or roots. + + Holds raw data of root item specification. Raw data contain value + for each platform, but current platform value is used when object + is used for formatting of template. + + Args: + root_raw_data (dict): Dictionary containing root values by platform + names. ["windows", "linux" and "darwin"] + name (str, optional): Root name which is representing. Used with + multi root setup otherwise None value is expected. + parent_keys (list, optional): All dictionary parent keys. Values of + `parent_keys` are used for get full key which RootItem is + representing. Used for replacing root value in path with + formattable key. e.g. parent_keys == ["work"] -> {root[work]} + parent (object, optional): It is expected to be `Roots` object. + Value of `parent` won't affect code logic much. 
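_rootless_path above needs the chain of nested keys that produced each used root value, which is what _dict_to_subkeys_list computes. A small standalone version of that helper pattern:

def dict_to_subkeys_list(subdict, pre_keys=None):
    # Recursively collect the key path to every leaf value.
    pre_keys = pre_keys or []
    output = []
    for key, value in subdict.items():
        result = pre_keys + [key]
        if isinstance(value, dict):
            output.extend(dict_to_subkeys_list(value, result))
        else:
            output.append(result)
    return output

print(dict_to_subkeys_list({"root": {"work": "C:/projects"}}))
# [['root', 'work']]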
+ """ + + def __init__( + self, root_raw_data, name=None, parent_keys=None, parent=None + ): + lowered_platform_keys = {} + for key, value in root_raw_data.items(): + lowered_platform_keys[key.lower()] = value + self.raw_data = lowered_platform_keys + self.cleaned_data = self._clean_roots(lowered_platform_keys) + self.name = name + self.parent_keys = parent_keys or [] + self.parent = parent + + self.available_platforms = list(lowered_platform_keys.keys()) + self.value = lowered_platform_keys.get(platform.system().lower()) + self.clean_value = self.clean_root(self.value) + + def __format__(self, *args, **kwargs): + return self.value.__format__(*args, **kwargs) + + def __str__(self): + return str(self.value) + + def __repr__(self): + return self.__str__() + + def __getitem__(self, key): + if isinstance(key, numbers.Number): + return self.value[key] + + additional_info = "" + if self.parent and self.parent.project_name: + additional_info += " for project \"{}\"".format( + self.parent.project_name + ) + + raise AssertionError( + "Root key \"{}\" is missing{}.".format( + key, additional_info + ) + ) + + def full_key(self): + """Full key value for dictionary formatting in template. + + Returns: + str: Return full replacement key for formatting. This helps when + multiple roots are set. In that case e.g. `"root[work]"` is + returned. + """ + if not self.name: + return "root" + + joined_parent_keys = "".join( + ["[{}]".format(key) for key in self.parent_keys] + ) + return "root{}".format(joined_parent_keys) + + def clean_path(self, path): + """Just replace backslashes with forward slashes.""" + return str(path).replace("\\", "/") + + def clean_root(self, root): + """Makes sure root value does not end with slash.""" + if root: + root = self.clean_path(root) + while root.endswith("/"): + root = root[:-1] + return root + + def _clean_roots(self, raw_data): + """Clean all values of raw root item values.""" + cleaned = {} + for key, value in raw_data.items(): + cleaned[key] = self.clean_root(value) + return cleaned + + def path_remapper(self, path, dst_platform=None, src_platform=None): + """Remap path for specific platform. + + Args: + path (str): Source path which need to be remapped. + dst_platform (str, optional): Specify destination platform + for which remapping should happen. + src_platform (str, optional): Specify source platform. This is + recommended to not use and keep unset until you really want + to use specific platform. + roots (dict/RootItem/None, optional): It is possible to remap + path with different roots then instance where method was + called has. + + Returns: + str/None: When path does not contain known root then + None is returned else returns remapped path with "{root}" + or "{root[]}". 
+ """ + cleaned_path = self.clean_path(path) + if dst_platform: + dst_root_clean = self.cleaned_data.get(dst_platform) + if not dst_root_clean: + key_part = "" + full_key = self.full_key() + if full_key != "root": + key_part += "\"{}\" ".format(full_key) + + log.warning( + "Root {}miss platform \"{}\" definition.".format( + key_part, dst_platform + ) + ) + return None + + if cleaned_path.startswith(dst_root_clean): + return cleaned_path + + if src_platform: + src_root_clean = self.cleaned_data.get(src_platform) + if src_root_clean is None: + log.warning( + "Root \"{}\" miss platform \"{}\" definition.".format( + self.full_key(), src_platform + ) + ) + return None + + if not cleaned_path.startswith(src_root_clean): + return None + + subpath = cleaned_path[len(src_root_clean):] + if dst_platform: + # `dst_root_clean` is used from upper condition + return dst_root_clean + subpath + return self.clean_value + subpath + + result, template = self.find_root_template_from_path(path) + if not result: + return None + + def parent_dict(keys, value): + if not keys: + return value + + key = keys.pop(0) + return {key: parent_dict(keys, value)} + + if dst_platform: + format_value = parent_dict(list(self.parent_keys), dst_root_clean) + else: + format_value = parent_dict(list(self.parent_keys), self.value) + + return template.format(**{"root": format_value}) + + def find_root_template_from_path(self, path): + """Replaces known root value with formattable key in path. + + All platform values are checked for this replacement. + + Args: + path (str): Path where root value should be found. + + Returns: + tuple: Tuple contain 2 values: `success` (bool) and `path` (str). + When success it True then path should contain replaced root + value with formattable key. + + Example: + When input path is:: + "C:/windows/path/root/projects/my_project/file.ext" + + And raw data of item looks like:: + { + "windows": "C:/windows/path/root", + "linux": "/mount/root" + } + + Output will be:: + (True, "{root}/projects/my_project/file.ext") + + If any of raw data value wouldn't match path's root output is:: + (False, "C:/windows/path/root/projects/my_project/file.ext") + """ + result = False + output = str(path) + + root_paths = list(self.cleaned_data.values()) + mod_path = self.clean_path(path) + for root_path in root_paths: + # Skip empty paths + if not root_path: + continue + + if mod_path.startswith(root_path): + result = True + replacement = "{" + self.full_key() + "}" + output = replacement + mod_path[len(root_path):] + break + + return (result, output) + + +class Roots: + """Object which should be used for formatting "root" key in templates. + + Args: + anatomy Anatomy: Anatomy object created for a specific project. + """ + + env_prefix = "OPENPYPE_PROJECT_ROOT" + roots_filename = "roots.json" + + def __init__(self, anatomy): + self.anatomy = anatomy + self.loaded_project = None + self._roots = None + + def __format__(self, *args, **kwargs): + return self.roots.__format__(*args, **kwargs) + + def __getitem__(self, key): + return self.roots[key] + + def reset(self): + """Reset current roots value.""" + self._roots = None + + def path_remapper( + self, path, dst_platform=None, src_platform=None, roots=None + ): + """Remap path for specific platform. + + Args: + path (str): Source path which need to be remapped. + dst_platform (str, optional): Specify destination platform + for which remapping should happen. + src_platform (str, optional): Specify source platform. 
This is + recommended to not use and keep unset until you really want + to use specific platform. + roots (dict/RootItem/None, optional): It is possible to remap + path with different roots then instance where method was + called has. + + Returns: + str/None: When path does not contain known root then + None is returned else returns remapped path with "{root}" + or "{root[]}". + """ + if roots is None: + roots = self.roots + + if roots is None: + raise ValueError("Roots are not set. Can't find path.") + + if "{root" in path: + path = path.format(**{"root": roots}) + # If `dst_platform` is not specified then return else continue. + if not dst_platform: + return path + + if isinstance(roots, RootItem): + return roots.path_remapper(path, dst_platform, src_platform) + + for _root in roots.values(): + result = self.path_remapper( + path, dst_platform, src_platform, _root + ) + if result is not None: + return result + + def find_root_template_from_path(self, path, roots=None): + """Find root value in entered path and replace it with formatting key. + + Args: + path (str): Source path where root will be searched. + roots (Roots/dict, optional): It is possible to use different + roots than instance where method was triggered has. + + Returns: + tuple: Output contains tuple with bool representing success as + first value and path with or without replaced root with + formatting key as second value. + + Raises: + ValueError: When roots are not entered and can't be loaded. + """ + if roots is None: + log.debug( + "Looking for matching root in path \"{}\".".format(path) + ) + roots = self.roots + + if roots is None: + raise ValueError("Roots are not set. Can't find path.") + + if isinstance(roots, RootItem): + return roots.find_root_template_from_path(path) + + for root_name, _root in roots.items(): + success, result = self.find_root_template_from_path(path, _root) + if success: + log.info("Found match in root \"{}\".".format(root_name)) + return success, result + + log.warning("No matching root was found in current setting.") + return (False, path) + + def set_root_environments(self): + """Set root environments for current project.""" + for key, value in self.root_environments().items(): + os.environ[key] = value + + def root_environments(self): + """Use root keys to create unique keys for environment variables. + + Concatenates prefix "OPENPYPE_ROOT" with root keys to create unique + keys. + + Returns: + dict: Result is `{(str): (str)}` dicitonary where key represents + unique key concatenated by keys and value is root value of + current platform root. 
+ + Example: + With raw root values:: + "work": { + "windows": "P:/projects/work", + "linux": "/mnt/share/projects/work", + "darwin": "/darwin/path/work" + }, + "publish": { + "windows": "P:/projects/publish", + "linux": "/mnt/share/projects/publish", + "darwin": "/darwin/path/publish" + } + + Result on windows platform:: + { + "OPENPYPE_ROOT_WORK": "P:/projects/work", + "OPENPYPE_ROOT_PUBLISH": "P:/projects/publish" + } + + Short example when multiroot is not used:: + { + "OPENPYPE_ROOT": "P:/projects" + } + """ + return self._root_environments() + + def all_root_paths(self, roots=None): + """Return all paths for all roots of all platforms.""" + if roots is None: + roots = self.roots + + output = [] + if isinstance(roots, RootItem): + for value in roots.raw_data.values(): + output.append(value) + return output + + for _roots in roots.values(): + output.extend(self.all_root_paths(_roots)) + return output + + def _root_environments(self, keys=None, roots=None): + if not keys: + keys = [] + if roots is None: + roots = self.roots + + if isinstance(roots, RootItem): + key_items = [self.env_prefix] + for _key in keys: + key_items.append(_key.upper()) + + key = "_".join(key_items) + # Make sure key and value does not contain unicode + # - can happen in Python 2 hosts + return {str(key): str(roots.value)} + + output = {} + for _key, _value in roots.items(): + _keys = list(keys) + _keys.append(_key) + output.update(self._root_environments(_keys, _value)) + return output + + def root_environmets_fill_data(self, template=None): + """Environment variable values in dictionary for rootless path. + + Args: + template (str): Template for environment variable key fill. + By default is set to `"${}"`. + """ + if template is None: + template = "${}" + return self._root_environmets_fill_data(template) + + def _root_environmets_fill_data(self, template, keys=None, roots=None): + if keys is None and roots is None: + return { + "root": self._root_environmets_fill_data( + template, [], self.roots + ) + } + + if isinstance(roots, RootItem): + key_items = [Roots.env_prefix] + for _key in keys: + key_items.append(_key.upper()) + key = "_".join(key_items) + return template.format(key) + + output = {} + for key, value in roots.items(): + _keys = list(keys) + _keys.append(key) + output[key] = self._root_environmets_fill_data( + template, _keys, value + ) + return output + + @property + def project_name(self): + """Return project name which will be used for loading root values.""" + return self.anatomy.project_name + + @property + def roots(self): + """Property for filling "root" key in templates. + + This property returns roots for current project or default root values. + Warning: + Default roots value may cause issues when project use different + roots settings. That may happen when project use multiroot + templates but default roots miss their keys. + """ + if self.project_name != self.loaded_project: + self._roots = None + + if self._roots is None: + self._roots = self._discover() + self.loaded_project = self.project_name + return self._roots + + def _discover(self): + """ Loads current project's roots or default. + + Default roots are loaded if project override's does not contain roots. + + Returns: + `RootItem` or `dict` with multiple `RootItem`s when multiroot + setting is used. + """ + + return self._parse_dict(self.anatomy["roots"], parent=self) + + @staticmethod + def _parse_dict(data, key=None, parent_keys=None, parent=None): + """Parse roots raw data into RootItem or dictionary with RootItems. 
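_root_environments above flattens the (possibly nested) roots into environment variable names by joining the upper-cased keys onto the class prefix. A stripped-down sketch using the OPENPYPE_PROJECT_ROOT prefix declared on Roots, with plain strings standing in for RootItem values:

def flatten_roots(roots, prefix="OPENPYPE_PROJECT_ROOT", keys=None):
    # Leaf values become "<prefix>_<KEY>..." entries; dicts recurse deeper.
    keys = keys or []
    if isinstance(roots, str):
        key = "_".join([prefix] + [k.upper() for k in keys])
        return {key: roots}
    output = {}
    for name, value in roots.items():
        output.update(flatten_roots(value, prefix, keys + [name]))
    return output

print(flatten_roots({
    "work": "P:/projects/work",
    "publish": "P:/projects/publish",
}))
# {'OPENPYPE_PROJECT_ROOT_WORK': 'P:/projects/work',
#  'OPENPYPE_PROJECT_ROOT_PUBLISH': 'P:/projects/publish'}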
+ + Converting raw roots data to `RootItem` helps to handle platform keys. + This method is recursive to be able handle multiroot setup and + is static to be able to load default roots without creating new object. + + Args: + data (dict): Should contain raw roots data to be parsed. + key (str, optional): Current root key. Set by recursion. + parent_keys (list): Parent dictionary keys. Set by recursion. + parent (Roots, optional): Parent object set in `RootItem` + helps to keep RootItem instance updated with `Roots` object. + + Returns: + `RootItem` or `dict` with multiple `RootItem`s when multiroot + setting is used. + """ + if not parent_keys: + parent_keys = [] + is_last = False + for value in data.values(): + if isinstance(value, six.string_types): + is_last = True + break + + if is_last: + return RootItem(data, key, parent_keys, parent=parent) + + output = {} + for _key, value in data.items(): + _parent_keys = list(parent_keys) + _parent_keys.append(_key) + output[_key] = Roots._parse_dict(value, _key, _parent_keys, parent) + return output diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index adc629352e..661975993b 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -14,7 +14,6 @@ from openpype.client import ( ) from openpype.client.operations import ( OperationsSession, - _create_or_convert_to_mongo_id, new_hero_version_doc, prepare_hero_version_update_data, prepare_representation_update_data, @@ -193,9 +192,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): op_session = OperationsSession() + entity_id = None + if old_version: + entity_id = old_version["_id"] new_hero_version = new_hero_version_doc( src_version_entity["_id"], - src_version_entity["parent"] + src_version_entity["parent"], + entity_id=entity_id ) if old_version: @@ -408,7 +411,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # Create representation else: - repre["_id"] = _create_or_convert_to_mongo_id(None) + repre.pop("_id", None) op_session.create_entity(project_name, "representation", repre) diff --git a/tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json b/tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json new file mode 100644 index 0000000000..fb798524bc --- /dev/null +++ b/tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json @@ -0,0 +1,92 @@ +[ + { + "_id": { + "$oid": "623c9d53db3f5046eb1ad5f4" + }, + "schema": "openpype:version-3.0", + "type": "version", + "parent": { + "$oid": "5f3e439a30a9464d6c181cbc" + }, + "name": 94, + "data": { + "families": [ + "workfile" + ], + "time": "20220324T173254Z", + "author": "petrk", + "source": "C:/projects_local/petr_test/assets/locations/Jungle/work/art/petr_test_Jungle_art_v009.psd", + "comment": "", + "machine": "LAPTOP-UB778LHG", + "fps": 25.0, + "intent": "-", + "inputLinks": [ + { + "type": "reference", + "id": { + "$oid": "618eb14f0a55a9c1591e913c" + }, + "linkedBy": "publish" + } + ] + }, + "outputs_recursive": [ + { + "_id": { + "$oid": "618eb14f0a55a9c1591e913c" + }, + "schema": "openpype:version-3.0", + "type": "version", + "parent": { + "$oid": "618e42a72ff49bd543bc1768" + }, + "name": 8, + "data": { + "families": [ + "image" + ], + "time": "20211112T192359Z", + "author": "petrk", + "source": "C:/projects_local/petr_test/assets/locations/Town/work/art/petr_test_Town_art_v005.psd", + "comment": "", + "machine": "LAPTOP-UB778LHG", + 
"fps": 25.0, + "intent": "-", + "inputLinks": [ + { + "type": "reference", + "id": { + "$oid": "5f3cd2d530a94638544837c3" + }, + "linkedBy": "publish" + } + ] + }, + "depth": 0 + }, + { + "_id": { + "$oid": "5f3cd2d530a94638544837c3" + }, + "schema": "pype:version-3.0", + "type": "version", + "parent": { + "$oid": "5f3a714030a9464bfc7d2382" + }, + "name": 7, + "data": { + "families": [ + "image" + ], + "time": "20200819T092032Z", + "author": "petrk", + "source": "/c/projects/petr_test/assets/characters/Hero/work/art/Hero_v019.psd", + "comment": "", + "machine": "LAPTOP-UB778LHG", + "fps": null + }, + "depth": 1 + } + ] + } +] \ No newline at end of file diff --git a/tests/unit/test_unzip.py b/tests/unit/test_unzip.py new file mode 100644 index 0000000000..586fc49b6f --- /dev/null +++ b/tests/unit/test_unzip.py @@ -0,0 +1,11 @@ + +from openpype.hosts.harmony.api.lib import _ZipFile +from pathlib import Path + +def test_zip(): + source = "c:/Users/petrk/Downloads/fbb_fbb100_sh0020_workfileAnimation_v010.zip" + dest = "c:/projects/temp/unzipped_with_python_111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\\2222222222222222222222222222222222222222222222222222222222222222222222222222222222" + + dest = Path(dest) + with _ZipFile(source, "r") as zip_ref: + zip_ref.extractall(dest.as_posix()) \ No newline at end of file diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs new file mode 160000 index 0000000000..0bb079c08b --- /dev/null +++ b/vendor/configs/OpenColorIO-Configs @@ -0,0 +1 @@ +Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 diff --git a/vendor/instance.json b/vendor/instance.json new file mode 100644 index 0000000000..b1d623e85d --- /dev/null +++ b/vendor/instance.json @@ -0,0 +1,1133 @@ +{ + 'family': 'render', + 'name': 'renderLightingDefault', + 'label': 'renderLightingDefault - local', + 'version': 1, + 'time': '', + 'source': 'C:/projects/petr_test/sequences/seq01/shot01/work/lighting/petr_test_shot01_lighting_v001.aep', + 'subset': 'renderLightingDefault', + 'asset': 'shot01', + 'attachTo': False, + 'setMembers': '', + 'publish': True, + 'resolutionWidth': 1920.0, + 'resolutionHeight': 1080.0, + 'pixelAspect': 1, + 'frameStart': 0, + 'frameEnd': 0, + 'frameStep': 1, + 'handleStart': 0, + 'handleEnd': 0, + 'ignoreFrameHandleCheck': False, + 'renderer': 'aerender', + 'review': True, + 'priority': 50, + 'families': [ + 'render', + 'review', + 'ftrack', + 'slack' + ], + 'multipartExr': False, + 'convertToScanline': False, + 'tileRendering': False, + 'tilesX': 0, + 'tilesY': 0, + 'toBeRenderedOn': 'deadline', + 'deadlineSubmissionJob': None, + 'anatomyData': { + 'project': { + 'name': 'petr_test', + 'code': 'petr_test' + }, + 'asset': 'shot01', + 'parent': 'seq01', + 'hierarchy': 'sequences/seq01', + 'task': { + 'name': 'lighting', + 'type': 'Lighting', + 'short': 'lgt' + }, + 'username': 'petrk', + 'app': 'aftereffects', + 'd': '6', + 'dd': '06', + 'ddd': 'Thu', + 'dddd': 'Thursday', + 'm': '1', + 'mm': '01', + 'mmm': 'Jan', + 'mmmm': 'January', + 'yy': '22', + 'yyyy': '2022', + 'H': '18', + 'HH': '18', + 'h': '6', + 'hh': '06', + 'ht': 'PM', + 'M': '14', + 'MM': '14', + 'S': '23', + 'SS': '23', + 'version': 1, + 'subset': 'renderLightingDefault', + 'family': 'render', + 'intent': '-' + }, + 'outputDir': 'C:/projects/petr_test/sequences/seq01/shot01/work/lighting\\renders\\aftereffects\\petr_test_shot01_lighting_v001', + 'comp_name': '℗ 
renderLightingDefault', + 'comp_id': 1, + 'fps': 25, + 'projectEntity': { + '_id': ObjectId( + '5f2a6d2311e06a9818a1958b' + ), + 'name': 'petr_test', + 'created_d': datetime.datetime(2020, + 9, + 17, + 15, + 27, + 27, + 927000), + 'data': { + 'ftrackId': 'e5eda2bc-d682-11ea-afc1-92591a5b5e3e', + 'entityType': 'Project', + 'applications': [ + 'maya_2019', + 'photoshop_2021', + 'photoshop_2022', + 'harmony_17', + 'aftereffects_2022', + 'harmony_20', + 'nukestudio_12.2', + 'nukex_12.2', + 'hiero_12.2', + 'blender_2.93' + ], + 'library_project': True, + 'clipIn': 1, + 'resolutionWidth': 1920.0, + 'handleEnd': 0, + 'frameEnd': 1001, + 'resolutionHeight': 1080.0, + 'frameStart': 1001.0, + 'pixelAspect': 1.0, + 'fps': 25.0, + 'handleStart': 0, + 'clipOut': 1, + 'tools_env': [], + 'code': 'petr_test', + 'active': True + }, + 'type': 'project', + 'config': { + 'apps': [ + { + 'name': 'aftereffects/2022' + }, + { + 'name': 'maya/2019' + }, + { + 'name': 'hiero/12-2' + }, + { + 'name': 'photoshop/2021' + }, + { + 'name': 'nuke/12-2' + }, + { + 'name': 'photoshop/2022' + } + ], + 'tasks': { + 'Layout': { + 'short_name': 'lay' + }, + 'Setdress': { + 'short_name': 'dress' + }, + 'Previz': { + 'short_name': '' + }, + 'Generic': { + 'short_name': 'gener' + }, + 'Animation': { + 'short_name': 'anim' + }, + 'Modeling': { + 'short_name': 'mdl' + }, + 'Lookdev': { + 'short_name': 'look' + }, + 'FX': { + 'short_name': 'fx' + }, + 'Lighting': { + 'short_name': 'lgt' + }, + 'Compositing': { + 'short_name': 'comp' + }, + 'Tracking': { + 'short_name': '' + }, + 'Rigging': { + 'short_name': 'rig' + }, + 'Paint': { + 'short_name': 'paint' + }, + 'schedulle': { + 'short_name': '' + }, + 'Art': { + 'short_name': 'art' + }, + 'Texture': { + 'short_name': 'tex' + }, + 'Edit': { + 'short_name': 'edit' + } + }, + 'imageio': { + 'hiero': { + 'workfile': { + 'ocioConfigName': 'nuke-default', + 'ocioconfigpath': { + 'windows': [], + 'darwin': [], + 'linux': [] + }, + 'workingSpace': 'linear', + 'sixteenBitLut': 'sRGB', + 'eightBitLut': 'sRGB', + 'floatLut': 'linear', + 'logLut': 'Cineon', + 'viewerLut': 'sRGB', + 'thumbnailLut': 'sRGB' + }, + 'regexInputs': { + 'inputs': [ + { + 'regex': '[^-a-zA-Z0-9](plateRef).*(?=mp4)', + 'colorspace': 'sRGB' + } + ] + } + }, + 'nuke': { + 'viewer': { + 'viewerProcess': 'sRGB' + }, + 'baking': { + 'viewerProcess': 'rec709' + }, + 'workfile': { + 'colorManagement': 'Nuke', + 'OCIO_config': 'nuke-default', + 'customOCIOConfigPath': { + 'windows': [], + 'darwin': [], + 'linux': [] + }, + 'workingSpaceLUT': 'linear', + 'monitorLut': 'sRGB', + 'int8Lut': 'sRGB', + 'int16Lut': 'sRGB', + 'logLut': 'Cineon', + 'floatLut': 'linear' + }, + 'nodes': { + 'requiredNodes': [ + { + 'plugins': [ + 'CreateWriteRender' + ], + 'nukeNodeClass': 'Write', + 'knobs': [ + { + 'name': 'file_type', + 'value': 'exr' + }, + { + 'name': 'datatype', + 'value': '16 bit half' + }, + { + 'name': 'compression', + 'value': 'Zip (1 scanline)' + }, + { + 'name': 'autocrop', + 'value': 'True' + }, + { + 'name': 'tile_color', + 'value': '0xff0000ff' + }, + { + 'name': 'channels', + 'value': 'rgb' + }, + { + 'name': 'colorspace', + 'value': 'linear' + }, + { + 'name': 'create_directories', + 'value': 'True' + } + ] + }, + { + 'plugins': [ + 'CreateWritePrerender' + ], + 'nukeNodeClass': 'Write', + 'knobs': [ + { + 'name': 'file_type', + 'value': 'exr' + }, + { + 'name': 'datatype', + 'value': '16 bit half' + }, + { + 'name': 'compression', + 'value': 'Zip (1 scanline)' + }, + { + 'name': 'autocrop', + 'value': 'False' + }, + { 
+ 'name': 'tile_color', + 'value': '0xadab1dff' + }, + { + 'name': 'channels', + 'value': 'rgb' + }, + { + 'name': 'colorspace', + 'value': 'linear' + }, + { + 'name': 'create_directories', + 'value': 'True' + } + ] + } + ], + 'customNodes': [] + }, + 'regexInputs': { + 'inputs': [ + { + 'regex': '[^-a-zA-Z0-9]beauty[^-a-zA-Z0-9]', + 'colorspace': 'linear' + } + ] + } + }, + 'maya': { + 'colorManagementPreference': { + 'configFilePath': { + 'windows': [], + 'darwin': [], + 'linux': [] + }, + 'renderSpace': 'scene-linear Rec 709/sRGB', + 'viewTransform': 'sRGB gamma' + } + } + }, + 'roots': { + 'work': { + 'windows': 'C:/projects', + 'darwin': '/Volumes/path', + 'linux': '/mnt/share/projects' + } + }, + 'templates': { + 'defaults': { + 'version_padding': 3, + 'version': 'v{version:0>{@version_padding}}', + 'frame_padding': 4, + 'frame': '{frame:0>{@frame_padding}}' + }, + 'work': { + 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task}', + 'file': '{project[code]}_{asset}_{task}_{@version}<_{comment}>.{ext}', + 'path': '{@folder}/{@file}' + }, + 'render': { + 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}', + 'file': '{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{ext}', + 'path': '{@folder}/{@file}' + }, + 'publish': { + 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}', + 'file': '{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}><_{udim}>.{ext}', + 'path': '{@folder}/{@file}', + 'thumbnail': '{thumbnail_root}/{project[name]}/{_id}_{thumbnail_type}.{ext}' + }, + 'hero': { + 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/hero', + 'file': '{project[code]}_{asset}_{subset}_hero<_{output}><.{frame}>.{ext}', + 'path': '{@folder}/{@file}' + }, + 'delivery': {}, + 'others': {} + } + }, + 'parent': None, + 'schema': 'avalon-core:project-2.0' + }, + 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', + 'frameStartHandle': 0, + 'frameEndHandle': 0, + 'byFrameStep': 1, + 'author': 'petrk', + 'expectedFiles': [ + 'C:/projects/petr_test/sequences/seq01/shot01/work/lighting\\renders\\aftereffects\\petr_test_shot01_lighting_v001\\shot01_renderLightingDefault_v001.mov' + ], + 'slack_channel_message_profiles': [ + { + 'channels': [ + 'test_integration' + ], + 'upload_thumbnail': True, + 'message': 'Test message' + } + ], + 'slack_token': 'xoxb-1494100953104-2176825439264-jGqvQzfq9uZJPmyX5Q4o4TnP', + 'representations': [ + { + 'frameStart': 0, + 'frameEnd': 0, + 'name': 'mov', + 'ext': 'mov', + 'files': ' renderLightingDefault.mov', + 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', + 'tags': [ + 'review' + ], + 'published_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', + 'publishedFiles': [ + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov' + ] + }, + { + 'name': 'thumbnail', + 'ext': 'jpg', + 'files': 'thumbnail.jpg', + 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', + 'tags': [ + 'thumbnail' + ], + 'published_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', + 'publishedFiles': [ + 
'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg' + ] + }, + { + 'frameStart': 0, + 'frameEnd': 0, + 'name': 'h264_mp4', + 'ext': 'mp4', + 'files': ' renderLightingDefault_h264burnin.mp4', + 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', + 'tags': [ + 'review', + 'burnin', + 'ftrackreview' + ], + 'resolutionWidth': 1920, + 'resolutionHeight': 1080, + 'outputName': 'h264', + 'outputDef': { + 'ext': 'mp4', + 'tags': [ + 'burnin', + 'ftrackreview' + ], + 'burnins': [], + 'ffmpeg_args': { + 'video_filters': [], + 'audio_filters': [], + 'input': [ + '-apply_trc gamma22' + ], + 'output': [ + '-pix_fmt yuv420p', + '-crf 18', + '-intra' + ] + }, + 'filter': { + 'families': [ + 'render', + 'review', + 'ftrack' + ] + }, + 'overscan_crop': '', + 'overscan_color': [ + 0, + 0, + 0, + 255 + ], + 'width': 0, + 'height': 0, + 'bg_color': [ + 0, + 0, + 0, + 0 + ], + 'letter_box': { + 'enabled': False, + 'ratio': 0.0, + 'state': 'letterbox', + 'fill_color': [ + 0, + 0, + 0, + 255 + ], + 'line_thickness': 0, + 'line_color': [ + 255, + 0, + 0, + 255 + ] + }, + 'filename_suffix': 'h264' + }, + 'frameStartFtrack': 0, + 'frameEndFtrack': 0, + 'ffmpeg_cmd': 'C:\\Users\\petrk\\PycharmProjects\\Pype3.0\\pype\\vendor\\bin\\ffmpeg\\windows\\bin\\ffmpeg -apply_trc gamma22 -i "C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr\\ renderLightingDefault.mov" -pix_fmt yuv420p -crf 18 -intra -y "C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr\\ renderLightingDefault_h264.mp4"', + 'published_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', + 'publishedFiles': [ + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' + ] + } + ], + 'assetEntity': { + '_id': ObjectId( + '5fabee9730a94666449245b7' + ), + 'name': 'shot01', + 'data': { + 'ftrackId': '0c5f548c-2425-11eb-b203-628b111fac3c', + 'entityType': 'Shot', + 'clipIn': 1, + 'resolutionWidth': 1920.0, + 'handleEnd': 0.0, + 'frameEnd': 1001, + 'resolutionHeight': 1080.0, + 'frameStart': 1001.0, + 'pixelAspect': 1.0, + 'fps': 25.0, + 'handleStart': 0.0, + 'clipOut': 1, + 'tools_env': [], + 'avalon_mongo_id': '5fabee9730a94666449245b7', + 'parents': [ + 'sequences', + 'seq01' + ], + 'hierarchy': 'sequences\\seq01', + 'tasks': { + 'lighting': { + 'type': 'Lighting' + }, + 'animation': { + 'type': 'Animation' + }, + 'compositing': { + 'type': 'Compositing' + } + }, + 'visualParent': ObjectId( + '5fabee9730a94666449245b6' + ) + }, + 'type': 'asset', + 'parent': ObjectId( + '5f2a6d2311e06a9818a1958b' + ), + 'schema': 'pype:asset-3.0' + }, + 'subsetEntity': { + '_id': ObjectId( + '61d723a271e6fce378bd428c' + ), + 'schema': 'openpype:subset-3.0', + 'type': 'subset', + 'name': 'renderLightingDefault', + 'data': { + 'families': [ + 'render', + 'review', + 'ftrack', + 'slack' + ] + }, + 'parent': ObjectId( + '5fabee9730a94666449245b7' + ) + }, + 'versionEntity': { + '_id': ObjectId( + '61d723a371e6fce378bd428d' + ), + 'schema': 'openpype:version-3.0', + 'type': 'version', + 'parent': ObjectId( + '61d723a271e6fce378bd428c' + ), + 'name': 1, + 'data': { + 'families': [ + 'render', + 'render', + 'review', + 'ftrack', + 'slack' + ], + 'time': '20220106T181423Z', + 'author': 'petrk', + 'source': 
'C:/projects/petr_test/sequences/seq01/shot01/work/lighting/petr_test_shot01_lighting_v001.aep', + 'comment': '', + 'machine': 'LAPTOP-UB778LHG', + 'fps': 25.0, + 'intent': '-', + 'frameStart': 0, + 'frameEnd': 0, + 'handleEnd': 0, + 'handleStart': 0, + 'inputLinks': [ + OrderedDict( + [ + ( + 'type', + 'generative' + ), + ( + 'id', + ObjectId( + '600ab849c411725a626b8c35' + )), + ( + 'linkedBy', + 'publish' + ) + ] + ) + ] + } + }, + 'transfers': [ + [ + 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr\\ renderLightingDefault_h264burnin.mp4', + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' + ] + ], + 'destination_list': [ + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' + ], + 'published_representations': { + ObjectId( + '61d723a371e6fce378bd428e' + ): { + 'representation': { + '_id': ObjectId( + '61d723a371e6fce378bd428e' + ), + 'schema': 'openpype:representation-2.0', + 'type': 'representation', + 'parent': ObjectId( + '61d723a371e6fce378bd428d' + ), + 'name': 'mov', + 'data': { + 'path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', + 'template': '{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{ext}' + }, + 'dependencies': [], + 'context': { + 'root': { + 'work': 'C:/projects' + }, + 'project': { + 'name': 'petr_test', + 'code': 'petr_test' + }, + 'hierarchy': 'sequences/seq01', + 'asset': 'shot01', + 'family': 'render', + 'subset': 'renderLightingDefault', + 'version': 1, + 'ext': 'mov', + 'task': { + 'name': 'lighting', + 'type': 'Lighting', + 'short': 'lgt' + }, + 'representation': 'mov', + 'username': 'petrk' + }, + 'files': [ + { + '_id': ObjectId( + '61d723a371e6fce378bd4291' + ), + 'path': '{root[work]}/petr_test/sequences/seq01/shot01/publish/render/renderLightingDefault/v001/petr_test_shot01_renderLightingDefault_v001.mov', + 'size': 1654788, + 'hash': 'petr_test_shot01_renderLightingDefault_v001,mov|1641489300,6230524|1654788', + 'sites': [ + { + 'name': 'studio', + 'created_dt': datetime.datetime(2022, + 1, + 6, + 18, + 15, + 15, + 264448) + } + ] + } + ] + }, + 'anatomy_data': { + 'project': { + 'name': 'petr_test', + 'code': 'petr_test' + }, + 'asset': 'shot01', + 'parent': 'seq01', + 'hierarchy': 'sequences/seq01', + 'task': { + 'name': 'lighting', + 'type': 'Lighting', + 'short': 'lgt' + }, + 'username': 'petrk', + 'app': 'aftereffects', + 'd': '6', + 'dd': '06', + 'ddd': 'Thu', + 'dddd': 'Thursday', + 'm': '1', + 'mm': '01', + 'mmm': 'Jan', + 'mmmm': 'January', + 'yy': '22', + 'yyyy': '2022', + 'H': '18', + 'HH': '18', + 'h': '6', + 'hh': '06', + 'ht': 'PM', + 'M': '14', + 'MM': '14', + 'S': '23', + 'SS': '23', + 'version': 1, + 'subset': 'renderLightingDefault', + 'family': 'render', + 'intent': '-', + 'representation': 'mov', + 'ext': 'mov' + }, + 'published_files': [ + 
'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov' + ] + }, + ObjectId( + '61d723a371e6fce378bd4292' + ): { + 'representation': { + '_id': ObjectId( + '61d723a371e6fce378bd4292' + ), + 'schema': 'openpype:representation-2.0', + 'type': 'representation', + 'parent': ObjectId( + '61d723a371e6fce378bd428d' + ), + 'name': 'thumbnail', + 'data': { + 'path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', + 'template': '{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{ext}' + }, + 'dependencies': [], + 'context': { + 'root': { + 'work': 'C:/projects' + }, + 'project': { + 'name': 'petr_test', + 'code': 'petr_test' + }, + 'hierarchy': 'sequences/seq01', + 'asset': 'shot01', + 'family': 'render', + 'subset': 'renderLightingDefault', + 'version': 1, + 'ext': 'jpg', + 'task': { + 'name': 'lighting', + 'type': 'Lighting', + 'short': 'lgt' + }, + 'representation': 'jpg', + 'username': 'petrk' + }, + 'files': [ + { + '_id': ObjectId( + '61d723a371e6fce378bd4295' + ), + 'path': '{root[work]}/petr_test/sequences/seq01/shot01/publish/render/renderLightingDefault/v001/petr_test_shot01_renderLightingDefault_v001.jpg', + 'size': 871, + 'hash': 'petr_test_shot01_renderLightingDefault_v001,jpg|1641489301,1720147|871', + 'sites': [ + { + 'name': 'studio', + 'created_dt': datetime.datetime(2022, + 1, + 6, + 18, + 15, + 15, + 825446) + } + ] + } + ] + }, + 'anatomy_data': { + 'project': { + 'name': 'petr_test', + 'code': 'petr_test' + }, + 'asset': 'shot01', + 'parent': 'seq01', + 'hierarchy': 'sequences/seq01', + 'task': { + 'name': 'lighting', + 'type': 'Lighting', + 'short': 'lgt' + }, + 'username': 'petrk', + 'app': 'aftereffects', + 'd': '6', + 'dd': '06', + 'ddd': 'Thu', + 'dddd': 'Thursday', + 'm': '1', + 'mm': '01', + 'mmm': 'Jan', + 'mmmm': 'January', + 'yy': '22', + 'yyyy': '2022', + 'H': '18', + 'HH': '18', + 'h': '6', + 'hh': '06', + 'ht': 'PM', + 'M': '14', + 'MM': '14', + 'S': '23', + 'SS': '23', + 'version': 1, + 'subset': 'renderLightingDefault', + 'family': 'render', + 'intent': '-', + 'representation': 'jpg', + 'ext': 'jpg' + }, + 'published_files': [ + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg' + ] + }, + ObjectId( + '61d723a471e6fce378bd4296' + ): { + 'representation': { + '_id': ObjectId( + '61d723a471e6fce378bd4296' + ), + 'schema': 'openpype:representation-2.0', + 'type': 'representation', + 'parent': ObjectId( + '61d723a371e6fce378bd428d' + ), + 'name': 'h264_mp4', + 'data': { + 'path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', + 'template': '{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{ext}' + }, + 'dependencies': [], + 'context': { + 'root': { + 'work': 'C:/projects' + }, + 'project': { + 'name': 'petr_test', + 'code': 'petr_test' + }, + 'hierarchy': 'sequences/seq01', + 'asset': 'shot01', + 'family': 'render', + 'subset': 'renderLightingDefault', + 'version': 1, + 'output': 'h264', + 'ext': 'mp4', + 'task': { + 'name': 'lighting', + 
'type': 'Lighting', + 'short': 'lgt' + }, + 'representation': 'mp4', + 'username': 'petrk' + }, + 'files': [ + { + '_id': ObjectId( + '61d723a471e6fce378bd4299' + ), + 'path': '{root[work]}/petr_test/sequences/seq01/shot01/publish/render/renderLightingDefault/v001/petr_test_shot01_renderLightingDefault_v001_h264.mp4', + 'size': 10227, + 'hash': 'petr_test_shot01_renderLightingDefault_v001_h264,mp4|1641489313,659368|10227', + 'sites': [ + { + 'name': 'studio', + 'created_dt': datetime.datetime(2022, + 1, + 6, + 18, + 15, + 16, + 53445) + } + ] + } + ] + }, + 'anatomy_data': { + 'project': { + 'name': 'petr_test', + 'code': 'petr_test' + }, + 'asset': 'shot01', + 'parent': 'seq01', + 'hierarchy': 'sequences/seq01', + 'task': { + 'name': 'lighting', + 'type': 'Lighting', + 'short': 'lgt' + }, + 'username': 'petrk', + 'app': 'aftereffects', + 'd': '6', + 'dd': '06', + 'ddd': 'Thu', + 'dddd': 'Thursday', + 'm': '1', + 'mm': '01', + 'mmm': 'Jan', + 'mmmm': 'January', + 'yy': '22', + 'yyyy': '2022', + 'H': '18', + 'HH': '18', + 'h': '6', + 'hh': '06', + 'ht': 'PM', + 'M': '14', + 'MM': '14', + 'S': '23', + 'SS': '23', + 'version': 1, + 'subset': 'renderLightingDefault', + 'family': 'render', + 'intent': '-', + 'resolution_width': 1920, + 'resolution_height': 1080, + 'fps': 25, + 'output': 'h264', + 'representation': 'mp4', + 'ext': 'mp4' + }, + 'published_files': [ + 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' + ] + } + }, + 'ftrackComponentsList': [ + { + 'assettype_data': { + 'short': 'render' + }, + 'asset_data': { + 'name': 'renderLightingDefault' + }, + 'assetversion_data': { + 'version': 1 + }, + 'component_overwrite': False, + 'thumbnail': True, + 'component_data': { + 'name': 'thumbnail' + }, + 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', + 'component_location': , + 'component': + }, + { + 'assettype_data': { + 'short': 'render' + }, + 'asset_data': { + 'name': 'renderLightingDefault' + }, + 'assetversion_data': { + 'version': 1 + }, + 'component_overwrite': False, + 'thumbnail': False, + 'component_data': { + 'name': 'ftrackreview-mp4', + 'metadata': { + 'ftr_meta': '{"frameIn": 0, "frameOut": 1, "frameRate": 25.0}' + } + }, + 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', + 'component_location': , + 'component': + }, + { + 'assettype_data': { + 'short': 'render' + }, + 'asset_data': { + 'name': 'renderLightingDefault' + }, + 'assetversion_data': { + 'version': 1 + }, + 'component_overwrite': False, + 'thumbnail': False, + 'component_data': { + 'name': 'thumbnail_src' + }, + 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', + 'component_location': , + 'component': + }, + { + 'assettype_data': { + 'short': 'render' + }, + 'asset_data': { + 'name': 'renderLightingDefault' + }, + 'assetversion_data': { + 'version': 1 + }, + 'component_overwrite': False, + 'thumbnail': False, + 'component_data': { + 'name': 'ftrackreview-mp4_src', + 'metadata': { + 'ftr_meta': '{"frameIn": 0, "frameOut": 1, "frameRate": 25.0}' + } + }, + 'component_path': 
'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', + 'component_location': , + 'component': + }, + { + 'assettype_data': { + 'short': 'render' + }, + 'asset_data': { + 'name': 'renderLightingDefault' + }, + 'assetversion_data': { + 'version': 1 + }, + 'component_overwrite': False, + 'thumbnail': False, + 'component_data': { + 'name': 'mov' + }, + 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', + 'component_location': , + 'component': + } + ], + 'ftrackIntegratedAssetVersions': [ + + ] +} \ No newline at end of file diff --git a/vendor/response.json b/vendor/response.json new file mode 100644 index 0000000000..26a4fae2fd --- /dev/null +++ b/vendor/response.json @@ -0,0 +1 @@ +{status: 200, headers: {'date': 'Tue, 11 Jan 2022 11:08:57 GMT', 'server': 'Apache', 'x-powered-by': 'HHVM/4.128.0', 'access-control-allow-origin': '*', 'referrer-policy': 'no-referrer', 'x-slack-backend': 'r', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'access-control-allow-headers': 'slack-route, x-slack-version-ts, x-b3-traceid, x-b3-spanid, x-b3-parentspanid, x-b3-sampled, x-b3-flags', 'access-control-expose-headers': 'x-slack-req-id, retry-after', 'x-oauth-scopes': 'chat:write,chat:write.public,files:write,chat:write.customize', 'x-accepted-oauth-scopes': 'chat:write', 'expires': 'Mon, 26 Jul 1997 05:00:00 GMT', 'cache-control': 'private, no-cache, no-store, must-revalidate', 'pragma': 'no-cache', 'x-xss-protection': '0', 'x-content-type-options': 'nosniff', 'x-slack-req-id': '9d1d11399a44c8751f89bb4dcd2b91fb', 'vary': 'Accept-Encoding', 'content-type': 'application/json; charset=utf-8', 'x-envoy-upstream-service-time': '52', 'x-backend': 'main_normal main_bedrock_normal_with_overflow main_canary_with_overflow main_bedrock_canary_with_overflow main_control_with_overflow main_bedrock_control_with_overflow', 'x-server': 'slack-www-hhvm-main-iad-qno3', 'x-slack-shared-secret-outcome': 'no-match', 'via': 'envoy-www-iad-omsy, envoy-edge-iad-bgfx', 'x-edge-backend': 'envoy-www', 'x-slack-edge-shared-secret-outcome': 'no-match', 'connection': 'close', 'transfer-encoding': 'chunked'}, body: {"ok":true,"channel":"C024DUFM8MB","ts":"1641899337.001100","message":{"type":"message","subtype":"bot_message","text":"RenderCompositingDefault published for Jungle\n\nHere should be link to review C:\\projects\\petr_test\\assets\\locations\\Jungle\\publish\\render\\renderCompositingDefault\\v253\\petr_test_Jungle_renderCompositingDefault_v253_h264.mp4\n\n Attachment links: \n\n","ts":"1641899337.001100","username":"OpenPypeNotifier","icons":{"image_48":"https:\/\/s3-us-west-2.amazonaws.com\/slack-files2\/bot_icons\/2022-01-07\/2934353684385_48.png"},"bot_id":"B024H0P0CAE"}} \ No newline at end of file diff --git a/vendor/temp.json b/vendor/temp.json new file mode 100644 index 0000000000..089174d26c --- /dev/null +++ b/vendor/temp.json @@ -0,0 +1,46 @@ +{ + project(name: "demo_Big_Episodic") { + representations( + first: 0, + after: 0, + localSite: "local", + remoteSite: "local" + ) { + edges { + node { + id + name + # Sorry: totalSize is not implemented, but it will be + # totalSize + fileCount + # overal sync state + localState{ + status + size + timestamp + } + remoteState{ + status + size + timestamp + } + # crawl to the top to get parent info + version { + version + subset { + 
family + name + folder { + name + } + } + } + } + } + pageInfo { + hasNextPage + endCursor + } + } + } +} \ No newline at end of file From b47e480d7c5384862afbbfae3e7e0b779036e1da Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 17:31:02 +0200 Subject: [PATCH 1490/2550] Revert "OP-4181 - clean up after review comments" This reverts commit a7150bd6f1c9494734f03265eecbe86ff284d882. --- igniter/GPUCache/data_0 | Bin 8192 -> 0 bytes igniter/GPUCache/data_1 | Bin 270336 -> 0 bytes igniter/GPUCache/data_2 | Bin 8192 -> 0 bytes igniter/GPUCache/data_3 | Bin 8192 -> 0 bytes igniter/GPUCache/index | Bin 262512 -> 0 bytes openpype/hooks/pre_python2_prelaunch.py | 35 - openpype/hosts/photoshop/tests/expr.py | 51 - openpype/lib/token | 1 - .../event_handlers_user/action_edl_create.py | 275 ---- openpype/pipeline/temp_anatomy.py | 1330 ----------------- .../plugins/publish/integrate_hero_version.py | 9 +- .../_process_referenced_pipeline_result.json | 92 -- tests/unit/test_unzip.py | 11 - vendor/configs/OpenColorIO-Configs | 1 - vendor/instance.json | 1133 -------------- vendor/response.json | 1 - vendor/temp.json | 46 - 17 files changed, 3 insertions(+), 2982 deletions(-) delete mode 100644 igniter/GPUCache/data_0 delete mode 100644 igniter/GPUCache/data_1 delete mode 100644 igniter/GPUCache/data_2 delete mode 100644 igniter/GPUCache/data_3 delete mode 100644 igniter/GPUCache/index delete mode 100644 openpype/hooks/pre_python2_prelaunch.py delete mode 100644 openpype/hosts/photoshop/tests/expr.py delete mode 100644 openpype/lib/token delete mode 100644 openpype/modules/ftrack/event_handlers_user/action_edl_create.py delete mode 100644 openpype/pipeline/temp_anatomy.py delete mode 100644 tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json delete mode 100644 tests/unit/test_unzip.py delete mode 160000 vendor/configs/OpenColorIO-Configs delete mode 100644 vendor/instance.json delete mode 100644 vendor/response.json delete mode 100644 vendor/temp.json diff --git a/igniter/GPUCache/data_0 b/igniter/GPUCache/data_0 deleted file mode 100644 index d76fb77e93ac8a536b5dbade616d63abd00626c5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8192 zcmeIuK?wjL5Jka{7-jo+5O1auw}mk8@B+*}b0s6M>Kg$91PBlyK!5-N0t5&UAV7cs W0RjXF5FkK+009C72oNCfo4^Gh&;oe? 
diff --git a/igniter/GPUCache/data_1 b/igniter/GPUCache/data_1
deleted file mode 100644
index 212f73166781160e472f8e76c3b9998b3775ecb7..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 270336
[base85-encoded binary payload omitted]

diff --git a/igniter/GPUCache/data_2 b/igniter/GPUCache/data_2
deleted file mode 100644
index c7e2eb9adcfb2d3313ec85f5c28cedda950a3f9b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 8192
[base85-encoded binary payload omitted]

diff --git a/igniter/GPUCache/data_3 b/igniter/GPUCache/data_3
deleted file mode 100644
index 5eec97358cf550862fd343fc9a73c159d4c0ab10..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 8192
[base85-encoded binary payload omitted]

diff --git a/igniter/GPUCache/index b/igniter/GPUCache/index
deleted file mode 100644
index b2998cfef1a6457e5cfe9dc37e029bdbe0a7f778..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 262512
[base85-encoded binary payload abridged]
zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk z1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly zK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjYm G5qJP2>jZZI diff --git a/openpype/hooks/pre_python2_prelaunch.py b/openpype/hooks/pre_python2_prelaunch.py deleted file mode 100644 index 84272d2e5d..0000000000 --- a/openpype/hooks/pre_python2_prelaunch.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -from openpype.lib import PreLaunchHook - - -class PrePython2Vendor(PreLaunchHook): - """Prepend python 2 dependencies for py2 hosts.""" - order = 10 - - def execute(self): - if not self.application.use_python_2: - return - - # Prepare vendor dir path - self.log.info("adding global python 2 vendor") - pype_root = os.getenv("OPENPYPE_REPOS_ROOT") - python_2_vendor = os.path.join( - pype_root, - "openpype", - "vendor", - "python", - "python_2" - ) - - # Add Python 2 modules - python_paths = [ - python_2_vendor - ] - - # Load PYTHONPATH from current launch context - python_path = self.launch_context.env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths) diff --git a/openpype/hosts/photoshop/tests/expr.py b/openpype/hosts/photoshop/tests/expr.py deleted file mode 100644 index ff796f417c..0000000000 --- a/openpype/hosts/photoshop/tests/expr.py +++ /dev/null @@ -1,51 +0,0 @@ -import json - -data = [ - { - "schema": "openpype:container-2.0", - "id": "pyblish.avalon.container", - "name": "imageArtNeew", - "namespace": "Jungle_imageArtNeew_001", - "loader": "ReferenceLoader", - "representation": "61c1eb91e1a4d1e5a23582f6", - "members": [ - "131" - ] - }, - { - "id": "pyblish.avalon.instance", - "family": "image", - "asset": "Jungle", - "subset": "imageMainBg", - "active": True, - "variant": "Main", - "uuid": "199", - "long_name": "BG" - }, - { - "id": "pyblish.avalon.instance", - "family": "image", - "asset": "Jungle", - "subset": "imageMain", - "active": True, - "variant": "Main", - "uuid": "192", - "long_name": "imageMain" - }, - { - "id": "pyblish.avalon.instance", - "family": "workfile", - "subset": "workfile", - "active": True, - "creator_identifier": "workfile", - "asset": "Jungle", - "task": "art", - "variant": "", - "instance_id": "3ed19342-cd8e-4bb6-8cda-d6e74d9a7efe", - "creator_attributes": {}, - "publish_attributes": {} - } -] - -with open("C:\\Users\\petrk\\PycharmProjects\\Pype3.0\\pype\\openpype\\hosts\\photoshop\\tests\\mock_get_layers_metadata.json", 'w') as fp: - fp.write(json.dumps(data, indent=4)) \ No newline at end of file diff --git a/openpype/lib/token b/openpype/lib/token deleted file mode 100644 index 193a2aac95..0000000000 --- a/openpype/lib/token +++ /dev/null @@ -1 +0,0 @@ -5d58370a7702b2efee5120704246baf4abb865323fc9db9a04827bfb478569d6 \ No newline at end of file diff --git a/openpype/modules/ftrack/event_handlers_user/action_edl_create.py b/openpype/modules/ftrack/event_handlers_user/action_edl_create.py deleted file 
mode 100644 index 7ac139ae63..0000000000 --- a/openpype/modules/ftrack/event_handlers_user/action_edl_create.py +++ /dev/null @@ -1,275 +0,0 @@ -import os -import subprocess -import tempfile -import shutil -import json -import sys - -import opentimelineio as otio -import ftrack_api -import requests - -from openpype_modules.ftrack.lib import BaseAction - - -def download_file(url, path): - with open(path, "wb") as f: - print("\nDownloading %s" % path) - response = requests.get(url, stream=True) - total_length = response.headers.get('content-length') - - if total_length is None: - f.write(response.content) - else: - dl = 0 - total_length = int(total_length) - for data in response.iter_content(chunk_size=4096): - dl += len(data) - f.write(data) - done = int(50 * dl / total_length) - sys.stdout.write("\r[%s%s]" % ('=' * done, ' ' * (50-done))) - sys.stdout.flush() - - -class ExportEditorialAction(BaseAction): - '''Export Editorial action''' - - label = "Export Editorial" - variant = None - identifier = "export-editorial" - description = None - component_name_order = ["exr", "mov", "ftrackreview-mp4_src"] - - def export_editorial(self, entity, output_path): - session = ftrack_api.Session() - unmanaged_location = session.query( - "Location where name is \"ftrack.unmanaged\"" - ).one() - temp_path = tempfile.mkdtemp() - - files = {} - for obj in entity["review_session_objects"]: - data = {} - parent_name = obj["asset_version"]["asset"]["parent"]["name"] - component_query = "Component where version_id is \"{}\"" - component_query += " and name is \"{}\"" - for name in self.component_name_order: - try: - component = session.query( - component_query.format( - obj["asset_version"]["id"], name - ) - ).one() - path = unmanaged_location.get_filesystem_path(component) - data["path"] = path.replace("\\", "/") - break - except ftrack_api.exception.NoResultFoundError: - pass - - # Download online review if not local path found. - if "path" not in data: - component = session.query( - component_query.format( - obj["asset_version"]["id"], "ftrackreview-mp4" - ) - ).one() - location = component["component_locations"][0] - component_url = location["location"].get_url(component) - asset_name = obj["asset_version"]["asset"]["name"] - version = obj["asset_version"]["version"] - filename = "{}_{}_v{:03d}.mp4".format( - parent_name, asset_name, version - ) - filepath = os.path.join( - output_path, "downloads", filename - ).replace("\\", "/") - - if not os.path.exists(os.path.dirname(filepath)): - os.makedirs(os.path.dirname(filepath)) - - download_file(component_url, filepath) - data["path"] = filepath - - # Get frame duration and framerate. - query = "Component where version_id is \"{}\"" - query += " and name is \"ftrackreview-mp4\"" - component = session.query( - query.format(obj["asset_version"]["id"]) - ).one() - metadata = json.loads(component["metadata"]["ftr_meta"]) - data["framerate"] = metadata["frameRate"] - data["frames"] = metadata["frameOut"] - metadata["frameIn"] - - # Find audio if it exists. 
- query = "Asset where parent.id is \"{}\"" - query += " and type.name is \"Audio\"" - asset = session.query( - query.format(obj["asset_version"]["asset"]["parent"]["id"]) - ) - if asset: - asset_version = asset[0]["versions"][-1] - query = "Component where version_id is \"{}\"" - query += " and name is \"{}\"" - comp = session.query( - query.format(asset_version["id"], "wav") - ).one() - src = unmanaged_location.get_filesystem_path(comp) - dst = os.path.join(temp_path, parent_name + ".wav") - shutil.copy(src, dst) - - # Collect data. - files[parent_name] = data - - clips = [] - for name, data in files.items(): - self.log.info("Processing {} with {}".format(name, data)) - f = data["path"] - range = otio.opentime.TimeRange( - start_time=otio.opentime.RationalTime(0, data["framerate"]), - duration=otio.opentime.RationalTime( - data["frames"], data["framerate"] - ) - ) - - media_reference = otio.schema.ExternalReference( - available_range=range, - target_url=f"file://{f}" - ) - - clip = otio.schema.Clip( - name=name, - media_reference=media_reference, - source_range=range - ) - clips.append(clip) - - # path = os.path.join(temp_path, name + ".wav").replace("\\", "/") - # if not os.path.exists(path): - # args = ["ffmpeg", "-y", "-i", f, path] - # self.log.info(subprocess.list2cmdline(args)) - # subprocess.call(args) - - timeline = otio.schema.timeline_from_clips(clips) - otio.adapters.write_to_file( - timeline, os.path.join(output_path, entity["name"] + ".xml") - ) - - data = "" - for f in os.listdir(temp_path): - f = f.replace("\\", "/") - data += f"file '{f}'\n" - - path = os.path.join(temp_path, "temp.txt") - with open(path, "w") as f: - f.write(data) - - args = [ - "ffmpeg", "-y", "-f", "concat", "-safe", "0", - "-i", os.path.basename(path), - os.path.join(output_path, entity["name"] + ".wav") - ] - self.log.info(subprocess.list2cmdline(args)) - subprocess.call(args, cwd=temp_path) - - shutil.rmtree(temp_path) - - def discover(self, session, entities, event): - '''Return true if we can handle the selected entities. - *session* is a `ftrack_api.Session` instance - *entities* is a list of tuples each containing the entity type and the - entity id. - If the entity is a hierarchical you will always get the entity - type TypedContext, once retrieved through a get operation you - will have the "real" entity type ie. example Shot, Sequence - or Asset Build. - *event* the unmodified original event - ''' - if len(entities) == 1: - if entities[0].entity_type == "ReviewSession": - return True - - return False - - def launch(self, session, entities, event): - '''Callback method for the custom action. - return either a bool ( True if successful or False if the action - failed ) or a dictionary with they keys `message` and `success`, the - message should be a string and will be displayed as feedback to the - user, success should be a bool, True if successful or False if the - action failed. - *session* is a `ftrack_api.Session` instance - *entities* is a list of tuples each containing the entity type and the - entity id. - If the entity is a hierarchical you will always get the entity - type TypedContext, once retrieved through a get operation you - will have the "real" entity type ie. example Shot, Sequence - or Asset Build. 
- *event* the unmodified original event - ''' - if 'values' in event['data']: - userId = event['source']['user']['id'] - user = session.query('User where id is ' + userId).one() - job = session.create( - 'Job', - { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'Export Editorial.' - }) - } - ) - session.commit() - - try: - output_path = event["data"]["values"]["output_path"] - - if not os.path.exists(output_path): - os.makedirs(output_path) - - self.export_editorial(entities[0], output_path) - - job['status'] = 'done' - session.commit() - except Exception: - session.rollback() - job["status"] = "failed" - session.commit() - self.log.error( - "Exporting editorial failed ({})", exc_info=True - ) - - return { - 'success': True, - 'message': 'Action completed successfully' - } - - items = [ - { - 'label': 'Output folder:', - 'type': 'text', - 'value': '', - 'name': 'output_path' - } - - ] - return { - 'success': True, - 'message': "", - 'items': items - } - - -def register(session): - '''Register action. Called when used as an event plugin.''' - - ExportEditorialAction(session).register() - - -if __name__ == "__main__": - session = ftrack_api.Session() - action = ExportEditorialAction(session) - id = "bfe0477c-d5a8-49d8-88b9-6d44d2e48fd9" - review_session = session.get("ReviewSession", id) - path = r"c:/projects" - action.export_editorial(review_session, path) \ No newline at end of file diff --git a/openpype/pipeline/temp_anatomy.py b/openpype/pipeline/temp_anatomy.py deleted file mode 100644 index 27a9370928..0000000000 --- a/openpype/pipeline/temp_anatomy.py +++ /dev/null @@ -1,1330 +0,0 @@ -import os -import re -import copy -import platform -import collections -import numbers - -import six -import time - -from openpype.settings.lib import ( - get_anatomy_settings, - get_project_settings, - get_default_project_settings, - get_local_settings -) - -from openpype.client import get_project -from openpype.lib.path_templates import ( - TemplateUnsolved, - TemplateResult, - TemplatesDict, - FormatObject, -) -from openpype.lib.log import Logger -from openpype.lib import get_local_site_id - -log = Logger.get_logger(__name__) - - -class ProjectNotSet(Exception): - """Exception raised when is created Anatomy without project name.""" - - -class RootCombinationError(Exception): - """This exception is raised when templates has combined root types.""" - - def __init__(self, roots): - joined_roots = ", ".join( - ["\"{}\"".format(_root) for _root in roots] - ) - # TODO better error message - msg = ( - "Combination of root with and" - " without root name in AnatomyTemplates. {}" - ).format(joined_roots) - - super(RootCombinationError, self).__init__(msg) - - -class BaseAnatomy(object): - """Anatomy module helps to keep project settings. - - Wraps key project specifications, AnatomyTemplates and Roots. 
- """ - - def __init__(self, project_doc, local_settings): - project_name = project_doc["name"] - self.project_name = project_name - - self._data = self._prepare_anatomy_data( - project_doc, local_settings - ) - self._templates_obj = AnatomyTemplates(self) - self._roots_obj = Roots(self) - - root_key_regex = re.compile(r"{(root?[^}]+)}") - root_name_regex = re.compile(r"root\[([^]]+)\]") - - # Anatomy used as dictionary - # - implemented only getters returning copy - def __getitem__(self, key): - return copy.deepcopy(self._data[key]) - - def get(self, key, default=None): - return copy.deepcopy(self._data).get(key, default) - - def keys(self): - return copy.deepcopy(self._data).keys() - - def values(self): - return copy.deepcopy(self._data).values() - - def items(self): - return copy.deepcopy(self._data).items() - - @staticmethod - def _prepare_anatomy_data(anatomy_data): - """Prepare anatomy data for further processing. - - Method added to replace `{task}` with `{task[name]}` in templates. - """ - templates_data = anatomy_data.get("templates") - if templates_data: - # Replace `{task}` with `{task[name]}` in templates - value_queue = collections.deque() - value_queue.append(templates_data) - while value_queue: - item = value_queue.popleft() - if not isinstance(item, dict): - continue - - for key in tuple(item.keys()): - value = item[key] - if isinstance(value, dict): - value_queue.append(value) - - elif isinstance(value, six.string_types): - item[key] = value.replace("{task}", "{task[name]}") - return anatomy_data - - def reset(self): - """Reset values of cached data in templates and roots objects.""" - self._data = self._prepare_anatomy_data( - get_anatomy_settings(self.project_name, self._site_name) - ) - self.templates_obj.reset() - self.roots_obj.reset() - - @property - def templates(self): - """Wrap property `templates` of Anatomy's AnatomyTemplates instance.""" - return self._templates_obj.templates - - @property - def templates_obj(self): - """Return `AnatomyTemplates` object of current Anatomy instance.""" - return self._templates_obj - - def format(self, *args, **kwargs): - """Wrap `format` method of Anatomy's `templates_obj`.""" - return self._templates_obj.format(*args, **kwargs) - - def format_all(self, *args, **kwargs): - """Wrap `format_all` method of Anatomy's `templates_obj`.""" - return self._templates_obj.format_all(*args, **kwargs) - - @property - def roots(self): - """Wrap `roots` property of Anatomy's `roots_obj`.""" - return self._roots_obj.roots - - @property - def roots_obj(self): - """Return `Roots` object of current Anatomy instance.""" - return self._roots_obj - - def root_environments(self): - """Return OPENPYPE_ROOT_* environments for current project in dict.""" - return self._roots_obj.root_environments() - - def root_environmets_fill_data(self, template=None): - """Environment variable values in dictionary for rootless path. - - Args: - template (str): Template for environment variable key fill. - By default is set to `"${}"`. 
- """ - return self.roots_obj.root_environmets_fill_data(template) - - def find_root_template_from_path(self, *args, **kwargs): - """Wrapper for Roots `find_root_template_from_path`.""" - return self.roots_obj.find_root_template_from_path(*args, **kwargs) - - def path_remapper(self, *args, **kwargs): - """Wrapper for Roots `path_remapper`.""" - return self.roots_obj.path_remapper(*args, **kwargs) - - def all_root_paths(self): - """Wrapper for Roots `all_root_paths`.""" - return self.roots_obj.all_root_paths() - - def set_root_environments(self): - """Set OPENPYPE_ROOT_* environments for current project.""" - self._roots_obj.set_root_environments() - - def root_names(self): - """Return root names for current project.""" - return self.root_names_from_templates(self.templates) - - def _root_keys_from_templates(self, data): - """Extract root key from templates in data. - - Args: - data (dict): Data that may contain templates as string. - - Return: - set: Set of all root names from templates as strings. - - Output example: `{"root[work]", "root[publish]"}` - """ - - output = set() - if isinstance(data, dict): - for value in data.values(): - for root in self._root_keys_from_templates(value): - output.add(root) - - elif isinstance(data, str): - for group in re.findall(self.root_key_regex, data): - output.add(group) - - return output - - def root_value_for_template(self, template): - """Returns value of root key from template.""" - root_templates = [] - for group in re.findall(self.root_key_regex, template): - root_templates.append("{" + group + "}") - - if not root_templates: - return None - - return root_templates[0].format(**{"root": self.roots}) - - def root_names_from_templates(self, templates): - """Extract root names form anatomy templates. - - Returns None if values in templates contain only "{root}". - Empty list is returned if there is no "root" in templates. - Else returns all root names from templates in list. - - RootCombinationError is raised when templates contain both root types, - basic "{root}" and with root name specification "{root[work]}". - - Args: - templates (dict): Anatomy templates where roots are not filled. - - Return: - list/None: List of all root names from templates as strings when - multiroot setup is used, otherwise None is returned. - """ - roots = list(self._root_keys_from_templates(templates)) - # Return empty list if no roots found in templates - if not roots: - return roots - - # Raise exception when root keys have roots with and without root name. - # Invalid output example: ["root", "root[project]", "root[render]"] - if len(roots) > 1 and "root" in roots: - raise RootCombinationError(roots) - - # Return None if "root" without root name in templates - if len(roots) == 1 and roots[0] == "root": - return None - - names = set() - for root in roots: - for group in re.findall(self.root_name_regex, root): - names.add(group) - return list(names) - - def fill_root(self, template_path): - """Fill template path where is only "root" key unfilled. - - Args: - template_path (str): Path with "root" key in. - Example path: "{root}/projects/MyProject/Shot01/Lighting/..." - - Return: - str: formatted path - """ - # NOTE does not care if there are different keys than "root" - return template_path.format(**{"root": self.roots}) - - @classmethod - def fill_root_with_path(cls, rootless_path, root_path): - """Fill path without filled "root" key with passed path. 
- - This is helper to fill root with different directory path than anatomy - has defined no matter if is single or multiroot. - - Output path is same as input path if `rootless_path` does not contain - unfilled root key. - - Args: - rootless_path (str): Path without filled "root" key. Example: - "{root[work]}/MyProject/..." - root_path (str): What should replace root key in `rootless_path`. - - Returns: - str: Path with filled root. - """ - output = str(rootless_path) - for group in re.findall(cls.root_key_regex, rootless_path): - replacement = "{" + group + "}" - output = output.replace(replacement, root_path) - - return output - - def replace_root_with_env_key(self, filepath, template=None): - """Replace root of path with environment key. - - # Example: - ## Project with roots: - ``` - { - "nas": { - "windows": P:/projects", - ... - } - ... - } - ``` - - ## Entered filepath - "P:/projects/project/asset/task/animation_v001.ma" - - ## Entered template - "<{}>" - - ## Output - "/project/asset/task/animation_v001.ma" - - Args: - filepath (str): Full file path where root should be replaced. - template (str): Optional template for environment key. Must - have one index format key. - Default value if not entered: "${}" - - Returns: - str: Path where root is replaced with environment root key. - - Raise: - ValueError: When project's roots were not found in entered path. - """ - success, rootless_path = self.find_root_template_from_path(filepath) - if not success: - raise ValueError( - "{}: Project's roots were not found in path: {}".format( - self.project_name, filepath - ) - ) - - data = self.root_environmets_fill_data(template) - return rootless_path.format(**data) - - -class Anatomy(BaseAnatomy): - _project_cache = {} - - def __init__(self, project_name=None, site_name=None): - if not project_name: - project_name = os.environ.get("AVALON_PROJECT") - - if not project_name: - raise ProjectNotSet(( - "Implementation bug: Project name is not set. Anatomy requires" - " to load data for specific project." 
- )) - - self._site_name = site_name - project_info = self.get_project_data_and_cache(project_name, site_name) - - super(Anatomy, self).__init__( - project_info["project_doc"], - project_info["local_settings"] - ) - - @classmethod - def get_project_data_and_cache(cls, project_name, site_name): - project_info = cls._project_cache.get(project_name) - if project_info is not None: - if time.time() - project_info["start"] > 10: - cls._project_cache.pop(project_name) - project_info = None - - if project_info is None: - if site_name is None: - if project_name: - project_settings = get_project_settings(project_name) - else: - project_settings = get_default_project_settings() - site_name = ( - project_settings["global"] - ["sync_server"] - ["config"] - ["active_site"] - ) - if site_name == "local": - site_name = get_local_site_id() - - project_info = { - "project_doc": get_project(project_name), - "local_settings": get_local_settings(site_name), - "site_name": site_name, - "start": time.time() - } - cls._project_cache[project_name] = project_info - - return project_info - - def reset(self): - """Reset values of cached data in templates and roots objects.""" - self._data = self._prepare_anatomy_data( - get_anatomy_settings(self.project_name, self._site_name) - ) - self.templates_obj.reset() - self.roots_obj.reset() - - -class AnatomyTemplateUnsolved(TemplateUnsolved): - """Exception for unsolved template when strict is set to True.""" - - msg = "Anatomy template \"{0}\" is unsolved.{1}{2}" - - -class AnatomyTemplateResult(TemplateResult): - rootless = None - - def __new__(cls, result, rootless_path): - new_obj = super(AnatomyTemplateResult, cls).__new__( - cls, - str(result), - result.template, - result.solved, - result.used_values, - result.missing_keys, - result.invalid_types - ) - new_obj.rootless = rootless_path - return new_obj - - def validate(self): - if not self.solved: - raise AnatomyTemplateUnsolved( - self.template, - self.missing_keys, - self.invalid_types - ) - - def copy(self): - tmp = TemplateResult( - str(self), - self.template, - self.solved, - self.used_values, - self.missing_keys, - self.invalid_types - ) - return self.__class__(tmp, self.rootless) - - def normalized(self): - """Convert to normalized path.""" - - tmp = TemplateResult( - os.path.normpath(self), - self.template, - self.solved, - self.used_values, - self.missing_keys, - self.invalid_types - ) - return self.__class__(tmp, self.rootless) - - -class AnatomyTemplates(TemplatesDict): - inner_key_pattern = re.compile(r"(\{@.*?[^{}0]*\})") - inner_key_name_pattern = re.compile(r"\{@(.*?[^{}0]*)\}") - - def __init__(self, anatomy): - super(AnatomyTemplates, self).__init__() - self.anatomy = anatomy - self.loaded_project = None - - def __getitem__(self, key): - return self.templates[key] - - def get(self, key, default=None): - return self.templates.get(key, default) - - def reset(self): - self._raw_templates = None - self._templates = None - self._objected_templates = None - - @property - def project_name(self): - return self.anatomy.project_name - - @property - def roots(self): - return self.anatomy.roots - - @property - def templates(self): - self._validate_discovery() - return self._templates - - @property - def objected_templates(self): - self._validate_discovery() - return self._objected_templates - - def _validate_discovery(self): - if self.project_name != self.loaded_project: - self.reset() - - if self._templates is None: - self._discover() - self.loaded_project = self.project_name - - def _format_value(self, value, 
data): - if isinstance(value, RootItem): - return self._solve_dict(value, data) - - result = super(AnatomyTemplates, self)._format_value(value, data) - if isinstance(result, TemplateResult): - rootless_path = self._rootless_path(result, data) - result = AnatomyTemplateResult(result, rootless_path) - return result - - def set_templates(self, templates): - if not templates: - self.reset() - return - - self._raw_templates = copy.deepcopy(templates) - templates = copy.deepcopy(templates) - v_queue = collections.deque() - v_queue.append(templates) - while v_queue: - item = v_queue.popleft() - if not isinstance(item, dict): - continue - - for key in tuple(item.keys()): - value = item[key] - if isinstance(value, dict): - v_queue.append(value) - - elif ( - isinstance(value, six.string_types) - and "{task}" in value - ): - item[key] = value.replace("{task}", "{task[name]}") - - solved_templates = self.solve_template_inner_links(templates) - self._templates = solved_templates - self._objected_templates = self.create_ojected_templates( - solved_templates - ) - - def default_templates(self): - """Return default templates data with solved inner keys.""" - return self.solve_template_inner_links( - self.anatomy["templates"] - ) - - def _discover(self): - """ Loads anatomy templates from yaml. - Default templates are loaded if project is not set or project does - not have set it's own. - TODO: create templates if not exist. - - Returns: - TemplatesResultDict: Contain templates data for current project of - default templates. - """ - - if self.project_name is None: - # QUESTION create project specific if not found? - raise AssertionError(( - "Project \"{0}\" does not have his own templates." - " Trying to use default." - ).format(self.project_name)) - - self.set_templates(self.anatomy["templates"]) - - @classmethod - def replace_inner_keys(cls, matches, value, key_values, key): - """Replacement of inner keys in template values.""" - for match in matches: - anatomy_sub_keys = ( - cls.inner_key_name_pattern.findall(match) - ) - if key in anatomy_sub_keys: - raise ValueError(( - "Unsolvable recursion in inner keys, " - "key: \"{}\" is in his own value." - " Can't determine source, please check Anatomy templates." - ).format(key)) - - for anatomy_sub_key in anatomy_sub_keys: - replace_value = key_values.get(anatomy_sub_key) - if replace_value is None: - raise KeyError(( - "Anatomy templates can't be filled." - " Anatomy key `{0}` has" - " invalid inner key `{1}`." - ).format(key, anatomy_sub_key)) - - if not ( - isinstance(replace_value, numbers.Number) - or isinstance(replace_value, six.string_types) - ): - raise ValueError(( - "Anatomy templates can't be filled." - " Anatomy key `{0}` has" - " invalid inner key `{1}`" - " with value `{2}`." - ).format(key, anatomy_sub_key, str(replace_value))) - - value = value.replace(match, str(replace_value)) - - return value - - @classmethod - def prepare_inner_keys(cls, key_values): - """Check values of inner keys. - - Check if inner key exist in template group and has valid value. - It is also required to avoid infinite loop with unsolvable recursion - when first inner key's value refers to second inner key's value where - first is used. 
- """ - keys_to_solve = set(key_values.keys()) - while True: - found = False - for key in tuple(keys_to_solve): - value = key_values[key] - - if isinstance(value, six.string_types): - matches = cls.inner_key_pattern.findall(value) - if not matches: - keys_to_solve.remove(key) - continue - - found = True - key_values[key] = cls.replace_inner_keys( - matches, value, key_values, key - ) - continue - - elif not isinstance(value, dict): - keys_to_solve.remove(key) - continue - - subdict_found = False - for _key, _value in tuple(value.items()): - matches = cls.inner_key_pattern.findall(_value) - if not matches: - continue - - subdict_found = True - found = True - key_values[key][_key] = cls.replace_inner_keys( - matches, _value, key_values, - "{}.{}".format(key, _key) - ) - - if not subdict_found: - keys_to_solve.remove(key) - - if not found: - break - - return key_values - - @classmethod - def solve_template_inner_links(cls, templates): - """Solve templates inner keys identified by "{@*}". - - Process is split into 2 parts. - First is collecting all global keys (keys in top hierarchy where value - is not dictionary). All global keys are set for all group keys (keys - in top hierarchy where value is dictionary). Value of a key is not - overridden in group if already contain value for the key. - - In second part all keys with "at" symbol in value are replaced with - value of the key afterward "at" symbol from the group. - - Args: - templates (dict): Raw templates data. - - Example: - templates:: - key_1: "value_1", - key_2: "{@key_1}/{filling_key}" - - group_1: - key_3: "value_3/{@key_2}" - - group_2: - key_2": "value_2" - key_4": "value_4/{@key_2}" - - output:: - key_1: "value_1" - key_2: "value_1/{filling_key}" - - group_1: { - key_1: "value_1" - key_2: "value_1/{filling_key}" - key_3: "value_3/value_1/{filling_key}" - - group_2: { - key_1: "value_1" - key_2: "value_2" - key_4: "value_3/value_2" - """ - default_key_values = templates.pop("defaults", {}) - for key, value in tuple(templates.items()): - if isinstance(value, dict): - continue - default_key_values[key] = templates.pop(key) - - # Pop "others" key before before expected keys are processed - other_templates = templates.pop("others") or {} - - keys_by_subkey = {} - for sub_key, sub_value in templates.items(): - key_values = {} - key_values.update(default_key_values) - key_values.update(sub_value) - keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) - - for sub_key, sub_value in other_templates.items(): - if sub_key in keys_by_subkey: - log.warning(( - "Key \"{}\" is duplicated in others. Skipping." 
- ).format(sub_key)) - continue - - key_values = {} - key_values.update(default_key_values) - key_values.update(sub_value) - keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) - - default_keys_by_subkeys = cls.prepare_inner_keys(default_key_values) - - for key, value in default_keys_by_subkeys.items(): - keys_by_subkey[key] = value - - return keys_by_subkey - - def _dict_to_subkeys_list(self, subdict, pre_keys=None): - if pre_keys is None: - pre_keys = [] - output = [] - for key in subdict: - value = subdict[key] - result = list(pre_keys) - result.append(key) - if isinstance(value, dict): - for item in self._dict_to_subkeys_list(value, result): - output.append(item) - else: - output.append(result) - return output - - def _keys_to_dicts(self, key_list, value): - if not key_list: - return None - if len(key_list) == 1: - return {key_list[0]: value} - return {key_list[0]: self._keys_to_dicts(key_list[1:], value)} - - def _rootless_path(self, result, final_data): - used_values = result.used_values - missing_keys = result.missing_keys - template = result.template - invalid_types = result.invalid_types - if ( - "root" not in used_values - or "root" in missing_keys - or "{root" not in template - ): - return - - for invalid_type in invalid_types: - if "root" in invalid_type: - return - - root_keys = self._dict_to_subkeys_list({"root": used_values["root"]}) - if not root_keys: - return - - output = str(result) - for used_root_keys in root_keys: - if not used_root_keys: - continue - - used_value = used_values - root_key = None - for key in used_root_keys: - used_value = used_value[key] - if root_key is None: - root_key = key - else: - root_key += "[{}]".format(key) - - root_key = "{" + root_key + "}" - output = output.replace(str(used_value), root_key) - - return output - - def format(self, data, strict=True): - copy_data = copy.deepcopy(data) - roots = self.roots - if roots: - copy_data["root"] = roots - result = super(AnatomyTemplates, self).format(copy_data) - result.strict = strict - return result - - def format_all(self, in_data, only_keys=True): - """ Solves templates based on entered data. - - Args: - data (dict): Containing keys to be filled into template. - - Returns: - TemplatesResultDict: Output `TemplateResult` have `strict` - attribute set to False so accessing unfilled keys in templates - won't raise any exceptions. - """ - return self.format(in_data, strict=False) - - -class RootItem(FormatObject): - """Represents one item or roots. - - Holds raw data of root item specification. Raw data contain value - for each platform, but current platform value is used when object - is used for formatting of template. - - Args: - root_raw_data (dict): Dictionary containing root values by platform - names. ["windows", "linux" and "darwin"] - name (str, optional): Root name which is representing. Used with - multi root setup otherwise None value is expected. - parent_keys (list, optional): All dictionary parent keys. Values of - `parent_keys` are used for get full key which RootItem is - representing. Used for replacing root value in path with - formattable key. e.g. parent_keys == ["work"] -> {root[work]} - parent (object, optional): It is expected to be `Roots` object. - Value of `parent` won't affect code logic much. 
- """ - - def __init__( - self, root_raw_data, name=None, parent_keys=None, parent=None - ): - lowered_platform_keys = {} - for key, value in root_raw_data.items(): - lowered_platform_keys[key.lower()] = value - self.raw_data = lowered_platform_keys - self.cleaned_data = self._clean_roots(lowered_platform_keys) - self.name = name - self.parent_keys = parent_keys or [] - self.parent = parent - - self.available_platforms = list(lowered_platform_keys.keys()) - self.value = lowered_platform_keys.get(platform.system().lower()) - self.clean_value = self.clean_root(self.value) - - def __format__(self, *args, **kwargs): - return self.value.__format__(*args, **kwargs) - - def __str__(self): - return str(self.value) - - def __repr__(self): - return self.__str__() - - def __getitem__(self, key): - if isinstance(key, numbers.Number): - return self.value[key] - - additional_info = "" - if self.parent and self.parent.project_name: - additional_info += " for project \"{}\"".format( - self.parent.project_name - ) - - raise AssertionError( - "Root key \"{}\" is missing{}.".format( - key, additional_info - ) - ) - - def full_key(self): - """Full key value for dictionary formatting in template. - - Returns: - str: Return full replacement key for formatting. This helps when - multiple roots are set. In that case e.g. `"root[work]"` is - returned. - """ - if not self.name: - return "root" - - joined_parent_keys = "".join( - ["[{}]".format(key) for key in self.parent_keys] - ) - return "root{}".format(joined_parent_keys) - - def clean_path(self, path): - """Just replace backslashes with forward slashes.""" - return str(path).replace("\\", "/") - - def clean_root(self, root): - """Makes sure root value does not end with slash.""" - if root: - root = self.clean_path(root) - while root.endswith("/"): - root = root[:-1] - return root - - def _clean_roots(self, raw_data): - """Clean all values of raw root item values.""" - cleaned = {} - for key, value in raw_data.items(): - cleaned[key] = self.clean_root(value) - return cleaned - - def path_remapper(self, path, dst_platform=None, src_platform=None): - """Remap path for specific platform. - - Args: - path (str): Source path which need to be remapped. - dst_platform (str, optional): Specify destination platform - for which remapping should happen. - src_platform (str, optional): Specify source platform. This is - recommended to not use and keep unset until you really want - to use specific platform. - roots (dict/RootItem/None, optional): It is possible to remap - path with different roots then instance where method was - called has. - - Returns: - str/None: When path does not contain known root then - None is returned else returns remapped path with "{root}" - or "{root[]}". 
- """ - cleaned_path = self.clean_path(path) - if dst_platform: - dst_root_clean = self.cleaned_data.get(dst_platform) - if not dst_root_clean: - key_part = "" - full_key = self.full_key() - if full_key != "root": - key_part += "\"{}\" ".format(full_key) - - log.warning( - "Root {}miss platform \"{}\" definition.".format( - key_part, dst_platform - ) - ) - return None - - if cleaned_path.startswith(dst_root_clean): - return cleaned_path - - if src_platform: - src_root_clean = self.cleaned_data.get(src_platform) - if src_root_clean is None: - log.warning( - "Root \"{}\" miss platform \"{}\" definition.".format( - self.full_key(), src_platform - ) - ) - return None - - if not cleaned_path.startswith(src_root_clean): - return None - - subpath = cleaned_path[len(src_root_clean):] - if dst_platform: - # `dst_root_clean` is used from upper condition - return dst_root_clean + subpath - return self.clean_value + subpath - - result, template = self.find_root_template_from_path(path) - if not result: - return None - - def parent_dict(keys, value): - if not keys: - return value - - key = keys.pop(0) - return {key: parent_dict(keys, value)} - - if dst_platform: - format_value = parent_dict(list(self.parent_keys), dst_root_clean) - else: - format_value = parent_dict(list(self.parent_keys), self.value) - - return template.format(**{"root": format_value}) - - def find_root_template_from_path(self, path): - """Replaces known root value with formattable key in path. - - All platform values are checked for this replacement. - - Args: - path (str): Path where root value should be found. - - Returns: - tuple: Tuple contain 2 values: `success` (bool) and `path` (str). - When success it True then path should contain replaced root - value with formattable key. - - Example: - When input path is:: - "C:/windows/path/root/projects/my_project/file.ext" - - And raw data of item looks like:: - { - "windows": "C:/windows/path/root", - "linux": "/mount/root" - } - - Output will be:: - (True, "{root}/projects/my_project/file.ext") - - If any of raw data value wouldn't match path's root output is:: - (False, "C:/windows/path/root/projects/my_project/file.ext") - """ - result = False - output = str(path) - - root_paths = list(self.cleaned_data.values()) - mod_path = self.clean_path(path) - for root_path in root_paths: - # Skip empty paths - if not root_path: - continue - - if mod_path.startswith(root_path): - result = True - replacement = "{" + self.full_key() + "}" - output = replacement + mod_path[len(root_path):] - break - - return (result, output) - - -class Roots: - """Object which should be used for formatting "root" key in templates. - - Args: - anatomy Anatomy: Anatomy object created for a specific project. - """ - - env_prefix = "OPENPYPE_PROJECT_ROOT" - roots_filename = "roots.json" - - def __init__(self, anatomy): - self.anatomy = anatomy - self.loaded_project = None - self._roots = None - - def __format__(self, *args, **kwargs): - return self.roots.__format__(*args, **kwargs) - - def __getitem__(self, key): - return self.roots[key] - - def reset(self): - """Reset current roots value.""" - self._roots = None - - def path_remapper( - self, path, dst_platform=None, src_platform=None, roots=None - ): - """Remap path for specific platform. - - Args: - path (str): Source path which need to be remapped. - dst_platform (str, optional): Specify destination platform - for which remapping should happen. - src_platform (str, optional): Specify source platform. 
This is - recommended to not use and keep unset until you really want - to use specific platform. - roots (dict/RootItem/None, optional): It is possible to remap - path with different roots then instance where method was - called has. - - Returns: - str/None: When path does not contain known root then - None is returned else returns remapped path with "{root}" - or "{root[]}". - """ - if roots is None: - roots = self.roots - - if roots is None: - raise ValueError("Roots are not set. Can't find path.") - - if "{root" in path: - path = path.format(**{"root": roots}) - # If `dst_platform` is not specified then return else continue. - if not dst_platform: - return path - - if isinstance(roots, RootItem): - return roots.path_remapper(path, dst_platform, src_platform) - - for _root in roots.values(): - result = self.path_remapper( - path, dst_platform, src_platform, _root - ) - if result is not None: - return result - - def find_root_template_from_path(self, path, roots=None): - """Find root value in entered path and replace it with formatting key. - - Args: - path (str): Source path where root will be searched. - roots (Roots/dict, optional): It is possible to use different - roots than instance where method was triggered has. - - Returns: - tuple: Output contains tuple with bool representing success as - first value and path with or without replaced root with - formatting key as second value. - - Raises: - ValueError: When roots are not entered and can't be loaded. - """ - if roots is None: - log.debug( - "Looking for matching root in path \"{}\".".format(path) - ) - roots = self.roots - - if roots is None: - raise ValueError("Roots are not set. Can't find path.") - - if isinstance(roots, RootItem): - return roots.find_root_template_from_path(path) - - for root_name, _root in roots.items(): - success, result = self.find_root_template_from_path(path, _root) - if success: - log.info("Found match in root \"{}\".".format(root_name)) - return success, result - - log.warning("No matching root was found in current setting.") - return (False, path) - - def set_root_environments(self): - """Set root environments for current project.""" - for key, value in self.root_environments().items(): - os.environ[key] = value - - def root_environments(self): - """Use root keys to create unique keys for environment variables. - - Concatenates prefix "OPENPYPE_ROOT" with root keys to create unique - keys. - - Returns: - dict: Result is `{(str): (str)}` dicitonary where key represents - unique key concatenated by keys and value is root value of - current platform root. 
- - Example: - With raw root values:: - "work": { - "windows": "P:/projects/work", - "linux": "/mnt/share/projects/work", - "darwin": "/darwin/path/work" - }, - "publish": { - "windows": "P:/projects/publish", - "linux": "/mnt/share/projects/publish", - "darwin": "/darwin/path/publish" - } - - Result on windows platform:: - { - "OPENPYPE_ROOT_WORK": "P:/projects/work", - "OPENPYPE_ROOT_PUBLISH": "P:/projects/publish" - } - - Short example when multiroot is not used:: - { - "OPENPYPE_ROOT": "P:/projects" - } - """ - return self._root_environments() - - def all_root_paths(self, roots=None): - """Return all paths for all roots of all platforms.""" - if roots is None: - roots = self.roots - - output = [] - if isinstance(roots, RootItem): - for value in roots.raw_data.values(): - output.append(value) - return output - - for _roots in roots.values(): - output.extend(self.all_root_paths(_roots)) - return output - - def _root_environments(self, keys=None, roots=None): - if not keys: - keys = [] - if roots is None: - roots = self.roots - - if isinstance(roots, RootItem): - key_items = [self.env_prefix] - for _key in keys: - key_items.append(_key.upper()) - - key = "_".join(key_items) - # Make sure key and value does not contain unicode - # - can happen in Python 2 hosts - return {str(key): str(roots.value)} - - output = {} - for _key, _value in roots.items(): - _keys = list(keys) - _keys.append(_key) - output.update(self._root_environments(_keys, _value)) - return output - - def root_environmets_fill_data(self, template=None): - """Environment variable values in dictionary for rootless path. - - Args: - template (str): Template for environment variable key fill. - By default is set to `"${}"`. - """ - if template is None: - template = "${}" - return self._root_environmets_fill_data(template) - - def _root_environmets_fill_data(self, template, keys=None, roots=None): - if keys is None and roots is None: - return { - "root": self._root_environmets_fill_data( - template, [], self.roots - ) - } - - if isinstance(roots, RootItem): - key_items = [Roots.env_prefix] - for _key in keys: - key_items.append(_key.upper()) - key = "_".join(key_items) - return template.format(key) - - output = {} - for key, value in roots.items(): - _keys = list(keys) - _keys.append(key) - output[key] = self._root_environmets_fill_data( - template, _keys, value - ) - return output - - @property - def project_name(self): - """Return project name which will be used for loading root values.""" - return self.anatomy.project_name - - @property - def roots(self): - """Property for filling "root" key in templates. - - This property returns roots for current project or default root values. - Warning: - Default roots value may cause issues when project use different - roots settings. That may happen when project use multiroot - templates but default roots miss their keys. - """ - if self.project_name != self.loaded_project: - self._roots = None - - if self._roots is None: - self._roots = self._discover() - self.loaded_project = self.project_name - return self._roots - - def _discover(self): - """ Loads current project's roots or default. - - Default roots are loaded if project override's does not contain roots. - - Returns: - `RootItem` or `dict` with multiple `RootItem`s when multiroot - setting is used. - """ - - return self._parse_dict(self.anatomy["roots"], parent=self) - - @staticmethod - def _parse_dict(data, key=None, parent_keys=None, parent=None): - """Parse roots raw data into RootItem or dictionary with RootItems. 
- - Converting raw roots data to `RootItem` helps to handle platform keys. - This method is recursive to be able handle multiroot setup and - is static to be able to load default roots without creating new object. - - Args: - data (dict): Should contain raw roots data to be parsed. - key (str, optional): Current root key. Set by recursion. - parent_keys (list): Parent dictionary keys. Set by recursion. - parent (Roots, optional): Parent object set in `RootItem` - helps to keep RootItem instance updated with `Roots` object. - - Returns: - `RootItem` or `dict` with multiple `RootItem`s when multiroot - setting is used. - """ - if not parent_keys: - parent_keys = [] - is_last = False - for value in data.values(): - if isinstance(value, six.string_types): - is_last = True - break - - if is_last: - return RootItem(data, key, parent_keys, parent=parent) - - output = {} - for _key, value in data.items(): - _parent_keys = list(parent_keys) - _parent_keys.append(_key) - output[_key] = Roots._parse_dict(value, _key, _parent_keys, parent) - return output diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 661975993b..adc629352e 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -14,6 +14,7 @@ from openpype.client import ( ) from openpype.client.operations import ( OperationsSession, + _create_or_convert_to_mongo_id, new_hero_version_doc, prepare_hero_version_update_data, prepare_representation_update_data, @@ -192,13 +193,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): op_session = OperationsSession() - entity_id = None - if old_version: - entity_id = old_version["_id"] new_hero_version = new_hero_version_doc( src_version_entity["_id"], - src_version_entity["parent"], - entity_id=entity_id + src_version_entity["parent"] ) if old_version: @@ -411,7 +408,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # Create representation else: - repre.pop("_id", None) + repre["_id"] = _create_or_convert_to_mongo_id(None) op_session.create_entity(project_name, "representation", repre) diff --git a/tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json b/tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json deleted file mode 100644 index fb798524bc..0000000000 --- a/tests/unit/openpype/lib/resources/_process_referenced_pipeline_result.json +++ /dev/null @@ -1,92 +0,0 @@ -[ - { - "_id": { - "$oid": "623c9d53db3f5046eb1ad5f4" - }, - "schema": "openpype:version-3.0", - "type": "version", - "parent": { - "$oid": "5f3e439a30a9464d6c181cbc" - }, - "name": 94, - "data": { - "families": [ - "workfile" - ], - "time": "20220324T173254Z", - "author": "petrk", - "source": "C:/projects_local/petr_test/assets/locations/Jungle/work/art/petr_test_Jungle_art_v009.psd", - "comment": "", - "machine": "LAPTOP-UB778LHG", - "fps": 25.0, - "intent": "-", - "inputLinks": [ - { - "type": "reference", - "id": { - "$oid": "618eb14f0a55a9c1591e913c" - }, - "linkedBy": "publish" - } - ] - }, - "outputs_recursive": [ - { - "_id": { - "$oid": "618eb14f0a55a9c1591e913c" - }, - "schema": "openpype:version-3.0", - "type": "version", - "parent": { - "$oid": "618e42a72ff49bd543bc1768" - }, - "name": 8, - "data": { - "families": [ - "image" - ], - "time": "20211112T192359Z", - "author": "petrk", - "source": "C:/projects_local/petr_test/assets/locations/Town/work/art/petr_test_Town_art_v005.psd", - "comment": "", - "machine": "LAPTOP-UB778LHG", - 
"fps": 25.0, - "intent": "-", - "inputLinks": [ - { - "type": "reference", - "id": { - "$oid": "5f3cd2d530a94638544837c3" - }, - "linkedBy": "publish" - } - ] - }, - "depth": 0 - }, - { - "_id": { - "$oid": "5f3cd2d530a94638544837c3" - }, - "schema": "pype:version-3.0", - "type": "version", - "parent": { - "$oid": "5f3a714030a9464bfc7d2382" - }, - "name": 7, - "data": { - "families": [ - "image" - ], - "time": "20200819T092032Z", - "author": "petrk", - "source": "/c/projects/petr_test/assets/characters/Hero/work/art/Hero_v019.psd", - "comment": "", - "machine": "LAPTOP-UB778LHG", - "fps": null - }, - "depth": 1 - } - ] - } -] \ No newline at end of file diff --git a/tests/unit/test_unzip.py b/tests/unit/test_unzip.py deleted file mode 100644 index 586fc49b6f..0000000000 --- a/tests/unit/test_unzip.py +++ /dev/null @@ -1,11 +0,0 @@ - -from openpype.hosts.harmony.api.lib import _ZipFile -from pathlib import Path - -def test_zip(): - source = "c:/Users/petrk/Downloads/fbb_fbb100_sh0020_workfileAnimation_v010.zip" - dest = "c:/projects/temp/unzipped_with_python_111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\\2222222222222222222222222222222222222222222222222222222222222222222222222222222222" - - dest = Path(dest) - with _ZipFile(source, "r") as zip_ref: - zip_ref.extractall(dest.as_posix()) \ No newline at end of file diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 diff --git a/vendor/instance.json b/vendor/instance.json deleted file mode 100644 index b1d623e85d..0000000000 --- a/vendor/instance.json +++ /dev/null @@ -1,1133 +0,0 @@ -{ - 'family': 'render', - 'name': 'renderLightingDefault', - 'label': 'renderLightingDefault - local', - 'version': 1, - 'time': '', - 'source': 'C:/projects/petr_test/sequences/seq01/shot01/work/lighting/petr_test_shot01_lighting_v001.aep', - 'subset': 'renderLightingDefault', - 'asset': 'shot01', - 'attachTo': False, - 'setMembers': '', - 'publish': True, - 'resolutionWidth': 1920.0, - 'resolutionHeight': 1080.0, - 'pixelAspect': 1, - 'frameStart': 0, - 'frameEnd': 0, - 'frameStep': 1, - 'handleStart': 0, - 'handleEnd': 0, - 'ignoreFrameHandleCheck': False, - 'renderer': 'aerender', - 'review': True, - 'priority': 50, - 'families': [ - 'render', - 'review', - 'ftrack', - 'slack' - ], - 'multipartExr': False, - 'convertToScanline': False, - 'tileRendering': False, - 'tilesX': 0, - 'tilesY': 0, - 'toBeRenderedOn': 'deadline', - 'deadlineSubmissionJob': None, - 'anatomyData': { - 'project': { - 'name': 'petr_test', - 'code': 'petr_test' - }, - 'asset': 'shot01', - 'parent': 'seq01', - 'hierarchy': 'sequences/seq01', - 'task': { - 'name': 'lighting', - 'type': 'Lighting', - 'short': 'lgt' - }, - 'username': 'petrk', - 'app': 'aftereffects', - 'd': '6', - 'dd': '06', - 'ddd': 'Thu', - 'dddd': 'Thursday', - 'm': '1', - 'mm': '01', - 'mmm': 'Jan', - 'mmmm': 'January', - 'yy': '22', - 'yyyy': '2022', - 'H': '18', - 'HH': '18', - 'h': '6', - 'hh': '06', - 'ht': 'PM', - 'M': '14', - 'MM': '14', - 'S': '23', - 'SS': '23', - 'version': 1, - 'subset': 'renderLightingDefault', - 'family': 'render', - 'intent': '-' - }, - 'outputDir': 'C:/projects/petr_test/sequences/seq01/shot01/work/lighting\\renders\\aftereffects\\petr_test_shot01_lighting_v001', - 'comp_name': '℗ 
renderLightingDefault', - 'comp_id': 1, - 'fps': 25, - 'projectEntity': { - '_id': ObjectId( - '5f2a6d2311e06a9818a1958b' - ), - 'name': 'petr_test', - 'created_d': datetime.datetime(2020, - 9, - 17, - 15, - 27, - 27, - 927000), - 'data': { - 'ftrackId': 'e5eda2bc-d682-11ea-afc1-92591a5b5e3e', - 'entityType': 'Project', - 'applications': [ - 'maya_2019', - 'photoshop_2021', - 'photoshop_2022', - 'harmony_17', - 'aftereffects_2022', - 'harmony_20', - 'nukestudio_12.2', - 'nukex_12.2', - 'hiero_12.2', - 'blender_2.93' - ], - 'library_project': True, - 'clipIn': 1, - 'resolutionWidth': 1920.0, - 'handleEnd': 0, - 'frameEnd': 1001, - 'resolutionHeight': 1080.0, - 'frameStart': 1001.0, - 'pixelAspect': 1.0, - 'fps': 25.0, - 'handleStart': 0, - 'clipOut': 1, - 'tools_env': [], - 'code': 'petr_test', - 'active': True - }, - 'type': 'project', - 'config': { - 'apps': [ - { - 'name': 'aftereffects/2022' - }, - { - 'name': 'maya/2019' - }, - { - 'name': 'hiero/12-2' - }, - { - 'name': 'photoshop/2021' - }, - { - 'name': 'nuke/12-2' - }, - { - 'name': 'photoshop/2022' - } - ], - 'tasks': { - 'Layout': { - 'short_name': 'lay' - }, - 'Setdress': { - 'short_name': 'dress' - }, - 'Previz': { - 'short_name': '' - }, - 'Generic': { - 'short_name': 'gener' - }, - 'Animation': { - 'short_name': 'anim' - }, - 'Modeling': { - 'short_name': 'mdl' - }, - 'Lookdev': { - 'short_name': 'look' - }, - 'FX': { - 'short_name': 'fx' - }, - 'Lighting': { - 'short_name': 'lgt' - }, - 'Compositing': { - 'short_name': 'comp' - }, - 'Tracking': { - 'short_name': '' - }, - 'Rigging': { - 'short_name': 'rig' - }, - 'Paint': { - 'short_name': 'paint' - }, - 'schedulle': { - 'short_name': '' - }, - 'Art': { - 'short_name': 'art' - }, - 'Texture': { - 'short_name': 'tex' - }, - 'Edit': { - 'short_name': 'edit' - } - }, - 'imageio': { - 'hiero': { - 'workfile': { - 'ocioConfigName': 'nuke-default', - 'ocioconfigpath': { - 'windows': [], - 'darwin': [], - 'linux': [] - }, - 'workingSpace': 'linear', - 'sixteenBitLut': 'sRGB', - 'eightBitLut': 'sRGB', - 'floatLut': 'linear', - 'logLut': 'Cineon', - 'viewerLut': 'sRGB', - 'thumbnailLut': 'sRGB' - }, - 'regexInputs': { - 'inputs': [ - { - 'regex': '[^-a-zA-Z0-9](plateRef).*(?=mp4)', - 'colorspace': 'sRGB' - } - ] - } - }, - 'nuke': { - 'viewer': { - 'viewerProcess': 'sRGB' - }, - 'baking': { - 'viewerProcess': 'rec709' - }, - 'workfile': { - 'colorManagement': 'Nuke', - 'OCIO_config': 'nuke-default', - 'customOCIOConfigPath': { - 'windows': [], - 'darwin': [], - 'linux': [] - }, - 'workingSpaceLUT': 'linear', - 'monitorLut': 'sRGB', - 'int8Lut': 'sRGB', - 'int16Lut': 'sRGB', - 'logLut': 'Cineon', - 'floatLut': 'linear' - }, - 'nodes': { - 'requiredNodes': [ - { - 'plugins': [ - 'CreateWriteRender' - ], - 'nukeNodeClass': 'Write', - 'knobs': [ - { - 'name': 'file_type', - 'value': 'exr' - }, - { - 'name': 'datatype', - 'value': '16 bit half' - }, - { - 'name': 'compression', - 'value': 'Zip (1 scanline)' - }, - { - 'name': 'autocrop', - 'value': 'True' - }, - { - 'name': 'tile_color', - 'value': '0xff0000ff' - }, - { - 'name': 'channels', - 'value': 'rgb' - }, - { - 'name': 'colorspace', - 'value': 'linear' - }, - { - 'name': 'create_directories', - 'value': 'True' - } - ] - }, - { - 'plugins': [ - 'CreateWritePrerender' - ], - 'nukeNodeClass': 'Write', - 'knobs': [ - { - 'name': 'file_type', - 'value': 'exr' - }, - { - 'name': 'datatype', - 'value': '16 bit half' - }, - { - 'name': 'compression', - 'value': 'Zip (1 scanline)' - }, - { - 'name': 'autocrop', - 'value': 'False' - }, - { 
- 'name': 'tile_color', - 'value': '0xadab1dff' - }, - { - 'name': 'channels', - 'value': 'rgb' - }, - { - 'name': 'colorspace', - 'value': 'linear' - }, - { - 'name': 'create_directories', - 'value': 'True' - } - ] - } - ], - 'customNodes': [] - }, - 'regexInputs': { - 'inputs': [ - { - 'regex': '[^-a-zA-Z0-9]beauty[^-a-zA-Z0-9]', - 'colorspace': 'linear' - } - ] - } - }, - 'maya': { - 'colorManagementPreference': { - 'configFilePath': { - 'windows': [], - 'darwin': [], - 'linux': [] - }, - 'renderSpace': 'scene-linear Rec 709/sRGB', - 'viewTransform': 'sRGB gamma' - } - } - }, - 'roots': { - 'work': { - 'windows': 'C:/projects', - 'darwin': '/Volumes/path', - 'linux': '/mnt/share/projects' - } - }, - 'templates': { - 'defaults': { - 'version_padding': 3, - 'version': 'v{version:0>{@version_padding}}', - 'frame_padding': 4, - 'frame': '{frame:0>{@frame_padding}}' - }, - 'work': { - 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task}', - 'file': '{project[code]}_{asset}_{task}_{@version}<_{comment}>.{ext}', - 'path': '{@folder}/{@file}' - }, - 'render': { - 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}', - 'file': '{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{ext}', - 'path': '{@folder}/{@file}' - }, - 'publish': { - 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}', - 'file': '{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}><_{udim}>.{ext}', - 'path': '{@folder}/{@file}', - 'thumbnail': '{thumbnail_root}/{project[name]}/{_id}_{thumbnail_type}.{ext}' - }, - 'hero': { - 'folder': '{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/hero', - 'file': '{project[code]}_{asset}_{subset}_hero<_{output}><.{frame}>.{ext}', - 'path': '{@folder}/{@file}' - }, - 'delivery': {}, - 'others': {} - } - }, - 'parent': None, - 'schema': 'avalon-core:project-2.0' - }, - 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', - 'frameStartHandle': 0, - 'frameEndHandle': 0, - 'byFrameStep': 1, - 'author': 'petrk', - 'expectedFiles': [ - 'C:/projects/petr_test/sequences/seq01/shot01/work/lighting\\renders\\aftereffects\\petr_test_shot01_lighting_v001\\shot01_renderLightingDefault_v001.mov' - ], - 'slack_channel_message_profiles': [ - { - 'channels': [ - 'test_integration' - ], - 'upload_thumbnail': True, - 'message': 'Test message' - } - ], - 'slack_token': 'xoxb-1494100953104-2176825439264-jGqvQzfq9uZJPmyX5Q4o4TnP', - 'representations': [ - { - 'frameStart': 0, - 'frameEnd': 0, - 'name': 'mov', - 'ext': 'mov', - 'files': ' renderLightingDefault.mov', - 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', - 'tags': [ - 'review' - ], - 'published_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', - 'publishedFiles': [ - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov' - ] - }, - { - 'name': 'thumbnail', - 'ext': 'jpg', - 'files': 'thumbnail.jpg', - 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', - 'tags': [ - 'thumbnail' - ], - 'published_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', - 'publishedFiles': [ - 
'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg' - ] - }, - { - 'frameStart': 0, - 'frameEnd': 0, - 'name': 'h264_mp4', - 'ext': 'mp4', - 'files': ' renderLightingDefault_h264burnin.mp4', - 'stagingDir': 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr', - 'tags': [ - 'review', - 'burnin', - 'ftrackreview' - ], - 'resolutionWidth': 1920, - 'resolutionHeight': 1080, - 'outputName': 'h264', - 'outputDef': { - 'ext': 'mp4', - 'tags': [ - 'burnin', - 'ftrackreview' - ], - 'burnins': [], - 'ffmpeg_args': { - 'video_filters': [], - 'audio_filters': [], - 'input': [ - '-apply_trc gamma22' - ], - 'output': [ - '-pix_fmt yuv420p', - '-crf 18', - '-intra' - ] - }, - 'filter': { - 'families': [ - 'render', - 'review', - 'ftrack' - ] - }, - 'overscan_crop': '', - 'overscan_color': [ - 0, - 0, - 0, - 255 - ], - 'width': 0, - 'height': 0, - 'bg_color': [ - 0, - 0, - 0, - 0 - ], - 'letter_box': { - 'enabled': False, - 'ratio': 0.0, - 'state': 'letterbox', - 'fill_color': [ - 0, - 0, - 0, - 255 - ], - 'line_thickness': 0, - 'line_color': [ - 255, - 0, - 0, - 255 - ] - }, - 'filename_suffix': 'h264' - }, - 'frameStartFtrack': 0, - 'frameEndFtrack': 0, - 'ffmpeg_cmd': 'C:\\Users\\petrk\\PycharmProjects\\Pype3.0\\pype\\vendor\\bin\\ffmpeg\\windows\\bin\\ffmpeg -apply_trc gamma22 -i "C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr\\ renderLightingDefault.mov" -pix_fmt yuv420p -crf 18 -intra -y "C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr\\ renderLightingDefault_h264.mp4"', - 'published_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', - 'publishedFiles': [ - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' - ] - } - ], - 'assetEntity': { - '_id': ObjectId( - '5fabee9730a94666449245b7' - ), - 'name': 'shot01', - 'data': { - 'ftrackId': '0c5f548c-2425-11eb-b203-628b111fac3c', - 'entityType': 'Shot', - 'clipIn': 1, - 'resolutionWidth': 1920.0, - 'handleEnd': 0.0, - 'frameEnd': 1001, - 'resolutionHeight': 1080.0, - 'frameStart': 1001.0, - 'pixelAspect': 1.0, - 'fps': 25.0, - 'handleStart': 0.0, - 'clipOut': 1, - 'tools_env': [], - 'avalon_mongo_id': '5fabee9730a94666449245b7', - 'parents': [ - 'sequences', - 'seq01' - ], - 'hierarchy': 'sequences\\seq01', - 'tasks': { - 'lighting': { - 'type': 'Lighting' - }, - 'animation': { - 'type': 'Animation' - }, - 'compositing': { - 'type': 'Compositing' - } - }, - 'visualParent': ObjectId( - '5fabee9730a94666449245b6' - ) - }, - 'type': 'asset', - 'parent': ObjectId( - '5f2a6d2311e06a9818a1958b' - ), - 'schema': 'pype:asset-3.0' - }, - 'subsetEntity': { - '_id': ObjectId( - '61d723a271e6fce378bd428c' - ), - 'schema': 'openpype:subset-3.0', - 'type': 'subset', - 'name': 'renderLightingDefault', - 'data': { - 'families': [ - 'render', - 'review', - 'ftrack', - 'slack' - ] - }, - 'parent': ObjectId( - '5fabee9730a94666449245b7' - ) - }, - 'versionEntity': { - '_id': ObjectId( - '61d723a371e6fce378bd428d' - ), - 'schema': 'openpype:version-3.0', - 'type': 'version', - 'parent': ObjectId( - '61d723a271e6fce378bd428c' - ), - 'name': 1, - 'data': { - 'families': [ - 'render', - 'render', - 'review', - 'ftrack', - 'slack' - ], - 'time': '20220106T181423Z', - 'author': 'petrk', - 'source': 
'C:/projects/petr_test/sequences/seq01/shot01/work/lighting/petr_test_shot01_lighting_v001.aep', - 'comment': '', - 'machine': 'LAPTOP-UB778LHG', - 'fps': 25.0, - 'intent': '-', - 'frameStart': 0, - 'frameEnd': 0, - 'handleEnd': 0, - 'handleStart': 0, - 'inputLinks': [ - OrderedDict( - [ - ( - 'type', - 'generative' - ), - ( - 'id', - ObjectId( - '600ab849c411725a626b8c35' - )), - ( - 'linkedBy', - 'publish' - ) - ] - ) - ] - } - }, - 'transfers': [ - [ - 'C:\\Users\\petrk\\AppData\\Local\\Temp\\tmpwyhr_ecr\\ renderLightingDefault_h264burnin.mp4', - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' - ] - ], - 'destination_list': [ - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' - ], - 'published_representations': { - ObjectId( - '61d723a371e6fce378bd428e' - ): { - 'representation': { - '_id': ObjectId( - '61d723a371e6fce378bd428e' - ), - 'schema': 'openpype:representation-2.0', - 'type': 'representation', - 'parent': ObjectId( - '61d723a371e6fce378bd428d' - ), - 'name': 'mov', - 'data': { - 'path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', - 'template': '{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{ext}' - }, - 'dependencies': [], - 'context': { - 'root': { - 'work': 'C:/projects' - }, - 'project': { - 'name': 'petr_test', - 'code': 'petr_test' - }, - 'hierarchy': 'sequences/seq01', - 'asset': 'shot01', - 'family': 'render', - 'subset': 'renderLightingDefault', - 'version': 1, - 'ext': 'mov', - 'task': { - 'name': 'lighting', - 'type': 'Lighting', - 'short': 'lgt' - }, - 'representation': 'mov', - 'username': 'petrk' - }, - 'files': [ - { - '_id': ObjectId( - '61d723a371e6fce378bd4291' - ), - 'path': '{root[work]}/petr_test/sequences/seq01/shot01/publish/render/renderLightingDefault/v001/petr_test_shot01_renderLightingDefault_v001.mov', - 'size': 1654788, - 'hash': 'petr_test_shot01_renderLightingDefault_v001,mov|1641489300,6230524|1654788', - 'sites': [ - { - 'name': 'studio', - 'created_dt': datetime.datetime(2022, - 1, - 6, - 18, - 15, - 15, - 264448) - } - ] - } - ] - }, - 'anatomy_data': { - 'project': { - 'name': 'petr_test', - 'code': 'petr_test' - }, - 'asset': 'shot01', - 'parent': 'seq01', - 'hierarchy': 'sequences/seq01', - 'task': { - 'name': 'lighting', - 'type': 'Lighting', - 'short': 'lgt' - }, - 'username': 'petrk', - 'app': 'aftereffects', - 'd': '6', - 'dd': '06', - 'ddd': 'Thu', - 'dddd': 'Thursday', - 'm': '1', - 'mm': '01', - 'mmm': 'Jan', - 'mmmm': 'January', - 'yy': '22', - 'yyyy': '2022', - 'H': '18', - 'HH': '18', - 'h': '6', - 'hh': '06', - 'ht': 'PM', - 'M': '14', - 'MM': '14', - 'S': '23', - 'SS': '23', - 'version': 1, - 'subset': 'renderLightingDefault', - 'family': 'render', - 'intent': '-', - 'representation': 'mov', - 'ext': 'mov' - }, - 'published_files': [ - 
'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov' - ] - }, - ObjectId( - '61d723a371e6fce378bd4292' - ): { - 'representation': { - '_id': ObjectId( - '61d723a371e6fce378bd4292' - ), - 'schema': 'openpype:representation-2.0', - 'type': 'representation', - 'parent': ObjectId( - '61d723a371e6fce378bd428d' - ), - 'name': 'thumbnail', - 'data': { - 'path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', - 'template': '{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{ext}' - }, - 'dependencies': [], - 'context': { - 'root': { - 'work': 'C:/projects' - }, - 'project': { - 'name': 'petr_test', - 'code': 'petr_test' - }, - 'hierarchy': 'sequences/seq01', - 'asset': 'shot01', - 'family': 'render', - 'subset': 'renderLightingDefault', - 'version': 1, - 'ext': 'jpg', - 'task': { - 'name': 'lighting', - 'type': 'Lighting', - 'short': 'lgt' - }, - 'representation': 'jpg', - 'username': 'petrk' - }, - 'files': [ - { - '_id': ObjectId( - '61d723a371e6fce378bd4295' - ), - 'path': '{root[work]}/petr_test/sequences/seq01/shot01/publish/render/renderLightingDefault/v001/petr_test_shot01_renderLightingDefault_v001.jpg', - 'size': 871, - 'hash': 'petr_test_shot01_renderLightingDefault_v001,jpg|1641489301,1720147|871', - 'sites': [ - { - 'name': 'studio', - 'created_dt': datetime.datetime(2022, - 1, - 6, - 18, - 15, - 15, - 825446) - } - ] - } - ] - }, - 'anatomy_data': { - 'project': { - 'name': 'petr_test', - 'code': 'petr_test' - }, - 'asset': 'shot01', - 'parent': 'seq01', - 'hierarchy': 'sequences/seq01', - 'task': { - 'name': 'lighting', - 'type': 'Lighting', - 'short': 'lgt' - }, - 'username': 'petrk', - 'app': 'aftereffects', - 'd': '6', - 'dd': '06', - 'ddd': 'Thu', - 'dddd': 'Thursday', - 'm': '1', - 'mm': '01', - 'mmm': 'Jan', - 'mmmm': 'January', - 'yy': '22', - 'yyyy': '2022', - 'H': '18', - 'HH': '18', - 'h': '6', - 'hh': '06', - 'ht': 'PM', - 'M': '14', - 'MM': '14', - 'S': '23', - 'SS': '23', - 'version': 1, - 'subset': 'renderLightingDefault', - 'family': 'render', - 'intent': '-', - 'representation': 'jpg', - 'ext': 'jpg' - }, - 'published_files': [ - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg' - ] - }, - ObjectId( - '61d723a471e6fce378bd4296' - ): { - 'representation': { - '_id': ObjectId( - '61d723a471e6fce378bd4296' - ), - 'schema': 'openpype:representation-2.0', - 'type': 'representation', - 'parent': ObjectId( - '61d723a371e6fce378bd428d' - ), - 'name': 'h264_mp4', - 'data': { - 'path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', - 'template': '{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{ext}' - }, - 'dependencies': [], - 'context': { - 'root': { - 'work': 'C:/projects' - }, - 'project': { - 'name': 'petr_test', - 'code': 'petr_test' - }, - 'hierarchy': 'sequences/seq01', - 'asset': 'shot01', - 'family': 'render', - 'subset': 'renderLightingDefault', - 'version': 1, - 'output': 'h264', - 'ext': 'mp4', - 'task': { - 'name': 'lighting', - 
'type': 'Lighting', - 'short': 'lgt' - }, - 'representation': 'mp4', - 'username': 'petrk' - }, - 'files': [ - { - '_id': ObjectId( - '61d723a471e6fce378bd4299' - ), - 'path': '{root[work]}/petr_test/sequences/seq01/shot01/publish/render/renderLightingDefault/v001/petr_test_shot01_renderLightingDefault_v001_h264.mp4', - 'size': 10227, - 'hash': 'petr_test_shot01_renderLightingDefault_v001_h264,mp4|1641489313,659368|10227', - 'sites': [ - { - 'name': 'studio', - 'created_dt': datetime.datetime(2022, - 1, - 6, - 18, - 15, - 16, - 53445) - } - ] - } - ] - }, - 'anatomy_data': { - 'project': { - 'name': 'petr_test', - 'code': 'petr_test' - }, - 'asset': 'shot01', - 'parent': 'seq01', - 'hierarchy': 'sequences/seq01', - 'task': { - 'name': 'lighting', - 'type': 'Lighting', - 'short': 'lgt' - }, - 'username': 'petrk', - 'app': 'aftereffects', - 'd': '6', - 'dd': '06', - 'ddd': 'Thu', - 'dddd': 'Thursday', - 'm': '1', - 'mm': '01', - 'mmm': 'Jan', - 'mmmm': 'January', - 'yy': '22', - 'yyyy': '2022', - 'H': '18', - 'HH': '18', - 'h': '6', - 'hh': '06', - 'ht': 'PM', - 'M': '14', - 'MM': '14', - 'S': '23', - 'SS': '23', - 'version': 1, - 'subset': 'renderLightingDefault', - 'family': 'render', - 'intent': '-', - 'resolution_width': 1920, - 'resolution_height': 1080, - 'fps': 25, - 'output': 'h264', - 'representation': 'mp4', - 'ext': 'mp4' - }, - 'published_files': [ - 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4' - ] - } - }, - 'ftrackComponentsList': [ - { - 'assettype_data': { - 'short': 'render' - }, - 'asset_data': { - 'name': 'renderLightingDefault' - }, - 'assetversion_data': { - 'version': 1 - }, - 'component_overwrite': False, - 'thumbnail': True, - 'component_data': { - 'name': 'thumbnail' - }, - 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', - 'component_location': , - 'component': - }, - { - 'assettype_data': { - 'short': 'render' - }, - 'asset_data': { - 'name': 'renderLightingDefault' - }, - 'assetversion_data': { - 'version': 1 - }, - 'component_overwrite': False, - 'thumbnail': False, - 'component_data': { - 'name': 'ftrackreview-mp4', - 'metadata': { - 'ftr_meta': '{"frameIn": 0, "frameOut": 1, "frameRate": 25.0}' - } - }, - 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', - 'component_location': , - 'component': - }, - { - 'assettype_data': { - 'short': 'render' - }, - 'asset_data': { - 'name': 'renderLightingDefault' - }, - 'assetversion_data': { - 'version': 1 - }, - 'component_overwrite': False, - 'thumbnail': False, - 'component_data': { - 'name': 'thumbnail_src' - }, - 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.jpg', - 'component_location': , - 'component': - }, - { - 'assettype_data': { - 'short': 'render' - }, - 'asset_data': { - 'name': 'renderLightingDefault' - }, - 'assetversion_data': { - 'version': 1 - }, - 'component_overwrite': False, - 'thumbnail': False, - 'component_data': { - 'name': 'ftrackreview-mp4_src', - 'metadata': { - 'ftr_meta': '{"frameIn": 0, "frameOut": 1, "frameRate": 25.0}' - } - }, - 'component_path': 
'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001_h264.mp4', - 'component_location': , - 'component': - }, - { - 'assettype_data': { - 'short': 'render' - }, - 'asset_data': { - 'name': 'renderLightingDefault' - }, - 'assetversion_data': { - 'version': 1 - }, - 'component_overwrite': False, - 'thumbnail': False, - 'component_data': { - 'name': 'mov' - }, - 'component_path': 'C:\\projects\\petr_test\\sequences\\seq01\\shot01\\publish\\render\\renderLightingDefault\\v001\\petr_test_shot01_renderLightingDefault_v001.mov', - 'component_location': , - 'component': - } - ], - 'ftrackIntegratedAssetVersions': [ - - ] -} \ No newline at end of file diff --git a/vendor/response.json b/vendor/response.json deleted file mode 100644 index 26a4fae2fd..0000000000 --- a/vendor/response.json +++ /dev/null @@ -1 +0,0 @@ -{status: 200, headers: {'date': 'Tue, 11 Jan 2022 11:08:57 GMT', 'server': 'Apache', 'x-powered-by': 'HHVM/4.128.0', 'access-control-allow-origin': '*', 'referrer-policy': 'no-referrer', 'x-slack-backend': 'r', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'access-control-allow-headers': 'slack-route, x-slack-version-ts, x-b3-traceid, x-b3-spanid, x-b3-parentspanid, x-b3-sampled, x-b3-flags', 'access-control-expose-headers': 'x-slack-req-id, retry-after', 'x-oauth-scopes': 'chat:write,chat:write.public,files:write,chat:write.customize', 'x-accepted-oauth-scopes': 'chat:write', 'expires': 'Mon, 26 Jul 1997 05:00:00 GMT', 'cache-control': 'private, no-cache, no-store, must-revalidate', 'pragma': 'no-cache', 'x-xss-protection': '0', 'x-content-type-options': 'nosniff', 'x-slack-req-id': '9d1d11399a44c8751f89bb4dcd2b91fb', 'vary': 'Accept-Encoding', 'content-type': 'application/json; charset=utf-8', 'x-envoy-upstream-service-time': '52', 'x-backend': 'main_normal main_bedrock_normal_with_overflow main_canary_with_overflow main_bedrock_canary_with_overflow main_control_with_overflow main_bedrock_control_with_overflow', 'x-server': 'slack-www-hhvm-main-iad-qno3', 'x-slack-shared-secret-outcome': 'no-match', 'via': 'envoy-www-iad-omsy, envoy-edge-iad-bgfx', 'x-edge-backend': 'envoy-www', 'x-slack-edge-shared-secret-outcome': 'no-match', 'connection': 'close', 'transfer-encoding': 'chunked'}, body: {"ok":true,"channel":"C024DUFM8MB","ts":"1641899337.001100","message":{"type":"message","subtype":"bot_message","text":"RenderCompositingDefault published for Jungle\n\nHere should be link to review C:\\projects\\petr_test\\assets\\locations\\Jungle\\publish\\render\\renderCompositingDefault\\v253\\petr_test_Jungle_renderCompositingDefault_v253_h264.mp4\n\n Attachment links: \n\n","ts":"1641899337.001100","username":"OpenPypeNotifier","icons":{"image_48":"https:\/\/s3-us-west-2.amazonaws.com\/slack-files2\/bot_icons\/2022-01-07\/2934353684385_48.png"},"bot_id":"B024H0P0CAE"}} \ No newline at end of file diff --git a/vendor/temp.json b/vendor/temp.json deleted file mode 100644 index 089174d26c..0000000000 --- a/vendor/temp.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - project(name: "demo_Big_Episodic") { - representations( - first: 0, - after: 0, - localSite: "local", - remoteSite: "local" - ) { - edges { - node { - id - name - # Sorry: totalSize is not implemented, but it will be - # totalSize - fileCount - # overal sync state - localState{ - status - size - timestamp - } - remoteState{ - status - size - timestamp - } - # crawl to the top to get parent info - version { - version - subset 
{ - family - name - folder { - name - } - } - } - } - } - pageInfo { - hasNextPage - endCursor - } - } - } -} \ No newline at end of file From e0f46635f5045ead05226504ed48b9f972145060 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 17:32:18 +0200 Subject: [PATCH 1491/2550] OP-4181 - clean up after review comments --- openpype/plugins/publish/integrate_hero_version.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index adc629352e..661975993b 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -14,7 +14,6 @@ from openpype.client import ( ) from openpype.client.operations import ( OperationsSession, - _create_or_convert_to_mongo_id, new_hero_version_doc, prepare_hero_version_update_data, prepare_representation_update_data, @@ -193,9 +192,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): op_session = OperationsSession() + entity_id = None + if old_version: + entity_id = old_version["_id"] new_hero_version = new_hero_version_doc( src_version_entity["_id"], - src_version_entity["parent"] + src_version_entity["parent"], + entity_id=entity_id ) if old_version: @@ -408,7 +411,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # Create representation else: - repre["_id"] = _create_or_convert_to_mongo_id(None) + repre.pop("_id", None) op_session.create_entity(project_name, "representation", repre) From f6f5e77f3c4f8b384feba08cfb5713829192e532 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 17:34:46 +0200 Subject: [PATCH 1492/2550] OP-4181 - clean up after review comments - missed line --- openpype/plugins/publish/integrate_hero_version.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 661975993b..84960ec609 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -443,8 +443,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repre["_id"], changes) else: - repre["old_id"] = repre["_id"] - repre["_id"] = _create_or_convert_to_mongo_id(None) + repre["old_id"] = repre.pop("_id") repre["type"] = "archived_representation" op_session.create_entity(project_name, "representation", repre) From b4644457501ee22d1f37ea0217835662365b19f3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 17:38:40 +0200 Subject: [PATCH 1493/2550] OP-4181 - fix - wrong entity type --- openpype/plugins/publish/integrate_hero_version.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 84960ec609..6d553a7a3c 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -445,7 +445,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): else: repre["old_id"] = repre.pop("_id") repre["type"] = "archived_representation" - op_session.create_entity(project_name, "representation", + op_session.create_entity(project_name, + "archived_representation", repre) op_session.commit() From b198e922b250fbfe5957be7b6e7c7d3b95a5517d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 6 Oct 2022 17:57:24 +0200 Subject: [PATCH 1494/2550] OP-4181 - fix - wrong entity type --- 
openpype/plugins/publish/integrate_hero_version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 6d553a7a3c..398a0226df 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -389,7 +389,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): old_repre, repre) op_session.update_entity( project_name, - "representation", + old_repre["type"], old_repre["_id"], update_data ) @@ -404,7 +404,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): archived_repre, repre) op_session.update_entity( project_name, - "representation", + old_repre["type"], archived_repre["_id"], update_data ) From 5b75511a6064c14c7532edddf50266d638616526 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 6 Oct 2022 18:53:24 +0200 Subject: [PATCH 1495/2550] traypublisher has it's controller --- openpype/tools/traypublisher/window.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index b1ff3c7383..be9f12e269 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -15,6 +15,7 @@ import appdirs from openpype.lib import JSONSettingRegistry from openpype.pipeline import install_host from openpype.hosts.traypublisher.api import TrayPublisherHost +from openpype.tools.publisher.control_qt import QtPublisherController from openpype.tools.publisher.window import PublisherWindow from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.constants import PROJECT_NAME_ROLE @@ -24,6 +25,12 @@ from openpype.tools.utils.models import ( ) +class TrayPublisherController(QtPublisherController): + @property + def host(self): + return self._host + + class TrayPublisherRegistry(JSONSettingRegistry): """Class handling OpenPype general settings registry. 
@@ -179,7 +186,10 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): class TrayPublishWindow(PublisherWindow): def __init__(self, *args, **kwargs): - super(TrayPublishWindow, self).__init__(reset_on_show=False) + controller = TrayPublisherController() + super(TrayPublishWindow, self).__init__( + controller=controller, reset_on_show=False + ) flags = self.windowFlags() # Disable always on top hint From 054b87bd687d41f2bc9e7fa7387c389c10da3112 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:20:10 +0200 Subject: [PATCH 1496/2550] fix event system access in qt controller --- openpype/tools/publisher/control_qt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index add7c4c7e4..8515a7a843 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -67,10 +67,10 @@ class QtPublisherController(PublisherController): super(QtPublisherController, self).__init__(*args, **kwargs) - self._event_system.add_callback( + self.event_system.add_callback( "publish.process.started", self._qt_on_publish_start ) - self._event_system.add_callback( + self.event_system.add_callback( "publish.process.stopped", self._qt_on_publish_stop ) From f13d2bc9653726dce4c3db4375b9c288d7e79bb6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:20:46 +0200 Subject: [PATCH 1497/2550] implemented helper publish plugins proxy to handle actions for plugins --- openpype/tools/publisher/control.py | 89 +++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 57098f8734..c084cba381 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -288,6 +288,95 @@ class PublishReport: return output +class PublishPluginsProxy: + """Wrapper around publish plugin. + + Prepare mapping for publish plugins and actions. Also can create + serializable data for plugin actions so UI don't have to have access to + them. + + This object is created in process where publishing is actually running. + + Notes: + Actions have id but single action can be used on multiple plugins so + to run an action is needed combination of plugin and action. + + Args: + plugins [List[pyblish.api.Plugin]]: Discovered plugins that will be + processed. + """ + + def __init__(self, plugins): + plugins_by_id = {} + actions_by_id = {} + action_ids_by_plugin_id = {} + for plugin in plugins: + plugin_id = plugin.id + plugins_by_id[plugin_id] = plugin + + action_ids = set() + action_ids_by_plugin_id[plugin_id] = action_ids + + actions = getattr(plugin, "actions", None) or [] + for action in actions: + action_id = action.id + action_ids.add(action_id) + actions_by_id[action_id] = action + + self._plugins_by_id = plugins_by_id + self._actions_by_id = actions_by_id + self._action_ids_by_plugin_id = action_ids_by_plugin_id + + def get_action(self, action_id): + return self._actions_by_id[action_id] + + def get_plugin(self, plugin_id): + return self._plugins_by_id[plugin_id] + + def get_plugin_id(self, plugin): + """Get id of plugin based on plugin object. + + It's used for validation errors report. + + Args: + plugin (pyblish.api.Plugin): Publish plugin for which id should be + returned. + + Returns: + str: Plugin id. + """ + + return plugin.id + + def get_plugin_action_items(self, plugin_id): + """Get plugin action items for plugin by it's id. 
+ + Args: + plugin_id (str): Publish plugin id. + + Returns: + List[PublishPluginActionItem]: Items with information about publish + plugin actions. + """ + + return [ + self._create_action_item(self._actions_by_id[action_id], plugin_id) + for action_id in self._action_ids_by_plugin_id[plugin_id] + ] + + def _create_action_item(self, action, plugin_id): + label = action.label or action.__name__ + icon = getattr(action, "icon", None) + return PublishPluginActionItem( + action.id, + plugin_id, + action.active, + action.on, + label, + icon + ) + + @six.add_metaclass(ABCMeta) From a3d16def9b42328cfe89a03dbed9dc8ea1a51f9b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:23:01 +0200 Subject: [PATCH 1498/2550] created objects for controller <-> ui communiction related to plugin actions and validation errors --- openpype/tools/publisher/control.py | 236 ++++++++++++++++++++++++++++ 1 file changed, 236 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index c084cba381..484d90fc16 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -377,6 +377,242 @@ class PublishPluginsProxy: ) +class PublishPluginActionItem: + """Representation of publish plugin action. + + Data driven object which is used as proxy for controller and UI. + + Args: + action_id (str): Action id. + plugin_id (str): Plugin id. + active (bool): Action is active. + on_filter (str): Actions have 'on' attribte which define when can be + action triggered (e.g. 'all', 'failed', ...). + label (str): Action's label. + icon (Union[str, None]) Action's icon. + """ + + def __init__(self, action_id, plugin_id, active, on_filter, label, icon): + self.action_id = action_id + self.plugin_id = plugin_id + self.active = active + self.on_filter = on_filter + self.label = label + self.icon = icon + + def to_data(self): + """Serialize object to dictionary. + + Returns: + Dict[str, Union[str,bool,None]]: Serialized object. + """ + + return { + "action_id": self.action_id, + "plugin_id": self.plugin_id, + "active": self.active, + "on_filter": self.on_filter, + "label": self.label, + "icon": self.icon + } + + @classmethod + def from_data(cls, data): + """Create object from data. + + Args: + data (Dict[str, Union[str,bool,None]]): Data used to recreate + object. + + Returns: + PublishPluginActionItem: Object created using data. + """ + + return cls(**data) + + +class ValidationErrorItem: + """Data driven validation error item. + + Prepared data container with information about validation error and it's + source plugin. + + Can be converted to raw data and recreated should be used for controller + and UI connection. + + Args: + instance_id (str): Id of pyblish instance to which is validation error + connected. + instance_label (str): Prepared instance label. + plugin_id (str): Id of pyblish Plugin which triggered the validation + error. Id is generated using 'PublishPluginsProxy'. + """ + + def __init__( + self, + instance_id, + instance_label, + plugin_id, + context_validation, + title, + description, + detail, + ): + self.instance_id = instance_id + self.instance_label = instance_label + self.plugin_id = plugin_id + self.context_validation = context_validation + self.title = title + self.description = description + self.detail = detail + + def to_data(self): + """Serialize object to dictionary. + + Returns: + Dict[str, Union[str, bool, None]]: Serialized object data. 
+ """ + + return { + "instance_id": self.instance_id, + "instance_label": self.instance_label, + "plugin_id": self.plugin_id, + "context_validation": self.context_validation, + "title": self.title, + "description": self.description, + "detail": self.detail, + } + + @classmethod + def from_result(cls, plugin_id, error, instance): + """Create new object based on resukt from controller. + + Returns: + ValidationErrorItem: New object with filled data. + """ + + instance_label = None + instance_id = None + if instance is not None: + instance_label = ( + instance.data.get("label") or instance.data.get("name") + ) + instance_id = instance.id + + return cls( + instance_id, + instance_label, + plugin_id, + instance is None, + error.title, + error.description, + error.detail, + ) + + @classmethod + def from_data(cls, data): + return cls(**data) + + +class PublishValidationErrorsReport: + """Publish validation errors report that can be parsed to raw data. + + Args: + error_items (List[ValidationErrorItem]): List of validation errors. + plugin_action_items (Dict[str, PublishPluginActionItem]): Action items + by plugin id. + """ + + def __init__(self, error_items, plugin_action_items): + self._error_items = error_items + self._plugin_action_items = plugin_action_items + + def __iter__(self): + for item in self._error_items: + yield item + + def group_items_by_title(self): + """Group errors by plugin and their titles. + + Items are grouped by plugin and title -> same title from different + plugin is different item. Items are ordered by plugin order. + + Returns: + List[Dict[str, Any]]: List where each item title, instance + information related to title and possible plugin actions. + """ + + ordered_plugin_ids = [] + error_items_by_plugin_id = collections.defaultdict(list) + for error_item in self._error_items: + plugin_id = error_item.plugin_id + if plugin_id not in ordered_plugin_ids: + ordered_plugin_ids.append(plugin_id) + error_items_by_plugin_id[plugin_id].append(error_item) + + grouped_error_items = [] + for plugin_id in ordered_plugin_ids: + plugin_action_items = self._plugin_action_items[plugin_id] + error_items = error_items_by_plugin_id[plugin_id] + + titles = [] + error_items_by_title = collections.defaultdict(list) + for error_item in error_items: + title = error_item.title + if title not in titles: + titles.append(error_item.title) + error_items_by_title[title].append(error_item) + + for title in titles: + grouped_error_items.append({ + "plugin_action_items": list(plugin_action_items), + "error_items": error_items_by_title[title], + "title": title + }) + return grouped_error_items + + def to_data(self): + """Serialize object to dictionary. + + Returns: + Dict[str, Any]: Serialized data. + """ + + return { + "error_items": [ + item.to_data() + for item in self._error_items + ], + "plugin_action_items": { + plugin_id: [ + action_item.to_data() + for action_item in action_items + ] + for plugin_id, action_items in self._plugin_action_items.items() + } + } + + @classmethod + def from_data(cls, data): + """Recreate object from data. + + Args: + data (dict[str, Any]): Data to recreate object. Can be created + using 'to_data' method. + + Returns: + PublishValidationErrorsReport: New object based on data. 
+ """ + + error_items = [ + ValidationErrorItem.from_data(error_item) + for error_item in data["error_items"] + ] + plugin_action_items = [ + PublishPluginActionItem.from_data(action_item) + for action_item in data["plugin_action_items"] + ] + return cls(error_items, plugin_action_items) @six.add_metaclass(ABCMeta) From a63854f2656a04e4f1643112e86e2d7a48dcc657 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:24:54 +0200 Subject: [PATCH 1499/2550] Created object to gather validation errors during publish processing --- openpype/tools/publisher/control.py | 65 +++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 484d90fc16..c28d7ab3c9 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -615,6 +615,71 @@ class PublishValidationErrorsReport: return cls(error_items, plugin_action_items) +class PublishValidationErrors: + """Object to keep track about validation errors by plugin.""" + + def __init__(self): + self._plugins_proxy = None + self._error_items = [] + self._plugin_action_items = {} + + def __bool__(self): + return self.has_errors + + @property + def has_errors(self): + """At least one error was added.""" + + return bool(self._error_items) + + def reset(self, plugins_proxy): + """Reset object to default state. + + Args: + plugins_proxy (PublishPluginsProxy): Proxy which store plugins, + actions by ids and create mapping of action ids by plugin ids. + """ + + self._plugins_proxy = plugins_proxy + self._error_items = [] + self._plugin_action_items = {} + + def create_report(self): + """Create report based on currently existing errors. + + Returns: + PublishValidationErrorsReport: Validation error report with all + error information and publish plugin action items. + """ + + return PublishValidationErrorsReport( + self._error_items, self._plugin_action_items + ) + + def add_error(self, plugin, error, instance): + """Add error from pyblish result. + + Args: + plugin (pyblish.api.Plugin): Plugin which triggered error. + error (ValidationException): Validation error. + instance (Union[pyblish.api.Instance, None]): Instance on which was + error raised or None if was raised on context. + """ + + # Make sure the cached report is cleared + plugin_id = self._plugins_proxy.get_plugin_id(plugin) + self._error_items.append( + ValidationErrorItem.from_result(plugin_id, error, instance) + ) + if plugin_id in self._plugin_action_items: + return + + plugin_actions = self._plugins_proxy.get_plugin_action_items( + plugin_id + ) + self._plugin_action_items[plugin_id] = plugin_actions + + @six.add_metaclass(ABCMeta) class AbstractPublisherController(object): """Publisher tool controller. From d90838b630a47b6055600b2b4175e14b6cba62f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:25:41 +0200 Subject: [PATCH 1500/2550] removed unused 'get_manual_creators_base_info' --- openpype/tools/publisher/control.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index c28d7ab3c9..bd4b6a738e 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -772,16 +772,6 @@ class AbstractPublisherController(object): pass - @abstractmethod - def get_manual_creators_base_info(self): - """Creators that can be selected and triggered by artist. - - Returns: - List[CreatorBaseInfo]: Base information about creator plugin. 
- """ - - pass - @abstractmethod def get_context_title(self): """Get context title for artist shown at the top of main window. From e53efc7aba85bbbe12d46a3c35351a2f16bf6d59 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:26:38 +0200 Subject: [PATCH 1501/2550] create plugins proxy in controller --- openpype/tools/publisher/control.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index bd4b6a738e..4fbf20492d 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -920,6 +920,8 @@ class PublisherController(AbstractPublisherController): self._host, dbcon, headless=headless, reset=False ) + self._publish_plugins_proxy = None + # pyblish.api.Context self._publish_context = None # Pyblish report @@ -1290,6 +1292,10 @@ class PublisherController(AbstractPublisherController): # - pop the key after first collector using it would be safest option? self._publish_context.data["create_context"] = self._create_context + self._publish_plugins_proxy = PublishPluginsProxy( + self._publish_plugins + ) + self._publish_report.reset(self._publish_context, self._create_context) self._publish_validation_errors = [] self._publish_current_plugin_validation_errors = None From 8996e27df1a4fbf92973c58973cc9881cc81e655 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:27:51 +0200 Subject: [PATCH 1502/2550] changed how validation errors are collected and worked with in UI --- openpype/tools/publisher/control.py | 27 +-- .../publisher/widgets/validations_widget.py | 192 ++++++++++-------- 2 files changed, 119 insertions(+), 100 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 4fbf20492d..3df8da62cb 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -927,9 +927,7 @@ class PublisherController(AbstractPublisherController): # Pyblish report self._publish_report = PublishReport(self) # Store exceptions of validation error - self._publish_validation_errors = [] - # Currently processing plugin errors - self._publish_current_plugin_validation_errors = None + self._publish_validation_errors = PublishValidationErrors() # Any other exception that happened during publishing self._publish_error = None # Publishing is in progress @@ -1273,7 +1271,7 @@ class PublisherController(AbstractPublisherController): return self._publish_report.get_report(self._publish_plugins) def get_validation_errors(self): - return self._publish_validation_errors + return self._publish_validation_errors.create_report() def _reset_publish(self): self._publish_is_running = False @@ -1297,8 +1295,7 @@ class PublisherController(AbstractPublisherController): ) self._publish_report.reset(self._publish_context, self._create_context) - self._publish_validation_errors = [] - self._publish_current_plugin_validation_errors = None + self._publish_validation_errors.reset(self._publish_plugins_proxy) self._publish_error = None self._publish_max_progress = len(self._publish_plugins) @@ -1488,19 +1485,11 @@ class PublisherController(AbstractPublisherController): yield MainThreadItem(self.stop_publish) def _add_validation_error(self, result): - if self._publish_current_plugin_validation_errors is None: - self._publish_current_plugin_validation_errors = { - "plugin": result["plugin"], - "errors": [] - } - self._publish_validation_errors.append( - self._publish_current_plugin_validation_errors - ) - - 
self._publish_current_plugin_validation_errors["errors"].append({ - "exception": result["error"], - "instance": result["instance"] - }) + self._publish_validation_errors.add_error( + result["plugin"], + result["error"], + result["instance"] + ) def _process_and_continue(self, plugin, instance): result = pyblish.plugin.process( diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index fd9410df98..48b7370eee 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -50,6 +50,7 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): Has toggle button to show/hide instances on which validation error happened if there is a list (Valdation error may happen on context). """ + selected = QtCore.Signal(int) instance_changed = QtCore.Signal(int) @@ -75,33 +76,33 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): title_frame_layout.addWidget(toggle_instance_btn, 0) instances_model = QtGui.QStandardItemModel() - error_info = error_info["error_info"] help_text_by_instance_id = {} - context_validation = False - if ( - not error_info - or (len(error_info) == 1 and error_info[0][0] is None) - ): - context_validation = True - toggle_instance_btn.setArrowType(QtCore.Qt.NoArrow) - description = self._prepare_description(error_info[0][1]) - help_text_by_instance_id[None] = description - else: - items = [] - for instance, exception in error_info: - label = instance.data.get("label") or instance.data.get("name") - item = QtGui.QStandardItem(label) - item.setFlags( - QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable - ) - item.setData(label, QtCore.Qt.ToolTipRole) - item.setData(instance.id, INSTANCE_ID_ROLE) - items.append(item) - description = self._prepare_description(exception) - help_text_by_instance_id[instance.id] = description - instances_model.invisibleRootItem().appendRows(items) + items = [] + context_validation = False + for error_item in error_info["error_items"]: + context_validation = error_item.context_validation + if context_validation: + toggle_instance_btn.setArrowType(QtCore.Qt.NoArrow) + description = self._prepare_description(error_item) + help_text_by_instance_id[None] = description + continue + + label = error_item.instance_label + item = QtGui.QStandardItem(label) + item.setFlags( + QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable + ) + item.setData(label, QtCore.Qt.ToolTipRole) + item.setData(error_item.instance_id, INSTANCE_ID_ROLE) + items.append(item) + description = self._prepare_description(error_item) + help_text_by_instance_id[error_item.instance_id] = description + + if items: + root_item = instances_model.invisibleRootItem() + root_item.appendRows(items) instances_view = ValidationErrorInstanceList(self) instances_view.setModel(instances_model) @@ -162,9 +163,19 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): def minimumSizeHint(self): return self.sizeHint() - def _prepare_description(self, exception): - dsc = exception.description - detail = exception.detail + def _prepare_description(self, error_item): + """Prepare description text for detail intput. + + Args: + error_item (ValidationErrorItem): Item which hold information about + validation error. + + Returns: + str: Prepared detailed description. + """ + + dsc = error_item.description + detail = error_item.detail if detail: dsc += "

    {}".format(detail) @@ -192,32 +203,51 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): @property def is_selected(self): - """Is widget marked a selected""" + """Is widget marked a selected. + + Returns: + bool: Item is selected or not. + """ + return self._selected @property def index(self): - """Widget's index set by parent.""" + """Widget's index set by parent. + + Returns: + int: Index of widget. + """ + return self._index def set_index(self, index): - """Set index of widget (called by parent).""" + """Set index of widget (called by parent). + + Args: + int: New index of widget. + """ + self._index = index def _change_style_property(self, selected): """Change style of widget based on selection.""" + value = "1" if selected else "" self._title_frame.setProperty("selected", value) self._title_frame.style().polish(self._title_frame) def set_selected(self, selected=None): """Change selected state of widget.""" + if selected is None: selected = not self._selected + # Clear instance view selection on deselect if not selected: self._instances_view.clearSelection() + # Skip if has same value if selected == self._selected: return @@ -255,18 +285,23 @@ class ActionButton(BaseClickableFrame): """Plugin's action callback button. Action may have label or icon or both. - """ - action_clicked = QtCore.Signal(str) - def __init__(self, action, parent): + Args: + plugin_action_item (PublishPluginActionItem): Action item that can be + triggered by it's id. + """ + + action_clicked = QtCore.Signal(str, str) + + def __init__(self, plugin_action_item, parent): super(ActionButton, self).__init__(parent) self.setObjectName("ValidationActionButton") - self.action = action + self.plugin_action_item = plugin_action_item - action_label = action.label or action.__name__ - action_icon = getattr(action, "icon", None) + action_label = plugin_action_item.label + action_icon = plugin_action_item.icon label_widget = QtWidgets.QLabel(action_label, self) icon_label = None if action_icon: @@ -284,7 +319,10 @@ class ActionButton(BaseClickableFrame): ) def _mouse_release_callback(self): - self.action_clicked.emit(self.action.id) + self.action_clicked.emit( + self.plugin_action_item.plugin_id, + self.plugin_action_item.action_id + ) class ValidateActionsWidget(QtWidgets.QFrame): @@ -292,6 +330,7 @@ class ValidateActionsWidget(QtWidgets.QFrame): Change actions based on selected validation error. """ + def __init__(self, controller, parent): super(ValidateActionsWidget, self).__init__(parent) @@ -304,10 +343,9 @@ class ValidateActionsWidget(QtWidgets.QFrame): layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(content_widget) - self.controller = controller + self._controller = controller self._content_widget = content_widget self._content_layout = content_layout - self._plugin = None self._actions_mapping = {} def clear(self): @@ -320,28 +358,34 @@ class ValidateActionsWidget(QtWidgets.QFrame): widget.deleteLater() self._actions_mapping = {} - def set_plugin(self, plugin): + def set_error_item(self, error_item): """Set selected plugin and show it's actions. Clears current actions from widget and recreate them from the plugin. + + Args: + Dict[str, Any]: Object holding error items, title and possible + actions to run. 
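+
+        A typical call, as made from the owning ValidationsWidget when an
+        error title is selected (sketch)::
+
+            self._actions_widget.set_error_item(self._error_info[index])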
""" + self.clear() - self._plugin = plugin - if not plugin: + + if not error_item: self.setVisible(False) return - actions = getattr(plugin, "actions", []) - for action in actions: - if not action.active: + plugin_action_items = error_item["plugin_action_items"] + for plugin_action_item in plugin_action_items: + if not plugin_action_item.active: continue - if action.on not in ("failed", "all"): + if plugin_action_item.on_filter not in ("failed", "all"): continue - self._actions_mapping[action.id] = action + action_id = plugin_action_item.action_id + self._actions_mapping[action_id] = plugin_action_item - action_btn = ActionButton(action, self._content_widget) + action_btn = ActionButton(plugin_action_item, self._content_widget) action_btn.action_clicked.connect(self._on_action_click) self._content_layout.addWidget(action_btn) @@ -351,9 +395,8 @@ class ValidateActionsWidget(QtWidgets.QFrame): else: self.setVisible(False) - def _on_action_click(self, action_id): - action = self._actions_mapping[action_id] - self.controller.run_action(self._plugin, action) + def _on_action_click(self, plugin_id, action_id): + self._controller.run_action(plugin_id, action_id) class VerticallScrollArea(QtWidgets.QScrollArea): @@ -365,6 +408,7 @@ class VerticallScrollArea(QtWidgets.QScrollArea): Resize if deferred by 100ms because at the moment of resize are not yet propagated sizes and visibility of scroll bars. """ + def __init__(self, *args, **kwargs): super(VerticallScrollArea, self).__init__(*args, **kwargs) @@ -576,45 +620,31 @@ class ValidationsWidget(QtWidgets.QFrame): self._errors_widget.setVisible(False) self._actions_widget.setVisible(False) - def set_errors(self, errors): - """Set errors into context and created titles.""" + def _set_errors(self, validation_error_report): + """Set errors into context and created titles. + + Args: + validation_error_report (PublishValidationErrorsReport): Report + with information about validation errors and publish plugin + actions. 
+ """ + self.clear() - if not errors: + if not validation_error_report: return self._top_label.setVisible(True) self._error_details_frame.setVisible(True) self._errors_widget.setVisible(True) - errors_by_title = [] - for plugin_info in errors: - titles = [] - error_info_by_title = {} - - for error_info in plugin_info["errors"]: - exception = error_info["exception"] - title = exception.title - if title not in titles: - titles.append(title) - error_info_by_title[title] = [] - error_info_by_title[title].append( - (error_info["instance"], exception) - ) - - for title in titles: - errors_by_title.append({ - "plugin": plugin_info["plugin"], - "error_info": error_info_by_title[title], - "title": title - }) - - for idx, item in enumerate(errors_by_title): - widget = ValidationErrorTitleWidget(idx, item, self) + grouped_error_items = validation_error_report.group_items_by_title() + for idx, error_info in enumerate(grouped_error_items): + widget = ValidationErrorTitleWidget(idx, error_info, self) widget.selected.connect(self._on_select) widget.instance_changed.connect(self._on_instance_change) self._errors_layout.addWidget(widget) self._title_widgets[idx] = widget - self._error_info[idx] = item + self._error_info[idx] = error_info self._errors_layout.addStretch(1) @@ -640,7 +670,7 @@ class ValidationsWidget(QtWidgets.QFrame): if self._controller.publish_has_validation_errors: validation_errors = self._controller.get_validation_errors() self._set_current_widget(self._validations_widget) - self.set_errors(validation_errors) + self._set_errors(validation_errors) return if self._contoller.publish_has_finished: @@ -659,7 +689,7 @@ class ValidationsWidget(QtWidgets.QFrame): error_item = self._error_info[index] - self._actions_widget.set_plugin(error_item["plugin"]) + self._actions_widget.set_error_item(error_item) self._update_description() From 2a34a5f9780e9423a409230dc1751b057c53a2fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:28:51 +0200 Subject: [PATCH 1503/2550] renamed 'get_icon_for_family' to 'get_creator_icon' --- openpype/tools/publisher/control.py | 15 ++++++++++++--- .../tools/publisher/widgets/card_view_widgets.py | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 3df8da62cb..f870f5d9e3 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -812,7 +812,16 @@ class AbstractPublisherController(object): pass @abstractmethod - def get_icon_for_family(self, family): + def get_creator_icon(self, identifier): + """Receive creator's icon by identifier. + + Args: + identifier (str): Creator's identifier. + + Returns: + Union[str, None]: Creator's icon string. 
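+
+        Example usage (a sketch mirroring how the card view builds its group
+        icons; 'controller' stands for an instance of this controller and
+        'identifiers' is assumed to be an iterable of creator identifiers)::
+
+            group_icons = {
+                identifier: controller.get_creator_icon(identifier)
+                for identifier in identifiers
+            }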
+ """ + pass @abstractmethod @@ -1200,9 +1209,9 @@ class PublisherController(AbstractPublisherController): )) return output - def get_icon_for_family(self, family): + def get_creator_icon(self, identifier): """TODO rename to get creator icon.""" - creator = self._creators.get(family) + creator = self._creators.get(identifier) if creator is not None: return creator.get_icon() return None diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index fa391f4ba0..4bd2cf25ae 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -469,7 +469,7 @@ class InstanceCardView(AbstractInstanceView): group_widget = self._widgets_by_group[group_name] else: group_icons = { - idenfier: self.controller.get_icon_for_family(idenfier) + idenfier: self.controller.get_creator_icon(idenfier) for idenfier in identifiers_by_group[group_name] } From 562852875ed60a310a4f29052cbbe66356bda466 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:29:08 +0200 Subject: [PATCH 1504/2550] fix action trigger --- openpype/tools/publisher/control.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index f870f5d9e3..643efa8645 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1354,8 +1354,11 @@ class PublisherController(AbstractPublisherController): if self._publish_is_running: self._stop_publish() - def run_action(self, plugin, action): + def run_action(self, plugin_id, action_id): # TODO handle result in UI + plugin = self._publish_plugins_proxy.get_plugin(plugin_id) + action = self._publish_plugins_proxy.get_action(action_id) + result = pyblish.plugin.process( plugin, self._publish_context, None, action.id ) From 35562e4abb2c41962e701012bcb13ff7ad1e0067 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:29:36 +0200 Subject: [PATCH 1505/2550] remove unused variable reset --- openpype/tools/publisher/control.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 643efa8645..7aaaccd8d8 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1407,9 +1407,6 @@ class PublisherController(AbstractPublisherController): for idx, plugin in enumerate(self._publish_plugins): self._publish_progress = idx - # Reset current plugin validations error - self._publish_current_plugin_validation_errors = None - # Check if plugin is over validation order if not self._publish_validated: self._publish_validated = ( From e6042d9889cb52675f642a9deee1664ed0f7057b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:29:54 +0200 Subject: [PATCH 1506/2550] fix event system access --- openpype/tools/publisher/control.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 7aaaccd8d8..a42657dd9a 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1025,6 +1025,7 @@ class PublisherController(AbstractPublisherController): @property def host_is_valid(self): """Host is valid for creation.""" + return self._create_context.host_is_valid @property @@ -1035,7 +1036,7 @@ class PublisherController(AbstractPublisherController): def _emit_event(self, topic, data=None): if data is None: data = {} - 
self._event_system.emit(topic, data, "controller") + self.event_system.emit(topic, data, "controller") # --- Publish specific callbacks --- def get_asset_docs(self): From a06f629a08b8cfaa50fa35745d7805fd8737eff9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:30:32 +0200 Subject: [PATCH 1507/2550] added some docstrings --- openpype/tools/publisher/control.py | 123 +++++++++++++++++++++++++--- 1 file changed, 112 insertions(+), 11 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index a42657dd9a..d4b624e959 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -797,10 +797,12 @@ class AbstractPublisherController(object): @abstractmethod def reset(self): - pass + """Reset whole controller. + + This should reset create context, publish context and all variables + that are related to it. + """ - @abstractmethod - def emit_card_message(self, message): pass @abstractmethod @@ -828,52 +830,113 @@ class AbstractPublisherController(object): def create( self, creator_identifier, subset_name, instance_data, options ): - pass + """Trigger creation by creator identifier. + + Should also trigger refresh of instanes. + + Args: + creator_identifier (str): Identifier of Creator plugin. + subset_name (str): Calculated subset name. + instance_data (Dict[str, Any]): Base instance data with variant, + asset name and task name. + options (Dict[str, Any]): Data from pre-create attributes. + """ def save_changes(self): - """Save changes happened during creation.""" + """Save changes in create context.""" pass def remove_instances(self, instances): - """Remove list of instances.""" + """Remove list of instances from create context.""" pass @abstractproperty def publish_has_finished(self): + """Has publishing finished. + + Returns: + bool: If publishing finished and all plugins were iterated. + """ + pass @abstractproperty def publish_is_running(self): + """Publishing is running right now. + + Returns: + bool: If publishing is in progress. + """ + pass @abstractproperty def publish_has_validated(self): + """Publish validation passed. + + Returns: + bool: If publishing passed last possible validation order. + """ + pass @abstractproperty def publish_has_crashed(self): + """Publishing crashed for any reason. + + Returns: + bool: Publishing crashed. + """ + pass @abstractproperty def publish_has_validation_errors(self): + """During validation happened at least one validation error. + + Returns: + bool: Validation error was raised during validation. + """ + pass @abstractproperty def publish_max_progress(self): + """Get maximum possible progress number. + + Returns: + int: Number that can be used as 100% of publish progress bar. + """ + pass @abstractproperty def publish_progress(self): + """Current progress number. + + Returns: + int: Current progress value which is between 0 and + 'publish_max_progress'. + """ + pass @abstractproperty def publish_comment_is_set(self): + """Publish comment was at least once set. + + Publish comment can be set only once when publish is started for a + first time. This helpt to idetify if 'set_comment' should be called or + not. 
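+
+        A sketch of the intended check ('comment_from_ui' is a hypothetical
+        value coming from the UI)::
+
+            if not controller.publish_comment_is_set:
+                controller.set_comment(comment_from_ui)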
+ """ + pass @abstractmethod def get_publish_crash_error(self): + pass @abstractmethod @@ -884,30 +947,68 @@ class AbstractPublisherController(object): def get_validation_errors(self): pass - @abstractmethod - def set_comment(self, comment): - pass - @abstractmethod def publish(self): + """Trigger publishing without any order limitations.""" + pass @abstractmethod def validate(self): + """Trigger publishing which will stop after validation order.""" + pass @abstractmethod def stop_publish(self): + """Stop publishing can be also used to pause publishing. + + Pause of publishing is possible only if all plugins successfully + finished. + """ + pass @abstractmethod - def run_action(self, plugin, action): + def run_action(self, plugin_id, action_id): + """Trigger pyblish action on a plugin. + + Args: + plugin_id (str): Id of publish plugin. + action_id (str): Id of publish action. + """ + pass @abstractmethod def reset_project_data_cache(self): pass + @abstractmethod + def set_comment(self, comment): + """Set comment on pyblish context. + + Set "comment" key on current pyblish.api.Context data. + + Args: + comment (str): Artist's comment. + """ + + pass + + @abstractmethod + def emit_card_message(self, message): + """Emit a card message which can have a lifetime. + + This is for UI purposes. Method can be extended to more arguments + in future e.g. different message timeout or type (color). + + Args: + message (str): Message that will be showed. + """ + + pass + class PublisherController(AbstractPublisherController): """Middleware between UI, CreateContext and publish Context. From 200107245a79c75bb5ce3329a04b31e3690241f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:33:51 +0200 Subject: [PATCH 1508/2550] controller is private for all widgets --- .../publisher/widgets/card_view_widgets.py | 7 ++-- .../publisher/widgets/list_view_widgets.py | 6 +-- .../tools/publisher/widgets/publish_frame.py | 42 +++++++++---------- openpype/tools/publisher/widgets/widgets.py | 16 +++---- 4 files changed, 36 insertions(+), 35 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 4bd2cf25ae..06fa49320e 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -351,10 +351,11 @@ class InstanceCardView(AbstractInstanceView): Wrapper of all widgets in card view. 
""" + def __init__(self, controller, parent): super(InstanceCardView, self).__init__(parent) - self.controller = controller + self._controller = controller scroll_area = QtWidgets.QScrollArea(self) scroll_area.setWidgetResizable(True) @@ -440,7 +441,7 @@ class InstanceCardView(AbstractInstanceView): # Prepare instances by group and identifiers by group instances_by_group = collections.defaultdict(list) identifiers_by_group = collections.defaultdict(set) - for instance in self.controller.instances: + for instance in self._controller.instances: group_name = instance.group_label instances_by_group[group_name].append(instance) identifiers_by_group[group_name].add( @@ -469,7 +470,7 @@ class InstanceCardView(AbstractInstanceView): group_widget = self._widgets_by_group[group_name] else: group_icons = { - idenfier: self.controller.get_creator_icon(idenfier) + idenfier: self._controller.get_creator_icon(idenfier) for idenfier in identifiers_by_group[group_name] } diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index a701181e5b..8438e17167 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -409,7 +409,7 @@ class InstanceListView(AbstractInstanceView): def __init__(self, controller, parent): super(InstanceListView, self).__init__(parent) - self.controller = controller + self._controller = controller instance_view = InstanceTreeView(self) instance_delegate = ListItemDelegate(instance_view) @@ -520,7 +520,7 @@ class InstanceListView(AbstractInstanceView): # Prepare instances by their groups instances_by_group_name = collections.defaultdict(list) group_names = set() - for instance in self.controller.instances: + for instance in self._controller.instances: group_label = instance.group_label group_names.add(group_label) instances_by_group_name[group_label].append(instance) @@ -771,7 +771,7 @@ class InstanceListView(AbstractInstanceView): context_selected = False instances_by_id = { instance.id: instance - for instance in self.controller.instances + for instance in self._controller.instances } for index in self._instance_view.selectionModel().selectedIndexes(): diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index 4e5f02f2da..b49f005640 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -185,7 +185,7 @@ class PublishFrame(QtWidgets.QWidget): self._shrunk_anim = shrunk_anim - self.controller = controller + self._controller = controller self._content_frame = content_frame self._content_layout = content_layout @@ -309,8 +309,8 @@ class PublishFrame(QtWidgets.QWidget): self._validate_btn.setEnabled(True) self._publish_btn.setEnabled(True) - self._progress_bar.setValue(self.controller.publish_progress) - self._progress_bar.setMaximum(self.controller.publish_max_progress) + self._progress_bar.setValue(self._controller.publish_progress) + self._progress_bar.setMaximum(self._controller.publish_max_progress) def _on_publish_start(self): self._set_success_property(-1) @@ -334,34 +334,34 @@ class PublishFrame(QtWidgets.QWidget): def _on_plugin_change(self, event): """Change plugin label when instance is going to be processed.""" - self._progress_bar.setValue(self.controller.publish_progress) + self._progress_bar.setValue(self._controller.publish_progress) self._plugin_label.setText(event["plugin_label"]) 
QtWidgets.QApplication.processEvents() def _on_publish_stop(self): - self._progress_bar.setValue(self.controller.publish_progress) + self._progress_bar.setValue(self._controller.publish_progress) self._reset_btn.setEnabled(True) self._stop_btn.setEnabled(False) - validate_enabled = not self.controller.publish_has_crashed - publish_enabled = not self.controller.publish_has_crashed + validate_enabled = not self._controller.publish_has_crashed + publish_enabled = not self._controller.publish_has_crashed if validate_enabled: - validate_enabled = not self.controller.publish_has_validated + validate_enabled = not self._controller.publish_has_validated if publish_enabled: if ( - self.controller.publish_has_validated - and self.controller.publish_has_validation_errors + self._controller.publish_has_validated + and self._controller.publish_has_validation_errors ): publish_enabled = False else: - publish_enabled = not self.controller.publish_has_finished + publish_enabled = not self._controller.publish_has_finished self._validate_btn.setEnabled(validate_enabled) self._publish_btn.setEnabled(publish_enabled) - error = self.controller.get_publish_crash_error() - validation_errors = self.controller.get_validation_errors() + error = self._controller.get_publish_crash_error() + validation_errors = self._controller.get_validation_errors() if error: self._set_error(error) @@ -369,7 +369,7 @@ class PublishFrame(QtWidgets.QWidget): self._set_progress_visibility(False) self._set_validation_errors() - elif self.controller.publish_has_finished: + elif self._controller.publish_has_finished: self._set_finished() else: @@ -377,7 +377,7 @@ class PublishFrame(QtWidgets.QWidget): def _set_stopped(self): main_label = "Publish paused" - if self.controller.publish_has_validated: + if self._controller.publish_has_validated: main_label += " - Validation passed" self._set_main_label(main_label) @@ -440,7 +440,7 @@ class PublishFrame(QtWidgets.QWidget): widget.style().polish(widget) def _copy_report(self): - logs = self.controller.get_publish_report() + logs = self._controller.get_publish_report() logs_string = json.dumps(logs, indent=4) mime_data = QtCore.QMimeData() @@ -463,7 +463,7 @@ class PublishFrame(QtWidgets.QWidget): if not ext or not new_filepath: return - logs = self.controller.get_publish_report() + logs = self._controller.get_publish_report() full_path = new_filepath + ext dir_path = os.path.dirname(full_path) if not os.path.exists(dir_path): @@ -483,13 +483,13 @@ class PublishFrame(QtWidgets.QWidget): self.details_page_requested.emit() def _on_reset_clicked(self): - self.controller.reset() + self._controller.reset() def _on_stop_clicked(self): - self.controller.stop_publish() + self._controller.stop_publish() def _on_validate_clicked(self): - self.controller.validate() + self._controller.validate() def _on_publish_clicked(self): - self.controller.publish() + self._controller.publish() diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d5e55b88f9..903ce70f01 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -994,7 +994,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): def __init__(self, controller, parent): super(GlobalAttrsWidget, self).__init__(parent) - self.controller = controller + self._controller = controller self._current_instances = [] variant_input = VariantInputWidget(self) @@ -1068,7 +1068,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): else: asset_names.add(asset_name) - for 
asset_doc in self.controller.get_asset_docs(): + for asset_doc in self._controller.get_asset_docs(): _asset_name = asset_doc["name"] if _asset_name in asset_names: asset_names.remove(_asset_name) @@ -1077,7 +1077,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): if not asset_names: break - project_name = self.controller.project_name + project_name = self._controller.project_name subset_names = set() invalid_tasks = False for instance in self._current_instances: @@ -1245,7 +1245,7 @@ class CreatorAttrsWidget(QtWidgets.QWidget): self._main_layout = main_layout - self.controller = controller + self._controller = controller self._scroll_area = scroll_area self._attr_def_id_to_instances = {} @@ -1274,7 +1274,7 @@ class CreatorAttrsWidget(QtWidgets.QWidget): self._attr_def_id_to_instances = {} self._attr_def_id_to_attr_def = {} - result = self.controller.get_creator_attribute_definitions( + result = self._controller.get_creator_attribute_definitions( instances ) @@ -1366,7 +1366,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): self._main_layout = main_layout - self.controller = controller + self._controller = controller self._scroll_area = scroll_area self._attr_def_id_to_instances = {} @@ -1398,7 +1398,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): self._attr_def_id_to_attr_def = {} self._attr_def_id_to_plugin_name = {} - result = self.controller.get_publish_attribute_definitions( + result = self._controller.get_publish_attribute_definitions( instances, context_selected ) @@ -1513,7 +1513,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): self._on_instance_context_changed ) - self.controller = controller + self._controller = controller self.global_attrs_widget = global_attrs_widget From 5cfd5db5d7d323133fb1d79a6a1da0e2effc4c49 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 11:53:10 +0200 Subject: [PATCH 1509/2550] added missing abstract property 'host_is_valid' --- openpype/tools/publisher/control.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index d4b624e959..1725961aac 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -687,7 +687,8 @@ class AbstractPublisherController(object): Define what must be implemented to be able use Publisher functionality. Goal is to have "data driven" controller that can be used to control UI - running in different process. That lead to some "" + running in different process. That lead to some disadvantages like UI can't + access objects directly but by using wrappers that can be serialized. """ _log = None @@ -762,6 +763,19 @@ class AbstractPublisherController(object): pass + @abstractproperty + def host_is_valid(self): + """Host is valid for creation part. + + Host must have implemented certain functionality to be able create + in Publisher tool. + + Returns: + bool: Host can handle creation of instances. + """ + + pass + @abstractproperty def instances(self): """Collected/created instances. From adc5d044d4047b9c403a359602205a195e68a9c5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 7 Oct 2022 12:20:09 +0200 Subject: [PATCH 1510/2550] OP-4181 - removed unneeded line _id set already higher when item is created. 
Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/plugins/publish/integrate_hero_version.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 398a0226df..5f4d284740 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -203,7 +203,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): if old_version: self.log.debug("Replacing old hero version.") - new_hero_version["_id"] = old_version["_id"] update_data = prepare_hero_version_update_data( old_version, new_hero_version ) From 33d8b2832c0fba3d2ae9501402625e4cf6cb020e Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 7 Oct 2022 12:23:47 +0200 Subject: [PATCH 1511/2550] :bug: fix uhacking of renderman hacks for Deadline --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 4d6068f3c0..6704d464ce 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -500,6 +500,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): plugin_info["Renderer"] = renderer + # this is needed because renderman plugin in Deadline + # handles directory and file prefixes separately + plugin_info["OutputFilePath"] = os.path.dirname( + job_info.OutputDirectory[0]).replace("\\", "/") + return job_info, plugin_info def _get_vray_export_payload(self, data): From 08d79e1a786ca0a01a50b7b781aaaf8c83b64371 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 7 Oct 2022 12:28:40 +0200 Subject: [PATCH 1512/2550] OP-3939 - added configuration of integrate profiles to Setting Integration of thumbnail representation could be controlled from single location with use of profiles. --- .../defaults/project_settings/global.json | 4 ++ .../schemas/schema_global_publish.json | 67 +++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 115a719995..1b7dc7a41a 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -164,6 +164,10 @@ } ] }, + "PreIntegrateThumbnails": { + "enabled": true, + "integrate_profiles": [] + }, "IntegrateSubsetGroup": { "subset_grouping_profiles": [ { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 297f96aa8c..9a8d10a4e1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -555,6 +555,73 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "PreIntegrateThumbnails", + "label": "Integrate Thumbnail Representations", + "is_group": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "label", + "label": "Explicitly set if Thumbnail representation should be integrated into DB.
    If no matching profile set, existing state from Host implementation is kept." + }, + { + "type": "list", + "key": "integrate_profiles", + "label": "Integrate profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "key": "subsets", + "label": "Subset names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "integrate_thumbnail", + "label": "Integrate thumbnail" + } + ] + } + } + ] + }, { "type": "dict", "collapsible": true, From 8b0e6410167d34a21d71bec3665ac67cb240b032 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 7 Oct 2022 12:30:21 +0200 Subject: [PATCH 1513/2550] OP-3939 - added plugin to update tags for thumbnail representation Integration of thumbnail could be explicitly controlled on one place, overwriting implicit state from host implementations. --- .../preintegrate_thumbnail_representation.py | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 openpype/plugins/publish/preintegrate_thumbnail_representation.py diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py new file mode 100644 index 0000000000..0c3ba4057c --- /dev/null +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -0,0 +1,68 @@ +""" Marks thumbnail representation for integrate to DB or not. + + Some hosts produce thumbnail representation, most of them do not create + them explicitly, but they created during extract phase. + + In some cases it might be useful to override implicit setting for host/task + + This plugin needs to run after extract phase, but before integrate.py as + thumbnail is part of review family and integrated there. + + It should be better to control integration of thumbnail in one place than + configure it in multiple places on host implementations. 
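+
+    An illustrative profile as it could be filled in project settings (all
+    values below are hypothetical)::
+
+        {
+            "hosts": ["traypublisher"],
+            "task_types": [],
+            "tasks": [],
+            "families": ["review"],
+            "subsets": [],
+            "integrate_thumbnail": false
+        }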
+""" +import pyblish.api + +from openpype.lib.profiles_filtering import filter_profiles + + +class PreIntegrateThumbnails(pyblish.api.InstancePlugin): + """Marks thumbnail representation for integrate to DB or not.""" + + label = "Should Integrate Thumbnails" + order = pyblish.api.IntegratorOrder + families = ["review"] + + integrate_profiles = {} + + def process(self, instance): + thumbnail_repre = None + for repre in instance.data["representations"]: + if repre["name"] == "thumbnail": + thumbnail_repre = repre + break + + if not thumbnail_repre: + return + + family = instance.data["family"] + subset_name = instance.data["subset"] + host_name = instance.context.data["hostName"] + + anatomy_data = instance.data["anatomyData"] + task = anatomy_data.get("task", {}) + + found_profile = filter_profiles( + self.integrate_profiles, + { + "hosts": host_name, + "tasks": task.get("name"), + "task_types": task.get("type"), + "families": family, + "subsets": subset_name, + }, + logger=self.log + ) + + if not found_profile: + return + + if not found_profile["integrate_thumbnail"]: + if "delete" not in thumbnail_repre["tags"]: + thumbnail_repre["tags"].append("delete") + else: + if "delete" in thumbnail_repre["tags"]: + thumbnail_repre["tags"].pop("delete") + + self.log.debug( + "Thumbnail repre tags {}".format(thumbnail_repre["tags"])) From 237a9fcf6cd990cfc72368cadb45b53b38565157 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 7 Oct 2022 12:30:38 +0200 Subject: [PATCH 1514/2550] OP-3939 - added bit of documentation --- openpype/plugins/publish/integrate_thumbnail.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index d86cec10ad..e7046ba2ea 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -1,3 +1,13 @@ +""" Integrate Thumbnails for Openpype use in Loaders. + + This thumbnail is different from 'thumbnail' representation which could + be uploaded to Ftrack, or used as any other representation in Loaders to + pull into a scene. + + This one is used only as image describing content of published item and + shows up only in Loader in right column section. 
+""" + import os import sys import errno @@ -12,7 +22,7 @@ from openpype.client.operations import OperationsSession, new_thumbnail_doc class IntegrateThumbnails(pyblish.api.InstancePlugin): - """Integrate Thumbnails.""" + """Integrate Thumbnails for Openpype use in Loaders.""" label = "Integrate Thumbnails" order = pyblish.api.IntegratorOrder + 0.01 From 56449218344d23d6f5bb4e23c849dbd5ba1ac93a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 12:47:10 +0200 Subject: [PATCH 1515/2550] store asset documents by name --- openpype/tools/publisher/control.py | 39 ++++++++++++++++++++--------- 1 file changed, 27 insertions(+), 12 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 1725961aac..2da26622eb 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -45,25 +45,34 @@ class AssetDocsCache: def __init__(self, controller): self._controller = controller self._asset_docs = None + # TODO use asset ids instead self._task_names_by_asset_name = {} + self._asset_docs_by_name = {} def reset(self): self._asset_docs = None self._task_names_by_asset_name = {} + self._asset_docs_by_name = {} def _query(self): - if self._asset_docs is None: - project_name = self._controller.project_name - asset_docs = get_assets( - project_name, fields=self.projection.keys() - ) - task_names_by_asset_name = {} - for asset_doc in asset_docs: - asset_name = asset_doc["name"] - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_names_by_asset_name[asset_name] = list(asset_tasks.keys()) - self._asset_docs = asset_docs - self._task_names_by_asset_name = task_names_by_asset_name + if self._asset_docs is not None: + return + + project_name = self._controller.project_name + asset_docs = get_assets( + project_name, fields=self.projection.keys() + ) + asset_docs_by_name = {} + task_names_by_asset_name = {} + for asset_doc in asset_docs: + asset_name = asset_doc["name"] + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_names_by_asset_name[asset_name] = list(asset_tasks.keys()) + asset_docs_by_name[asset_name] = asset_doc + + self._asset_docs = asset_docs + self._asset_docs_by_name = asset_docs_by_name + self._task_names_by_asset_name = task_names_by_asset_name def get_asset_docs(self): self._query() @@ -73,6 +82,12 @@ class AssetDocsCache: self._query() return copy.deepcopy(self._task_names_by_asset_name) + def get_asset_by_name(self, asset_name): + asset_doc = self._asset_docs_by_name.get(asset_name) + if asset_doc is None: + return None + return copy.deepcopy(asset_doc) + class PublishReport: """Report for single publishing process. 
From 2ab0ad9d4466c8d518726266115e0910fd53a0bf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 12:47:44 +0200 Subject: [PATCH 1516/2550] added ability to get and query full asset document --- openpype/tools/publisher/control.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 2da26622eb..8abe62e4b1 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -8,7 +8,10 @@ from abc import ABCMeta, abstractmethod, abstractproperty import six import pyblish.api -from openpype.client import get_assets +from openpype.client import ( + get_assets, + get_asset_by_id, +) from openpype.lib.events import EventSystem from openpype.pipeline import ( PublishValidationError, @@ -48,6 +51,7 @@ class AssetDocsCache: # TODO use asset ids instead self._task_names_by_asset_name = {} self._asset_docs_by_name = {} + self._full_asset_docs_by_name = {} def reset(self): self._asset_docs = None @@ -88,6 +92,15 @@ class AssetDocsCache: return None return copy.deepcopy(asset_doc) + def get_full_asset_by_name(self, asset_name): + self._query() + if asset_name not in self._full_asset_docs_by_name: + asset_doc = self._asset_docs_by_name.get(asset_name) + project_name = self._controller.project_name + full_asset_doc = get_asset_by_id(project_name, asset_doc["_id"]) + self._full_asset_docs_by_name[asset_name] = full_asset_doc + return copy.deepcopy(self._full_asset_docs_by_name[asset_name]) + class PublishReport: """Report for single publishing process. From 8b713db7d6d255433753fc56eef1efe886c56ce8 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 7 Oct 2022 12:48:22 +0200 Subject: [PATCH 1517/2550] :bug: drop dirname --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 6704d464ce..7fbe134410 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -502,8 +502,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # this is needed because renderman plugin in Deadline # handles directory and file prefixes separately - plugin_info["OutputFilePath"] = os.path.dirname( - job_info.OutputDirectory[0]).replace("\\", "/") + plugin_info["OutputFilePath"] = job_info.OutputDirectory[0] return job_info, plugin_info From 626cb387934956d2c1eea08a535dd83223a876cd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 12:48:34 +0200 Subject: [PATCH 1518/2550] added ability to get existing subsets for passet asset name via controller --- openpype/tools/publisher/control.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 8abe62e4b1..89619f70f7 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -11,6 +11,7 @@ import pyblish.api from openpype.client import ( get_assets, get_asset_by_id, + get_subsets, ) from openpype.lib.events import EventSystem from openpype.pipeline import ( @@ -837,6 +838,10 @@ class AbstractPublisherController(object): def get_task_names_by_asset_names(self, asset_names): pass + @abstractmethod + def get_existing_subset_names(self, asset_name): + pass + 
@abstractmethod def reset(self): """Reset whole controller. @@ -1223,6 +1228,21 @@ class PublisherController(AbstractPublisherController): ) return result + def get_existing_subset_names(self, asset_name): + project_name = self.project_name + asset_doc = self._asset_docs_cache.get_asset_by_name(asset_name) + if not asset_doc: + return None + + asset_id = asset_doc["_id"] + subset_docs = get_subsets( + project_name, asset_ids=[asset_id], fields=["name"] + ) + return { + subset_doc["name"] + for subset_doc in subset_docs + } + def reset(self): """Reset everything related to creation and publishing.""" # Stop publishing From ac61407a4fe0d517b78cad30f2abffe51c378546 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 12:49:25 +0200 Subject: [PATCH 1519/2550] controller can handle get subset name based on creator identifier --- openpype/tools/publisher/control.py | 52 +++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 89619f70f7..444cdbc914 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -873,6 +873,29 @@ class AbstractPublisherController(object): pass + @abstractmethod + def get_subset_name( + self, + creator_identifier, + variant, + task_name, + asset_name, + instance_id=None + ): + """Get subset name based on passed data. + + Args: + creator_identifier (str): Identifier of creator which should be + responsible for subset name creation. + variant (str): Variant value from user's input. + task_name (str): Name of task for which is instance created. + asset_name (str): Name of asset for which is instance created. + instance_id (Union[str, None]): Existing instance id when subset + name is updated. + """ + + pass + @abstractmethod def create( self, creator_identifier, subset_name, instance_data, options @@ -1380,6 +1403,35 @@ class PublisherController(AbstractPublisherController): return creator.get_icon() return None + def get_subset_name( + self, + creator_identifier, + variant, + task_name, + asset_name, + instance_id=None + ): + """Get subset name based on passed data. + + Args: + creator_identifier (str): Identifier of creator which should be + responsible for subset name creation. + variant (str): Variant value from user's input. + task_name (str): Name of task for which is instance created. + asset_name (str): Name of asset for which is instance created. + instance_id (Union[str, None]): Existing instance id when subset + name is updated. 
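+
+        Example call (an illustrative sketch; the identifier, variant, task
+        and asset values are hypothetical)::
+
+            subset_name = controller.get_subset_name(
+                "render", "Main", "compositing", "sh010"
+            )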
+ """ + + creator = self._creators[creator_identifier] + project_name = self.project_name + print(asset_name) + asset_doc = self._asset_docs_cache.get_full_asset_by_name(asset_name) + + return creator.get_subset_name( + variant, task_name, asset_doc, project_name + ) + def create( self, creator_identifier, subset_name, instance_data, options ): From 72dccf24a2fd887a86221bfc12ae815447bde7f6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 12:50:03 +0200 Subject: [PATCH 1520/2550] create widget does not call 'get_subset_name' on creator but via controller --- .../tools/publisher/widgets/create_widget.py | 38 +++++++------------ 1 file changed, 13 insertions(+), 25 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 4c9fa63d24..39fdeae30f 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -174,7 +174,7 @@ class CreateWidget(QtWidgets.QWidget): self._controller = controller - self._asset_doc = None + self._asset_name = None self._subset_names = None self._selected_creator = None @@ -380,7 +380,7 @@ class CreateWidget(QtWidgets.QWidget): if asset_name is None: asset_name = self.current_asset_name - return asset_name + return asset_name or None def _get_task_name(self): task_name = None @@ -444,7 +444,7 @@ class CreateWidget(QtWidgets.QWidget): prereq_available = False creator_btn_tooltips.append("Creator is not selected") - if self._context_change_is_enabled() and self._asset_doc is None: + if self._context_change_is_enabled() and self._asset_name is None: # QUESTION how to handle invalid asset? prereq_available = False creator_btn_tooltips.append("Context is not selected") @@ -468,30 +468,19 @@ class CreateWidget(QtWidgets.QWidget): asset_name = self._get_asset_name() # Skip if asset did not change - if self._asset_doc and self._asset_doc["name"] == asset_name: + if self._asset_name and self._asset_name == asset_name: return - # Make sure `_asset_doc` and `_subset_names` variables are reset - self._asset_doc = None + # Make sure `_asset_name` and `_subset_names` variables are reset + self._asset_name = asset_name self._subset_names = None if asset_name is None: return - project_name = self._controller.project_name - asset_doc = get_asset_by_name(project_name, asset_name) - self._asset_doc = asset_doc + subset_names = self._controller.get_existing_subset_names(asset_name) - if asset_doc: - asset_id = asset_doc["_id"] - subset_docs = get_subsets( - project_name, asset_ids=[asset_id], fields=["name"] - ) - self._subset_names = { - subset_doc["name"] - for subset_doc in subset_docs - } - - if not asset_doc: + self._subset_names = subset_names + if subset_names is None: self.subset_name_input.setText("< Asset is not set >") def _refresh_creators(self): @@ -670,14 +659,13 @@ class CreateWidget(QtWidgets.QWidget): self.subset_name_input.setText("< Valid variant >") return - project_name = self._controller.project_name + asset_name = self._get_asset_name() task_name = self._get_task_name() - - asset_doc = copy.deepcopy(self._asset_doc) + creator_idenfier = self._selected_creator.identifier # Calculate subset name with Creator plugin try: - subset_name = self._selected_creator.get_subset_name( - variant_value, task_name, asset_doc, project_name + subset_name = self._controller.get_subset_name( + creator_idenfier, variant_value, task_name, asset_name ) except TaskNotSetError: self._create_btn.setEnabled(False) From 
72e647a5b990411bd88d2f4e045fd66b906d0b7d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 7 Oct 2022 13:10:56 +0200 Subject: [PATCH 1521/2550] OP-3939 - fix - must be before integrate.py --- .../plugins/publish/preintegrate_thumbnail_representation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py index 0c3ba4057c..a4ab0443c2 100644 --- a/openpype/plugins/publish/preintegrate_thumbnail_representation.py +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -20,7 +20,7 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): """Marks thumbnail representation for integrate to DB or not.""" label = "Should Integrate Thumbnails" - order = pyblish.api.IntegratorOrder + order = pyblish.api.IntegratorOrder - 0.1 families = ["review"] integrate_profiles = {} From 467a6d89c8cfdc30e86786914e8513bcbed2022a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 7 Oct 2022 14:19:11 +0200 Subject: [PATCH 1522/2550] :recycle: set proper default value from the settings --- .../plugins/publish/submit_maya_deadline.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3f0905c586..67771eae50 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -199,10 +199,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): context = instance.context # Set it to default Maya behaviour if it cannot be determined - # from instance (but it should be, by the Collector). Also - rs_include_lights = instance.data.get("renderSetupIncludeLights", "1") + # from instance (but it should be, by the Collector). + + default_rs_include_lights = instance.context.data\ + .get('project_settings')\ + .get('maya')\ + .get('RenderSettings')\ + .get('enable_all_lights') + + rs_include_lights = instance.data.get( + "renderSetupIncludeLights", default_rs_include_lights) if rs_include_lights not in {"1", "0", True, False}: - rs_include_lights = "1" + rs_include_lights = default_rs_include_lights plugin_info = MayaPluginInfo( SceneFile=self.scene_path, Version=cmds.about(version=True), From 9b92caa2549cb442dc0fed0810be7456538a4c34 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 15:08:58 +0200 Subject: [PATCH 1523/2550] created new thumbnail extractor which is looking for 'thumbnailSource' on instance --- .../plugins/publish/extract_thumbnail.py | 173 ++++++++++++++++++ 1 file changed, 173 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py diff --git a/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py new file mode 100644 index 0000000000..eda3c8c191 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py @@ -0,0 +1,173 @@ +"""Create instance thumbnail from "thumbnailSource" on 'instance.data'. + +Output is new representation with "thumbnail" name on instance. If instance +already have such representation the process is skipped. + +This way a collector can point to a file from which should be thumbnail +generated. This is different approach then what global plugin for thumbnails +does. 
The global plugin has specific logic which does not support + +Todos: + No size handling. Size of input is used for output thumbnail which can + cause issues. +""" + +import os +import tempfile + +import pyblish.api +from openpype.lib import ( + get_ffmpeg_tool_path, + get_oiio_tools_path, + is_oiio_supported, + + run_subprocess, +) + + +class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): + """Create jpg thumbnail for instance based on 'thumbnailSource'. + + Thumbnail source must be a single image or video filepath. + """ + + label = "Extract Thumbnail (from source)" + # Before 'ExtractThumbnail' in global plugins + order = pyblish.api.ExtractorOrder - 0.00001 + hosts = ["traypublisher"] + + def process(self, instance): + subset_name = instance.data["subset"] + self.log.info( + "Processing instance with subset name {}".format(subset_name) + ) + + thumbnail_source = instance.data.get("thumbnailSource") + if not thumbnail_source: + self.log.debug("Thumbnail source not filled. Skipping.") + return + + elif not os.path.exists(thumbnail_source): + self.log.debug( + "Thumbnail source file was not found {}. Skipping.".format( + thumbnail_source)) + return + + # Check if already has thumbnail created + if self._already_has_thumbnail(instance): + self.log.info("Thumbnail representation already present.") + return + + # Create temp directory for thumbnail + # - this is to avoid "override" of source file + dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_") + self.log.debug( + "Create temp directory {} for thumbnail".format(dst_staging) + ) + # Store new staging to cleanup paths + instance.context.data["cleanupFullPaths"].append(dst_staging) + + thumbnail_created = False + oiio_supported = is_oiio_supported() + + self.log.info("Thumbnail source: {}".format(thumbnail_source)) + filename = os.path.splitext(thumbnail_source)[0] + jpeg_file = filename + ".jpg" + full_output_path = os.path.join(dst_staging, jpeg_file) + + if oiio_supported: + self.log.info("Trying to convert with OIIO") + # If the input can read by OIIO then use OIIO method for + # conversion otherwise use ffmpeg + thumbnail_created = self.create_thumbnail_oiio( + thumbnail_source, full_output_path + ) + + # Try to use FFMPEG if OIIO is not supported or for cases when + # oiiotool isn't available + if not thumbnail_created: + if oiio_supported: + self.log.info(( + "Converting with FFMPEG because input" + " can't be read by OIIO." 
+ )) + + thumbnail_created = self.create_thumbnail_ffmpeg( + thumbnail_source, full_output_path + ) + + # Skip representation and try next one if wasn't created + if not thumbnail_created: + self.log.warning("Thumbanil has not been created.") + return + + new_repre = { + "name": "thumbnail", + "ext": "jpg", + "files": jpeg_file, + "stagingDir": dst_staging, + "thumbnail": True, + "tags": ["thumbnail"] + } + + # adding representation + self.log.debug( + "Adding thumbnail representation: {}".format(new_repre) + ) + instance.data["representations"].append(new_repre) + + def _already_has_thumbnail(self, instance): + if "representations" not in instance.data: + self.log.warning( + "Instance does not have 'representations' key filled" + ) + instance.data["representations"] = [] + + for repre in instance.data["representations"]: + if repre["name"] == "thumbnail": + return True + return False + + def create_thumbnail_oiio(self, src_path, dst_path): + self.log.info("outputting {}".format(dst_path)) + oiio_tool_path = get_oiio_tools_path() + oiio_cmd = [ + oiio_tool_path, + "-a", src_path, + "-o", dst_path + ] + self.log.info("Running: {}".format(" ".join(oiio_cmd))) + try: + run_subprocess(oiio_cmd, logger=self.log) + return True + except Exception: + self.log.warning( + "Failed to create thubmnail using oiiotool", + exc_info=True + ) + return False + + def create_thumbnail_ffmpeg(self, src_path, dst_path): + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") + + max_int = str(2147483647) + ffmpeg_cmd = [ + ffmpeg_path, + "-y", + "-analyzeduration", max_int, + "-probesize", max_int, + "-i", src_path, + "-vframes", "1", + dst_path + ] + + self.log.info("Running: {}".format(" ".join(ffmpeg_cmd))) + try: + run_subprocess(ffmpeg_cmd, logger=self.log) + return True + except Exception: + self.log.warning( + "Failed to create thubmnail using ffmpeg", + exc_info=True + ) + return False From e3988d9afeaff53708eef857970ac1f2273e298a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 15:10:02 +0200 Subject: [PATCH 1524/2550] simple creators and batch mov are adding 'thumbnailSource' to 'instance.data' --- .../traypublisher/plugins/publish/collect_movie_batch.py | 4 ++-- .../plugins/publish/collect_simple_instances.py | 7 ++++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py index f37e04d1c9..3d93e2c927 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py @@ -35,12 +35,12 @@ class CollectMovieBatch( "stagingDir": os.path.dirname(file_url), "tags": [] } + instance.data["representations"].append(repre) if creator_attributes["add_review_family"]: repre["tags"].append("review") instance.data["families"].append("review") - - instance.data["representations"].append(repre) + instance.data["thumbnailSource"] = file_url instance.data["source"] = file_url diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index c0ae694c3c..3f07f4db00 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -148,8 +148,11 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): )) return + item_dir = review_file_item["directory"] + first_filepath = 
os.path.join(item_dir, filenames[0]) + filepaths = { - os.path.join(review_file_item["directory"], filename) + os.path.join(item_dir, filename) for filename in filenames } source_filepaths.extend(filepaths) @@ -176,6 +179,8 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): if "review" not in instance.data["families"]: instance.data["families"].append("review") + instance.data["thumbnailSource"] = first_filepath + review_representation["tags"].append("review") self.log.debug("Representation {} was marked for review. {}".format( review_representation["name"], review_path From 1666c16f81deef47a977042974a1c1c8ab60e0f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 15:14:28 +0200 Subject: [PATCH 1525/2550] fix representation filename --- .../traypublisher/plugins/publish/extract_thumbnail.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py index eda3c8c191..7781bb7b3e 100644 --- a/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py @@ -71,9 +71,9 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): oiio_supported = is_oiio_supported() self.log.info("Thumbnail source: {}".format(thumbnail_source)) - filename = os.path.splitext(thumbnail_source)[0] - jpeg_file = filename + ".jpg" - full_output_path = os.path.join(dst_staging, jpeg_file) + src_basename = os.path.basename(thumbnail_source) + dst_filename = os.path.splitext(src_basename)[0] + ".jpg" + full_output_path = os.path.join(dst_staging, dst_filename) if oiio_supported: self.log.info("Trying to convert with OIIO") @@ -104,7 +104,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): new_repre = { "name": "thumbnail", "ext": "jpg", - "files": jpeg_file, + "files": dst_filename, "stagingDir": dst_staging, "thumbnail": True, "tags": ["thumbnail"] From 9ce236a9de4747102ccab076fd52f763cf5051d2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 15:52:57 +0200 Subject: [PATCH 1526/2550] Added creator item for warpping creator plugins --- openpype/tools/publisher/control.py | 103 +++++++++++++++++++++++++++- 1 file changed, 102 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 444cdbc914..047b34d550 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -18,7 +18,12 @@ from openpype.pipeline import ( PublishValidationError, registered_host, ) -from openpype.pipeline.create import CreateContext +from openpype.pipeline.create import ( + CreateContext, + AutoCreator, + HiddenCreator, + Creator, +) # Define constant for plugin orders offset PLUGIN_ORDER_OFFSET = 0.5 @@ -709,6 +714,102 @@ class PublishValidationErrors: self._plugin_action_items[plugin_id] = plugin_actions +class CreatorType: + def __init__(self, name): + self.name = name + + def __str__(self): + return self.name + + def __eq__(self, other): + return self.name == str(other) + + +class CreatorTypes: + base = CreatorType("base") + auto = CreatorType("auto") + hidden = CreatorType("hidden") + artist = CreatorType("artist") + + +class CreatorItem: + """Wrapper around Creator plugin. + + Object can be serialized and recreated. 
+ """ + + def __init__( + self, + identifier, + creator_type, + family, + label, + group_label, + icon, + instance_attributes_defs, + description, + detailed_description, + default_variant, + default_variants, + create_allow_context_change, + pre_create_attributes_defs + ): + self.identifier = identifier + self.creator_type = creator_type + self.family = family + self.label = label + self.icon = icon + self.description = description + self.detailed_description = detailed_description + self.default_variant = default_variant + self.default_variants = default_variants + self.create_allow_context_change = create_allow_context_change + self.instance_attributes_defs = instance_attributes_defs + self.pre_create_attributes_defs = pre_create_attributes_defs + + @classmethod + def from_creator(cls, creator): + if isinstance(creator, AutoCreator): + creator_type = CreatorTypes.auto + elif isinstance(creator, HiddenCreator): + creator_type = CreatorTypes.hidden + elif isinstance(creator, Creator): + creator_type = CreatorTypes.artist + else: + creator_type = CreatorTypes.base + + description = None + detail_description = None + default_variant = None + default_variants = None + pre_create_attr_defs = None + create_allow_context_change = None + if creator_type is CreatorTypes.artist: + description = creator.get_description() + detail_description = creator.get_detail_description() + default_variant = creator.get_default_variant() + default_variants = creator.get_default_variants() + pre_create_attr_defs = creator.get_pre_create_attr_defs() + create_allow_context_change = creator.create_allow_context_change + + identifier = creator.identifier + return cls( + identifier, + creator_type, + creator.family, + creator.label or identifier, + creator.get_group_label(), + creator.get_icon(), + creator.get_instance_attr_defs(), + description, + detail_description, + default_variant, + default_variants, + create_allow_context_change, + pre_create_attr_defs + ) + + @six.add_metaclass(ABCMeta) class AbstractPublisherController(object): """Publisher tool controller. 
From 447d15694a6eaa08c6470b0cc8329c6d94951803 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 15:53:26 +0200 Subject: [PATCH 1527/2550] use creator items instead of creators directly --- openpype/tools/publisher/control.py | 22 ++++--- .../tools/publisher/widgets/create_widget.py | 59 +++++++++++-------- .../publisher/widgets/precreate_widget.py | 6 +- 3 files changed, 50 insertions(+), 37 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 047b34d550..a8b9290811 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1107,7 +1107,6 @@ class AbstractPublisherController(object): @abstractmethod def get_publish_crash_error(self): - pass @abstractmethod @@ -1201,6 +1200,8 @@ class PublisherController(AbstractPublisherController): self._host, dbcon, headless=headless, reset=False ) + self._creator_items = {} + self._publish_plugins_proxy = None # pyblish.api.Context @@ -1290,9 +1291,10 @@ class PublisherController(AbstractPublisherController): return self._create_context.creators @property - def manual_creators(self): + def creator_items(self): """Creators that can be shown in create dialog.""" - return self._create_context.manual_creators + + return self._creator_items @property def host_is_valid(self): @@ -1393,6 +1395,12 @@ class PublisherController(AbstractPublisherController): self._create_context.reset_plugins() + creator_items = { + identifier: CreatorItem.from_creator(creator) + for identifier, creator in self._create_context.creators.items() + } + self._creator_items = creator_items + self._resetting_plugins = False self._emit_event("plugins.refresh.finished") @@ -1498,10 +1506,9 @@ class PublisherController(AbstractPublisherController): return output def get_creator_icon(self, identifier): - """TODO rename to get creator icon.""" - creator = self._creators.get(identifier) - if creator is not None: - return creator.get_icon() + creator_item = self._creator_items.get(identifier) + if creator_item is not None: + return creator_item.icon return None def get_subset_name( @@ -1526,7 +1533,6 @@ class PublisherController(AbstractPublisherController): creator = self._creators[creator_identifier] project_name = self.project_name - print(asset_name) asset_doc = self._asset_docs_cache.get_full_asset_by_name(asset_name) return creator.get_subset_name( diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 39fdeae30f..10cf39675e 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -1,11 +1,9 @@ import sys import re import traceback -import copy from Qt import QtWidgets, QtCore, QtGui -from openpype.client import get_asset_by_name, get_subsets from openpype.pipeline.create import ( CreatorError, SUBSET_NAME_ALLOWED_SYMBOLS, @@ -150,18 +148,18 @@ class CreatorShortDescWidget(QtWidgets.QWidget): self._family_label = family_label self._description_label = description_label - def set_plugin(self, plugin=None): - if not plugin: + def set_creator_item(self, creator_item=None): + if not creator_item: self._icon_widget.set_icon_def(None) self._family_label.setText("") self._description_label.setText("") return - plugin_icon = plugin.get_icon() - description = plugin.get_description() or "" + plugin_icon = creator_item.icon + description = creator_item.description or "" self._icon_widget.set_icon_def(plugin_icon) - 
self._family_label.setText("{}".format(plugin.family)) + self._family_label.setText("{}".format(creator_item.family)) self._family_label.setTextInteractionFlags(QtCore.Qt.NoTextInteraction) self._description_label.setText(description) @@ -495,7 +493,10 @@ class CreateWidget(QtWidgets.QWidget): # Add new families new_creators = set() - for identifier, creator in self._controller.manual_creators.items(): + for identifier, creator_item in self._controller.creator_items.items(): + if creator_item.creator_type != "artist": + continue + # TODO add details about creator new_creators.add(identifier) if identifier in existing_items: @@ -507,10 +508,9 @@ class CreateWidget(QtWidgets.QWidget): ) self._creators_model.appendRow(item) - label = creator.label or identifier - item.setData(label, QtCore.Qt.DisplayRole) + item.setData(creator_item.label, QtCore.Qt.DisplayRole) item.setData(identifier, CREATOR_IDENTIFIER_ROLE) - item.setData(creator.family, FAMILY_ROLE) + item.setData(creator_item.family, FAMILY_ROLE) # Remove families that are no more available for identifier in (old_creators - new_creators): @@ -561,11 +561,11 @@ class CreateWidget(QtWidgets.QWidget): identifier = new_index.data(CREATOR_IDENTIFIER_ROLE) self._set_creator_by_identifier(identifier) - def _set_creator_detailed_text(self, creator): + def _set_creator_detailed_text(self, creator_item): # TODO implement description = "" - if creator is not None: - description = creator.get_detail_description() or description + if creator_item is not None: + description = creator_item.detailed_description or description self._controller.event_system.emit( "show.detailed.help", { @@ -575,32 +575,39 @@ class CreateWidget(QtWidgets.QWidget): ) def _set_creator_by_identifier(self, identifier): - creator = self._controller.manual_creators.get(identifier) - self._set_creator(creator) + creator_item = self._controller.creator_items.get(identifier) + self._set_creator(creator_item) - def _set_creator(self, creator): - self._creator_short_desc_widget.set_plugin(creator) - self._set_creator_detailed_text(creator) - self._pre_create_widget.set_plugin(creator) + def _set_creator(self, creator_item): + """Set current creator item. - self._selected_creator = creator + Args: + creator_item (CreatorItem): Item representing creator that can be + triggered by artist. 
+ """ - if not creator: + self._creator_short_desc_widget.set_creator_item(creator_item) + self._set_creator_detailed_text(creator_item) + self._pre_create_widget.set_creator_item(creator_item) + + self._selected_creator = creator_item + + if not creator_item: self._set_context_enabled(False) return if ( - creator.create_allow_context_change + creator_item.create_allow_context_change != self._context_change_is_enabled() ): - self._set_context_enabled(creator.create_allow_context_change) + self._set_context_enabled(creator_item.create_allow_context_change) self._refresh_asset() - default_variants = creator.get_default_variants() + default_variants = creator_item.default_variants if not default_variants: default_variants = ["Main"] - default_variant = creator.get_default_variant() + default_variant = creator_item.default_variant if not default_variant: default_variant = default_variants[0] diff --git a/openpype/tools/publisher/widgets/precreate_widget.py b/openpype/tools/publisher/widgets/precreate_widget.py index eaadfe890b..ef34c9bcb5 100644 --- a/openpype/tools/publisher/widgets/precreate_widget.py +++ b/openpype/tools/publisher/widgets/precreate_widget.py @@ -58,12 +58,12 @@ class PreCreateWidget(QtWidgets.QWidget): def current_value(self): return self._attributes_widget.current_value() - def set_plugin(self, creator): + def set_creator_item(self, creator_item): attr_defs = [] creator_selected = False - if creator is not None: + if creator_item is not None: creator_selected = True - attr_defs = creator.get_pre_create_attr_defs() + attr_defs = creator_item.pre_create_attributes_defs self._attributes_widget.set_attr_defs(attr_defs) From 06e1cf0b0ffd5f74da6bea47e1bc82c83623d844 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 16:34:27 +0200 Subject: [PATCH 1528/2550] attribute definitions now have types --- openpype/lib/attribute_definitions.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 37446f01f8..0ce4c7866f 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -3,7 +3,7 @@ import re import collections import uuid import json -from abc import ABCMeta, abstractmethod +from abc import ABCMeta, abstractmethod, abstractproperty import six import clique @@ -115,6 +115,16 @@ class AbtractAttrDef: return False return self.key == other.key + @abstractproperty + def type(self): + """Attribute definition type also used as identifier of class. + + Returns: + str: Type of attribute definition. + """ + + pass + @abstractmethod def convert_value(self, value): """Convert value to a valid one. @@ -141,10 +151,12 @@ class UIDef(AbtractAttrDef): class UISeparatorDef(UIDef): - pass + type = "separator" class UILabelDef(UIDef): + type = "label" + def __init__(self, label): super(UILabelDef, self).__init__(label=label) @@ -160,6 +172,8 @@ class UnknownDef(AbtractAttrDef): have known definition of type. """ + type = "unknown" + def __init__(self, key, default=None, **kwargs): kwargs["default"] = default super(UnknownDef, self).__init__(key, **kwargs) @@ -181,6 +195,7 @@ class NumberDef(AbtractAttrDef): default(int, float): Default value for conversion. """ + type = "number" def __init__( self, key, minimum=None, maximum=None, decimals=None, default=None, **kwargs @@ -301,6 +316,8 @@ class EnumDef(AbtractAttrDef): default: Default value. Must be one key(value) from passed items. 
""" + type = "enum" + def __init__(self, key, items, default=None, **kwargs): if not items: raise ValueError(( @@ -343,6 +360,8 @@ class BoolDef(AbtractAttrDef): default(bool): Default value. Set to `False` if not defined. """ + type = "bool" + def __init__(self, key, default=None, **kwargs): if default is None: default = False @@ -585,6 +604,7 @@ class FileDef(AbtractAttrDef): default(str, List[str]): Default value. """ + type = "path" def __init__( self, key, single_item=True, folders=None, extensions=None, allow_sequences=True, extensions_label=None, default=None, **kwargs From bc39b992709ee75da2bf6dbc6f679b0a84b8f5f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 16:34:50 +0200 Subject: [PATCH 1529/2550] attribute definitions can be serialized and deserialized --- openpype/lib/attribute_definitions.py | 61 +++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 0ce4c7866f..a721aa09b8 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -90,6 +90,8 @@ class AbtractAttrDef: next to value input or ahead. """ + type_attributes = [] + is_value_def = True def __init__( @@ -135,6 +137,35 @@ class AbtractAttrDef: pass + def serialize(self): + """Serialize object to data so it's possible to recreate it. + + Returns: + Dict[str, Any]: Serialized object that can be passed to + 'deserialize' method. + """ + + data = { + "type": self.type, + "key": self.key, + "label": self.label, + "tooltip": self.tooltip, + "default": self.default, + "is_label_horizontal": self.is_label_horizontal + } + for attr in self.type_attributes: + data[attr] = getattr(self, attr) + return data + + @classmethod + def deserialize(cls, data): + """Recreate object from data. + + Data can be received using 'serialize' method. + """ + + return cls(**data) + # ----------------------------------------- # UI attribute definitoins won't hold value @@ -196,6 +227,12 @@ class NumberDef(AbtractAttrDef): """ type = "number" + type_attributes = [ + "minimum", + "maximum", + "decimals" + ] + def __init__( self, key, minimum=None, maximum=None, decimals=None, default=None, **kwargs @@ -267,6 +304,12 @@ class TextDef(AbtractAttrDef): default(str, None): Default value. Empty string used when not defined. """ + type = "text" + type_attributes = [ + "multiline", + "placeholder", + ] + def __init__( self, key, multiline=None, regex=None, placeholder=None, default=None, **kwargs @@ -305,6 +348,11 @@ class TextDef(AbtractAttrDef): return value return self.default + def serialize(self): + data = super(TextDef, self).serialize() + data["regex"] = self.regex.pattern + return data + class EnumDef(AbtractAttrDef): """Enumeration of single item from items. @@ -352,6 +400,11 @@ class EnumDef(AbtractAttrDef): return value return self.default + def serialize(self): + data = super(TextDef, self).serialize() + data["items"] = list(self.items) + return data + class BoolDef(AbtractAttrDef): """Boolean representation. 
@@ -605,6 +658,14 @@ class FileDef(AbtractAttrDef): """ type = "path" + type_attributes = [ + "single_item", + "folders", + "extensions", + "allow_sequences", + "extensions_label", + ] + def __init__( self, key, single_item=True, folders=None, extensions=None, allow_sequences=True, extensions_label=None, default=None, **kwargs From ac406106308bbff34b2e6a94d9d133159d23a853 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 16:35:07 +0200 Subject: [PATCH 1530/2550] added helper functions to serialize and deserialize attribute definitions --- openpype/lib/attribute_definitions.py | 90 +++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index a721aa09b8..bb0b07948f 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -8,6 +8,28 @@ from abc import ABCMeta, abstractmethod, abstractproperty import six import clique +# Global variable which store attribude definitions by type +# - default types are registered on import +_attr_defs_by_type = {} + + +def register_attr_def_class(cls): + """Register attribute definition. + + Currently are registered definitions used to deserialize data to objects. + + Attrs: + cls (AbtractAttrDef): Non-abstract class to be registered with unique + 'type' attribute. + + Raises: + KeyError: When type was already registered. + """ + + if cls.type in _attr_defs_by_type: + raise KeyError("Type \"{}\" was already registered".format(cls.type)) + _attr_defs_by_type[cls.type] = cls + def get_attributes_keys(attribute_definitions): """Collect keys from list of attribute definitions. @@ -756,3 +778,71 @@ class FileDef(AbtractAttrDef): if self.single_item: return FileDefItem.create_empty_item().to_dict() return [] + + +def serialize_attr_def(attr_def): + """Serialize attribute definition to data. + + Args: + attr_def (AbtractAttrDef): Attribute definition to serialize. + + Returns: + Dict[str, Any]: Serialized data. + """ + + return attr_def.serialize() + + +def serialize_attr_defs(attr_defs): + """Serialize attribute definitions to data. + + Args: + attr_defs (List[AbtractAttrDef]): Attribute definitions to serialize. + + Returns: + List[Dict[str, Any]]: Serialized data. + """ + + return [ + serialize_attr_def(attr_def) + for attr_def in attr_defs + ] + + +def deserialize_attr_def(attr_def_data): + """Deserialize attribute definition from data. + + Args: + attr_def (Dict[str, Any]): Attribute definition data to deserialize. + """ + + attr_type = attr_def_data.pop("type") + cls = _attr_defs_by_type[attr_type] + return cls.deserialize(attr_def_data) + + +def deserialize_attr_defs(attr_defs_data): + """Deserialize attribute definitions. + + Args: + List[Dict[str, Any]]: List of attribute definitions. 
+ """ + + return [ + deserialize_attr_def(attr_def_data) + for attr_def_data in attr_defs_data + ] + + +# Register attribute definitions +for _attr_class in ( + UISeparatorDef, + UILabelDef, + UnknownDef, + NumberDef, + TextDef, + EnumDef, + BoolDef, + FileDef +): + register_attr_def_class(_attr_class) From 409ec104055779bab17c78da7d344c012dbf517f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 16:47:13 +0200 Subject: [PATCH 1531/2550] added serialization and deserialization of CreatorItem --- openpype/tools/publisher/control.py | 66 ++++++++++++++++++++++++++++- 1 file changed, 64 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index a8b9290811..f96782b08d 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -14,6 +14,10 @@ from openpype.client import ( get_subsets, ) from openpype.lib.events import EventSystem +from openpype.lib.attribute_definitions import ( + serialize_attr_defs, + deserialize_attr_defs, +) from openpype.pipeline import ( PublishValidationError, registered_host, @@ -731,6 +735,18 @@ class CreatorTypes: hidden = CreatorType("hidden") artist = CreatorType("artist") + @classmethod + def from_str(cls, value): + for creator_type in ( + cls.base, + cls.auto, + cls.hidden, + cls.artist + ): + if value == creator_type: + return creator_type + raise ValueError("Unknown type \"{}\"".format(str(value))) + class CreatorItem: """Wrapper around Creator plugin. @@ -758,6 +774,7 @@ class CreatorItem: self.creator_type = creator_type self.family = family self.label = label + self.group_label = group_label self.icon = icon self.description = description self.detailed_description = detailed_description @@ -809,6 +826,52 @@ class CreatorItem: pre_create_attr_defs ) + def to_data(self): + instance_attributes_defs = None + if self.instance_attributes_defs is not None: + instance_attributes_defs = serialize_attr_defs( + self.instance_attributes_defs + ) + + pre_create_attributes_defs = None + if self.pre_create_attributes_defs is not None: + instance_attributes_defs = serialize_attr_defs( + self.pre_create_attributes_defs + ) + + return { + "identifier": self.identifier, + "creator_type": str(self.creator_type), + "family": self.family, + "label": self.label, + "group_label": self.group_label, + "icon": self.icon, + "description": self.description, + "detailed_description": self.detailed_description, + "default_variant": self.default_variant, + "default_variants": self.default_variants, + "create_allow_context_change": self.create_allow_context_change, + "instance_attributes_defs": instance_attributes_defs, + "pre_create_attributes_defs": pre_create_attributes_defs, + } + + @classmethod + def from_data(cls, data): + instance_attributes_defs = data["instance_attributes_defs"] + if instance_attributes_defs is not None: + data["instance_attributes_defs"] = deserialize_attr_defs( + instance_attributes_defs + ) + + pre_create_attributes_defs = data["pre_create_attributes_defs"] + if pre_create_attributes_defs is not None: + data["pre_create_attributes_defs"] = deserialize_attr_defs( + pre_create_attributes_defs + ) + + data["creator_type"] = CreatorTypes.from_str(data["creator_type"]) + return cls(**data) + @six.add_metaclass(ABCMeta) class AbstractPublisherController(object): @@ -1395,11 +1458,10 @@ class PublisherController(AbstractPublisherController): self._create_context.reset_plugins() - creator_items = { + self._creator_items = { identifier: 
CreatorItem.from_creator(creator) for identifier, creator in self._create_context.creators.items() } - self._creator_items = creator_items self._resetting_plugins = False From 32768187f26c907cc78d5f3b2f36b33880975b01 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 7 Oct 2022 17:59:13 +0200 Subject: [PATCH 1532/2550] :rotating_light: cosmetic fixes --- openpype/hosts/maya/plugins/publish/validate_unique_names.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_unique_names.py b/openpype/hosts/maya/plugins/publish/validate_unique_names.py index 33a460f7cc..05776ee0f3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unique_names.py +++ b/openpype/hosts/maya/plugins/publish/validate_unique_names.py @@ -1,7 +1,6 @@ from maya import cmds import pyblish.api -import openpype.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateContentsOrder @@ -24,7 +23,7 @@ class ValidateUniqueNames(pyblish.api.Validator): """Returns the invalid transforms in the instance. Returns: - list: Non unique name transforms + list: Non-unique name transforms. """ From 53487001c53dfe760221e0deed4d5bc49077dab8 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 7 Oct 2022 18:31:13 +0200 Subject: [PATCH 1533/2550] :recycle: flipping default value and changing settings access --- .../deadline/plugins/publish/submit_maya_deadline.py | 11 ++++++----- openpype/settings/defaults/project_settings/maya.json | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 67771eae50..feb20c9e8a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -201,11 +201,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Set it to default Maya behaviour if it cannot be determined # from instance (but it should be, by the Collector). 
- default_rs_include_lights = instance.context.data\ - .get('project_settings')\ - .get('maya')\ - .get('RenderSettings')\ - .get('enable_all_lights') + default_rs_include_lights = ( + instance.context.data['project_settings'] + ['maya'] + ['RenderSettings'] + ['enable_all_lights'] + ) rs_include_lights = instance.data.get( "renderSetupIncludeLights", default_rs_include_lights) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 76ef0a7338..cacedb6f7f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -35,7 +35,7 @@ "RenderSettings": { "apply_render_settings": true, "default_render_image_folder": "renders", - "enable_all_lights": false, + "enable_all_lights": true, "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { From 12fee4ec4ff0985d28c74b40070e40aa13f25238 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:40:23 +0200 Subject: [PATCH 1534/2550] create context provides instances by id --- openpype/pipeline/create/context.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index a7e43cb2f2..87768606e6 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -780,6 +780,10 @@ class CreateContext: def instances(self): return self._instances_by_id.values() + @property + def instances_by_id(self): + return self._instances_by_id + @property def publish_attributes(self): """Access to global publish attributes.""" From 8f83ff878f45a01a3689da4e31ca63db5c97a67d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:40:41 +0200 Subject: [PATCH 1535/2550] prepared some methods for instance remote processing --- openpype/pipeline/create/context.py | 116 ++++++++++++++++++++++++++++ 1 file changed, 116 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 87768606e6..804e3955e5 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -197,6 +197,16 @@ class AttributeValues: def changes(self): return self.calculate_changes(self._data, self._origin_data) + def apply_changes(self, changes): + for key, item in changes.items(): + old_value, new_value = item + if new_value is None: + if key in self: + self.pop(key) + + elif self.get(key) != new_value: + self[key] = new_value + class CreatorAttributeValues(AttributeValues): """Creator specific attribute values of an instance. @@ -327,6 +337,21 @@ class PublishAttributes: changes[key] = (value, None) return changes + def apply_changes(self, changes): + for key, item in changes.items(): + if isinstance(item, dict): + self._data[key].apply_changes(item) + continue + + old_value, new_value = item + if new_value is not None: + raise ValueError( + "Unexpected type \"{}\" expected None".format( + str(type(new_value)) + ) + ) + self.pop(key) + def set_publish_plugins(self, attr_plugins): """Set publish plugins attribute definitions.""" @@ -693,6 +718,97 @@ class CreatedInstance: if member not in self._members: self._members.append(member) + def serialize_for_remote(self): + return { + "data": self.data_to_store(), + "orig_data": copy.deepcopy(self._orig_data) + } + + @classmethod + def deserialize_on_remote(cls, serialized_data, creator_items): + """Convert instance data to CreatedInstance. + + This is fake instance in remote process e.g. 
in UI process. The creator + is not a full creator and should not be used for calling methods when + instance is created from this method (matters on implementation). + + Args: + serialized_data (Dict[str, Any]): Serialized data for remote + recreating. Should contain 'data' and 'orig_data'. + creator_items (Dict[str, Any]): Mapping of creator identifier and + objects that behave like a creator for most of attribute + access. + """ + + instance_data = copy.deepcopy(serialized_data["data"]) + creator_identifier = instance_data["creator_identifier"] + creator_item = creator_items[creator_identifier] + + family = instance_data.get("family", None) + if family is None: + family = creator_item.family + subset_name = instance_data.get("subset", None) + + obj = cls( + family, subset_name, instance_data, creator_item, new=False + ) + obj._orig_data = serialized_data["orig_data"] + + return obj + + def remote_changes(self): + """Prepare serializable changes on remote side. + + Returns: + Dict[str, Any]: Prepared changes that can be send to client side. + """ + + return { + "changes": self.changes(), + "asset_is_valid": self._asset_is_valid, + "task_is_valid": self._task_is_valid, + } + + def update_from_remote(self, remote_changes): + """Apply changes from remote side on client side. + + Args: + remote_changes (Dict[str, Any]): Changes created on remote side. + """ + + self._asset_is_valid = remote_changes["asset_is_valid"] + self._task_is_valid = remote_changes["task_is_valid"] + + changes = remote_changes["changes"] + creator_attributes = changes.pop("creator_attributes", None) or {} + publish_attributes = changes.pop("publish_attributes", None) or {} + if changes: + self.apply_changes(changes) + + if creator_attributes: + self.creator_attributes.apply_changes(creator_attributes) + + if publish_attributes: + self.publish_attributes.apply_changes(publish_attributes) + + def apply_changes(self, changes): + """Apply changes created via 'changes'. + + Args: + Dict[str, Tuple[Any, Any]]: Instance changes to apply. Same values + are kept untouched. + """ + + for key, item in changes.items(): + old_value, new_value = item + if new_value is None: + if key in self: + self.pop(key) + else: + current_value = self.get(key) + if current_value != new_value: + self[key] = new_value + class CreateContext: """Context of instance creation. 
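The client/remote split described in the docstrings above boils down to a simple round trip. A minimal sketch of the intended flow (hypothetical variable names; assumes 'instance' is an existing CreatedInstance on the host side and 'creator_items' is the identifier-to-item mapping built by the publisher controller):

    # --- client (host process) side ---
    payload = instance.serialize_for_remote()        # plain data, safe to send

    # --- remote (UI process) side ---
    remote_instance = CreatedInstance.deserialize_on_remote(
        payload, creator_items
    )
    remote_instance["active"] = False                # artist edits the copy
    remote_changes = remote_instance.remote_changes()  # serializable changes

    # --- back on the client side ---
    instance.update_from_remote(remote_changes)      # apply the same edits to
                                                     # the real instance
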
From b5a4420f0a8fa78b26988ab1e7e18d7150a04799 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:41:31 +0200 Subject: [PATCH 1536/2550] instances returns instances by id --- openpype/tools/publisher/control.py | 2 +- openpype/tools/publisher/widgets/card_view_widgets.py | 2 +- openpype/tools/publisher/widgets/list_view_widgets.py | 7 ++----- openpype/tools/publisher/window.py | 2 +- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index f96782b08d..6765c75992 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1345,7 +1345,7 @@ class PublisherController(AbstractPublisherController): @property def instances(self): """Current instances in create context.""" - return self._create_context.instances + return self._create_context.instances_by_id @property def _creators(self): diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 06fa49320e..2be37ea44c 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -441,7 +441,7 @@ class InstanceCardView(AbstractInstanceView): # Prepare instances by group and identifiers by group instances_by_group = collections.defaultdict(list) identifiers_by_group = collections.defaultdict(set) - for instance in self._controller.instances: + for instance in self._controller.instances.values(): group_name = instance.group_label instances_by_group[group_name].append(instance) identifiers_by_group[group_name].add( diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 8438e17167..17b50b764a 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -520,7 +520,7 @@ class InstanceListView(AbstractInstanceView): # Prepare instances by their groups instances_by_group_name = collections.defaultdict(list) group_names = set() - for instance in self._controller.instances: + for instance in self._controller.instances.values(): group_label = instance.group_label group_names.add(group_label) instances_by_group_name[group_label].append(instance) @@ -769,10 +769,7 @@ class InstanceListView(AbstractInstanceView): """ instances = [] context_selected = False - instances_by_id = { - instance.id: instance - for instance in self._controller.instances - } + instances_by_id = self._controller.instances for index in self._instance_view.selectionModel().selectedIndexes(): instance_id = index.data(INSTANCE_ID_ROLE) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 699cf6f1f9..bc2e42f051 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -523,7 +523,7 @@ class PublisherWindow(QtWidgets.QDialog): return all_valid = None - for instance in self._controller.instances: + for instance in self._controller.instances.values(): if not instance["active"]: continue From 56cea034aba692180f59a68814b900d5e127d8da Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:41:46 +0200 Subject: [PATCH 1537/2550] don't call same property more then once --- openpype/tools/publisher/window.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index bc2e42f051..3b3e27660d 100644 --- 
a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -496,8 +496,9 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_overlay_visibility(False) self._reset_btn.setEnabled(True) self._stop_btn.setEnabled(False) - validate_enabled = not self._controller.publish_has_crashed - publish_enabled = not self._controller.publish_has_crashed + publish_has_crashed = self._controller.publish_has_crashed + validate_enabled = not publish_has_crashed + publish_enabled = not publish_has_crashed if validate_enabled: validate_enabled = not self._controller.publish_has_validated if publish_enabled: From d71f201f65d453a8dacb98330e0f1fab39276d8a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:42:26 +0200 Subject: [PATCH 1538/2550] removed 'reset_project_data_cache' used in traypublisher --- openpype/tools/publisher/control.py | 7 ------- openpype/tools/traypublisher/window.py | 3 +++ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 6765c75992..4482aea5ec 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1213,10 +1213,6 @@ class AbstractPublisherController(object): pass - @abstractmethod - def reset_project_data_cache(self): - pass - @abstractmethod def set_comment(self, comment): """Set comment on pyblish context. @@ -1905,9 +1901,6 @@ class PublisherController(AbstractPublisherController): self._publish_next_process() - def reset_project_data_cache(self): - self._asset_docs_cache.reset() - def collect_families_from_instances(instances, only_active=False): """Collect all families for passed publish instances. diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index be9f12e269..dfe06d149d 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -30,6 +30,9 @@ class TrayPublisherController(QtPublisherController): def host(self): return self._host + def reset_project_data_cache(self): + self._asset_docs_cache.reset() + class TrayPublisherRegistry(JSONSettingRegistry): """Class handling OpenPype general settings registry. 
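With 'controller.instances' now an id-keyed mapping instead of a list (per the commits above), callers iterate its values or resolve ids directly. A short illustrative sketch (hypothetical caller code, not part of the patches; 'controller' and 'selected_instance_ids' are assumed to exist):

    instances_by_id = controller.instances            # {instance_id: CreatedInstance}

    # iterate all created instances
    for instance in instances_by_id.values():
        print(instance.group_label)

    # resolve instance ids coming from a view selection
    selected_instances = []
    for instance_id in selected_instance_ids:
        instance = instances_by_id.get(instance_id)
        if instance is not None:
            selected_instances.append(instance)
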
From 05344514d320c2cacba1a4a826f86b9910372839 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:42:45 +0200 Subject: [PATCH 1539/2550] reset assets cache on controller reset --- openpype/tools/publisher/control.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 4482aea5ec..a2dd88e4fb 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1438,6 +1438,8 @@ class PublisherController(AbstractPublisherController): # Reset avalon context self._create_context.reset_avalon_context() + self._asset_docs_cache.reset() + self._reset_plugins() # Publish part must be reset after plugins self._reset_publish() From 92f28271c5ba7d3769b453b11bf60a9b14d49e0c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:43:15 +0200 Subject: [PATCH 1540/2550] mimic creator methods --- openpype/tools/publisher/control.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index a2dd88e4fb..9f62eed54a 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -784,6 +784,12 @@ class CreatorItem: self.instance_attributes_defs = instance_attributes_defs self.pre_create_attributes_defs = pre_create_attributes_defs + def get_instance_attr_defs(self): + return self.instance_attributes_defs + + def get_group_label(self): + return self.group_label + @classmethod def from_creator(cls, creator): if isinstance(creator, AutoCreator): From ae717d4151a34f09f0cf6b7a641bca37d22757da Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:43:45 +0200 Subject: [PATCH 1541/2550] use creator item to get attribute definitions instead of instance --- openpype/tools/publisher/control.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 9f62eed54a..389382b96e 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1500,7 +1500,9 @@ class PublisherController(AbstractPublisherController): output = [] _attr_defs = {} for instance in instances: - for attr_def in instance.creator_attribute_defs: + creator_identifier = instance.creator_identifier + creator_item = self._creator_items[creator_identifier] + for attr_def in creator_item.instance_attributes_defs: found_idx = None for idx, _attr_def in _attr_defs.items(): if attr_def == _attr_def: From 098bcce75193e5e46adbe29ba1d9771ab0ab2f59 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:44:31 +0200 Subject: [PATCH 1542/2550] added some helper functions for easy overriding to avoid duplicity --- openpype/tools/publisher/control.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 389382b96e..b08486654c 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1089,6 +1089,7 @@ class AbstractPublisherController(object): def remove_instances(self, instances): """Remove list of instances from create context.""" + # TODO expect instance ids pass @@ -1485,7 +1486,7 @@ class PublisherController(AbstractPublisherController): self._resetting_instances = False - self._emit_event("instances.refresh.finished") + self._on_create_instance_change() def emit_card_message(self, message): self._emit_event("show.card.message", {"message": 
message}) @@ -1494,9 +1495,10 @@ class PublisherController(AbstractPublisherController): """Collect creator attribute definitions for multuple instances. Args: - instances(list): List of created instances for + instances(List[CreatedInstance]): List of created instances for which should be attribute definitions returned. """ + output = [] _attr_defs = {} for instance in instances: @@ -1530,6 +1532,7 @@ class PublisherController(AbstractPublisherController): which should be attribute definitions returned. include_context(bool): Add context specific attribute definitions. """ + _tmp_items = [] if include_context: _tmp_items.append(self._create_context) @@ -1614,7 +1617,7 @@ class PublisherController(AbstractPublisherController): creator = self._creators[creator_identifier] creator.create(subset_name, instance_data, options) - self._emit_event("instances.refresh.finished") + self._on_create_instance_change() def save_changes(self): """Save changes happened during creation.""" @@ -1623,12 +1626,19 @@ class PublisherController(AbstractPublisherController): def remove_instances(self, instances): """""" + # TODO expect instance ids instead of instances # QUESTION Expect that instances are really removed? In that case save # reset is not required and save changes too. self.save_changes() + self._remove_instances_from_context(instances) + + self._on_create_instance_change() + + def _remove_instances_from_context(self, instances): self._create_context.remove_instances(instances) + def _on_create_instance_change(self): self._emit_event("instances.refresh.finished") # --- Publish specific implementations --- From 7e53b0354a37de52ef46d7011275b99e832e4e18 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 7 Oct 2022 18:44:51 +0200 Subject: [PATCH 1543/2550] prepared base class of remote qt controller --- openpype/tools/publisher/control_qt.py | 311 +++++++++++++++++++++++++ 1 file changed, 311 insertions(+) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 8515a7a843..c7099caf98 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -2,6 +2,8 @@ import collections from Qt import QtCore +from openpype.pipeline.create import CreatedInstance + from .control import MainThreadItem, PublisherController @@ -86,3 +88,312 @@ class QtPublisherController(PublisherController): def _qt_on_publish_stop(self): self._main_thread_processor.stop() + + +class QtRemotePublishController(QtPublisherController): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self._created_instances = {} + + def _on_create_instance_change(self): + # TODO somehow get serialized instances from client + serialized_instances = [] + + created_instances = {} + for serialized_data in serialized_instances: + item = CreatedInstance.deserialize_on_remote( + serialized_data, + self._creator_items + ) + created_instances[item.id] = item + + self._created_instances = created_instances + self._emit_event("instances.refresh.finished") + + @property + def project_name(self): + """Current context project name. + + Returns: + str: Name of project. + """ + + pass + + @property + def current_asset_name(self): + """Current context asset name. + + Returns: + Union[str, None]: Name of asset. + """ + + pass + + @property + def current_task_name(self): + """Current context task name. + + Returns: + Union[str, None]: Name of task. + """ + + pass + + @property + def host_is_valid(self): + """Host is valid for creation part. 
+ + Host must have implemented certain functionality to be able create + in Publisher tool. + + Returns: + bool: Host can handle creation of instances. + """ + + pass + + @property + def instances(self): + """Collected/created instances. + + Returns: + List[CreatedInstance]: List of created instances. + """ + + return self._created_instances + + def get_context_title(self): + """Get context title for artist shown at the top of main window. + + Returns: + Union[str, None]: Context title for window or None. In case of None + a warning is displayed (not nice for artists). + """ + + pass + + def get_asset_docs(self): + pass + + def get_asset_hierarchy(self): + pass + + def get_task_names_by_asset_names(self, asset_names): + pass + + def get_existing_subset_names(self, asset_name): + pass + + def reset(self): + """Reset whole controller. + + This should reset create context, publish context and all variables + that are related to it. + """ + + pass + + def get_publish_attribute_definitions(self, instances, include_context): + pass + + def get_subset_name( + self, + creator_identifier, + variant, + task_name, + asset_name, + instance_id=None + ): + """Get subset name based on passed data. + + Args: + creator_identifier (str): Identifier of creator which should be + responsible for subset name creation. + variant (str): Variant value from user's input. + task_name (str): Name of task for which is instance created. + asset_name (str): Name of asset for which is instance created. + instance_id (Union[str, None]): Existing instance id when subset + name is updated. + """ + + pass + + def create( + self, creator_identifier, subset_name, instance_data, options + ): + """Trigger creation by creator identifier. + + Should also trigger refresh of instanes. + + Args: + creator_identifier (str): Identifier of Creator plugin. + subset_name (str): Calculated subset name. + instance_data (Dict[str, Any]): Base instance data with variant, + asset name and task name. + options (Dict[str, Any]): Data from pre-create attributes. + """ + + pass + + def save_changes(self): + """Save changes happened during creation.""" + + created_instance_changes = {} + for instance_id, instance in self._created_instances.items(): + created_instance_changes[instance_id] = ( + instance.remote_changes() + ) + + # TODO trigger save changes + self._trigger("save_changes", created_instance_changes) + + def remove_instances(self, instances): + """Remove list of instances from create context.""" + # TODO add Args: + + pass + + @property + def publish_has_finished(self): + """Has publishing finished. + + Returns: + bool: If publishing finished and all plugins were iterated. + """ + + pass + + @property + def publish_is_running(self): + """Publishing is running right now. + + Returns: + bool: If publishing is in progress. + """ + + pass + + @property + def publish_has_validated(self): + """Publish validation passed. + + Returns: + bool: If publishing passed last possible validation order. + """ + + pass + + @property + def publish_has_crashed(self): + """Publishing crashed for any reason. + + Returns: + bool: Publishing crashed. + """ + + pass + + @property + def publish_has_validation_errors(self): + """During validation happened at least one validation error. + + Returns: + bool: Validation error was raised during validation. + """ + + pass + + @property + def publish_max_progress(self): + """Get maximum possible progress number. + + Returns: + int: Number that can be used as 100% of publish progress bar. 
+ """ + + pass + + @property + def publish_progress(self): + """Current progress number. + + Returns: + int: Current progress value which is between 0 and + 'publish_max_progress'. + """ + + pass + + @property + def publish_comment_is_set(self): + """Publish comment was at least once set. + + Publish comment can be set only once when publish is started for a + first time. This helpt to idetify if 'set_comment' should be called or + not. + """ + + pass + + def get_publish_crash_error(self): + pass + + def get_publish_report(self): + pass + + def get_validation_errors(self): + pass + + def publish(self): + """Trigger publishing without any order limitations.""" + + pass + + def validate(self): + """Trigger publishing which will stop after validation order.""" + + pass + + def stop_publish(self): + """Stop publishing can be also used to pause publishing. + + Pause of publishing is possible only if all plugins successfully + finished. + """ + + pass + + def run_action(self, plugin_id, action_id): + """Trigger pyblish action on a plugin. + + Args: + plugin_id (str): Id of publish plugin. + action_id (str): Id of publish action. + """ + + pass + + def set_comment(self, comment): + """Set comment on pyblish context. + + Set "comment" key on current pyblish.api.Context data. + + Args: + comment (str): Artist's comment. + """ + + pass + + def emit_card_message(self, message): + """Emit a card message which can have a lifetime. + + This is for UI purposes. Method can be extended to more arguments + in future e.g. different message timeout or type (color). + + Args: + message (str): Message that will be showed. + """ + + pass From ea4ede05a8983118987f75a9f6a169a5d84770be Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 7 Oct 2022 20:31:46 +0200 Subject: [PATCH 1544/2550] Hotfix for Maya + Deadline + Redshift renders without merge AOVs Fix #3953 --- .../deadline/plugins/publish/submit_maya_deadline.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 4d6068f3c0..75a3921237 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -32,6 +32,9 @@ from maya import cmds from openpype.pipeline import legacy_io +from openpype.hosts.maya.api.lib_rendersettings import RenderSettings +from openpype.hosts.maya.api.lib import get_attr_in_layer + from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -471,9 +474,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.AssetDependency += self.scene_path # Get layer prefix - render_products = self._instance.data["renderProducts"] - layer_metadata = render_products.layer_data - layer_prefix = layer_metadata.filePrefix + renderlayer = self._instance.data["setMembers"] + renderer = self._instance.data["renderer"] + layer_prefix_attr = RenderSettings.get_image_prefix_attr(renderer) + layer_prefix = get_attr_in_layer(layer_prefix_attr, layer=renderlayer) plugin_info = copy.deepcopy(self.plugin_info) plugin_info.update({ From 2826310f1729e97f48f968cf065a25bc9653b62e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 8 Oct 2022 03:56:07 +0000 Subject: [PATCH 1545/2550] [Automated] Bump version --- CHANGELOG.md | 31 ++++++++++++++++++++----------- openpype/version.py | 2 +- 2 files 
changed, 21 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c616b70e3c..455c7aa900 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,17 +1,32 @@ # Changelog -## [3.14.4-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.4-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...HEAD) +**🚀 Enhancements** + +- General: Set root environments before DCC launch [\#3947](https://github.com/pypeclub/OpenPype/pull/3947) +- Maya: Moved plugin from global to maya [\#3939](https://github.com/pypeclub/OpenPype/pull/3939) +- Publisher: Instances can be marked as stored [\#3846](https://github.com/pypeclub/OpenPype/pull/3846) + **🐛 Bug fixes** +- Photoshop: missed sync published version of workfile with workfile [\#3946](https://github.com/pypeclub/OpenPype/pull/3946) +- Maya: fix regression of Renderman Deadline hack [\#3943](https://github.com/pypeclub/OpenPype/pull/3943) +- AttributeDefs: Fix crashing multivalue of files widget [\#3937](https://github.com/pypeclub/OpenPype/pull/3937) - Publisher: Files Drag n Drop cleanup [\#3888](https://github.com/pypeclub/OpenPype/pull/3888) +- Maya: Render settings validation attribute check tweak logging [\#3821](https://github.com/pypeclub/OpenPype/pull/3821) **🔀 Refactored code** - General: import 'Logger' from 'openpype.lib' [\#3926](https://github.com/pypeclub/OpenPype/pull/3926) +**Merged pull requests:** + +- Photoshop: create single frame image in Ftrack as review [\#3908](https://github.com/pypeclub/OpenPype/pull/3908) +- Maya: Warn correctly about nodes in render instance with unexpected names [\#3816](https://github.com/pypeclub/OpenPype/pull/3816) + ## [3.14.3](https://github.com/pypeclub/OpenPype/tree/3.14.3) (2022-10-03) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.3-nightly.7...3.14.3) @@ -28,6 +43,7 @@ - Flame: make migratable projects after creation [\#3860](https://github.com/pypeclub/OpenPype/pull/3860) - Photoshop: synchronize image version with workfile [\#3854](https://github.com/pypeclub/OpenPype/pull/3854) - General: Transcoding handle float2 attr type [\#3849](https://github.com/pypeclub/OpenPype/pull/3849) +- General: Simple script for getting license information about used packages [\#3843](https://github.com/pypeclub/OpenPype/pull/3843) - General: Workfile template build enhancements [\#3838](https://github.com/pypeclub/OpenPype/pull/3838) - General: lock task workfiles when they are working on [\#3810](https://github.com/pypeclub/OpenPype/pull/3810) @@ -44,7 +60,6 @@ - Tray Publisher: skip plugin if otioTimeline is missing [\#3856](https://github.com/pypeclub/OpenPype/pull/3856) - Flame: retimed attributes are integrated with settings [\#3855](https://github.com/pypeclub/OpenPype/pull/3855) - Maya: Extract Playblast fix textures + labelize viewport show settings [\#3852](https://github.com/pypeclub/OpenPype/pull/3852) -- Maya: Publishing data key change [\#3811](https://github.com/pypeclub/OpenPype/pull/3811) **🔀 Refactored code** @@ -53,8 +68,8 @@ - Unreal: Use new Extractor location [\#3917](https://github.com/pypeclub/OpenPype/pull/3917) - Flame: Use new Extractor location [\#3916](https://github.com/pypeclub/OpenPype/pull/3916) - Houdini: Use new Extractor location [\#3894](https://github.com/pypeclub/OpenPype/pull/3894) -- Harmony: Use new Extractor location [\#3893](https://github.com/pypeclub/OpenPype/pull/3893) - Hiero: Use new Extractor location 
[\#3851](https://github.com/pypeclub/OpenPype/pull/3851) +- Maya: Remove old legacy \(ftrack\) plug-ins that are of no use anymore [\#3819](https://github.com/pypeclub/OpenPype/pull/3819) - Nuke: Use new Extractor location [\#3799](https://github.com/pypeclub/OpenPype/pull/3799) **Merged pull requests:** @@ -73,33 +88,27 @@ - Flame: OpenPype submenu to batch and media manager [\#3825](https://github.com/pypeclub/OpenPype/pull/3825) - General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) -- SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) **🐛 Bug fixes** - General: Fix Pattern access in client code [\#3828](https://github.com/pypeclub/OpenPype/pull/3828) - Launcher: Skip opening last work file works for groups [\#3822](https://github.com/pypeclub/OpenPype/pull/3822) +- Maya: Publishing data key change [\#3811](https://github.com/pypeclub/OpenPype/pull/3811) - Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) - Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) +- Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) - nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) - Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) **🔀 Refactored code** -- Maya: Remove old legacy \(ftrack\) plug-ins that are of no use anymore [\#3819](https://github.com/pypeclub/OpenPype/pull/3819) - Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) - Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) - AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) -- General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) -- General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) -- General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) -- General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) -- General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) **Merged pull requests:** - Standalone Publisher: Ignore empty labels, then still use name like other asset models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) -- Kitsu - sync\_all\_project - add list ignore\_projects [\#3776](https://github.com/pypeclub/OpenPype/pull/3776) ## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) diff --git a/openpype/version.py b/openpype/version.py index 50864c0f1c..1bd566aa9b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.4-nightly.1" +__version__ = "3.14.4-nightly.2" From f7fdb10f6bf2fe6e36d7cf03348cb72fd8c06228 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 10 Oct 2022 10:54:06 +0200 Subject: [PATCH 1546/2550] OP-3939 - updated key for Settings --- .../plugins/publish/preintegrate_thumbnail_representation.py | 2 +- 
.../schemas/projects_schema/schemas/schema_global_publish.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py index a4ab0443c2..49e69ff34b 100644 --- a/openpype/plugins/publish/preintegrate_thumbnail_representation.py +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -46,7 +46,7 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): self.integrate_profiles, { "hosts": host_name, - "tasks": task.get("name"), + "task_names": task.get("name"), "task_types": task.get("type"), "families": family, "subsets": subset_name, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 9a8d10a4e1..c216d5fd9e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -598,7 +598,7 @@ "type": "task-types-enum" }, { - "key": "tasks", + "key": "task_names", "label": "Task names", "type": "list", "object_type": "text" From 745ac4a2d26508e14f530851f33a75165e705309 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 10 Oct 2022 11:06:41 +0200 Subject: [PATCH 1547/2550] OP-3939 - updated loop through representations Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../publish/preintegrate_thumbnail_representation.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py index 49e69ff34b..2c25d2a2fc 100644 --- a/openpype/plugins/publish/preintegrate_thumbnail_representation.py +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -26,8 +26,12 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): integrate_profiles = {} def process(self, instance): + repres = instance.data.get("representations") + if not repres: + return + thumbnail_repre = None - for repre in instance.data["representations"]: + for repre in repres: if repre["name"] == "thumbnail": thumbnail_repre = repre break From 811e7853e5c031d23fa51dabbc07cc0caf3bc2f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 14:16:53 +0200 Subject: [PATCH 1548/2550] added ability to serailize and deserialize event to data --- openpype/lib/events.py | 49 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 301d62e2a6..747761fb3e 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -1,6 +1,7 @@ """Events holding data about specific event.""" import os import re +import copy import inspect import logging import weakref @@ -207,6 +208,12 @@ class Event(object): @property def source(self): + """Event's source used for triggering callbacks. + + Returns: + Union[str, None]: Source string or None. Source is optional. + """ + return self._source @property @@ -215,6 +222,12 @@ class Event(object): @property def topic(self): + """Event's topic used for triggering callbacks. + + Returns: + str: Topic string. + """ + return self._topic def emit(self): @@ -227,6 +240,42 @@ class Event(object): ) self._event_system.emit_event(self) + def to_data(self): + """Convert Event object to data. 
+ + Returns: + Dict[str, Any]: Event data. + """ + + return { + "id": self.id, + "topic": self.topic, + "source": self.source, + "data": copy.deepcopy(self.data) + } + + @classmethod + def from_data(cls, event_data, event_system=None): + """Create event from data. + + Args: + event_data (Dict[str, Any]): Event data with defined keys. Can be + created using 'to_data' method. + event_system (EventSystem): System to which the event belongs. + + Returns: + Event: Event with attributes from passed data. + """ + + obj = cls( + event_data["topic"], + event_data["data"], + event_data["source"], + event_system + ) + obj._id = event_data["id"] + return obj + class EventSystem(object): """Encapsulate event handling into an object. From d1f3c8e18e7fe4c87a07007918715e1b368937a2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 14:33:11 +0200 Subject: [PATCH 1549/2550] added properties with getters and setters --- openpype/tools/publisher/control.py | 128 +++++++++++++++++++++------- 1 file changed, 95 insertions(+), 33 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index b08486654c..9ca9924f39 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1278,12 +1278,15 @@ class PublisherController(AbstractPublisherController): self._publish_validation_errors = PublishValidationErrors() # Any other exception that happened during publishing self._publish_error = None + self._publish_error_msg = None # Publishing is in progress self._publish_is_running = False # Publishing is over validation order - self._publish_validated = False + self._publish_has_validated = False # Publishing should stop at validation stage self._publish_up_validation = False + self._publish_has_validation_errors = False + self._publish_has_crashed = False # All publish plugins are processed self._publish_finished = False self._publish_max_progress = 0 @@ -1642,41 +1645,100 @@ class PublisherController(AbstractPublisherController): self._emit_event("instances.refresh.finished") # --- Publish specific implementations --- - @property - def publish_has_finished(self): - return self._publish_finished - - @property - def publish_is_running(self): - return self._publish_is_running - - @property - def publish_has_validated(self): - return self._publish_validated - - @property - def publish_has_crashed(self): - return bool(self._publish_error) - - @property - def publish_has_validation_errors(self): - return bool(self._publish_validation_errors) - - @property - def publish_max_progress(self): - return self._publish_max_progress - - @property - def publish_progress(self): - return self._publish_progress - - @property - def publish_comment_is_set(self): - return self._publish_comment_is_set - def get_publish_crash_error(self): return self._publish_error + def _get_publish_has_finished(self): + return self._publish_finished + + def _set_publish_has_finished(self, value): + if self._publish_finished != value: + self._publish_finished = value + + def _get_publish_is_running(self): + return self._publish_is_running + + def _set_publish_is_running(self, value): + if self._publish_is_running != value: + self._publish_is_running = value + self._emit_event("publish.is_running.changed", {"value": value}) + + def _get_publish_has_validated(self): + return self._publish_has_validated + + def _set_publish_has_validated(self, value): + if self._publish_has_validated != value: + self._publish_has_validated = value + 
self._emit_event("publish.has_validated.changed", {"value": value}) + + def _get_publish_has_crashed(self): + return self._publish_has_crashed + + def _set_publish_has_crashed(self, value): + if self._publish_has_crashed != value: + self._publish_has_crashed = value + self._emit_event("publish.has_crashed.changed", {"value": value}) + + def _get_publish_has_validation_errors(self): + return self._publish_has_validation_errors + + def _set_publish_has_validation_errors(self, value): + if self._publish_has_validation_errors != value: + self._publish_has_validation_errors = value + self._emit_event( + "publish.has_validation_errors.changed", + {"value": value} + ) + + def _get_publish_max_progress(self): + return self._publish_max_progress + + def _set_publish_max_progress(self, value): + if self._publish_max_progress != value: + self._publish_max_progress = value + self._emit_event("publish.max_progress.changed", {"value": value}) + + def _get_publish_progress(self): + return self._publish_progress + + def _set_publish_progress(self, value): + if self._publish_progress != value: + self._publish_progress = value + self._emit_event("publish.progress.changed", {"value": value}) + + def _get_publish_error_msg(self): + return self._publish_error_msg + + def _set_publish_error_msg(self, value): + if self._publish_error_msg != value: + self._publish_error_msg = value + self._emit_event("publish.publish_error.changed", {"value": value}) + + publish_has_finished = property( + _get_publish_has_finished, _set_publish_has_finished + ) + publish_is_running = property( + _get_publish_is_running, _set_publish_is_running + ) + publish_has_validated = property( + _get_publish_has_validated, _set_publish_has_validated + ) + publish_has_crashed = property( + _get_publish_has_crashed, _set_publish_has_crashed + ) + publish_has_validation_errors = property( + _get_publish_has_validation_errors, _set_publish_has_validation_errors + ) + publish_max_progress = property( + _get_publish_max_progress, _set_publish_max_progress + ) + publish_progress = property( + _get_publish_progress, _set_publish_progress + ) + publish_error_msg = property( + _get_publish_error_msg, _set_publish_error_msg + ) + def get_publish_report(self): return self._publish_report.get_report(self._publish_plugins) From c907383f88f3db6fd7eaef76321bcab11069a958 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 14:34:23 +0200 Subject: [PATCH 1550/2550] use events to handle controller changes --- openpype/tools/publisher/control.py | 110 +++++++++++------- .../tools/publisher/widgets/publish_frame.py | 33 +++--- openpype/tools/publisher/window.py | 13 +-- 3 files changed, 86 insertions(+), 70 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 9ca9924f39..b4fc7cb91a 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -20,6 +20,7 @@ from openpype.lib.attribute_definitions import ( ) from openpype.pipeline import ( PublishValidationError, + KnownPublishError, registered_host, ) from openpype.pipeline.create import ( @@ -909,7 +910,7 @@ class AbstractPublisherController(object): def event_system(self): """Inner event system for publisher controller. - Event system is autocreated. + Is used for communication with UI. Event system is autocreated. Known topics: "show.detailed.help" - Detailed help requested (UI related). @@ -919,10 +920,20 @@ class AbstractPublisherController(object): "publish.reset.finished" - Controller reset finished. 
"publish.process.started" - Publishing started. Can be started from paused state. - "publish.process.validated" - Publishing passed validation. "publish.process.stopped" - Publishing stopped/paused process. "publish.process.plugin.changed" - Plugin state has changed. "publish.process.instance.changed" - Instance state has changed. + "publish.has_validated.changed" - Attr 'publish_has_validated' + changed. + "publish.is_running.changed" - Attr 'publish_is_running' changed. + "publish.has_validated.changed" - Attr 'has_validated' changed. + "publish.has_crashed.changed" - Attr 'publish_has_crashed' changed. + "publish.publish_error.changed" - Attr 'publish_error' + "publish.has_validation_errors.changed" - Attr + 'has_validation_errors' changed. + "publish.max_progress.changed" - Attr 'publish_max_progress' + changed. + "publish.progress.changed" - Attr 'publish_progress' changed. Returns: EventSystem: Event system which can trigger callbacks for topics. @@ -1158,27 +1169,22 @@ class AbstractPublisherController(object): """Current progress number. Returns: - int: Current progress value which is between 0 and - 'publish_max_progress'. + int: Current progress value from 0 to 'publish_max_progress'. """ pass @abstractproperty - def publish_comment_is_set(self): - """Publish comment was at least once set. + def publish_error_msg(self): + """Current error message which cause fail of publishing. - Publish comment can be set only once when publish is started for a - first time. This helpt to idetify if 'set_comment' should be called or - not. + Returns: + Union[str, None]: Message which will be showed to artist or + None. """ pass - @abstractmethod - def get_publish_crash_error(self): - pass - @abstractmethod def get_publish_report(self): pass @@ -1277,7 +1283,6 @@ class PublisherController(AbstractPublisherController): # Store exceptions of validation error self._publish_validation_errors = PublishValidationErrors() # Any other exception that happened during publishing - self._publish_error = None self._publish_error_msg = None # Publishing is in progress self._publish_is_running = False @@ -1645,9 +1650,6 @@ class PublisherController(AbstractPublisherController): self._emit_event("instances.refresh.finished") # --- Publish specific implementations --- - def get_publish_crash_error(self): - return self._publish_error - def _get_publish_has_finished(self): return self._publish_finished @@ -1746,10 +1748,13 @@ class PublisherController(AbstractPublisherController): return self._publish_validation_errors.create_report() def _reset_publish(self): - self._publish_is_running = False - self._publish_validated = False + self.publish_is_running = False + self.publish_has_validated = False + self.publish_has_crashed = False + self.publish_has_validation_errors = False + self.publish_finished = False + self._publish_up_validation = False - self._publish_finished = False self._publish_comment_is_set = False self._main_thread_iter = self._publish_iterator() @@ -1768,16 +1773,25 @@ class PublisherController(AbstractPublisherController): self._publish_report.reset(self._publish_context, self._create_context) self._publish_validation_errors.reset(self._publish_plugins_proxy) - self._publish_error = None - self._publish_max_progress = len(self._publish_plugins) - self._publish_progress = 0 + self.publish_error_msg = None + + self.publish_max_progress = len(self._publish_plugins) + self.publish_progress = 0 self._emit_event("publish.reset.finished") def set_comment(self, comment): - 
self._publish_context.data["comment"] = comment - self._publish_comment_is_set = True + """Set comment from ui to pyblish context. + + This should be called always before publishing is started but should + happen only once on first publish start thus variable + '_publish_comment_is_set' is used to keep track about the information. + """ + + if not self._publish_comment_is_set: + self._publish_context.data["comment"] = comment + self._publish_comment_is_set = True def publish(self): """Run publishing.""" @@ -1786,20 +1800,20 @@ class PublisherController(AbstractPublisherController): def validate(self): """Run publishing and stop after Validation.""" - if self._publish_validated: + if self.publish_has_validated: return self._publish_up_validation = True self._start_publish() def _start_publish(self): """Start or continue in publishing.""" - if self._publish_is_running: + if self.publish_is_running: return # Make sure changes are saved self.save_changes() - self._publish_is_running = True + self.publish_is_running = True self._emit_event("publish.process.started") @@ -1807,14 +1821,14 @@ class PublisherController(AbstractPublisherController): def _stop_publish(self): """Stop or pause publishing.""" - self._publish_is_running = False + self.publish_is_running = False self._emit_event("publish.process.stopped") def stop_publish(self): """Stop publishing process (any reason).""" - if self._publish_is_running: + if self.publish_is_running: self._stop_publish() def run_action(self, plugin_id, action_id): @@ -1835,14 +1849,14 @@ class PublisherController(AbstractPublisherController): # There are validation errors and validation is passed # - can't do any progree if ( - self._publish_validated - and self._publish_validation_errors + self.publish_has_validated + and self.publish_has_validation_errors ): item = MainThreadItem(self.stop_publish) # Any unexpected error happened # - everything should stop - elif self._publish_error: + elif self.publish_has_crashed: item = MainThreadItem(self.stop_publish) # Everything is ok so try to get new processing item @@ -1871,23 +1885,20 @@ class PublisherController(AbstractPublisherController): self._publish_progress = idx # Check if plugin is over validation order - if not self._publish_validated: - self._publish_validated = ( + if not self.publish_has_validated: + self.publish_has_validated = ( plugin.order >= self._validation_order ) - # Trigger callbacks when validation stage is passed - if self._publish_validated: - self._emit_event("publish.process.validated") # Stop if plugin is over validation order and process # should process up to validation. 
- if self._publish_up_validation and self._publish_validated: + if self._publish_up_validation and self.publish_has_validated: yield MainThreadItem(self.stop_publish) # Stop if validation is over and validation errors happened if ( - self._publish_validated - and self._publish_validation_errors + self.publish_has_validated + and self.publish_has_validation_errors ): yield MainThreadItem(self.stop_publish) @@ -1952,11 +1963,12 @@ class PublisherController(AbstractPublisherController): self._publish_report.set_plugin_skipped() # Cleanup of publishing process - self._publish_finished = True - self._publish_progress = self._publish_max_progress + self.publish_finished = True + self.publish_progress = self._publish_max_progress yield MainThreadItem(self.stop_publish) def _add_validation_error(self, result): + self.publish_has_validation_errors = False self._publish_validation_errors.add_error( result["plugin"], result["error"], @@ -1974,12 +1986,20 @@ class PublisherController(AbstractPublisherController): if exception: if ( isinstance(exception, PublishValidationError) - and not self._publish_validated + and not self.publish_has_validated ): self._add_validation_error(result) else: - self._publish_error = exception + if isinstance(exception, KnownPublishError): + msg = str(exception) + else: + msg = ( + "Something went wrong. Send report" + " to your supervisor or OpenPype." + ) + self.publish_error_msg = msg + self.publish_has_crashed = False self._publish_next_process() diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index b49f005640..8fd783a3c4 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -4,8 +4,6 @@ import time from Qt import QtWidgets, QtCore -from openpype.pipeline import KnownPublishError - from .widgets import ( StopBtn, ResetBtn, @@ -170,7 +168,7 @@ class PublishFrame(QtWidgets.QWidget): "publish.process.started", self._on_publish_start ) controller.event_system.add_callback( - "publish.process.validated", self._on_publish_validated + "publish.has_validated.changed", self._on_publish_validated_change ) controller.event_system.add_callback( "publish.process.stopped", self._on_publish_stop @@ -322,8 +320,9 @@ class PublishFrame(QtWidgets.QWidget): self._validate_btn.setEnabled(False) self._publish_btn.setEnabled(False) - def _on_publish_validated(self): - self._validate_btn.setEnabled(False) + def _on_publish_validated_change(self, event): + if event["value"]: + self._validate_btn.setEnabled(False) def _on_instance_change(self, event): """Change instance label when instance is going to be processed.""" @@ -360,10 +359,10 @@ class PublishFrame(QtWidgets.QWidget): self._validate_btn.setEnabled(validate_enabled) self._publish_btn.setEnabled(publish_enabled) - error = self._controller.get_publish_crash_error() + error_msg = self._controller.publish_error_msg validation_errors = self._controller.get_validation_errors() - if error: - self._set_error(error) + if error_msg: + self._set_error_msg(error_msg) elif validation_errors: self._set_progress_visibility(False) @@ -387,16 +386,16 @@ class PublishFrame(QtWidgets.QWidget): self._set_success_property(-1) - def _set_error(self, error): + def _set_error_msg(self, error_msg): + """Show error message to artist. + + Args: + error_msg (str): Message which is showed to artist. 
+ """ + self._set_main_label("Error happened") - if isinstance(error, KnownPublishError): - msg = str(error) - else: - msg = ( - "Something went wrong. Send report" - " to your supervisor or OpenPype." - ) - self._message_label_top.setText(msg) + + self._message_label_top.setText(error_msg) self._set_success_property(0) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 3b3e27660d..e2beb480bd 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -248,7 +248,7 @@ class PublisherWindow(QtWidgets.QDialog): "publish.process.started", self._on_publish_start ) controller.event_system.add_callback( - "publish.process.validated", self._on_publish_validated + "publish.has_validated.changed", self._on_publish_validated_change ) controller.event_system.add_callback( "publish.process.stopped", self._on_publish_stop @@ -439,11 +439,7 @@ class PublisherWindow(QtWidgets.QDialog): self._controller.stop_publish() def _set_publish_comment(self): - if self._controller.publish_comment_is_set: - return - - comment = self._comment_input.text() - self._controller.set_comment(comment) + self._controller.set_comment(self._comment_input.text()) def _on_validate_clicked(self): self._set_publish_comment() @@ -489,8 +485,9 @@ class PublisherWindow(QtWidgets.QDialog): if self._tabs_widget.is_current_tab(self._create_tab): self._tabs_widget.set_current_tab("publish") - def _on_publish_validated(self): - self._validate_btn.setEnabled(False) + def _on_publish_validated_change(self, event): + if event["value"]: + self._validate_btn.setEnabled(False) def _on_publish_stop(self): self._set_publish_overlay_visibility(False) From 0f514aa5528efc44938a4eec671692d41329daed Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 14:38:52 +0200 Subject: [PATCH 1551/2550] mark methods that should be abstract in remote controller --- openpype/tools/publisher/control_qt.py | 112 ++++--------------------- 1 file changed, 18 insertions(+), 94 deletions(-) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index c7099caf98..8f0f304f9a 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -111,7 +111,7 @@ class QtRemotePublishController(QtPublisherController): self._created_instances = created_instances self._emit_event("instances.refresh.finished") - @property + @abstractproperty def project_name(self): """Current context project name. @@ -121,7 +121,7 @@ class QtRemotePublishController(QtPublisherController): pass - @property + @abstractproperty def current_asset_name(self): """Current context asset name. @@ -131,7 +131,7 @@ class QtRemotePublishController(QtPublisherController): pass - @property + @abstractproperty def current_task_name(self): """Current context task name. @@ -141,7 +141,7 @@ class QtRemotePublishController(QtPublisherController): pass - @property + @abstractproperty def host_is_valid(self): """Host is valid for creation part. @@ -186,6 +186,7 @@ class QtRemotePublishController(QtPublisherController): def get_existing_subset_names(self, asset_name): pass + @abstractmethod def reset(self): """Reset whole controller. 
@@ -195,9 +196,7 @@ class QtRemotePublishController(QtPublisherController): pass - def get_publish_attribute_definitions(self, instances, include_context): - pass - + @abstractmethod def get_subset_name( self, creator_identifier, @@ -220,6 +219,7 @@ class QtRemotePublishController(QtPublisherController): pass + @abstractmethod def create( self, creator_identifier, subset_name, instance_data, options ): @@ -237,6 +237,7 @@ class QtRemotePublishController(QtPublisherController): pass + @abstractmethod def save_changes(self): """Save changes happened during creation.""" @@ -246,116 +247,36 @@ class QtRemotePublishController(QtPublisherController): instance.remote_changes() ) - # TODO trigger save changes - self._trigger("save_changes", created_instance_changes) + # Send 'created_instance_changes' value to client + @abstractmethod def remove_instances(self, instances): """Remove list of instances from create context.""" # TODO add Args: pass - @property - def publish_has_finished(self): - """Has publishing finished. - - Returns: - bool: If publishing finished and all plugins were iterated. - """ - - pass - - @property - def publish_is_running(self): - """Publishing is running right now. - - Returns: - bool: If publishing is in progress. - """ - - pass - - @property - def publish_has_validated(self): - """Publish validation passed. - - Returns: - bool: If publishing passed last possible validation order. - """ - - pass - - @property - def publish_has_crashed(self): - """Publishing crashed for any reason. - - Returns: - bool: Publishing crashed. - """ - - pass - - @property - def publish_has_validation_errors(self): - """During validation happened at least one validation error. - - Returns: - bool: Validation error was raised during validation. - """ - - pass - - @property - def publish_max_progress(self): - """Get maximum possible progress number. - - Returns: - int: Number that can be used as 100% of publish progress bar. - """ - - pass - - @property - def publish_progress(self): - """Current progress number. - - Returns: - int: Current progress value which is between 0 and - 'publish_max_progress'. - """ - - pass - - @property - def publish_comment_is_set(self): - """Publish comment was at least once set. - - Publish comment can be set only once when publish is started for a - first time. This helpt to idetify if 'set_comment' should be called or - not. - """ - - pass - - def get_publish_crash_error(self): - pass - + @abstractmethod def get_publish_report(self): pass + @abstractmethod def get_validation_errors(self): pass + @abstractmethod def publish(self): """Trigger publishing without any order limitations.""" pass + @abstractmethod def validate(self): """Trigger publishing which will stop after validation order.""" pass + @abstractmethod def stop_publish(self): """Stop publishing can be also used to pause publishing. @@ -365,6 +286,7 @@ class QtRemotePublishController(QtPublisherController): pass + @abstractmethod def run_action(self, plugin_id, action_id): """Trigger pyblish action on a plugin. @@ -375,6 +297,7 @@ class QtRemotePublishController(QtPublisherController): pass + @abstractmethod def set_comment(self, comment): """Set comment on pyblish context. @@ -386,6 +309,7 @@ class QtRemotePublishController(QtPublisherController): pass + @abstractmethod def emit_card_message(self, message): """Emit a card message which can have a lifetime. 
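The abstract QtRemotePublishController above only defines the contract; the concrete bridge that carries serialized events and instance changes between the UI process and the client process is left to the integration. Below is a minimal sketch of that bridging pattern, assuming nothing beyond the plain-dictionary payloads produced by 'Event.to_data'; the SimpleRemoteBridge name and its queue-based transport are illustrative only and are not part of the codebase.

import json
import queue


class SimpleRemoteBridge(object):
    """Illustrative two-way channel between UI and client process.

    Payloads are plain dictionaries with the same keys that
    'Event.to_data' produces ("id", "topic", "source", "data"), so
    either side can rebuild an event object from them.
    """

    def __init__(self):
        self._to_client = queue.Queue()
        self._to_ui = queue.Queue()

    def send_to_client(self, payload):
        # Serialize to JSON to show the payload survives a process boundary
        self._to_client.put(json.dumps(payload))

    def send_to_ui(self, payload):
        self._to_ui.put(json.dumps(payload))

    def drain_ui_events(self, handler):
        # A UI-side controller would pass e.g. 'remote_events_handler' here
        while not self._to_ui.empty():
            handler(json.loads(self._to_ui.get()))


if __name__ == "__main__":
    bridge = SimpleRemoteBridge()
    # Client side reports a progress change
    bridge.send_to_ui({
        "id": "example-id",
        "topic": "publish.progress.changed",
        "source": "controller",
        "data": {"value": 42},
    })
    # UI side replays received events through its handler
    bridge.drain_ui_events(
        lambda event_data: print(event_data["topic"], event_data["data"])
    )

Keeping the payloads as plain dictionaries leaves both sides free to swap the transport (pipes, sockets, files) without sharing Qt or pyblish objects across the process boundary.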
From ebb6a17d9793b7aee94e01cb4ebe572bab26ecea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 14:43:57 +0200 Subject: [PATCH 1552/2550] trigger event on finished attribute change --- openpype/tools/publisher/control.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index b4fc7cb91a..dd7e90ea5f 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1656,6 +1656,7 @@ class PublisherController(AbstractPublisherController): def _set_publish_has_finished(self, value): if self._publish_finished != value: self._publish_finished = value + self._emit_event("publish.finished.changed", {"value": value}) def _get_publish_is_running(self): return self._publish_is_running From 8ffdbf0dcfc70d0bf2741cdce7464864e82f0051 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 14:44:51 +0200 Subject: [PATCH 1553/2550] instances are removed by ids --- openpype/tools/publisher/control.py | 13 +++++++++---- openpype/tools/publisher/control_qt.py | 2 +- openpype/tools/publisher/widgets/overview_widget.py | 6 +++++- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index dd7e90ea5f..0981f48dbe 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1098,7 +1098,7 @@ class AbstractPublisherController(object): pass - def remove_instances(self, instances): + def remove_instances(self, instance_ids): """Remove list of instances from create context.""" # TODO expect instance ids @@ -1632,18 +1632,23 @@ class PublisherController(AbstractPublisherController): if self._create_context.host_is_valid: self._create_context.save_changes() - def remove_instances(self, instances): + def remove_instances(self, instance_ids): """""" # TODO expect instance ids instead of instances # QUESTION Expect that instances are really removed? In that case save # reset is not required and save changes too. 
self.save_changes() - self._remove_instances_from_context(instances) + self._remove_instances_from_context(instance_ids) self._on_create_instance_change() - def _remove_instances_from_context(self, instances): + def _remove_instances_from_context(self, instance_ids): + instances_by_id = self._create_context.instances_by_id + instances = [ + instances_by_id[instance_id] + for instance_id in instance_ids + ] self._create_context.remove_instances(instances) def _on_create_instance_change(self): diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 8f0f304f9a..69809bcfe8 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -250,7 +250,7 @@ class QtRemotePublishController(QtPublisherController): # Send 'created_instance_changes' value to client @abstractmethod - def remove_instances(self, instances): + def remove_instances(self, instance_ids): """Remove list of instances from create context.""" # TODO add Args: diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 08c2ce0513..3c67e6298e 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -224,7 +224,11 @@ class OverviewWidget(QtWidgets.QFrame): dialog.exec_() # Skip if OK was not clicked if dialog.result() == QtWidgets.QMessageBox.Ok: - self._controller.remove_instances(instances) + instance_ids = { + instance.id + for instance in instances + } + self._controller.remove_instances(instance_ids) def _on_change_view_clicked(self): self._change_view_type() From 91b66812dbb9adc00a42b634608291d934a7e30b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 14:45:16 +0200 Subject: [PATCH 1554/2550] added some basic implementation of client event handling --- openpype/tools/publisher/control_qt.py | 71 ++++++++++++++++++++++++-- 1 file changed, 68 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 69809bcfe8..5638ea554a 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -1,7 +1,9 @@ import collections +from ABC import abstractmethod, abstractproperty from Qt import QtCore +from openpype.lib.events import Event from openpype.pipeline.create import CreatedInstance from .control import MainThreadItem, PublisherController @@ -90,15 +92,29 @@ class QtPublisherController(PublisherController): self._main_thread_processor.stop() -class QtRemotePublishController(QtPublisherController): +class QtRemotePublishController(PublisherController): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._created_instances = {} + self._main_thread_processor = MainThreadProcess() + self._main_thread_processor.start() + + @abstractmethod + def _get_serialized_instances(self): + """Receive serialized instances from client process. + + Returns: + List[Dict[str, Any]]: Serialized instances. 
+ """ + + pass + + def _process_main_thread_item(self, item): + self._main_thread_processor.add_item(item) def _on_create_instance_change(self): - # TODO somehow get serialized instances from client - serialized_instances = [] + serialized_instances = self._get_serialized_instances() created_instances = {} for serialized_data in serialized_instances: @@ -111,6 +127,55 @@ class QtRemotePublishController(QtPublisherController): self._created_instances = created_instances self._emit_event("instances.refresh.finished") + def remote_events_handler(self, event_data): + event = Event.from_data(event_data) + + # Topics that cause "replication" of controller changes + if event.topic == "publish.max_progress.changed": + self.publish_max_progress = event["value"] + return + + if event.topic == "publish.progress.changed": + self.publish_progress = event["value"] + return + + if event.topic == "publish.has_validated.changed": + self.publish_has_validated = event["value"] + return + + if event.topic == "publish.is_running.changed": + self.publish_is_running = event["value"] + return + + if event.topic == "publish.publish_error.changed": + self.publish_error_msg = event["value"] + return + + if event.topic == "publish.has_crashed.changed": + self.publish_has_crashed = event["value"] + return + + if event.topic == "publish.has_validation_errors.changed": + self.publish_has_validation_errors = event["value"] + return + + if event.topic == "publish.finished.changed": + self.publish_finished = event["value"] + return + + # Topics that can be just passed by because are not affecting + # controller itself + # - "show.card.message" + # - "show.detailed.help" + # - "publish.reset.finished" + # - "instances.refresh.finished" + # - "plugins.refresh.finished" + # - "publish.process.started" + # - "publish.process.stopped" + # - "publish.process.plugin.changed" + # - "publish.process.instance.changed" + self.event_system.emit_event(event) + @abstractproperty def project_name(self): """Current context project name. From ac3326d29690183206eed23520a6fad48e0982de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 15:08:43 +0200 Subject: [PATCH 1555/2550] fix import --- openpype/tools/publisher/control_qt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 5638ea554a..10f576a3f3 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -1,5 +1,5 @@ import collections -from ABC import abstractmethod, abstractproperty +from abc import abstractmethod, abstractproperty from Qt import QtCore From 187411ef8bab8241662add0c57f81d538f3b008c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 15:10:18 +0200 Subject: [PATCH 1556/2550] added BaseController to handle base attributes --- openpype/tools/publisher/control.py | 319 +++++++++++++++------------- 1 file changed, 172 insertions(+), 147 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 0981f48dbe..f2f6d07cd6 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -917,7 +917,8 @@ class AbstractPublisherController(object): "show.card.message" - Show card message request (UI related). "instances.refresh.finished" - Instances are refreshed. "plugins.refresh.finished" - Plugins refreshed. - "publish.reset.finished" - Controller reset finished. + "publish.reset.finished" - Publish context reset finished. 
+ "controller.reset.finished" - Controller reset finished. "publish.process.started" - Publishing started. Can be started from paused state. "publish.process.stopped" - Publishing stopped/paused process. @@ -934,6 +935,8 @@ class AbstractPublisherController(object): "publish.max_progress.changed" - Attr 'publish_max_progress' changed. "publish.progress.changed" - Attr 'publish_progress' changed. + "publish.host_is_valid.changed" - Attr 'host_is_valid' changed. + "publish.finished.changed" - Attr 'publish_finished' changed. Returns: EventSystem: Event system which can trigger callbacks for topics. @@ -943,6 +946,11 @@ class AbstractPublisherController(object): self._event_system = EventSystem() return self._event_system + def _emit_event(self, topic, data=None): + if data is None: + data = {} + self.event_system.emit(topic, data, "controller") + @abstractproperty def project_name(self): """Current context project name. @@ -1252,7 +1260,156 @@ class AbstractPublisherController(object): pass -class PublisherController(AbstractPublisherController): +class BasePublishController(AbstractPublisherController): + def __init__(self): + # Controller must implement it's update + self._creator_items = {} + + self._host_is_valid = False + + # Any other exception that happened during publishing + self._publish_error_msg = None + # Publishing is in progress + self._publish_is_running = False + # Publishing is over validation order + self._publish_has_validated = False + + self._publish_has_validation_errors = False + self._publish_has_crashed = False + # All publish plugins are processed + self._publish_finished = False + self._publish_max_progress = 0 + self._publish_progress = 0 + + @property + def creator_items(self): + """Creators that can be shown in create dialog.""" + + return self._creator_items + + def get_creator_icon(self, identifier): + creator_item = self._creator_items.get(identifier) + if creator_item is not None: + return creator_item.icon + return None + + def _get_host_is_valid(self): + return self._host_is_valid + + def _set_host_is_valid(self, value): + if self._host_is_valid != value: + self._host_is_valid = value + self._emit_event("publish.host_is_valid.changed", {"value": value}) + + def _get_publish_has_finished(self): + return self._publish_finished + + def _set_publish_has_finished(self, value): + if self._publish_finished != value: + self._publish_finished = value + self._emit_event("publish.finished.changed", {"value": value}) + + def _get_publish_is_running(self): + return self._publish_is_running + + def _set_publish_is_running(self, value): + if self._publish_is_running != value: + self._publish_is_running = value + self._emit_event("publish.is_running.changed", {"value": value}) + + def _get_publish_has_validated(self): + return self._publish_has_validated + + def _set_publish_has_validated(self, value): + if self._publish_has_validated != value: + self._publish_has_validated = value + self._emit_event("publish.has_validated.changed", {"value": value}) + + def _get_publish_has_crashed(self): + return self._publish_has_crashed + + def _set_publish_has_crashed(self, value): + if self._publish_has_crashed != value: + self._publish_has_crashed = value + self._emit_event("publish.has_crashed.changed", {"value": value}) + + def _get_publish_has_validation_errors(self): + return self._publish_has_validation_errors + + def _set_publish_has_validation_errors(self, value): + if self._publish_has_validation_errors != value: + self._publish_has_validation_errors = value + 
self._emit_event( + "publish.has_validation_errors.changed", + {"value": value} + ) + + def _get_publish_max_progress(self): + return self._publish_max_progress + + def _set_publish_max_progress(self, value): + if self._publish_max_progress != value: + self._publish_max_progress = value + self._emit_event("publish.max_progress.changed", {"value": value}) + + def _get_publish_progress(self): + return self._publish_progress + + def _set_publish_progress(self, value): + if self._publish_progress != value: + self._publish_progress = value + self._emit_event("publish.progress.changed", {"value": value}) + + def _get_publish_error_msg(self): + return self._publish_error_msg + + def _set_publish_error_msg(self, value): + if self._publish_error_msg != value: + self._publish_error_msg = value + self._emit_event("publish.publish_error.changed", {"value": value}) + + host_is_valid = property( + _get_host_is_valid, _set_host_is_valid + ) + publish_has_finished = property( + _get_publish_has_finished, _set_publish_has_finished + ) + publish_is_running = property( + _get_publish_is_running, _set_publish_is_running + ) + publish_has_validated = property( + _get_publish_has_validated, _set_publish_has_validated + ) + publish_has_crashed = property( + _get_publish_has_crashed, _set_publish_has_crashed + ) + publish_has_validation_errors = property( + _get_publish_has_validation_errors, _set_publish_has_validation_errors + ) + publish_max_progress = property( + _get_publish_max_progress, _set_publish_max_progress + ) + publish_progress = property( + _get_publish_progress, _set_publish_progress + ) + publish_error_msg = property( + _get_publish_error_msg, _set_publish_error_msg + ) + + def _reset_attributes(self): + """Reset most of attributes that can be reset.""" + + self.publish_is_running = False + self.publish_has_validated = False + self.publish_has_crashed = False + self.publish_has_validation_errors = False + self.publish_finished = False + + self.publish_error_msg = None + self.publish_progress = 0 + + +class PublisherController(BasePublishController): """Middleware between UI, CreateContext and publish Context. Handle both creation and publishing parts. @@ -1265,6 +1422,8 @@ class PublisherController(AbstractPublisherController): _log = None def __init__(self, dbcon=None, headless=False): + super(PublisherController, self).__init__() + self._host = registered_host() self._headless = headless @@ -1272,8 +1431,6 @@ class PublisherController(AbstractPublisherController): self._host, dbcon, headless=headless, reset=False ) - self._creator_items = {} - self._publish_plugins_proxy = None # pyblish.api.Context @@ -1282,20 +1439,9 @@ class PublisherController(AbstractPublisherController): self._publish_report = PublishReport(self) # Store exceptions of validation error self._publish_validation_errors = PublishValidationErrors() - # Any other exception that happened during publishing - self._publish_error_msg = None - # Publishing is in progress - self._publish_is_running = False - # Publishing is over validation order - self._publish_has_validated = False + # Publishing should stop at validation stage self._publish_up_validation = False - self._publish_has_validation_errors = False - self._publish_has_crashed = False - # All publish plugins are processed - self._publish_finished = False - self._publish_max_progress = 0 - self._publish_progress = 0 # This information is not much important for controller but for widget # which can change (and set) the comment. 
self._publish_comment_is_set = False @@ -1317,12 +1463,6 @@ class PublisherController(AbstractPublisherController): # Cacher of avalon documents self._asset_docs_cache = AssetDocsCache(self) - @property - def log(self): - if self._log is None: - self._log = logging.getLogger("PublisherController") - return self._log - @property def project_name(self): """Current project context defined by host. @@ -1364,28 +1504,11 @@ class PublisherController(AbstractPublisherController): return self._create_context.creators - @property - def creator_items(self): - """Creators that can be shown in create dialog.""" - - return self._creator_items - - @property - def host_is_valid(self): - """Host is valid for creation.""" - - return self._create_context.host_is_valid - @property def _publish_plugins(self): """Publish plugins.""" return self._create_context.publish_plugins - def _emit_event(self, topic, data=None): - if data is None: - data = {} - self.event_system.emit(topic, data, "controller") - # --- Publish specific callbacks --- def get_asset_docs(self): """Get asset documents from cache for whole project.""" @@ -1450,6 +1573,8 @@ class PublisherController(AbstractPublisherController): self.save_changes() + self.host_is_valid = self._create_context.host_is_valid + # Reset avalon context self._create_context.reset_avalon_context() @@ -1460,6 +1585,8 @@ class PublisherController(AbstractPublisherController): self._reset_publish() self._reset_instances() + self._emit_event("controller.reset.finished") + self.emit_card_message("Refreshed..") def _reset_plugins(self): @@ -1584,12 +1711,6 @@ class PublisherController(AbstractPublisherController): )) return output - def get_creator_icon(self, identifier): - creator_item = self._creator_items.get(identifier) - if creator_item is not None: - return creator_item.icon - return None - def get_subset_name( self, creator_identifier, @@ -1633,7 +1754,11 @@ class PublisherController(AbstractPublisherController): self._create_context.save_changes() def remove_instances(self, instance_ids): - """""" + """Remove instances based on instance ids. + + Args: + instance_ids (List[str]): List of instance ids to remove. + """ # TODO expect instance ids instead of instances # QUESTION Expect that instances are really removed? In that case save # reset is not required and save changes too. 
@@ -1654,99 +1779,6 @@ class PublisherController(AbstractPublisherController): def _on_create_instance_change(self): self._emit_event("instances.refresh.finished") - # --- Publish specific implementations --- - def _get_publish_has_finished(self): - return self._publish_finished - - def _set_publish_has_finished(self, value): - if self._publish_finished != value: - self._publish_finished = value - self._emit_event("publish.finished.changed", {"value": value}) - - def _get_publish_is_running(self): - return self._publish_is_running - - def _set_publish_is_running(self, value): - if self._publish_is_running != value: - self._publish_is_running = value - self._emit_event("publish.is_running.changed", {"value": value}) - - def _get_publish_has_validated(self): - return self._publish_has_validated - - def _set_publish_has_validated(self, value): - if self._publish_has_validated != value: - self._publish_has_validated = value - self._emit_event("publish.has_validated.changed", {"value": value}) - - def _get_publish_has_crashed(self): - return self._publish_has_crashed - - def _set_publish_has_crashed(self, value): - if self._publish_has_crashed != value: - self._publish_has_crashed = value - self._emit_event("publish.has_crashed.changed", {"value": value}) - - def _get_publish_has_validation_errors(self): - return self._publish_has_validation_errors - - def _set_publish_has_validation_errors(self, value): - if self._publish_has_validation_errors != value: - self._publish_has_validation_errors = value - self._emit_event( - "publish.has_validation_errors.changed", - {"value": value} - ) - - def _get_publish_max_progress(self): - return self._publish_max_progress - - def _set_publish_max_progress(self, value): - if self._publish_max_progress != value: - self._publish_max_progress = value - self._emit_event("publish.max_progress.changed", {"value": value}) - - def _get_publish_progress(self): - return self._publish_progress - - def _set_publish_progress(self, value): - if self._publish_progress != value: - self._publish_progress = value - self._emit_event("publish.progress.changed", {"value": value}) - - def _get_publish_error_msg(self): - return self._publish_error_msg - - def _set_publish_error_msg(self, value): - if self._publish_error_msg != value: - self._publish_error_msg = value - self._emit_event("publish.publish_error.changed", {"value": value}) - - publish_has_finished = property( - _get_publish_has_finished, _set_publish_has_finished - ) - publish_is_running = property( - _get_publish_is_running, _set_publish_is_running - ) - publish_has_validated = property( - _get_publish_has_validated, _set_publish_has_validated - ) - publish_has_crashed = property( - _get_publish_has_crashed, _set_publish_has_crashed - ) - publish_has_validation_errors = property( - _get_publish_has_validation_errors, _set_publish_has_validation_errors - ) - publish_max_progress = property( - _get_publish_max_progress, _set_publish_max_progress - ) - publish_progress = property( - _get_publish_progress, _set_publish_progress - ) - publish_error_msg = property( - _get_publish_error_msg, _set_publish_error_msg - ) - def get_publish_report(self): return self._publish_report.get_report(self._publish_plugins) @@ -1754,11 +1786,7 @@ class PublisherController(AbstractPublisherController): return self._publish_validation_errors.create_report() def _reset_publish(self): - self.publish_is_running = False - self.publish_has_validated = False - self.publish_has_crashed = False - self.publish_has_validation_errors = False - 
self.publish_finished = False + self._reset_attributes() self._publish_up_validation = False self._publish_comment_is_set = False @@ -1780,10 +1808,7 @@ class PublisherController(AbstractPublisherController): self._publish_report.reset(self._publish_context, self._create_context) self._publish_validation_errors.reset(self._publish_plugins_proxy) - self.publish_error_msg = None - self.publish_max_progress = len(self._publish_plugins) - self.publish_progress = 0 self._emit_event("publish.reset.finished") From 16aff5224fd86552e84744dc6201d30c2e14863e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 15:24:40 +0200 Subject: [PATCH 1557/2550] fix attribute changes --- openpype/tools/publisher/control.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index f2f6d07cd6..014efd5c01 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1995,11 +1995,11 @@ class PublisherController(BasePublishController): # Cleanup of publishing process self.publish_finished = True - self.publish_progress = self._publish_max_progress + self.publish_progress = self.publish_max_progress yield MainThreadItem(self.stop_publish) def _add_validation_error(self, result): - self.publish_has_validation_errors = False + self.publish_has_validation_errors = True self._publish_validation_errors.add_error( result["plugin"], result["error"], @@ -2030,7 +2030,7 @@ class PublisherController(BasePublishController): " to your supervisor or OpenPype." ) self.publish_error_msg = msg - self.publish_has_crashed = False + self.publish_has_crashed = True self._publish_next_process() From f9155bd6429933e8df407f9673d603bae5e71e6f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 15:28:45 +0200 Subject: [PATCH 1558/2550] implemented base controller --- openpype/tools/publisher/control.py | 181 ++++++++++++++++++---------- 1 file changed, 115 insertions(+), 66 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 014efd5c01..32a5d62fb5 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -891,10 +891,7 @@ class AbstractPublisherController(object): access objects directly but by using wrappers that can be serialized. """ - _log = None - _event_system = None - - @property + @abstractproperty def log(self): """Controller's logger object. @@ -902,54 +899,13 @@ class AbstractPublisherController(object): logging.Logger: Logger object that can be used for logging. """ - if self._log is None: - self._log = logging.getLogget(self.__class__.__name__) - return self._log + pass - @property + @abstractproperty def event_system(self): - """Inner event system for publisher controller. + """Inner event system for publisher controller.""" - Is used for communication with UI. Event system is autocreated. - - Known topics: - "show.detailed.help" - Detailed help requested (UI related). - "show.card.message" - Show card message request (UI related). - "instances.refresh.finished" - Instances are refreshed. - "plugins.refresh.finished" - Plugins refreshed. - "publish.reset.finished" - Publish context reset finished. - "controller.reset.finished" - Controller reset finished. - "publish.process.started" - Publishing started. Can be started from - paused state. - "publish.process.stopped" - Publishing stopped/paused process. - "publish.process.plugin.changed" - Plugin state has changed. 
- "publish.process.instance.changed" - Instance state has changed. - "publish.has_validated.changed" - Attr 'publish_has_validated' - changed. - "publish.is_running.changed" - Attr 'publish_is_running' changed. - "publish.has_validated.changed" - Attr 'has_validated' changed. - "publish.has_crashed.changed" - Attr 'publish_has_crashed' changed. - "publish.publish_error.changed" - Attr 'publish_error' - "publish.has_validation_errors.changed" - Attr - 'has_validation_errors' changed. - "publish.max_progress.changed" - Attr 'publish_max_progress' - changed. - "publish.progress.changed" - Attr 'publish_progress' changed. - "publish.host_is_valid.changed" - Attr 'host_is_valid' changed. - "publish.finished.changed" - Attr 'publish_finished' changed. - - Returns: - EventSystem: Event system which can trigger callbacks for topics. - """ - - if self._event_system is None: - self._event_system = EventSystem() - return self._event_system - - def _emit_event(self, topic, data=None): - if data is None: - data = {} - self.event_system.emit(topic, data, "controller") + pass @abstractproperty def project_name(self): @@ -1261,10 +1217,22 @@ class AbstractPublisherController(object): class BasePublishController(AbstractPublisherController): - def __init__(self): - # Controller must implement it's update - self._creator_items = {} + """Implement common logic for controllers. + Implement event system, logger and common attributes. Attributes are + triggering value changes so anyone can listen to their topics. + + Prepare implementation for creator items. Controller must implement just + their filling by '_collect_creator_items'. + + All prepared implementation is based on calling super '__init__'. + """ + + def __init__(self): + self._log = None + self._event_system = None + + # Host is valid for creation self._host_is_valid = False # Any other exception that happened during publishing @@ -1281,17 +1249,65 @@ class BasePublishController(AbstractPublisherController): self._publish_max_progress = 0 self._publish_progress = 0 + # Controller must '_collect_creator_items' to fill the value + self._creator_items = None + @property - def creator_items(self): - """Creators that can be shown in create dialog.""" + def log(self): + """Controller's logger object. - return self._creator_items + Returns: + logging.Logger: Logger object that can be used for logging. + """ - def get_creator_icon(self, identifier): - creator_item = self._creator_items.get(identifier) - if creator_item is not None: - return creator_item.icon - return None + if self._log is None: + self._log = logging.getLogget(self.__class__.__name__) + return self._log + + @property + def event_system(self): + """Inner event system for publisher controller. + + Is used for communication with UI. Event system is autocreated. + + Known topics: + "show.detailed.help" - Detailed help requested (UI related). + "show.card.message" - Show card message request (UI related). + "instances.refresh.finished" - Instances are refreshed. + "plugins.refresh.finished" - Plugins refreshed. + "publish.reset.finished" - Publish context reset finished. + "controller.reset.finished" - Controller reset finished. + "publish.process.started" - Publishing started. Can be started from + paused state. + "publish.process.stopped" - Publishing stopped/paused process. + "publish.process.plugin.changed" - Plugin state has changed. + "publish.process.instance.changed" - Instance state has changed. + "publish.has_validated.changed" - Attr 'publish_has_validated' + changed. 
+ "publish.is_running.changed" - Attr 'publish_is_running' changed. + "publish.has_validated.changed" - Attr 'has_validated' changed. + "publish.has_crashed.changed" - Attr 'publish_has_crashed' changed. + "publish.publish_error.changed" - Attr 'publish_error' + "publish.has_validation_errors.changed" - Attr + 'has_validation_errors' changed. + "publish.max_progress.changed" - Attr 'publish_max_progress' + changed. + "publish.progress.changed" - Attr 'publish_progress' changed. + "publish.host_is_valid.changed" - Attr 'host_is_valid' changed. + "publish.finished.changed" - Attr 'publish_finished' changed. + + Returns: + EventSystem: Event system which can trigger callbacks for topics. + """ + + if self._event_system is None: + self._event_system = EventSystem() + return self._event_system + + def _emit_event(self, topic, data=None): + if data is None: + data = {} + self.event_system.emit(topic, data, "controller") def _get_host_is_valid(self): return self._host_is_valid @@ -1399,6 +1415,9 @@ class BasePublishController(AbstractPublisherController): def _reset_attributes(self): """Reset most of attributes that can be reset.""" + # Reset creator items + self._creator_items = None + self.publish_is_running = False self.publish_has_validated = False self.publish_has_crashed = False @@ -1408,6 +1427,35 @@ class BasePublishController(AbstractPublisherController): self.publish_error_msg = None self.publish_progress = 0 + @property + def creator_items(self): + """Creators that can be shown in create dialog.""" + if self._creator_items is None: + self._creator_items = self._collect_creator_items() + return self._creator_items + + @abstractmethod + def _collect_creator_items(self): + """Receive CreatorItems to work with. + + Returns: + Dict[str, CreatorItem]: Creator items by their identifier. + """ + + pass + + def get_creator_icon(self, identifier): + """Function to receive icon for creator identifier. + + Args: + str: Creator's identifier for which should be icon returned. + """ + + creator_item = self.creator_items.get(identifier) + if creator_item is not None: + return creator_item.icon + return None + class PublisherController(BasePublishController): """Middleware between UI, CreateContext and publish Context. 
@@ -1598,15 +1646,16 @@ class PublisherController(BasePublishController): self._create_context.reset_plugins() - self._creator_items = { - identifier: CreatorItem.from_creator(creator) - for identifier, creator in self._create_context.creators.items() - } - self._resetting_plugins = False self._emit_event("plugins.refresh.finished") + def _collect_creator_items(self): + return { + identifier: CreatorItem.from_creator(creator) + for identifier, creator in self._create_context.creators.items() + } + def _reset_instances(self): """Reset create instances.""" if self._resetting_instances: @@ -1638,7 +1687,7 @@ class PublisherController(BasePublishController): _attr_defs = {} for instance in instances: creator_identifier = instance.creator_identifier - creator_item = self._creator_items[creator_identifier] + creator_item = self.creator_items[creator_identifier] for attr_def in creator_item.instance_attributes_defs: found_idx = None for idx, _attr_def in _attr_defs.items(): From e02b686afe748a4e67b63650d2fcf9a70a994e75 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 17:07:50 +0200 Subject: [PATCH 1559/2550] still support get current context --- openpype/tools/publisher/control.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 481fb5981b..b4c89f221f 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -11,6 +11,7 @@ from openpype.lib.events import EventSystem from openpype.pipeline import ( PublishValidationError, registered_host, + legacy_io, ) from openpype.pipeline.create import CreateContext @@ -414,6 +415,9 @@ class PublisherController: str: Project name. """ + if not hasattr(self.host, "get_current_context"): + return legacy_io.active_project() + return self.host.get_current_context()["project_name"] @property @@ -424,6 +428,9 @@ class PublisherController: Union[str, None]: Asset name or None if asset is not set. """ + if not hasattr(self.host, "get_current_context"): + return legacy_io.Session["AVALON_ASSET"] + return self.host.get_current_context()["asset_name"] @property @@ -434,6 +441,9 @@ class PublisherController: Union[str, None]: Task name or None if task is not set. 
""" + if not hasattr(self.host, "get_current_context"): + return legacy_io.Session["AVALON_TASK"] + return self.host.get_current_context()["task_name"] @property From 2baa3a5b5449ece1972a4471ccad8396fd826af0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 17:08:40 +0200 Subject: [PATCH 1560/2550] fix typo --- openpype/tools/publisher/widgets/validations_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index 74be672f3b..06ac0bad8a 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -678,7 +678,7 @@ class ValidationsWidget(QtWidgets.QFrame): self._set_errors(validation_errors) return - if self._contoller.publish_has_finished: + if self._controller.publish_has_finished: self._set_current_widget(self._publish_stop_ok_widget) return From 4d40024bdbaf4fbee276e2957d0de534675bb3b2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 18:35:32 +0200 Subject: [PATCH 1561/2550] added double click for asset dialog --- .../tools/publisher/widgets/assets_widget.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/assets_widget.py b/openpype/tools/publisher/widgets/assets_widget.py index 39bf3886ea..996c9029d4 100644 --- a/openpype/tools/publisher/widgets/assets_widget.py +++ b/openpype/tools/publisher/widgets/assets_widget.py @@ -1,6 +1,7 @@ import collections from Qt import QtWidgets, QtCore, QtGui + from openpype.tools.utils import ( PlaceholderLineEdit, RecursiveSortFilterProxyModel, @@ -163,6 +164,16 @@ class AssetsHierarchyModel(QtGui.QStandardItemModel): return item_name in self._items_by_name +class AssetDialogView(QtWidgets.QTreeView): + double_clicked = QtCore.Signal(QtCore.QModelIndex) + + def mouseDoubleClickEvent(self, event): + index = self.indexAt(event.pos()) + if index.isValid(): + self.double_clicked.emit(index) + event.accept() + + class AssetsDialog(QtWidgets.QDialog): """Dialog to select asset for a context of instance.""" @@ -178,7 +189,7 @@ class AssetsDialog(QtWidgets.QDialog): filter_input = PlaceholderLineEdit(self) filter_input.setPlaceholderText("Filter assets..") - asset_view = QtWidgets.QTreeView(self) + asset_view = AssetDialogView(self) asset_view.setModel(proxy_model) asset_view.setHeaderHidden(True) asset_view.setFrameShape(QtWidgets.QFrame.NoFrame) @@ -200,6 +211,7 @@ class AssetsDialog(QtWidgets.QDialog): layout.addWidget(asset_view, 1) layout.addLayout(btns_layout, 0) + asset_view.double_clicked.connect(self._on_ok_clicked) filter_input.textChanged.connect(self._on_filter_change) ok_btn.clicked.connect(self._on_ok_clicked) cancel_btn.clicked.connect(self._on_cancel_clicked) @@ -274,7 +286,7 @@ class AssetsDialog(QtWidgets.QDialog): index = self._asset_view.currentIndex() asset_name = None if index.isValid(): - asset_name = index.data(QtCore.Qt.DisplayRole) + asset_name = index.data(ASSET_NAME_ROLE) self._selected_asset = asset_name self.done(1) From d46ca7ed50b314fa3ae61e106cfd8297b96c630e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 19:23:29 +0200 Subject: [PATCH 1562/2550] cache assets hierarchy and stringify object ids --- openpype/tools/publisher/control.py | 46 +++++++++++++++++++++++------ 1 file changed, 37 insertions(+), 9 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 
43721b9229..c0ffa942a4 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -60,15 +60,17 @@ class AssetDocsCache: def __init__(self, controller): self._controller = controller self._asset_docs = None - # TODO use asset ids instead + self._asset_docs_hierarchy = None self._task_names_by_asset_name = {} self._asset_docs_by_name = {} self._full_asset_docs_by_name = {} def reset(self): self._asset_docs = None + self._asset_docs_hierarchy = None self._task_names_by_asset_name = {} self._asset_docs_by_name = {} + self._full_asset_docs_by_name = {} def _query(self): if self._asset_docs is not None: @@ -81,8 +83,13 @@ class AssetDocsCache: asset_docs_by_name = {} task_names_by_asset_name = {} for asset_doc in asset_docs: + if "data" not in asset_doc: + asset_doc["data"] = {"tasks": {}, "visualParent": None} + elif "tasks" not in asset_doc["data"]: + asset_doc["data"]["tasks"] = {} + asset_name = asset_doc["name"] - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + asset_tasks = asset_doc["data"]["tasks"] task_names_by_asset_name[asset_name] = list(asset_tasks.keys()) asset_docs_by_name[asset_name] = asset_doc @@ -94,11 +101,38 @@ class AssetDocsCache: self._query() return copy.deepcopy(self._asset_docs) + def get_asset_hierarchy(self): + """Prepare asset documents into hierarchy. + + Convert ObjectId to string. Asset id is not used during whole + process of publisher but asset name is used rather. + + Returns: + Dict[Union[str, None]: Any]: Mapping of parent id to it's children. + Top level assets have parent id 'None'. + """ + + if self._asset_docs_hierarchy is None: + _queue = collections.deque(self.get_asset_docs()) + + output = collections.defaultdict(list) + while _queue: + asset_doc = _queue.popleft() + asset_doc["_id"] = str(asset_doc["_id"]) + parent_id = asset_doc["data"]["visualParent"] + if parent_id is not None: + parent_id = str(parent_id) + asset_doc["data"]["visualParent"] = parent_id + output[parent_id].append(asset_doc) + self._asset_docs_hierarchy = output + return copy.deepcopy(self._asset_docs_hierarchy) + def get_task_names_by_asset_name(self): self._query() return copy.deepcopy(self._task_names_by_asset_name) def get_asset_by_name(self, asset_name): + self._query() asset_doc = self._asset_docs_by_name.get(asset_name) if asset_doc is None: return None @@ -1588,14 +1622,8 @@ class PublisherController(BasePublishController): def get_asset_hierarchy(self): """Prepare asset documents into hierarchy.""" - _queue = collections.deque(self.get_asset_docs()) - output = collections.defaultdict(list) - while _queue: - asset_doc = _queue.popleft() - parent_id = asset_doc["data"]["visualParent"] - output[parent_id].append(asset_doc) - return output + return self._asset_docs_cache.get_asset_hierarchy() def get_task_names_by_asset_names(self, asset_names): """Prepare task names by asset name.""" From 5f1bfe2790e1864fca60be2895caa54333a7ca09 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 10 Oct 2022 19:24:01 +0200 Subject: [PATCH 1563/2550] use 'get_subset_name' on controller instead of calling directly creator --- openpype/tools/publisher/widgets/widgets.py | 28 +++++---------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 903ce70f01..c6c8ed3c7d 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1060,24 +1060,6 @@ class GlobalAttrsWidget(QtWidgets.QWidget): 
if self.task_value_widget.has_value_changed(): task_name = self.task_value_widget.get_selected_items()[0] - asset_docs_by_name = {} - asset_names = set() - if asset_name is None: - for instance in self._current_instances: - asset_names.add(instance.get("asset")) - else: - asset_names.add(asset_name) - - for asset_doc in self._controller.get_asset_docs(): - _asset_name = asset_doc["name"] - if _asset_name in asset_names: - asset_names.remove(_asset_name) - asset_docs_by_name[_asset_name] = asset_doc - - if not asset_names: - break - - project_name = self._controller.project_name subset_names = set() invalid_tasks = False for instance in self._current_instances: @@ -1093,11 +1075,13 @@ class GlobalAttrsWidget(QtWidgets.QWidget): if task_name is not None: new_task_name = task_name - asset_doc = asset_docs_by_name[new_asset_name] - try: - new_subset_name = instance.creator.get_subset_name( - new_variant_value, new_task_name, asset_doc, project_name + new_subset_name = self._controller.get_subset_name( + instance.creator_identifier, + new_variant_value, + new_task_name, + new_asset_name, + instance.id ) except TaskNotSetError: invalid_tasks = True From 65c2638f3dd047d5168b256dd848a9a0c89c5ec4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 11 Oct 2022 09:43:18 +0200 Subject: [PATCH 1564/2550] Fusion: Implement backwards compatibility (pre Fusion 17.4) --- openpype/hosts/fusion/api/pipeline.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index b22ee5328f..b6092f7c1b 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -44,11 +44,26 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") class FusionLogHandler(logging.Handler): # Keep a reference to fusion's Print function (Remote Object) - _print = getattr(sys.modules["__main__"], "fusion").Print + _print = None + + @property + def print(self): + if self._print is not None: + # Use cached + return self._print + + _print = getattr(sys.modules["__main__"], "fusion").Print + if _print is None: + # Backwards compatibility: Print method on Fusion instance was + # added around Fusion 17.4 and wasn't available on PyRemote Object + # before + _print = get_current_comp().Print + self._print = _print + return _print def emit(self, record): entry = self.format(record) - self._print(entry) + self.print(entry) def install(): From 2e86d0329357a938d540808db54c3831357fed1c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 10:55:09 +0200 Subject: [PATCH 1565/2550] fix import of PublisherWindow and add ability to pass controller --- openpype/tools/utils/host_tools.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 552ce0d432..eababfee32 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -269,25 +269,25 @@ class HostToolsHelper: dialog.activateWindow() dialog.showNormal() - def get_publisher_tool(self, parent): + def get_publisher_tool(self, parent=None, controller=None): """Create, cache and return publisher window.""" if self._publisher_tool is None: - from openpype.tools.publisher import PublisherWindow + from openpype.tools.publisher.window import PublisherWindow host = registered_host() ILoadHost.validate_load_methods(host) publisher_window = PublisherWindow( - parent=parent or self._parent + controller=controller, 
parent=parent or self._parent ) self._publisher_tool = publisher_window return self._publisher_tool - def show_publisher_tool(self, parent=None): + def show_publisher_tool(self, parent=None, controller=None): with qt_app_context(): - dialog = self.get_publisher_tool(parent) + dialog = self.get_publisher_tool(controller, parent) dialog.show() dialog.raise_() From 96e3c8c42f220df45140053076fb14359f582cca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 11:22:57 +0200 Subject: [PATCH 1566/2550] hide instance and plugin labels on publish process stop --- .../tools/publisher/widgets/publish_frame.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index 4e5f02f2da..8899faa7b9 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -210,6 +210,8 @@ class PublishFrame(QtWidgets.QWidget): self._shrunken = False self._top_widget_max_height = None self._top_widget_size_policy = top_content_widget.sizePolicy() + self._last_instance_label = None + self._last_plugin_label = None def mouseReleaseEvent(self, event): super(PublishFrame, self).mouseReleaseEvent(event) @@ -298,6 +300,9 @@ class PublishFrame(QtWidgets.QWidget): self._shrunk_main_label.setText(message) def _on_publish_reset(self): + self._last_instance_label = None + self._last_plugin_label = None + self._set_success_property() self._set_progress_visibility(True) @@ -313,6 +318,12 @@ class PublishFrame(QtWidgets.QWidget): self._progress_bar.setMaximum(self.controller.publish_max_progress) def _on_publish_start(self): + if self._last_plugin_label: + self._plugin_label.setText(self._last_plugin_label) + + if self._last_instance_label: + self._instance_label.setText(self._last_instance_label) + self._set_success_property(-1) self._set_progress_visibility(True) self._set_main_label("Publishing...") @@ -328,12 +339,14 @@ class PublishFrame(QtWidgets.QWidget): def _on_instance_change(self, event): """Change instance label when instance is going to be processed.""" + self._last_instance_label = event["instance_label"] self._instance_label.setText(event["instance_label"]) QtWidgets.QApplication.processEvents() def _on_plugin_change(self, event): """Change plugin label when instance is going to be processed.""" + self._last_plugin_label = event["plugin_label"] self._progress_bar.setValue(self.controller.publish_progress) self._plugin_label.setText(event["plugin_label"]) QtWidgets.QApplication.processEvents() @@ -343,6 +356,10 @@ class PublishFrame(QtWidgets.QWidget): self._reset_btn.setEnabled(True) self._stop_btn.setEnabled(False) + + self._instance_label.setText("") + self._plugin_label.setText("") + validate_enabled = not self.controller.publish_has_crashed publish_enabled = not self.controller.publish_has_crashed if validate_enabled: From e9eda8708e501ec6a919d0a44f123a720317ffb7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 11:23:21 +0200 Subject: [PATCH 1567/2550] de-shrunk publish frame on publish start --- openpype/tools/publisher/widgets/publish_frame.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index 8899faa7b9..04e5f85528 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -333,6 +333,8 @@ class PublishFrame(QtWidgets.QWidget): 
self._validate_btn.setEnabled(False) self._publish_btn.setEnabled(False) + self.set_shrunk_state(False) + def _on_publish_validated(self): self._validate_btn.setEnabled(False) From 4fa8494ae53fee74f6cf270dd79721650dcf9a27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 11:23:27 +0200 Subject: [PATCH 1568/2550] change label message --- openpype/tools/publisher/widgets/validations_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index 4fa5ed4902..f35d286e88 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -470,7 +470,7 @@ class ValidationsWidget(QtWidgets.QFrame): ) # After success publishing publish_started_widget = ValidationArtistMessage( - "Publishing run smoothly", self + "Publishing went smoothly", self ) # After success publishing publish_stop_ok_widget = ValidationArtistMessage( From 2b6d6a5151d961a9844ab1c8d848a36edbd292c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 11:23:58 +0200 Subject: [PATCH 1569/2550] smaller font for report messages --- openpype/style/style.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index ac4785e630..740c1520e0 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1005,7 +1005,7 @@ VariantInputsWidget QToolButton { } ValidationArtistMessage QLabel { - font-size: 28pt; + font-size: 20pt; font-weight: bold; } From 8b570a37d548a0e0028863db2dc2d493b8fddf8f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 12:01:29 +0200 Subject: [PATCH 1570/2550] make publish frame borders transparent --- openpype/style/style.css | 1 - .../tools/publisher/widgets/publish_frame.py | 18 ++++++++----- openpype/tools/publisher/window.py | 27 ++++++++++--------- 3 files changed, 27 insertions(+), 19 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 740c1520e0..4d13dc7c89 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -971,7 +971,6 @@ VariantInputsWidget QToolButton { #PublishInfoFrame { background: {color:bg}; - border: 2px solid black; border-radius: 0.3em; } diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index 04e5f85528..ddaac7027d 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -29,7 +29,7 @@ class PublishFrame(QtWidgets.QWidget): +------------------------------------------------------------------------+ | < Main label > | | < Label top > | - | (#### 10% ) | + | (#### 10% ) | | | | | +------------------------------------------------------------------------+ @@ -37,7 +37,7 @@ class PublishFrame(QtWidgets.QWidget): details_page_requested = QtCore.Signal() - def __init__(self, controller, parent): + def __init__(self, controller, borders, parent): super(PublishFrame, self).__init__(parent) # Bottom part of widget where process and callback buttons are showed @@ -137,7 +137,7 @@ class PublishFrame(QtWidgets.QWidget): content_layout.addWidget(footer_widget) main_layout = QtWidgets.QVBoxLayout(self) - main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.setContentsMargins(borders, 0, borders, borders) main_layout.addWidget(content_frame) shrunk_anim = QtCore.QVariantAnimation() @@ -261,7 +261,7 @@ class 
PublishFrame(QtWidgets.QWidget): diff -= self._content_layout.spacing() window_pos = self.pos() - window_pos_y = self.pos().y() + diff + window_pos_y = window_pos.y() + diff window_height = self.height() - diff self._top_content_widget.setMinimumHeight(value) @@ -286,11 +286,17 @@ class PublishFrame(QtWidgets.QWidget): if self._shrunken: content_frame_hint = self._content_frame.sizeHint() - window_height = content_frame_hint.height() + layout = self.layout() + margins = layout.contentsMargins() + window_height = ( + content_frame_hint.height() + + margins.bottom() + + margins.top() + ) diff = self.height() - window_height window_pos = self.pos() - window_pos_y = self.pos().y() + diff + window_pos_y = window_pos.y() + diff self.resize(self.width(), window_height) self.move(window_pos.x(), window_pos_y) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index aa5f08eed4..4b382c0df5 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -33,6 +33,8 @@ class PublisherWindow(QtWidgets.QDialog): """Main window of publisher.""" default_width = 1300 default_height = 800 + footer_border = 8 + publish_footer_spacer = 2 def __init__(self, parent=None, reset_on_show=None): super(PublisherWindow, self).__init__(parent) @@ -126,22 +128,23 @@ class PublisherWindow(QtWidgets.QDialog): footer_bottom_layout.addWidget(validate_btn, 0) footer_bottom_layout.addWidget(publish_btn, 0) - footer_layout = QtWidgets.QVBoxLayout(footer_widget) - footer_margins = footer_layout.contentsMargins() - border = 2 - footer_layout.setContentsMargins( - footer_margins.left() + border, - footer_margins.top(), - footer_margins.right() + border, - footer_margins.bottom() + border - ) # Spacer helps keep distance of Publish Frame when comment input # is hidden - so when is shrunken it is not overlaying pages footer_spacer = QtWidgets.QWidget(footer_widget) - footer_spacer.setMinimumHeight(border) - footer_spacer.setMaximumHeight(border) + footer_spacer.setMinimumHeight(self.publish_footer_spacer) + footer_spacer.setMaximumHeight(self.publish_footer_spacer) footer_spacer.setVisible(False) + footer_layout = QtWidgets.QVBoxLayout(footer_widget) + footer_margins = footer_layout.contentsMargins() + + footer_layout.setContentsMargins( + footer_margins.left() + self.footer_border, + footer_margins.top(), + footer_margins.right() + self.footer_border, + footer_margins.bottom() + self.footer_border + ) + footer_layout.addWidget(comment_input, 0) footer_layout.addWidget(footer_spacer, 0) footer_layout.addWidget(footer_bottom_widget, 0) @@ -216,7 +219,7 @@ class PublisherWindow(QtWidgets.QDialog): main_layout.addWidget(under_publish_stack, 1) # Floating publish frame - publish_frame = PublishFrame(controller, self) + publish_frame = PublishFrame(controller, self.footer_border, self) help_btn.clicked.connect(self._on_help_click) tabs_widget.tab_changed.connect(self._on_tab_change) From 0c86f321b4bd1365e8453327b4a7e560718642e2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Oct 2022 12:58:47 +0200 Subject: [PATCH 1571/2550] settings: return back __legacy__ fallback --- openpype/settings/lib.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 3112400dbf..5eaddf6e6e 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -316,6 +316,22 @@ def _system_settings_backwards_compatible_conversion(studio_overrides): } +def 
_project_anatomy_backwards_compatible_conversion(project_anatomy): + # Backwards compatibility of node settings in Nuke 3.9.x - 3.10.0 + # - source PR - https://github.com/pypeclub/OpenPype/pull/3143 + value = project_anatomy + for key in ("imageio", "nuke", "nodes", "requiredNodes"): + if key not in value: + return + value = value[key] + + for item in value: + for node in item.get("knobs") or []: + if "type" in node: + break + node["type"] = "__legacy__" + + @require_handler def get_studio_system_settings_overrides(return_version=False): output = _SETTINGS_HANDLER.get_studio_system_settings_overrides( @@ -352,6 +368,7 @@ def get_project_settings_overrides(project_name, return_version=False): @require_handler def get_project_anatomy_overrides(project_name): output = _SETTINGS_HANDLER.get_project_anatomy_overrides(project_name) + _project_anatomy_backwards_compatible_conversion(output) return output From bbf3a8baae3c075133681668dd0c89f7ccdbb6ed Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Oct 2022 12:59:41 +0200 Subject: [PATCH 1572/2550] nuke: imageio from project settings with backward compatibility switch --- openpype/hosts/nuke/api/lib.py | 8 ++++++++ openpype/settings/defaults/project_settings/nuke.json | 1 + .../schemas/projects_schema/schema_project_nuke.json | 6 ++++++ 3 files changed, 15 insertions(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 6297da884c..1aea04d889 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -563,6 +563,14 @@ def get_node_path(path, padding=4): def get_nuke_imageio_settings(): + project_imageio = get_project_settings( + Context.project_name)["nuke"]["imageio"] + + # backward compatibility for project started before 3.10 + # those are still having `__legacy__` knob types + if not project_imageio["enabled"]: + return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] + return get_project_settings(Context.project_name)["nuke"]["imageio"] diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index e0feb06eb6..e5cbacbda7 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -9,6 +9,7 @@ } }, "imageio": { + "enabled": false, "viewer": { "viewerProcess": "sRGB" }, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index ff341fb919..e23e2b3bec 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -50,9 +50,15 @@ "key": "imageio", "type": "dict", "label": "Color Management (ImageIO)", + "checkbox_key": "enabled", "collapsible": true, "is_group": true, "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, { "key": "viewer", "type": "dict", From be8b2c5faa72f999848214ca2ce1e9e41576ea16 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Oct 2022 13:04:06 +0200 Subject: [PATCH 1573/2550] nuke: settings imageio separate schema --- .../projects_schema/schema_project_nuke.json | 254 +----------------- .../schemas/schema_nuke_imageio.json | 254 ++++++++++++++++++ 2 files changed, 256 insertions(+), 252 deletions(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_imageio.json diff --git 
a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index e23e2b3bec..154eca254b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -47,258 +47,8 @@ ] }, { - "key": "imageio", - "type": "dict", - "label": "Color Management (ImageIO)", - "checkbox_key": "enabled", - "collapsible": true, - "is_group": true, - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "key": "viewer", - "type": "dict", - "label": "Viewer", - "collapsible": false, - "children": [ - { - "type": "text", - "key": "viewerProcess", - "label": "Viewer Process" - } - ] - }, - { - "key": "baking", - "type": "dict", - "label": "Extract-review baking profile", - "collapsible": false, - "children": [ - { - "type": "text", - "key": "viewerProcess", - "label": "Viewer Process" - } - ] - }, - { - "key": "workfile", - "type": "dict", - "label": "Workfile", - "collapsible": false, - "children": [ - { - "type": "form", - "children": [ - { - "type": "enum", - "key": "colorManagement", - "label": "color management", - "enum_items": [ - { - "Nuke": "Nuke" - }, - { - "OCIO": "OCIO" - } - ] - }, - { - "type": "enum", - "key": "OCIO_config", - "label": "OpenColorIO Config", - "enum_items": [ - { - "nuke-default": "nuke-default" - }, - { - "spi-vfx": "spi-vfx" - }, - { - "spi-anim": "spi-anim" - }, - { - "aces_0.1.1": "aces_0.1.1" - }, - { - "aces_0.7.1": "aces_0.7.1" - }, - { - "aces_1.0.1": "aces_1.0.1" - }, - { - "aces_1.0.3": "aces_1.0.3" - }, - { - "aces_1.1": "aces_1.1" - }, - { - "aces_1.2": "aces_1.2" - }, - { - "custom": "custom" - } - ] - }, - { - "type": "path", - "key": "customOCIOConfigPath", - "label": "Custom OCIO config path", - "multiplatform": true, - "multipath": true - }, - { - "type": "text", - "key": "workingSpaceLUT", - "label": "Working Space" - }, - { - "type": "text", - "key": "monitorLut", - "label": "monitor" - }, - { - "type": "text", - "key": "int8Lut", - "label": "8-bit files" - }, - { - "type": "text", - "key": "int16Lut", - "label": "16-bit files" - }, - { - "type": "text", - "key": "logLut", - "label": "log files" - }, - { - "type": "text", - "key": "floatLut", - "label": "float files" - } - ] - } - ] - }, - { - "key": "nodes", - "type": "dict", - "label": "Nodes", - "collapsible": true, - "children": [ - { - "key": "requiredNodes", - "type": "list", - "label": "Plugin required", - "object_type": { - "type": "dict", - "children": [ - { - "type": "list", - "key": "plugins", - "label": "Used in plugins", - "object_type": { - "type": "text", - "key": "pluginClass" - } - }, - { - "type": "text", - "key": "nukeNodeClass", - "label": "Nuke Node Class" - }, - { - "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Knobs", - "key": "knobs" - } - ] - } - - ] - } - }, - { - "type": "splitter" - }, - { - "type": "list", - "key": "overrideNodes", - "label": "Plugin's node overrides", - "object_type": { - "type": "dict", - "children": [ - { - "type": "list", - "key": "plugins", - "label": "Used in plugins", - "object_type": { - "type": "text", - "key": "pluginClass" - } - }, - { - "type": "text", - "key": "nukeNodeClass", - "label": "Nuke Node Class" - }, - { - "key": "subsets", - "label": "Subsets", - "type": "list", - "object_type": "text" - }, - { - "type": "schema_template", - "name": 
"template_nuke_knob_inputs", - "template_data": [ - { - "label": "Knobs overrides", - "key": "knobs" - } - ] - } - ] - } - } - ] - }, - { - "key": "regexInputs", - "type": "dict", - "label": "Colorspace on Inputs by regex detection", - "collapsible": true, - "children": [ - { - "type": "list", - "key": "inputs", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "regex", - "label": "Regex" - }, - { - "type": "text", - "key": "colorspace", - "label": "Colorspace" - } - ] - } - } - ] - } - ] + "type": "schema", + "name": "schema_nuke_imageio" }, { "type": "dict", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_imageio.json new file mode 100644 index 0000000000..52db853ef6 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_imageio.json @@ -0,0 +1,254 @@ +{ + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "checkbox_key": "enabled", + "collapsible": true, + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "viewer", + "type": "dict", + "label": "Viewer", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "baking", + "type": "dict", + "label": "Extract-review baking profile", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, + { + "key": "workfile", + "type": "dict", + "label": "Workfile", + "collapsible": false, + "children": [ + { + "type": "form", + "children": [ + { + "type": "enum", + "key": "colorManagement", + "label": "color management", + "enum_items": [ + { + "Nuke": "Nuke" + }, + { + "OCIO": "OCIO" + } + ] + }, + { + "type": "enum", + "key": "OCIO_config", + "label": "OpenColorIO Config", + "enum_items": [ + { + "nuke-default": "nuke-default" + }, + { + "spi-vfx": "spi-vfx" + }, + { + "spi-anim": "spi-anim" + }, + { + "aces_0.1.1": "aces_0.1.1" + }, + { + "aces_0.7.1": "aces_0.7.1" + }, + { + "aces_1.0.1": "aces_1.0.1" + }, + { + "aces_1.0.3": "aces_1.0.3" + }, + { + "aces_1.1": "aces_1.1" + }, + { + "aces_1.2": "aces_1.2" + }, + { + "custom": "custom" + } + ] + }, + { + "type": "path", + "key": "customOCIOConfigPath", + "label": "Custom OCIO config path", + "multiplatform": true, + "multipath": true + }, + { + "type": "text", + "key": "workingSpaceLUT", + "label": "Working Space" + }, + { + "type": "text", + "key": "monitorLut", + "label": "monitor" + }, + { + "type": "text", + "key": "int8Lut", + "label": "8-bit files" + }, + { + "type": "text", + "key": "int16Lut", + "label": "16-bit files" + }, + { + "type": "text", + "key": "logLut", + "label": "log files" + }, + { + "type": "text", + "key": "floatLut", + "label": "float files" + } + ] + } + ] + }, + { + "key": "nodes", + "type": "dict", + "label": "Nodes", + "collapsible": true, + "children": [ + { + "key": "requiredNodes", + "type": "list", + "label": "Plugin required", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs", + "key": "knobs" 
+ } + ] + } + + ] + } + }, + { + "type": "splitter" + }, + { + "type": "list", + "key": "overrideNodes", + "label": "Plugin's node overrides", + "object_type": { + "type": "dict", + "children": [ + { + "type": "list", + "key": "plugins", + "label": "Used in plugins", + "object_type": { + "type": "text", + "key": "pluginClass" + } + }, + { + "type": "text", + "key": "nukeNodeClass", + "label": "Nuke Node Class" + }, + { + "key": "subsets", + "label": "Subsets", + "type": "list", + "object_type": "text" + }, + { + "type": "schema_template", + "name": "template_nuke_knob_inputs", + "template_data": [ + { + "label": "Knobs overrides", + "key": "knobs" + } + ] + } + ] + } + } + ] + }, + { + "key": "regexInputs", + "type": "dict", + "label": "Colorspace on Inputs by regex detection", + "collapsible": true, + "children": [ + { + "type": "list", + "key": "inputs", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "regex", + "label": "Regex" + }, + { + "type": "text", + "key": "colorspace", + "label": "Colorspace" + } + ] + } + } + ] + } + ] +} \ No newline at end of file From a82968a32b8688bd32fcf9a6031deea8e6d8c424 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Oct 2022 13:37:11 +0200 Subject: [PATCH 1574/2550] nuke: backward compatible knob values fix --- .../nuke/plugins/publish/validate_write_nodes.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 26a563b13b..3e2881f298 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -77,11 +77,14 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): # fix type differences if type(node_value) in (int, float): - if isinstance(value, list): - value = color_gui_to_int(value) - else: - value = float(value) - node_value = float(node_value) + try: + if isinstance(value, list): + value = color_gui_to_int(value) + else: + value = float(value) + node_value = float(node_value) + except ValueError: + value = str(value) else: value = str(value) node_value = str(node_value) From 67f4112256d6ac3b6c5812f9a700e61e1d539c03 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 14:06:51 +0200 Subject: [PATCH 1575/2550] removed duplicated topic from docstring --- openpype/tools/publisher/control.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index c0ffa942a4..11006dbc08 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1320,7 +1320,6 @@ class BasePublishController(AbstractPublisherController): "publish.has_validated.changed" - Attr 'publish_has_validated' changed. "publish.is_running.changed" - Attr 'publish_is_running' changed. - "publish.has_validated.changed" - Attr 'has_validated' changed. "publish.has_crashed.changed" - Attr 'publish_has_crashed' changed. 
"publish.publish_error.changed" - Attr 'publish_error' "publish.has_validation_errors.changed" - Attr From e883f8743b179f8fadcc29b9ce9ffb25d6e43060 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 14:13:39 +0200 Subject: [PATCH 1576/2550] renamed 'BasePublishController' to 'BasePublisherController' --- openpype/tools/publisher/control.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 11006dbc08..05b0bb39be 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1251,7 +1251,7 @@ class AbstractPublisherController(object): pass -class BasePublishController(AbstractPublisherController): +class BasePublisherController(AbstractPublisherController): """Implement common logic for controllers. Implement event system, logger and common attributes. Attributes are @@ -1491,7 +1491,7 @@ class BasePublishController(AbstractPublisherController): return None -class PublisherController(BasePublishController): +class PublisherController(BasePublisherController): """Middleware between UI, CreateContext and publish Context. Handle both creation and publishing parts. From 75769804e974a4a33e83321b5e4eb0791d74281e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 14:13:52 +0200 Subject: [PATCH 1577/2550] use 'BasePublisherController' for 'QtRemotePublishController' --- openpype/tools/publisher/control_qt.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 10f576a3f3..006303ec6c 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -6,7 +6,11 @@ from Qt import QtCore from openpype.lib.events import Event from openpype.pipeline.create import CreatedInstance -from .control import MainThreadItem, PublisherController +from .control import ( + MainThreadItem, + PublisherController, + BasePublisherController, +) class MainThreadProcess(QtCore.QObject): @@ -92,7 +96,7 @@ class QtPublisherController(PublisherController): self._main_thread_processor.stop() -class QtRemotePublishController(PublisherController): +class QtRemotePublishController(BasePublisherController): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) From 18ebea7eb8825607cd13ea7d6adb23b6d486ba85 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 14:14:03 +0200 Subject: [PATCH 1578/2550] handle 'host_is_valid' attribute change --- openpype/tools/publisher/control_qt.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 006303ec6c..51aeec65d1 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -167,6 +167,10 @@ class QtRemotePublishController(BasePublisherController): self.publish_finished = event["value"] return + if event.topic == "publish.host_is_valid.changed": + self.host_is_valid = event["value"] + return + # Topics that can be just passed by because are not affecting # controller itself # - "show.card.message" @@ -174,6 +178,7 @@ class QtRemotePublishController(BasePublisherController): # - "publish.reset.finished" # - "instances.refresh.finished" # - "plugins.refresh.finished" + # - "controller.reset.finished" # - "publish.process.started" # - "publish.process.stopped" # - "publish.process.plugin.changed" From 845ad59d6cb6a06ca55a3989bd19a82f12d4ba55 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Tue, 11 Oct 2022 14:39:53 +0200 Subject: [PATCH 1579/2550] Update openpype/hosts/maya/api/lib.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/maya/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 789dec31fa..292b95da84 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -23,7 +23,7 @@ from openpype.client import ( get_last_versions, get_representation_by_name ) -from openpype.api import get_project_settings +from openpype.settings import get_project_settings from openpype.pipeline import ( legacy_io, discover_loader_plugins, From ee8599944c229d6d83e46b9e30f7de100b17f181 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 11 Oct 2022 15:27:22 +0200 Subject: [PATCH 1580/2550] OP-4218 - added configuration to keep version synched Customer might want to keep all published items to use same version number. This is calculated as max published version of all subsets that are being published. --- .../defaults/project_settings/webpublisher.json | 1 + .../schema_project_webpublisher.json | 13 +++++++++++++ 2 files changed, 14 insertions(+) diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index cba472514e..09c7d3ec94 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -10,6 +10,7 @@ ], "publish": { "CollectPublishedFiles": { + "sync_next_version": false, "task_type_to_family": { "Animation": [ { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index 2ef7a05b21..e93e85ef19 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -49,6 +49,19 @@ "key": "CollectPublishedFiles", "label": "Collect Published Files", "children": [ + { + "type": "label", + "label": "Select if all versions of published items should be kept same. (As max(published) + 1.)" + }, + { + "type": "boolean", + "key": "sync_next_version", + "label": "Sync next publish version" + }, + { + "type": "label", + "label": "Configure resulting family and tags on representation based on uploaded file and task.
    Eg. '.png' is uploaded >> create instance of 'render' family
    'Create review' in Tags >> mark representation to create review from." + }, { "type": "dict-modifiable", "collapsible": true, From 9f7e241474aa9ec824c53a1cb0dfa143b6084e5a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 11 Oct 2022 15:30:33 +0200 Subject: [PATCH 1581/2550] OP-4218 - refactor - moved separator in UI Now it makes more sense, if sequence of .png is published >> create instance of 'render' family, add 'review' tag and additional families. --- .../projects_schema/schema_project_webpublisher.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index e93e85ef19..a81a403bcb 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -87,6 +87,9 @@ "label": "Extensions", "object_type": "text" }, + { + "type": "separator" + }, { "type": "list", "key": "families", @@ -97,9 +100,6 @@ "type": "schema", "name": "schema_representation_tags" }, - { - "type": "separator" - }, { "type": "text", "key": "result_family", From 06909bc45215203bc256a64b711f17dfeda2325b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 11 Oct 2022 15:31:33 +0200 Subject: [PATCH 1582/2550] OP-4218 - added variable to control versions Use same version number for all published intances if enabled. --- .../plugins/publish/collect_published_files.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 278a102f9d..d61ae4b17e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -46,6 +46,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): # from Settings task_type_to_family = [] + sync_next_version = False # find max version to be published, use for all def process(self, context): batch_dir = context.data["batchDir"] @@ -64,6 +65,9 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_type = context.data["taskType"] project_name = context.data["project_name"] variant = context.data["variant"] + + next_versions = [] + instances = [] for task_dir in task_subfolders: task_data = parse_json(os.path.join(task_dir, "manifest.json")) @@ -87,16 +91,15 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): host_name="webpublisher", project_settings=context.data["project_settings"] ) - version = self._get_next_version( + next_versions.append(self._get_next_version( project_name, asset_doc, subset_name - ) + )) instance = context.create_instance(subset_name) instance.data["asset"] = asset_name instance.data["subset"] = subset_name instance.data["family"] = family instance.data["families"] = families - instance.data["version"] = version instance.data["stagingDir"] = tempfile.mkdtemp() instance.data["source"] = "webpublisher" @@ -137,8 +140,16 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["handleStart"] = asset_doc["data"]["handleStart"] instance.data["handleEnd"] = asset_doc["data"]["handleEnd"] + instances.append(instance) self.log.info("instance.data:: {}".format(instance.data)) + if not self.sync_version: + return + + max_next_version = max(next_versions) + for 
inst in instances: + inst.data["version"] = max_next_version + def _get_subset_name(self, family, subset_template, task_name, variant): fill_pairs = { "variant": variant, From 4214aa1403452f89f3d19541b28a54d35e97bfed Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 11 Oct 2022 15:35:41 +0200 Subject: [PATCH 1583/2550] OP-4218 - fix version logic Added documentation. --- .../plugins/publish/collect_published_files.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index d61ae4b17e..85e0469801 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -91,15 +91,19 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): host_name="webpublisher", project_settings=context.data["project_settings"] ) - next_versions.append(self._get_next_version( + version = self._get_next_version( project_name, asset_doc, subset_name - )) + ) + next_versions.append(version) instance = context.create_instance(subset_name) instance.data["asset"] = asset_name instance.data["subset"] = subset_name + # set configurable result family instance.data["family"] = family + # set configurable additional families instance.data["families"] = families + instance.data["version"] = version instance.data["stagingDir"] = tempfile.mkdtemp() instance.data["source"] = "webpublisher" @@ -146,6 +150,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): if not self.sync_version: return + # overwrite specific version with same version for all max_next_version = max(next_versions) for inst in instances: inst.data["version"] = max_next_version @@ -187,7 +192,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "ext": ext[1:], "files": files, "stagingDir": task_dir, - "tags": tags + "tags": tags # configurable tags from Settings } self.log.info("sequences repre_data.data:: {}".format(repre_data)) return [repre_data] From b3e9d6a6b66f8b23e8d6fdc035efdbc8a136ef88 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 11 Oct 2022 15:57:34 +0200 Subject: [PATCH 1584/2550] OP-4218 - fix variable --- .../webpublisher/plugins/publish/collect_published_files.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 85e0469801..9d1c702860 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -147,13 +147,14 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instances.append(instance) self.log.info("instance.data:: {}".format(instance.data)) - if not self.sync_version: + if not self.sync_next_version: return # overwrite specific version with same version for all max_next_version = max(next_versions) for inst in instances: inst.data["version"] = max_next_version + self.log.debug("overwritten version:: {}".format(max_next_version)) def _get_subset_name(self, family, subset_template, task_name, variant): fill_pairs = { From 994edef5883b5dec83794728ffee8c38bdcfa9fc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 11 Oct 2022 16:04:30 +0200 Subject: [PATCH 1585/2550] OP-4218 - added docstring --- .../plugins/publish/collect_published_files.py | 9 +++++++++ 1 file changed, 9 
insertions(+) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 9d1c702860..dd4646f356 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -37,6 +37,15 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): This is not applicable for 'studio' processing where host application is called to process uploaded workfile and render frames itself. + + For each task configure what properties should resulting instance have + based on uploaded files: + - uploading sequence of 'png' >> create instance of 'render' family, + by adding 'review' to 'Families' and 'Create review' to Tags it will + produce review. + + There might be difference between single(>>image) and sequence(>>render) + uploaded files. """ # must be really early, context values are only in json file order = pyblish.api.CollectorOrder - 0.490 From f823ecfc05de17587cff0830c0e9b97c3a28d2dd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 11 Oct 2022 16:57:02 +0200 Subject: [PATCH 1586/2550] Refactor container `schema` value `avalon-core:container-2.0` -> `openpype:container-2.0` - These were the only remainders using the old schema value --- openpype/hosts/houdini/plugins/load/load_image.py | 2 +- openpype/hosts/houdini/plugins/load/load_usd_layer.py | 2 +- openpype/hosts/houdini/plugins/load/load_usd_reference.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py index 928c2ee734..c78798e58a 100644 --- a/openpype/hosts/houdini/plugins/load/load_image.py +++ b/openpype/hosts/houdini/plugins/load/load_image.py @@ -73,7 +73,7 @@ class ImageLoader(load.LoaderPlugin): # Imprint it manually data = { - "schema": "avalon-core:container-2.0", + "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, diff --git a/openpype/hosts/houdini/plugins/load/load_usd_layer.py b/openpype/hosts/houdini/plugins/load/load_usd_layer.py index 48580fc3aa..2e5079925b 100644 --- a/openpype/hosts/houdini/plugins/load/load_usd_layer.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_layer.py @@ -43,7 +43,7 @@ class USDSublayerLoader(load.LoaderPlugin): # Imprint it manually data = { - "schema": "avalon-core:container-2.0", + "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, diff --git a/openpype/hosts/houdini/plugins/load/load_usd_reference.py b/openpype/hosts/houdini/plugins/load/load_usd_reference.py index 6851c77e6d..c4371db39b 100644 --- a/openpype/hosts/houdini/plugins/load/load_usd_reference.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_reference.py @@ -43,7 +43,7 @@ class USDReferenceLoader(load.LoaderPlugin): # Imprint it manually data = { - "schema": "avalon-core:container-2.0", + "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, From e5f6bca5d540bb809a61cec550fe9be0c83c1143 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Oct 2022 17:32:13 +0200 Subject: [PATCH 1587/2550] flame: removing redundant validation --- openpype/hosts/flame/hooks/pre_flame_setup.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index 8f2edf59a6..f0fdaa86ba 
100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -44,13 +44,6 @@ class FlamePrelaunch(PreLaunchHook): # get image io project_settings = self.data["project_settings"] - # make sure anatomy settings are having flame key - if not project_settings["flame"].get("imageio"): - raise ApplicationLaunchFailed( - "Project settings are missing `flame/imageio` key. " - "Please make sure to update project settings." - ) - imageio_flame = project_settings["flame"]["imageio"] # get user name and host name From 08442d6b071911e54e2962e9456d67e47069b978 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 17:38:25 +0200 Subject: [PATCH 1588/2550] use direct import of resources --- openpype/hosts/maya/api/customize.py | 2 +- openpype/hosts/nuke/api/utils.py | 2 +- openpype/tools/launcher/actions.py | 2 +- openpype/tools/launcher/lib.py | 2 +- openpype/tools/launcher/window.py | 2 +- openpype/tools/standalonepublish/app.py | 2 +- openpype/tools/tray/pype_info_widget.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/api/customize.py b/openpype/hosts/maya/api/customize.py index 683e6b24b0..f66858dfb6 100644 --- a/openpype/hosts/maya/api/customize.py +++ b/openpype/hosts/maya/api/customize.py @@ -8,7 +8,7 @@ from functools import partial import maya.cmds as cmds import maya.mel as mel -from openpype.api import resources +from openpype import resources from openpype.tools.utils import host_tools from .lib import get_main_window diff --git a/openpype/hosts/nuke/api/utils.py b/openpype/hosts/nuke/api/utils.py index 5b0c607292..6bcb752dd1 100644 --- a/openpype/hosts/nuke/api/utils.py +++ b/openpype/hosts/nuke/api/utils.py @@ -1,7 +1,7 @@ import os import nuke -from openpype.api import resources +from openpype import resources from .lib import maintained_selection diff --git a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py index b954110da4..34d06f72cc 100644 --- a/openpype/tools/launcher/actions.py +++ b/openpype/tools/launcher/actions.py @@ -4,7 +4,7 @@ from Qt import QtWidgets, QtGui from openpype import PLUGINS_DIR from openpype import style -from openpype.api import resources +from openpype import resources from openpype.lib import ( Logger, ApplictionExecutableNotFound, diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index c1392b7b8f..68e57c6b92 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -1,7 +1,7 @@ import os from Qt import QtGui import qtawesome -from openpype.api import resources +from openpype import resources ICON_CACHE = {} NOT_FOUND = type("NotFound", (object, ), {}) diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index dab6949613..a9eaa932bb 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -4,7 +4,7 @@ import logging from Qt import QtWidgets, QtCore, QtGui from openpype import style -from openpype.api import resources +from openpype import resources from openpype.pipeline import AvalonMongoDB import qtawesome diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py index 081235c91c..c93c33b2a5 100644 --- a/openpype/tools/standalonepublish/app.py +++ b/openpype/tools/standalonepublish/app.py @@ -13,7 +13,7 @@ from .widgets import ( ) from .widgets.constants import HOST_NAME from openpype import style -from openpype.api import resources +from openpype import resources from 
openpype.pipeline import AvalonMongoDB from openpype.modules import ModulesManager diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py index 8414cefec8..232d2024ac 100644 --- a/openpype/tools/tray/pype_info_widget.py +++ b/openpype/tools/tray/pype_info_widget.py @@ -5,7 +5,7 @@ import collections from Qt import QtCore, QtGui, QtWidgets from openpype import style -from openpype.api import resources +from openpype import resources from openpype.settings.lib import get_local_settings from openpype.lib.pype_info import ( get_all_current_info, From bca0c2c8103d1f81d7b2f9cee860f665d04bb3b8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 11 Oct 2022 17:39:04 +0200 Subject: [PATCH 1589/2550] add preffered qt bindings to Qt.py and qtpy --- openpype/hosts/maya/addon.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/addon.py b/openpype/hosts/maya/addon.py index 7b1f7bf754..16d8ae5cd6 100644 --- a/openpype/hosts/maya/addon.py +++ b/openpype/hosts/maya/addon.py @@ -30,7 +30,11 @@ class MayaAddon(OpenPypeModule, IHostAddon): # Set default values if are not already set via settings defaults = { - "OPENPYPE_LOG_NO_COLORS": "Yes" + "OPENPYPE_LOG_NO_COLORS": "Yes", + # For python module 'qtpy' + "QT_API": "PySide2", + # For python module 'Qt' + "QT_PREFERRED_BINDING": "PySide2" } for key, value in defaults.items(): if not env.get(key): From 3626d7572b2de2fca3f7e6e3fc3f3e2a4fb332f1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Oct 2022 17:47:49 +0200 Subject: [PATCH 1590/2550] fusion: removing redundant validation --- openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py index 83cd070924..d1ae5f64fd 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_ocio_hook.py @@ -15,12 +15,7 @@ class FusionPreLaunchOCIO(PreLaunchHook): project_settings = self.data["project_settings"] # make sure anatomy settings are having flame key - imageio_fusion = project_settings.get("fusion", {}).get("imageio") - if not imageio_fusion: - raise ApplicationLaunchFailed( - "Project settings are missing `fusion/imageio` key. " - "Please make sure you update your project settings. 
" - ) + imageio_fusion = project_settings["fusion"]["imageio"] ocio = imageio_fusion.get("ocio") enabled = ocio.get("enabled", False) From 9946ca351c7a57985a2348400080cdbb31c71647 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 11 Oct 2022 18:38:43 +0200 Subject: [PATCH 1591/2550] Added photoshop and aftereffects into ValidateVersion --- openpype/plugins/publish/validate_version.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/validate_version.py b/openpype/plugins/publish/validate_version.py index b94152ef2d..b91633430f 100644 --- a/openpype/plugins/publish/validate_version.py +++ b/openpype/plugins/publish/validate_version.py @@ -10,7 +10,8 @@ class ValidateVersion(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder label = "Validate Version" - hosts = ["nuke", "maya", "houdini", "blender", "standalonepublisher"] + hosts = ["nuke", "maya", "houdini", "blender", "standalonepublisher", + "photoshop", "aftereffects"] optional = False active = True From b527e38eb352be05d2e4231281e5e9d108858eb5 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 12 Oct 2022 04:16:33 +0000 Subject: [PATCH 1592/2550] [Automated] Bump version --- CHANGELOG.md | 24 +++++++++++------------- openpype/version.py | 2 +- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 455c7aa900..dca0e7ecef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,29 +1,38 @@ # Changelog -## [3.14.4-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.4-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...HEAD) **🚀 Enhancements** - General: Set root environments before DCC launch [\#3947](https://github.com/pypeclub/OpenPype/pull/3947) +- Refactor: changed legacy way to update database for Hero version integrate [\#3941](https://github.com/pypeclub/OpenPype/pull/3941) - Maya: Moved plugin from global to maya [\#3939](https://github.com/pypeclub/OpenPype/pull/3939) +- Fusion: Implement Alembic and FBX mesh loader [\#3927](https://github.com/pypeclub/OpenPype/pull/3927) - Publisher: Instances can be marked as stored [\#3846](https://github.com/pypeclub/OpenPype/pull/3846) **🐛 Bug fixes** +- Maya: Deadline OutputFilePath hack regression for Renderman [\#3950](https://github.com/pypeclub/OpenPype/pull/3950) +- Houdini: Fix validate workfile paths for non-parm file references [\#3948](https://github.com/pypeclub/OpenPype/pull/3948) - Photoshop: missed sync published version of workfile with workfile [\#3946](https://github.com/pypeclub/OpenPype/pull/3946) - Maya: fix regression of Renderman Deadline hack [\#3943](https://github.com/pypeclub/OpenPype/pull/3943) +- Tray: Change order of attribute changes [\#3938](https://github.com/pypeclub/OpenPype/pull/3938) - AttributeDefs: Fix crashing multivalue of files widget [\#3937](https://github.com/pypeclub/OpenPype/pull/3937) +- General: Fix links query on hero version [\#3900](https://github.com/pypeclub/OpenPype/pull/3900) - Publisher: Files Drag n Drop cleanup [\#3888](https://github.com/pypeclub/OpenPype/pull/3888) - Maya: Render settings validation attribute check tweak logging [\#3821](https://github.com/pypeclub/OpenPype/pull/3821) **🔀 Refactored code** +- General: Direct settings imports [\#3934](https://github.com/pypeclub/OpenPype/pull/3934) - General: import 'Logger' from 'openpype.lib' [\#3926](https://github.com/pypeclub/OpenPype/pull/3926) **Merged pull requests:** +- Maya + Yeti: Load 
Yeti Cache fix frame number recognition [\#3942](https://github.com/pypeclub/OpenPype/pull/3942) +- Fusion: Implement callbacks to Fusion's event system thread [\#3928](https://github.com/pypeclub/OpenPype/pull/3928) - Photoshop: create single frame image in Ftrack as review [\#3908](https://github.com/pypeclub/OpenPype/pull/3908) - Maya: Warn correctly about nodes in render instance with unexpected names [\#3816](https://github.com/pypeclub/OpenPype/pull/3816) @@ -68,6 +77,7 @@ - Unreal: Use new Extractor location [\#3917](https://github.com/pypeclub/OpenPype/pull/3917) - Flame: Use new Extractor location [\#3916](https://github.com/pypeclub/OpenPype/pull/3916) - Houdini: Use new Extractor location [\#3894](https://github.com/pypeclub/OpenPype/pull/3894) +- Harmony: Use new Extractor location [\#3893](https://github.com/pypeclub/OpenPype/pull/3893) - Hiero: Use new Extractor location [\#3851](https://github.com/pypeclub/OpenPype/pull/3851) - Maya: Remove old legacy \(ftrack\) plug-ins that are of no use anymore [\#3819](https://github.com/pypeclub/OpenPype/pull/3819) - Nuke: Use new Extractor location [\#3799](https://github.com/pypeclub/OpenPype/pull/3799) @@ -97,18 +107,6 @@ - Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) - Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) - Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) -- nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) -- Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) - -**🔀 Refactored code** - -- Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) -- Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) -- AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) - -**Merged pull requests:** - -- Standalone Publisher: Ignore empty labels, then still use name like other asset models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) ## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) diff --git a/openpype/version.py b/openpype/version.py index 1bd566aa9b..3a0c538daf 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.4-nightly.2" +__version__ = "3.14.4-nightly.3" From c369b6e493f0401ae8fb7df10b31a7a739667136 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 12 Oct 2022 15:18:26 +0800 Subject: [PATCH 1593/2550] delete unneccessary schema and data of Create Rig --- .../defaults/project_settings/maya.json | 2 -- .../schemas/schema_maya_create.json | 34 +++---------------- 2 files changed, 4 insertions(+), 32 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index b4164c63f0..3e3a61fc8f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -187,8 +187,6 @@ }, "CreateRig": { "enabled": true, - "write_color_sets": false, - "write_face_sets": false, "defaults": [ "Main", "Sim", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json 
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 7e12897336..bc6520474d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -170,36 +170,6 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CreateRig", - "label": "Create Rig", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "boolean", - "key": "write_color_sets", - "label": "Write Color Sets" - }, - { - "type": "boolean", - "key": "write_face_sets", - "label": "Write Face Sets" - }, - { - "type": "list", - "key": "defaults", - "label": "Default Subsets", - "object_type": "text" - } - ] - }, { "type": "dict", "collapsible": true, @@ -275,6 +245,10 @@ "key": "CreateReview", "label": "Create Review" }, + { + "key": "CreateRig", + "label": "Create Rig" + }, { "key": "CreateSetDress", "label": "Create Set Dress" From de3bdbddab4f811247898c614ca8056ae6689b39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Wed, 12 Oct 2022 10:54:51 +0200 Subject: [PATCH 1594/2550] :recycle: move data classes to new style --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7fbe134410..479906a5c2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -37,7 +37,7 @@ from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s -class MayaPluginInfo: +class MayaPluginInfo(): SceneFile = attr.ib(default=None) # Input OutputFilePath = attr.ib(default=None) # Output directory and filename OutputFilePrefix = attr.ib(default=None) @@ -50,7 +50,7 @@ class MayaPluginInfo: @attr.s -class PythonPluginInfo: +class PythonPluginInfo(): ScriptFile = attr.ib() Version = attr.ib(default="3.6") Arguments = attr.ib(default=None) @@ -58,7 +58,7 @@ class PythonPluginInfo: @attr.s -class VRayPluginInfo: +class VRayPluginInfo(): InputFilename = attr.ib(default=None) # Input SeparateFilesPerFrame = attr.ib(default=None) VRayEngine = attr.ib(default="V-Ray") @@ -69,7 +69,7 @@ class VRayPluginInfo: @attr.s -class ArnoldPluginInfo: +class ArnoldPluginInfo(): ArnoldFile = attr.ib(default=None) From 935c8e0cdd343b0d4c245716b7a31140b6ed755a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 12 Oct 2022 11:22:57 +0200 Subject: [PATCH 1595/2550] added information about instance passed to 'get_subset_name' and 'get_dynamic_data' --- website/docs/dev_publishing.md | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/website/docs/dev_publishing.md b/website/docs/dev_publishing.md index f11a2c3047..7a6082a517 100644 --- a/website/docs/dev_publishing.md +++ b/website/docs/dev_publishing.md @@ -198,6 +198,37 @@ class RenderLayerCreator(Creator): - **`get_dynamic_data`** (method) - Can be used to extend data for subset templates which may be required in some cases. +Methods are used before instance creation and on instance subset name update. Update may require to have access to existing instance because dynamic data should be filled from there. 
Because of that, the instance is passed to `get_subset_name` and `get_dynamic_data` so the creator can handle those cases. + +This is one example where the subset name template may contain `"{layer}"`, which is filled during creation because the value is taken from the selection. In that case `get_dynamic_data` returns the value for `"layer"` -> `"{layer}"` so it can be filled during creation. But when the subset name of an already existing instance is updated, it should return the already existing value. Note: The creator must make sure the value is available on the instance. + +```python +from openpype.lib import prepare_template_data +from my_host import get_selected_layer + + +class SomeCreator(Creator): + def get_dynamic_data( + self, variant, task_name, asset_doc, project_name, host_name, instance + ): + # Before instance is created return unfilled key + # - the key will be filled during creation + if instance is None: + return {"layer": "{layer}"} + # Take value from existing instance + # - creator must know where to look for the value + return {"layer": instance.data["layer"]} + + def create(self, subset_name, instance_data, pre_create_data): + # Fill the layer name in + layer = get_selected_layer() + layer_name = layer["name"] + layer_fill_data = prepare_template_data({"layer": layer_name}) + subset_name = subset_name.format(**layer_fill_data) + instance_data["layer"] = layer_name + ... +``` + +#### *HiddenCreator* A creator which is not shown in the UI, so an artist can't trigger it directly, but it is available to other creators. This creator is primarily meant for cases when creation should produce different types of instances. For example during editorial publishing, where the input is a single EDL file but two or more kinds of instances should be created, each with a different family, attributes and abilities. Arguments for creation were limited to `instance_data` and `source_data`. The data in `instance_data` should follow what is sent to other creators, and `source_data` can be used to send custom data defined by the main creator. It is expected that a `HiddenCreator` has a specific main or "parent" creator.
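To illustrate the `HiddenCreator` contract described above, here is a minimal sketch of a hidden creator that a main editorial creator could trigger once per parsed clip. It assumes `HiddenCreator` and `CreatedInstance` are importable from `openpype.pipeline.create`; the class name and the `source_data` keys are illustrative only, and host-specific persistence methods (`collect_instances`, `update_instances`, `remove_instances`) are omitted for brevity.

```python
from openpype.pipeline.create import HiddenCreator, CreatedInstance


class EditorialShotCreator(HiddenCreator):
    """Hypothetical hidden creator triggered by a main editorial creator."""

    identifier = "editorial.shot.sketch"
    family = "shot"

    def create(self, instance_data, source_data):
        # 'source_data' is fully defined by the main ("parent") creator,
        # e.g. one clip parsed from the uploaded EDL (keys are illustrative).
        subset_name = source_data["subset_name"]
        instance_data["clipIn"] = source_data["clip_in"]
        instance_data["clipOut"] = source_data["clip_out"]

        # Wrap the data in a CreatedInstance and register it so it shows
        # up in the publisher like any other instance.
        instance = CreatedInstance(
            self.family, subset_name, instance_data, self
        )
        self._add_instance_to_context(instance)
        return instance
```

The main creator would then look this plugin up by its `identifier` through the create context and call its `create()` once per clip, passing its own `source_data`.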
From c7d2992b9d6cff3b9e939868639d2410e8b82067 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 12 Oct 2022 11:59:15 +0200 Subject: [PATCH 1596/2550] OP-3939 - fix wrong method to remove item --- .../plugins/publish/preintegrate_thumbnail_representation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py index 49e69ff34b..3ccaeed147 100644 --- a/openpype/plugins/publish/preintegrate_thumbnail_representation.py +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -62,7 +62,7 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): thumbnail_repre["tags"].append("delete") else: if "delete" in thumbnail_repre["tags"]: - thumbnail_repre["tags"].pop("delete") + thumbnail_repre["tags"].remove("delete") self.log.debug( "Thumbnail repre tags {}".format(thumbnail_repre["tags"])) From e0a222c75ea49c0e82d24120600137ddf3b5f3c1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 12 Oct 2022 12:13:20 +0200 Subject: [PATCH 1597/2550] modified remote qt controller --- openpype/tools/publisher/control_qt.py | 59 +++++++++++++------------- 1 file changed, 29 insertions(+), 30 deletions(-) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 51aeec65d1..edcbb0c9f0 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -187,7 +187,7 @@ class QtRemotePublishController(BasePublisherController): @abstractproperty def project_name(self): - """Current context project name. + """Current context project name from client. Returns: str: Name of project. @@ -197,7 +197,7 @@ class QtRemotePublishController(BasePublisherController): @abstractproperty def current_asset_name(self): - """Current context asset name. + """Current context asset name from client. Returns: Union[str, None]: Name of asset. @@ -207,7 +207,7 @@ class QtRemotePublishController(BasePublisherController): @abstractproperty def current_task_name(self): - """Current context task name. + """Current context task name from client. Returns: Union[str, None]: Name of task. @@ -215,19 +215,6 @@ class QtRemotePublishController(BasePublisherController): pass - @abstractproperty - def host_is_valid(self): - """Host is valid for creation part. - - Host must have implemented certain functionality to be able create - in Publisher tool. - - Returns: - bool: Host can handle creation of instances. - """ - - pass - @property def instances(self): """Collected/created instances. @@ -260,16 +247,6 @@ class QtRemotePublishController(BasePublisherController): def get_existing_subset_names(self, asset_name): pass - @abstractmethod - def reset(self): - """Reset whole controller. - - This should reset create context, publish context and all variables - that are related to it. 
- """ - - pass - @abstractmethod def get_subset_name( self, @@ -311,17 +288,26 @@ class QtRemotePublishController(BasePublisherController): pass - @abstractmethod - def save_changes(self): - """Save changes happened during creation.""" + def _get_instance_changes_for_client(self): + """Preimplemented method to receive instance changes for client.""" created_instance_changes = {} for instance_id, instance in self._created_instances.items(): created_instance_changes[instance_id] = ( instance.remote_changes() ) + return created_instance_changes - # Send 'created_instance_changes' value to client + @abstractmethod + def _send_instance_changes_to_client(self): + instance_changes = self._get_instance_changes_for_client() + # Implement to send 'instance_changes' value to client + + @abstractmethod + def save_changes(self): + """Save changes happened during creation.""" + + self._send_instance_changes_to_client() @abstractmethod def remove_instances(self, instance_ids): @@ -338,16 +324,29 @@ class QtRemotePublishController(BasePublisherController): def get_validation_errors(self): pass + @abstractmethod + def reset(self): + """Reset whole controller. + + This should reset create context, publish context and all variables + that are related to it. + """ + + self._send_instance_changes_to_client() + pass + @abstractmethod def publish(self): """Trigger publishing without any order limitations.""" + self._send_instance_changes_to_client() pass @abstractmethod def validate(self): """Trigger publishing which will stop after validation order.""" + self._send_instance_changes_to_client() pass @abstractmethod From 8c3ffcc5675561b0322edd1e83eac0d184456124 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 12 Oct 2022 12:21:57 +0200 Subject: [PATCH 1598/2550] added a docstring to remote controller --- openpype/tools/publisher/control_qt.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index edcbb0c9f0..ddc2dfa3e4 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -97,12 +97,24 @@ class QtPublisherController(PublisherController): class QtRemotePublishController(BasePublisherController): + """Abstract Remote controller for Qt UI. + + This controller should be used in process where UI is running and should + listen and ask for data on a client side. + + All objects that are used during UI processing should be able to convert + on client side to json serializable data and then recreated here. Keep in + mind that all changes made here should be send back to client controller + before critical actions. + + ATM Was not tested and will require some changes. All code written here is + based on theoretical idea how it could work. 
+ """ + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._created_instances = {} - self._main_thread_processor = MainThreadProcess() - self._main_thread_processor.start() @abstractmethod def _get_serialized_instances(self): @@ -114,9 +126,6 @@ class QtRemotePublishController(BasePublisherController): pass - def _process_main_thread_item(self, item): - self._main_thread_processor.add_item(item) - def _on_create_instance_change(self): serialized_instances = self._get_serialized_instances() From 7c3e5be1dc0c703f2239de67c4641b94d93528bc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 12 Oct 2022 13:31:33 +0200 Subject: [PATCH 1599/2550] OP-4180 - WIP - added new parent BaseAnatomy class Reason is to get info directly from project_doc, not from settings. --- openpype/pipeline/anatomy.py | 109 +++++++++++++++++++++++++---------- 1 file changed, 79 insertions(+), 30 deletions(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index cb6e07154b..f60a23b421 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -6,8 +6,18 @@ import collections import numbers import six +import time -from openpype.settings.lib import get_anatomy_settings +from openpype.settings.lib import ( + # get_anatomy_settings, + get_project_settings, + get_default_project_settings, + get_local_settings, + create_settings_handler, + apply_local_settings_on_anatomy_settings +) + +from openpype.client import get_project from openpype.lib.path_templates import ( TemplateUnsolved, TemplateResult, @@ -39,34 +49,21 @@ class RootCombinationError(Exception): super(RootCombinationError, self).__init__(msg) -class Anatomy: +class BaseAnatomy(object): """Anatomy module helps to keep project settings. Wraps key project specifications, AnatomyTemplates and Roots. - - Args: - project_name (str): Project name to look on overrides. """ - root_key_regex = re.compile(r"{(root?[^}]+)}") root_name_regex = re.compile(r"root\[([^]]+)\]") - def __init__(self, project_name=None, site_name=None): - if not project_name: - project_name = os.environ.get("AVALON_PROJECT") - - if not project_name: - raise ProjectNotSet(( - "Implementation bug: Project name is not set. Anatomy requires" - " to load data for specific project." - )) - + def __init__(self, project_doc, local_settings): + project_name = project_doc["name"] self.project_name = project_name self._data = self._prepare_anatomy_data( - get_anatomy_settings(project_name, site_name) + project_doc, local_settings ) - self._site_name = site_name self._templates_obj = AnatomyTemplates(self) self._roots_obj = Roots(self) @@ -87,12 +84,15 @@ class Anatomy: def items(self): return copy.deepcopy(self._data).items() - @staticmethod - def _prepare_anatomy_data(anatomy_data): + def _prepare_anatomy_data(self, project_doc, local_settings): """Prepare anatomy data for further processing. Method added to replace `{task}` with `{task[name]}` in templates. 
""" + project_name = project_doc["name"] + handler = create_settings_handler() + anatomy_data = handler.project_doc_to_anatomy_data(project_doc) + templates_data = anatomy_data.get("templates") if templates_data: # Replace `{task}` with `{task[name]}` in templates @@ -103,20 +103,16 @@ class Anatomy: if not isinstance(item, dict): continue - for key in tuple(item.keys()): - value = item[key] - if isinstance(value, dict): - value_queue.append(value) + apply_local_settings_on_anatomy_settings(anatomy_data, + local_settings, project_name) - elif isinstance(value, six.string_types): - item[key] = value.replace("{task}", "{task[name]}") - return anatomy_data + self._data = anatomy_data def reset(self): """Reset values of cached data in templates and roots objects.""" - self._data = self._prepare_anatomy_data( - get_anatomy_settings(self.project_name, self._site_name) - ) + # self._data = self._prepare_anatomy_data( + # get_anatomy_settings(self.project_name, self._site_name) + # ) self.templates_obj.reset() self.roots_obj.reset() @@ -339,6 +335,59 @@ class Anatomy: return rootless_path.format(**data) +class Anatomy(BaseAnatomy): + _project_cache = {} + + def __init__(self, project_name=None, site_name=None): + if not project_name: + project_name = os.environ.get("AVALON_PROJECT") + + if not project_name: + raise ProjectNotSet(( + "Implementation bug: Project name is not set. Anatomy requires" + " to load data for specific project." + )) + + self._site_name = site_name + project_info = self.get_project_data_and_cache(project_name, site_name) + + super(Anatomy, self).__init__( + project_info["project_doc"], + project_info["local_settings"] + ) + + @classmethod + def get_project_data_and_cache(cls, project_name, site_name): + project_info = cls._project_cache.get(project_name) + if project_info is not None: + if time.time() - project_info["start"] > 10: + cls._project_cache.pop(project_name) + project_info = None + + if project_info is None: + if site_name is None: + if project_name: + project_settings = get_project_settings(project_name) + else: + project_settings = get_default_project_settings() + site_name = ( + project_settings["global"] + ["sync_server"] + ["config"] + ["active_site"] + ) + + project_info = { + "project_doc": get_project(project_name), + "local_settings": get_local_settings(), + "site_name": site_name, + "start": time.time() + } + cls._project_cache[project_name] = project_info + + return project_info + + class AnatomyTemplateUnsolved(TemplateUnsolved): """Exception for unsolved template when strict is set to True.""" From df4f3d45aa6c48ee209845b2a35a773b189455e0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 12 Oct 2022 14:44:54 +0200 Subject: [PATCH 1600/2550] fix instances access in 'get_subset_name' --- openpype/tools/publisher/control.py | 5 ++++- openpype/tools/publisher/widgets/widgets.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 05b0bb39be..699b8843cc 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1820,9 +1820,12 @@ class PublisherController(BasePublisherController): creator = self._creators[creator_identifier] project_name = self.project_name asset_doc = self._asset_docs_cache.get_full_asset_by_name(asset_name) + instance = None + if instance_id: + instance = self.instances[instance_id] return creator.get_subset_name( - variant, task_name, asset_doc, project_name + variant, task_name, asset_doc, 
project_name, instance=instance ) def create( diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index edd9d55c75..536650e209 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1082,8 +1082,8 @@ class GlobalAttrsWidget(QtWidgets.QWidget): new_task_name, new_asset_name, instance.id, - instance=instance ) + except TaskNotSetError: invalid_tasks = True instance.set_task_invalid(True) From 64f9d98c53f746a180e54e3022abfe734cd78f97 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 12 Oct 2022 14:46:11 +0200 Subject: [PATCH 1601/2550] hound fix --- openpype/tools/publisher/control.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 699b8843cc..da320b1f39 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -652,18 +652,22 @@ class PublishValidationErrorsReport: Dict[str, Any]: Serialized data. """ + error_items = [ + item.to_data() + for item in self._error_items + ] + + plugin_action_items = { + plugin_id: [ + action_item.to_data() + for action_item in action_items + ] + for plugin_id, action_items in self._plugin_action_items.items() + } + return { - "error_items": [ - item.to_data() - for item in self._error_items - ], - "plugin_action_items": { - plugin_id: [ - action_item.to_data() - for action_item in action_items - ] - for plugin_id, action_items in self._plugin_action_items.items() - } + "error_items": error_items, + "plugin_action_items": plugin_action_items } @classmethod From 7bf51b760d55cfceb66e91c610e9afee9e98537c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 12 Oct 2022 15:49:36 +0200 Subject: [PATCH 1602/2550] Fix - add object to attr.s declaration Older Maya (Python2) doesn't like declaration of attr.s without (object) --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index a59b8f1a48..ff061c6957 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -48,7 +48,7 @@ def _validate_deadline_bool_value(instance, attribute, value): @attr.s -class MayaPluginInfo: +class MayaPluginInfo(object): SceneFile = attr.ib(default=None) # Input OutputFilePath = attr.ib(default=None) # Output directory and filename OutputFilePrefix = attr.ib(default=None) @@ -63,7 +63,7 @@ class MayaPluginInfo: @attr.s -class PythonPluginInfo: +class PythonPluginInfo(object): ScriptFile = attr.ib() Version = attr.ib(default="3.6") Arguments = attr.ib(default=None) @@ -71,7 +71,7 @@ class PythonPluginInfo: @attr.s -class VRayPluginInfo: +class VRayPluginInfo(object): InputFilename = attr.ib(default=None) # Input SeparateFilesPerFrame = attr.ib(default=None) VRayEngine = attr.ib(default="V-Ray") @@ -82,7 +82,7 @@ class VRayPluginInfo: @attr.s -class ArnoldPluginInfo: +class ArnoldPluginInfo(object): ArnoldFile = attr.ib(default=None) From e34c2a75789a21ce40babfa4d93c5c72f5af9e32 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 12 Oct 2022 16:11:19 +0200 Subject: [PATCH 1603/2550] force env changes in maya --- openpype/hosts/maya/addon.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git 
a/openpype/hosts/maya/addon.py b/openpype/hosts/maya/addon.py index 16d8ae5cd6..cdd2bc1667 100644 --- a/openpype/hosts/maya/addon.py +++ b/openpype/hosts/maya/addon.py @@ -28,17 +28,16 @@ class MayaAddon(OpenPypeModule, IHostAddon): env["PYTHONPATH"] = os.pathsep.join(new_python_paths) - # Set default values if are not already set via settings - defaults = { + # Set default environments + envs = { "OPENPYPE_LOG_NO_COLORS": "Yes", # For python module 'qtpy' "QT_API": "PySide2", # For python module 'Qt' "QT_PREFERRED_BINDING": "PySide2" } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value + for key, value in envs.items(): + env[key] = value def get_launch_hook_paths(self, app): if app.host_name != self.host_name: From 0d44dbc6c49ce80ddf9d8b4359dd5bdc2eafd9e8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 12 Oct 2022 17:46:50 +0200 Subject: [PATCH 1604/2550] OP-3939 - changed label of plugin --- .../plugins/publish/preintegrate_thumbnail_representation.py | 2 +- .../schemas/projects_schema/schemas/schema_global_publish.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py index d38f083876..5d3ea5654f 100644 --- a/openpype/plugins/publish/preintegrate_thumbnail_representation.py +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -19,7 +19,7 @@ from openpype.lib.profiles_filtering import filter_profiles class PreIntegrateThumbnails(pyblish.api.InstancePlugin): """Marks thumbnail representation for integrate to DB or not.""" - label = "Should Integrate Thumbnails" + label = "Override Integrate Thumbnails" order = pyblish.api.IntegratorOrder - 0.1 families = ["review"] diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index c216d5fd9e..773dea1229 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -559,7 +559,7 @@ "type": "dict", "collapsible": true, "key": "PreIntegrateThumbnails", - "label": "Integrate Thumbnail Representations", + "label": "Override Integrate Thumbnail Representations", "is_group": true, "checkbox_key": "enabled", "children": [ From bfbf2a26a9b1a28e91a552cdbcf41cd6b5c24613 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 12 Oct 2022 18:22:57 +0200 Subject: [PATCH 1605/2550] OP-3939 - changed label of plugin --- .../plugins/publish/preintegrate_thumbnail_representation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py index 5d3ea5654f..f9e23223e6 100644 --- a/openpype/plugins/publish/preintegrate_thumbnail_representation.py +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -19,7 +19,7 @@ from openpype.lib.profiles_filtering import filter_profiles class PreIntegrateThumbnails(pyblish.api.InstancePlugin): """Marks thumbnail representation for integrate to DB or not.""" - label = "Override Integrate Thumbnails" + label = "Override Integrate Thumbnail Representations" order = pyblish.api.IntegratorOrder - 0.1 families = ["review"] From 77f8a8ad0c36e2075a70132f963e8248f2a3407e Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 12 Oct 2022 19:26:23 +0200 Subject: [PATCH 1606/2550] Fix: Inventory action path wrong functions leads to errors --- openpype/pipeline/context_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 00fe353208..af0ee79f47 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -30,7 +30,7 @@ from .workfile import ( from . import ( legacy_io, register_loader_plugin_path, - register_inventory_action, + register_inventory_action_path, register_creator_plugin_path, deregister_loader_plugin_path, ) @@ -197,7 +197,7 @@ def install_openpype_plugins(project_name=None, host_name=None): pyblish.api.register_plugin_path(path) register_loader_plugin_path(path) register_creator_plugin_path(path) - register_inventory_action(path) + register_inventory_action_path(path) def uninstall_host(): From 97354e78ac1ca99523f07e6ad11b4b8477573dba Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 12 Oct 2022 20:30:56 +0200 Subject: [PATCH 1607/2550] Preserve existing subset group if instance does not set it for new version --- openpype/plugins/publish/integrate.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 8972e6ab70..0998e643e6 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -418,6 +418,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): subset_group = instance.data.get("subsetGroup") if subset_group: data["subsetGroup"] = subset_group + elif existing_subset_doc: + # Preserve previous subset group if new version does not set it + if "subsetGroup" in existing_subset_doc.get("data", {}): + subset_group = existing_subset_doc["data"]["subsetGroup"] + data["subsetGroup"] = subset_group subset_id = None if existing_subset_doc: From 8af0a486251b129f92addab10cc80fe0da4f318d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 13 Oct 2022 10:40:05 +0200 Subject: [PATCH 1608/2550] removed unused openpype.api imports --- .../blender/plugins/publish/validate_camera_zero_keyframe.py | 2 +- .../hosts/blender/plugins/publish/validate_no_colons_in_name.py | 2 +- .../hosts/blender/plugins/publish/validate_transform_zero.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py index 9ac0561ff3..84b9dd1a6e 100644 --- a/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py +++ b/openpype/hosts/blender/plugins/publish/validate_camera_zero_keyframe.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -import openpype.api + import openpype.hosts.blender.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py index 3d7c5294f6..f5dc9fdd5c 100644 --- a/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -import openpype.api + import openpype.hosts.blender.api.action from openpype.pipeline.publish import ValidateContentsOrder diff --git 
a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py index 249b14743b..742826d3d9 100644 --- a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -4,7 +4,7 @@ import mathutils import bpy import pyblish.api -import openpype.api + import openpype.hosts.blender.api.action from openpype.pipeline.publish import ValidateContentsOrder From 94baf5a746cf8e30a57b0a37b1e27f5eb12b22cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 13 Oct 2022 10:40:19 +0200 Subject: [PATCH 1609/2550] change import of 'ValidateContentsOrder' --- .../hosts/blender/plugins/publish/validate_mesh_has_uv.py | 5 +++-- .../plugins/publish/validate_mesh_no_negative_scale.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py index 83146c641e..cee855671d 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_has_uv.py @@ -3,14 +3,15 @@ from typing import List import bpy import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.blender.api.action class ValidateMeshHasUvs(pyblish.api.InstancePlugin): """Validate that the current mesh has UV's.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] category = "geometry" diff --git a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 329a8d80c3..45ac08811d 100644 --- a/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/openpype/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -3,14 +3,15 @@ from typing import List import bpy import pyblish.api -import openpype.api + +from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.blender.api.action class ValidateMeshNoNegativeScale(pyblish.api.Validator): """Ensure that meshes don't have a negative scale.""" - order = openpype.api.ValidateContentsOrder + order = ValidateContentsOrder hosts = ["blender"] families = ["model"] category = "geometry" From 39747041e8a417c0eaeadbc6ded439cc2eef245b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 13 Oct 2022 11:13:15 +0200 Subject: [PATCH 1610/2550] fix import of 'get_subset_name' in legacy creator --- openpype/pipeline/create/legacy_create.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/create/legacy_create.py b/openpype/pipeline/create/legacy_create.py index 2764b3cb95..82e5de7a8f 100644 --- a/openpype/pipeline/create/legacy_create.py +++ b/openpype/pipeline/create/legacy_create.py @@ -9,7 +9,9 @@ import os import logging import collections -from openpype.lib import get_subset_name +from openpype.client import get_asset_by_id + +from .subset_name import get_subset_name class LegacyCreator(object): @@ -147,11 +149,15 @@ class LegacyCreator(object): variant, task_name, asset_id, project_name, host_name ) + asset_doc = get_asset_by_id( + project_name, asset_id, fields=["data.tasks"] + ) + return get_subset_name( cls.family, variant, task_name, - asset_id, + asset_doc, project_name, host_name, dynamic_data=dynamic_data From 
71f48425bff3ce0fe51620c8c3ac138672633f99 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 13 Oct 2022 11:29:57 +0100 Subject: [PATCH 1611/2550] Fix format string for Python 2 --- openpype/hosts/maya/plugins/publish/extract_layout.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 92ca6c883f..48edbe547a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -34,14 +34,15 @@ class ExtractLayout(openpype.api.Extractor): for asset in cmds.sets(str(instance), query=True): # Find the container grp_name = asset.split(':')[0] - containers = cmds.ls(f"{grp_name}*_CON") + containers = cmds.ls("{}*_CON".format(grp_name)) assert len(containers) == 1, \ - f"More than one container found for {asset}" + "More than one container found for {}".format(asset) container = containers[0] - representation_id = cmds.getAttr(f"{container}.representation") + representation_id = cmds.getAttr( + "{}.representation".format(container)) representation = legacy_io.find_one( { @@ -56,7 +57,8 @@ class ExtractLayout(openpype.api.Extractor): json_element = { "family": family, - "instance_name": cmds.getAttr(f"{container}.namespace"), + "instance_name": cmds.getAttr( + "{}.namespace".format(container)), "representation": str(representation_id), "version": str(version_id) } From 6f9ab9bfb956600b775747ba39f58421d28a46a9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 13 Oct 2022 13:34:01 +0200 Subject: [PATCH 1612/2550] OP-4180 - updated Anatomy Cache split into project and site. Internalized 2 methods from to be abandoned classes. --- openpype/pipeline/anatomy.py | 223 ++++++++++++++++++++++++++++------- 1 file changed, 178 insertions(+), 45 deletions(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index f60a23b421..0c6ce454d7 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -9,13 +9,13 @@ import six import time from openpype.settings.lib import ( - # get_anatomy_settings, get_project_settings, - get_default_project_settings, get_local_settings, - create_settings_handler, - apply_local_settings_on_anatomy_settings ) +from openpype.settings.constants import ( + DEFAULT_PROJECT_KEY +) +from openpype.settings import ProjectSettings from openpype.client import get_project from openpype.lib.path_templates import ( @@ -57,12 +57,12 @@ class BaseAnatomy(object): root_key_regex = re.compile(r"{(root?[^}]+)}") root_name_regex = re.compile(r"root\[([^]]+)\]") - def __init__(self, project_doc, local_settings): + def __init__(self, project_doc, local_settings, site_name): project_name = project_doc["name"] self.project_name = project_name self._data = self._prepare_anatomy_data( - project_doc, local_settings + project_doc, local_settings, site_name ) self._templates_obj = AnatomyTemplates(self) self._roots_obj = Roots(self) @@ -84,14 +84,13 @@ class BaseAnatomy(object): def items(self): return copy.deepcopy(self._data).items() - def _prepare_anatomy_data(self, project_doc, local_settings): + def _prepare_anatomy_data(self, project_doc, local_settings, site_name): """Prepare anatomy data for further processing. Method added to replace `{task}` with `{task[name]}` in templates. 
""" project_name = project_doc["name"] - handler = create_settings_handler() - anatomy_data = handler.project_doc_to_anatomy_data(project_doc) + anatomy_data = self._project_doc_to_anatomy_data(project_doc) templates_data = anatomy_data.get("templates") if templates_data: @@ -103,19 +102,13 @@ class BaseAnatomy(object): if not isinstance(item, dict): continue - apply_local_settings_on_anatomy_settings(anatomy_data, - local_settings, project_name) + self._apply_local_settings_on_anatomy_data(anatomy_data, + local_settings, + project_name, + site_name) self._data = anatomy_data - def reset(self): - """Reset values of cached data in templates and roots objects.""" - # self._data = self._prepare_anatomy_data( - # get_anatomy_settings(self.project_name, self._site_name) - # ) - self.templates_obj.reset() - self.roots_obj.reset() - @property def templates(self): """Wrap property `templates` of Anatomy's AnatomyTemplates instance.""" @@ -334,9 +327,118 @@ class BaseAnatomy(object): data = self.root_environmets_fill_data(template) return rootless_path.format(**data) + def _project_doc_to_anatomy_data(self, project_doc): + """Convert project document to anatomy data. + + Probably should fill missing keys and values. + """ + if not project_doc: + return {} + + project_settings_root = ProjectSettings( + project_doc["name"], reset=False, change_state=False + ) + anatomy_entity = project_settings_root["project_anatomy"] + anatomy_keys = set(anatomy_entity.keys()) + anatomy_keys.remove("attributes") + attribute_keys = set(anatomy_entity["attributes"].keys()) + + attributes = {} + project_doc_data = project_doc.get("data") or {} + for key in attribute_keys: + value = project_doc_data.get(key) + if value is not None: + attributes[key] = value + + project_doc_config = project_doc.get("config") or {} + + app_names = set() + if not project_doc_config or "apps" not in project_doc_config: + set_applications = False + else: + set_applications = True + for app_item in project_doc_config["apps"]: + if not app_item: + continue + app_name = app_item.get("name") + if app_name: + app_names.add(app_name) + + if set_applications: + attributes["applications"] = list(app_names) + + output = {"attributes": attributes} + for key in anatomy_keys: + value = project_doc_config.get(key) + if value is not None: + output[key] = value + + return output + + def _apply_local_settings_on_anatomy_data( + self, anatomy_data, local_settings, project_name, site_name + ): + """Apply local settings on anatomy data. + + ATM local settings can modify project roots. Project name is required + as local settings have data stored data by project's name. + + Local settings override root values in this order: + 1.) Check if local settings contain overrides for default project and + apply it's values on roots if there are any. + 2.) If passed `project_name` is not None then check project specific + overrides in local settings for the project and apply it's value on + roots if there are any. + + NOTE: Root values of default project from local settings are always + applied if are set. + + Args: + anatomy_data (dict): Data for anatomy. + local_settings (dict): Data of local settings. + project_name (str): Name of project for which anatomy data are. 
+ """ + if not local_settings: + return + + local_project_settings = local_settings.get("projects") or {} + + # Check for roots existence in local settings first + roots_project_locals = ( + local_project_settings + .get(project_name, {}) + ) + roots_default_locals = ( + local_project_settings + .get(DEFAULT_PROJECT_KEY, {}) + ) + + # Skip rest of processing if roots are not set + if not roots_project_locals and not roots_default_locals: + return + + # Combine roots from local settings + roots_locals = roots_default_locals.get(site_name) or {} + roots_locals.update(roots_project_locals.get(site_name) or {}) + # Skip processing if roots for current active site are not available in + # local settings + if not roots_locals: + return + + current_platform = platform.system().lower() + + root_data = anatomy_data["roots"] + for root_name, path in roots_locals.items(): + if root_name not in root_data: + continue + anatomy_data["roots"][root_name][current_platform] = ( + path + ) + class Anatomy(BaseAnatomy): _project_cache = {} + _site_cache = {} def __init__(self, project_name=None, site_name=None): if not project_name: @@ -349,43 +451,74 @@ class Anatomy(BaseAnatomy): )) self._site_name = site_name - project_info = self.get_project_data_and_cache(project_name, site_name) + project_doc = self.get_project_doc_from_cache(project_name) + local_settings = get_local_settings() + if not site_name: + site_name = self.get_site_name_from_cache( + project_name, local_settings + ) super(Anatomy, self).__init__( - project_info["project_doc"], - project_info["local_settings"] + project_doc, + local_settings, + site_name ) @classmethod - def get_project_data_and_cache(cls, project_name, site_name): - project_info = cls._project_cache.get(project_name) - if project_info is not None: - if time.time() - project_info["start"] > 10: + def get_project_doc_from_cache(cls, project_name): + project_cache = cls._project_cache.get(project_name) + if project_cache is not None: + if time.time() - project_cache["start"] > 10: cls._project_cache.pop(project_name) - project_info = None + project_cache = None - if project_info is None: - if site_name is None: - if project_name: - project_settings = get_project_settings(project_name) - else: - project_settings = get_default_project_settings() - site_name = ( - project_settings["global"] - ["sync_server"] - ["config"] - ["active_site"] - ) - - project_info = { + if project_cache is None: + project_cache = { "project_doc": get_project(project_name), - "local_settings": get_local_settings(), - "site_name": site_name, "start": time.time() } - cls._project_cache[project_name] = project_info + cls._project_cache[project_name] = project_cache - return project_info + return copy.deepcopy( + cls._project_cache[project_name]["project_doc"] + ) + + @classmethod + def get_site_name_from_cache(cls, project_name, local_settings): + site_cache = cls._site_cache.get(project_name) + if site_cache is not None: + if time.time() - site_cache["start"] > 10: + cls._site_cache.pop(project_name) + site_cache = None + + if site_cache: + return site_cache["site_name"] + + local_project_settings = local_settings.get("projects") + if not local_project_settings: + return + + project_locals = local_project_settings.get(project_name) or {} + default_locals = local_project_settings.get(DEFAULT_PROJECT_KEY) or {} + active_site = ( + project_locals.get("active_site") + or default_locals.get("active_site") + ) + if not active_site: + project_settings = get_project_settings(project_name) + active_site = ( + 
project_settings + ["global"] + ["sync_server"] + ["config"] + ["active_site"] + ) + + cls._site_cache[project_name] = { + "site_name": active_site, + "start": time.time() + } + return active_site class AnatomyTemplateUnsolved(TemplateUnsolved): From 510326b92c1d37838b8463c2c6bfe7ac66d3bef6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 13 Oct 2022 15:21:43 +0200 Subject: [PATCH 1613/2550] hack py2 ftrack api to store schema to user's directory instead of temp dir --- .../ftrack-python-api/source/ftrack_api/session.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py b/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py index 1a5da44432..78f9d135b7 100644 --- a/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py +++ b/openpype/modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py @@ -13,10 +13,9 @@ import functools import itertools import distutils.version import hashlib -import tempfile +import appdirs import threading import atexit -import warnings import requests import requests.auth @@ -241,7 +240,7 @@ class Session(object): ) self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): + if auto_connect_event_hub is True: # Connect to event hub in background thread so as not to block main # session usage waiting for event hub connection. self._auto_connect_event_hub_thread = threading.Thread( @@ -252,9 +251,7 @@ class Session(object): # To help with migration from auto_connect_event_hub default changing # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) + self._event_hub._deprecation_warning_auto_connect = False # Register to auto-close session on exit. atexit.register(WeakMethod(self.close)) @@ -271,8 +268,9 @@ class Session(object): # rebuilding types)? if schema_cache_path is not False: if schema_cache_path is None: + schema_cache_path = appdirs.user_cache_dir() schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + 'FTRACK_API_SCHEMA_CACHE_PATH', schema_cache_path ) schema_cache_path = os.path.join( From 543d22ffc76eed10874f8c9cc8d516b369aed0b3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 13 Oct 2022 16:25:29 +0200 Subject: [PATCH 1614/2550] :bug: fix token for non-multipart outputs and unify variable names --- openpype/hosts/maya/api/lib_renderproducts.py | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 1ab771cfe6..cd204445b7 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -260,20 +260,20 @@ class ARenderProducts: """ try: - file_prefix_attr = IMAGE_PREFIXES[self.renderer] + prefix_attr = IMAGE_PREFIXES[self.renderer] except KeyError: raise UnsupportedRendererException( "Unsupported renderer {}".format(self.renderer) ) - file_prefix = self._get_attr(file_prefix_attr) + prefix = self._get_attr(prefix_attr) - if not file_prefix: + if not prefix: # Fall back to scene name by default log.debug("Image prefix not set, using ") file_prefix = "" - return file_prefix + return prefix def get_render_attribute(self, attribute): """Get attribute from render options. 
@@ -730,13 +730,16 @@ class RenderProductsVray(ARenderProducts): """Get image prefix for V-Ray. This overrides :func:`ARenderProducts.get_renderer_prefix()` as - we must add `` token manually. + we must add `` token manually. This is done only for + non-multipart outputs, where `` token doesn't make sense. See also: :func:`ARenderProducts.get_renderer_prefix()` """ prefix = super(RenderProductsVray, self).get_renderer_prefix() + if self.multipart: + return prefix aov_separator = self._get_aov_separator() prefix = "{}{}".format(prefix, aov_separator) return prefix @@ -974,15 +977,18 @@ class RenderProductsRedshift(ARenderProducts): """Get image prefix for Redshift. This overrides :func:`ARenderProducts.get_renderer_prefix()` as - we must add `` token manually. + we must add `` token manually. This is done only for + non-multipart outputs, where `` token doesn't make sense. See also: :func:`ARenderProducts.get_renderer_prefix()` """ - file_prefix = super(RenderProductsRedshift, self).get_renderer_prefix() - separator = self.extract_separator(file_prefix) - prefix = "{}{}".format(file_prefix, separator or "_") + prefix = super(RenderProductsRedshift, self).get_renderer_prefix() + if self.multipart: + return prefix + separator = self.extract_separator(prefix) + prefix = "{}{}".format(prefix, separator or "_") return prefix def get_render_products(self): From 91d3056534bc30190d5d5f31977afd0724635016 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Albert?= <89812691+sebasti1a@users.noreply.github.com> Date: Thu, 13 Oct 2022 16:48:29 +0200 Subject: [PATCH 1615/2550] Update openpype/modules/kitsu/actions/launcher_show_in_kitsu.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/kitsu/actions/launcher_show_in_kitsu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index ab523876ed..c95079e042 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -37,7 +37,7 @@ class ShowInKitsu(LauncherAction): project_zou_id = project["data"].get("zou_id") if not project_zou_id: raise RuntimeError(f"Project {project_name} has no " - f"connected ftrack id.") + f"connected kitsu id.") asset_zou_name = None asset_zou_id = None From 088e442214ce82155c39713485af1e1fbf2ad0b4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 13 Oct 2022 18:35:15 +0200 Subject: [PATCH 1616/2550] OP-4180 - fix missed return --- openpype/pipeline/anatomy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 0c6ce454d7..437a03f898 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -107,7 +107,7 @@ class BaseAnatomy(object): project_name, site_name) - self._data = anatomy_data + return anatomy_data @property def templates(self): From 840792a82c2c443dee77b4e8ec02ce6ec72b1e70 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 13 Oct 2022 19:14:48 +0200 Subject: [PATCH 1617/2550] added mechanism to define custom paths to ffmpeg and oiio tools and more detailed validation of them --- openpype/lib/vendor_bin_utils.py | 220 +++++++++++++++++++++++++++++-- 1 file changed, 212 insertions(+), 8 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index e5ab2872a0..31245d4ee4 100644 --- a/openpype/lib/vendor_bin_utils.py +++ 
b/openpype/lib/vendor_bin_utils.py
@@ -1,10 +1,33 @@
 import os
 import logging
 import platform
+import subprocess
 
 log = logging.getLogger("Vendor utils")
 
 
+class CachedToolPaths:
+    """Cache already used and discovered tools and their executables.
+
+    Discovering a path can take some time and can trigger subprocesses so
+    it's better to cache the paths on first get.
+    """
+
+    _cached_paths = {}
+
+    @classmethod
+    def is_tool_cached(cls, tool):
+        return tool in cls._cached_paths
+
+    @classmethod
+    def get_executable_path(cls, tool):
+        return cls._cached_paths.get(tool)
+
+    @classmethod
+    def cache_executable_path(cls, tool, path):
+        cls._cached_paths[tool] = path
+
+
 def is_file_executable(filepath):
     """Filepath lead to executable file.
 
@@ -98,6 +121,7 @@ def get_vendor_bin_path(bin_app):
     Returns:
         str: Path to vendorized binaries folder.
     """
+
     return os.path.join(
         os.environ["OPENPYPE_ROOT"],
         "vendor",
@@ -107,6 +131,112 @@ def get_vendor_bin_path(bin_app):
     )
 
 
+def find_tool_in_custom_paths(paths, tool, validation_func=None):
+    """Find a tool executable in custom paths.
+
+    Args:
+        paths (Iterable[str]): Iterable of paths where to look for tool.
+        tool (str): Name of tool (binary file) to find in passed paths.
+        validation_func (Function): Custom validation function of path.
+            Function must expect one argument which is path to executable.
+            If not passed, only 'find_executable' is used to identify
+            if the path is valid.
+
+    Returns:
+        Union[str, None]: Path to validated executable or None if was not
+            found.
+    """
+
+    for path in paths:
+        # Skip empty strings
+        if not path:
+            continue
+
+        # Handle cases when path is just an executable
+        #   - it allows to use executable from PATH
+        #   - basename must match 'tool' value (without extension)
+        extless_path, ext = os.path.splitext(path)
+        if extless_path == tool:
+            executable_path = find_executable(tool)
+            if executable_path and (
+                validation_func is None
+                or validation_func(executable_path)
+            ):
+                return executable_path
+            continue
+
+        # Normalize path because it should be a path and check if exists
+        normalized = os.path.normpath(path)
+        if not os.path.exists(normalized):
+            continue
+
+        # Note: Path can be both file and directory
+
+        # If path is a file validate it
+        if os.path.isfile(normalized):
+            basename, ext = os.path.splitext(os.path.basename(path))
+            # Check if the filename actually has the same name as 'tool'
+            if basename == tool:
+                executable_path = find_executable(normalized)
+                if executable_path and (
+                    validation_func is None
+                    or validation_func(executable_path)
+                ):
+                    return executable_path
+
+        # Check if path is a directory and look for tool inside the dir
+        if os.path.isdir(normalized):
+            executable_path = find_executable(os.path.join(normalized, tool))
+            if executable_path and (
+                validation_func is None
+                or validation_func(executable_path)
+            ):
+                return executable_path
+    return None
+
+
+def _oiio_executable_validation(filepath):
+    """Validate that the oiio tool executable can be executed.
+
+    Validation has 2 steps. The first uses 'find_executable' to fill a
+    possibly missing extension or directory, then the executable is launched
+    to validate that it can be executed. For that the '--help' argument is
+    used, which is fast and does not need any other inputs.
+
+    Any possible crash from missing libraries or an invalid build should be
+    caught.
+
+    The main reason is to validate that the executable can actually run on
+    the current OS, which can be an issue on linux machines.
+
+    Note:
+        It does not validate if the executable is really an oiio tool which
+        should be used.
+
+    Args:
+        filepath (str): Path to executable.
+
+    Returns:
+        bool: Filepath is valid executable.
+    """
+
+    filepath = find_executable(filepath)
+    if not filepath:
+        return False
+
+    try:
+        proc = subprocess.Popen(
+            [filepath, "--help"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        proc.wait()
+        return proc.returncode == 0
+
+    except Exception:
+        pass
+    return False
+
+
 def get_oiio_tools_path(tool="oiiotool"):
     """Path to vendorized OpenImageIO tool executables.
 
@@ -117,10 +247,67 @@ def get_oiio_tools_path(tool="oiiotool"):
         Default is "oiiotool".
     """
-    oiio_dir = get_vendor_bin_path("oiio")
-    if platform.system().lower() == "linux":
-        oiio_dir = os.path.join(oiio_dir, "bin")
-    return find_executable(os.path.join(oiio_dir, tool))
+    if CachedToolPaths.is_tool_cached(tool):
+        return CachedToolPaths.get_executable_path(tool)
+
+    custom_paths_str = os.environ.get("OPENPYPE_CUSTOM_OIIO_PATHS") or ""
+    tool_executable_path = find_tool_in_custom_paths(
+        custom_paths_str.split(os.pathsep),
+        tool,
+        _oiio_executable_validation
+    )
+
+    if not tool_executable_path:
+        oiio_dir = get_vendor_bin_path("oiio")
+        if platform.system().lower() == "linux":
+            oiio_dir = os.path.join(oiio_dir, "bin")
+        default_path = os.path.join(oiio_dir, tool)
+        if _oiio_executable_validation(default_path):
+            tool_executable_path = default_path
+
+    CachedToolPaths.cache_executable_path(tool, tool_executable_path)
+    return tool_executable_path
+
+
+def _ffmpeg_executable_validation(filepath):
+    """Validate that the ffmpeg tool executable can be executed.
+
+    Validation has 2 steps. The first uses 'find_executable' to fill a
+    possibly missing extension or directory, then the executable is launched
+    to validate that it can be executed. For that the '-version' argument is
+    used, which is fast and does not need any other inputs.
+
+    Any possible crash from missing libraries or an invalid build should be
+    caught.
+
+    The main reason is to validate that the executable can actually run on
+    the current OS, which can be an issue on linux machines.
+
+    Note:
+        It does not validate if the executable is really an ffmpeg tool.
+
+    Args:
+        filepath (str): Path to executable.
+
+    Returns:
+        bool: Filepath is valid executable.
+    """
+
+    filepath = find_executable(filepath)
+    if not filepath:
+        return False
+
+    try:
+        proc = subprocess.Popen(
+            [filepath, "-version"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        proc.wait()
+        return proc.returncode == 0
+
+    except Exception:
+        pass
+    return False
 
 
 def get_ffmpeg_tool_path(tool="ffmpeg"):
@@ -133,10 +320,27 @@ def get_ffmpeg_tool_path(tool="ffmpeg"):
     Returns:
         str: Full path to ffmpeg executable.
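For reference, a minimal usage sketch of the custom tool lookup introduced above (the paths below are invented and OpenPype is assumed to be importable in the running interpreter):

    import os

    # Custom locations are read from an os.pathsep separated env var before
    # falling back to the vendorized binaries. Entries may be directories or
    # direct paths to an executable (hypothetical studio paths shown here).
    os.environ["OPENPYPE_CUSTOM_FFMPEG_PATHS"] = os.pathsep.join([
        "/opt/studio/ffmpeg/bin",
        "/opt/studio/ffmpeg/bin/ffmpeg",
    ])

    from openpype.lib.vendor_bin_utils import get_ffmpeg_tool_path

    # Each candidate is validated by launching it ("-version" for ffmpeg,
    # "--help" for oiio tools); the first working path is cached per tool.
    ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
    ffprobe_path = get_ffmpeg_tool_path("ffprobe")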
""" - ffmpeg_dir = get_vendor_bin_path("ffmpeg") - if platform.system().lower() == "windows": - ffmpeg_dir = os.path.join(ffmpeg_dir, "bin") - return find_executable(os.path.join(ffmpeg_dir, tool)) + + if CachedToolPaths.is_tool_cached(tool): + return CachedToolPaths.get_executable_path(tool) + + custom_paths_str = os.environ.get("OPENPYPE_CUSTOM_FFMPEG_PATHS") or "" + tool_executable_path = find_tool_in_custom_paths( + custom_paths_str.split(os.pathsep), + tool, + _ffmpeg_executable_validation + ) + + if not tool_executable_path: + ffmpeg_dir = get_vendor_bin_path("ffmpeg") + if platform.system().lower() == "windows": + ffmpeg_dir = os.path.join(ffmpeg_dir, "bin") + tool_path = find_executable(os.path.join(ffmpeg_dir, tool)) + if tool_path and _ffmpeg_executable_validation(tool_path): + tool_executable_path = tool_path + + CachedToolPaths.cache_executable_path(tool, tool_executable_path) + return tool_executable_path def is_oiio_supported(): From 8a28b714a3f67552e14853576d3fa6db0f6ecc4f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Oct 2022 20:14:57 +0200 Subject: [PATCH 1618/2550] fix usage of functions from lib --- openpype/hosts/resolve/api/plugin.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/resolve/api/plugin.py b/openpype/hosts/resolve/api/plugin.py index 3995077d21..0ed7beee59 100644 --- a/openpype/hosts/resolve/api/plugin.py +++ b/openpype/hosts/resolve/api/plugin.py @@ -4,13 +4,15 @@ import uuid import qargparse from Qt import QtWidgets, QtCore +from openpype.settings import get_current_project_settings +from openpype.pipeline.context_tools import get_current_project_asset from openpype.pipeline import ( LegacyCreator, LoaderPlugin, ) -from openpype.pipeline.context_tools import get_current_project_asset -from openpype.hosts import resolve + from . 
import lib +from .menu import load_stylesheet class CreatorWidget(QtWidgets.QDialog): @@ -86,7 +88,7 @@ class CreatorWidget(QtWidgets.QDialog): ok_btn.clicked.connect(self._on_ok_clicked) cancel_btn.clicked.connect(self._on_cancel_clicked) - stylesheet = resolve.api.menu.load_stylesheet() + stylesheet = load_stylesheet() self.setStyleSheet(stylesheet) def _on_ok_clicked(self): @@ -438,7 +440,7 @@ class ClipLoader: source_in = int(_clip_property("Start")) source_out = int(_clip_property("End")) - resolve.swap_clips( + lib.swap_clips( timeline_item, media_pool_item, source_in, @@ -504,7 +506,7 @@ class Creator(LegacyCreator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - from openpype.settings import get_current_project_settings + resolve_p_settings = get_current_project_settings().get("resolve") self.presets = {} if resolve_p_settings: @@ -512,13 +514,13 @@ class Creator(LegacyCreator): self.__class__.__name__, {}) # adding basic current context resolve objects - self.project = resolve.get_current_project() - self.timeline = resolve.get_current_timeline() + self.project = lib.get_current_project() + self.timeline = lib.get_current_timeline() if (self.options or {}).get("useSelection"): - self.selected = resolve.get_current_timeline_items(filter=True) + self.selected = lib.get_current_timeline_items(filter=True) else: - self.selected = resolve.get_current_timeline_items(filter=False) + self.selected = lib.get_current_timeline_items(filter=False) self.widget = CreatorWidget From e3c2bb5a5e4d8a91508e0cd0db9ea2727424d6ed Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 14 Oct 2022 09:46:14 +0200 Subject: [PATCH 1619/2550] added one more last check for executable --- openpype/lib/vendor_bin_utils.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 31245d4ee4..7b52341290 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -265,6 +265,12 @@ def get_oiio_tools_path(tool="oiiotool"): if _oiio_executable_validation(default_path): tool_executable_path = default_path + # Look to PATH for the tool + if not tool_executable_path: + from_path = find_executable(tool) + if from_path and _oiio_executable_validation(from_path): + tool_executable_path = from_path + CachedToolPaths.cache_executable_path(tool, tool_executable_path) return tool_executable_path @@ -339,6 +345,12 @@ def get_ffmpeg_tool_path(tool="ffmpeg"): if tool_path and _ffmpeg_executable_validation(tool_path): tool_executable_path = tool_path + # Look to PATH for the tool + if not tool_executable_path: + from_path = find_executable(tool) + if from_path and _oiio_executable_validation(from_path): + tool_executable_path = from_path + CachedToolPaths.cache_executable_path(tool, tool_executable_path) return tool_executable_path From 9955ffe95c90fe181740aa81fd21015a0b99caba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 14 Oct 2022 10:12:30 +0200 Subject: [PATCH 1620/2550] fix validation errors access --- .../tools/publisher/widgets/publish_frame.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index 0a04b2a665..c5685461a7 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -384,12 +384,10 @@ class PublishFrame(QtWidgets.QWidget): 
self._validate_btn.setEnabled(validate_enabled) self._publish_btn.setEnabled(publish_enabled) - error_msg = self._controller.publish_error_msg - validation_errors = self._controller.get_validation_errors() - if error_msg: - self._set_error_msg(error_msg) + if self._controller.publish_has_crashed: + self._set_error_msg() - elif validation_errors: + elif self._controller.publish_has_validation_errors: self._set_progress_visibility(False) self._set_validation_errors() @@ -411,16 +409,12 @@ class PublishFrame(QtWidgets.QWidget): self._set_success_property(-1) - def _set_error_msg(self, error_msg): - """Show error message to artist. - - Args: - error_msg (str): Message which is showed to artist. - """ + def _set_error_msg(self): + """Show error message to artist on publish crash.""" self._set_main_label("Error happened") - self._message_label_top.setText(error_msg) + self._message_label_top.setText(self._controller.publish_error_msg) self._set_success_property(0) From ca32556b1177ee0dd36d4f863c5b6e816d804e86 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 11:20:39 +0200 Subject: [PATCH 1621/2550] Fix - tags might be missing on representation --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 96f573fe25..53c6e69ac0 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -169,7 +169,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): thumbnail_item["thumbnail"] = True # Create copy of item before setting location - if "delete" not in repre["tags"]: + if "delete" not in repre.get("tags", []): src_components_to_add.append(copy.deepcopy(thumbnail_item)) # Create copy of first thumbnail if first_thumbnail_component is None: @@ -284,7 +284,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): not_first_components.append(review_item) # Create copy of item before setting location - if "delete" not in repre["tags"]: + if "delete" not in repre.get("tags", []): src_components_to_add.append(copy.deepcopy(review_item)) # Set location From f152445b73352df2f32434cb57f3744c6ad1ab19 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 12:40:00 +0200 Subject: [PATCH 1622/2550] resolve: fixing in callback import --- openpype/hosts/resolve/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index 1c8d9dc01c..899cb825bb 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -244,7 +244,7 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( instance, old_value, new_value)) - from openpype.hosts.resolve import ( + from openpype.hosts.resolve.api import ( set_publish_attribute ) From 04f657327db467574d13682b25e2a9c9f6f62d28 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 14:19:02 +0200 Subject: [PATCH 1623/2550] resolve: code improvements --- openpype/hosts/resolve/api/__init__.py | 7 +++---- openpype/hosts/resolve/api/preload_console.py | 2 +- openpype/hosts/resolve/utils.py | 3 ++- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/resolve/api/__init__.py 
b/openpype/hosts/resolve/api/__init__.py index cf1edb4c35..00a598548e 100644 --- a/openpype/hosts/resolve/api/__init__.py +++ b/openpype/hosts/resolve/api/__init__.py @@ -1,10 +1,6 @@ """ resolve api """ - -bmdvr = None -bmdvf = None - from .utils import ( get_resolve_module ) @@ -70,6 +66,9 @@ from .workio import ( from .testing_utils import TestGUI +bmdvr = None +bmdvf = None + __all__ = [ "bmdvr", "bmdvf", diff --git a/openpype/hosts/resolve/api/preload_console.py b/openpype/hosts/resolve/api/preload_console.py index a822ea2460..8b2b31fe1a 100644 --- a/openpype/hosts/resolve/api/preload_console.py +++ b/openpype/hosts/resolve/api/preload_console.py @@ -1,6 +1,6 @@ #!/usr/bin/env python import time -from openpype.hosts.resolve.utils import get_resolve_module +from openpype.hosts.resolve.api.utils import get_resolve_module from openpype.lib import Logger log = Logger.get_logger(__name__) diff --git a/openpype/hosts/resolve/utils.py b/openpype/hosts/resolve/utils.py index d5c133bbf5..5881f153ae 100644 --- a/openpype/hosts/resolve/utils.py +++ b/openpype/hosts/resolve/utils.py @@ -9,7 +9,8 @@ def setup(env): log = Logger.get_logger("ResolveSetup") scripts = {} us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR") - us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") + us_dir = env["RESOLVE_UTILITY_SCRIPTS_DIR"] + us_paths = [os.path.join( RESOLVE_ROOT_DIR, "utility_scripts" From db5311da87c45bc7850ff1bd2d90c669b87eaaf3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 14:19:42 +0200 Subject: [PATCH 1624/2550] resolve: transfering settings to prelaunch hook no need to expose environment variables --- .../hosts/resolve/hooks/pre_resolve_setup.py | 112 ++++++++++++++---- 1 file changed, 88 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index 1d977e2d8e..1a36715437 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,7 +1,8 @@ import os - +import platform from openpype.lib import PreLaunchHook from openpype.hosts.resolve.utils import setup +from openpype.hosts.resolve import api as rapi class ResolvePrelaunch(PreLaunchHook): @@ -14,35 +15,98 @@ class ResolvePrelaunch(PreLaunchHook): app_groups = ["resolve"] def execute(self): - # TODO: add OTIO installation from `openpype/requirements.py` - # making sure python 3.6 is installed at provided path - py36_dir = os.path.normpath( - self.launch_context.env.get("PYTHON36_RESOLVE", "")) - assert os.path.isdir(py36_dir), ( - "Python 3.6 is not installed at the provided folder path. Either " - "make sure the `environments\resolve.json` is having correctly " - "set `PYTHON36_RESOLVE` or make sure Python 3.6 is installed " - f"in given path. 
\nPYTHON36_RESOLVE: `{py36_dir}`" - ) - self.log.info(f"Path to Resolve Python folder: `{py36_dir}`...") + current_platform = platform.system().lower() + PROGRAMDATA = self.launch_context.env["PROGRAMDATA"] + RESOLVE_SCRIPT_API_ = { + "windows": ( + f"{PROGRAMDATA}/Blackmagic Design/" + "DaVinci Resolve/Support/Developer/Scripting" + ), + "darwin": ( + "/Library/Application Support/Blackmagic Design" + "/DaVinci Resolve/Developer/Scripting" + ), + "linux": "/opt/resolve/Developer/Scripting" + } + RESOLVE_SCRIPT_API = os.path.normpath( + RESOLVE_SCRIPT_API_[current_platform]) + self.launch_context.env["RESOLVE_SCRIPT_API"] = RESOLVE_SCRIPT_API + + RESOLVE_SCRIPT_LIB_ = { + "windows": ( + "C:/Program Files/Blackmagic Design" + "/DaVinci Resolve/fusionscript.dll" + ), + "darwin": ( + "/Applications/DaVinci Resolve/DaVinci Resolve.app" + "/Contents/Libraries/Fusion/fusionscript.so" + ), + "linux": "/opt/resolve/libs/Fusion/fusionscript.so" + } + RESOLVE_SCRIPT_LIB = os.path.normpath( + RESOLVE_SCRIPT_LIB_[current_platform]) + self.launch_context.env["RESOLVE_SCRIPT_LIB"] = RESOLVE_SCRIPT_LIB + + # TODO: add OTIO installation from `openpype/requirements.py` + # making sure python <3.9.* is installed at provided path + python3_home = os.path.normpath( + self.launch_context.env.get("RESOLVE_PYTHON3_HOME", "")) + + assert os.path.isdir(python3_home), ( + "Python 3 is not installed at the provided folder path. Either " + "make sure the `environments\resolve.json` is having correctly " + "set `RESOLVE_PYTHON3_HOME` or make sure Python 3 is installed " + f"in given path. \nRESOLVE_PYTHON3_HOME: `{python3_home}`" + ) + self.launch_context.env["PYTHONHOME"] = python3_home + self.log.info(f"Path to Resolve Python folder: `{python3_home}`...") + + # add to the python path to path + env_path = self.launch_context.env["PATH"] + self.launch_context.env["PATH"] = os.pathsep.join([ + python3_home, + os.path.join(python3_home, "Scripts") + ] + env_path.split(os.pathsep)) + + self.log.debug(f"PATH: {self.launch_context.env['PATH']}") + + # add to the PYTHONPATH + env_pythonpath = self.launch_context.env["PYTHONPATH"] + self.launch_context.env["PYTHONPATH"] = os.pathsep.join([ + os.path.join(python3_home, "Lib", "site-packages"), + os.path.join(RESOLVE_SCRIPT_API, "Modules"), + ] + env_pythonpath.split(os.pathsep)) + + self.log.debug(f"PYTHONPATH: {self.launch_context.env['PYTHONPATH']}") + + RESOLVE_UTILITY_SCRIPTS_DIR_ = { + "windows": ( + f"{PROGRAMDATA}/Blackmagic Design" + "/DaVinci Resolve/Fusion/Scripts/Comp" + ), + "darwin": ( + "/Library/Application Support/Blackmagic Design" + "/DaVinci Resolve/Fusion/Scripts/Comp" + ), + "linux": "/opt/resolve/Fusion/Scripts/Comp" + } + RESOLVE_UTILITY_SCRIPTS_DIR = os.path.normpath( + RESOLVE_UTILITY_SCRIPTS_DIR_[current_platform] + ) # setting utility scripts dir for scripts syncing - us_dir = os.path.normpath( - self.launch_context.env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "") - ) - assert os.path.isdir(us_dir), ( - "Resolve utility script dir does not exists. Either make sure " - "the `environments\resolve.json` is having correctly set " - "`RESOLVE_UTILITY_SCRIPTS_DIR` or reinstall DaVinci Resolve. 
\n" - f"RESOLVE_UTILITY_SCRIPTS_DIR: `{us_dir}`" - ) - self.log.debug(f"-- us_dir: `{us_dir}`") + self.launch_context.env["RESOLVE_UTILITY_SCRIPTS_DIR"] = ( + RESOLVE_UTILITY_SCRIPTS_DIR) # correctly format path for pre python script - pre_py_sc = os.path.normpath( - self.launch_context.env.get("PRE_PYTHON_SCRIPT", "")) + rapi_path = os.path.dirname(rapi.__file__) + pre_py_sc = os.path.join( + rapi_path, "preload_console.py") self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...") + # remove terminal coloring tags + self.launch_context.env["OPENPYPE_LOG_NO_COLORS"] = "True" + # Resolve Setup integration setup(self.launch_context.env) From 73fc3861f340b74e226ee56652d01d8355bcea70 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 14:54:31 +0200 Subject: [PATCH 1625/2550] resolve: reducing settings to minimal --- .../system_settings/applications.json | 34 ++----------------- 1 file changed, 2 insertions(+), 32 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index c37c3d299e..42eeb06191 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -782,41 +782,11 @@ "host_name": "resolve", "environment": { "RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR": [], - "RESOLVE_SCRIPT_API": { - "windows": "{PROGRAMDATA}/Blackmagic Design/DaVinci Resolve/Support/Developer/Scripting", - "darwin": "/Library/Application Support/Blackmagic Design/DaVinci Resolve/Developer/Scripting", - "linux": "/opt/resolve/Developer/Scripting" - }, - "RESOLVE_SCRIPT_LIB": { - "windows": "C:/Program Files/Blackmagic Design/DaVinci Resolve/fusionscript.dll", - "darwin": "/Applications/DaVinci Resolve/DaVinci Resolve.app/Contents/Libraries/Fusion/fusionscript.so", - "linux": "/opt/resolve/libs/Fusion/fusionscript.so" - }, - "RESOLVE_UTILITY_SCRIPTS_DIR": { - "windows": "{PROGRAMDATA}/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp", - "darwin": "/Library/Application Support/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp", - "linux": "/opt/resolve/Fusion/Scripts/Comp" - }, - "PYTHON36_RESOLVE": { + "RESOLVE_PYTHON3_HOME": { "windows": "{LOCALAPPDATA}/Programs/Python/Python36", "darwin": "~/Library/Python/3.6/bin", "linux": "/opt/Python/3.6/bin" - }, - "PYTHONPATH": [ - "{PYTHON36_RESOLVE}/Lib/site-packages", - "{VIRTUAL_ENV}/Lib/site-packages", - "{PYTHONPATH}", - "{RESOLVE_SCRIPT_API}/Modules", - "{PYTHONPATH}" - ], - "PATH": [ - "{PYTHON36_RESOLVE}", - "{PYTHON36_RESOLVE}/Scripts", - "{PATH}" - ], - "PRE_PYTHON_SCRIPT": "{OPENPYPE_REPOS_ROOT}/openpype/resolve/preload_console.py", - "OPENPYPE_LOG_NO_COLORS": "True", - "RESOLVE_DEV": "True" + } }, "variants": { "stable": { From 09b9ba8b83ec82ed66f4ef1043bc4d380fc3ccff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 15:10:32 +0200 Subject: [PATCH 1626/2550] resolve: removing unused code --- .../RESOLVE_API_README_v16.2.0_down.txt | 189 ------------------ openpype/hosts/resolve/api/preload_console.py | 31 --- .../hosts/resolve/hooks/pre_resolve_setup.py | 9 - 3 files changed, 229 deletions(-) delete mode 100644 openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_down.txt delete mode 100644 openpype/hosts/resolve/api/preload_console.py diff --git a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_down.txt b/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_down.txt deleted file mode 100644 index 
139b66bc24..0000000000 --- a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_down.txt +++ /dev/null @@ -1,189 +0,0 @@ -Updated as of 08 March 2019 - --------------------------- -In this package, you will find a brief introduction to the Scripting API for DaVinci Resolve Studio. Apart from this README.txt file, this package contains folders containing the basic import modules for scripting access (DaVinciResolve.py) and some representative examples. - -Overview --------- - -As with Blackmagic Design Fusion scripts, user scripts written in Lua and Python programming languages are supported. By default, scripts can be invoked from the Console window in the Fusion page, or via command line. This permission can be changed in Resolve Preferences, to be only from Console, or to be invoked from the local network. Please be aware of the security implications when allowing scripting access from outside of the Resolve application. - - -Using a script --------------- -DaVinci Resolve needs to be running for a script to be invoked. - -For a Resolve script to be executed from an external folder, the script needs to know of the API location. -You may need to set the these environment variables to allow for your Python installation to pick up the appropriate dependencies as shown below: - - Mac OS X: - RESOLVE_SCRIPT_API="/Library/Application Support/Blackmagic Design/DaVinci Resolve/Developer/Scripting/" - RESOLVE_SCRIPT_LIB="/Applications/DaVinci Resolve/DaVinci Resolve.app/Contents/Libraries/Fusion/fusionscript.so" - PYTHONPATH="$PYTHONPATH:$RESOLVE_SCRIPT_API/Modules/" - - Windows: - RESOLVE_SCRIPT_API="%PROGRAMDATA%\\Blackmagic Design\\DaVinci Resolve\\Support\\Developer\\Scripting\\" - RESOLVE_SCRIPT_LIB="C:\\Program Files\\Blackmagic Design\\DaVinci Resolve\\fusionscript.dll" - PYTHONPATH="%PYTHONPATH%;%RESOLVE_SCRIPT_API%\\Modules\\" - - Linux: - RESOLVE_SCRIPT_API="/opt/resolve/Developer/Scripting/" - RESOLVE_SCRIPT_LIB="/opt/resolve/libs/Fusion/fusionscript.so" - PYTHONPATH="$PYTHONPATH:$RESOLVE_SCRIPT_API/Modules/" - (Note: For standard ISO Linux installations, the path above may need to be modified to refer to /home/resolve instead of /opt/resolve) - -As with Fusion scripts, Resolve scripts can also be invoked via the menu and the Console. - -On startup, DaVinci Resolve scans the Utility Scripts directory and enumerates the scripts found in the Script application menu. Placing your script in this folder and invoking it from this menu is the easiest way to use scripts. The Utility Scripts folder is located in: - Mac OS X: /Library/Application Support/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp/ - Windows: %APPDATA%\Blackmagic Design\DaVinci Resolve\Fusion\Scripts\Comp\ - Linux: /opt/resolve/Fusion/Scripts/Comp/ (or /home/resolve/Fusion/Scripts/Comp/ depending on installation) - -The interactive Console window allows for an easy way to execute simple scripting commands, to query or modify properties, and to test scripts. The console accepts commands in Python 2.7, Python 3.6 and Lua and evaluates and executes them immediately. For more information on how to use the Console, please refer to the DaVinci Resolve User Manual. 
- -This example Python script creates a simple project: - #!/usr/bin/env python - import DaVinciResolveScript as dvr_script - resolve = dvr_script.scriptapp("Resolve") - fusion = resolve.Fusion() - projectManager = resolve.GetProjectManager() - projectManager.CreateProject("Hello World") - -The resolve object is the fundamental starting point for scripting via Resolve. As a native object, it can be inspected for further scriptable properties - using table iteration and `getmetatable` in Lua and dir, help etc in Python (among other methods). A notable scriptable object above is fusion - it allows access to all existing Fusion scripting functionality. - -Running DaVinci Resolve in headless mode ----------------------------------------- - -DaVinci Resolve can be launched in a headless mode without the user interface using the -nogui command line option. When DaVinci Resolve is launched using this option, the user interface is disabled. However, the various scripting APIs will continue to work as expected. - -Basic Resolve API ------------------ - -Some commonly used API functions are described below (*). As with the resolve object, each object is inspectable for properties and functions. - - -Resolve - Fusion() --> Fusion # Returns the Fusion object. Starting point for Fusion scripts. - GetMediaStorage() --> MediaStorage # Returns media storage object to query and act on media locations. - GetProjectManager() --> ProjectManager # Returns project manager object for currently open database. - OpenPage(pageName) --> None # Switches to indicated page in DaVinci Resolve. Input can be one of ("media", "edit", "fusion", "color", "fairlight", "deliver"). -ProjectManager - CreateProject(projectName) --> Project # Creates and returns a project if projectName (text) is unique, and None if it is not. - LoadProject(projectName) --> Project # Loads and returns the project with name = projectName (text) if there is a match found, and None if there is no matching Project. - GetCurrentProject() --> Project # Returns the currently loaded Resolve project. - SaveProject() --> Bool # Saves the currently loaded project with its own name. Returns True if successful. - CreateFolder(folderName) --> Bool # Creates a folder if folderName (text) is unique. - GetProjectsInCurrentFolder() --> [project names...] # Returns an array of project names in current folder. - GetFoldersInCurrentFolder() --> [folder names...] # Returns an array of folder names in current folder. - GotoRootFolder() --> Bool # Opens root folder in database. - GotoParentFolder() --> Bool # Opens parent folder of current folder in database if current folder has parent. - OpenFolder(folderName) --> Bool # Opens folder under given name. - ImportProject(filePath) --> Bool # Imports a project under given file path. Returns true in case of success. - ExportProject(projectName, filePath) --> Bool # Exports a project based on given name into provided file path. Returns true in case of success. - RestoreProject(filePath) --> Bool # Restores a project under given backup file path. Returns true in case of success. -Project - GetMediaPool() --> MediaPool # Returns the Media Pool object. - GetTimelineCount() --> int # Returns the number of timelines currently present in the project. - GetTimelineByIndex(idx) --> Timeline # Returns timeline at the given index, 1 <= idx <= project.GetTimelineCount() - GetCurrentTimeline() --> Timeline # Returns the currently loaded timeline. 
- SetCurrentTimeline(timeline) --> Bool # Sets given timeline as current timeline for the project. Returns True if successful. - GetName() --> string # Returns project name. - SetName(projectName) --> Bool # Sets project name if given projectname (text) is unique. - GetPresets() --> [presets...] # Returns a table of presets and their information. - SetPreset(presetName) --> Bool # Sets preset by given presetName (string) into project. - GetRenderJobs() --> [render jobs...] # Returns a table of render jobs and their information. - GetRenderPresets() --> [presets...] # Returns a table of render presets and their information. - StartRendering(index1, index2, ...) --> Bool # Starts rendering for given render jobs based on their indices. If no parameter is given rendering would start for all render jobs. - StartRendering([idxs...]) --> Bool # Starts rendering for given render jobs based on their indices. If no parameter is given rendering would start for all render jobs. - StopRendering() --> None # Stops rendering for all render jobs. - IsRenderingInProgress() --> Bool # Returns true is rendering is in progress. - AddRenderJob() --> Bool # Adds render job to render queue. - DeleteRenderJobByIndex(idx) --> Bool # Deletes render job based on given job index (int). - DeleteAllRenderJobs() --> Bool # Deletes all render jobs. - LoadRenderPreset(presetName) --> Bool # Sets a preset as current preset for rendering if presetName (text) exists. - SaveAsNewRenderPreset(presetName) --> Bool # Creates a new render preset by given name if presetName(text) is unique. - SetRenderSettings([settings map]) --> Bool # Sets given settings for rendering. Settings map is a map, keys of map are: "SelectAllFrames", "MarkIn", "MarkOut", "TargetDir", "CustomName". - GetRenderJobStatus(idx) --> [status info] # Returns job status and completion rendering percentage of the job by given job index (int). - GetSetting(settingName) --> string # Returns setting value by given settingName (string) if the setting exist. With empty settingName the function returns a full list of settings. - SetSetting(settingName, settingValue) --> Bool # Sets project setting base on given name (string) and value (string). - GetRenderFormats() --> [render formats...]# Returns a list of available render formats. - GetRenderCodecs(renderFormat) --> [render codecs...] # Returns a list of available codecs for given render format (string). - GetCurrentRenderFormatAndCodec() --> [format, codec] # Returns currently selected render format and render codec. - SetCurrentRenderFormatAndCodec(format, codec) --> Bool # Sets given render format (string) and render codec (string) as options for rendering. -MediaStorage - GetMountedVolumes() --> [paths...] # Returns an array of folder paths corresponding to mounted volumes displayed in Resolve’s Media Storage. - GetSubFolders(folderPath) --> [paths...] # Returns an array of folder paths in the given absolute folder path. - GetFiles(folderPath) --> [paths...] # Returns an array of media and file listings in the given absolute folder path. Note that media listings may be logically consolidated entries. - RevealInStorage(path) --> None # Expands and displays a given file/folder path in Resolve’s Media Storage. - AddItemsToMediaPool(item1, item2, ...) --> [clips...] # Adds specified file/folder paths from Media Store into current Media Pool folder. Input is one or more file/folder paths. - AddItemsToMediaPool([items...]) --> [clips...] 
# Adds specified file/folder paths from Media Store into current Media Pool folder. Input is an array of file/folder paths. -MediaPool - GetRootFolder() --> Folder # Returns the root Folder of Media Pool - AddSubFolder(folder, name) --> Folder # Adds a new subfolder under specified Folder object with the given name. - CreateEmptyTimeline(name) --> Timeline # Adds a new timeline with given name. - AppendToTimeline(clip1, clip2...) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - AppendToTimeline([clips]) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - CreateTimelineFromClips(name, clip1, clip2, ...)--> Timeline # Creates a new timeline with specified name, and appends the specified MediaPoolItem objects. - CreateTimelineFromClips(name, [clips]) --> Timeline # Creates a new timeline with specified name, and appends the specified MediaPoolItem objects. - ImportTimelineFromFile(filePath) --> Timeline # Creates timeline based on parameters within given file. - GetCurrentFolder() --> Folder # Returns currently selected Folder. - SetCurrentFolder(Folder) --> Bool # Sets current folder by given Folder. -Folder - GetClips() --> [clips...] # Returns a list of clips (items) within the folder. - GetName() --> string # Returns user-defined name of the folder. - GetSubFolders() --> [folders...] # Returns a list of subfolders in the folder. -MediaPoolItem - GetMetadata(metadataType) --> [[types],[values]] # Returns a value of metadataType. If parameter is not specified returns all set metadata parameters. - SetMetadata(metadataType, metadataValue) --> Bool # Sets metadata by given type and value. Returns True if successful. - GetMediaId() --> string # Returns a unique ID name related to MediaPoolItem. - AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information. - GetMarkers() --> [markers...] # Returns a list of all markers and their information. - AddFlag(color) --> Bool # Adds a flag with given color (text). - GetFlags() --> [colors...] # Returns a list of flag colors assigned to the item. - GetClipColor() --> string # Returns an item color as a string. - GetClipProperty(propertyName) --> [[types],[values]] # Returns property value related to the item based on given propertyName (string). if propertyName is empty then it returns a full list of properties. - SetClipProperty(propertyName, propertyValue) --> Bool # Sets into given propertyName (string) propertyValue (string). -Timeline - GetName() --> string # Returns user-defined name of the timeline. - SetName(timelineName) --> Bool # Sets timeline name is timelineName (text) is unique. - GetStartFrame() --> int # Returns frame number at the start of timeline. - GetEndFrame() --> int # Returns frame number at the end of timeline. - GetTrackCount(trackType) --> int # Returns a number of track based on specified track type ("audio", "video" or "subtitle"). - GetItemsInTrack(trackType, index) --> [items...] # Returns an array of Timeline items on the video or audio track (based on trackType) at specified index. 1 <= index <= GetTrackCount(trackType). - AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information. - GetMarkers() --> [markers...] # Returns a list of all markers and their information. 
- ApplyGradeFromDRX(path, gradeMode, item1, item2, ...)--> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". - ApplyGradeFromDRX(path, gradeMode, [items]) --> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". - GetCurrentTimecode() --> string # Returns a string representing a timecode for current position of the timeline, while on Cut, Edit, Color and Deliver page. - GetCurrentVideoItem() --> item # Returns current video timeline item. - GetCurrentClipThumbnailImage() --> [width, height, format, data] # Returns raw thumbnail image data (This image data is encoded in base 64 format and the image format is RGB 8 bit) for the current media in the Color Page in the format of dictionary (in Python) and table (in Lua). Information return are "width", "height", "format" and "data". Example is provided in 6_get_current_media_thumbnail.py in Example folder. -TimelineItem - GetName() --> string # Returns a name of the item. - GetDuration() --> int # Returns a duration of item. - GetEnd() --> int # Returns a position of end frame. - GetFusionCompCount() --> int # Returns the number of Fusion compositions associated with the timeline item. - GetFusionCompByIndex(compIndex) --> fusionComp # Returns Fusion composition object based on given index. 1 <= compIndex <= timelineItem.GetFusionCompCount() - GetFusionCompNames() --> [names...] # Returns a list of Fusion composition names associated with the timeline item. - GetFusionCompByName(compName) --> fusionComp # Returns Fusion composition object based on given name. - GetLeftOffset() --> int # Returns a maximum extension by frame for clip from left side. - GetRightOffset() --> int # Returns a maximum extension by frame for clip from right side. - GetStart() --> int # Returns a position of first frame. - AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information. - GetMarkers() --> [markers...] # Returns a list of all markers and their information. - GetFlags() --> [colors...] # Returns a list of flag colors assigned to the item. - GetClipColor() --> string # Returns an item color as a string. - AddFusionComp() --> fusionComp # Adds a new Fusion composition associated with the timeline item. - ImportFusionComp(path) --> fusionComp # Imports Fusion composition from given file path by creating and adding a new composition for the item. - ExportFusionComp(path, compIndex) --> Bool # Exports Fusion composition based on given index into provided file name path. - DeleteFusionCompByName(compName) --> Bool # Deletes Fusion composition by provided name. - LoadFusionCompByName(compName) --> fusionComp # Loads Fusion composition by provided name and sets it as active composition. - RenameFusionCompByName(oldName, newName) --> Bool # Renames Fusion composition by provided name with new given name. - AddVersion(versionName, versionType) --> Bool # Adds a new Version associated with the timeline item. versionType: 0 - local, 1 - remote. - DeleteVersionByName(versionName, versionType) --> Bool # Deletes Version by provided name. versionType: 0 - local, 1 - remote. - LoadVersionByName(versionName, versionType) --> Bool # Loads Version by provided name and sets it as active Version. versionType: 0 - local, 1 - remote. 
- RenameVersionByName(oldName, newName, versionType)--> Bool # Renames Version by provided name with new given name. versionType: 0 - local, 1 - remote. - GetMediaPoolItem() --> MediaPoolItem # Returns a corresponding to the timeline item media pool item if it exists. - GetVersionNames(versionType) --> [strings...] # Returns a list of version names by provided versionType: 0 - local, 1 - remote. - GetStereoConvergenceValues() --> [offset, value] # Returns a table of keyframe offsets and respective convergence values - GetStereoLeftFloatingWindowParams() --> [offset, value] # For the LEFT eye -> returns a table of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values - GetStereoRightFloatingWindowParams() --> [offset, value] # For the RIGHT eye -> returns a table of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values diff --git a/openpype/hosts/resolve/api/preload_console.py b/openpype/hosts/resolve/api/preload_console.py deleted file mode 100644 index 8b2b31fe1a..0000000000 --- a/openpype/hosts/resolve/api/preload_console.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python -import time -from openpype.hosts.resolve.api.utils import get_resolve_module -from openpype.lib import Logger - -log = Logger.get_logger(__name__) - -wait_delay = 2.5 -wait = 0.00 -ready = None -while True: - try: - # Create project and set parameters: - resolve = get_resolve_module() - pm = resolve.GetProjectManager() - if pm: - ready = None - else: - ready = True - except AttributeError: - pass - - if ready is None: - time.sleep(wait_delay) - log.info(f"Waiting {wait}s for Resolve to have opened Project Manager") - wait += wait_delay - else: - print(f"Preloaded variables: \n\n\tResolve module: " - f"`resolve` > {type(resolve)} \n\tProject manager: " - f"`pm` > {type(pm)}") - break diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index 1a36715437..0cf9664457 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -2,8 +2,6 @@ import os import platform from openpype.lib import PreLaunchHook from openpype.hosts.resolve.utils import setup -from openpype.hosts.resolve import api as rapi - class ResolvePrelaunch(PreLaunchHook): """ @@ -98,13 +96,6 @@ class ResolvePrelaunch(PreLaunchHook): self.launch_context.env["RESOLVE_UTILITY_SCRIPTS_DIR"] = ( RESOLVE_UTILITY_SCRIPTS_DIR) - # correctly format path for pre python script - rapi_path = os.path.dirname(rapi.__file__) - pre_py_sc = os.path.join( - rapi_path, "preload_console.py") - self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc - self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...") - # remove terminal coloring tags self.launch_context.env["OPENPYPE_LOG_NO_COLORS"] = "True" From e43cf43a105f9aed18a1f7664af62d1fe21c8658 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 15:10:54 +0200 Subject: [PATCH 1627/2550] resolve: updating resolve api readme and our readme --- openpype/hosts/resolve/README.markdown | 16 +- ...v16.2.0_up.txt => RESOLVE_API_v18.0.4.txt} | 332 ++++++++++++++---- 2 files changed, 274 insertions(+), 74 deletions(-) rename openpype/hosts/resolve/{RESOLVE_API_README_v16.2.0_up.txt => RESOLVE_API_v18.0.4.txt} (70%) diff --git a/openpype/hosts/resolve/README.markdown b/openpype/hosts/resolve/README.markdown index 
8c9f72fb0c..38db8a8004 100644 --- a/openpype/hosts/resolve/README.markdown +++ b/openpype/hosts/resolve/README.markdown @@ -1,14 +1,16 @@ #### Basic setup -- Install [latest DaVinci Resolve](https://sw.blackmagicdesign.com/DaVinciResolve/v16.2.8/DaVinci_Resolve_Studio_16.2.8_Windows.zip?Key-Pair-Id=APKAJTKA3ZJMJRQITVEA&Signature=EcFuwQFKHZIBu2zDj5LTCQaQDXcKOjhZY7Fs07WGw24xdDqfwuALOyKu+EVzDX2Tik0cWDunYyV0r7hzp+mHmczp9XP4YaQXHdyhD/2BGWDgiMsiTQbNkBgbfy5MsAMFY8FHCl724Rxm8ke1foWeUVyt/Cdkil+ay+9sL72yFhaSV16sncko1jCIlCZeMkHhbzqPwyRuqLGmxmp8ey9KgBhI3wGFFPN201VMaV+RHrpX+KAfaR6p6dwo3FrPbRHK9TvMI1RA/1lJ3fVtrkDW69LImIKAWmIxgcStUxR9/taqLOD66FNiflHd1tufHv3FBa9iYQsjb3VLMPx7OCwLyg==&Expires=1608308139) -- add absolute path to ffmpeg into openpype settings - ![image](https://user-images.githubusercontent.com/40640033/102630786-43294f00-414d-11eb-98de-f0ae51f62077.png) -- install Python 3.6 into `%LOCALAPPDATA%/Programs/Python/Python36` (only respected path by Resolve) -- install OpenTimelineIO for 3.6 `%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move built files from `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/`. I was building it on Win10 machine with Visual Studio Community 2019 and +- Actually supported version is up to v18 +- install Python 3.6.2 (latest tested v17) or up to 3.9.13 (latest tested on v18) +- pip install PySide2: + - Python 3.9.*: open terminal and go to python.exe directory, then `python -m pip install PySide2` +- pip install OpenTimelineIO: + - Python 3.9.*: open terminal and go to python.exe directory, then `python -m pip install OpenTimelineIO` + - Python 3.6: open terminal and go to python.exe directory, then `python -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move built files from `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `/Lib/site-packages/opentimelineio/`. I was building it on Win10 machine with Visual Studio Community 2019 and ![image](https://user-images.githubusercontent.com/40640033/102792588-ffcb1c80-43a8-11eb-9c6b-bf2114ed578e.png) with installed CMake in PATH. -- install PySide2 for 3.6 `%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install PySide2` - make sure Resolve Fusion (Fusion Tab/menu/Fusion/Fusion Settings) is set to Python 3.6 ![image](https://user-images.githubusercontent.com/40640033/102631545-280b0f00-414e-11eb-89fc-98ac268d209d.png) +- Open OpenPype **Tray/Admin/Studio settings** > `applications/resolve/environment` and add Python3 path to `RESOLVE_PYTHON3_HOME` platform related. #### Editorial setup @@ -16,7 +18,7 @@ This is how it looks on my testing project timeline ![image](https://user-images.githubusercontent.com/40640033/102637638-96ec6600-4156-11eb-9656-6e8e3ce4baf8.png) Notice I had renamed tracks to `main` (holding metadata markers) and `review` used for generating review data with ffmpeg confersion to jpg sequence. -1. you need to start OpenPype menu from Resolve/EditTab/Menu/Workspace/Scripts/**__OpenPype_Menu__** +1. you need to start OpenPype menu from Resolve/EditTab/Menu/Workspace/Scripts/Comp/**__OpenPype_Menu__** 2. then select any clips in `main` track and change their color to `Chocolate` 3. in OpenPype Menu select `Create` 4. 
in Creator select `Create Publishable Clip [New]` (temporary name) diff --git a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt b/openpype/hosts/resolve/RESOLVE_API_v18.0.4.txt similarity index 70% rename from openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt rename to openpype/hosts/resolve/RESOLVE_API_v18.0.4.txt index f1b8b81a71..98597a12cb 100644 --- a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt +++ b/openpype/hosts/resolve/RESOLVE_API_v18.0.4.txt @@ -1,5 +1,5 @@ -Updated as of 20 October 2020 ------------------------------ +Updated as of 9 May 2022 +---------------------------- In this package, you will find a brief introduction to the Scripting API for DaVinci Resolve Studio. Apart from this README.txt file, this package contains folders containing the basic import modules for scripting access (DaVinciResolve.py) and some representative examples. @@ -89,12 +89,25 @@ Resolve Fusion() --> Fusion # Returns the Fusion object. Starting point for Fusion scripts. GetMediaStorage() --> MediaStorage # Returns the media storage object to query and act on media locations. GetProjectManager() --> ProjectManager # Returns the project manager object for currently open database. - OpenPage(pageName) --> None # Switches to indicated page in DaVinci Resolve. Input can be one of ("media", "cut", "edit", "fusion", "color", "fairlight", "deliver"). + OpenPage(pageName) --> Bool # Switches to indicated page in DaVinci Resolve. Input can be one of ("media", "cut", "edit", "fusion", "color", "fairlight", "deliver"). + GetCurrentPage() --> String # Returns the page currently displayed in the main window. Returned value can be one of ("media", "cut", "edit", "fusion", "color", "fairlight", "deliver", None). GetProductName() --> string # Returns product name. GetVersion() --> [version fields] # Returns list of product version fields in [major, minor, patch, build, suffix] format. GetVersionString() --> string # Returns product version in "major.minor.patch[suffix].build" format. + LoadLayoutPreset(presetName) --> Bool # Loads UI layout from saved preset named 'presetName'. + UpdateLayoutPreset(presetName) --> Bool # Overwrites preset named 'presetName' with current UI layout. + ExportLayoutPreset(presetName, presetFilePath) --> Bool # Exports preset named 'presetName' to path 'presetFilePath'. + DeleteLayoutPreset(presetName) --> Bool # Deletes preset named 'presetName'. + SaveLayoutPreset(presetName) --> Bool # Saves current UI layout as a preset named 'presetName'. + ImportLayoutPreset(presetFilePath, presetName) --> Bool # Imports preset from path 'presetFilePath'. The optional argument 'presetName' specifies how the preset shall be named. If not specified, the preset is named based on the filename. + Quit() --> None # Quits the Resolve App. ProjectManager + ArchiveProject(projectName, + filePath, + isArchiveSrcMedia=True, + isArchiveRenderCache=True, + isArchiveProxyMedia=False) --> Bool # Archives project to provided file path with the configuration as provided by the optional arguments CreateProject(projectName) --> Project # Creates and returns a project if projectName (string) is unique, and None if it is not. DeleteProject(projectName) --> Bool # Delete project in the current folder if not currently loaded LoadProject(projectName) --> Project # Loads and returns the project with name = projectName (string) if there is a match found, and None if there is no matching Project. 
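To illustrate the calls listed above, a small external-script sketch (the project name, page and archive path are invented; it assumes the scripting environment variables described earlier in this file are set):

    #!/usr/bin/env python
    import DaVinciResolveScript as dvr_script

    resolve = dvr_script.scriptapp("Resolve")
    project_manager = resolve.GetProjectManager()

    project = project_manager.LoadProject("Hello World")  # None when missing
    if project:
        resolve.OpenPage("deliver")
        # ArchiveProject is one of the additions in this API revision.
        project_manager.ArchiveProject(
            project.GetName(),
            "/tmp/hello_world.dra",
            isArchiveSrcMedia=False
        )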
@@ -109,9 +122,9 @@ ProjectManager GotoParentFolder() --> Bool # Opens parent folder of current folder in database if current folder has parent. GetCurrentFolder() --> string # Returns the current folder name. OpenFolder(folderName) --> Bool # Opens folder under given name. - ImportProject(filePath) --> Bool # Imports a project from the file path provided. Returns True if successful. + ImportProject(filePath, projectName=None) --> Bool # Imports a project from the file path provided with given project name, if any. Returns True if successful. ExportProject(projectName, filePath, withStillsAndLUTs=True) --> Bool # Exports project to provided file path, including stills and LUTs if withStillsAndLUTs is True (enabled by default). Returns True in case of success. - RestoreProject(filePath) --> Bool # Restores a project from the file path provided. Returns True if successful. + RestoreProject(filePath, projectName=None) --> Bool # Restores a project from the file path provided with given project name, if any. Returns True if successful. GetCurrentDatabase() --> {dbInfo} # Returns a dictionary (with keys 'DbType', 'DbName' and optional 'IpAddress') corresponding to the current database connection GetDatabaseList() --> [{dbInfo}] # Returns a list of dictionary items (with keys 'DbType', 'DbName' and optional 'IpAddress') corresponding to all the databases added to Resolve SetCurrentDatabase({dbInfo}) --> Bool # Switches current database connection to the database specified by the keys below, and closes any open project. @@ -125,8 +138,9 @@ Project GetTimelineByIndex(idx) --> Timeline # Returns timeline at the given index, 1 <= idx <= project.GetTimelineCount() GetCurrentTimeline() --> Timeline # Returns the currently loaded timeline. SetCurrentTimeline(timeline) --> Bool # Sets given timeline as current timeline for the project. Returns True if successful. + GetGallery() --> Gallery # Returns the Gallery object. GetName() --> string # Returns project name. - SetName(projectName) --> Bool # Sets project name if given projectname (string) is unique. + SetName(projectName) --> Bool # Sets project name if given projectName (string) is unique. GetPresetList() --> [presets...] # Returns a list of presets and their information. SetPreset(presetName) --> Bool # Sets preset by given presetName (string) into project. AddRenderJob() --> string # Adds a render job based on current render settings to the render queue. Returns a unique job id (string) for the new render job. @@ -144,27 +158,7 @@ Project LoadRenderPreset(presetName) --> Bool # Sets a preset as current preset for rendering if presetName (string) exists. SaveAsNewRenderPreset(presetName) --> Bool # Creates new render preset by given name if presetName(string) is unique. SetRenderSettings({settings}) --> Bool # Sets given settings for rendering. Settings is a dict, with support for the keys: - # "SelectAllFrames": Bool - # "MarkIn": int - # "MarkOut": int - # "TargetDir": string - # "CustomName": string - # "UniqueFilenameStyle": 0 - Prefix, 1 - Suffix. 
- # "ExportVideo": Bool - # "ExportAudio": Bool - # "FormatWidth": int - # "FormatHeight": int - # "FrameRate": float (examples: 23.976, 24) - # "PixelAspectRatio": string (for SD resolution: "16_9" or "4_3") (other resolutions: "square" or "cinemascope") - # "VideoQuality" possible values for current codec (if applicable): - # 0 (int) - will set quality to automatic - # [1 -> MAX] (int) - will set input bit rate - # ["Least", "Low", "Medium", "High", "Best"] (String) - will set input quality level - # "AudioCodec": string (example: "aac") - # "AudioBitDepth": int - # "AudioSampleRate": int - # "ColorSpaceTag" : string (example: "Same as Project", "AstroDesign") - # "GammaTag" : string (example: "Same as Project", "ACEScct") + # Refer to "Looking up render settings" section for information for supported settings GetRenderJobStatus(jobId) --> {status info} # Returns a dict with job status and completion percentage of the job by given jobId (string). GetSetting(settingName) --> string # Returns value of project setting (indicated by settingName, string). Check the section below for more information. SetSetting(settingName, settingValue) --> Bool # Sets the project setting (indicated by settingName, string) to the value (settingValue, string). Check the section below for more information. @@ -176,12 +170,13 @@ Project SetCurrentRenderMode(renderMode) --> Bool # Sets the render mode. Specify renderMode = 0 for Individual clips, 1 for Single clip. GetRenderResolutions(format, codec) --> [{Resolution}] # Returns list of resolutions applicable for the given render format (string) and render codec (string). Returns full list of resolutions if no argument is provided. Each element in the list is a dictionary with 2 keys "Width" and "Height". RefreshLUTList() --> Bool # Refreshes LUT List + GetUniqueId() --> string # Returns a unique ID for the project item MediaStorage GetMountedVolumeList() --> [paths...] # Returns list of folder paths corresponding to mounted volumes displayed in Resolve’s Media Storage. GetSubFolderList(folderPath) --> [paths...] # Returns list of folder paths in the given absolute folder path. GetFileList(folderPath) --> [paths...] # Returns list of media and file listings in the given absolute folder path. Note that media listings may be logically consolidated entries. - RevealInStorage(path) --> None # Expands and displays given file/folder path in Resolve’s Media Storage. + RevealInStorage(path) --> Bool # Expands and displays given file/folder path in Resolve’s Media Storage. AddItemListToMediaPool(item1, item2, ...) --> [clips...] # Adds specified file/folder paths from Media Storage into current Media Pool folder. Input is one or more file/folder paths. Returns a list of the MediaPoolItems created. AddItemListToMediaPool([items...]) --> [clips...] # Adds specified file/folder paths from Media Storage into current Media Pool folder. Input is an array of file/folder paths. Returns a list of the MediaPoolItems created. AddClipMattesToMediaPool(MediaPoolItem, [paths], stereoEye) --> Bool # Adds specified media files as mattes for the specified MediaPoolItem. StereoEye is an optional argument for specifying which eye to add the matte to for stereo clips ("left" or "right"). Returns True if successful. @@ -190,10 +185,11 @@ MediaStorage MediaPool GetRootFolder() --> Folder # Returns root Folder of Media Pool AddSubFolder(folder, name) --> Folder # Adds new subfolder under specified Folder object with the given name. 
+ RefreshFolders() --> Bool # Updates the folders in collaboration mode CreateEmptyTimeline(name) --> Timeline # Adds new timeline with given name. - AppendToTimeline(clip1, clip2, ...) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - AppendToTimeline([clips]) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful. - AppendToTimeline([{clipInfo}, ...]) --> Bool # Appends list of clipInfos specified as dict of "mediaPoolItem", "startFrame" (int), "endFrame" (int). + AppendToTimeline(clip1, clip2, ...) --> [TimelineItem] # Appends specified MediaPoolItem objects in the current timeline. Returns the list of appended timelineItems. + AppendToTimeline([clips]) --> [TimelineItem] # Appends specified MediaPoolItem objects in the current timeline. Returns the list of appended timelineItems. + AppendToTimeline([{clipInfo}, ...]) --> [TimelineItem] # Appends list of clipInfos specified as dict of "mediaPoolItem", "startFrame" (int), "endFrame" (int), (optional) "mediaType" (int; 1 - Video only, 2 - Audio only). Returns the list of appended timelineItems. CreateTimelineFromClips(name, clip1, clip2,...) --> Timeline # Creates new timeline with specified name, and appends the specified MediaPoolItem objects. CreateTimelineFromClips(name, [clips]) --> Timeline # Creates new timeline with specified name, and appends the specified MediaPoolItem objects. CreateTimelineFromClips(name, [{clipInfo}]) --> Timeline # Creates new timeline with specified name, appending the list of clipInfos specified as a dict of "mediaPoolItem", "startFrame" (int), "endFrame" (int). @@ -202,6 +198,8 @@ MediaPool # "importSourceClips": Bool, specifies whether source clips should be imported, True by default # "sourceClipsPath": string, specifies a filesystem path to search for source clips if the media is inaccessible in their original path and if "importSourceClips" is True # "sourceClipsFolders": List of Media Pool folder objects to search for source clips if the media is not present in current folder and if "importSourceClips" is False + # "interlaceProcessing": Bool, specifies whether to enable interlace processing on the imported timeline being created. valid only for AAF import + DeleteTimelines([timeline]) --> Bool # Deletes specified timelines in the media pool. GetCurrentFolder() --> Folder # Returns currently selected Folder. SetCurrentFolder(Folder) --> Bool # Sets current folder by given Folder. DeleteClips([clips]) --> Bool # Deletes specified clips or timeline mattes in the media pool @@ -214,19 +212,26 @@ MediaPool RelinkClips([MediaPoolItem], folderPath) --> Bool # Update the folder location of specified media pool clips with the specified folder path. UnlinkClips([MediaPoolItem]) --> Bool # Unlink specified media pool clips. ImportMedia([items...]) --> [MediaPoolItems] # Imports specified file/folder paths into current Media Pool folder. Input is an array of file/folder paths. Returns a list of the MediaPoolItems created. + ImportMedia([{clipInfo}]) --> [MediaPoolItems] # Imports file path(s) into current Media Pool folder as specified in list of clipInfo dict. Returns a list of the MediaPoolItems created. + # Each clipInfo gets imported as one MediaPoolItem unless 'Show Individual Frames' is turned on. + # Example: ImportMedia([{"FilePath":"file_%03d.dpx", "StartIndex":1, "EndIndex":100}]) would import clip "file_[001-100].dpx". 
ExportMetadata(fileName, [clips]) --> Bool # Exports metadata of specified clips to 'fileName' in CSV format. # If no clips are specified, all clips from media pool will be used. + GetUniqueId() --> string # Returns a unique ID for the media pool Folder GetClipList() --> [clips...] # Returns a list of clips (items) within the folder. GetName() --> string # Returns the media folder name. GetSubFolderList() --> [folders...] # Returns a list of subfolders in the folder. + GetIsFolderStale() --> bool # Returns true if folder is stale in collaboration mode, false otherwise + GetUniqueId() --> string # Returns a unique ID for the media pool folder MediaPoolItem GetName() --> string # Returns the clip name. GetMetadata(metadataType=None) --> string|dict # Returns the metadata value for the key 'metadataType'. # If no argument is specified, a dict of all set metadata properties is returned. SetMetadata(metadataType, metadataValue) --> Bool # Sets the given metadata to metadataValue (string). Returns True if successful. + SetMetadata({metadata}) --> Bool # Sets the item metadata with specified 'metadata' dict. Returns True if successful. GetMediaId() --> string # Returns the unique ID for the MediaPoolItem. AddMarker(frameId, color, name, note, duration, --> Bool # Creates a new marker at given frameId position and with given marker information. 'customData' is optional and helps to attach user specific data to the marker. customData) @@ -248,15 +253,18 @@ MediaPoolItem GetClipProperty(propertyName=None) --> string|dict # Returns the property value for the key 'propertyName'. # If no argument is specified, a dict of all clip properties is returned. Check the section below for more information. SetClipProperty(propertyName, propertyValue) --> Bool # Sets the given property to propertyValue (string). Check the section below for more information. - LinkProxyMedia(propertyName) --> Bool # Links proxy media (absolute path) with the current clip. + LinkProxyMedia(proxyMediaFilePath) --> Bool # Links proxy media located at path specified by arg 'proxyMediaFilePath' with the current clip. 'proxyMediaFilePath' should be absolute clip path. UnlinkProxyMedia() --> Bool # Unlinks any proxy media associated with clip. ReplaceClip(filePath) --> Bool # Replaces the underlying asset and metadata of MediaPoolItem with the specified absolute clip path. + GetUniqueId() --> string # Returns a unique ID for the media pool item Timeline GetName() --> string # Returns the timeline name. SetName(timelineName) --> Bool # Sets the timeline name if timelineName (string) is unique. Returns True if successful. GetStartFrame() --> int # Returns the frame number at the start of timeline. GetEndFrame() --> int # Returns the frame number at the end of timeline. + SetStartTimecode(timecode) --> Bool # Set the start timecode of the timeline to the string 'timecode'. Returns true when the change is successful, false otherwise. + GetStartTimecode() --> string # Returns the start timecode for the timeline. GetTrackCount(trackType) --> int # Returns the number of tracks for the given track type ("audio", "video" or "subtitle"). GetItemListInTrack(trackType, index) --> [items...] # Returns a list of timeline items on that track (based on trackType and index). 1 <= index <= GetTrackCount(trackType). AddMarker(frameId, color, name, note, duration, --> Bool # Creates a new marker at given frameId position and with given marker information. 'customData' is optional and helps to attach user specific data to the marker. 
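As a rough illustration of the MediaPool/MediaPoolItem/Timeline additions in the hunks above (clipInfo-based ImportMedia, the dict form of SetMetadata, the clarified LinkProxyMedia argument and SetStartTimecode), something along these lines could be run from the Resolve console. Every file path, frame range, metadata value and timecode below is a placeholder chosen for the example.

    import DaVinciResolveScript as dvr_script

    resolve = dvr_script.scriptapp("Resolve")
    project = resolve.GetProjectManager().GetCurrentProject()
    media_pool = project.GetMediaPool()

    # Import an image sequence as a single clip via the new clipInfo dict form
    clips = media_pool.ImportMedia([{
        "FilePath": "/shots/sh010/plate/sh010_plate.%04d.exr",
        "StartIndex": 1001,
        "EndIndex": 1100,
    }])

    if clips:
        clip = clips[0]
        # Dict overload of SetMetadata added in this revision
        clip.SetMetadata({"Shot": "sh010", "Scene": "sq01"})
        # LinkProxyMedia() expects an absolute path to the proxy media
        clip.LinkProxyMedia("/shots/sh010/proxy/sh010_plate_proxy.mov")

    # Timelines can now have their start timecode set explicitly
    timeline = project.GetCurrentTimeline()
    if timeline:
        timeline.SetStartTimecode("01:00:00:00")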
@@ -271,7 +279,8 @@ Timeline DeleteMarkerByCustomData(customData) --> Bool # Delete first matching marker with specified customData. ApplyGradeFromDRX(path, gradeMode, item1, item2, ...)--> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". ApplyGradeFromDRX(path, gradeMode, [items]) --> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned". - GetCurrentTimecode() --> string # Returns a string timecode representation for the current playhead position, while on Cut, Edit, Color and Deliver pages. + GetCurrentTimecode() --> string # Returns a string timecode representation for the current playhead position, while on Cut, Edit, Color, Fairlight and Deliver pages. + SetCurrentTimecode(timecode) --> Bool # Sets current playhead position from input timecode for Cut, Edit, Color, Fairlight and Deliver pages. GetCurrentVideoItem() --> item # Returns the current video timeline item. GetCurrentClipThumbnailImage() --> {thumbnailData} # Returns a dict (keys "width", "height", "format" and "data") with data containing raw thumbnail image data (RGB 8-bit image data encoded in base64 format) for current media in the Color Page. # An example of how to retrieve and interpret thumbnails is provided in 6_get_current_media_thumbnail.py in the Examples folder. @@ -280,37 +289,30 @@ Timeline DuplicateTimeline(timelineName) --> timeline # Duplicates the timeline and returns the created timeline, with the (optional) timelineName, on success. CreateCompoundClip([timelineItems], {clipInfo}) --> timelineItem # Creates a compound clip of input timeline items with an optional clipInfo map: {"startTimecode" : "00:00:00:00", "name" : "Compound Clip 1"}. It returns the created timeline item. CreateFusionClip([timelineItems]) --> timelineItem # Creates a Fusion clip of input timeline items. It returns the created timeline item. 
+ ImportIntoTimeline(filePath, {importOptions}) --> Bool # Imports timeline items from an AAF file and optional importOptions dict into the timeline, with support for the keys: + # "autoImportSourceClipsIntoMediaPool": Bool, specifies if source clips should be imported into media pool, True by default + # "ignoreFileExtensionsWhenMatching": Bool, specifies if file extensions should be ignored when matching, False by default + # "linkToSourceCameraFiles": Bool, specifies if link to source camera files should be enabled, False by default + # "useSizingInfo": Bool, specifies if sizing information should be used, False by default + # "importMultiChannelAudioTracksAsLinkedGroups": Bool, specifies if multi-channel audio tracks should be imported as linked groups, False by default + # "insertAdditionalTracks": Bool, specifies if additional tracks should be inserted, True by default + # "insertWithOffset": string, specifies insert with offset value in timecode format - defaults to "00:00:00:00", applicable if "insertAdditionalTracks" is False + # "sourceClipsPath": string, specifies a filesystem path to search for source clips if the media is inaccessible in their original path and if "ignoreFileExtensionsWhenMatching" is True + # "sourceClipsFolders": string, list of Media Pool folder objects to search for source clips if the media is not present in current folder + Export(fileName, exportType, exportSubtype) --> Bool # Exports timeline to 'fileName' as per input exportType & exportSubtype format. - # exportType can be one of the following constants: - # resolve.EXPORT_AAF - # resolve.EXPORT_DRT - # resolve.EXPORT_EDL - # resolve.EXPORT_FCP_7_XML - # resolve.EXPORT_FCPXML_1_3 - # resolve.EXPORT_FCPXML_1_4 - # resolve.EXPORT_FCPXML_1_5 - # resolve.EXPORT_FCPXML_1_6 - # resolve.EXPORT_FCPXML_1_7 - # resolve.EXPORT_FCPXML_1_8 - # resolve.EXPORT_HDR_10_PROFILE_A - # resolve.EXPORT_HDR_10_PROFILE_B - # resolve.EXPORT_TEXT_CSV - # resolve.EXPORT_TEXT_TAB - # resolve.EXPORT_DOLBY_VISION_VER_2_9 - # resolve.EXPORT_DOLBY_VISION_VER_4_0 - # exportSubtype can be one of the following enums: - # resolve.EXPORT_NONE - # resolve.EXPORT_AAF_NEW - # resolve.EXPORT_AAF_EXISTING - # resolve.EXPORT_CDL - # resolve.EXPORT_SDL - # resolve.EXPORT_MISSING_CLIPS - # Please note that exportSubType is a required parameter for resolve.EXPORT_AAF and resolve.EXPORT_EDL. For rest of the exportType, exportSubtype is ignored. - # When exportType is resolve.EXPORT_AAF, valid exportSubtype values are resolve.EXPORT_AAF_NEW and resolve.EXPORT_AAF_EXISTING. - # When exportType is resolve.EXPORT_EDL, valid exportSubtype values are resolve.EXPORT_CDL, resolve.EXPORT_SDL, resolve.EXPORT_MISSING_CLIPS and resolve.EXPORT_NONE. - # Note: Replace 'resolve.' when using the constants above, if a different Resolve class instance name is used. + # Refer to section "Looking up timeline exports properties" for information on the parameters. GetSetting(settingName) --> string # Returns value of timeline setting (indicated by settingName : string). Check the section below for more information. SetSetting(settingName, settingValue) --> Bool # Sets timeline setting (indicated by settingName : string) to the value (settingValue : string). Check the section below for more information. + InsertGeneratorIntoTimeline(generatorName) --> TimelineItem # Inserts a generator (indicated by generatorName : string) into the timeline. 
+ InsertFusionGeneratorIntoTimeline(generatorName) --> TimelineItem # Inserts a Fusion generator (indicated by generatorName : string) into the timeline. + InsertFusionCompositionIntoTimeline() --> TimelineItem # Inserts a Fusion composition into the timeline. + InsertOFXGeneratorIntoTimeline(generatorName) --> TimelineItem # Inserts an OFX generator (indicated by generatorName : string) into the timeline. + InsertTitleIntoTimeline(titleName) --> TimelineItem # Inserts a title (indicated by titleName : string) into the timeline. + InsertFusionTitleIntoTimeline(titleName) --> TimelineItem # Inserts a Fusion title (indicated by titleName : string) into the timeline. + GrabStill() --> galleryStill # Grabs still from the current video clip. Returns a GalleryStill object. + GrabAllStills(stillFrameSource) --> [galleryStill] # Grabs stills from all the clips of the timeline at 'stillFrameSource' (1 - First frame, 2 - Middle frame). Returns the list of GalleryStill objects. + GetUniqueId() --> string # Returns a unique ID for the timeline TimelineItem GetName() --> string # Returns the item name. @@ -323,6 +325,10 @@ TimelineItem GetLeftOffset() --> int # Returns the maximum extension by frame for clip from left side. GetRightOffset() --> int # Returns the maximum extension by frame for clip from right side. GetStart() --> int # Returns the start frame position on the timeline. + SetProperty(propertyKey, propertyValue) --> Bool # Sets the value of property "propertyKey" to value "propertyValue" + # Refer to "Looking up Timeline item properties" for more information + GetProperty(propertyKey) --> int/[key:value] # returns the value of the specified key + # if no key is specified, the method returns a dictionary(python) or table(lua) for all supported keys AddMarker(frameId, color, name, note, duration, --> Bool # Creates a new marker at given frameId position and with given marker information. 'customData' is optional and helps to attach user specific data to the marker. customData) GetMarkers() --> {markers...} # Returns a dict (frameId -> {information}) of all markers and dicts with their information. @@ -345,7 +351,8 @@ TimelineItem DeleteFusionCompByName(compName) --> Bool # Deletes the named Fusion composition. LoadFusionCompByName(compName) --> fusionComp # Loads the named Fusion composition as the active composition. RenameFusionCompByName(oldName, newName) --> Bool # Renames the Fusion composition identified by oldName. - AddVersion(versionName, versionType) --> Bool # Adds a new color version for a video clipbased on versionType (0 - local, 1 - remote). + AddVersion(versionName, versionType) --> Bool # Adds a new color version for a video clip based on versionType (0 - local, 1 - remote). + GetCurrentVersion() --> {versionName...} # Returns the current version of the video clip. The returned value will have the keys versionName and versionType(0 - local, 1 - remote). DeleteVersionByName(versionName, versionType) --> Bool # Deletes a color version by name and versionType (0 - local, 1 - remote). LoadVersionByName(versionName, versionType) --> Bool # Loads a named color version as the active version. versionType: 0 - local, 1 - remote. RenameVersionByName(oldName, newName, versionType)--> Bool # Renames the color version identified by oldName and versionType (0 - local, 1 - remote). @@ -354,12 +361,14 @@ TimelineItem GetStereoConvergenceValues() --> {keyframes...} # Returns a dict (offset -> value) of keyframe offsets and respective convergence values. 
GetStereoLeftFloatingWindowParams() --> {keyframes...} # For the LEFT eye -> returns a dict (offset -> dict) of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values. GetStereoRightFloatingWindowParams() --> {keyframes...} # For the RIGHT eye -> returns a dict (offset -> dict) of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values. + GetNumNodes() --> int # Returns the number of nodes in the current graph for the timeline item SetLUT(nodeIndex, lutPath) --> Bool # Sets LUT on the node mapping the node index provided, 1 <= nodeIndex <= total number of nodes. # The lutPath can be an absolute path, or a relative path (based off custom LUT paths or the master LUT path). # The operation is successful for valid lut paths that Resolve has already discovered (see Project.RefreshLUTList). + GetLUT(nodeIndex) --> String # Gets relative LUT path based on the node index provided, 1 <= nodeIndex <= total number of nodes. SetCDL([CDL map]) --> Bool # Keys of map are: "NodeIndex", "Slope", "Offset", "Power", "Saturation", where 1 <= NodeIndex <= total number of nodes. # Example python code - SetCDL({"NodeIndex" : "1", "Slope" : "0.5 0.4 0.2", "Offset" : "0.4 0.3 0.2", "Power" : "0.6 0.7 0.8", "Saturation" : "0.65"}) - AddTake(mediaPoolItem, startFrame=0, endFrame)=0 --> Bool # Adds mediaPoolItem as a new take. Initializes a take selector for the timeline item if needed. By default, the whole clip is added. startFrame and endFrame can be specified as extents. + AddTake(mediaPoolItem, startFrame, endFrame) --> Bool # Adds mediaPoolItem as a new take. Initializes a take selector for the timeline item if needed. By default, the full clip extents is added. startFrame (int) and endFrame (int) are optional arguments used to specify the extents. GetSelectedTakeIndex() --> int # Returns the index of the currently selected take, or 0 if the clip is not a take selector. GetTakesCount() --> int # Returns the number of takes in take selector, or 0 if the clip is not a take selector. GetTakeByIndex(idx) --> {takeInfo...} # Returns a dict (keys "startFrame", "endFrame" and "mediaPoolItem") with take info for specified index. @@ -367,7 +376,24 @@ TimelineItem SelectTakeByIndex(idx) --> Bool # Selects a take by index, 1 <= idx <= number of takes. FinalizeTake() --> Bool # Finalizes take selection. CopyGrades([tgtTimelineItems]) --> Bool # Copies the current grade to all the items in tgtTimelineItems list. Returns True on success and False if any error occurred. + UpdateSidecar() --> Bool # Updates sidecar file for BRAW clips or RMD file for R3D clips. + GetUniqueId() --> string # Returns a unique ID for the timeline item +Gallery + GetAlbumName(galleryStillAlbum) --> string # Returns the name of the GalleryStillAlbum object 'galleryStillAlbum'. + SetAlbumName(galleryStillAlbum, albumName) --> Bool # Sets the name of the GalleryStillAlbum object 'galleryStillAlbum' to 'albumName'. + GetCurrentStillAlbum() --> galleryStillAlbum # Returns current album as a GalleryStillAlbum object. + SetCurrentStillAlbum(galleryStillAlbum) --> Bool # Sets current album to GalleryStillAlbum object 'galleryStillAlbum'. + GetGalleryStillAlbums() --> [galleryStillAlbum] # Returns the gallery albums as a list of GalleryStillAlbum objects. + +GalleryStillAlbum + GetStills() --> [galleryStill] # Returns the list of GalleryStill objects in the album. 
+ GetLabel(galleryStill) --> string # Returns the label of the galleryStill. + SetLabel(galleryStill, label) --> Bool # Sets the new 'label' to GalleryStill object 'galleryStill'. + ExportStills([galleryStill], folderPath, filePrefix, format) --> Bool # Exports list of GalleryStill objects '[galleryStill]' to directory 'folderPath', with filename prefix 'filePrefix', using file format 'format' (supported formats: dpx, cin, tif, jpg, png, ppm, bmp, xpm). + DeleteStills([galleryStill]) --> Bool # Deletes specified list of GalleryStill objects '[galleryStill]'. + +GalleryStill # This class does not provide any API functions but the object type is used by functions in other classes. List and Dict Data Structures ----------------------------- @@ -375,7 +401,6 @@ Beside primitive data types, Resolve's Python API mainly uses list and dict data As Lua does not support list and dict data structures, the Lua API implements "list" as a table with indices, e.g. { [1] = listValue1, [2] = listValue2, ... }. Similarly the Lua API implements "dict" as a table with the dictionary key as first element, e.g. { [dictKey1] = dictValue1, [dictKey2] = dictValue2, ... }. - Looking up Project and Clip properties -------------------------------------- This section covers additional notes for the functions "Project:GetSetting", "Project:SetSetting", "Timeline:GetSetting", "Timeline:SetSetting", "MediaPoolItem:GetClipProperty" and @@ -412,6 +437,179 @@ Affects: • x = MediaPoolItem:GetClipProperty('Super Scale') and MediaPoolItem:SetClipProperty('Super Scale', x) +Looking up Render Settings +-------------------------- +This section covers the supported settings for the method SetRenderSettings({settings}) + +The parameter setting is a dictionary containing the following keys: + - "SelectAllFrames": Bool (when set True, the settings MarkIn and MarkOut are ignored) + - "MarkIn": int + - "MarkOut": int + - "TargetDir": string + - "CustomName": string + - "UniqueFilenameStyle": 0 - Prefix, 1 - Suffix. + - "ExportVideo": Bool + - "ExportAudio": Bool + - "FormatWidth": int + - "FormatHeight": int + - "FrameRate": float (examples: 23.976, 24) + - "PixelAspectRatio": string (for SD resolution: "16_9" or "4_3") (other resolutions: "square" or "cinemascope") + - "VideoQuality" possible values for current codec (if applicable): + - 0 (int) - will set quality to automatic + - [1 -> MAX] (int) - will set input bit rate + - ["Least", "Low", "Medium", "High", "Best"] (String) - will set input quality level + - "AudioCodec": string (example: "aac") + - "AudioBitDepth": int + - "AudioSampleRate": int + - "ColorSpaceTag" : string (example: "Same as Project", "AstroDesign") + - "GammaTag" : string (example: "Same as Project", "ACEScct") + - "ExportAlpha": Bool + - "EncodingProfile": string (example: "Main10"). Can only be set for H.264 and H.265. + - "MultiPassEncode": Bool. Can only be set for H.264. + - "AlphaMode": 0 - Premultiplied, 1 - Straight. Can only be set if "ExportAlpha" is true. + - "NetworkOptimization": Bool. Only supported by QuickTime and MP4 formats. + +Looking up timeline export properties +------------------------------------- +This section covers the parameters for the argument Export(fileName, exportType, exportSubtype). 
+ +exportType can be one of the following constants: + - resolve.EXPORT_AAF + - resolve.EXPORT_DRT + - resolve.EXPORT_EDL + - resolve.EXPORT_FCP_7_XML + - resolve.EXPORT_FCPXML_1_3 + - resolve.EXPORT_FCPXML_1_4 + - resolve.EXPORT_FCPXML_1_5 + - resolve.EXPORT_FCPXML_1_6 + - resolve.EXPORT_FCPXML_1_7 + - resolve.EXPORT_FCPXML_1_8 + - resolve.EXPORT_FCPXML_1_9 + - resolve.EXPORT_FCPXML_1_10 + - resolve.EXPORT_HDR_10_PROFILE_A + - resolve.EXPORT_HDR_10_PROFILE_B + - resolve.EXPORT_TEXT_CSV + - resolve.EXPORT_TEXT_TAB + - resolve.EXPORT_DOLBY_VISION_VER_2_9 + - resolve.EXPORT_DOLBY_VISION_VER_4_0 +exportSubtype can be one of the following enums: + - resolve.EXPORT_NONE + - resolve.EXPORT_AAF_NEW + - resolve.EXPORT_AAF_EXISTING + - resolve.EXPORT_CDL + - resolve.EXPORT_SDL + - resolve.EXPORT_MISSING_CLIPS +Please note that exportSubType is a required parameter for resolve.EXPORT_AAF and resolve.EXPORT_EDL. For rest of the exportType, exportSubtype is ignored. +When exportType is resolve.EXPORT_AAF, valid exportSubtype values are resolve.EXPORT_AAF_NEW and resolve.EXPORT_AAF_EXISTING. +When exportType is resolve.EXPORT_EDL, valid exportSubtype values are resolve.EXPORT_CDL, resolve.EXPORT_SDL, resolve.EXPORT_MISSING_CLIPS and resolve.EXPORT_NONE. +Note: Replace 'resolve.' when using the constants above, if a different Resolve class instance name is used. + +Looking up Timeline item properties +----------------------------------- +This section covers additional notes for the function "TimelineItem:SetProperty" and "TimelineItem:GetProperty". These functions are used to get and set properties mentioned. + +The supported keys with their accepted values are: + "Pan" : floating point values from -4.0*width to 4.0*width + "Tilt" : floating point values from -4.0*height to 4.0*height + "ZoomX" : floating point values from 0.0 to 100.0 + "ZoomY" : floating point values from 0.0 to 100.0 + "ZoomGang" : a boolean value + "RotationAngle" : floating point values from -360.0 to 360.0 + "AnchorPointX" : floating point values from -4.0*width to 4.0*width + "AnchorPointY" : floating point values from -4.0*height to 4.0*height + "Pitch" : floating point values from -1.5 to 1.5 + "Yaw" : floating point values from -1.5 to 1.5 + "FlipX" : boolean value for flipping horizontally + "FlipY" : boolean value for flipping vertically + "CropLeft" : floating point values from 0.0 to width + "CropRight" : floating point values from 0.0 to width + "CropTop" : floating point values from 0.0 to height + "CropBottom" : floating point values from 0.0 to height + "CropSoftness" : floating point values from -100.0 to 100.0 + "CropRetain" : boolean value for "Retain Image Position" checkbox + "DynamicZoomEase" : A value from the following constants + - DYNAMIC_ZOOM_EASE_LINEAR = 0 + - DYNAMIC_ZOOM_EASE_IN + - DYNAMIC_ZOOM_EASE_OUT + - DYNAMIC_ZOOM_EASE_IN_AND_OUT + "CompositeMode" : A value from the following constants + - COMPOSITE_NORMAL = 0 + - COMPOSITE_ADD + - COMPOSITE_SUBTRACT + - COMPOSITE_DIFF + - COMPOSITE_MULTIPLY + - COMPOSITE_SCREEN + - COMPOSITE_OVERLAY + - COMPOSITE_HARDLIGHT + - COMPOSITE_SOFTLIGHT + - COMPOSITE_DARKEN + - COMPOSITE_LIGHTEN + - COMPOSITE_COLOR_DODGE + - COMPOSITE_COLOR_BURN + - COMPOSITE_EXCLUSION + - COMPOSITE_HUE + - COMPOSITE_SATURATE + - COMPOSITE_COLORIZE + - COMPOSITE_LUMA_MASK + - COMPOSITE_DIVIDE + - COMPOSITE_LINEAR_DODGE + - COMPOSITE_LINEAR_BURN + - COMPOSITE_LINEAR_LIGHT + - COMPOSITE_VIVID_LIGHT + - COMPOSITE_PIN_LIGHT + - COMPOSITE_HARD_MIX + - COMPOSITE_LIGHTER_COLOR + - 
COMPOSITE_DARKER_COLOR + - COMPOSITE_FOREGROUND + - COMPOSITE_ALPHA + - COMPOSITE_INVERTED_ALPHA + - COMPOSITE_LUM + - COMPOSITE_INVERTED_LUM + "Opacity" : floating point value from 0.0 to 100.0 + "Distortion" : floating point value from -1.0 to 1.0 + "RetimeProcess" : A value from the following constants + - RETIME_USE_PROJECT = 0 + - RETIME_NEAREST + - RETIME_FRAME_BLEND + - RETIME_OPTICAL_FLOW + "MotionEstimation" : A value from the following constants + - MOTION_EST_USE_PROJECT = 0 + - MOTION_EST_STANDARD_FASTER + - MOTION_EST_STANDARD_BETTER + - MOTION_EST_ENHANCED_FASTER + - MOTION_EST_ENHANCED_BETTER + - MOTION_EST_SPEED_WRAP + "Scaling" : A value from the following constants + - SCALE_USE_PROJECT = 0 + - SCALE_CROP + - SCALE_FIT + - SCALE_FILL + - SCALE_STRETCH + "ResizeFilter" : A value from the following constants + - RESIZE_FILTER_USE_PROJECT = 0 + - RESIZE_FILTER_SHARPER + - RESIZE_FILTER_SMOOTHER + - RESIZE_FILTER_BICUBIC + - RESIZE_FILTER_BILINEAR + - RESIZE_FILTER_BESSEL + - RESIZE_FILTER_BOX + - RESIZE_FILTER_CATMULL_ROM + - RESIZE_FILTER_CUBIC + - RESIZE_FILTER_GAUSSIAN + - RESIZE_FILTER_LANCZOS + - RESIZE_FILTER_MITCHELL + - RESIZE_FILTER_NEAREST_NEIGHBOR + - RESIZE_FILTER_QUADRATIC + - RESIZE_FILTER_SINC + - RESIZE_FILTER_LINEAR +Values beyond the range will be clipped +width and height are same as the UI max limits + +The arguments can be passed as a key and value pair or they can be grouped together into a dictionary (for python) or table (for lua) and passed +as a single argument. + +Getting the values for the keys that uses constants will return the number which is in the constant + Deprecated Resolve API Functions -------------------------------- The following API functions are deprecated. @@ -450,12 +648,12 @@ TimelineItem Unsupported Resolve API Functions --------------------------------- -The following API (functions and paraameters) are no longer supported. +The following API (functions and parameters) are no longer supported. Use job IDs instead of indices. Project StartRendering(index1, index2, ...) --> Bool # Please use unique job ids (string) instead of indices. StartRendering([idxs...]) --> Bool # Please use unique job ids (string) instead of indices. DeleteRenderJobByIndex(idx) --> Bool # Please use unique job ids (string) instead of indices. GetRenderJobStatus(idx) --> {status info} # Please use unique job ids (string) instead of indices. - GetSetting and SetSetting --> {} # settingName "videoMonitorUseRec601For422SDI" is no longer supported. - # Please use "videoMonitorUseMatrixOverrideFor422SDI" and "videoMonitorMatrixOverrideFor422SDI" instead. + GetSetting and SetSetting --> {} # settingName videoMonitorUseRec601For422SDI is now replaced with videoMonitorUseMatrixOverrideFor422SDI and videoMonitorMatrixOverrideFor422SDI. + # settingName perfProxyMediaOn is now replaced with perfProxyMediaMode which takes values 0 - disabled, 1 - when available, 2 - when source not available. 
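Putting the render-related changes of this API revision together — job handling by unique id and the SetRenderSettings keys that now live in the "Looking up Render Settings" section — a minimal, hedged sketch of a render submission could look like the snippet below. The output directory, custom name and resolution are placeholders, and which settings keys actually apply depends on the selected render format and codec.

    import DaVinciResolveScript as dvr_script

    resolve = dvr_script.scriptapp("Resolve")
    project = resolve.GetProjectManager().GetCurrentProject()

    project.SetRenderSettings({
        "SelectAllFrames": True,          # MarkIn/MarkOut are ignored when True
        "TargetDir": "/renders/sh010",    # placeholder output directory
        "CustomName": "sh010_comp",
        "ExportVideo": True,
        "ExportAudio": False,
        "FormatWidth": 1920,
        "FormatHeight": 1080,
        "FrameRate": 24,
    })

    # Render jobs are addressed by their unique id, not by index
    # (see the "Unsupported Resolve API Functions" note above)
    job_id = project.AddRenderJob()
    project.StartRendering(job_id)

    print(project.GetRenderJobStatus(job_id))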
From ca71cbc4d03fd289fecd2f4355c79138520da49b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 15:13:26 +0200 Subject: [PATCH 1628/2550] resolve: readme update --- openpype/hosts/resolve/README.markdown | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/resolve/README.markdown b/openpype/hosts/resolve/README.markdown index 38db8a8004..a8bb071e7e 100644 --- a/openpype/hosts/resolve/README.markdown +++ b/openpype/hosts/resolve/README.markdown @@ -1,4 +1,4 @@ -#### Basic setup +## Basic setup - Actually supported version is up to v18 - install Python 3.6.2 (latest tested v17) or up to 3.9.13 (latest tested on v18) @@ -6,13 +6,13 @@ - Python 3.9.*: open terminal and go to python.exe directory, then `python -m pip install PySide2` - pip install OpenTimelineIO: - Python 3.9.*: open terminal and go to python.exe directory, then `python -m pip install OpenTimelineIO` - - Python 3.6: open terminal and go to python.exe directory, then `python -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move built files from `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `/Lib/site-packages/opentimelineio/`. I was building it on Win10 machine with Visual Studio Community 2019 and + - Python 3.6: open terminal and go to python.exe directory, then `python -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move built files from `./Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `./Lib/site-packages/opentimelineio/`. I was building it on Win10 machine with Visual Studio Community 2019 and ![image](https://user-images.githubusercontent.com/40640033/102792588-ffcb1c80-43a8-11eb-9c6b-bf2114ed578e.png) with installed CMake in PATH. - make sure Resolve Fusion (Fusion Tab/menu/Fusion/Fusion Settings) is set to Python 3.6 ![image](https://user-images.githubusercontent.com/40640033/102631545-280b0f00-414e-11eb-89fc-98ac268d209d.png) - Open OpenPype **Tray/Admin/Studio settings** > `applications/resolve/environment` and add Python3 path to `RESOLVE_PYTHON3_HOME` platform related. 
-#### Editorial setup +## Editorial setup This is how it looks on my testing project timeline ![image](https://user-images.githubusercontent.com/40640033/102637638-96ec6600-4156-11eb-9656-6e8e3ce4baf8.png) From e5a36df6606cee975fff95f0c2a8e87287ae4a90 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Oct 2022 15:22:08 +0200 Subject: [PATCH 1629/2550] resolve: rename Inventory to Manager --- openpype/hosts/resolve/api/menu.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/resolve/api/menu.py b/openpype/hosts/resolve/api/menu.py index 2c7678ee5b..86b292105a 100644 --- a/openpype/hosts/resolve/api/menu.py +++ b/openpype/hosts/resolve/api/menu.py @@ -54,15 +54,15 @@ class OpenPypeMenu(QtWidgets.QWidget): ) self.setWindowTitle("OpenPype") - workfiles_btn = QtWidgets.QPushButton("Workfiles...", self) - create_btn = QtWidgets.QPushButton("Create...", self) - publish_btn = QtWidgets.QPushButton("Publish...", self) - load_btn = QtWidgets.QPushButton("Load...", self) - inventory_btn = QtWidgets.QPushButton("Inventory...", self) - subsetm_btn = QtWidgets.QPushButton("Subset Manager...", self) - libload_btn = QtWidgets.QPushButton("Library...", self) + workfiles_btn = QtWidgets.QPushButton("Workfiles ...", self) + create_btn = QtWidgets.QPushButton("Create ...", self) + publish_btn = QtWidgets.QPushButton("Publish ...", self) + load_btn = QtWidgets.QPushButton("Load ...", self) + inventory_btn = QtWidgets.QPushButton("Manager ...", self) + subsetm_btn = QtWidgets.QPushButton("Subset Manager ...", self) + libload_btn = QtWidgets.QPushButton("Library ...", self) experimental_btn = QtWidgets.QPushButton( - "Experimental tools...", self + "Experimental tools ...", self ) # rename_btn = QtWidgets.QPushButton("Rename", self) # set_colorspace_btn = QtWidgets.QPushButton( From 45536f613d6a9414830cf0d8dff99296b82c570a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 14 Oct 2022 16:28:48 +0200 Subject: [PATCH 1630/2550] :sparkles: add originalBasename data to Tray Publisher --- .../traypublisher/plugins/publish/collect_simple_instances.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index c0ae694c3c..0ccef3f375 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -1,5 +1,6 @@ import os import tempfile +from pathlib import Path import clique import pyblish.api @@ -72,6 +73,8 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): instance.data["source"] = source instance.data["sourceFilepaths"] = list(set(source_filepaths)) + instance.data["originalBasename"] = Path( + instance.data["sourceFilepaths"][0]).stem self.log.debug( ( From 4e019e1eef3b18a88ab4ba3748fa76aa8e6faf44 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 14 Oct 2022 16:55:58 +0200 Subject: [PATCH 1631/2550] fix typo --- openpype/tools/publisher/widgets/validations_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index f35d286e88..a24797de3e 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -651,7 +651,7 @@ class ValidationsWidget(QtWidgets.QFrame): 
self.set_errors(validation_errors) return - if self._contoller.publish_has_finished: + if self._controller.publish_has_finished: self._set_current_widget(self._publish_stop_ok_widget) return From 5009a37b73412c76f3d15d8a9a5ec60911af7149 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 14 Oct 2022 18:57:37 +0200 Subject: [PATCH 1632/2550] return instance ids instead of instance objects --- .../publisher/widgets/card_view_widgets.py | 2 +- .../publisher/widgets/list_view_widgets.py | 17 +++++++--------- .../publisher/widgets/overview_widget.py | 20 ++++++++++--------- 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 2be37ea44c..4c7d6ce109 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -541,6 +541,6 @@ class InstanceCardView(AbstractInstanceView): context_selected = True elif selected_widget is not None: - instances.append(selected_widget.instance) + instances.append(selected_widget.instance.id) return instances, context_selected diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 17b50b764a..6d90e63683 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -723,13 +723,13 @@ class InstanceListView(AbstractInstanceView): widget.update_instance_values() def _on_active_changed(self, changed_instance_id, new_value): - selected_instances, _ = self.get_selected_items() + selected_instance_ids, _ = self.get_selected_items() selected_ids = set() found = False - for instance in selected_instances: - selected_ids.add(instance.id) - if not found and instance.id == changed_instance_id: + for instance_id in selected_instance_ids: + selected_ids.add(instance_id) + if not found and instance_id == changed_instance_id: found = True if not found: @@ -767,9 +767,8 @@ class InstanceListView(AbstractInstanceView): tuple: Selected instance ids and boolean if context is selected. """ - instances = [] + instance_ids = [] context_selected = False - instances_by_id = self._controller.instances for index in self._instance_view.selectionModel().selectedIndexes(): instance_id = index.data(INSTANCE_ID_ROLE) @@ -777,11 +776,9 @@ class InstanceListView(AbstractInstanceView): context_selected = True elif instance_id is not None: - instance = instances_by_id.get(instance_id) - if instance: - instances.append(instance) + instance_ids.append(instance_id) - return instances, context_selected + return instance_ids, context_selected def _on_selection_change(self, *_args): self.selection_changed.emit() diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 3c67e6298e..8759d2ad49 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -201,16 +201,16 @@ class OverviewWidget(QtWidgets.QFrame): self.create_requested.emit() def _on_delete_clicked(self): - instances, _ = self.get_selected_items() + instance_ids, _ = self.get_selected_items() # Ask user if he really wants to remove instances dialog = QtWidgets.QMessageBox(self) dialog.setIcon(QtWidgets.QMessageBox.Question) dialog.setWindowTitle("Are you sure?") - if len(instances) > 1: + if len(instance_ids) > 1: msg = ( "Do you really want to remove {} instances?" 
- ).format(len(instances)) + ).format(len(instance_ids)) else: msg = ( "Do you really want to remove the instance?" @@ -224,10 +224,7 @@ class OverviewWidget(QtWidgets.QFrame): dialog.exec_() # Skip if OK was not clicked if dialog.result() == QtWidgets.QMessageBox.Ok: - instance_ids = { - instance.id - for instance in instances - } + instance_ids = set(instance_ids) self._controller.remove_instances(instance_ids) def _on_change_view_clicked(self): @@ -238,11 +235,16 @@ class OverviewWidget(QtWidgets.QFrame): if self._refreshing_instances: return - instances, context_selected = self.get_selected_items() + instance_ids, context_selected = self.get_selected_items() # Disable delete button if nothing is selected - self._delete_btn.setEnabled(len(instances) > 0) + self._delete_btn.setEnabled(len(instance_ids) > 0) + instances_by_id = self._controller.instances + instances = [ + instances_by_id[instance_id] + for instance_id in instance_ids + ] self._subset_attributes_widget.set_current_instances( instances, context_selected ) From 804a92384f86df6fded616abf45061f7144761d5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 14 Oct 2022 19:00:08 +0200 Subject: [PATCH 1633/2550] pass type of selection from clicked widget --- .../publisher/widgets/card_view_widgets.py | 40 ++++++++++++++++--- 1 file changed, 35 insertions(+), 5 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 4c7d6ce109..28db844303 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -41,9 +41,26 @@ from ..constants import ( ) +class SelectionType: + def __init__(self, name): + self.name = name + + def __eq__(self, other): + if isinstance(other, SelectionType): + other = other.name + return self.name == other + + +class SelectionTypes: + clear = SelectionType("clear") + extend = SelectionType("extend") + extend_to = SelectionType("extend_to") + + class GroupWidget(QtWidgets.QWidget): """Widget wrapping instances under group.""" - selected = QtCore.Signal(str, str) + + selected = QtCore.Signal(str, str, SelectionType) active_changed = QtCore.Signal() removed_selected = QtCore.Signal() @@ -135,17 +152,21 @@ class GroupWidget(QtWidgets.QWidget): widget = InstanceCardWidget( instance, group_icon, self ) - widget.selected.connect(self.selected) + widget.selected.connect(self._on_widget_selection) widget.active_changed.connect(self.active_changed) self._widgets_by_id[instance.id] = widget self._content_layout.insertWidget(widget_idx, widget) widget_idx += 1 + def _on_widget_selection(self, instance_id, group_id, selection_type): + self.selected.emit(instance_id, group_id, selection_type) + + class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" - selected = QtCore.Signal(str, str) + selected = QtCore.Signal(str, str, SelectionType) # Group identifier of card # - this must be set because if send when mouse is released with card id _group_identifier = None @@ -173,7 +194,16 @@ class CardWidget(BaseClickableFrame): def _mouse_release_callback(self): """Trigger selected signal.""" - self.selected.emit(self._id, self._group_identifier) + + modifiers = QtWidgets.QApplication.keyboardModifiers() + selection_type = SelectionTypes.clear + if bool(modifiers & QtCore.Qt.ShiftModifier): + selection_type = SelectionTypes.extend_to + + elif bool(modifiers & QtCore.Qt.ControlModifier): + selection_type = SelectionTypes.extend + + 
self.selected.emit(self._id, self._group_identifier, selection_type) class ContextCardWidget(CardWidget): @@ -498,7 +528,7 @@ class InstanceCardView(AbstractInstanceView): def _on_active_changed(self): self.active_changed.emit() - def _on_widget_selection(self, instance_id, group_name): + def _on_widget_selection(self, instance_id, group_name, selection_type): self.select_item(instance_id, group_name) def select_item(self, instance_id, group_name): From d20adf201ac7073ed3f4a7aa320ce220c6b34f5f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 14 Oct 2022 19:00:58 +0200 Subject: [PATCH 1634/2550] added additional helper attributes and methods --- .../publisher/widgets/card_view_widgets.py | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 28db844303..80a3bf0fb1 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -89,21 +89,73 @@ class GroupWidget(QtWidgets.QWidget): self._group_icons = group_icons self._widgets_by_id = {} + self._ordered_instance_ids = [] self._label_widget = label_widget self._content_layout = layout + @property + def group_name(self): + """Group which widget represent. + + Returns: + str: Name of group. + """ + + return self._group + + def get_selected_instance_ids(self): + """Selected instance ids. + + Returns: + Set[str]: Instance ids that are selected. + """ + + return { + instance_id + for instance_id, widget in self._widgets_by_id.items() + if widget.is_selected + } + + def get_selected_widgets(self): + """Access to widgets marked as selected. + + Returns: + List[InstanceCardWidget]: Instance widgets that are selected. + """ + + return [ + widget + for instance_id, widget in self._widgets_by_id.items() + if widget.is_selected + ] + + def get_ordered_widgets(self): + """Get instance ids in order as are shown in ui. + + Returns: + List[str]: Instance ids. 
+ """ + + return [ + self._widgets_by_id[instance_id] + for instance_id in self._ordered_instance_ids + ] + def get_widget_by_instance_id(self, instance_id): """Get instance widget by it's id.""" + return self._widgets_by_id.get(instance_id) def update_instance_values(self): """Trigger update on instance widgets.""" + for widget in self._widgets_by_id.values(): widget.update_instance_values() def confirm_remove_instance_id(self, instance_id): """Delete widget by instance id.""" + widget = self._widgets_by_id.pop(instance_id) widget.setVisible(False) self._content_layout.removeWidget(widget) @@ -140,6 +192,7 @@ class GroupWidget(QtWidgets.QWidget): # Sort instances by subset name sorted_subset_names = list(sorted(instances_by_subset_name.keys())) + # Add new instances to widget widget_idx = 1 for subset_names in sorted_subset_names: @@ -158,6 +211,15 @@ class GroupWidget(QtWidgets.QWidget): self._content_layout.insertWidget(widget_idx, widget) widget_idx += 1 + ordered_instance_ids = [] + for idx in range(self._content_layout.count()): + if idx > 0: + item = self._content_layout.itemAt(idx) + widget = item.widget() + if widget is not None: + ordered_instance_ids.append(widget.id) + + self._ordered_instance_ids = ordered_instance_ids def _on_widget_selection(self, instance_id, group_id, selection_type): self.selected.emit(instance_id, group_id, selection_type) @@ -178,6 +240,12 @@ class CardWidget(BaseClickableFrame): self._selected = False self._id = None + @property + def id(self): + """Id of card.""" + + return self._id + @property def is_selected(self): """Is card selected.""" From 0c27f807955ec312fe0628b331b3ba8416be85c9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 14 Oct 2022 19:02:53 +0200 Subject: [PATCH 1635/2550] implemented logic to handle multiselection --- .../publisher/widgets/card_view_widgets.py | 355 +++++++++++++++--- 1 file changed, 308 insertions(+), 47 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 80a3bf0fb1..c0cc3389c7 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -480,11 +480,12 @@ class InstanceCardView(AbstractInstanceView): self._content_layout = content_layout self._content_widget = content_widget - self._widgets_by_group = {} self._context_widget = None + self._widgets_by_group = {} + self._ordered_groups = [] - self._selected_group = None - self._selected_instance_id = None + self._explicitly_selected_instance_ids = [] + self._explicitly_selected_groups = [] self.setSizePolicy( QtWidgets.QSizePolicy.Minimum, @@ -504,21 +505,30 @@ class InstanceCardView(AbstractInstanceView): result.setWidth(width) return result - def _get_selected_widget(self): - if self._selected_instance_id == CONTEXT_ID: - return self._context_widget + def _get_selected_widgets(self): + output = [] + if ( + self._context_widget is not None + and self._context_widget.is_selected + ): + output.append(self._context_widget) - group_widget = self._widgets_by_group.get( - self._selected_group - ) - if group_widget is not None: - widget = group_widget.get_widget_by_instance_id( - self._selected_instance_id - ) - if widget is not None: - return widget + for group_widget in self._widgets_by_group.values(): + for widget in group_widget.get_selected_widgets(): + output.append(widget) + return output - return None + def _get_selected_instance_ids(self): + output = [] + if ( + self._context_widget is not None + and 
self._context_widget.is_selected + ): + output.append(CONTEXT_ID) + + for group_widget in self._widgets_by_group.values(): + output.extend(group_widget.get_selected_instance_ids()) + return output def refresh(self): """Refresh instances in view based on CreatedContext.""" @@ -534,8 +544,6 @@ class InstanceCardView(AbstractInstanceView): self.selection_changed.emit() self._content_layout.insertWidget(0, widget) - self.select_item(CONTEXT_ID, None) - # Prepare instances by group and identifiers by group instances_by_group = collections.defaultdict(list) identifiers_by_group = collections.defaultdict(set) @@ -551,15 +559,17 @@ class InstanceCardView(AbstractInstanceView): if group_name in instances_by_group: continue - if group_name == self._selected_group: - self._on_remove_selected() widget = self._widgets_by_group.pop(group_name) widget.setVisible(False) self._content_layout.removeWidget(widget) widget.deleteLater() + if group_name in self._explicitly_selected_groups: + self._explicitly_selected_groups.remove(group_name) + # Sort groups sorted_group_names = list(sorted(instances_by_group.keys())) + # Keep track of widget indexes # - we start with 1 because Context item as at the top widget_idx = 1 @@ -577,9 +587,6 @@ class InstanceCardView(AbstractInstanceView): ) group_widget.active_changed.connect(self._on_active_changed) group_widget.selected.connect(self._on_widget_selection) - group_widget.removed_selected.connect( - self._on_remove_selected - ) self._content_layout.insertWidget(widget_idx, group_widget) self._widgets_by_group[group_name] = group_widget @@ -588,6 +595,16 @@ class InstanceCardView(AbstractInstanceView): instances_by_group[group_name] ) + ordered_group_names = [""] + for idx in range(self._content_layout.count()): + if idx > 0: + item = self._content_layout.itemAt(idx) + group_widget = item.widget() + if group_widget is not None: + ordered_group_names.append(group_widget.group_name) + + self._ordered_groups = ordered_group_names + def refresh_instance_states(self): """Trigger update of instances on group widgets.""" for widget in self._widgets_by_group.values(): @@ -597,9 +614,6 @@ class InstanceCardView(AbstractInstanceView): self.active_changed.emit() def _on_widget_selection(self, instance_id, group_name, selection_type): - self.select_item(instance_id, group_name) - - def select_item(self, instance_id, group_name): """Select specific item by instance id. 
Pass `CONTEXT_ID` as instance id and empty string as group to select @@ -611,34 +625,281 @@ class InstanceCardView(AbstractInstanceView): group_widget = self._widgets_by_group[group_name] new_widget = group_widget.get_widget_by_instance_id(instance_id) - selected_widget = self._get_selected_widget() - if new_widget is selected_widget: - return - - if selected_widget is not None: - selected_widget.set_selected(False) - - self._selected_instance_id = instance_id - self._selected_group = group_name - if new_widget is not None: - new_widget.set_selected(True) + if selection_type is SelectionTypes.clear: + self._select_item_clear(instance_id, group_name, new_widget) + elif selection_type is SelectionTypes.extend: + self._select_item_extend(instance_id, group_name, new_widget) + elif selection_type is SelectionTypes.extend_to: + self._select_item_extend_to(instance_id, group_name, new_widget) self.selection_changed.emit() - def _on_remove_selected(self): - selected_widget = self._get_selected_widget() - if selected_widget is None: - self._on_widget_selection(CONTEXT_ID, None) + def _select_item_clear(self, instance_id, group_name, new_widget): + """Select specific item by instance id and clear previous selection. + + Pass `CONTEXT_ID` as instance id and empty string as group to select + global context item. + """ + + selected_widgets = self._get_selected_widgets() + for widget in selected_widgets: + if widget.id != instance_id: + widget.set_selected(False) + + self._explicitly_selected_groups = [group_name] + self._explicitly_selected_instance_ids = [instance_id] + + if new_widget is not None: + new_widget.set_selected(True) + + def _select_item_extend(self, instance_id, group_name, new_widget): + """Add/Remove single item to/from current selection. + + If item is already selected the selection is removed. + """ + + self._explicitly_selected_instance_ids = ( + self._get_selected_instance_ids() + ) + if new_widget.is_selected: + self._explicitly_selected_instance_ids.remove(instance_id) + new_widget.set_selected(False) + remove_group = False + if instance_id == CONTEXT_ID: + remove_group = True + else: + group_widget = self._widgets_by_group[group_name] + if not group_widget.get_selected_widgets(): + remove_group = True + + if remove_group: + self._explicitly_selected_groups.remove(group_name) + return + + self._explicitly_selected_instance_ids.append(instance_id) + if group_name in self._explicitly_selected_groups: + self._explicitly_selected_groups.remove(group_name) + self._explicitly_selected_groups.append(group_name) + new_widget.set_selected(True) + + def _select_item_extend_to(self, instance_id, group_name, new_widget): + """Extend selected items to specific instance id. + + This method is handling Shift+click selection of widgets. Selection + is not stored to explicit selection items. That's because user can + shift select again and it should use last explicit selected item as + source item for selection. + + Items selected via this function can get to explicit selection only if + selection is extended by one specific item ('_select_item_extend'). + From that moment the selection is locked to new last explicit selected + item. + + It's required to traverse through group widgets in their UI order and + through their instances in UI order. All explicitly selected items + must not change their selection state during this function. 
Passed + instance id can be above or under last selected item so a start item + and end item must be found to be able know which direction is selection + happening. + """ + + # Start group name (in '_ordered_groups') + start_group = None + # End group name (in '_ordered_groups') + end_group = None + # Instance id of first selected item + start_instance_id = None + # Instance id of last selected item + end_instance_id = None + + # Get previously selected group by explicit selected groups + previous_group = None + if self._explicitly_selected_groups: + previous_group = self._explicitly_selected_groups[-1] + + # Find last explicitly selected instance id + previous_last_selected_id = None + if self._explicitly_selected_instance_ids: + previous_last_selected_id = ( + self._explicitly_selected_instance_ids[-1] + ) + + # If last instance id was not found or available then last selected + # group is also invalid. + # NOTE: This probably never happen? + if previous_last_selected_id is None: + previous_group = None + + # Check if previously selected group is available and find out if + # new instance group is above or under previous selection + # - based on these information are start/end group/instance filled + if previous_group in self._ordered_groups: + new_idx = self._ordered_groups.index(group_name) + prev_idx = self._ordered_groups.index(previous_group) + if new_idx < prev_idx: + start_group = group_name + end_group = previous_group + start_instance_id = instance_id + end_instance_id = previous_last_selected_id + else: + start_group = previous_group + end_group = group_name + start_instance_id = previous_last_selected_id + end_instance_id = instance_id + + # If start group is not set then use context item group name + if start_group is None: + start_group = "" + + # If start instance id is not filled then use context id (similar to + # group) + if start_instance_id is None: + start_instance_id = CONTEXT_ID + + # If end group is not defined then use passed group name + # - this can be happen when previous group was not selected + # - when this happens the selection will probably happen from context + # item to item selected by user + if end_group is None: + end_group = group_name + + # If end instance is not filled then use instance selected by user + if end_instance_id is None: + end_instance_id = instance_id + + # Start and end group are the same + # - a different logic is needed in that case + same_group = start_group == end_group + + # Process known information and change selection of items + passed_start_group = False + passed_end_group = False + # Go through ordered groups (from top to bottom) and change selection + for name in self._ordered_groups: + # Prepare sorted instance widgets + if name == "": + sorted_widgets = [self._context_widget] + else: + group_widget = self._widgets_by_group[name] + sorted_widgets = group_widget.get_ordered_widgets() + + # Change selection based on explicit selection if start group + # was not passed yet + if not passed_start_group: + if name != start_group: + for widget in sorted_widgets: + widget.set_selected( + widget.id in self._explicitly_selected_instance_ids + ) + continue + + # Change selection based on explicit selection if end group + # already passed + if passed_end_group: + for widget in sorted_widgets: + widget.set_selected( + widget.id in self._explicitly_selected_instance_ids + ) + continue + + # Start group is already passed and end group was not yet hit + if same_group: + passed_start_group = True + passed_end_group = True + 
passed_start_instance = False + passed_end_instance = False + for widget in sorted_widgets: + if not passed_start_instance: + if widget.id in (start_instance_id, end_instance_id): + if widget.id != start_instance_id: + # Swap start/end instance if start instance is + # after end + # - fix 'passed_end_instance' check + start_instance_id, end_instance_id = ( + end_instance_id, start_instance_id + ) + passed_start_instance = True + + # Find out if widget should be selected + select = False + if passed_end_instance: + select = False + + elif passed_start_instance: + select = True + + # Check if instance is in explicitly selected items if + # should ont be selected + if ( + not select + and widget.id in self._explicitly_selected_instance_ids + ): + select = True + + widget.set_selected(select) + + if ( + not passed_end_instance + and widget.id == end_instance_id + ): + passed_end_instance = True + + elif name == start_group: + # First group from which selection should start + # - look for start instance first from which the selection + # should happen + passed_start_group = True + passed_start_instance = False + for widget in sorted_widgets: + if widget.id == start_instance_id: + passed_start_instance = True + + select = False + # Check if passed start instance or instance is + # in explicitly selected items to be selected + if ( + passed_start_instance + or widget.id in self._explicitly_selected_instance_ids + ): + select = True + widget.set_selected(select) + + elif name == end_group: + # Last group where selection should happen + # - look for end instance first after which the selection + # should stop + passed_end_group = True + passed_end_instance = False + for widget in sorted_widgets: + select = False + # Check if not yet passed end instance or if instance is + # in explicitly selected items to be selected + if ( + not passed_end_instance + or widget.id in self._explicitly_selected_instance_ids + ): + select = True + + widget.set_selected(select) + + if widget.id == end_instance_id: + passed_end_instance = True + + else: + # Just select everything between start and end group + for widget in sorted_widgets: + widget.set_selected(True) def get_selected_items(self): """Get selected instance ids and context.""" instances = [] - context_selected = False - selected_widget = self._get_selected_widget() - if selected_widget is self._context_widget: - context_selected = True + selected_widgets = self._get_selected_widgets() - elif selected_widget is not None: - instances.append(selected_widget.instance.id) + context_selected = False + for widget in selected_widgets: + if widget is self._context_widget: + context_selected = True + else: + instances.append(widget.id) return instances, context_selected From 1b8dff405a6737c45e62617da2fba8ea4604b308 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 15 Oct 2022 18:31:03 +0200 Subject: [PATCH 1636/2550] add process time to publish report --- openpype/tools/publisher/control.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index da320b1f39..9eff431171 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -3,6 +3,7 @@ import copy import logging import traceback import collections +import time from abc import ABCMeta, abstractmethod, abstractproperty import six @@ -232,15 +233,17 @@ class PublishReport: """Set that current plugin has been skipped.""" self._current_plugin_data["skipped"] = True - def 
add_result(self, result): + def add_result(self, result, process_time): """Handle result of one plugin and it's instance.""" + instance = result["instance"] instance_id = None if instance is not None: instance_id = instance.id self._current_plugin_data["instances_data"].append({ "id": instance_id, - "logs": self._extract_instance_log_items(result) + "logs": self._extract_instance_log_items(result), + "process_time": process_time }) def add_action_result(self, action, result): @@ -2100,9 +2103,11 @@ class PublisherController(BasePublisherController): ) def _process_and_continue(self, plugin, instance): + start = time.time() result = pyblish.plugin.process( plugin, self._publish_context, instance ) + process_time = time.time() - start self._publish_report.add_result(result) From 2787dbd83a1d621ddbcf0372d36fad825acf87d9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 15 Oct 2022 18:32:09 +0200 Subject: [PATCH 1637/2550] add report version to report data --- openpype/tools/publisher/control.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 9eff431171..17db324a68 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -4,6 +4,7 @@ import logging import traceback import collections import time +import uuid from abc import ABCMeta, abstractmethod, abstractproperty import six @@ -293,7 +294,9 @@ class PublishReport: "plugins_data": plugins_data, "instances": instances_details, "context": self._extract_context_data(self._current_context), - "crashed_file_paths": crashed_file_paths + "crashed_file_paths": crashed_file_paths, + "id": str(uuid.uuid4()), + "report_version": "1.0.0" } def _extract_context_data(self, context): From d3e5041379291c899ddbe5a14082915176c5776a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 15 Oct 2022 20:39:02 +0200 Subject: [PATCH 1638/2550] fix not passed argument --- openpype/tools/publisher/control.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 17db324a68..b415644a43 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -2112,7 +2112,7 @@ class PublisherController(BasePublisherController): ) process_time = time.time() - start - self._publish_report.add_result(result) + self._publish_report.add_result(result, process_time) exception = result.get("error") if exception: From a877176b39836b0d048bb0dc235225c6949008b5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Oct 2022 00:26:08 +0200 Subject: [PATCH 1639/2550] don't crash in collection when files are not filled --- .../plugins/publish/collect_simple_instances.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index d91694ef69..7035a61d7b 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -70,11 +70,17 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): repre_names, representation_files_mapping ) - + source_filepaths = list(set(source_filepaths)) instance.data["source"] = source - instance.data["sourceFilepaths"] = list(set(source_filepaths)) - instance.data["originalBasename"] = Path( - 
instance.data["sourceFilepaths"][0]).stem + instance.data["sourceFilepaths"] = source_filepaths + + # NOTE: Missing filepaths should not cause crashes (at least not here) + # - if filepaths are required they should crash on validation + if source_filepaths: + # NOTE: Original basename is not handling sequences + # - we should maybe not fill the key when sequence is used? + origin_basename = Path(source_filepaths[0]).stem + instance.data["originalBasename"] = origin_basename self.log.debug( ( From 0df15975b16aabac45a61f2f025956180537c2b2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Oct 2022 00:34:02 +0200 Subject: [PATCH 1640/2550] fix unwanted zooming if control was released in different widget --- .../publisher/publish_report_viewer/widgets.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/openpype/tools/publisher/publish_report_viewer/widgets.py b/openpype/tools/publisher/publish_report_viewer/widgets.py index dc82448495..4770bdcc65 100644 --- a/openpype/tools/publisher/publish_report_viewer/widgets.py +++ b/openpype/tools/publisher/publish_report_viewer/widgets.py @@ -148,12 +148,12 @@ class ZoomPlainText(QtWidgets.QPlainTextEdit): anim_timer.timeout.connect(self._scaling_callback) self._anim_timer = anim_timer - self._zoom_enabled = False self._scheduled_scalings = 0 self._point_size = None def wheelEvent(self, event): - if not self._zoom_enabled: + modifiers = QtWidgets.QApplication.keyboardModifiers() + if modifiers != QtCore.Qt.ControlModifier: super(ZoomPlainText, self).wheelEvent(event) return @@ -189,16 +189,6 @@ class ZoomPlainText(QtWidgets.QPlainTextEdit): else: self._scheduled_scalings += 1 - def keyPressEvent(self, event): - if event.key() == QtCore.Qt.Key_Control: - self._zoom_enabled = True - super(ZoomPlainText, self).keyPressEvent(event) - - def keyReleaseEvent(self, event): - if event.key() == QtCore.Qt.Key_Control: - self._zoom_enabled = False - super(ZoomPlainText, self).keyReleaseEvent(event) - class DetailsWidget(QtWidgets.QWidget): def __init__(self, parent): From 3afee7370a0c44baa4c611250be6c9c176c6dd82 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Oct 2022 00:34:16 +0200 Subject: [PATCH 1641/2550] define min/max of text sizes --- .../publish_report_viewer/widgets.py | 32 +++++++++++++++---- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/openpype/tools/publisher/publish_report_viewer/widgets.py b/openpype/tools/publisher/publish_report_viewer/widgets.py index 4770bdcc65..ff388fb277 100644 --- a/openpype/tools/publisher/publish_report_viewer/widgets.py +++ b/openpype/tools/publisher/publish_report_viewer/widgets.py @@ -139,6 +139,9 @@ class PluginLoadReportWidget(QtWidgets.QWidget): class ZoomPlainText(QtWidgets.QPlainTextEdit): + min_point_size = 1.0 + max_point_size = 200.0 + def __init__(self, *args, **kwargs): super(ZoomPlainText, self).__init__(*args, **kwargs) @@ -172,19 +175,36 @@ class ZoomPlainText(QtWidgets.QPlainTextEdit): factor = 1.0 + (self._scheduled_scalings / 300) font = self.font() + if self._point_size is None: - self._point_size = font.pointSizeF() + point_size = font.pointSizeF() + else: + point_size = self._point_size - self._point_size *= factor - if self._point_size < 1: - self._point_size = 1.0 + point_size *= factor + min_hit = False + max_hit = False + if point_size < self.min_point_size: + point_size = self.min_point_size + min_hit = True + elif point_size > self.max_point_size: + point_size = self.max_point_size + max_hit = True - font.setPointSizeF(self._point_size) + 
self._point_size = point_size + + font.setPointSizeF(point_size) # Using 'self.setFont(font)' would not be propagated when stylesheets # are applied on this widget self.setStyleSheet("font-size: {}pt".format(font.pointSize())) - if self._scheduled_scalings > 0: + if ( + (max_hit and self._scheduled_scalings > 0) + or (min_hit and self._scheduled_scalings < 0) + ): + self._scheduled_scalings = 0 + + elif self._scheduled_scalings > 0: self._scheduled_scalings -= 1 else: self._scheduled_scalings += 1 From 890b77214acd24482501ce19722f6701f6dac537 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 10:49:39 +0200 Subject: [PATCH 1642/2550] import lib content from lib directly --- openpype/hosts/flame/api/workio.py | 2 +- openpype/hosts/flame/hooks/pre_flame_setup.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/flame/api/workio.py b/openpype/hosts/flame/api/workio.py index 0c96c0752a..e49321c75a 100644 --- a/openpype/hosts/flame/api/workio.py +++ b/openpype/hosts/flame/api/workio.py @@ -1,7 +1,7 @@ """Host API required Work Files tool""" import os -from openpype.api import Logger +from openpype.lib import Logger # from .. import ( # get_project_manager, # get_current_project diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index f0fdaa86ba..713daf1031 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -3,16 +3,17 @@ import json import tempfile import contextlib import socket +from pprint import pformat + from openpype.lib import ( PreLaunchHook, - get_openpype_username + get_openpype_username, + run_subprocess, ) from openpype.lib.applications import ( ApplicationLaunchFailed ) from openpype.hosts import flame as opflame -import openpype -from pprint import pformat class FlamePrelaunch(PreLaunchHook): @@ -127,7 +128,6 @@ class FlamePrelaunch(PreLaunchHook): except OSError as exc: self.log.warning("Not able to open files: {}".format(exc)) - def _get_flame_fps(self, fps_num): fps_table = { float(23.976): "23.976 fps", @@ -179,7 +179,7 @@ class FlamePrelaunch(PreLaunchHook): "env": self.launch_context.env } - openpype.api.run_subprocess(args, **process_kwargs) + run_subprocess(args, **process_kwargs) # process returned json file to pass launch args return_json_data = open(tmp_json_path).read() From 49190b9f7876a9ed86136c6004dc671288c8e031 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 12:19:26 +0200 Subject: [PATCH 1643/2550] keep selectio between instance views --- .../publisher/widgets/card_view_widgets.py | 37 ++++++ .../publisher/widgets/list_view_widgets.py | 119 +++++++++++++++--- .../publisher/widgets/overview_widget.py | 10 +- openpype/tools/publisher/widgets/widgets.py | 15 +++ 4 files changed, 159 insertions(+), 22 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index c0cc3389c7..5daf8059b0 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -903,3 +903,40 @@ class InstanceCardView(AbstractInstanceView): instances.append(widget.id) return instances, context_selected + + def set_selected_items(self, instance_ids, context_selected): + s_instance_ids = set(instance_ids) + cur_ids, cur_context = self.get_selected_items() + if ( + set(cur_ids) == s_instance_ids + and cur_context == context_selected + ): + return + + selected_groups 
= [] + selected_instances = [] + if context_selected: + selected_groups.append("") + selected_instances.append(CONTEXT_ID) + + self._context_widget.set_selected(context_selected) + + for group_name in self._ordered_groups: + if group_name == "": + continue + + group_widget = self._widgets_by_group[group_name] + group_selected = False + for widget in group_widget.get_ordered_widgets(): + select = False + if widget.id in s_instance_ids: + selected_instances.append(widget.id) + group_selected = True + select = True + widget.set_selected(select) + + if group_selected: + selected_groups.append(group_name) + + self._explicitly_selected_groups = selected_groups + self._explicitly_selected_instance_ids = selected_instances diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 6d90e63683..c329ca0e8c 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -760,26 +760,6 @@ class InstanceListView(AbstractInstanceView): if changed_ids: self.active_changed.emit() - def get_selected_items(self): - """Get selected instance ids and context selection. - - Returns: - tuple: Selected instance ids and boolean if context - is selected. - """ - instance_ids = [] - context_selected = False - - for index in self._instance_view.selectionModel().selectedIndexes(): - instance_id = index.data(INSTANCE_ID_ROLE) - if not context_selected and instance_id == CONTEXT_ID: - context_selected = True - - elif instance_id is not None: - instance_ids.append(instance_id) - - return instance_ids, context_selected - def _on_selection_change(self, *_args): self.selection_changed.emit() @@ -819,3 +799,102 @@ class InstanceListView(AbstractInstanceView): proxy_index = self._proxy_model.mapFromSource(group_item.index()) if not self._instance_view.isExpanded(proxy_index): self._instance_view.expand(proxy_index) + + def get_selected_items(self): + """Get selected instance ids and context selection. + + Returns: + tuple: Selected instance ids and boolean if context + is selected. 
+ """ + instance_ids = [] + context_selected = False + + for index in self._instance_view.selectionModel().selectedIndexes(): + instance_id = index.data(INSTANCE_ID_ROLE) + if not context_selected and instance_id == CONTEXT_ID: + context_selected = True + + elif instance_id is not None: + instance_ids.append(instance_id) + + return instance_ids, context_selected + + def set_selected_items(self, instance_ids, context_selected): + s_instance_ids = set(instance_ids) + cur_ids, cur_context = self.get_selected_items() + if ( + set(cur_ids) == s_instance_ids + and cur_context == context_selected + ): + return + + view = self._instance_view + src_model = self._instance_model + proxy_model = self._proxy_model + + select_indexes = [] + + select_queue = collections.deque() + select_queue.append( + (src_model.invisibleRootItem(), []) + ) + while select_queue: + queue_item = select_queue.popleft() + item, parent_items = queue_item + + if item.hasChildren(): + new_parent_items = list(parent_items) + new_parent_items.append(item) + for row in range(item.rowCount()): + select_queue.append( + (item.child(row), list(new_parent_items)) + ) + + instance_id = item.data(INSTANCE_ID_ROLE) + if not instance_id: + continue + + if instance_id in s_instance_ids: + select_indexes.append(item.index()) + for parent_item in parent_items: + index = parent_item.index() + proxy_index = proxy_model.mapFromSource(index) + if not view.isExpanded(proxy_index): + view.expand(proxy_index) + + elif context_selected and instance_id == CONTEXT_ID: + select_indexes.append(item.index()) + + selection_model = view.selectionModel() + if not select_indexes: + selection_model.clear() + return + + if len(select_indexes) == 1: + proxy_index = proxy_model.mapFromSource(select_indexes[0]) + selection_model.setCurrentIndex( + proxy_index, + selection_model.ClearAndSelect | selection_model.Rows + ) + return + + first_index = proxy_model.mapFromSource(select_indexes.pop(0)) + last_index = proxy_model.mapFromSource(select_indexes.pop(-1)) + + selection_model.setCurrentIndex( + first_index, + selection_model.ClearAndSelect | selection_model.Rows + ) + + for index in select_indexes: + proxy_index = proxy_model.mapFromSource(index) + selection_model.select( + proxy_index, + selection_model.Select | selection_model.Rows + ) + + selection_model.setCurrentIndex( + last_index, + selection_model.Select | selection_model.Rows + ) diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 8759d2ad49..5bd3017c2a 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -321,15 +321,21 @@ class OverviewWidget(QtWidgets.QFrame): def _change_view_type(self): idx = self._subset_views_layout.currentIndex() new_idx = (idx + 1) % self._subset_views_layout.count() - self._subset_views_layout.setCurrentIndex(new_idx) - new_view = self._subset_views_layout.currentWidget() + old_view = self._subset_views_layout.currentWidget() + new_view = self._subset_views_layout.widget(new_idx) + if not new_view.refreshed: new_view.refresh() new_view.set_refreshed(True) else: new_view.refresh_instance_states() + instance_ids, context_selected = old_view.get_selected_items() + new_view.set_selected_items(instance_ids, context_selected) + + self._subset_views_layout.setCurrentIndex(new_idx) + self._on_subset_change() def _refresh_instances(self): diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py 
index 536650e209..ddbe1eb6b7 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -306,10 +306,25 @@ class AbstractInstanceView(QtWidgets.QWidget): Example: When delete button is clicked to know what should be deleted. """ + raise NotImplementedError(( "{} Method 'get_selected_items' is not implemented." ).format(self.__class__.__name__)) + def set_selected_items(self, instance_ids, context_selected): + """Change selection for instances and context. + + Used to applying selection from one view to other. + + Args: + instance_ids (List[str]): Selected instance ids. + context_selected (bool): Context is selected. + """ + + raise NotImplementedError(( + "{} Method 'set_selected_items' is not implemented." + ).format(self.__class__.__name__)) + class ClickableLineEdit(QtWidgets.QLineEdit): """QLineEdit capturing left mouse click. From bacbff0262e8306ef16ccf3d1e826e3e1cb2728f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 15:19:48 +0200 Subject: [PATCH 1644/2550] create context has callbacks for reset preparation and finalization --- openpype/pipeline/create/context.py | 15 +++++++++++++++ openpype/tools/publisher/control.py | 4 ++++ 2 files changed, 19 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index c1cf4dab44..1f3c32f0a7 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -860,6 +860,9 @@ class CreateContext: All changes will be lost if were not saved explicitely. """ + + self.reset_preparation() + self.reset_avalon_context() self.reset_plugins(discover_publish_plugins) self.reset_context_data() @@ -868,6 +871,18 @@ class CreateContext: self.reset_instances() self.execute_autocreators() + self.reset_finalization() + + def reset_preparation(self): + """Prepare attributes that must be prepared/cleaned before reset.""" + + pass + + def reset_finalization(self): + """Cleanup of attributes after reset.""" + + pass + def reset_avalon_context(self): """Give ability to reset avalon context. 
diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index b4c89f221f..19e28cca4b 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -552,6 +552,8 @@ class PublisherController: self.save_changes() + self.create_context.reset_preparation() + # Reset avalon context self.create_context.reset_avalon_context() @@ -560,6 +562,8 @@ class PublisherController: self._reset_publish() self._reset_instances() + self.create_context.reset_finalization() + self.emit_card_message("Refreshed..") def _reset_plugins(self): From e0bb8c0469d50273ad041280b786380d98de080c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 15:20:06 +0200 Subject: [PATCH 1645/2550] context can handle shared data for collection phase --- openpype/pipeline/create/context.py | 68 ++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 1f3c32f0a7..02398818d9 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -27,6 +27,11 @@ from .creator_plugins import ( UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) +class UnavailableSharedData(Exception): + """Shared data are not available at the moment when are accessed.""" + pass + + class ImmutableKeyError(TypeError): """Accessed key is immutable so does not allow changes or removements.""" @@ -809,6 +814,9 @@ class CreateContext: self._bulk_counter = 0 self._bulk_instances_to_process = [] + # Shared data across creators during collection phase + self._collection_shared_data = None + # Trigger reset if was enabled if reset: self.reset(discover_publish_plugins) @@ -877,11 +885,15 @@ class CreateContext: """Prepare attributes that must be prepared/cleaned before reset.""" pass + # Give ability to store shared data for collection phase + self._collection_shared_data = {} def reset_finalization(self): """Cleanup of attributes after reset.""" pass + # Stop access to collection shared data + self._collection_shared_data = None def reset_avalon_context(self): """Give ability to reset avalon context. @@ -991,7 +1003,8 @@ class CreateContext: and creator_class.host_name != self.host_name ): self.log.info(( - "Creator's host name is not supported for current host {}" + "Creator's host name \"{}\"" + " is not supported for current host \"{}\"" ).format(creator_class.host_name, self.host_name)) continue @@ -1266,3 +1279,56 @@ class CreateContext: if not plugin.__instanceEnabled__: plugins.append(plugin) return plugins + + def _validate_collection_shared_data(self): + if self._collection_shared_data is None: + raise UnavailableSharedData( + "Accessed Collection shared data out of collection phase" + ) + + def has_collection_shared_data(self, key): + """Check if collection shared data are set. + + Args: + key (str): Key under which are shared data stored. + + Retruns: + bool: Key is already set. + + Raises: + UnavailableSharedData: When called out of collection phase. + """ + + self._validate_collection_shared_data() + return key in self._collection_shared_data + + def get_collection_shared_data(self, key, default=None): + """Receive shared data during collection phase. + + Args: + key (str): Key under which are shared data stored. + default (Any): Default value if key is not set. + + Returns: + Any: Value stored under the key. + + Raises: + UnavailableSharedData: When called out of collection phase. 
+ """ + + self._validate_collection_shared_data() + return self._collection_shared_data.get(key, default) + + def set_collection_shared_data(self, key, value): + """Store a value under collection shared data. + + Args: + key (str): Key under which will shared data be stored. + value (Any): Value to store. + + Raises: + UnavailableSharedData: When called out of collection phase. + """ + + self._validate_collection_shared_data() + self._collection_shared_data[key] = value From 2ed383c4768571436df3f2b3b2245d55ccdfdc6b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 15:20:24 +0200 Subject: [PATCH 1646/2550] added wrappers for access to shared data in create plugins --- openpype/pipeline/create/creator_plugins.py | 36 +++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 05ba8902aa..761054fbd5 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -6,6 +6,7 @@ from abc import ( abstractmethod, abstractproperty ) + import six from openpype.settings import get_system_settings, get_project_settings @@ -323,6 +324,41 @@ class BaseCreator: return self.instance_attr_defs + def has_collection_shared_data(self, key): + """Check if collection shared data are set. + + Args: + key (str): Key under which are shared data stored. + + Retruns: + bool: Key is already set. + """ + + return self.create_context.has_collection_shared_data(key) + + def get_collection_shared_data(self, key, default=None): + """Receive shared data during collection phase. + + Args: + key (str): Key under which are shared data stored. + default (Any): Default value if key is not set. + + Returns: + Any: Value stored under the key. + """ + + return self.create_context.get_collection_shared_data(key, default) + + def set_collection_shared_data(self, key, value): + """Store a value under collection shared data. + + Args: + key (str): Key under which will shared data be stored. + value (Any): Value to store. + """ + + return self.create_context.set_collection_shared_data(key, value) + class Creator(BaseCreator): """Creator that has more information for artist to show in UI. 
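The two commits above add the creator-side wrappers for collection shared data, and the context commits before them own the store and its lifecycle. As a point of reference, here is a minimal, self-contained sketch of the pattern they enable: several creators reusing one expensive host query per reset. The `SharedDataStore` class, `list_scene_instances` and the `example.shared.instances` key are illustrative stand-ins rather than OpenPype API, and the plain `RuntimeError` stands in for `UnavailableSharedData`.

```python
class SharedDataStore(object):
    """Stand-in for the collection shared data owned by CreateContext."""

    def __init__(self):
        self._data = None

    def reset_preparation(self):
        # Shared data exist only for the duration of the collection phase
        self._data = {}

    def reset_finalization(self):
        # Accessing the data after the reset finished should fail loudly
        self._data = None

    def _validate(self):
        if self._data is None:
            # The real context raises 'UnavailableSharedData' here
            raise RuntimeError("Accessed shared data out of collection phase")

    def has_collection_shared_data(self, key):
        self._validate()
        return key in self._data

    def get_collection_shared_data(self, key, default=None):
        self._validate()
        return self._data.get(key, default)

    def set_collection_shared_data(self, key, value):
        self._validate()
        self._data[key] = value


def list_scene_instances():
    """Hypothetical expensive host query, run at most once per reset."""
    print("querying host scene...")
    return [
        {"creator_identifier": "creator.a"},
        {"creator_identifier": "creator.b"},
    ]


def cached_instances(store):
    # Every creator uses the same key, so the query runs only once
    key = "example.shared.instances"
    if not store.has_collection_shared_data(key):
        store.set_collection_shared_data(key, list_scene_instances())
    return store.get_collection_shared_data(key)


store = SharedDataStore()
store.reset_preparation()
first = [i for i in cached_instances(store)
         if i["creator_identifier"] == "creator.a"]
second = [i for i in cached_instances(store)
          if i["creator_identifier"] == "creator.b"]
store.reset_finalization()
print(first, second)
```

The point of the preparation/finalization bracket is that any access outside the collection phase fails loudly instead of silently reading stale data.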
From c410ee662b8c4c81fb147aca90ebd9b963f2b934 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 17 Oct 2022 21:24:17 +0800 Subject: [PATCH 1647/2550] clean up --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 3e3a61fc8f..1e57a7baeb 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -33,7 +33,7 @@ }, "RenderSettings": { "apply_render_settings": true, - "default_render_image_folder": "renders", + "default_render_image_folder": "", "aov_separator": "underscore", "reset_current_frame": false, "arnold_renderer": { From 3f01d008c59205136d18675068f4242def604b2f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 15:26:38 +0200 Subject: [PATCH 1648/2550] added recommendation --- openpype/pipeline/create/context.py | 3 +++ openpype/pipeline/create/creator_plugins.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 02398818d9..613eaa2865 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1322,6 +1322,9 @@ class CreateContext: def set_collection_shared_data(self, key, value): """Store a value under collection shared data. + It is highly recommended to use very specific keys as creators may + clash each other if simple keys are used. + Args: key (str): Key under which will shared data be stored. value (Any): Value to store. diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 761054fbd5..343a416872 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -352,6 +352,9 @@ class BaseCreator: def set_collection_shared_data(self, key, value): """Store a value under collection shared data. + It is highly recommended to use very specific keys as creators may + clash each other if simple keys are used. + Args: key (str): Key under which will shared data be stored. value (Any): Value to store. From 0aefb39acb85b4f8dc0de3904b3613c3762d8c2c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 15:31:07 +0200 Subject: [PATCH 1649/2550] cache instances in shared data in tray publisher --- openpype/hosts/traypublisher/api/plugin.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 89c25389cb..1e592e786d 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -17,11 +17,27 @@ from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS +def _cache_and_get_instances(creator): + """Cache instances in shared data. + + Args: + creator (Creator): Plugin which would like to get instances from host. + + Returns: + List[Dict[str, Any]]: Cached instances list from host implementation. 
+ """ + + shared_key = "openpype.traypublisher.instances" + if not creator.has_collection_shared_data(shared_key): + creator.set_collection_shared_data(shared_key, list_instances()) + return creator.get_collection_shared_data(shared_key) + + class HiddenTrayPublishCreator(HiddenCreator): host_name = "traypublisher" def collect_instances(self): - for instance_data in list_instances(): + for instance_data in _cache_and_get_instances(): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: instance = CreatedInstance.from_existing( @@ -58,7 +74,7 @@ class TrayPublishCreator(Creator): host_name = "traypublisher" def collect_instances(self): - for instance_data in list_instances(): + for instance_data in _cache_and_get_instances(): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: instance = CreatedInstance.from_existing( From aa6de1cfeba7338212c4043a3674fe50f6ecab90 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 16:51:15 +0200 Subject: [PATCH 1650/2550] renamed 'has_collection_shared_data' to 'collection_shared_data_contains' --- openpype/hosts/traypublisher/api/plugin.py | 2 +- openpype/pipeline/create/context.py | 2 +- openpype/pipeline/create/creator_plugins.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 1e592e786d..0f519e3c32 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -28,7 +28,7 @@ def _cache_and_get_instances(creator): """ shared_key = "openpype.traypublisher.instances" - if not creator.has_collection_shared_data(shared_key): + if not creator.collection_shared_data_contains(shared_key): creator.set_collection_shared_data(shared_key, list_instances()) return creator.get_collection_shared_data(shared_key) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 613eaa2865..298eacecb5 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1286,7 +1286,7 @@ class CreateContext: "Accessed Collection shared data out of collection phase" ) - def has_collection_shared_data(self, key): + def collection_shared_data_contains(self, key): """Check if collection shared data are set. Args: diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 343a416872..e5018c395e 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -324,7 +324,7 @@ class BaseCreator: return self.instance_attr_defs - def has_collection_shared_data(self, key): + def collection_shared_data_contains(self, key): """Check if collection shared data are set. Args: @@ -334,7 +334,7 @@ class BaseCreator: bool: Key is already set. """ - return self.create_context.has_collection_shared_data(key) + return self.create_context.collection_shared_data_contains(key) def get_collection_shared_data(self, key, default=None): """Receive shared data during collection phase. 
From ba2cb2d11d7dbc0384265071a9d464b68a8813f8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 17:00:44 +0200 Subject: [PATCH 1651/2550] add information about shared data to documentation --- website/docs/dev_publishing.md | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/website/docs/dev_publishing.md b/website/docs/dev_publishing.md index 7a6082a517..5f30f7f9c8 100644 --- a/website/docs/dev_publishing.md +++ b/website/docs/dev_publishing.md @@ -47,10 +47,14 @@ Context discovers creator and publish plugins. Trigger collections of existing i Creator plugins can call **creator_adds_instance** or **creator_removed_instance** to add/remove instances but these methods are not meant to be called directly out of the creator. The reason is that it is the creator's responsibility to remove metadata or decide if it should remove the instance. -#### Required functions in host implementation -Host implementation **must** implement **get_context_data** and **update_context_data**. These two functions are needed to store metadata that are not related to any instance but are needed for Creating and publishing process. Right now only data about enabled/disabled optional publish plugins is stored there. When data is not stored and loaded properly, reset of publishing will cause that they will be set to default value. Context data also parsed to json string similarly as instance data. +During reset are re-cached Creator plugins, re-collected instances, refreshed host context and more. Object of `CreateContext` supply shared data during the reset. They can be used by creators to share same data needed during collection phase or during creation for autocreators. -There are also few optional functions. For UI purposes it is possible to implement **get_context_title** which can return a string shown in UI as a title. Output string may contain html tags. It is recommended to return context path (it will be created function this purposes) in this order `"{project name}/{asset hierarchy}/{asset name}/{task name}"`. +#### Required functions in host implementation +It is recommended to use `HostBase` class (`from openpype.host import HostBase`) as base for host implementation with combination of `IPublishHost` interface (`from openpype.host import IPublishHost`). These abstract classes should guide you to fill missing attributes and methods. + +To sum them and in case host implementation is inheriting `HostBase` the implementation **must** implement **get_context_data** and **update_context_data**. These two functions are needed to store metadata that are not related to any instance but are needed for Creating and publishing process. Right now only data about enabled/disabled optional publish plugins is stored there. When data is not stored and loaded properly, reset of publishing will cause that they will be set to default value. Context data also parsed to json string similarly as instance data. + +There are also few optional functions. For UI purposes it is possible to implement **get_context_title** which can return a string shown in UI as a title. Output string may contain html tags. It is recommended to return context path (it will be created function this purposes) in this order `"{project name}/{asset hierarchy}/{asset name}/{task name}"` (this is default implementation in `HostBase`). Another optional function is **get_current_context**. 
This function is handy in hosts where it is possible to open multiple workfiles in one process so using global context variables is not relevant because artists can switch between opened workfiles without being acknowledged. When a function is not implemented or won't return the right keys the global context is used. ```json @@ -68,6 +72,12 @@ Main responsibility of create plugin is to create, update, collect and remove in #### *BaseCreator* Base implementation of creator plugin. It is not recommended to use this class as base for production plugins but rather use one of **HiddenCreator**, **AutoCreator** and **Creator** variants. +**Access to shared data** +Functions to work with "Collection shared data" can be used during reset phase of `CreateContext`. Creators can cache there data that are common for them. For example list of nodes in scene. Methods are implemented on `CreateContext` but their usage is primarily for Create plugins as nothing else should use it. +- **`collection_shared_data_contains`** - Check if shared data already has set a key. +- **`get_collection_shared_data`** - Receive value of shared data by a key. +- **`set_collection_shared_data`** - Set or update value of shared data key. + **Abstractions** - **`family`** (class attr) - Tells what kind of instance will be created. ```python From 1ecc673c6ccf3abeb6acdf2529a617daf447a51e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 17:15:51 +0200 Subject: [PATCH 1652/2550] error message fix --- openpype/pipeline/create/context.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index a35541f339..4ec6d7bdad 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1096,7 +1096,8 @@ class CreateContext: and creator_class.host_name != self.host_name ): self.log.info(( - "Creator's host name is not supported for current host {}" + "Creator's host name \"{}\"" + " is not supported for current host \"{}\"" ).format(creator_class.host_name, self.host_name)) continue From 495b5479140af18f5bfbd8342b2ba20132dc9888 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 17:21:29 +0200 Subject: [PATCH 1653/2550] fix args order --- openpype/tools/utils/host_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index eababfee32..046dcbdf6a 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -287,7 +287,7 @@ class HostToolsHelper: def show_publisher_tool(self, parent=None, controller=None): with qt_app_context(): - dialog = self.get_publisher_tool(controller, parent) + dialog = self.get_publisher_tool(parent, controller) dialog.show() dialog.raise_() From d1bd6943167a860d1fdd2584771ce16b665c75b0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 17:29:54 +0200 Subject: [PATCH 1654/2550] disable sequences in mov fielpaths input --- .../hosts/traypublisher/plugins/create/create_movie_batch.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py index abe29d7473..cf25a37918 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py @@ -188,6 +188,7 @@ class BatchMovieCreator(TrayPublishCreator): folders=False, 
single_item=False, extensions=self.extensions, + allow_sequences=False, label="Filepath" ), BoolDef( From 609f9f12851dfc775edd04344a2c9aa1eaba8426 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 17:44:08 +0200 Subject: [PATCH 1655/2550] fix attribute access --- openpype/tools/publisher/control.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index b415644a43..911d464f80 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1563,10 +1563,10 @@ class PublisherController(BasePublisherController): str: Project name. """ - if not hasattr(self.host, "get_current_context"): + if not hasattr(self._host, "get_current_context"): return legacy_io.active_project() - return self.host.get_current_context()["project_name"] + return self._host.get_current_context()["project_name"] @property def current_asset_name(self): @@ -1576,10 +1576,10 @@ class PublisherController(BasePublisherController): Union[str, None]: Asset name or None if asset is not set. """ - if not hasattr(self.host, "get_current_context"): + if not hasattr(self._host, "get_current_context"): return legacy_io.Session["AVALON_ASSET"] - return self.host.get_current_context()["asset_name"] + return self._host.get_current_context()["asset_name"] @property def current_task_name(self): @@ -1589,10 +1589,10 @@ class PublisherController(BasePublisherController): Union[str, None]: Task name or None if task is not set. """ - if not hasattr(self.host, "get_current_context"): + if not hasattr(self._host, "get_current_context"): return legacy_io.Session["AVALON_TASK"] - return self.host.get_current_context()["task_name"] + return self._host.get_current_context()["task_name"] @property def instances(self): From 27d4f1fc70684fe2abb9f70ccbe04e9e7cae42fb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 18:47:58 +0200 Subject: [PATCH 1656/2550] reuse duration from pyblish result instead of calculating own --- openpype/tools/publisher/control.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 911d464f80..c8f38cb080 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -234,7 +234,7 @@ class PublishReport: """Set that current plugin has been skipped.""" self._current_plugin_data["skipped"] = True - def add_result(self, result, process_time): + def add_result(self, result): """Handle result of one plugin and it's instance.""" instance = result["instance"] @@ -244,7 +244,7 @@ class PublishReport: self._current_plugin_data["instances_data"].append({ "id": instance_id, "logs": self._extract_instance_log_items(result), - "process_time": process_time + "process_time": result["duration"] }) def add_action_result(self, action, result): @@ -2106,13 +2106,11 @@ class PublisherController(BasePublisherController): ) def _process_and_continue(self, plugin, instance): - start = time.time() result = pyblish.plugin.process( plugin, self._publish_context, instance ) - process_time = time.time() - start - self._publish_report.add_result(result, process_time) + self._publish_report.add_result(result) exception = result.get("error") if exception: From b4d6fa3a3af7874d05a48cbeb1ab1010af7d7d52 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 17 Oct 2022 18:54:27 +0200 Subject: [PATCH 1657/2550] removed unused import --- 
openpype/tools/publisher/control.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index c8f38cb080..13c1044201 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -3,7 +3,6 @@ import copy import logging import traceback import collections -import time import uuid from abc import ABCMeta, abstractmethod, abstractproperty From 27bf66a6b47aff79faee4eed3e18b7e59ed17667 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Oct 2022 20:59:17 +0200 Subject: [PATCH 1658/2550] resolve: other platform compatibility --- openpype/hosts/resolve/hooks/pre_resolve_setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index 0cf9664457..8574b3ad01 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -3,6 +3,7 @@ import platform from openpype.lib import PreLaunchHook from openpype.hosts.resolve.utils import setup + class ResolvePrelaunch(PreLaunchHook): """ This hook will check if current workfile path has Resolve @@ -15,7 +16,7 @@ class ResolvePrelaunch(PreLaunchHook): def execute(self): current_platform = platform.system().lower() - PROGRAMDATA = self.launch_context.env["PROGRAMDATA"] + PROGRAMDATA = self.launch_context.env.get("PROGRAMDATA", "") RESOLVE_SCRIPT_API_ = { "windows": ( f"{PROGRAMDATA}/Blackmagic Design/" From 3fd2a8826c183fbc7f549f1aa573fca90e47ecbb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Oct 2022 23:54:00 +0200 Subject: [PATCH 1659/2550] fix wrong attribute name --- openpype/tools/publisher/control.py | 14 +++++++------- openpype/tools/publisher/control_qt.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 13c1044201..a340f8c1d2 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1289,7 +1289,7 @@ class BasePublisherController(AbstractPublisherController): self._publish_has_validation_errors = False self._publish_has_crashed = False # All publish plugins are processed - self._publish_finished = False + self._publish_has_finished = False self._publish_max_progress = 0 self._publish_progress = 0 @@ -1337,7 +1337,7 @@ class BasePublisherController(AbstractPublisherController): changed. "publish.progress.changed" - Attr 'publish_progress' changed. "publish.host_is_valid.changed" - Attr 'host_is_valid' changed. - "publish.finished.changed" - Attr 'publish_finished' changed. + "publish.finished.changed" - Attr 'publish_has_finished' changed. Returns: EventSystem: Event system which can trigger callbacks for topics. 
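The fix in this commit aligns the backing attribute `_publish_has_finished` with its getter and setter. For reference, a small self-contained sketch of that guarded-setter pattern: the topic string matches the docstring above, while `add_listener` and the plain callback list are assumed stand-ins for OpenPype's event system, not its actual API.

```python
class ExampleController(object):
    """Stand-in controller showing the attribute/getter/setter trio."""

    def __init__(self):
        self._publish_has_finished = False
        self._listeners = []

    def add_listener(self, callback):
        # Stand-in for registering a callback on the event system
        self._listeners.append(callback)

    def _emit_event(self, topic, data):
        for callback in self._listeners:
            callback(topic, data)

    def _get_publish_has_finished(self):
        return self._publish_has_finished

    def _set_publish_has_finished(self, value):
        # Emit only when the value really changes, so listeners are
        # not triggered by redundant assignments
        if self._publish_has_finished != value:
            self._publish_has_finished = value
            self._emit_event("publish.finished.changed", {"value": value})

    publish_has_finished = property(
        _get_publish_has_finished, _set_publish_has_finished
    )


controller = ExampleController()
controller.add_listener(lambda topic, data: print(topic, data["value"]))
controller.publish_has_finished = True   # emits once
controller.publish_has_finished = True   # no change, no event
```

Emitting only on a real value change keeps UI listeners, such as the Qt controller, from reacting to redundant updates.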
@@ -1361,11 +1361,11 @@ class BasePublisherController(AbstractPublisherController): self._emit_event("publish.host_is_valid.changed", {"value": value}) def _get_publish_has_finished(self): - return self._publish_finished + return self._publish_has_finished def _set_publish_has_finished(self, value): - if self._publish_finished != value: - self._publish_finished = value + if self._publish_has_finished != value: + self._publish_has_finished = value self._emit_event("publish.finished.changed", {"value": value}) def _get_publish_is_running(self): @@ -1465,7 +1465,7 @@ class BasePublisherController(AbstractPublisherController): self.publish_has_validated = False self.publish_has_crashed = False self.publish_has_validation_errors = False - self.publish_finished = False + self.publish_has_finished = False self.publish_error_msg = None self.publish_progress = 0 @@ -2092,7 +2092,7 @@ class PublisherController(BasePublisherController): self._publish_report.set_plugin_skipped() # Cleanup of publishing process - self.publish_finished = True + self.publish_has_finished = True self.publish_progress = self.publish_max_progress yield MainThreadItem(self.stop_publish) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index ddc2dfa3e4..56132a4046 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ -173,7 +173,7 @@ class QtRemotePublishController(BasePublisherController): return if event.topic == "publish.finished.changed": - self.publish_finished = event["value"] + self.publish_has_finished = event["value"] return if event.topic == "publish.host_is_valid.changed": From 7190c0785cebc78b3b68dab21923e19e958c23fe Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Oct 2022 00:05:34 +0200 Subject: [PATCH 1660/2550] go to report on publish stop if on publish tab --- openpype/tools/publisher/window.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index a0d1ac68fb..1424a3eccd 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -499,6 +499,9 @@ class PublisherWindow(QtWidgets.QDialog): publish_has_crashed = self._controller.publish_has_crashed validate_enabled = not publish_has_crashed publish_enabled = not publish_has_crashed + if self._tabs_widget.is_current_tab("publish"): + self._go_to_report_tab() + if validate_enabled: validate_enabled = not self._controller.publish_has_validated if publish_enabled: @@ -507,8 +510,6 @@ class PublisherWindow(QtWidgets.QDialog): and self._controller.publish_has_validation_errors ): publish_enabled = False - if self._tabs_widget.is_current_tab("publish"): - self._go_to_report_tab() else: publish_enabled = not self._controller.publish_has_finished From c9e10f6147356c618aaeb30251a39f428ae88ad5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Oct 2022 00:14:35 +0200 Subject: [PATCH 1661/2550] change progress bar on validation error --- openpype/style/style.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 4d13dc7c89..b466bd0820 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1086,7 +1086,7 @@ ValidationArtistMessage QLabel { border-color: {color:publisher:error}; } -#PublishProgressBar[state="0"]::chunk { +#PublishProgressBar[state="0"]::chunk, #PublishProgressBar[state="2"]::chunk { background: {color:bg-buttons}; } From 
9e37f3448e2f266dc9fe019e303ecca6864bdb76 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Oct 2022 00:14:46 +0200 Subject: [PATCH 1662/2550] change page to publish on reset --- openpype/tools/publisher/window.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 1424a3eccd..39075d2489 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -470,6 +470,11 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) self._set_footer_enabled(False) self._update_publish_details_widget() + if ( + not self._tabs_widget.is_current_tab("create") + or not self._tabs_widget.is_current_tab("publish") + ): + self._tabs_widget.set_current_tab("publish") def _on_publish_start(self): self._create_tab.setEnabled(False) From 7f533390712c308e5eaa2c8c73c7ad2cb3bdc20a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Oct 2022 00:46:01 +0200 Subject: [PATCH 1663/2550] change progress bar colors on pause --- openpype/style/style.css | 15 +++++++-------- .../tools/publisher/widgets/publish_frame.py | 19 +++++++++++++++---- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index b466bd0820..a6818a5792 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -973,23 +973,22 @@ VariantInputsWidget QToolButton { background: {color:bg}; border-radius: 0.3em; } - -#PublishInfoFrame[state="-1"] { - background: rgb(194, 226, 236); -} - #PublishInfoFrame[state="0"] { - background: {color:publisher:crash}; + background: {color:publisher:success}; } #PublishInfoFrame[state="1"] { - background: {color:publisher:success}; + background: {color:publisher:crash}; } #PublishInfoFrame[state="2"] { background: {color:publisher:warning}; } +#PublishInfoFrame[state="3"], #PublishInfoFrame[state="4"] { + background: rgb(194, 226, 236); +} + #PublishInfoFrame QLabel { color: black; font-style: bold; @@ -1086,7 +1085,7 @@ ValidationArtistMessage QLabel { border-color: {color:publisher:error}; } -#PublishProgressBar[state="0"]::chunk, #PublishProgressBar[state="2"]::chunk { +#PublishProgressBar[state="1"]::chunk, #PublishProgressBar[state="4"]::chunk { background: {color:bg-buttons}; } diff --git a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index c5685461a7..e6333a104f 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -328,7 +328,7 @@ class PublishFrame(QtWidgets.QWidget): if self._last_instance_label: self._instance_label.setText(self._last_instance_label) - self._set_success_property(-1) + self._set_success_property(3) self._set_progress_visibility(True) self._set_main_label("Publishing...") @@ -407,7 +407,7 @@ class PublishFrame(QtWidgets.QWidget): "Hit publish (play button) to continue." 
) - self._set_success_property(-1) + self._set_success_property(4) def _set_error_msg(self): """Show error message to artist on publish crash.""" @@ -416,7 +416,7 @@ class PublishFrame(QtWidgets.QWidget): self._message_label_top.setText(self._controller.publish_error_msg) - self._set_success_property(0) + self._set_success_property(1) def _set_validation_errors(self): self._set_main_label("Your publish didn't pass studio validations") @@ -426,7 +426,7 @@ class PublishFrame(QtWidgets.QWidget): def _set_finished(self): self._set_main_label("Finished") self._message_label_top.setText("") - self._set_success_property(1) + self._set_success_property(0) def _set_progress_visibility(self, visible): window_height = self.height() @@ -447,6 +447,17 @@ class PublishFrame(QtWidgets.QWidget): self.move(window_pos.x(), window_pos_y) def _set_success_property(self, state=None): + """Apply styles by state. + + State enum: + - None - Default state after restart + - 0 - Success finish + - 1 - Error happened + - 2 - Validation error + - 3 - In progress + - 4 - Stopped/Paused + """ + if state is None: state = "" else: From 88438127ce6bae249ea76fb1b8a47e28574d6dde Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Tue, 18 Oct 2022 11:06:20 +0200 Subject: [PATCH 1664/2550] upgrading change log generator to 2.3 also raising sinceTag to 3.12.0 --- .github/workflows/prerelease.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index bf39f8f956..81d5f05b17 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -40,13 +40,13 @@ jobs: - name: "✏️ Generate full changelog" if: steps.version_type.outputs.type != 'skip' id: generate-full-changelog - uses: heinrichreimer/github-changelog-generator-action@v2.2 + uses: heinrichreimer/github-changelog-generator-action@v2.3 with: token: ${{ secrets.ADMIN_TOKEN }} addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' issues: false issuesWoLabels: false - sinceTag: "3.0.0" + sinceTag: "3.12.0" maxIssues: 100 pullRequests: true prWoLabels: false @@ -92,4 +92,4 @@ jobs: github_token: ${{ secrets.ADMIN_TOKEN }} source_ref: 'main' target_branch: 'develop' - commit_message_template: '[Automated] Merged {source_ref} into {target_branch}' \ No newline at end of file + commit_message_template: '[Automated] Merged {source_ref} into {target_branch}' From 4c99546b035def8a9d4854e9d90df7edffc76a7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Tue, 18 Oct 2022 11:07:55 +0200 Subject: [PATCH 1665/2550] update change log generator to 2.3 raise sinceTag to 3.12.0 --- .github/workflows/release.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 85864b4442..cc69e1643a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -36,13 +36,13 @@ jobs: - name: "✏️ Generate full changelog" if: steps.version.outputs.release_tag
!= 'skip' id: generate-full-changelog - uses: heinrichreimer/github-changelog-generator-action@v2.2 + uses: heinrichreimer/github-changelog-generator-action@v2.3 with: token: ${{ secrets.ADMIN_TOKEN }} addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' issues: false issuesWoLabels: false - sinceTag: "3.0.0" + sinceTag: "3.12.0" maxIssues: 100 pullRequests: true prWoLabels: false @@ -121,4 +121,4 @@ jobs: github_token: ${{ secrets.ADMIN_TOKEN }} source_ref: 'main' target_branch: 'develop' - commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}' \ No newline at end of file + commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}' From 4641cb5bae28620e53fec1f2b75dd673b33de55c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 18 Oct 2022 14:03:05 +0200 Subject: [PATCH 1666/2550] added backwards compatibility for PyQt4 --- openpype/tools/publisher/widgets/help_widget.py | 4 +++- openpype/tools/publisher/widgets/validations_widget.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/help_widget.py b/openpype/tools/publisher/widgets/help_widget.py index 7da07b1e78..0090111889 100644 --- a/openpype/tools/publisher/widgets/help_widget.py +++ b/openpype/tools/publisher/widgets/help_widget.py @@ -44,8 +44,10 @@ class HelpWidget(QtWidgets.QWidget): if commonmark: html = commonmark.commonmark(text) self._detail_description_input.setHtml(html) - else: + elif hasattr(self._detail_description_input, "setMarkdown"): self._detail_description_input.setMarkdown(text) + else: + self._detail_description_input.setText(text) class HelpDialog(QtWidgets.QDialog): diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index 772a561504..8c483e8088 100- --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -709,5 +709,7 @@ class ValidationsWidget(QtWidgets.QFrame): if commonmark: html = commonmark.commonmark(description) self._error_details_input.setHtml(html) - else: + elif hasattr(self._error_details_input, "setMarkdown"): self._error_details_input.setMarkdown(description) + else: + self._error_details_input.setText(description) From def2fc4bc81d3ec21f8a6cb59726456d7c3d4080 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 18 Oct 2022 14:51:54 +0200 Subject: [PATCH 1667/2550] removed '_CUSTOM' from env keys --- openpype/lib/vendor_bin_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 7b52341290..eb7987c8a1 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -250,7 +250,7 @@ def get_oiio_tools_path(tool="oiiotool"): if CachedToolPaths.is_tool_cached(tool): return CachedToolPaths.get_executable_path(tool) - custom_paths_str = os.environ.get("OPENPYPE_CUSTOM_OIIO_PATHS") or "" + custom_paths_str =
os.environ.get("OPENPYPE_OIIO_PATHS") or "" tool_executable_path = find_tool_in_custom_paths( custom_paths_str.split(os.pathsep), tool, @@ -330,7 +330,7 @@ def get_ffmpeg_tool_path(tool="ffmpeg"): if CachedToolPaths.is_tool_cached(tool): return CachedToolPaths.get_executable_path(tool) - custom_paths_str = os.environ.get("OPENPYPE_CUSTOM_FFMPEG_PATHS") or "" + custom_paths_str = os.environ.get("OPENPYPE_FFMPEG_PATHS") or "" tool_executable_path = find_tool_in_custom_paths( custom_paths_str.split(os.pathsep), tool, From 8e90bf73c9bb0ccbb46fedcc557118b2cfb8adb8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 18 Oct 2022 15:00:56 +0200 Subject: [PATCH 1668/2550] added information about ffmpeg and oiio to documentation --- website/docs/admin_settings_system.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index 8daba91db1..800ec1c840 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -26,7 +26,14 @@ as a naive barier to prevent artists from accidental setting changes. **`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up. Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume). + +### FFmpeg and OpenImageIO tools + +We bundle FFmpeg and OpenImageIO tools with OpenPype build for Windows and Linux builds. For MacOs support or to use different build is it possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings. Both should lead to directory where tool executables are located. Multiple paths are supported. + + ### OpenPype deployment control + **`Versions Repository`** - Location where automatic update mechanism searches for zip files with OpenPype update packages. To read more about preparing OpenPype for automatic updates go to [Admin Distribute docs](admin_distribute.md#2-openpype-codebase) From 952fd6c15c6f60a062bd3e14e0bc6df5dd7b33ee Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 18 Oct 2022 15:01:43 +0200 Subject: [PATCH 1669/2550] removed unnecessary lines --- website/docs/admin_settings_system.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index 800ec1c840..cef4571c84 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -26,14 +26,10 @@ as a naive barier to prevent artists from accidental setting changes. **`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up. Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume). - ### FFmpeg and OpenImageIO tools - We bundle FFmpeg and OpenImageIO tools with OpenPype build for Windows and Linux builds. For MacOs support or to use different build is it possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings. Both should lead to directory where tool executables are located. Multiple paths are supported. - ### OpenPype deployment control - **`Versions Repository`** - Location where automatic update mechanism searches for zip files with OpenPype update packages.
To read more about preparing OpenPype for automatic updates go to [Admin Distribute docs](admin_distribute.md#2-openpype-codebase) From e9db3dbadce7bee241608f010785a3b14ce95641 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 18 Oct 2022 15:07:56 +0200 Subject: [PATCH 1670/2550] rephrase sentence --- website/docs/admin_settings_system.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index cef4571c84..66715e7288 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -27,7 +27,7 @@ as a naive barier to prevent artists from accidental setting changes. Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume). ### FFmpeg and OpenImageIO tools -We bundle FFmpeg and OpenImageIO tools with OpenPype build for Windows and Linux builds. For MacOs support or to use different build is it possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings. Both should lead to directory where tool executables are located. Multiple paths are supported. +We bundle FFmpeg tools for all platforms and OpenImageIO tools for Windows and Linux. By default are used bundled tools but it is possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings environments to look for them in different directory e.g. for different linux distributions or to add oiio support for MacOs. Values of both environment variables should lead to directory where tool executables are located (multiple paths are supported). ### OpenPype deployment control **`Versions Repository`** - Location where automatic update mechanism searches for zip files with From cad97d6d1dc98d9d2d46dec060801fa4645e6053 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 18 Oct 2022 15:18:24 +0200 Subject: [PATCH 1671/2550] simplify api by giving access to 'collection_shared_data' property --- openpype/hosts/traypublisher/api/plugin.py | 6 +- openpype/pipeline/create/context.py | 65 ++++----------------- openpype/pipeline/create/creator_plugins.py | 41 +++---------- 3 files changed, 23 insertions(+), 89 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 0f519e3c32..2cb5a8729f 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -28,9 +28,9 @@ def _cache_and_get_instances(creator): """ shared_key = "openpype.traypublisher.instances" - if not creator.collection_shared_data_contains(shared_key): - creator.set_collection_shared_data(shared_key, list_instances()) - return creator.get_collection_shared_data(shared_key) + if shared_key not in creator.collection_shared_data: + creator.collection_shared_data[shared_key] = list_instances() + return creator.collection_shared_data[shared_key] class HiddenTrayPublishCreator(HiddenCreator): diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 298eacecb5..c5c9a14f33 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -884,14 +884,12 @@ class CreateContext: def reset_preparation(self): """Prepare attributes that must be prepared/cleaned before reset.""" - pass # Give ability to store shared data for collection phase self._collection_shared_data = {} def reset_finalization(self): """Cleanup of attributes after 
reset.""" - pass # Stop access to collection shared data self._collection_shared_data = None @@ -1280,58 +1278,19 @@ class CreateContext: plugins.append(plugin) return plugins - def _validate_collection_shared_data(self): + @property + def collection_shared_data(self): + """Access to shared data that can be used during creator's collection. + + Retruns: + Dict[str, Any]: Shared data. + + Raises: + UnavailableSharedData: When called out of collection phase. + """ + if self._collection_shared_data is None: raise UnavailableSharedData( "Accessed Collection shared data out of collection phase" ) - - def collection_shared_data_contains(self, key): - """Check if collection shared data are set. - - Args: - key (str): Key under which are shared data stored. - - Retruns: - bool: Key is already set. - - Raises: - UnavailableSharedData: When called out of collection phase. - """ - - self._validate_collection_shared_data() - return key in self._collection_shared_data - - def get_collection_shared_data(self, key, default=None): - """Receive shared data during collection phase. - - Args: - key (str): Key under which are shared data stored. - default (Any): Default value if key is not set. - - Returns: - Any: Value stored under the key. - - Raises: - UnavailableSharedData: When called out of collection phase. - """ - - self._validate_collection_shared_data() - return self._collection_shared_data.get(key, default) - - def set_collection_shared_data(self, key, value): - """Store a value under collection shared data. - - It is highly recommended to use very specific keys as creators may - clash each other if simple keys are used. - - Args: - key (str): Key under which will shared data be stored. - value (Any): Value to store. - - Raises: - UnavailableSharedData: When called out of collection phase. - """ - - self._validate_collection_shared_data() - self._collection_shared_data[key] = value + return self._collection_shared_data diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index e5018c395e..97ee94c449 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -324,43 +324,18 @@ class BaseCreator: return self.instance_attr_defs - def collection_shared_data_contains(self, key): - """Check if collection shared data are set. - - Args: - key (str): Key under which are shared data stored. + @property + def collection_shared_data(self): + """Access to shared data that can be used during creator's collection. Retruns: - bool: Key is already set. + Dict[str, Any]: Shared data. + + Raises: + UnavailableSharedData: When called out of collection phase. """ - return self.create_context.collection_shared_data_contains(key) - - def get_collection_shared_data(self, key, default=None): - """Receive shared data during collection phase. - - Args: - key (str): Key under which are shared data stored. - default (Any): Default value if key is not set. - - Returns: - Any: Value stored under the key. - """ - - return self.create_context.get_collection_shared_data(key, default) - - def set_collection_shared_data(self, key, value): - """Store a value under collection shared data. - - It is highly recommended to use very specific keys as creators may - clash each other if simple keys are used. - - Args: - key (str): Key under which will shared data be stored. - value (Any): Value to store. 
- """ - - return self.create_context.set_collection_shared_data(key, value) + """ + + return self.create_context.collection_shared_data class Creator(BaseCreator): From 7571d62709eb5c9ecd08a07062e818f32eb4ab0c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 18 Oct 2022 15:22:50 +0200 Subject: [PATCH 1672/2550] Updated documentation --- website/docs/dev_publishing.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/website/docs/dev_publishing.md b/website/docs/dev_publishing.md index 5f30f7f9c8..135f6cd985 100644 --- a/website/docs/dev_publishing.md +++ b/website/docs/dev_publishing.md @@ -73,10 +73,7 @@ Main responsibility of create plugin is to create, update, collect and remove in Base implementation of creator plugin. It is not recommended to use this class as base for production plugins but rather use one of **HiddenCreator**, **AutoCreator** and **Creator** variants. **Access to shared data** -Functions to work with "Collection shared data" can be used during reset phase of `CreateContext`. Creators can cache there data that are common for them. For example list of nodes in scene. Methods are implemented on `CreateContext` but their usage is primarily for Create plugins as nothing else should use it. -- **`collection_shared_data_contains`** - Check if shared data already has set a key. -- **`get_collection_shared_data`** - Receive value of shared data by a key. -- **`set_collection_shared_data`** - Set or update value of shared data key. +Functions to work with "Collection shared data" can be used during reset phase of `CreateContext`. Creators can cache there data that are common for them. For example list of nodes in scene. Methods are implemented on `CreateContext` but their usage is primarily for Create plugins as nothing else should use it. Each creator can access `collection_shared_data` attribute which is a dictionary where shared data can be stored. **Abstractions** - **`family`** (class attr) - Tells what kind of instance will be created. From 9a9aeef3ac22d9504a1098abd7c5af843dd89b62 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 19 Oct 2022 10:42:09 +0200 Subject: [PATCH 1673/2550] fix removal of instances --- openpype/hosts/tvpaint/api/pipeline.py | 37 ++++++++++++++------------ 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index ac4dc05040..249326791b 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -139,6 +139,26 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost): log.info("Setting up project...") set_context_settings() + def remove_instance(self, instance): + """Remove instance from current workfile metadata. + + Implementation for Subset manager tool. + """ + + current_instances = get_workfile_metadata(SECTION_NAME_INSTANCES) + instance_id = instance.get("uuid") + found_idx = None + if instance_id: + for idx, _inst in enumerate(current_instances): + if _inst["uuid"] == instance_id: + found_idx = idx + break + + if found_idx is None: + return + current_instances.pop(found_idx) + write_instances(current_instances) + def application_exit(self): """Logic related to TimerManager.
@@ -421,23 +441,6 @@ def save_current_workfile_context(context): return write_workfile_metadata(SECTION_NAME_CONTEXT, context) -def remove_instance(instance): - """Remove instance from current workfile metadata.""" - current_instances = get_workfile_metadata(SECTION_NAME_INSTANCES) - instance_id = instance.get("uuid") - found_idx = None - if instance_id: - for idx, _inst in enumerate(current_instances): - if _inst["uuid"] == instance_id: - found_idx = idx - break - - if found_idx is None: - return - current_instances.pop(found_idx) - write_instances(current_instances) - - def list_instances(): """List all created instances from current workfile.""" return get_workfile_metadata(SECTION_NAME_INSTANCES) From 7cb370e4150e638b06fcdc50f165c1cb022a23e7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 11:34:50 +0200 Subject: [PATCH 1674/2550] OP-4180 - cleanup of _project_doc_to_anatomy_data Previous implementation had only syntax sugar and filtering because of Settings, in base class it is not necessary. --- openpype/pipeline/anatomy.py | 42 ++++-------------------------------- 1 file changed, 4 insertions(+), 38 deletions(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 437a03f898..aad42683e8 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -61,6 +61,8 @@ class BaseAnatomy(object): project_name = project_doc["name"] self.project_name = project_name + self._site_name = site_name + self._data = self._prepare_anatomy_data( project_doc, local_settings, site_name ) @@ -335,43 +337,8 @@ class BaseAnatomy(object): if not project_doc: return {} - project_settings_root = ProjectSettings( - project_doc["name"], reset=False, change_state=False - ) - anatomy_entity = project_settings_root["project_anatomy"] - anatomy_keys = set(anatomy_entity.keys()) - anatomy_keys.remove("attributes") - attribute_keys = set(anatomy_entity["attributes"].keys()) - - attributes = {} - project_doc_data = project_doc.get("data") or {} - for key in attribute_keys: - value = project_doc_data.get(key) - if value is not None: - attributes[key] = value - - project_doc_config = project_doc.get("config") or {} - - app_names = set() - if not project_doc_config or "apps" not in project_doc_config: - set_applications = False - else: - set_applications = True - for app_item in project_doc_config["apps"]: - if not app_item: - continue - app_name = app_item.get("name") - if app_name: - app_names.add(app_name) - - if set_applications: - attributes["applications"] = list(app_names) - - output = {"attributes": attributes} - for key in anatomy_keys: - value = project_doc_config.get(key) - if value is not None: - output[key] = value + output = copy.deepcopy(project_doc["config"]) + output["attributes"] = copy.deepcopy(project_doc["data"]) return output @@ -450,7 +417,6 @@ class Anatomy(BaseAnatomy): " to load data for specific project." 
)) - self._site_name = site_name project_doc = self.get_project_doc_from_cache(project_name) local_settings = get_local_settings() if not site_name: From 5d91a904aebd77e8c290d3a426701ed86b0bf114 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 11:35:45 +0200 Subject: [PATCH 1675/2550] OP-4180 - Hound --- openpype/pipeline/anatomy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index aad42683e8..fd32a16bb2 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -15,7 +15,6 @@ from openpype.settings.lib import ( from openpype.settings.constants import ( DEFAULT_PROJECT_KEY ) -from openpype.settings import ProjectSettings from openpype.client import get_project from openpype.lib.path_templates import ( From c9fc2547a9b44c6b108633504e64b6dfc5b9d603 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 19 Oct 2022 11:56:55 +0200 Subject: [PATCH 1676/2550] replace imports from openpype.api --- openpype/hosts/hiero/api/pipeline.py | 1 - .../hiero/plugins/publish/integrate_version_up_workfile.py | 5 +++-- openpype/hosts/nuke/api/pipeline.py | 1 - openpype/hosts/nuke/plugins/publish/precollect_workfile.py | 5 +++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index dacfd338bb..ea61dc4785 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -251,7 +251,6 @@ def reload_config(): import importlib for module in ( - "openpype.api", "openpype.hosts.hiero.lib", "openpype.hosts.hiero.menu", "openpype.hosts.hiero.tags" diff --git a/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py b/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py index 934e7112fa..6ccbe955f2 100644 --- a/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py +++ b/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py @@ -1,5 +1,6 @@ from pyblish import api -import openpype.api as pype + +from openpype.lib import version_up class IntegrateVersionUpWorkfile(api.ContextPlugin): @@ -15,7 +16,7 @@ class IntegrateVersionUpWorkfile(api.ContextPlugin): def process(self, context): project = context.data["activeProject"] path = context.data.get("currentFile") - new_path = pype.version_up(path) + new_path = version_up(path) if project: project.saveAs(new_path) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 7db420f6af..c343c635fa 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -66,7 +66,6 @@ def reload_config(): """ for module in ( - "openpype.api", "openpype.hosts.nuke.api.actions", "openpype.hosts.nuke.api.menu", "openpype.hosts.nuke.api.plugin", diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py index 822f405a6f..316c651b66 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py @@ -3,7 +3,8 @@ import os import nuke import pyblish.api -import openpype.api as pype + +from openpype.lib import get_version_from_path from openpype.hosts.nuke.api.lib import ( add_publish_knob, get_avalon_knob_data @@ -74,7 +75,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "fps": root['fps'].value(), "currentFile": current_file, - "version": int(pype.get_version_from_path(current_file)), + "version": 
int(get_version_from_path(current_file)), "host": pyblish.api.current_host(), "hostVersion": nuke.NUKE_VERSION_STRING From c930212f1c3c1cfa0de281ffaf95316d91ad0488 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 12:12:43 +0200 Subject: [PATCH 1677/2550] updating history.md --- HISTORY.md | 1818 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 1808 insertions(+), 10 deletions(-) diff --git a/HISTORY.md b/HISTORY.md index 032f876aa3..ca54c60273 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,1811 @@ # Changelog +## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) + +### 📖 Documentation + +- Documentation: Anatomy templates [\#3618](https://github.com/pypeclub/OpenPype/pull/3618) + +**🆕 New features** + +- Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) +- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) +- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) + +**🚀 Enhancements** + +- Flame: Adding Creator's retimed shot and handles switch [\#3826](https://github.com/pypeclub/OpenPype/pull/3826) +- Flame: OpenPype submenu to batch and media manager [\#3825](https://github.com/pypeclub/OpenPype/pull/3825) +- General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) +- Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) +- SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) +- Kitsu: Drop 'entities root' setting. [\#3739](https://github.com/pypeclub/OpenPype/pull/3739) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) +- Blender: Publisher collect workfile representation [\#3670](https://github.com/pypeclub/OpenPype/pull/3670) +- Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) +- Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) +- Scene Inventory: Add subsetGroup column [\#3658](https://github.com/pypeclub/OpenPype/pull/3658) + +**🐛 Bug fixes** + +- General: Fix Pattern access in client code [\#3828](https://github.com/pypeclub/OpenPype/pull/3828) +- Launcher: Skip opening last work file works for groups [\#3822](https://github.com/pypeclub/OpenPype/pull/3822) +- Maya: Publishing data key change [\#3811](https://github.com/pypeclub/OpenPype/pull/3811) +- Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) +- Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) +- Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) +- nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) +- Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) +- Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) +- Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) +- Maya: `containerise` dont skip empty values [\#3674](https://github.com/pypeclub/OpenPype/pull/3674) + +**🔀 Refactored code** + +- Photoshop: Use new Extractor 
location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) +- Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) +- AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) +- General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) +- General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) +- General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) +- General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) +- General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) +- General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) +- Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) +- Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) +- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) + +**Merged pull requests:** + +- Standalone Publisher: Ignore empty labels, then still use name like other asset models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) +- Kitsu - sync\_all\_project - add list ignore\_projects [\#3776](https://github.com/pypeclub/OpenPype/pull/3776) + +## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...3.14.1) + +### 📖 Documentation + +- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) +- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) + +**🆕 New features** + +- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) +- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662) + +**🚀 Enhancements** + +- General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) +- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) +- General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) +- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) +- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) +- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) +- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) +- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) + +**🐛 Bug 
fixes** + +- Maya: Fix typo in getPanel argument `with_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753) +- General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) +- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) +- Nuke: missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) +- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) +- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) +- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) +- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) +- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) +- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) +- RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) +- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) + +**🔀 Refactored code** + +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) +- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) +- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) +- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) +- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) +- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) +- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) +- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) +- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713) +- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) +- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) +- StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) +- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705) +- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702) + +**Merged pull requests:** + +- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) +- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) 
+- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) +- Nuke: Validation refactory to new publisher [\#3567](https://github.com/pypeclub/OpenPype/pull/3567) + +## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...3.14.0) + +**🆕 New features** + +- Maya: Build workfile by template [\#3578](https://github.com/pypeclub/OpenPype/pull/3578) +- Maya: Implementation of JSON layout for Unreal workflow [\#3353](https://github.com/pypeclub/OpenPype/pull/3353) +- Maya: Build workfile by template [\#3315](https://github.com/pypeclub/OpenPype/pull/3315) + +**🚀 Enhancements** + +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) +- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) +- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) +- Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661) +- General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) + +**🐛 Bug fixes** + +- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) +- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) +- General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) +- Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) +- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) +- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) + +**🔀 Refactored code** + +- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) +- Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653) +- Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) +- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) +- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) +- General: Workfiles builder using query functions [\#3598](https://github.com/pypeclub/OpenPype/pull/3598) + +**Merged pull requests:** + +- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) +- Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645) +- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) +- fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628) + +## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) + +**🆕 New features** + +- Support for mutliple installed versions - 3.13 [\#3605](https://github.com/pypeclub/OpenPype/pull/3605) +- 
Traypublisher: simple editorial publishing [\#3492](https://github.com/pypeclub/OpenPype/pull/3492) + +**🚀 Enhancements** + +- Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) +- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) +- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) +- General: Add context to pyblish context [\#3594](https://github.com/pypeclub/OpenPype/pull/3594) +- Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) +- Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) +- General: Python module appdirs from git [\#3589](https://github.com/pypeclub/OpenPype/pull/3589) +- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) +- General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) +- Maya: Render Creator has configurable options. [\#3097](https://github.com/pypeclub/OpenPype/pull/3097) + +**🐛 Bug fixes** + +- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) +- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) +- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) +- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) +- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) +- AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) +- Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) +- TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) +- Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. 
[\#3590](https://github.com/pypeclub/OpenPype/pull/3590) +- Fix general settings environment variables resolution [\#3587](https://github.com/pypeclub/OpenPype/pull/3587) +- Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) +- General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) +- Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) +- Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) + +**🔀 Refactored code** + +- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) +- General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) +- General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) +- General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) + +**Merged pull requests:** + +- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) +- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) +- Enable write color sets on animation publish automatically [\#3582](https://github.com/pypeclub/OpenPype/pull/3582) + +## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...3.12.2) + +### 📖 Documentation + +- Update website with more studios [\#3554](https://github.com/pypeclub/OpenPype/pull/3554) +- Documentation: Update publishing dev docs [\#3549](https://github.com/pypeclub/OpenPype/pull/3549) + +**🚀 Enhancements** + +- General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) +- Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) +- General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) +- Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) +- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) +- Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) +- Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) +- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) +- General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) +- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) +- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) +- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) +- Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) +- Enhance powershell build scripts 
[\#1827](https://github.com/pypeclub/OpenPype/pull/1827) + +**🐛 Bug fixes** + +- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) +- Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) +- NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) +- Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) +- General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) +- Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) +- Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) +- Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) +- General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538) +- Additional fixes for powershell scripts [\#3525](https://github.com/pypeclub/OpenPype/pull/3525) +- Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) +- Nuke: double slate [\#3521](https://github.com/pypeclub/OpenPype/pull/3521) +- General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) +- Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) +- TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) +- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) +- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) +- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) + +**🔀 Refactored code** + +- General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) +- General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) +- Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) +- General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) +- General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) +- General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) +- Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) +- TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) +- Deadline: Use query functions [\#3466](https://github.com/pypeclub/OpenPype/pull/3466) +- Refactor Integrate Asset [\#2898](https://github.com/pypeclub/OpenPype/pull/2898) + +**Merged pull requests:** + +- Maya: fix active pane loss [\#3566](https://github.com/pypeclub/OpenPype/pull/3566) + +## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...3.12.1) + +### 📖 Documentation + +- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) + +**🆕 New features** + +- Maya: Add VDB to Arnold loader [\#3433](https://github.com/pypeclub/OpenPype/pull/3433) + +**🚀 Enhancements** + +- TrayPublisher: Added 
more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) +- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) +- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) +- General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) +- General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) +- Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) +- Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) +- Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) +- Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) +- Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) +- Maya: Add additional playblast options to review Extractor. [\#3384](https://github.com/pypeclub/OpenPype/pull/3384) +- Maya: Ability to set resolution for playblasts from asset, and override through review instance. [\#3360](https://github.com/pypeclub/OpenPype/pull/3360) +- Maya: Redshift Volume Loader Implement update, remove, switch + fix vdb sequence support [\#3197](https://github.com/pypeclub/OpenPype/pull/3197) +- Maya: Implement `iter_visible_nodes_in_range` for extracting Alembics [\#3100](https://github.com/pypeclub/OpenPype/pull/3100) + +**🐛 Bug fixes** + +- TrayPublisher: Keep use instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) +- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) +- General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) +- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) +- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) +- General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) +- Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) +- Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) +- General: Fix query function in update logic [\#3468](https://github.com/pypeclub/OpenPype/pull/3468) +- Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) +- General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) +- Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) +- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) +- Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) +- LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) +- Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) +- Maya: Camera extra data - additional fix for \#3304 [\#3386](https://github.com/pypeclub/OpenPype/pull/3386) +- Maya: Handle excluding `model` family from frame range validator. 
[\#3370](https://github.com/pypeclub/OpenPype/pull/3370) + +**🔀 Refactored code** + +- Maya: Merge animation + pointcache extractor logic [\#3461](https://github.com/pypeclub/OpenPype/pull/3461) +- Maya: Re-use `maintained_time` from lib [\#3460](https://github.com/pypeclub/OpenPype/pull/3460) +- General: Use query functions in global plugins [\#3459](https://github.com/pypeclub/OpenPype/pull/3459) +- Clockify: Use query functions in clockify actions [\#3458](https://github.com/pypeclub/OpenPype/pull/3458) +- General: Use query functions in rest api calls [\#3457](https://github.com/pypeclub/OpenPype/pull/3457) +- General: Use query functions in openpype lib functions [\#3454](https://github.com/pypeclub/OpenPype/pull/3454) +- General: Use query functions in load utils [\#3446](https://github.com/pypeclub/OpenPype/pull/3446) +- General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) +- General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) +- General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) +- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) +- Resolve: Use client query functions [\#3379](https://github.com/pypeclub/OpenPype/pull/3379) +- General: Host implementation defined with class [\#3337](https://github.com/pypeclub/OpenPype/pull/3337) + +## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.1...3.12.0) + +### 📖 Documentation + +- Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) +- Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) + +**🆕 New features** + +- Shotgrid: Add production beta of shotgrid integration [\#2921](https://github.com/pypeclub/OpenPype/pull/2921) + +**🚀 Enhancements** + +- Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) +- Attribute Defs UI: Files widget show what is allowed to drop in [\#3411](https://github.com/pypeclub/OpenPype/pull/3411) +- General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) +- Hosts: More options for in-host callbacks [\#3357](https://github.com/pypeclub/OpenPype/pull/3357) +- Multiverse: expose some settings to GUI [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) +- Maya: Allow more data to be published along camera 🎥 [\#3304](https://github.com/pypeclub/OpenPype/pull/3304) +- Add root keys and project keys to create starting folder [\#2755](https://github.com/pypeclub/OpenPype/pull/2755) + +**🐛 Bug fixes** + +- NewPublisher: Fix subset name change on change of creator plugin [\#3420](https://github.com/pypeclub/OpenPype/pull/3420) +- Bug: fix invalid avalon import [\#3418](https://github.com/pypeclub/OpenPype/pull/3418) +- Nuke: Fix keyword argument in query function [\#3414](https://github.com/pypeclub/OpenPype/pull/3414) +- Houdini: fix loading and updating vbd/bgeo sequences [\#3408](https://github.com/pypeclub/OpenPype/pull/3408) +- Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) +- General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) +- Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) +- TVPaint: Make sure exit code 
is set to not None [\#3382](https://github.com/pypeclub/OpenPype/pull/3382) +- Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) +- Flame: bunch of publishing issues [\#3377](https://github.com/pypeclub/OpenPype/pull/3377) +- Harmony: added unc path to zifile command in Harmony [\#3372](https://github.com/pypeclub/OpenPype/pull/3372) +- Standalone: settings improvements [\#3355](https://github.com/pypeclub/OpenPype/pull/3355) +- Nuke: Load full model hierarchy by default [\#3328](https://github.com/pypeclub/OpenPype/pull/3328) +- Nuke: multiple baking streams with correct slate [\#3245](https://github.com/pypeclub/OpenPype/pull/3245) +- Maya: fix image prefix warning in validator [\#3128](https://github.com/pypeclub/OpenPype/pull/3128) + +**🔀 Refactored code** + +- Unreal: Use client query functions [\#3421](https://github.com/pypeclub/OpenPype/pull/3421) +- General: Move editorial lib to pipeline [\#3419](https://github.com/pypeclub/OpenPype/pull/3419) +- Kitsu: renaming to plural func sync\_all\_projects [\#3397](https://github.com/pypeclub/OpenPype/pull/3397) +- Houdini: Use client query functions [\#3395](https://github.com/pypeclub/OpenPype/pull/3395) +- Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) +- Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) +- Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) +- Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) +- Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) +- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) +- AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) +- TVPaint: Use client query functions [\#3340](https://github.com/pypeclub/OpenPype/pull/3340) +- Ftrack: Use client query functions [\#3339](https://github.com/pypeclub/OpenPype/pull/3339) +- Standalone Publisher: Use client query functions [\#3330](https://github.com/pypeclub/OpenPype/pull/3330) + +**Merged pull requests:** + +- Sync Queue: Added far future value for null values for dates [\#3371](https://github.com/pypeclub/OpenPype/pull/3371) +- Maya - added support for single frame playblast review [\#3369](https://github.com/pypeclub/OpenPype/pull/3369) +- Houdini: Implement Redshift Proxy Export [\#3196](https://github.com/pypeclub/OpenPype/pull/3196) + +## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.0...3.11.1) + +**🆕 New features** + +- Flame: custom export temp folder [\#3346](https://github.com/pypeclub/OpenPype/pull/3346) +- Nuke: removing third-party plugins [\#3344](https://github.com/pypeclub/OpenPype/pull/3344) + +**🚀 Enhancements** + +- Pyblish Pype: Hiding/Close issues [\#3367](https://github.com/pypeclub/OpenPype/pull/3367) +- Ftrack: Removed requirement of pypeclub role from default settings [\#3354](https://github.com/pypeclub/OpenPype/pull/3354) +- Kitsu: Prevent crash on missing frames information [\#3352](https://github.com/pypeclub/OpenPype/pull/3352) +- Ftrack: Open browser from tray [\#3320](https://github.com/pypeclub/OpenPype/pull/3320) +- Enhancement: More control over thumbnail processing. 
[\#3259](https://github.com/pypeclub/OpenPype/pull/3259) + +**🐛 Bug fixes** + +- Nuke: bake streams with slate on farm [\#3368](https://github.com/pypeclub/OpenPype/pull/3368) +- Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) +- Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) +- Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) +- AE- fix validate\_scene\_settings and renderLocal [\#3358](https://github.com/pypeclub/OpenPype/pull/3358) +- deadline: fixing misidentification of revieables [\#3356](https://github.com/pypeclub/OpenPype/pull/3356) +- General: Create only one thumbnail per instance [\#3351](https://github.com/pypeclub/OpenPype/pull/3351) +- nuke: adding extract thumbnail settings 3.10 [\#3347](https://github.com/pypeclub/OpenPype/pull/3347) +- General: Fix last version function [\#3345](https://github.com/pypeclub/OpenPype/pull/3345) +- Deadline: added OPENPYPE\_MONGO to filter [\#3336](https://github.com/pypeclub/OpenPype/pull/3336) +- Nuke: fixing farm publishing if review is disabled [\#3306](https://github.com/pypeclub/OpenPype/pull/3306) +- Maya: Fix Yeti errors on Create, Publish and Load [\#3198](https://github.com/pypeclub/OpenPype/pull/3198) + +**🔀 Refactored code** + +- Webpublisher: Use client query functions [\#3333](https://github.com/pypeclub/OpenPype/pull/3333) + +## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.10.0...3.11.0) + +### 📖 Documentation + +- Documentation: Add app key to template documentation [\#3299](https://github.com/pypeclub/OpenPype/pull/3299) +- doc: adding royal render and multiverse to the web site [\#3285](https://github.com/pypeclub/OpenPype/pull/3285) +- Module: Kitsu module [\#2650](https://github.com/pypeclub/OpenPype/pull/2650) + +**🆕 New features** + +- Multiverse: fixed composition write, full docs, cosmetics [\#3178](https://github.com/pypeclub/OpenPype/pull/3178) + +**🚀 Enhancements** + +- Settings: Settings can be extracted from UI [\#3323](https://github.com/pypeclub/OpenPype/pull/3323) +- updated poetry installation source [\#3316](https://github.com/pypeclub/OpenPype/pull/3316) +- Ftrack: Action to easily create daily review session [\#3310](https://github.com/pypeclub/OpenPype/pull/3310) +- TVPaint: Extractor use mark in/out range to render [\#3309](https://github.com/pypeclub/OpenPype/pull/3309) +- Ftrack: Delivery action can work on ReviewSessions [\#3307](https://github.com/pypeclub/OpenPype/pull/3307) +- Maya: Look assigner UI improvements [\#3298](https://github.com/pypeclub/OpenPype/pull/3298) +- Ftrack: Action to transfer values of hierarchical attributes [\#3284](https://github.com/pypeclub/OpenPype/pull/3284) +- Maya: better handling of legacy review subsets names [\#3269](https://github.com/pypeclub/OpenPype/pull/3269) +- General: Updated windows oiio tool [\#3268](https://github.com/pypeclub/OpenPype/pull/3268) +- Unreal: add support for skeletalMesh and staticMesh to loaders [\#3267](https://github.com/pypeclub/OpenPype/pull/3267) +- Maya: reference loaders could store placeholder in referenced url [\#3264](https://github.com/pypeclub/OpenPype/pull/3264) +- TVPaint: Init file for TVPaint worker also handle guideline images [\#3250](https://github.com/pypeclub/OpenPype/pull/3250) +- Nuke: Change default icon path in settings [\#3247](https://github.com/pypeclub/OpenPype/pull/3247) +- 
Maya: publishing of animation and pointcache on a farm [\#3225](https://github.com/pypeclub/OpenPype/pull/3225) +- Maya: Look assigner UI improvements [\#3208](https://github.com/pypeclub/OpenPype/pull/3208) +- Nuke: add pointcache and animation to loader [\#3186](https://github.com/pypeclub/OpenPype/pull/3186) +- Nuke: Add a gizmo menu [\#3172](https://github.com/pypeclub/OpenPype/pull/3172) +- Support for Unreal 5 [\#3122](https://github.com/pypeclub/OpenPype/pull/3122) + +**🐛 Bug fixes** + +- General: Handle empty source key on instance [\#3342](https://github.com/pypeclub/OpenPype/pull/3342) +- Houdini: Fix Houdini VDB manage update wrong file attribute name [\#3322](https://github.com/pypeclub/OpenPype/pull/3322) +- Nuke: anatomy compatibility issue hacks [\#3321](https://github.com/pypeclub/OpenPype/pull/3321) +- hiero: otio p3 compatibility issue - metadata on effect use update 3.11 [\#3314](https://github.com/pypeclub/OpenPype/pull/3314) +- General: Vendorized modules for Python 2 and update poetry lock [\#3305](https://github.com/pypeclub/OpenPype/pull/3305) +- Fix - added local targets to install host [\#3303](https://github.com/pypeclub/OpenPype/pull/3303) +- Settings: Add missing default settings for nuke gizmo [\#3301](https://github.com/pypeclub/OpenPype/pull/3301) +- Maya: Fix swaped width and height in reviews [\#3300](https://github.com/pypeclub/OpenPype/pull/3300) +- Maya: point cache publish handles Maya instances [\#3297](https://github.com/pypeclub/OpenPype/pull/3297) +- Global: extract review slate issues [\#3286](https://github.com/pypeclub/OpenPype/pull/3286) +- Webpublisher: return only active projects in ProjectsEndpoint [\#3281](https://github.com/pypeclub/OpenPype/pull/3281) +- Hiero: add support for task tags 3.10.x [\#3279](https://github.com/pypeclub/OpenPype/pull/3279) +- General: Fix Oiio tool path resolving [\#3278](https://github.com/pypeclub/OpenPype/pull/3278) +- Maya: Fix udim support for e.g. 
uppercase \ tag [\#3266](https://github.com/pypeclub/OpenPype/pull/3266) +- Nuke: bake reformat was failing on string type [\#3261](https://github.com/pypeclub/OpenPype/pull/3261) +- Maya: hotfix Pxr multitexture in looks [\#3260](https://github.com/pypeclub/OpenPype/pull/3260) +- Unreal: Fix Camera Loading if Layout is missing [\#3255](https://github.com/pypeclub/OpenPype/pull/3255) +- Unreal: Fixed Animation loading in UE5 [\#3240](https://github.com/pypeclub/OpenPype/pull/3240) +- Unreal: Fixed Render creation in UE5 [\#3239](https://github.com/pypeclub/OpenPype/pull/3239) +- Unreal: Fixed Camera loading in UE5 [\#3238](https://github.com/pypeclub/OpenPype/pull/3238) +- Flame: debugging [\#3224](https://github.com/pypeclub/OpenPype/pull/3224) +- add silent audio to slate [\#3162](https://github.com/pypeclub/OpenPype/pull/3162) +- Add timecode to slate [\#2929](https://github.com/pypeclub/OpenPype/pull/2929) + +**🔀 Refactored code** + +- Blender: Use client query functions [\#3331](https://github.com/pypeclub/OpenPype/pull/3331) +- General: Define query functions [\#3288](https://github.com/pypeclub/OpenPype/pull/3288) + +**Merged pull requests:** + +- Maya: add pointcache family to gpu cache loader [\#3318](https://github.com/pypeclub/OpenPype/pull/3318) +- Maya look: skip empty file attributes [\#3274](https://github.com/pypeclub/OpenPype/pull/3274) + +## [3.10.0](https://github.com/pypeclub/OpenPype/tree/3.10.0) (2022-05-26) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.8...3.10.0) + +### 📖 Documentation + +- Docs: add all-contributors config and initial list [\#3094](https://github.com/pypeclub/OpenPype/pull/3094) +- Nuke docs with videos [\#3052](https://github.com/pypeclub/OpenPype/pull/3052) + +**🆕 New features** + +- General: OpenPype modules publish plugins are registered in host [\#3180](https://github.com/pypeclub/OpenPype/pull/3180) +- General: Creator plugins from addons can be registered [\#3179](https://github.com/pypeclub/OpenPype/pull/3179) +- Ftrack: Single image reviewable [\#3157](https://github.com/pypeclub/OpenPype/pull/3157) +- Nuke: Expose write attributes to settings [\#3123](https://github.com/pypeclub/OpenPype/pull/3123) +- Hiero: Initial frame publish support [\#3106](https://github.com/pypeclub/OpenPype/pull/3106) +- Unreal: Render Publishing [\#2917](https://github.com/pypeclub/OpenPype/pull/2917) +- AfterEffects: Implemented New Publisher [\#2838](https://github.com/pypeclub/OpenPype/pull/2838) +- Unreal: Rendering implementation [\#2410](https://github.com/pypeclub/OpenPype/pull/2410) + +**🚀 Enhancements** + +- Maya: FBX camera export [\#3253](https://github.com/pypeclub/OpenPype/pull/3253) +- General: updating common vendor `scriptmenu` to 1.5.2 [\#3246](https://github.com/pypeclub/OpenPype/pull/3246) +- Project Manager: Allow to paste Tasks into multiple assets at the same time [\#3226](https://github.com/pypeclub/OpenPype/pull/3226) +- Project manager: Sped up project load [\#3216](https://github.com/pypeclub/OpenPype/pull/3216) +- Loader UI: Speed issues of loader with sync server [\#3199](https://github.com/pypeclub/OpenPype/pull/3199) +- Looks: add basic support for Renderman [\#3190](https://github.com/pypeclub/OpenPype/pull/3190) +- Maya: added clean\_import option to Import loader [\#3181](https://github.com/pypeclub/OpenPype/pull/3181) +- Add the scripts menu definition to nuke [\#3168](https://github.com/pypeclub/OpenPype/pull/3168) +- Maya: add maya 2023 to default applications 
[\#3167](https://github.com/pypeclub/OpenPype/pull/3167) +- Compressed bgeo publishing in SAP and Houdini loader [\#3153](https://github.com/pypeclub/OpenPype/pull/3153) +- General: Add 'dataclasses' to required python modules [\#3149](https://github.com/pypeclub/OpenPype/pull/3149) +- Hooks: Tweak logging grammar [\#3147](https://github.com/pypeclub/OpenPype/pull/3147) +- Nuke: settings for reformat node in CreateWriteRender node [\#3143](https://github.com/pypeclub/OpenPype/pull/3143) +- Houdini: Add loader for alembic through Alembic Archive node [\#3140](https://github.com/pypeclub/OpenPype/pull/3140) +- Publisher: UI Modifications and fixes [\#3139](https://github.com/pypeclub/OpenPype/pull/3139) +- General: Simplified OP modules/addons import [\#3137](https://github.com/pypeclub/OpenPype/pull/3137) +- Terminal: Tweak coloring of TrayModuleManager logging enabled states [\#3133](https://github.com/pypeclub/OpenPype/pull/3133) +- General: Cleanup some Loader docstrings [\#3131](https://github.com/pypeclub/OpenPype/pull/3131) +- Nuke: render instance with subset name filtered overrides [\#3117](https://github.com/pypeclub/OpenPype/pull/3117) +- Unreal: Layout and Camera update and remove functions reimplemented and improvements [\#3116](https://github.com/pypeclub/OpenPype/pull/3116) +- Settings: Remove environment groups from settings [\#3115](https://github.com/pypeclub/OpenPype/pull/3115) +- TVPaint: Match renderlayer key with other hosts [\#3110](https://github.com/pypeclub/OpenPype/pull/3110) +- Ftrack: AssetVersion status on publish [\#3108](https://github.com/pypeclub/OpenPype/pull/3108) +- Tray publisher: Simple families from settings [\#3105](https://github.com/pypeclub/OpenPype/pull/3105) +- Local Settings UI: Overlay messages on save and reset [\#3104](https://github.com/pypeclub/OpenPype/pull/3104) +- General: Remove repos related logic [\#3087](https://github.com/pypeclub/OpenPype/pull/3087) +- Standalone publisher: add support for bgeo and vdb [\#3080](https://github.com/pypeclub/OpenPype/pull/3080) +- Houdini: Fix FPS + outdated content pop-ups [\#3079](https://github.com/pypeclub/OpenPype/pull/3079) +- General: Add global log verbose arguments [\#3070](https://github.com/pypeclub/OpenPype/pull/3070) +- Flame: extract presets distribution [\#3063](https://github.com/pypeclub/OpenPype/pull/3063) +- Update collect\_render.py [\#3055](https://github.com/pypeclub/OpenPype/pull/3055) +- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) +- Maya: Implement Hardware Renderer 2.0 support for Render Products [\#2611](https://github.com/pypeclub/OpenPype/pull/2611) + +**🐛 Bug fixes** + +- nuke: use framerange issue [\#3254](https://github.com/pypeclub/OpenPype/pull/3254) +- Ftrack: Chunk sizes for queries has minimal condition [\#3244](https://github.com/pypeclub/OpenPype/pull/3244) +- Maya: renderman displays needs to be filtered [\#3242](https://github.com/pypeclub/OpenPype/pull/3242) +- Ftrack: Validate that the user exists on ftrack [\#3237](https://github.com/pypeclub/OpenPype/pull/3237) +- Maya: Fix support for multiple resolutions [\#3236](https://github.com/pypeclub/OpenPype/pull/3236) +- TVPaint: Look for more groups than 12 [\#3228](https://github.com/pypeclub/OpenPype/pull/3228) +- Hiero: debugging frame range and other 3.10 [\#3222](https://github.com/pypeclub/OpenPype/pull/3222) +- Project Manager: Fix persistent editors on project change 
[\#3218](https://github.com/pypeclub/OpenPype/pull/3218) +- Deadline: instance data overwrite fix [\#3214](https://github.com/pypeclub/OpenPype/pull/3214) +- Ftrack: Push hierarchical attributes action works [\#3210](https://github.com/pypeclub/OpenPype/pull/3210) +- Standalone Publisher: Always create new representation for thumbnail [\#3203](https://github.com/pypeclub/OpenPype/pull/3203) +- Photoshop: skip collector when automatic testing [\#3202](https://github.com/pypeclub/OpenPype/pull/3202) +- Nuke: render/workfile version sync doesn't work on farm [\#3185](https://github.com/pypeclub/OpenPype/pull/3185) +- Ftrack: Review image only if there are no mp4 reviews [\#3183](https://github.com/pypeclub/OpenPype/pull/3183) +- Ftrack: Locations deepcopy issue [\#3177](https://github.com/pypeclub/OpenPype/pull/3177) +- General: Avoid creating multiple thumbnails [\#3176](https://github.com/pypeclub/OpenPype/pull/3176) +- General/Hiero: better clip duration calculation [\#3169](https://github.com/pypeclub/OpenPype/pull/3169) +- General: Oiio conversion for ffmpeg checks for invalid characters [\#3166](https://github.com/pypeclub/OpenPype/pull/3166) +- Fix for attaching render to subset [\#3164](https://github.com/pypeclub/OpenPype/pull/3164) +- Harmony: fixed missing task name in render instance [\#3163](https://github.com/pypeclub/OpenPype/pull/3163) +- Ftrack: Action delete old versions formatting works [\#3152](https://github.com/pypeclub/OpenPype/pull/3152) +- Deadline: fix the output directory [\#3144](https://github.com/pypeclub/OpenPype/pull/3144) +- General: New Session schema [\#3141](https://github.com/pypeclub/OpenPype/pull/3141) +- General: Missing version on headless mode crash properly [\#3136](https://github.com/pypeclub/OpenPype/pull/3136) +- TVPaint: Composite layers in reversed order [\#3135](https://github.com/pypeclub/OpenPype/pull/3135) +- Nuke: fixing default settings for workfile builder loaders [\#3120](https://github.com/pypeclub/OpenPype/pull/3120) +- Nuke: fix anatomy imageio regex default [\#3119](https://github.com/pypeclub/OpenPype/pull/3119) +- General: Python 3 compatibility in queries [\#3112](https://github.com/pypeclub/OpenPype/pull/3112) +- General: TemplateResult can be copied [\#3099](https://github.com/pypeclub/OpenPype/pull/3099) +- General: Collect loaded versions skips not existing representations [\#3095](https://github.com/pypeclub/OpenPype/pull/3095) +- RoyalRender Control Submission - AVALON\_APP\_NAME default [\#3091](https://github.com/pypeclub/OpenPype/pull/3091) +- Ftrack: Update Create Folders action [\#3089](https://github.com/pypeclub/OpenPype/pull/3089) +- Maya: Collect Render fix any render cameras check [\#3088](https://github.com/pypeclub/OpenPype/pull/3088) +- Project Manager: Avoid unnecessary updates of asset documents [\#3083](https://github.com/pypeclub/OpenPype/pull/3083) +- Standalone publisher: Fix plugins install [\#3077](https://github.com/pypeclub/OpenPype/pull/3077) +- General: Extract review sequence is not converted with same names [\#3076](https://github.com/pypeclub/OpenPype/pull/3076) +- Webpublisher: Use variant value [\#3068](https://github.com/pypeclub/OpenPype/pull/3068) +- Nuke: Add aov matching even for remainder and prerender [\#3060](https://github.com/pypeclub/OpenPype/pull/3060) +- Fix support for Renderman in Maya [\#3006](https://github.com/pypeclub/OpenPype/pull/3006) + +**🔀 Refactored code** + +- Avalon repo removed from Jobs workflow [\#3193](https://github.com/pypeclub/OpenPype/pull/3193) +- General: 
Remove remaining imports from avalon [\#3130](https://github.com/pypeclub/OpenPype/pull/3130) +- General: Move mongo db logic and remove avalon repository [\#3066](https://github.com/pypeclub/OpenPype/pull/3066) +- General: Move host install [\#3009](https://github.com/pypeclub/OpenPype/pull/3009) + +**Merged pull requests:** + +- Harmony: message length in 21.1 [\#3257](https://github.com/pypeclub/OpenPype/pull/3257) +- Harmony: 21.1 fix [\#3249](https://github.com/pypeclub/OpenPype/pull/3249) +- Maya: added jpg to filter for Image Plane Loader [\#3223](https://github.com/pypeclub/OpenPype/pull/3223) +- Webpublisher: replace space by underscore in subset names [\#3160](https://github.com/pypeclub/OpenPype/pull/3160) +- StandalonePublisher: removed Extract Background plugins [\#3093](https://github.com/pypeclub/OpenPype/pull/3093) +- Nuke: added suspend\_publish knob [\#3078](https://github.com/pypeclub/OpenPype/pull/3078) +- Bump async from 2.6.3 to 2.6.4 in /website [\#3065](https://github.com/pypeclub/OpenPype/pull/3065) +- SiteSync: Download all workfile inputs [\#2966](https://github.com/pypeclub/OpenPype/pull/2966) +- Photoshop: New Publisher [\#2933](https://github.com/pypeclub/OpenPype/pull/2933) +- Bump pillow from 9.0.0 to 9.0.1 [\#2880](https://github.com/pypeclub/OpenPype/pull/2880) +- AfterEffects: Allow configuration of default variant via Settings [\#2856](https://github.com/pypeclub/OpenPype/pull/2856) + +## [3.9.8](https://github.com/pypeclub/OpenPype/tree/3.9.8) (2022-05-19) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.7...3.9.8) + +## [3.9.7](https://github.com/pypeclub/OpenPype/tree/3.9.7) (2022-05-11) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.6...3.9.7) + +## [3.9.6](https://github.com/pypeclub/OpenPype/tree/3.9.6) (2022-05-03) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.5...3.9.6) + +## [3.9.5](https://github.com/pypeclub/OpenPype/tree/3.9.5) (2022-04-25) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.4...3.9.5) + +## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.3...3.9.4) + +### 📖 Documentation + +- Documentation: more info about Tasks [\#3062](https://github.com/pypeclub/OpenPype/pull/3062) +- Documentation: Python requirements to 3.7.9 [\#3035](https://github.com/pypeclub/OpenPype/pull/3035) +- Website Docs: Remove unused pages [\#2974](https://github.com/pypeclub/OpenPype/pull/2974) + +**🆕 New features** + +- General: Local overrides for environment variables [\#3045](https://github.com/pypeclub/OpenPype/pull/3045) +- Flame: Flare integration preparation [\#2928](https://github.com/pypeclub/OpenPype/pull/2928) + +**🚀 Enhancements** + +- TVPaint: Added init file for worker to triggers missing sound file dialog [\#3053](https://github.com/pypeclub/OpenPype/pull/3053) +- Ftrack: Custom attributes can be filled in slate values [\#3036](https://github.com/pypeclub/OpenPype/pull/3036) +- Resolve environment variable in google drive credential path [\#3008](https://github.com/pypeclub/OpenPype/pull/3008) + +**🐛 Bug fixes** + +- GitHub: Updated push-protected action in github workflow [\#3064](https://github.com/pypeclub/OpenPype/pull/3064) +- Nuke: Typos in imports from Nuke implementation [\#3061](https://github.com/pypeclub/OpenPype/pull/3061) +- Hotfix: fixing deadline job publishing [\#3059](https://github.com/pypeclub/OpenPype/pull/3059) +- General: Extract Review handle 
invalid characters for ffmpeg [\#3050](https://github.com/pypeclub/OpenPype/pull/3050) +- Slate Review: Support to keep format on slate concatenation [\#3049](https://github.com/pypeclub/OpenPype/pull/3049) +- Webpublisher: fix processing of workfile [\#3048](https://github.com/pypeclub/OpenPype/pull/3048) +- Ftrack: Integrate ftrack api fix [\#3044](https://github.com/pypeclub/OpenPype/pull/3044) +- Webpublisher - removed wrong hardcoded family [\#3043](https://github.com/pypeclub/OpenPype/pull/3043) +- LibraryLoader: Use current project for asset query in families filter [\#3042](https://github.com/pypeclub/OpenPype/pull/3042) +- SiteSync: Providers ignore that site is disabled [\#3041](https://github.com/pypeclub/OpenPype/pull/3041) +- Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040) +- SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018) +- Maya: invalid review flag on rendered AOVs [\#2915](https://github.com/pypeclub/OpenPype/pull/2915) + +**Merged pull requests:** + +- Deadline: reworked pools assignment [\#3051](https://github.com/pypeclub/OpenPype/pull/3051) +- Houdini: Avoid ImportError on `hdefereval` when Houdini runs without UI [\#2987](https://github.com/pypeclub/OpenPype/pull/2987) + +## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...3.9.3) + +### 📖 Documentation + +- Documentation: Added mention of adding My Drive as a root [\#2999](https://github.com/pypeclub/OpenPype/pull/2999) +- Website Docs: Manager Ftrack fix broken links [\#2979](https://github.com/pypeclub/OpenPype/pull/2979) +- Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951) +- Documentation: New publisher develop docs [\#2896](https://github.com/pypeclub/OpenPype/pull/2896) + +**🆕 New features** + +- Ftrack: Add description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027) +- nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992) +- Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988) +- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) +- Multiverse: Initial Support [\#2908](https://github.com/pypeclub/OpenPype/pull/2908) + +**🚀 Enhancements** + +- General: default workfile subset name for workfile [\#3011](https://github.com/pypeclub/OpenPype/pull/3011) +- Ftrack: Add more options for note text of integrate ftrack note [\#3025](https://github.com/pypeclub/OpenPype/pull/3025) +- Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) +- Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) +- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) +- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) +- TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) +- Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) +- Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) +- General: `METADATA_KEYS` constant as `frozenset` for optimal immutable lookup 
[\#2980](https://github.com/pypeclub/OpenPype/pull/2980) +- General: Tools with host filters [\#2975](https://github.com/pypeclub/OpenPype/pull/2975) +- Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967) +- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945) +- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943) +- TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942) +- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) +- Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) +- General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923) +- CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919) +- Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916) +- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) +- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) +- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) + +**🐛 Bug fixes** + +- General: Fix validate asset docs plug-in filename and class name [\#3029](https://github.com/pypeclub/OpenPype/pull/3029) +- Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033) +- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) +- General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028) +- Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) +- AfterEffects: Added creating subset name for workfile from template [\#3023](https://github.com/pypeclub/OpenPype/pull/3023) +- General: Add example addons to ignored [\#3022](https://github.com/pypeclub/OpenPype/pull/3022) +- Maya: Remove missing import [\#3017](https://github.com/pypeclub/OpenPype/pull/3017) +- Ftrack: multiple reviewable componets [\#3012](https://github.com/pypeclub/OpenPype/pull/3012) +- Tray publisher: Fixes after code movement [\#3010](https://github.com/pypeclub/OpenPype/pull/3010) +- Hosts: Remove path existence checks in 'add\_implementation\_envs' [\#3004](https://github.com/pypeclub/OpenPype/pull/3004) +- Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) +- Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998) +- Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996) +- PS: fix renaming subset incorrectly in PS [\#2991](https://github.com/pypeclub/OpenPype/pull/2991) +- Fix: Disable setuptools auto discovery [\#2990](https://github.com/pypeclub/OpenPype/pull/2990) +- AEL: fix opening existing workfile if no scene opened [\#2989](https://github.com/pypeclub/OpenPype/pull/2989) +- Maya: Don't do hardlinks on windows for look publishing [\#2986](https://github.com/pypeclub/OpenPype/pull/2986) +- Settings UI: Fix version completer 
on linux [\#2981](https://github.com/pypeclub/OpenPype/pull/2981) +- Photoshop: Fix creation of subset names in PS review and workfile [\#2969](https://github.com/pypeclub/OpenPype/pull/2969) +- Slack: Added default for review\_upload\_limit for Slack [\#2965](https://github.com/pypeclub/OpenPype/pull/2965) +- General: OIIO conversion for ffmeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958) +- Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956) +- General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950) +- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949) +- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948) +- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947) +- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944) +- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941) +- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939) +- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936) +- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934) +- Maya: Do not pass `set` to maya commands \(fixes support for older maya versions\) [\#2932](https://github.com/pypeclub/OpenPype/pull/2932) +- General: Don't print log record on OSError [\#2926](https://github.com/pypeclub/OpenPype/pull/2926) +- Hiero: Fix import of 'register\_event\_callback' [\#2924](https://github.com/pypeclub/OpenPype/pull/2924) +- Flame: centos related debugging [\#2922](https://github.com/pypeclub/OpenPype/pull/2922) +- Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905) +- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) + +**🔀 Refactored code** + +- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935) +- General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931) +- General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927) +- General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918) +- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) +- General: Move remaining plugins from avalon [\#2912](https://github.com/pypeclub/OpenPype/pull/2912) + +**Merged pull requests:** + +- Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030) +- Bump paramiko from 2.9.2 to 2.10.1 [\#2973](https://github.com/pypeclub/OpenPype/pull/2973) +- Bump minimist from 1.2.5 to 1.2.6 in /website [\#2954](https://github.com/pypeclub/OpenPype/pull/2954) +- Bump node-forge from 1.2.1 to 1.3.0 in /website [\#2953](https://github.com/pypeclub/OpenPype/pull/2953) +- Maya - added transparency into review creator [\#2952](https://github.com/pypeclub/OpenPype/pull/2952) + +## [3.9.2](https://github.com/pypeclub/OpenPype/tree/3.9.2) (2022-04-04) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...3.9.2) + +## 
[3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) + +**🚀 Enhancements** + +- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) +- nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) +- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) +- Nuke: ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) +- Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) + +**🐛 Bug fixes** + +- General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904) +- Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902) +- Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) +- SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) +- Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) +- General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) +- General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) +- Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) +- Flame Babypublisher optimalization [\#2806](https://github.com/pypeclub/OpenPype/pull/2806) +- hotfix: OIIO tool path - add extension on windows [\#2618](https://github.com/pypeclub/OpenPype/pull/2618) + +**🔀 Refactored code** + +- General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) +- General: Move loader logic from avalon to openpype [\#2886](https://github.com/pypeclub/OpenPype/pull/2886) + +## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.2...3.9.0) + +**Deprecated:** + +- Houdini: Remove unused code [\#2779](https://github.com/pypeclub/OpenPype/pull/2779) +- Loader: Remove default family states for hosts from code [\#2706](https://github.com/pypeclub/OpenPype/pull/2706) +- AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845) + +### 📖 Documentation + +- Documentation: fixed broken links [\#2799](https://github.com/pypeclub/OpenPype/pull/2799) +- Documentation: broken link fix [\#2785](https://github.com/pypeclub/OpenPype/pull/2785) +- Documentation: link fixes [\#2772](https://github.com/pypeclub/OpenPype/pull/2772) +- Update docusaurus to latest version [\#2760](https://github.com/pypeclub/OpenPype/pull/2760) +- Various testing updates [\#2726](https://github.com/pypeclub/OpenPype/pull/2726) +- documentation: add example to `repack-version` command [\#2669](https://github.com/pypeclub/OpenPype/pull/2669) +- Update docusaurus [\#2639](https://github.com/pypeclub/OpenPype/pull/2639) +- Documentation: Fixed relative links [\#2621](https://github.com/pypeclub/OpenPype/pull/2621) +- Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878) + +**🆕 New features** + +- Flame: loading clips to reels 
[\#2622](https://github.com/pypeclub/OpenPype/pull/2622) +- General: Store settings by OpenPype version [\#2570](https://github.com/pypeclub/OpenPype/pull/2570) + +**🚀 Enhancements** + +- New: Validation exceptions [\#2841](https://github.com/pypeclub/OpenPype/pull/2841) +- General: Set context environments for non host applications [\#2803](https://github.com/pypeclub/OpenPype/pull/2803) +- Houdini: Remove duplicate ValidateOutputNode plug-in [\#2780](https://github.com/pypeclub/OpenPype/pull/2780) +- Tray publisher: New Tray Publisher host \(beta\) [\#2778](https://github.com/pypeclub/OpenPype/pull/2778) +- Slack: Added regex for filtering on subset names [\#2775](https://github.com/pypeclub/OpenPype/pull/2775) +- Houdini: Implement Reset Frame Range [\#2770](https://github.com/pypeclub/OpenPype/pull/2770) +- Pyblish Pype: Remove redundant new line in installed fonts printing [\#2758](https://github.com/pypeclub/OpenPype/pull/2758) +- Flame: use Shot Name on segment for asset name [\#2751](https://github.com/pypeclub/OpenPype/pull/2751) +- Flame: adding validator source clip [\#2746](https://github.com/pypeclub/OpenPype/pull/2746) +- Work Files: Preserve subversion comment of current filename by default [\#2734](https://github.com/pypeclub/OpenPype/pull/2734) +- Maya: set Deadline job/batch name to original source workfile name instead of published workfile [\#2733](https://github.com/pypeclub/OpenPype/pull/2733) +- Ftrack: Disable ftrack module by default [\#2732](https://github.com/pypeclub/OpenPype/pull/2732) +- Project Manager: Disable add task, add asset and save button when not in a project [\#2727](https://github.com/pypeclub/OpenPype/pull/2727) +- dropbox handle big file [\#2718](https://github.com/pypeclub/OpenPype/pull/2718) +- Fusion Move PR: Minor tweaks to Fusion integration [\#2716](https://github.com/pypeclub/OpenPype/pull/2716) +- RoyalRender: Minor enhancements [\#2700](https://github.com/pypeclub/OpenPype/pull/2700) +- Nuke: prerender with review knob [\#2691](https://github.com/pypeclub/OpenPype/pull/2691) +- Maya configurable unit validator [\#2680](https://github.com/pypeclub/OpenPype/pull/2680) +- General: Add settings for CleanUpFarm and disable the plugin by default [\#2679](https://github.com/pypeclub/OpenPype/pull/2679) +- Project Manager: Only allow scroll wheel edits when spinbox is active [\#2678](https://github.com/pypeclub/OpenPype/pull/2678) +- Ftrack: Sync description to assets [\#2670](https://github.com/pypeclub/OpenPype/pull/2670) +- Houdini: Moved to OpenPype [\#2658](https://github.com/pypeclub/OpenPype/pull/2658) +- Maya: Move implementation to OpenPype [\#2649](https://github.com/pypeclub/OpenPype/pull/2649) +- General: FFmpeg conversion also check attribute string length [\#2635](https://github.com/pypeclub/OpenPype/pull/2635) +- Houdini: Load Arnold .ass procedurals into Houdini [\#2606](https://github.com/pypeclub/OpenPype/pull/2606) +- Deadline: Simplify GlobalJobPreLoad logic [\#2605](https://github.com/pypeclub/OpenPype/pull/2605) +- Houdini: Implement Arnold .ass standin extraction from Houdini \(also support .ass.gz\) [\#2603](https://github.com/pypeclub/OpenPype/pull/2603) +- New Publisher: New features and preparations for new standalone publisher [\#2556](https://github.com/pypeclub/OpenPype/pull/2556) +- Fix Maya 2022 Python 3 compatibility [\#2445](https://github.com/pypeclub/OpenPype/pull/2445) +- TVPaint: Use new publisher exceptions in validators [\#2435](https://github.com/pypeclub/OpenPype/pull/2435) +- Harmony: Added new style 
validations for New Publisher [\#2434](https://github.com/pypeclub/OpenPype/pull/2434) +- Aftereffects: New style validations for New publisher [\#2430](https://github.com/pypeclub/OpenPype/pull/2430) +- Farm publishing: New cleanup plugin for Maya renders on farm [\#2390](https://github.com/pypeclub/OpenPype/pull/2390) +- General: Subset name filtering in ExtractReview outpus [\#2872](https://github.com/pypeclub/OpenPype/pull/2872) +- NewPublisher: Descriptions and Icons in creator dialog [\#2867](https://github.com/pypeclub/OpenPype/pull/2867) +- NewPublisher: Changing task on publishing instance [\#2863](https://github.com/pypeclub/OpenPype/pull/2863) +- TrayPublisher: Choose project widget is more clear [\#2859](https://github.com/pypeclub/OpenPype/pull/2859) +- Maya: add loaded containers to published instance [\#2837](https://github.com/pypeclub/OpenPype/pull/2837) +- Ftrack: Can sync fps as string [\#2836](https://github.com/pypeclub/OpenPype/pull/2836) +- General: Custom function for find executable [\#2822](https://github.com/pypeclub/OpenPype/pull/2822) +- General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817) +- global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812) +- Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811) +- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805) +- Houdini: Move Houdini Save Current File to beginning of ExtractorOrder [\#2747](https://github.com/pypeclub/OpenPype/pull/2747) +- Global: adding studio name/code to anatomy template formatting data [\#2630](https://github.com/pypeclub/OpenPype/pull/2630) + +**🐛 Bug fixes** + +- Settings UI: Search case sensitivity [\#2810](https://github.com/pypeclub/OpenPype/pull/2810) +- resolve: fixing fusion module loading [\#2802](https://github.com/pypeclub/OpenPype/pull/2802) +- Ftrack: Unset task ids from asset versions before tasks are removed [\#2800](https://github.com/pypeclub/OpenPype/pull/2800) +- Slack: fail gracefully if slack exception [\#2798](https://github.com/pypeclub/OpenPype/pull/2798) +- Flame: Fix version string in default settings [\#2783](https://github.com/pypeclub/OpenPype/pull/2783) +- After Effects: Fix typo in name `afftereffects` -\> `aftereffects` [\#2768](https://github.com/pypeclub/OpenPype/pull/2768) +- Houdini: Fix open last workfile [\#2767](https://github.com/pypeclub/OpenPype/pull/2767) +- Avoid renaming udim indexes [\#2765](https://github.com/pypeclub/OpenPype/pull/2765) +- Maya: Fix `unique_namespace` when in an namespace that is empty [\#2759](https://github.com/pypeclub/OpenPype/pull/2759) +- Loader UI: Fix right click in representation widget [\#2757](https://github.com/pypeclub/OpenPype/pull/2757) +- Harmony: Rendering in Deadline didn't work in other machines than submitter [\#2754](https://github.com/pypeclub/OpenPype/pull/2754) +- Aftereffects 2022 and Deadline [\#2748](https://github.com/pypeclub/OpenPype/pull/2748) +- Flame: bunch of bugs [\#2745](https://github.com/pypeclub/OpenPype/pull/2745) +- Maya: Save current scene on workfile publish [\#2744](https://github.com/pypeclub/OpenPype/pull/2744) +- Version Up: Preserve parts of filename after version number \(like subversion\) on version\_up [\#2741](https://github.com/pypeclub/OpenPype/pull/2741) +- Loader UI: Multiple asset selection and underline colors fixed [\#2731](https://github.com/pypeclub/OpenPype/pull/2731) +- General: Fix 
loading of unused chars in xml format [\#2729](https://github.com/pypeclub/OpenPype/pull/2729) +- TVPaint: Set objectName with members [\#2725](https://github.com/pypeclub/OpenPype/pull/2725) +- General: Don't use 'objectName' from loaded references [\#2715](https://github.com/pypeclub/OpenPype/pull/2715) +- Settings: Studio Project anatomy is queried using right keys [\#2711](https://github.com/pypeclub/OpenPype/pull/2711) +- Local Settings: Additional applications don't break UI [\#2710](https://github.com/pypeclub/OpenPype/pull/2710) +- Maya: Remove some unused code [\#2709](https://github.com/pypeclub/OpenPype/pull/2709) +- Houdini: Fix refactor of Houdini host move for CreateArnoldAss [\#2704](https://github.com/pypeclub/OpenPype/pull/2704) +- LookAssigner: Fix imports after moving code to OpenPype repository [\#2701](https://github.com/pypeclub/OpenPype/pull/2701) +- Multiple hosts: unify menu style across hosts [\#2693](https://github.com/pypeclub/OpenPype/pull/2693) +- Maya Redshift fixes [\#2692](https://github.com/pypeclub/OpenPype/pull/2692) +- Maya: fix fps validation popup [\#2685](https://github.com/pypeclub/OpenPype/pull/2685) +- Houdini Explicitly collect correct frame name even in case of single frame render when `frameStart` is provided [\#2676](https://github.com/pypeclub/OpenPype/pull/2676) +- hiero: fix effect collector name and order [\#2673](https://github.com/pypeclub/OpenPype/pull/2673) +- Maya: Fix menu callbacks [\#2671](https://github.com/pypeclub/OpenPype/pull/2671) +- hiero: removing obsolete unsupported plugin [\#2667](https://github.com/pypeclub/OpenPype/pull/2667) +- Launcher: Fix access to 'data' attribute on actions [\#2659](https://github.com/pypeclub/OpenPype/pull/2659) +- Maya `vrscene` loader fixes [\#2633](https://github.com/pypeclub/OpenPype/pull/2633) +- Houdini: fix usd family in loader and integrators [\#2631](https://github.com/pypeclub/OpenPype/pull/2631) +- Maya: Add only reference node to look family container like with other families [\#2508](https://github.com/pypeclub/OpenPype/pull/2508) +- General: Missing time function [\#2877](https://github.com/pypeclub/OpenPype/pull/2877) +- Deadline: Fix plugin name for tile assemble [\#2868](https://github.com/pypeclub/OpenPype/pull/2868) +- Nuke: gizmo precollect fix [\#2866](https://github.com/pypeclub/OpenPype/pull/2866) +- General: Fix hardlink for windows [\#2864](https://github.com/pypeclub/OpenPype/pull/2864) +- General: ffmpeg was crashing on slate merge [\#2860](https://github.com/pypeclub/OpenPype/pull/2860) +- WebPublisher: Video file was published with one too many frame [\#2858](https://github.com/pypeclub/OpenPype/pull/2858) +- New Publisher: Error dialog got right styles [\#2857](https://github.com/pypeclub/OpenPype/pull/2857) +- General: Fix getattr clalback on dynamic modules [\#2855](https://github.com/pypeclub/OpenPype/pull/2855) +- Nuke: slate resolution to input video resolution [\#2853](https://github.com/pypeclub/OpenPype/pull/2853) +- WebPublisher: Fix username stored in DB [\#2852](https://github.com/pypeclub/OpenPype/pull/2852) +- WebPublisher: Fix wrong number of frames for video file [\#2851](https://github.com/pypeclub/OpenPype/pull/2851) +- Nuke: Fix family test in validate\_write\_legacy to work with stillImage [\#2847](https://github.com/pypeclub/OpenPype/pull/2847) +- Nuke: fix multiple baking profile farm publishing [\#2842](https://github.com/pypeclub/OpenPype/pull/2842) +- Blender: Fixed parameters for FBX export of the camera 
[\#2840](https://github.com/pypeclub/OpenPype/pull/2840) +- Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832) +- Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828) +- Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827) +- Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825) +- Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824) +- General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821) +- Settings UI: Fix "Apply from" action [\#2820](https://github.com/pypeclub/OpenPype/pull/2820) +- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819) +- Nuke: Use AVALON\_APP to get value for "app" key [\#2818](https://github.com/pypeclub/OpenPype/pull/2818) +- StandalonePublisher: use dynamic groups in subset names [\#2816](https://github.com/pypeclub/OpenPype/pull/2816) + +**🔀 Refactored code** + +- Ftrack: Moved module one hierarchy level higher [\#2792](https://github.com/pypeclub/OpenPype/pull/2792) +- SyncServer: Moved module one hierarchy level higher [\#2791](https://github.com/pypeclub/OpenPype/pull/2791) +- Royal render: Move module one hierarchy level higher [\#2790](https://github.com/pypeclub/OpenPype/pull/2790) +- Deadline: Move module one hierarchy level higher [\#2789](https://github.com/pypeclub/OpenPype/pull/2789) +- Refactor: move webserver tool to openpype [\#2876](https://github.com/pypeclub/OpenPype/pull/2876) +- General: Move create logic from avalon to OpenPype [\#2854](https://github.com/pypeclub/OpenPype/pull/2854) +- General: Add vendors from avalon [\#2848](https://github.com/pypeclub/OpenPype/pull/2848) +- General: Basic event system [\#2846](https://github.com/pypeclub/OpenPype/pull/2846) +- General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839) +- Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829) +- Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823) +- General: Extract template formatting from anatomy [\#2766](https://github.com/pypeclub/OpenPype/pull/2766) + +**Merged pull requests:** + +- Fusion: Moved implementation into OpenPype [\#2713](https://github.com/pypeclub/OpenPype/pull/2713) +- TVPaint: Plugin build without dependencies [\#2705](https://github.com/pypeclub/OpenPype/pull/2705) +- Webpublisher: Photoshop create a beauty png [\#2689](https://github.com/pypeclub/OpenPype/pull/2689) +- Ftrack: Hierarchical attributes are queried properly [\#2682](https://github.com/pypeclub/OpenPype/pull/2682) +- Maya: Add Validate Frame Range settings [\#2661](https://github.com/pypeclub/OpenPype/pull/2661) +- Harmony: move to Openpype [\#2657](https://github.com/pypeclub/OpenPype/pull/2657) +- Maya: cleanup duplicate rendersetup code [\#2642](https://github.com/pypeclub/OpenPype/pull/2642) +- Deadline: Be able to pass Mongo url to job [\#2616](https://github.com/pypeclub/OpenPype/pull/2616) + +## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.1...3.8.2) + +### 📖 Documentation + +- Cosmetics: Fix common typos in openpype/website [\#2617](https://github.com/pypeclub/OpenPype/pull/2617) + +**🚀 
Enhancements** + +- TVPaint: Image loaders also work on review family [\#2638](https://github.com/pypeclub/OpenPype/pull/2638) +- General: Project backup tools [\#2629](https://github.com/pypeclub/OpenPype/pull/2629) +- nuke: adding clear button to write nodes [\#2627](https://github.com/pypeclub/OpenPype/pull/2627) +- Ftrack: Family to Asset type mapping is in settings [\#2602](https://github.com/pypeclub/OpenPype/pull/2602) +- Nuke: load color space from representation data [\#2576](https://github.com/pypeclub/OpenPype/pull/2576) + +**🐛 Bug fixes** + +- Fix pulling of cx\_freeze 6.10 [\#2628](https://github.com/pypeclub/OpenPype/pull/2628) +- Global: fix broken otio review extractor [\#2590](https://github.com/pypeclub/OpenPype/pull/2590) + +**Merged pull requests:** + +- WebPublisher: fix instance duplicates [\#2641](https://github.com/pypeclub/OpenPype/pull/2641) +- Fix - safer pulling of task name for webpublishing from PS [\#2613](https://github.com/pypeclub/OpenPype/pull/2613) + +## [3.8.1](https://github.com/pypeclub/OpenPype/tree/3.8.1) (2022-02-01) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.0...3.8.1) + +**🚀 Enhancements** + +- Webpublisher: Thumbnail extractor [\#2600](https://github.com/pypeclub/OpenPype/pull/2600) +- Loader: Allow to toggle default family filters between "include" or "exclude" filtering [\#2541](https://github.com/pypeclub/OpenPype/pull/2541) +- Launcher: Added context menu to to skip opening last workfile [\#2536](https://github.com/pypeclub/OpenPype/pull/2536) +- Unreal: JSON Layout Loading support [\#2066](https://github.com/pypeclub/OpenPype/pull/2066) + +**🐛 Bug fixes** + +- Release/3.8.0 [\#2619](https://github.com/pypeclub/OpenPype/pull/2619) +- Settings: Enum does not store empty string if has single item to select [\#2615](https://github.com/pypeclub/OpenPype/pull/2615) +- switch distutils to sysconfig for `get_platform()` [\#2594](https://github.com/pypeclub/OpenPype/pull/2594) +- Fix poetry index and speedcopy update [\#2589](https://github.com/pypeclub/OpenPype/pull/2589) +- Webpublisher: Fix - subset names from processed .psd used wrong value for task [\#2586](https://github.com/pypeclub/OpenPype/pull/2586) +- `vrscene` creator Deadline webservice URL handling [\#2580](https://github.com/pypeclub/OpenPype/pull/2580) +- global: track name was failing if duplicated root word in name [\#2568](https://github.com/pypeclub/OpenPype/pull/2568) +- Validate Maya Rig produces no cycle errors [\#2484](https://github.com/pypeclub/OpenPype/pull/2484) + +**Merged pull requests:** + +- Bump pillow from 8.4.0 to 9.0.0 [\#2595](https://github.com/pypeclub/OpenPype/pull/2595) +- Webpublisher: Skip version collect [\#2591](https://github.com/pypeclub/OpenPype/pull/2591) +- build\(deps\): bump pillow from 8.4.0 to 9.0.0 [\#2523](https://github.com/pypeclub/OpenPype/pull/2523) + +## [3.8.0](https://github.com/pypeclub/OpenPype/tree/3.8.0) (2022-01-24) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...3.8.0) + +### 📖 Documentation + +- Variable in docs renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) + +**🆕 New features** + +- Flame: extracting segments with trans-coding [\#2547](https://github.com/pypeclub/OpenPype/pull/2547) +- Maya : V-Ray Proxy - load all ABC files via proxy [\#2544](https://github.com/pypeclub/OpenPype/pull/2544) +- Maya to Unreal: Extended static mesh workflow [\#2537](https://github.com/pypeclub/OpenPype/pull/2537) +- Flame: collecting publishable instances 
[\#2519](https://github.com/pypeclub/OpenPype/pull/2519) +- Flame: create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495) +- Flame: OpenTimelineIO Export Modul [\#2398](https://github.com/pypeclub/OpenPype/pull/2398) + +**🚀 Enhancements** + +- Webpublisher: Moved error at the beginning of the log [\#2559](https://github.com/pypeclub/OpenPype/pull/2559) +- Ftrack: Use ApplicationManager to get DJV path [\#2558](https://github.com/pypeclub/OpenPype/pull/2558) +- Webpublisher: Added endpoint to reprocess batch through UI [\#2555](https://github.com/pypeclub/OpenPype/pull/2555) +- Settings: PathInput strip passed string [\#2550](https://github.com/pypeclub/OpenPype/pull/2550) +- Global: Exctract Review anatomy fill data with output name [\#2548](https://github.com/pypeclub/OpenPype/pull/2548) +- Cosmetics: Clean up some cosmetics / typos [\#2542](https://github.com/pypeclub/OpenPype/pull/2542) +- General: Validate if current process OpenPype version is requested version [\#2529](https://github.com/pypeclub/OpenPype/pull/2529) +- General: Be able to use anatomy data in ffmpeg output arguments [\#2525](https://github.com/pypeclub/OpenPype/pull/2525) +- Expose toggle publish plug-in settings for Maya Look Shading Engine Naming [\#2521](https://github.com/pypeclub/OpenPype/pull/2521) +- Photoshop: Move implementation to OpenPype [\#2510](https://github.com/pypeclub/OpenPype/pull/2510) +- TimersManager: Move module one hierarchy higher [\#2501](https://github.com/pypeclub/OpenPype/pull/2501) +- Slack: notifications are sent with Openpype logo and bot name [\#2499](https://github.com/pypeclub/OpenPype/pull/2499) +- Slack: Add review to notification message [\#2498](https://github.com/pypeclub/OpenPype/pull/2498) +- Ftrack: Event handlers settings [\#2496](https://github.com/pypeclub/OpenPype/pull/2496) +- Tools: Fix style and modality of errors in loader and creator [\#2489](https://github.com/pypeclub/OpenPype/pull/2489) +- Maya: Collect 'fps' animation data only for "review" instances [\#2486](https://github.com/pypeclub/OpenPype/pull/2486) +- Project Manager: Remove project button cleanup [\#2482](https://github.com/pypeclub/OpenPype/pull/2482) +- Tools: Be able to change models of tasks and assets widgets [\#2475](https://github.com/pypeclub/OpenPype/pull/2475) +- Publish pype: Reduce publish process defering [\#2464](https://github.com/pypeclub/OpenPype/pull/2464) +- Maya: Improve speed of Collect History logic [\#2460](https://github.com/pypeclub/OpenPype/pull/2460) +- Maya: Validate Rig Controllers - fix Error: in script editor [\#2459](https://github.com/pypeclub/OpenPype/pull/2459) +- Maya: Validate NGONs simplify and speed-up [\#2458](https://github.com/pypeclub/OpenPype/pull/2458) +- Maya: Optimize Validate Locked Normals speed for dense polymeshes [\#2457](https://github.com/pypeclub/OpenPype/pull/2457) +- Maya: Refactor missing \_get\_reference\_node method [\#2455](https://github.com/pypeclub/OpenPype/pull/2455) +- Houdini: Remove broken unique name counter [\#2450](https://github.com/pypeclub/OpenPype/pull/2450) +- Maya: Improve lib.polyConstraint performance when Select tool is not the active tool context [\#2447](https://github.com/pypeclub/OpenPype/pull/2447) +- General: Validate third party before build [\#2425](https://github.com/pypeclub/OpenPype/pull/2425) +- Maya : add option to not group reference in ReferenceLoader [\#2383](https://github.com/pypeclub/OpenPype/pull/2383) + +**🐛 Bug fixes** + +- AfterEffects: Fix - removed obsolete import 
[\#2577](https://github.com/pypeclub/OpenPype/pull/2577) +- General: OpenPype version updates [\#2575](https://github.com/pypeclub/OpenPype/pull/2575) +- Ftrack: Delete action revision [\#2563](https://github.com/pypeclub/OpenPype/pull/2563) +- Webpublisher: ftrack shows incorrect user names [\#2560](https://github.com/pypeclub/OpenPype/pull/2560) +- General: Do not validate version if build does not support it [\#2557](https://github.com/pypeclub/OpenPype/pull/2557) +- Webpublisher: Fixed progress reporting [\#2553](https://github.com/pypeclub/OpenPype/pull/2553) +- Fix Maya AssProxyLoader version switch [\#2551](https://github.com/pypeclub/OpenPype/pull/2551) +- General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549) +- Houdini: vdbcache family preserve frame numbers on publish integration + enable validate version for Houdini [\#2535](https://github.com/pypeclub/OpenPype/pull/2535) +- Maya: Fix Load VDB to V-Ray [\#2533](https://github.com/pypeclub/OpenPype/pull/2533) +- Maya: ReferenceLoader fix not unique group name error for attach to root [\#2532](https://github.com/pypeclub/OpenPype/pull/2532) +- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531) +- Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522) +- Maya: Fix Extract Look with space in names [\#2518](https://github.com/pypeclub/OpenPype/pull/2518) +- Fix published frame content for sequence starting with 0 [\#2513](https://github.com/pypeclub/OpenPype/pull/2513) +- Maya: reset empty string attributes correctly to "" instead of "None" [\#2506](https://github.com/pypeclub/OpenPype/pull/2506) +- Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505) +- General: Settings work if OpenPypeVersion is available [\#2494](https://github.com/pypeclub/OpenPype/pull/2494) +- General: PYTHONPATH may break OpenPype dependencies [\#2493](https://github.com/pypeclub/OpenPype/pull/2493) +- General: Modules import function output fix [\#2492](https://github.com/pypeclub/OpenPype/pull/2492) +- AE: fix hiding of alert window below Publish [\#2491](https://github.com/pypeclub/OpenPype/pull/2491) +- Workfiles tool: Files widget show files on first show [\#2488](https://github.com/pypeclub/OpenPype/pull/2488) +- General: Custom template paths filter fix [\#2483](https://github.com/pypeclub/OpenPype/pull/2483) +- Loader: Remove always on top flag in tray [\#2480](https://github.com/pypeclub/OpenPype/pull/2480) +- General: Anatomy does not return root envs as unicode [\#2465](https://github.com/pypeclub/OpenPype/pull/2465) +- Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456) + +**Merged pull requests:** + +- AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543) +- Maya: Remove Maya Look Assigner check on startup [\#2540](https://github.com/pypeclub/OpenPype/pull/2540) +- build\(deps\): bump shelljs from 0.8.4 to 0.8.5 in /website [\#2538](https://github.com/pypeclub/OpenPype/pull/2538) +- build\(deps\): bump follow-redirects from 1.14.4 to 1.14.7 in /website [\#2534](https://github.com/pypeclub/OpenPype/pull/2534) +- Nuke: Merge avalon's implementation into OpenPype [\#2514](https://github.com/pypeclub/OpenPype/pull/2514) +- Maya: Vray fix proxies look assignment 
[\#2392](https://github.com/pypeclub/OpenPype/pull/2392) +- Bump algoliasearch-helper from 3.4.4 to 3.6.2 in /website [\#2297](https://github.com/pypeclub/OpenPype/pull/2297) + +## [3.7.0](https://github.com/pypeclub/OpenPype/tree/3.7.0) (2022-01-04) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.4...3.7.0) + +**Deprecated:** + +- General: Default modules hierarchy n2 [\#2368](https://github.com/pypeclub/OpenPype/pull/2368) + +### 📖 Documentation + +- docs\[website\]: Add Ellipse Studio \(logo\) as an OpenPype contributor [\#2324](https://github.com/pypeclub/OpenPype/pull/2324) + +**🆕 New features** + +- Settings UI use OpenPype styles [\#2296](https://github.com/pypeclub/OpenPype/pull/2296) +- Store typed version dependencies for workfiles [\#2192](https://github.com/pypeclub/OpenPype/pull/2192) +- OpenPypeV3: add key task type, task shortname and user to path templating construction [\#2157](https://github.com/pypeclub/OpenPype/pull/2157) +- Nuke: Alembic model workflow [\#2140](https://github.com/pypeclub/OpenPype/pull/2140) +- TVPaint: Load workfile from published. [\#1980](https://github.com/pypeclub/OpenPype/pull/1980) + +**🚀 Enhancements** + +- General: Workdir extra folders [\#2462](https://github.com/pypeclub/OpenPype/pull/2462) +- Photoshop: New style validations for New publisher [\#2429](https://github.com/pypeclub/OpenPype/pull/2429) +- General: Environment variables groups [\#2424](https://github.com/pypeclub/OpenPype/pull/2424) +- Unreal: Dynamic menu created in Python [\#2422](https://github.com/pypeclub/OpenPype/pull/2422) +- Settings UI: Hyperlinks to settings [\#2420](https://github.com/pypeclub/OpenPype/pull/2420) +- Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419) +- TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418) +- General: Run applications as separate processes under linux [\#2408](https://github.com/pypeclub/OpenPype/pull/2408) +- Ftrack: Check existence of object type on recreation [\#2404](https://github.com/pypeclub/OpenPype/pull/2404) +- Enhancement: Global cleanup plugin that explicitly remove paths from context [\#2402](https://github.com/pypeclub/OpenPype/pull/2402) +- General: MongoDB ability to specify replica set groups [\#2401](https://github.com/pypeclub/OpenPype/pull/2401) +- Flame: moving `utility_scripts` to api folder also with `scripts` [\#2385](https://github.com/pypeclub/OpenPype/pull/2385) +- Centos 7 dependency compatibility [\#2384](https://github.com/pypeclub/OpenPype/pull/2384) +- Enhancement: Settings: Use project settings values from another project [\#2382](https://github.com/pypeclub/OpenPype/pull/2382) +- Blender 3: Support auto install for new blender version [\#2377](https://github.com/pypeclub/OpenPype/pull/2377) +- Maya add render image path to settings [\#2375](https://github.com/pypeclub/OpenPype/pull/2375) +- Settings: Webpublisher in hosts enum [\#2367](https://github.com/pypeclub/OpenPype/pull/2367) +- Hiero: python3 compatibility [\#2365](https://github.com/pypeclub/OpenPype/pull/2365) +- Burnins: Be able recognize mxf OPAtom format [\#2361](https://github.com/pypeclub/OpenPype/pull/2361) +- Maya: Add is\_static\_image\_plane and is\_in\_all\_views option in imagePlaneLoader [\#2356](https://github.com/pypeclub/OpenPype/pull/2356) +- Local settings: Copyable studio paths [\#2349](https://github.com/pypeclub/OpenPype/pull/2349) +- Assets Widget: Clear model on project change 
[\#2345](https://github.com/pypeclub/OpenPype/pull/2345) +- General: OpenPype default modules hierarchy [\#2338](https://github.com/pypeclub/OpenPype/pull/2338) +- TVPaint: Move implementation to OpenPype [\#2336](https://github.com/pypeclub/OpenPype/pull/2336) +- General: FFprobe error exception contain original error message [\#2328](https://github.com/pypeclub/OpenPype/pull/2328) +- Resolve: Add experimental button to menu [\#2325](https://github.com/pypeclub/OpenPype/pull/2325) +- Hiero: Add experimental tools action [\#2323](https://github.com/pypeclub/OpenPype/pull/2323) +- Input links: Cleanup and unification of differences [\#2322](https://github.com/pypeclub/OpenPype/pull/2322) +- General: Don't validate vendor bin with executing them [\#2317](https://github.com/pypeclub/OpenPype/pull/2317) +- General: Multilayer EXRs support [\#2315](https://github.com/pypeclub/OpenPype/pull/2315) +- General: Run process log stderr as info log level [\#2309](https://github.com/pypeclub/OpenPype/pull/2309) +- General: Reduce vendor imports [\#2305](https://github.com/pypeclub/OpenPype/pull/2305) +- Tools: Cleanup of unused classes [\#2304](https://github.com/pypeclub/OpenPype/pull/2304) +- Project Manager: Added ability to delete project [\#2298](https://github.com/pypeclub/OpenPype/pull/2298) +- Ftrack: Synchronize input links [\#2287](https://github.com/pypeclub/OpenPype/pull/2287) +- StandalonePublisher: Remove unused plugin ExtractHarmonyZip [\#2277](https://github.com/pypeclub/OpenPype/pull/2277) +- Ftrack: Support multiple reviews [\#2271](https://github.com/pypeclub/OpenPype/pull/2271) +- Ftrack: Remove unused clean component plugin [\#2269](https://github.com/pypeclub/OpenPype/pull/2269) +- Royal Render: Support for rr channels in separate dirs [\#2268](https://github.com/pypeclub/OpenPype/pull/2268) +- Houdini: Add experimental tools action [\#2267](https://github.com/pypeclub/OpenPype/pull/2267) +- Nuke: extract baked review videos presets [\#2248](https://github.com/pypeclub/OpenPype/pull/2248) +- TVPaint: Workers rendering [\#2209](https://github.com/pypeclub/OpenPype/pull/2209) +- OpenPypeV3: Add key parent asset to path templating construction [\#2186](https://github.com/pypeclub/OpenPype/pull/2186) + +**🐛 Bug fixes** + +- TVPaint: Create render layer dialog is in front [\#2471](https://github.com/pypeclub/OpenPype/pull/2471) +- Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428) +- PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417) +- Settings UI: Breadcrumbs path does not create new entities [\#2416](https://github.com/pypeclub/OpenPype/pull/2416) +- AfterEffects: Variant 2022 is in defaults but missing in schemas [\#2412](https://github.com/pypeclub/OpenPype/pull/2412) +- Nuke: baking representations was not additive [\#2406](https://github.com/pypeclub/OpenPype/pull/2406) +- General: Fix access to environments from default settings [\#2403](https://github.com/pypeclub/OpenPype/pull/2403) +- Fix: Placeholder Input color set fix [\#2399](https://github.com/pypeclub/OpenPype/pull/2399) +- Settings: Fix state change of wrapper label [\#2396](https://github.com/pypeclub/OpenPype/pull/2396) +- Flame: fix ftrack publisher [\#2381](https://github.com/pypeclub/OpenPype/pull/2381) +- hiero: solve custom ocio path [\#2379](https://github.com/pypeclub/OpenPype/pull/2379) +- hiero: fix workio and flatten [\#2378](https://github.com/pypeclub/OpenPype/pull/2378) +- Nuke: fixing 
menu re-drawing during context change [\#2374](https://github.com/pypeclub/OpenPype/pull/2374) +- Webpublisher: Fix assignment of families of TVpaint instances [\#2373](https://github.com/pypeclub/OpenPype/pull/2373) +- Nuke: fixing node name based on switched asset name [\#2369](https://github.com/pypeclub/OpenPype/pull/2369) +- JobQueue: Fix loading of settings [\#2362](https://github.com/pypeclub/OpenPype/pull/2362) +- Tools: Placeholder color [\#2359](https://github.com/pypeclub/OpenPype/pull/2359) +- Launcher: Minimize button on MacOs [\#2355](https://github.com/pypeclub/OpenPype/pull/2355) +- StandalonePublisher: Fix import of constant [\#2354](https://github.com/pypeclub/OpenPype/pull/2354) +- Houdini: Fix HDA creation [\#2350](https://github.com/pypeclub/OpenPype/pull/2350) +- Adobe products show issue [\#2347](https://github.com/pypeclub/OpenPype/pull/2347) +- Maya Look Assigner: Fix Python 3 compatibility [\#2343](https://github.com/pypeclub/OpenPype/pull/2343) +- Remove wrongly used host for hook [\#2342](https://github.com/pypeclub/OpenPype/pull/2342) +- Tools: Use Qt context on tools show [\#2340](https://github.com/pypeclub/OpenPype/pull/2340) +- Flame: Fix default argument value in custom dictionary [\#2339](https://github.com/pypeclub/OpenPype/pull/2339) +- Timers Manager: Disable auto stop timer on linux platform [\#2334](https://github.com/pypeclub/OpenPype/pull/2334) +- nuke: bake preset single input exception [\#2331](https://github.com/pypeclub/OpenPype/pull/2331) +- Hiero: fixing multiple templates at a hierarchy parent [\#2330](https://github.com/pypeclub/OpenPype/pull/2330) +- Fix - provider icons are pulled from a folder [\#2326](https://github.com/pypeclub/OpenPype/pull/2326) +- InputLinks: Typo in "inputLinks" key [\#2314](https://github.com/pypeclub/OpenPype/pull/2314) +- Deadline timeout and logging [\#2312](https://github.com/pypeclub/OpenPype/pull/2312) +- nuke: do not multiply representation on class method [\#2311](https://github.com/pypeclub/OpenPype/pull/2311) +- Workfiles tool: Fix task formatting [\#2306](https://github.com/pypeclub/OpenPype/pull/2306) +- Delivery: Fix delivery paths created on windows [\#2302](https://github.com/pypeclub/OpenPype/pull/2302) +- Maya: Deadline - fix limit groups [\#2295](https://github.com/pypeclub/OpenPype/pull/2295) +- Royal Render: Fix plugin order and OpenPype auto-detection [\#2291](https://github.com/pypeclub/OpenPype/pull/2291) +- New Publisher: Fix mapping of indexes [\#2285](https://github.com/pypeclub/OpenPype/pull/2285) +- Alternate site for site sync doesnt work for sequences [\#2284](https://github.com/pypeclub/OpenPype/pull/2284) +- FFmpeg: Execute ffprobe using list of arguments instead of string command [\#2281](https://github.com/pypeclub/OpenPype/pull/2281) +- Nuke: Anatomy fill data use task as dictionary [\#2278](https://github.com/pypeclub/OpenPype/pull/2278) +- Bug: fix variable name \_asset\_id in workfiles application [\#2274](https://github.com/pypeclub/OpenPype/pull/2274) +- Version handling fixes [\#2272](https://github.com/pypeclub/OpenPype/pull/2272) + +**Merged pull requests:** + +- Maya: Replaced PATH usage with vendored oiio path for maketx utility [\#2405](https://github.com/pypeclub/OpenPype/pull/2405) +- \[Fix\]\[MAYA\] Handle message type attribute within CollectLook [\#2394](https://github.com/pypeclub/OpenPype/pull/2394) +- Add validator to check correct version of extension for PS and AE [\#2387](https://github.com/pypeclub/OpenPype/pull/2387) +- Maya: configurable model top level 
validation [\#2321](https://github.com/pypeclub/OpenPype/pull/2321) +- Create test publish class for After Effects [\#2270](https://github.com/pypeclub/OpenPype/pull/2270) + +## [3.6.4](https://github.com/pypeclub/OpenPype/tree/3.6.4) (2021-11-23) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.3...3.6.4) + +**🐛 Bug fixes** + +- Nuke: inventory update removes all loaded read nodes [\#2294](https://github.com/pypeclub/OpenPype/pull/2294) + +## [3.6.3](https://github.com/pypeclub/OpenPype/tree/3.6.3) (2021-11-19) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.2...3.6.3) + +**🐛 Bug fixes** + +- Deadline: Fix publish targets [\#2280](https://github.com/pypeclub/OpenPype/pull/2280) + +## [3.6.2](https://github.com/pypeclub/OpenPype/tree/3.6.2) (2021-11-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.1...3.6.2) + +**🚀 Enhancements** + +- Tools: Assets widget [\#2265](https://github.com/pypeclub/OpenPype/pull/2265) +- SceneInventory: Choose loader in asset switcher [\#2262](https://github.com/pypeclub/OpenPype/pull/2262) +- Style: New fonts in OpenPype style [\#2256](https://github.com/pypeclub/OpenPype/pull/2256) +- Tools: SceneInventory in OpenPype [\#2255](https://github.com/pypeclub/OpenPype/pull/2255) +- Tools: Tasks widget [\#2251](https://github.com/pypeclub/OpenPype/pull/2251) +- Tools: Creator in OpenPype [\#2244](https://github.com/pypeclub/OpenPype/pull/2244) +- Added endpoint for configured extensions [\#2221](https://github.com/pypeclub/OpenPype/pull/2221) + +**🐛 Bug fixes** + +- Tools: Parenting of tools in Nuke and Hiero [\#2266](https://github.com/pypeclub/OpenPype/pull/2266) +- limiting validator to specific editorial hosts [\#2264](https://github.com/pypeclub/OpenPype/pull/2264) +- Tools: Select Context dialog attribute fix [\#2261](https://github.com/pypeclub/OpenPype/pull/2261) +- Maya: Render publishing fails on linux [\#2260](https://github.com/pypeclub/OpenPype/pull/2260) +- LookAssigner: Fix tool reopen [\#2259](https://github.com/pypeclub/OpenPype/pull/2259) +- Standalone: editorial not publishing thumbnails on all subsets [\#2258](https://github.com/pypeclub/OpenPype/pull/2258) +- Burnins: Support mxf metadata [\#2247](https://github.com/pypeclub/OpenPype/pull/2247) +- Maya: Support for configurable AOV separator characters [\#2197](https://github.com/pypeclub/OpenPype/pull/2197) +- Maya: texture colorspace modes in looks [\#2195](https://github.com/pypeclub/OpenPype/pull/2195) + +## [3.6.1](https://github.com/pypeclub/OpenPype/tree/3.6.1) (2021-11-16) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.0...3.6.1) + +**🐛 Bug fixes** + +- Loader doesn't allow changing of version before loading [\#2254](https://github.com/pypeclub/OpenPype/pull/2254) + +## [3.6.0](https://github.com/pypeclub/OpenPype/tree/3.6.0) (2021-11-15) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.5.0...3.6.0) + +### 📖 Documentation + +- Add alternative sites for Site Sync [\#2206](https://github.com/pypeclub/OpenPype/pull/2206) +- Add command line way of running site sync server [\#2188](https://github.com/pypeclub/OpenPype/pull/2188) + +**🆕 New features** + +- Add validate active site button to sync queue on a project [\#2176](https://github.com/pypeclub/OpenPype/pull/2176) +- Maya : Colorspace configuration [\#2170](https://github.com/pypeclub/OpenPype/pull/2170) +- Blender: Added support for audio [\#2168](https://github.com/pypeclub/OpenPype/pull/2168) +- Flame: a host basic integration 
[\#2165](https://github.com/pypeclub/OpenPype/pull/2165) +- Houdini: simple HDA workflow [\#2072](https://github.com/pypeclub/OpenPype/pull/2072) +- Basic Royal Render Integration ✨ [\#2061](https://github.com/pypeclub/OpenPype/pull/2061) +- Camera handling between Blender and Unreal [\#1988](https://github.com/pypeclub/OpenPype/pull/1988) +- switch PyQt5 for PySide2 [\#1744](https://github.com/pypeclub/OpenPype/pull/1744) + +**🚀 Enhancements** + +- Tools: Subset manager in OpenPype [\#2243](https://github.com/pypeclub/OpenPype/pull/2243) +- General: Skip module directories without init file [\#2239](https://github.com/pypeclub/OpenPype/pull/2239) +- General: Static interfaces [\#2238](https://github.com/pypeclub/OpenPype/pull/2238) +- Style: Fix transparent image in style [\#2235](https://github.com/pypeclub/OpenPype/pull/2235) +- Add a "following workfile versioning" option on publish [\#2225](https://github.com/pypeclub/OpenPype/pull/2225) +- Modules: Module can add cli commands [\#2224](https://github.com/pypeclub/OpenPype/pull/2224) +- Webpublisher: Separate webpublisher logic [\#2222](https://github.com/pypeclub/OpenPype/pull/2222) +- Add both side availability on Site Sync sites to Loader [\#2220](https://github.com/pypeclub/OpenPype/pull/2220) +- Tools: Center loader and library loader on show [\#2219](https://github.com/pypeclub/OpenPype/pull/2219) +- Maya : Validate shape zero [\#2212](https://github.com/pypeclub/OpenPype/pull/2212) +- Maya : validate unique names [\#2211](https://github.com/pypeclub/OpenPype/pull/2211) +- Tools: OpenPype stylesheet in workfiles tool [\#2208](https://github.com/pypeclub/OpenPype/pull/2208) +- Ftrack: Replace Queue with deque in event handlers logic [\#2204](https://github.com/pypeclub/OpenPype/pull/2204) +- Tools: New select context dialog [\#2200](https://github.com/pypeclub/OpenPype/pull/2200) +- Maya : Validate mesh ngons [\#2199](https://github.com/pypeclub/OpenPype/pull/2199) +- Dirmap in Nuke [\#2198](https://github.com/pypeclub/OpenPype/pull/2198) +- Delivery: Check 'frame' key in template for sequence delivery [\#2196](https://github.com/pypeclub/OpenPype/pull/2196) +- Settings: Site sync project settings improvement [\#2193](https://github.com/pypeclub/OpenPype/pull/2193) +- Usage of tools code [\#2185](https://github.com/pypeclub/OpenPype/pull/2185) +- Settings: Dictionary based on project roots [\#2184](https://github.com/pypeclub/OpenPype/pull/2184) +- Subset name: Be able to pass asset document to get subset name [\#2179](https://github.com/pypeclub/OpenPype/pull/2179) +- Tools: Experimental tools [\#2167](https://github.com/pypeclub/OpenPype/pull/2167) +- Loader: Refactor and use OpenPype stylesheets [\#2166](https://github.com/pypeclub/OpenPype/pull/2166) +- Add loader for linked smart objects in photoshop [\#2149](https://github.com/pypeclub/OpenPype/pull/2149) +- Burnins: DNxHD profiles handling [\#2142](https://github.com/pypeclub/OpenPype/pull/2142) +- Tools: Single access point for host tools [\#2139](https://github.com/pypeclub/OpenPype/pull/2139) + +**🐛 Bug fixes** + +- Ftrack: Sync project ftrack id cache issue [\#2250](https://github.com/pypeclub/OpenPype/pull/2250) +- Ftrack: Session creation and Prepare project [\#2245](https://github.com/pypeclub/OpenPype/pull/2245) +- Added queue for studio processing in PS [\#2237](https://github.com/pypeclub/OpenPype/pull/2237) +- Python 2: Unicode to string conversion [\#2236](https://github.com/pypeclub/OpenPype/pull/2236) +- Fix - enum for color coding in PS 
[\#2234](https://github.com/pypeclub/OpenPype/pull/2234) +- Pyblish Tool: Fix targets handling [\#2232](https://github.com/pypeclub/OpenPype/pull/2232) +- Ftrack: Base event fix of 'get\_project\_from\_entity' method [\#2214](https://github.com/pypeclub/OpenPype/pull/2214) +- Maya : multiple subsets review broken [\#2210](https://github.com/pypeclub/OpenPype/pull/2210) +- Fix - different command used for Linux and Mac OS [\#2207](https://github.com/pypeclub/OpenPype/pull/2207) +- Tools: Workfiles tool don't use avalon widgets [\#2205](https://github.com/pypeclub/OpenPype/pull/2205) +- Ftrack: Fill missing ftrack id on mongo project [\#2203](https://github.com/pypeclub/OpenPype/pull/2203) +- Project Manager: Fix copying of tasks [\#2191](https://github.com/pypeclub/OpenPype/pull/2191) +- StandalonePublisher: Source validator don't expect representations [\#2190](https://github.com/pypeclub/OpenPype/pull/2190) +- Blender: Fix trying to pack an image when the shader node has no texture [\#2183](https://github.com/pypeclub/OpenPype/pull/2183) +- Maya: review viewport settings [\#2177](https://github.com/pypeclub/OpenPype/pull/2177) +- MacOS: Launching of applications may cause Permissions error [\#2175](https://github.com/pypeclub/OpenPype/pull/2175) +- Maya: Aspect ratio [\#2174](https://github.com/pypeclub/OpenPype/pull/2174) +- Blender: Fix 'Deselect All' with object not in 'Object Mode' [\#2163](https://github.com/pypeclub/OpenPype/pull/2163) +- Tools: Stylesheets are applied after tool show [\#2161](https://github.com/pypeclub/OpenPype/pull/2161) +- Maya: Collect render - fix UNC path support 🐛 [\#2158](https://github.com/pypeclub/OpenPype/pull/2158) +- Maya: Fix hotbox broken by scriptsmenu [\#2151](https://github.com/pypeclub/OpenPype/pull/2151) +- Ftrack: Ignore save warnings exception in Prepare project action [\#2150](https://github.com/pypeclub/OpenPype/pull/2150) +- Loader thumbnails with smooth edges [\#2147](https://github.com/pypeclub/OpenPype/pull/2147) +- Added validator for source files for Standalone Publisher [\#2138](https://github.com/pypeclub/OpenPype/pull/2138) + +**Merged pull requests:** + +- Bump pillow from 8.2.0 to 8.3.2 [\#2162](https://github.com/pypeclub/OpenPype/pull/2162) +- Bump axios from 0.21.1 to 0.21.4 in /website [\#2059](https://github.com/pypeclub/OpenPype/pull/2059) + +## [3.5.0](https://github.com/pypeclub/OpenPype/tree/3.5.0) (2021-10-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.1...3.5.0) + +**Deprecated:** + +- Maya: Change mayaAscii family to mayaScene [\#2106](https://github.com/pypeclub/OpenPype/pull/2106) + +**🆕 New features** + +- Added project and task into context change message in Maya [\#2131](https://github.com/pypeclub/OpenPype/pull/2131) +- Add ExtractBurnin to photoshop review [\#2124](https://github.com/pypeclub/OpenPype/pull/2124) +- PYPE-1218 - changed namespace to contain subset name in Maya [\#2114](https://github.com/pypeclub/OpenPype/pull/2114) +- Added running configurable disk mapping command before start of OP [\#2091](https://github.com/pypeclub/OpenPype/pull/2091) +- SFTP provider [\#2073](https://github.com/pypeclub/OpenPype/pull/2073) +- Maya: Validate setdress top group [\#2068](https://github.com/pypeclub/OpenPype/pull/2068) +- Maya: Enable publishing render attrib sets \(e.g. 
V-Ray Displacement\) with model [\#1955](https://github.com/pypeclub/OpenPype/pull/1955) + +**🚀 Enhancements** + +- Maya: make rig validators configurable in settings [\#2137](https://github.com/pypeclub/OpenPype/pull/2137) +- Settings: Updated readme for entity types in settings [\#2132](https://github.com/pypeclub/OpenPype/pull/2132) +- Nuke: unified clip loader [\#2128](https://github.com/pypeclub/OpenPype/pull/2128) +- Settings UI: Project model refreshing and sorting [\#2104](https://github.com/pypeclub/OpenPype/pull/2104) +- Create Read From Rendered - Disable Relative paths by default [\#2093](https://github.com/pypeclub/OpenPype/pull/2093) +- Added choosing different dirmap mapping if workfile synched locally [\#2088](https://github.com/pypeclub/OpenPype/pull/2088) +- General: Remove IdleManager module [\#2084](https://github.com/pypeclub/OpenPype/pull/2084) +- Tray UI: Message box about missing settings defaults [\#2080](https://github.com/pypeclub/OpenPype/pull/2080) +- Tray UI: Show menu where first click happened [\#2079](https://github.com/pypeclub/OpenPype/pull/2079) +- Global: add global validators to settings [\#2078](https://github.com/pypeclub/OpenPype/pull/2078) +- Use CRF for burnin when available [\#2070](https://github.com/pypeclub/OpenPype/pull/2070) +- Project manager: Filter first item after selection of project [\#2069](https://github.com/pypeclub/OpenPype/pull/2069) +- Nuke: Adding `still` image family workflow [\#2064](https://github.com/pypeclub/OpenPype/pull/2064) +- Maya: validate authorized loaded plugins [\#2062](https://github.com/pypeclub/OpenPype/pull/2062) +- Tools: add support for pyenv on windows [\#2051](https://github.com/pypeclub/OpenPype/pull/2051) +- SyncServer: Dropbox Provider [\#1979](https://github.com/pypeclub/OpenPype/pull/1979) +- Burnin: Get data from context with defined keys. [\#1897](https://github.com/pypeclub/OpenPype/pull/1897) +- Timers manager: Get task time [\#1896](https://github.com/pypeclub/OpenPype/pull/1896) +- TVPaint: Option to stop timer on application exit. [\#1887](https://github.com/pypeclub/OpenPype/pull/1887) + +**🐛 Bug fixes** + +- Maya: fix model publishing [\#2130](https://github.com/pypeclub/OpenPype/pull/2130) +- Fix - oiiotool wasn't recognized even if present [\#2129](https://github.com/pypeclub/OpenPype/pull/2129) +- General: Disk mapping group [\#2120](https://github.com/pypeclub/OpenPype/pull/2120) +- Hiero: publishing effect first time makes wrong resources path [\#2115](https://github.com/pypeclub/OpenPype/pull/2115) +- Add startup script for Houdini Core. 
[\#2110](https://github.com/pypeclub/OpenPype/pull/2110) +- TVPaint: Behavior name of loop also accept repeat [\#2109](https://github.com/pypeclub/OpenPype/pull/2109) +- Ftrack: Project settings save custom attributes skip unknown attributes [\#2103](https://github.com/pypeclub/OpenPype/pull/2103) +- Blender: Fix NoneType error when animation\_data is missing for a rig [\#2101](https://github.com/pypeclub/OpenPype/pull/2101) +- Fix broken import in sftp provider [\#2100](https://github.com/pypeclub/OpenPype/pull/2100) +- Global: Fix docstring on publish plugin extract review [\#2097](https://github.com/pypeclub/OpenPype/pull/2097) +- Delivery Action Files Sequence fix [\#2096](https://github.com/pypeclub/OpenPype/pull/2096) +- General: Cloud mongo ca certificate issue [\#2095](https://github.com/pypeclub/OpenPype/pull/2095) +- TVPaint: Creator use context from workfile [\#2087](https://github.com/pypeclub/OpenPype/pull/2087) +- Blender: fix texture missing when publishing blend files [\#2085](https://github.com/pypeclub/OpenPype/pull/2085) +- General: Startup validations oiio tool path fix on linux [\#2083](https://github.com/pypeclub/OpenPype/pull/2083) +- Deadline: Collect deadline server does not check existence of deadline key [\#2082](https://github.com/pypeclub/OpenPype/pull/2082) +- Blender: fixed Curves with modifiers in Rigs [\#2081](https://github.com/pypeclub/OpenPype/pull/2081) +- Nuke UI scaling [\#2077](https://github.com/pypeclub/OpenPype/pull/2077) +- Maya: Fix multi-camera renders [\#2065](https://github.com/pypeclub/OpenPype/pull/2065) +- Fix Sync Queue when project disabled [\#2063](https://github.com/pypeclub/OpenPype/pull/2063) + +**Merged pull requests:** + +- Bump pywin32 from 300 to 301 [\#2086](https://github.com/pypeclub/OpenPype/pull/2086) + +## [3.4.1](https://github.com/pypeclub/OpenPype/tree/3.4.1) (2021-09-23) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.0...3.4.1) + +**🆕 New features** + +- Settings: Flag project as deactivated and hide from tools' view [\#2008](https://github.com/pypeclub/OpenPype/pull/2008) + +**🚀 Enhancements** + +- General: Startup validations [\#2054](https://github.com/pypeclub/OpenPype/pull/2054) +- Nuke: proxy mode validator [\#2052](https://github.com/pypeclub/OpenPype/pull/2052) +- Ftrack: Removed ftrack interface [\#2049](https://github.com/pypeclub/OpenPype/pull/2049) +- Settings UI: Deferred set value on entity [\#2044](https://github.com/pypeclub/OpenPype/pull/2044) +- Loader: Families filtering [\#2043](https://github.com/pypeclub/OpenPype/pull/2043) +- Settings UI: Project view enhancements [\#2042](https://github.com/pypeclub/OpenPype/pull/2042) +- Settings for Nuke IncrementScriptVersion [\#2039](https://github.com/pypeclub/OpenPype/pull/2039) +- Loader & Library loader: Use tools from OpenPype [\#2038](https://github.com/pypeclub/OpenPype/pull/2038) +- Adding predefined project folders creation in PM [\#2030](https://github.com/pypeclub/OpenPype/pull/2030) +- WebserverModule: Removed interface of webserver module [\#2028](https://github.com/pypeclub/OpenPype/pull/2028) +- TimersManager: Removed interface of timers manager [\#2024](https://github.com/pypeclub/OpenPype/pull/2024) +- Feature Maya import asset from scene inventory [\#2018](https://github.com/pypeclub/OpenPype/pull/2018) + +**🐛 Bug fixes** + +- Timers manager: Typo fix [\#2058](https://github.com/pypeclub/OpenPype/pull/2058) +- Hiero: Editorial fixes [\#2057](https://github.com/pypeclub/OpenPype/pull/2057) +- Differentiate jpg sequences
from thumbnail [\#2056](https://github.com/pypeclub/OpenPype/pull/2056) +- FFmpeg: Split command to list does not work [\#2046](https://github.com/pypeclub/OpenPype/pull/2046) +- Removed shell flag in subprocess call [\#2045](https://github.com/pypeclub/OpenPype/pull/2045) + +**Merged pull requests:** + +- Bump prismjs from 1.24.0 to 1.25.0 in /website [\#2050](https://github.com/pypeclub/OpenPype/pull/2050) + +## [3.4.0](https://github.com/pypeclub/OpenPype/tree/3.4.0) (2021-09-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...3.4.0) + +### 📖 Documentation + +- Documentation: Ftrack launch argsuments update [\#2014](https://github.com/pypeclub/OpenPype/pull/2014) +- Nuke Quick Start / Tutorial [\#1952](https://github.com/pypeclub/OpenPype/pull/1952) +- Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821) + +**🆕 New features** + +- Nuke: Compatibility with Nuke 13 [\#2003](https://github.com/pypeclub/OpenPype/pull/2003) +- Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) +- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) +- Blender: Improved assets handling [\#1615](https://github.com/pypeclub/OpenPype/pull/1615) + +**🚀 Enhancements** + +- Added possibility to configure of synchronization of workfile version… [\#2041](https://github.com/pypeclub/OpenPype/pull/2041) +- General: Task types in profiles [\#2036](https://github.com/pypeclub/OpenPype/pull/2036) +- Console interpreter: Handle invalid sizes on initialization [\#2022](https://github.com/pypeclub/OpenPype/pull/2022) +- Ftrack: Show OpenPype versions in event server status [\#2019](https://github.com/pypeclub/OpenPype/pull/2019) +- General: Staging icon [\#2017](https://github.com/pypeclub/OpenPype/pull/2017) +- Ftrack: Sync to avalon actions have jobs [\#2015](https://github.com/pypeclub/OpenPype/pull/2015) +- Modules: Connect method is not required [\#2009](https://github.com/pypeclub/OpenPype/pull/2009) +- Settings UI: Number with configurable steps [\#2001](https://github.com/pypeclub/OpenPype/pull/2001) +- Moving project folder structure creation out of ftrack module \#1989 [\#1996](https://github.com/pypeclub/OpenPype/pull/1996) +- Configurable items for providers without Settings [\#1987](https://github.com/pypeclub/OpenPype/pull/1987) +- Global: Example addons [\#1986](https://github.com/pypeclub/OpenPype/pull/1986) +- Standalone Publisher: Extract harmony zip handle workfile template [\#1982](https://github.com/pypeclub/OpenPype/pull/1982) +- Settings UI: Number sliders [\#1978](https://github.com/pypeclub/OpenPype/pull/1978) +- Workfiles: Support more workfile templates [\#1966](https://github.com/pypeclub/OpenPype/pull/1966) +- Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964) +- Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963) +- Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962) +- Global: Settings defined by Addons/Modules [\#1959](https://github.com/pypeclub/OpenPype/pull/1959) +- CI: change release numbering triggers [\#1954](https://github.com/pypeclub/OpenPype/pull/1954) +- Global: Avalon Host name collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949) +- Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948) +- Add 
face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) +- OpenPype: Add version validation and `--headless` mode and update progress 🔄 [\#1939](https://github.com/pypeclub/OpenPype/pull/1939) +- \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) +- Disregard publishing time. [\#1888](https://github.com/pypeclub/OpenPype/pull/1888) +- Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) + +**🐛 Bug fixes** + +- Workfiles tool: Task selection [\#2040](https://github.com/pypeclub/OpenPype/pull/2040) +- Ftrack: Delete old versions missing settings key [\#2037](https://github.com/pypeclub/OpenPype/pull/2037) +- Nuke: typo on a button [\#2034](https://github.com/pypeclub/OpenPype/pull/2034) +- Hiero: Fix "none" named tags [\#2033](https://github.com/pypeclub/OpenPype/pull/2033) +- FFmpeg: Subprocess arguments as list [\#2032](https://github.com/pypeclub/OpenPype/pull/2032) +- General: Fix Python 2 breaking line [\#2016](https://github.com/pypeclub/OpenPype/pull/2016) +- Bugfix/webpublisher task type [\#2006](https://github.com/pypeclub/OpenPype/pull/2006) +- Nuke thumbnails generated from middle of the sequence [\#1992](https://github.com/pypeclub/OpenPype/pull/1992) +- Nuke: last version from path gets correct version [\#1990](https://github.com/pypeclub/OpenPype/pull/1990) +- nuke, resolve, hiero: precollector order lest then 0.5 [\#1984](https://github.com/pypeclub/OpenPype/pull/1984) +- Last workfile with multiple work templates [\#1981](https://github.com/pypeclub/OpenPype/pull/1981) +- Collectors order [\#1977](https://github.com/pypeclub/OpenPype/pull/1977) +- Stop timer was within validator order range. 
[\#1975](https://github.com/pypeclub/OpenPype/pull/1975) +- Ftrack: arrow submodule has https url source [\#1974](https://github.com/pypeclub/OpenPype/pull/1974) +- Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972) +- Deadline: Houdini plugins in different hierarchy [\#1970](https://github.com/pypeclub/OpenPype/pull/1970) +- Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967) +- Global: ExtractJpeg can handle filepaths with spaces [\#1961](https://github.com/pypeclub/OpenPype/pull/1961) +- Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960) + +**Merged pull requests:** + +- Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958) +- Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933) + +## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...3.3.1) + +**🐛 Bug fixes** + +- TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946) +- Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945) +- standalone: editorial shared object problem [\#1941](https://github.com/pypeclub/OpenPype/pull/1941) +- Bugfix nuke deadline app name [\#1928](https://github.com/pypeclub/OpenPype/pull/1928) + +## [3.3.0](https://github.com/pypeclub/OpenPype/tree/3.3.0) (2021-08-17) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.2.0...3.3.0) + +### 📖 Documentation + +- Standalone Publish of textures family [\#1834](https://github.com/pypeclub/OpenPype/pull/1834) + +**🆕 New features** + +- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) +- Maya: Scene patching 🩹on submission to Deadline [\#1923](https://github.com/pypeclub/OpenPype/pull/1923) +- Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901) + +**🚀 Enhancements** + +- Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940) +- Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927) +- Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925) +- Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920) +- Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919) +- submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) +- Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900) +- Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899) +- Support nested studio plugins paths. [\#1898](https://github.com/pypeclub/OpenPype/pull/1898) +- Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892) +- Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891) +- Expose stop timer through rest api. [\#1886](https://github.com/pypeclub/OpenPype/pull/1886) +- TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) +- Allow Multiple Notes to run on tasks. 
[\#1882](https://github.com/pypeclub/OpenPype/pull/1882) +- Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) +- Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) +- Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) +- Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) +- Anatomy schema validation [\#1864](https://github.com/pypeclub/OpenPype/pull/1864) +- Ftrack prepare project structure [\#1861](https://github.com/pypeclub/OpenPype/pull/1861) +- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) +- Independent general environments [\#1853](https://github.com/pypeclub/OpenPype/pull/1853) +- TVPaint Start Frame [\#1844](https://github.com/pypeclub/OpenPype/pull/1844) +- Ftrack push attributes action adds traceback to job [\#1843](https://github.com/pypeclub/OpenPype/pull/1843) +- Prepare project action enhance [\#1838](https://github.com/pypeclub/OpenPype/pull/1838) +- nuke: settings create missing default subsets [\#1829](https://github.com/pypeclub/OpenPype/pull/1829) +- Update poetry lock [\#1823](https://github.com/pypeclub/OpenPype/pull/1823) +- Settings: settings for plugins [\#1819](https://github.com/pypeclub/OpenPype/pull/1819) +- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) +- Maya: Deadline custom settings [\#1797](https://github.com/pypeclub/OpenPype/pull/1797) +- Maya: Shader name validation [\#1762](https://github.com/pypeclub/OpenPype/pull/1762) + +**🐛 Bug fixes** + +- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) +- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) +- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) +- Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926) +- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922) +- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) +- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) +- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) +- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) +- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) +- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) +- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) +- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) +- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) +- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) +- Normalize path returned from Workfiles. 
[\#1880](https://github.com/pypeclub/OpenPype/pull/1880) +- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) +- imageio: fix grouping [\#1856](https://github.com/pypeclub/OpenPype/pull/1856) +- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) +- publisher: missing version in subset prop [\#1849](https://github.com/pypeclub/OpenPype/pull/1849) +- Ftrack type error fix in sync to avalon event handler [\#1845](https://github.com/pypeclub/OpenPype/pull/1845) +- Nuke: updating effects subset fail [\#1841](https://github.com/pypeclub/OpenPype/pull/1841) +- nuke: write render node skipped with crop [\#1836](https://github.com/pypeclub/OpenPype/pull/1836) +- Project folder structure overrides [\#1813](https://github.com/pypeclub/OpenPype/pull/1813) +- Maya: fix yeti settings path in extractor [\#1809](https://github.com/pypeclub/OpenPype/pull/1809) +- Failsafe for cross project containers. [\#1806](https://github.com/pypeclub/OpenPype/pull/1806) +- Houdini colector formatting keys fix [\#1802](https://github.com/pypeclub/OpenPype/pull/1802) +- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) +- Application launch stdout/stderr in GUI build [\#1684](https://github.com/pypeclub/OpenPype/pull/1684) +- Nuke: re-use instance nodes output path [\#1577](https://github.com/pypeclub/OpenPype/pull/1577) + +**Merged pull requests:** + +- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) +- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) +- Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) +- Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) +- PS, AE - send actual context when another webserver is running [\#1811](https://github.com/pypeclub/OpenPype/pull/1811) + +## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.4...3.2.0) + +### 📖 Documentation + +- Fix: staging and `--use-version` option [\#1786](https://github.com/pypeclub/OpenPype/pull/1786) +- Subset template and TVPaint subset template docs [\#1717](https://github.com/pypeclub/OpenPype/pull/1717) +- Overscan color extract review [\#1701](https://github.com/pypeclub/OpenPype/pull/1701) + +**🚀 Enhancements** + +- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) +- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) +- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795) +- Settings conditional dict [\#1777](https://github.com/pypeclub/OpenPype/pull/1777) +- Settings application use python 2 only where needed [\#1776](https://github.com/pypeclub/OpenPype/pull/1776) +- Settings UI copy/paste [\#1769](https://github.com/pypeclub/OpenPype/pull/1769) +- Workfile tool widths [\#1766](https://github.com/pypeclub/OpenPype/pull/1766) +- Push hierarchical attributes care about task parent changes [\#1763](https://github.com/pypeclub/OpenPype/pull/1763) +- Application executables with environment variables [\#1757](https://github.com/pypeclub/OpenPype/pull/1757) +- Deadline: Nuke submission additional attributes 
[\#1756](https://github.com/pypeclub/OpenPype/pull/1756) +- Settings schema without prefill [\#1753](https://github.com/pypeclub/OpenPype/pull/1753) +- Settings Hosts enum [\#1739](https://github.com/pypeclub/OpenPype/pull/1739) +- Validate containers settings [\#1736](https://github.com/pypeclub/OpenPype/pull/1736) +- PS - added loader from sequence [\#1726](https://github.com/pypeclub/OpenPype/pull/1726) +- Autoupdate launcher [\#1725](https://github.com/pypeclub/OpenPype/pull/1725) +- Toggle Ftrack upload in StandalonePublisher [\#1708](https://github.com/pypeclub/OpenPype/pull/1708) +- Nuke: Prerender Frame Range by default [\#1699](https://github.com/pypeclub/OpenPype/pull/1699) +- Smoother edges of color triangle [\#1695](https://github.com/pypeclub/OpenPype/pull/1695) + +**🐛 Bug fixes** + +- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) +- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) +- Invitee email can be None which break the Ftrack commit. [\#1788](https://github.com/pypeclub/OpenPype/pull/1788) +- Otio unrelated error on import [\#1782](https://github.com/pypeclub/OpenPype/pull/1782) +- FFprobe streams order [\#1775](https://github.com/pypeclub/OpenPype/pull/1775) +- Fix - single file files are str only, cast it to list to count properly [\#1772](https://github.com/pypeclub/OpenPype/pull/1772) +- Environments in app executable for MacOS [\#1768](https://github.com/pypeclub/OpenPype/pull/1768) +- Project specific environments [\#1767](https://github.com/pypeclub/OpenPype/pull/1767) +- Settings UI with refresh button [\#1764](https://github.com/pypeclub/OpenPype/pull/1764) +- Standalone publisher thumbnail extractor fix [\#1761](https://github.com/pypeclub/OpenPype/pull/1761) +- Anatomy others templates don't cause crash [\#1758](https://github.com/pypeclub/OpenPype/pull/1758) +- Backend acre module commit update [\#1745](https://github.com/pypeclub/OpenPype/pull/1745) +- hiero: precollect instances failing when audio selected [\#1743](https://github.com/pypeclub/OpenPype/pull/1743) +- Hiero: creator instance error [\#1742](https://github.com/pypeclub/OpenPype/pull/1742) +- Nuke: fixing render creator for no selection format failing [\#1741](https://github.com/pypeclub/OpenPype/pull/1741) +- StandalonePublisher: failing collector for editorial [\#1738](https://github.com/pypeclub/OpenPype/pull/1738) +- Local settings UI crash on missing defaults [\#1737](https://github.com/pypeclub/OpenPype/pull/1737) +- TVPaint white background on thumbnail [\#1735](https://github.com/pypeclub/OpenPype/pull/1735) +- Ftrack missing custom attribute message [\#1734](https://github.com/pypeclub/OpenPype/pull/1734) +- Launcher project changes [\#1733](https://github.com/pypeclub/OpenPype/pull/1733) +- Ftrack sync status [\#1732](https://github.com/pypeclub/OpenPype/pull/1732) +- TVPaint use layer name for default variant [\#1724](https://github.com/pypeclub/OpenPype/pull/1724) +- Default subset template for TVPaint review and workfile families [\#1716](https://github.com/pypeclub/OpenPype/pull/1716) +- Maya: Extract review hotfix [\#1714](https://github.com/pypeclub/OpenPype/pull/1714) +- Settings: Imageio improving granularity [\#1711](https://github.com/pypeclub/OpenPype/pull/1711) +- Application without executables [\#1679](https://github.com/pypeclub/OpenPype/pull/1679) +- Unreal: launching on Linux [\#1672](https://github.com/pypeclub/OpenPype/pull/1672) + +**Merged pull 
requests:** + +- Bump prismjs from 1.23.0 to 1.24.0 in /website [\#1773](https://github.com/pypeclub/OpenPype/pull/1773) +- TVPaint ftrack family [\#1755](https://github.com/pypeclub/OpenPype/pull/1755) + +## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.3...2.18.4) + +## [2.18.3](https://github.com/pypeclub/OpenPype/tree/2.18.3) (2021-06-23) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.2...2.18.3) + +## [2.18.2](https://github.com/pypeclub/OpenPype/tree/2.18.2) (2021-06-16) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.1.0...2.18.2) + +## [3.1.0](https://github.com/pypeclub/OpenPype/tree/3.1.0) (2021-06-15) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.0.0...3.1.0) + +### 📖 Documentation + +- Feature Slack integration [\#1657](https://github.com/pypeclub/OpenPype/pull/1657) + +**🚀 Enhancements** + +- Log Viewer with OpenPype style [\#1703](https://github.com/pypeclub/OpenPype/pull/1703) +- Scrolling in OpenPype info widget [\#1702](https://github.com/pypeclub/OpenPype/pull/1702) +- OpenPype style in modules [\#1694](https://github.com/pypeclub/OpenPype/pull/1694) +- Sort applications and tools alphabetically in Settings UI [\#1689](https://github.com/pypeclub/OpenPype/pull/1689) +- \#683 - Validate Frame Range in Standalone Publisher [\#1683](https://github.com/pypeclub/OpenPype/pull/1683) +- Hiero: old container versions identify with red color [\#1682](https://github.com/pypeclub/OpenPype/pull/1682) +- Project Manager: Default name column width [\#1669](https://github.com/pypeclub/OpenPype/pull/1669) +- Remove outline in stylesheet [\#1667](https://github.com/pypeclub/OpenPype/pull/1667) +- TVPaint: Creator take layer name as default value for subset variant [\#1663](https://github.com/pypeclub/OpenPype/pull/1663) +- TVPaint custom subset template [\#1662](https://github.com/pypeclub/OpenPype/pull/1662) +- Editorial: conform assets validator [\#1659](https://github.com/pypeclub/OpenPype/pull/1659) +- Nuke - Publish simplification [\#1653](https://github.com/pypeclub/OpenPype/pull/1653) +- \#1333 - added tooltip hints to Pyblish buttons [\#1649](https://github.com/pypeclub/OpenPype/pull/1649) + +**🐛 Bug fixes** + +- Nuke: broken publishing rendered frames [\#1707](https://github.com/pypeclub/OpenPype/pull/1707) +- Standalone publisher Thumbnail export args [\#1705](https://github.com/pypeclub/OpenPype/pull/1705) +- Bad zip can break OpenPype start [\#1691](https://github.com/pypeclub/OpenPype/pull/1691) +- Hiero: published whole edit mov [\#1687](https://github.com/pypeclub/OpenPype/pull/1687) +- Ftrack subprocess handle of stdout/stderr [\#1675](https://github.com/pypeclub/OpenPype/pull/1675) +- Settings list race condition and mutable dict list conversion [\#1671](https://github.com/pypeclub/OpenPype/pull/1671) +- Mac launch arguments fix [\#1660](https://github.com/pypeclub/OpenPype/pull/1660) +- Fix missing dbm python module [\#1652](https://github.com/pypeclub/OpenPype/pull/1652) +- Transparent branches in view on Mac [\#1648](https://github.com/pypeclub/OpenPype/pull/1648) +- Add asset on task item [\#1646](https://github.com/pypeclub/OpenPype/pull/1646) +- Project manager save and queue [\#1645](https://github.com/pypeclub/OpenPype/pull/1645) +- New project anatomy values [\#1644](https://github.com/pypeclub/OpenPype/pull/1644) +- Farm publishing: check if published items do exist
[\#1573](https://github.com/pypeclub/OpenPype/pull/1573) + +**Merged pull requests:** + +- Bump normalize-url from 4.5.0 to 4.5.1 in /website [\#1686](https://github.com/pypeclub/OpenPype/pull/1686) + ## [3.0.0](https://github.com/pypeclub/openpype/tree/3.0.0) @@ -11,12 +1817,12 @@ - Easy to add Application versions. - Per Project Environment and plugin management. - Robust profile system for creating reviewables and burnins, with filtering based on Application, Task and data family. -- Configurable publish plugins. +- Configurable publish plugins. - Options to make any validator or extractor, optional or disabled. - Color Management is now unified under anatomy settings. - Subset naming and grouping is fully configurable. - All project attributes can now be set directly in OpenPype settings. -- Studio Setting can be locked to prevent unwanted artist changes. +- Studio Setting can be locked to prevent unwanted artist changes. - You can now add per project and per task type templates for workfile initialization in most hosts. - Too many other individual configurable option to list in this changelog :) @@ -774,8 +2580,6 @@ - Standalone Publisher: getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) -# Changelog - ## [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) (2020-11-15) [Full Changelog](https://github.com/pypeclub/pype/compare/2.13.5...2.13.6) @@ -1565,10 +3369,4 @@ A large cleanup release. Most of the change are under the hood. - _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner -\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* - - -\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* - - \* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* From d05ce1591022e95f802c8a4e7cc73249f42b2cde Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 12:13:32 +0200 Subject: [PATCH 1678/2550] add .github_change_generator config file to gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 4b773e97ed..18e7cd7bf2 100644 --- a/.gitignore +++ b/.gitignore @@ -110,3 +110,5 @@ tools/run_eventserver.* # Developer tools tools/dev_* + +.github_changelog_generator From 3c6fb3511e89fc12e6945a78f228ae9dd91a3c09 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 12:30:07 +0200 Subject: [PATCH 1679/2550] update changelog.md --- CHANGELOG.md | 1763 ++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 1706 insertions(+), 57 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dca0e7ecef..d841eb9747 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,44 +1,72 @@ # Changelog -## [3.14.4-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [Unreleased](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...HEAD) +**🆕 New features** + +- Webpublisher: use max next published version number for all items in batch [\#3961](https://github.com/pypeclub/OpenPype/pull/3961) +- General: Control Thumbnail integration via explicit configuration profiles [\#3951](https://github.com/pypeclub/OpenPype/pull/3951) + **🚀 Enhancements** +- Publisher: Multiselection in card view 
[\#3993](https://github.com/pypeclub/OpenPype/pull/3993) +- TrayPublisher: Original Basename cause crash too early [\#3990](https://github.com/pypeclub/OpenPype/pull/3990) +- Tray Publisher: add `originalBasename` data to simple creators [\#3988](https://github.com/pypeclub/OpenPype/pull/3988) +- General: Custom paths to ffmpeg and OpenImageIO tools [\#3982](https://github.com/pypeclub/OpenPype/pull/3982) +- Integrate: Preserve existing subset group if instance does not set it for new version [\#3976](https://github.com/pypeclub/OpenPype/pull/3976) +- Publisher: Prepare publisher controller for remote publishing [\#3972](https://github.com/pypeclub/OpenPype/pull/3972) +- Maya: new style dataclasses in maya deadline submitter plugin [\#3968](https://github.com/pypeclub/OpenPype/pull/3968) +- Maya: Define preffered Qt bindings for Qt.py and qtpy [\#3963](https://github.com/pypeclub/OpenPype/pull/3963) +- Settings: Move imageio from project anatomy to project settings \[pypeclub\] [\#3959](https://github.com/pypeclub/OpenPype/pull/3959) +- TrayPublisher: Extract thumbnail for other families [\#3952](https://github.com/pypeclub/OpenPype/pull/3952) +- Publisher: Pass instance to subset name method on update [\#3949](https://github.com/pypeclub/OpenPype/pull/3949) - General: Set root environments before DCC launch [\#3947](https://github.com/pypeclub/OpenPype/pull/3947) - Refactor: changed legacy way to update database for Hero version integrate [\#3941](https://github.com/pypeclub/OpenPype/pull/3941) - Maya: Moved plugin from global to maya [\#3939](https://github.com/pypeclub/OpenPype/pull/3939) +- Publisher: Create dialog is part of main window [\#3936](https://github.com/pypeclub/OpenPype/pull/3936) - Fusion: Implement Alembic and FBX mesh loader [\#3927](https://github.com/pypeclub/OpenPype/pull/3927) -- Publisher: Instances can be marked as stored [\#3846](https://github.com/pypeclub/OpenPype/pull/3846) +- Maya: Remove hardcoded requirement for maya/ start for image file prefix [\#3873](https://github.com/pypeclub/OpenPype/pull/3873) **🐛 Bug fixes** +- TrayPublisher: Disable sequences in batch mov creator [\#3996](https://github.com/pypeclub/OpenPype/pull/3996) +- Fix - tags might be missing on representation [\#3985](https://github.com/pypeclub/OpenPype/pull/3985) +- Resolve: Fix usage of functions from lib [\#3983](https://github.com/pypeclub/OpenPype/pull/3983) +- Maya: remove invalid prefix token for non-multipart outputs [\#3981](https://github.com/pypeclub/OpenPype/pull/3981) +- Ftrack: Fix schema cache for Python 2 [\#3980](https://github.com/pypeclub/OpenPype/pull/3980) +- Maya: add object to attr.s declaration [\#3973](https://github.com/pypeclub/OpenPype/pull/3973) - Maya: Deadline OutputFilePath hack regression for Renderman [\#3950](https://github.com/pypeclub/OpenPype/pull/3950) - Houdini: Fix validate workfile paths for non-parm file references [\#3948](https://github.com/pypeclub/OpenPype/pull/3948) - Photoshop: missed sync published version of workfile with workfile [\#3946](https://github.com/pypeclub/OpenPype/pull/3946) +- Maya: Set default value for RenderSetupIncludeLights option [\#3944](https://github.com/pypeclub/OpenPype/pull/3944) - Maya: fix regression of Renderman Deadline hack [\#3943](https://github.com/pypeclub/OpenPype/pull/3943) +- Kitsu: 2 fixes, nb\_frames and Shot type error [\#3940](https://github.com/pypeclub/OpenPype/pull/3940) - Tray: Change order of attribute changes [\#3938](https://github.com/pypeclub/OpenPype/pull/3938) - AttributeDefs: Fix 
crashing multivalue of files widget [\#3937](https://github.com/pypeclub/OpenPype/pull/3937) - General: Fix links query on hero version [\#3900](https://github.com/pypeclub/OpenPype/pull/3900) - Publisher: Files Drag n Drop cleanup [\#3888](https://github.com/pypeclub/OpenPype/pull/3888) -- Maya: Render settings validation attribute check tweak logging [\#3821](https://github.com/pypeclub/OpenPype/pull/3821) **🔀 Refactored code** +- Flame: Import lib functions from lib [\#3992](https://github.com/pypeclub/OpenPype/pull/3992) +- General: Fix deprecated warning in legacy creator [\#3978](https://github.com/pypeclub/OpenPype/pull/3978) +- Blender: Remove openpype api imports [\#3977](https://github.com/pypeclub/OpenPype/pull/3977) +- General: Use direct import of resources [\#3964](https://github.com/pypeclub/OpenPype/pull/3964) - General: Direct settings imports [\#3934](https://github.com/pypeclub/OpenPype/pull/3934) - General: import 'Logger' from 'openpype.lib' [\#3926](https://github.com/pypeclub/OpenPype/pull/3926) +- General: Remove deprecated functions from lib [\#3907](https://github.com/pypeclub/OpenPype/pull/3907) **Merged pull requests:** - Maya + Yeti: Load Yeti Cache fix frame number recognition [\#3942](https://github.com/pypeclub/OpenPype/pull/3942) - Fusion: Implement callbacks to Fusion's event system thread [\#3928](https://github.com/pypeclub/OpenPype/pull/3928) - Photoshop: create single frame image in Ftrack as review [\#3908](https://github.com/pypeclub/OpenPype/pull/3908) -- Maya: Warn correctly about nodes in render instance with unexpected names [\#3816](https://github.com/pypeclub/OpenPype/pull/3816) ## [3.14.3](https://github.com/pypeclub/OpenPype/tree/3.14.3) (2022-10-03) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.3-nightly.7...3.14.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...3.14.3) **🚀 Enhancements** @@ -51,10 +79,6 @@ - Publisher: Increase size of main window [\#3862](https://github.com/pypeclub/OpenPype/pull/3862) - Flame: make migratable projects after creation [\#3860](https://github.com/pypeclub/OpenPype/pull/3860) - Photoshop: synchronize image version with workfile [\#3854](https://github.com/pypeclub/OpenPype/pull/3854) -- General: Transcoding handle float2 attr type [\#3849](https://github.com/pypeclub/OpenPype/pull/3849) -- General: Simple script for getting license information about used packages [\#3843](https://github.com/pypeclub/OpenPype/pull/3843) -- General: Workfile template build enhancements [\#3838](https://github.com/pypeclub/OpenPype/pull/3838) -- General: lock task workfiles when they are working on [\#3810](https://github.com/pypeclub/OpenPype/pull/3810) **🐛 Bug fixes** @@ -68,7 +92,6 @@ - General: Copy of workfile does not use 'copy' function but 'copyfile' [\#3869](https://github.com/pypeclub/OpenPype/pull/3869) - Tray Publisher: skip plugin if otioTimeline is missing [\#3856](https://github.com/pypeclub/OpenPype/pull/3856) - Flame: retimed attributes are integrated with settings [\#3855](https://github.com/pypeclub/OpenPype/pull/3855) -- Maya: Extract Playblast fix textures + labelize viewport show settings [\#3852](https://github.com/pypeclub/OpenPype/pull/3852) **🔀 Refactored code** @@ -78,9 +101,6 @@ - Flame: Use new Extractor location [\#3916](https://github.com/pypeclub/OpenPype/pull/3916) - Houdini: Use new Extractor location [\#3894](https://github.com/pypeclub/OpenPype/pull/3894) - Harmony: Use new Extractor location 
[\#3893](https://github.com/pypeclub/OpenPype/pull/3893) -- Hiero: Use new Extractor location [\#3851](https://github.com/pypeclub/OpenPype/pull/3851) -- Maya: Remove old legacy \(ftrack\) plug-ins that are of no use anymore [\#3819](https://github.com/pypeclub/OpenPype/pull/3819) -- Nuke: Use new Extractor location [\#3799](https://github.com/pypeclub/OpenPype/pull/3799) **Merged pull requests:** @@ -90,7 +110,17 @@ ## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.2-nightly.5...3.14.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) + +### 📖 Documentation + +- Documentation: Anatomy templates [\#3618](https://github.com/pypeclub/OpenPype/pull/3618) + +**🆕 New features** + +- Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) +- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) +- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) **🚀 Enhancements** @@ -98,6 +128,13 @@ - Flame: OpenPype submenu to batch and media manager [\#3825](https://github.com/pypeclub/OpenPype/pull/3825) - General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) +- SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) +- Kitsu: Drop 'entities root' setting. [\#3739](https://github.com/pypeclub/OpenPype/pull/3739) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) +- Blender: Publisher collect workfile representation [\#3670](https://github.com/pypeclub/OpenPype/pull/3670) +- Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) +- Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) +- Scene Inventory: Add subsetGroup column [\#3658](https://github.com/pypeclub/OpenPype/pull/3658) **🐛 Bug fixes** @@ -107,42 +144,625 @@ - Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) - Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) - Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) +- nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) +- Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) +- Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) +- Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) +- Maya: `containerise` dont skip empty values [\#3674](https://github.com/pypeclub/OpenPype/pull/3674) + +**🔀 Refactored code** + +- Photoshop: Use new Extractor location [\#3789](https://github.com/pypeclub/OpenPype/pull/3789) +- Blender: Use new Extractor location [\#3787](https://github.com/pypeclub/OpenPype/pull/3787) +- AfterEffects: Use new Extractor location [\#3784](https://github.com/pypeclub/OpenPype/pull/3784) +- General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) +- General: Copied 'Extractor' plugin to publish pipeline 
[\#3771](https://github.com/pypeclub/OpenPype/pull/3771) +- General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) +- General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) +- General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) +- General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) +- Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) +- Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) +- Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) + +**Merged pull requests:** + +- Standalone Publisher: Ignore empty labels, then still use name like other asset models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) +- Kitsu - sync\_all\_project - add list ignore\_projects [\#3776](https://github.com/pypeclub/OpenPype/pull/3776) ## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.1-nightly.4...3.14.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...3.14.1) + +### 📖 Documentation + +- Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) +- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) + +**🆕 New features** + +- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) +- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662) + +**🚀 Enhancements** + +- General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) +- Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) +- General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) +- Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) +- Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) +- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) +- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671) +- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) + +**🐛 Bug fixes** + +- Maya: Fix typo in getPanel argument `with_focus` -\> `withFocus` [\#3753](https://github.com/pypeclub/OpenPype/pull/3753) +- General: Smaller fixes of imports [\#3748](https://github.com/pypeclub/OpenPype/pull/3748) +- General: Logger tweaks [\#3741](https://github.com/pypeclub/OpenPype/pull/3741) +- Nuke: 
missing job dependency if multiple bake streams [\#3737](https://github.com/pypeclub/OpenPype/pull/3737) +- Nuke: color-space settings from anatomy is working [\#3721](https://github.com/pypeclub/OpenPype/pull/3721) +- Settings: Fix studio default anatomy save [\#3716](https://github.com/pypeclub/OpenPype/pull/3716) +- Maya: Use project name instead of project code [\#3709](https://github.com/pypeclub/OpenPype/pull/3709) +- Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) +- Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) +- PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) +- RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) +- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) + +**🔀 Refactored code** + +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) +- Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) +- General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) +- AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) +- AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) +- General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) +- Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) +- General: Move subset name functionality [\#3723](https://github.com/pypeclub/OpenPype/pull/3723) +- General: Move creators plugin getter [\#3714](https://github.com/pypeclub/OpenPype/pull/3714) +- General: Move constants from lib to client [\#3713](https://github.com/pypeclub/OpenPype/pull/3713) +- Loader: Subset groups using client operations [\#3710](https://github.com/pypeclub/OpenPype/pull/3710) +- TVPaint: Defined as module [\#3707](https://github.com/pypeclub/OpenPype/pull/3707) +- StandalonePublisher: Define StandalonePublisher as module [\#3706](https://github.com/pypeclub/OpenPype/pull/3706) +- TrayPublisher: Define TrayPublisher as module [\#3705](https://github.com/pypeclub/OpenPype/pull/3705) +- General: Move context specific functions to context tools [\#3702](https://github.com/pypeclub/OpenPype/pull/3702) + +**Merged pull requests:** + +- Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) +- Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) +- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) +- Nuke: Validation refactory to new publisher [\#3567](https://github.com/pypeclub/OpenPype/pull/3567) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) -[Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...3.14.0) + +**🆕 New features** + +- Maya: Build workfile by template [\#3578](https://github.com/pypeclub/OpenPype/pull/3578) +- Maya: Implementation of JSON layout for Unreal workflow [\#3353](https://github.com/pypeclub/OpenPype/pull/3353) +- Maya: Build workfile by template [\#3315](https://github.com/pypeclub/OpenPype/pull/3315) + +**🚀 Enhancements** + +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) +- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) +- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) +- Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661) +- General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650) + +**🐛 Bug fixes** + +- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) +- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) +- General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644) +- Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643) +- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632) +- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) + +**🔀 Refactored code** + +- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) +- Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653) +- Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647) +- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) +- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) +- General: Workfiles builder using query functions [\#3598](https://github.com/pypeclub/OpenPype/pull/3598) + +**Merged pull requests:** + +- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) +- Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645) +- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) +- fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.2...3.13.0) + +**🆕 New features** + +- Support for mutliple installed versions - 3.13 [\#3605](https://github.com/pypeclub/OpenPype/pull/3605) +- Traypublisher: simple editorial publishing [\#3492](https://github.com/pypeclub/OpenPype/pull/3492) + +**🚀 Enhancements** + +- 
Editorial: Mix audio use side file for ffmpeg filters [\#3630](https://github.com/pypeclub/OpenPype/pull/3630) +- Ftrack: Comment template can contain optional keys [\#3615](https://github.com/pypeclub/OpenPype/pull/3615) +- Ftrack: Add more metadata to ftrack components [\#3612](https://github.com/pypeclub/OpenPype/pull/3612) +- General: Add context to pyblish context [\#3594](https://github.com/pypeclub/OpenPype/pull/3594) +- Kitsu: Shot&Sequence name with prefix over appends [\#3593](https://github.com/pypeclub/OpenPype/pull/3593) +- Photoshop: implemented {layer} placeholder in subset template [\#3591](https://github.com/pypeclub/OpenPype/pull/3591) +- General: Python module appdirs from git [\#3589](https://github.com/pypeclub/OpenPype/pull/3589) +- Ftrack: Update ftrack api to 2.3.3 [\#3588](https://github.com/pypeclub/OpenPype/pull/3588) +- General: New Integrator small fixes [\#3583](https://github.com/pypeclub/OpenPype/pull/3583) +- Maya: Render Creator has configurable options. [\#3097](https://github.com/pypeclub/OpenPype/pull/3097) + +**🐛 Bug fixes** + +- Maya: fix aov separator in Redshift [\#3625](https://github.com/pypeclub/OpenPype/pull/3625) +- Fix for multi-version build on Mac [\#3622](https://github.com/pypeclub/OpenPype/pull/3622) +- Ftrack: Sync hierarchical attributes can handle new created entities [\#3621](https://github.com/pypeclub/OpenPype/pull/3621) +- General: Extract review aspect ratio scale is calculated by ffmpeg [\#3620](https://github.com/pypeclub/OpenPype/pull/3620) +- Maya: Fix types of default settings [\#3617](https://github.com/pypeclub/OpenPype/pull/3617) +- Integrator: Don't force to have dot before frame [\#3611](https://github.com/pypeclub/OpenPype/pull/3611) +- AfterEffects: refactored integrate doesnt work formulti frame publishes [\#3610](https://github.com/pypeclub/OpenPype/pull/3610) +- Maya look data contents fails with custom attribute on group [\#3607](https://github.com/pypeclub/OpenPype/pull/3607) +- TrayPublisher: Fix wrong conflict merge [\#3600](https://github.com/pypeclub/OpenPype/pull/3600) +- Bugfix: Add OCIO as submodule to prepare for handling `maketx` color space conversion. 
[\#3590](https://github.com/pypeclub/OpenPype/pull/3590) +- Fix general settings environment variables resolution [\#3587](https://github.com/pypeclub/OpenPype/pull/3587) +- Editorial publishing workflow improvements [\#3580](https://github.com/pypeclub/OpenPype/pull/3580) +- General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) +- Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) +- Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) + +**🔀 Refactored code** + +- General: Plugin settings handled by plugins [\#3623](https://github.com/pypeclub/OpenPype/pull/3623) +- General: Naive implementation of document create, update, delete [\#3601](https://github.com/pypeclub/OpenPype/pull/3601) +- General: Use query functions in general code [\#3596](https://github.com/pypeclub/OpenPype/pull/3596) +- General: Separate extraction of template data into more functions [\#3574](https://github.com/pypeclub/OpenPype/pull/3574) +- General: Lib cleanup [\#3571](https://github.com/pypeclub/OpenPype/pull/3571) + +**Merged pull requests:** + +- Webpublisher: timeout for PS studio processing [\#3619](https://github.com/pypeclub/OpenPype/pull/3619) +- Core: translated validate\_containers.py into New publisher style [\#3614](https://github.com/pypeclub/OpenPype/pull/3614) +- Enable write color sets on animation publish automatically [\#3582](https://github.com/pypeclub/OpenPype/pull/3582) ## [3.12.2](https://github.com/pypeclub/OpenPype/tree/3.12.2) (2022-07-27) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.2-nightly.4...3.12.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.1...3.12.2) + +### 📖 Documentation + +- Update website with more studios [\#3554](https://github.com/pypeclub/OpenPype/pull/3554) +- Documentation: Update publishing dev docs [\#3549](https://github.com/pypeclub/OpenPype/pull/3549) + +**🚀 Enhancements** + +- General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561) +- Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) +- General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) +- Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) +- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) +- Ftrack: Trigger custom ftrack topic of project structure creation [\#3506](https://github.com/pypeclub/OpenPype/pull/3506) +- Settings UI: Add extract to file action on project view [\#3505](https://github.com/pypeclub/OpenPype/pull/3505) +- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) +- General: Event system [\#3499](https://github.com/pypeclub/OpenPype/pull/3499) +- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) +- Nuke: load clip with options from settings [\#3497](https://github.com/pypeclub/OpenPype/pull/3497) +- TrayPublisher: implemented render\_mov\_batch [\#3486](https://github.com/pypeclub/OpenPype/pull/3486) +- Migrate basic families to the new Tray Publisher [\#3469](https://github.com/pypeclub/OpenPype/pull/3469) 
+- Enhance powershell build scripts [\#1827](https://github.com/pypeclub/OpenPype/pull/1827) + +**🐛 Bug fixes** + +- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) +- Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562) +- NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) +- Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) +- General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556) +- Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550) +- Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547) +- Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539) +- General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538) +- Additional fixes for powershell scripts [\#3525](https://github.com/pypeclub/OpenPype/pull/3525) +- Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523) +- Nuke: double slate [\#3521](https://github.com/pypeclub/OpenPype/pull/3521) +- General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) +- Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) +- TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) +- NewPublisher: Publish attributes are properly collected [\#3510](https://github.com/pypeclub/OpenPype/pull/3510) +- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) +- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) + +**🔀 Refactored code** + +- General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563) +- General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531) +- Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530) +- General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529) +- General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527) +- General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522) +- Kitsu: Use query function from client [\#3496](https://github.com/pypeclub/OpenPype/pull/3496) +- TimersManager: Use query functions [\#3495](https://github.com/pypeclub/OpenPype/pull/3495) +- Deadline: Use query functions [\#3466](https://github.com/pypeclub/OpenPype/pull/3466) +- Refactor Integrate Asset [\#2898](https://github.com/pypeclub/OpenPype/pull/2898) + +**Merged pull requests:** + +- Maya: fix active pane loss [\#3566](https://github.com/pypeclub/OpenPype/pull/3566) ## [3.12.1](https://github.com/pypeclub/OpenPype/tree/3.12.1) (2022-07-13) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.1-nightly.6...3.12.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.12.0...3.12.1) + +### 📖 Documentation + +- Docs: Added minimal permissions for MongoDB [\#3441](https://github.com/pypeclub/OpenPype/pull/3441) + +**🆕 New features** + +- Maya: Add VDB 
to Arnold loader [\#3433](https://github.com/pypeclub/OpenPype/pull/3433) + +**🚀 Enhancements** + +- TrayPublisher: Added more options for grouping of instances [\#3494](https://github.com/pypeclub/OpenPype/pull/3494) +- NewPublisher: Align creator attributes from top to bottom [\#3487](https://github.com/pypeclub/OpenPype/pull/3487) +- NewPublisher: Added ability to use label of instance [\#3484](https://github.com/pypeclub/OpenPype/pull/3484) +- General: Creator Plugins have access to project [\#3476](https://github.com/pypeclub/OpenPype/pull/3476) +- General: Better arguments order in creator init [\#3475](https://github.com/pypeclub/OpenPype/pull/3475) +- Ftrack: Trigger custom ftrack events on project creation and preparation [\#3465](https://github.com/pypeclub/OpenPype/pull/3465) +- Windows installer: Clean old files and add version subfolder [\#3445](https://github.com/pypeclub/OpenPype/pull/3445) +- Blender: Bugfix - Set fps properly on open [\#3426](https://github.com/pypeclub/OpenPype/pull/3426) +- Hiero: Add custom scripts menu [\#3425](https://github.com/pypeclub/OpenPype/pull/3425) +- Blender: pre pyside install for all platforms [\#3400](https://github.com/pypeclub/OpenPype/pull/3400) +- Maya: Add additional playblast options to review Extractor. [\#3384](https://github.com/pypeclub/OpenPype/pull/3384) +- Maya: Ability to set resolution for playblasts from asset, and override through review instance. [\#3360](https://github.com/pypeclub/OpenPype/pull/3360) +- Maya: Redshift Volume Loader Implement update, remove, switch + fix vdb sequence support [\#3197](https://github.com/pypeclub/OpenPype/pull/3197) +- Maya: Implement `iter_visible_nodes_in_range` for extracting Alembics [\#3100](https://github.com/pypeclub/OpenPype/pull/3100) + +**🐛 Bug fixes** + +- TrayPublisher: Keep use instance label in list view [\#3493](https://github.com/pypeclub/OpenPype/pull/3493) +- General: Extract review use first frame of input sequence [\#3491](https://github.com/pypeclub/OpenPype/pull/3491) +- General: Fix Plist loading for application launch [\#3485](https://github.com/pypeclub/OpenPype/pull/3485) +- Nuke: Workfile tools open on start [\#3479](https://github.com/pypeclub/OpenPype/pull/3479) +- New Publisher: Disabled context change allows creation [\#3478](https://github.com/pypeclub/OpenPype/pull/3478) +- General: thumbnail extractor fix [\#3474](https://github.com/pypeclub/OpenPype/pull/3474) +- Kitsu: bugfix with sync-service ans publish plugins [\#3473](https://github.com/pypeclub/OpenPype/pull/3473) +- Flame: solved problem with multi-selected loading [\#3470](https://github.com/pypeclub/OpenPype/pull/3470) +- General: Fix query function in update logic [\#3468](https://github.com/pypeclub/OpenPype/pull/3468) +- Resolve: removed few bugs [\#3464](https://github.com/pypeclub/OpenPype/pull/3464) +- General: Delete old versions is safer when ftrack is disabled [\#3462](https://github.com/pypeclub/OpenPype/pull/3462) +- Nuke: fixing metadata slate TC difference [\#3455](https://github.com/pypeclub/OpenPype/pull/3455) +- Nuke: prerender reviewable fails [\#3450](https://github.com/pypeclub/OpenPype/pull/3450) +- Maya: fix hashing in Python 3 for tile rendering [\#3447](https://github.com/pypeclub/OpenPype/pull/3447) +- LogViewer: Escape html characters in log message [\#3443](https://github.com/pypeclub/OpenPype/pull/3443) +- Nuke: Slate frame is integrated [\#3427](https://github.com/pypeclub/OpenPype/pull/3427) +- Maya: Camera extra data - additional fix for \#3304 
[\#3386](https://github.com/pypeclub/OpenPype/pull/3386) +- Maya: Handle excluding `model` family from frame range validator. [\#3370](https://github.com/pypeclub/OpenPype/pull/3370) + +**🔀 Refactored code** + +- Maya: Merge animation + pointcache extractor logic [\#3461](https://github.com/pypeclub/OpenPype/pull/3461) +- Maya: Re-use `maintained_time` from lib [\#3460](https://github.com/pypeclub/OpenPype/pull/3460) +- General: Use query functions in global plugins [\#3459](https://github.com/pypeclub/OpenPype/pull/3459) +- Clockify: Use query functions in clockify actions [\#3458](https://github.com/pypeclub/OpenPype/pull/3458) +- General: Use query functions in rest api calls [\#3457](https://github.com/pypeclub/OpenPype/pull/3457) +- General: Use query functions in openpype lib functions [\#3454](https://github.com/pypeclub/OpenPype/pull/3454) +- General: Use query functions in load utils [\#3446](https://github.com/pypeclub/OpenPype/pull/3446) +- General: Move publish plugin and publish render abstractions [\#3442](https://github.com/pypeclub/OpenPype/pull/3442) +- General: Use Anatomy after move to pipeline [\#3436](https://github.com/pypeclub/OpenPype/pull/3436) +- General: Anatomy moved to pipeline [\#3435](https://github.com/pypeclub/OpenPype/pull/3435) +- Fusion: Use client query functions [\#3380](https://github.com/pypeclub/OpenPype/pull/3380) +- Resolve: Use client query functions [\#3379](https://github.com/pypeclub/OpenPype/pull/3379) +- General: Host implementation defined with class [\#3337](https://github.com/pypeclub/OpenPype/pull/3337) ## [3.12.0](https://github.com/pypeclub/OpenPype/tree/3.12.0) (2022-06-28) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.12.0-nightly.3...3.12.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.1...3.12.0) + +### 📖 Documentation + +- Fix typo in documentation: pyenv on mac [\#3417](https://github.com/pypeclub/OpenPype/pull/3417) +- Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) + +**🆕 New features** + +- Shotgrid: Add production beta of shotgrid integration [\#2921](https://github.com/pypeclub/OpenPype/pull/2921) + +**🚀 Enhancements** + +- Webserver: Added CORS middleware [\#3422](https://github.com/pypeclub/OpenPype/pull/3422) +- Attribute Defs UI: Files widget show what is allowed to drop in [\#3411](https://github.com/pypeclub/OpenPype/pull/3411) +- General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) +- Hosts: More options for in-host callbacks [\#3357](https://github.com/pypeclub/OpenPype/pull/3357) +- Multiverse: expose some settings to GUI [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) +- Maya: Allow more data to be published along camera 🎥 [\#3304](https://github.com/pypeclub/OpenPype/pull/3304) +- Add root keys and project keys to create starting folder [\#2755](https://github.com/pypeclub/OpenPype/pull/2755) + +**🐛 Bug fixes** + +- NewPublisher: Fix subset name change on change of creator plugin [\#3420](https://github.com/pypeclub/OpenPype/pull/3420) +- Bug: fix invalid avalon import [\#3418](https://github.com/pypeclub/OpenPype/pull/3418) +- Nuke: Fix keyword argument in query function [\#3414](https://github.com/pypeclub/OpenPype/pull/3414) +- Houdini: fix loading and updating vbd/bgeo sequences [\#3408](https://github.com/pypeclub/OpenPype/pull/3408) +- Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) +- General: Filter 
representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) +- Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) +- TVPaint: Make sure exit code is set to not None [\#3382](https://github.com/pypeclub/OpenPype/pull/3382) +- Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) +- Flame: bunch of publishing issues [\#3377](https://github.com/pypeclub/OpenPype/pull/3377) +- Harmony: added unc path to zifile command in Harmony [\#3372](https://github.com/pypeclub/OpenPype/pull/3372) +- Standalone: settings improvements [\#3355](https://github.com/pypeclub/OpenPype/pull/3355) +- Nuke: Load full model hierarchy by default [\#3328](https://github.com/pypeclub/OpenPype/pull/3328) +- Nuke: multiple baking streams with correct slate [\#3245](https://github.com/pypeclub/OpenPype/pull/3245) +- Maya: fix image prefix warning in validator [\#3128](https://github.com/pypeclub/OpenPype/pull/3128) + +**🔀 Refactored code** + +- Unreal: Use client query functions [\#3421](https://github.com/pypeclub/OpenPype/pull/3421) +- General: Move editorial lib to pipeline [\#3419](https://github.com/pypeclub/OpenPype/pull/3419) +- Kitsu: renaming to plural func sync\_all\_projects [\#3397](https://github.com/pypeclub/OpenPype/pull/3397) +- Houdini: Use client query functions [\#3395](https://github.com/pypeclub/OpenPype/pull/3395) +- Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) +- Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) +- Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) +- Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) +- Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) +- Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) +- AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) +- TVPaint: Use client query functions [\#3340](https://github.com/pypeclub/OpenPype/pull/3340) +- Ftrack: Use client query functions [\#3339](https://github.com/pypeclub/OpenPype/pull/3339) +- Standalone Publisher: Use client query functions [\#3330](https://github.com/pypeclub/OpenPype/pull/3330) + +**Merged pull requests:** + +- Sync Queue: Added far future value for null values for dates [\#3371](https://github.com/pypeclub/OpenPype/pull/3371) +- Maya - added support for single frame playblast review [\#3369](https://github.com/pypeclub/OpenPype/pull/3369) +- Houdini: Implement Redshift Proxy Export [\#3196](https://github.com/pypeclub/OpenPype/pull/3196) ## [3.11.1](https://github.com/pypeclub/OpenPype/tree/3.11.1) (2022-06-20) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.1-nightly.1...3.11.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.0...3.11.1) + +**🆕 New features** + +- Flame: custom export temp folder [\#3346](https://github.com/pypeclub/OpenPype/pull/3346) +- Nuke: removing third-party plugins [\#3344](https://github.com/pypeclub/OpenPype/pull/3344) + +**🚀 Enhancements** + +- Pyblish Pype: Hiding/Close issues [\#3367](https://github.com/pypeclub/OpenPype/pull/3367) +- Ftrack: Removed requirement of pypeclub role from default settings [\#3354](https://github.com/pypeclub/OpenPype/pull/3354) +- Kitsu: Prevent crash on missing frames information 
[\#3352](https://github.com/pypeclub/OpenPype/pull/3352) +- Ftrack: Open browser from tray [\#3320](https://github.com/pypeclub/OpenPype/pull/3320) +- Enhancement: More control over thumbnail processing. [\#3259](https://github.com/pypeclub/OpenPype/pull/3259) + +**🐛 Bug fixes** + +- Nuke: bake streams with slate on farm [\#3368](https://github.com/pypeclub/OpenPype/pull/3368) +- Harmony: audio validator has wrong logic [\#3364](https://github.com/pypeclub/OpenPype/pull/3364) +- Nuke: Fix missing variable in extract thumbnail [\#3363](https://github.com/pypeclub/OpenPype/pull/3363) +- Nuke: Fix precollect writes [\#3361](https://github.com/pypeclub/OpenPype/pull/3361) +- AE- fix validate\_scene\_settings and renderLocal [\#3358](https://github.com/pypeclub/OpenPype/pull/3358) +- deadline: fixing misidentification of revieables [\#3356](https://github.com/pypeclub/OpenPype/pull/3356) +- General: Create only one thumbnail per instance [\#3351](https://github.com/pypeclub/OpenPype/pull/3351) +- nuke: adding extract thumbnail settings 3.10 [\#3347](https://github.com/pypeclub/OpenPype/pull/3347) +- General: Fix last version function [\#3345](https://github.com/pypeclub/OpenPype/pull/3345) +- Deadline: added OPENPYPE\_MONGO to filter [\#3336](https://github.com/pypeclub/OpenPype/pull/3336) +- Nuke: fixing farm publishing if review is disabled [\#3306](https://github.com/pypeclub/OpenPype/pull/3306) +- Maya: Fix Yeti errors on Create, Publish and Load [\#3198](https://github.com/pypeclub/OpenPype/pull/3198) + +**🔀 Refactored code** + +- Webpublisher: Use client query functions [\#3333](https://github.com/pypeclub/OpenPype/pull/3333) ## [3.11.0](https://github.com/pypeclub/OpenPype/tree/3.11.0) (2022-06-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.11.0-nightly.4...3.11.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.10.0...3.11.0) + +### 📖 Documentation + +- Documentation: Add app key to template documentation [\#3299](https://github.com/pypeclub/OpenPype/pull/3299) +- doc: adding royal render and multiverse to the web site [\#3285](https://github.com/pypeclub/OpenPype/pull/3285) +- Module: Kitsu module [\#2650](https://github.com/pypeclub/OpenPype/pull/2650) + +**🆕 New features** + +- Multiverse: fixed composition write, full docs, cosmetics [\#3178](https://github.com/pypeclub/OpenPype/pull/3178) + +**🚀 Enhancements** + +- Settings: Settings can be extracted from UI [\#3323](https://github.com/pypeclub/OpenPype/pull/3323) +- updated poetry installation source [\#3316](https://github.com/pypeclub/OpenPype/pull/3316) +- Ftrack: Action to easily create daily review session [\#3310](https://github.com/pypeclub/OpenPype/pull/3310) +- TVPaint: Extractor use mark in/out range to render [\#3309](https://github.com/pypeclub/OpenPype/pull/3309) +- Ftrack: Delivery action can work on ReviewSessions [\#3307](https://github.com/pypeclub/OpenPype/pull/3307) +- Maya: Look assigner UI improvements [\#3298](https://github.com/pypeclub/OpenPype/pull/3298) +- Ftrack: Action to transfer values of hierarchical attributes [\#3284](https://github.com/pypeclub/OpenPype/pull/3284) +- Maya: better handling of legacy review subsets names [\#3269](https://github.com/pypeclub/OpenPype/pull/3269) +- General: Updated windows oiio tool [\#3268](https://github.com/pypeclub/OpenPype/pull/3268) +- Unreal: add support for skeletalMesh and staticMesh to loaders [\#3267](https://github.com/pypeclub/OpenPype/pull/3267) +- Maya: reference loaders could store placeholder in referenced url 
[\#3264](https://github.com/pypeclub/OpenPype/pull/3264) +- TVPaint: Init file for TVPaint worker also handle guideline images [\#3250](https://github.com/pypeclub/OpenPype/pull/3250) +- Nuke: Change default icon path in settings [\#3247](https://github.com/pypeclub/OpenPype/pull/3247) +- Maya: publishing of animation and pointcache on a farm [\#3225](https://github.com/pypeclub/OpenPype/pull/3225) +- Maya: Look assigner UI improvements [\#3208](https://github.com/pypeclub/OpenPype/pull/3208) +- Nuke: add pointcache and animation to loader [\#3186](https://github.com/pypeclub/OpenPype/pull/3186) +- Nuke: Add a gizmo menu [\#3172](https://github.com/pypeclub/OpenPype/pull/3172) +- Support for Unreal 5 [\#3122](https://github.com/pypeclub/OpenPype/pull/3122) + +**🐛 Bug fixes** + +- General: Handle empty source key on instance [\#3342](https://github.com/pypeclub/OpenPype/pull/3342) +- Houdini: Fix Houdini VDB manage update wrong file attribute name [\#3322](https://github.com/pypeclub/OpenPype/pull/3322) +- Nuke: anatomy compatibility issue hacks [\#3321](https://github.com/pypeclub/OpenPype/pull/3321) +- hiero: otio p3 compatibility issue - metadata on effect use update 3.11 [\#3314](https://github.com/pypeclub/OpenPype/pull/3314) +- General: Vendorized modules for Python 2 and update poetry lock [\#3305](https://github.com/pypeclub/OpenPype/pull/3305) +- Fix - added local targets to install host [\#3303](https://github.com/pypeclub/OpenPype/pull/3303) +- Settings: Add missing default settings for nuke gizmo [\#3301](https://github.com/pypeclub/OpenPype/pull/3301) +- Maya: Fix swaped width and height in reviews [\#3300](https://github.com/pypeclub/OpenPype/pull/3300) +- Maya: point cache publish handles Maya instances [\#3297](https://github.com/pypeclub/OpenPype/pull/3297) +- Global: extract review slate issues [\#3286](https://github.com/pypeclub/OpenPype/pull/3286) +- Webpublisher: return only active projects in ProjectsEndpoint [\#3281](https://github.com/pypeclub/OpenPype/pull/3281) +- Hiero: add support for task tags 3.10.x [\#3279](https://github.com/pypeclub/OpenPype/pull/3279) +- General: Fix Oiio tool path resolving [\#3278](https://github.com/pypeclub/OpenPype/pull/3278) +- Maya: Fix udim support for e.g. 
uppercase \ tag [\#3266](https://github.com/pypeclub/OpenPype/pull/3266) +- Nuke: bake reformat was failing on string type [\#3261](https://github.com/pypeclub/OpenPype/pull/3261) +- Maya: hotfix Pxr multitexture in looks [\#3260](https://github.com/pypeclub/OpenPype/pull/3260) +- Unreal: Fix Camera Loading if Layout is missing [\#3255](https://github.com/pypeclub/OpenPype/pull/3255) +- Unreal: Fixed Animation loading in UE5 [\#3240](https://github.com/pypeclub/OpenPype/pull/3240) +- Unreal: Fixed Render creation in UE5 [\#3239](https://github.com/pypeclub/OpenPype/pull/3239) +- Unreal: Fixed Camera loading in UE5 [\#3238](https://github.com/pypeclub/OpenPype/pull/3238) +- Flame: debugging [\#3224](https://github.com/pypeclub/OpenPype/pull/3224) +- add silent audio to slate [\#3162](https://github.com/pypeclub/OpenPype/pull/3162) +- Add timecode to slate [\#2929](https://github.com/pypeclub/OpenPype/pull/2929) + +**🔀 Refactored code** + +- Blender: Use client query functions [\#3331](https://github.com/pypeclub/OpenPype/pull/3331) +- General: Define query functions [\#3288](https://github.com/pypeclub/OpenPype/pull/3288) + +**Merged pull requests:** + +- Maya: add pointcache family to gpu cache loader [\#3318](https://github.com/pypeclub/OpenPype/pull/3318) +- Maya look: skip empty file attributes [\#3274](https://github.com/pypeclub/OpenPype/pull/3274) ## [3.10.0](https://github.com/pypeclub/OpenPype/tree/3.10.0) (2022-05-26) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.6...3.10.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.8...3.10.0) + +### 📖 Documentation + +- Docs: add all-contributors config and initial list [\#3094](https://github.com/pypeclub/OpenPype/pull/3094) +- Nuke docs with videos [\#3052](https://github.com/pypeclub/OpenPype/pull/3052) + +**🆕 New features** + +- General: OpenPype modules publish plugins are registered in host [\#3180](https://github.com/pypeclub/OpenPype/pull/3180) +- General: Creator plugins from addons can be registered [\#3179](https://github.com/pypeclub/OpenPype/pull/3179) +- Ftrack: Single image reviewable [\#3157](https://github.com/pypeclub/OpenPype/pull/3157) +- Nuke: Expose write attributes to settings [\#3123](https://github.com/pypeclub/OpenPype/pull/3123) +- Hiero: Initial frame publish support [\#3106](https://github.com/pypeclub/OpenPype/pull/3106) +- Unreal: Render Publishing [\#2917](https://github.com/pypeclub/OpenPype/pull/2917) +- AfterEffects: Implemented New Publisher [\#2838](https://github.com/pypeclub/OpenPype/pull/2838) +- Unreal: Rendering implementation [\#2410](https://github.com/pypeclub/OpenPype/pull/2410) + +**🚀 Enhancements** + +- Maya: FBX camera export [\#3253](https://github.com/pypeclub/OpenPype/pull/3253) +- General: updating common vendor `scriptmenu` to 1.5.2 [\#3246](https://github.com/pypeclub/OpenPype/pull/3246) +- Project Manager: Allow to paste Tasks into multiple assets at the same time [\#3226](https://github.com/pypeclub/OpenPype/pull/3226) +- Project manager: Sped up project load [\#3216](https://github.com/pypeclub/OpenPype/pull/3216) +- Loader UI: Speed issues of loader with sync server [\#3199](https://github.com/pypeclub/OpenPype/pull/3199) +- Looks: add basic support for Renderman [\#3190](https://github.com/pypeclub/OpenPype/pull/3190) +- Maya: added clean\_import option to Import loader [\#3181](https://github.com/pypeclub/OpenPype/pull/3181) +- Add the scripts menu definition to nuke [\#3168](https://github.com/pypeclub/OpenPype/pull/3168) +- 
Maya: add maya 2023 to default applications [\#3167](https://github.com/pypeclub/OpenPype/pull/3167) +- Compressed bgeo publishing in SAP and Houdini loader [\#3153](https://github.com/pypeclub/OpenPype/pull/3153) +- General: Add 'dataclasses' to required python modules [\#3149](https://github.com/pypeclub/OpenPype/pull/3149) +- Hooks: Tweak logging grammar [\#3147](https://github.com/pypeclub/OpenPype/pull/3147) +- Nuke: settings for reformat node in CreateWriteRender node [\#3143](https://github.com/pypeclub/OpenPype/pull/3143) +- Houdini: Add loader for alembic through Alembic Archive node [\#3140](https://github.com/pypeclub/OpenPype/pull/3140) +- Publisher: UI Modifications and fixes [\#3139](https://github.com/pypeclub/OpenPype/pull/3139) +- General: Simplified OP modules/addons import [\#3137](https://github.com/pypeclub/OpenPype/pull/3137) +- Terminal: Tweak coloring of TrayModuleManager logging enabled states [\#3133](https://github.com/pypeclub/OpenPype/pull/3133) +- General: Cleanup some Loader docstrings [\#3131](https://github.com/pypeclub/OpenPype/pull/3131) +- Nuke: render instance with subset name filtered overrides [\#3117](https://github.com/pypeclub/OpenPype/pull/3117) +- Unreal: Layout and Camera update and remove functions reimplemented and improvements [\#3116](https://github.com/pypeclub/OpenPype/pull/3116) +- Settings: Remove environment groups from settings [\#3115](https://github.com/pypeclub/OpenPype/pull/3115) +- TVPaint: Match renderlayer key with other hosts [\#3110](https://github.com/pypeclub/OpenPype/pull/3110) +- Ftrack: AssetVersion status on publish [\#3108](https://github.com/pypeclub/OpenPype/pull/3108) +- Tray publisher: Simple families from settings [\#3105](https://github.com/pypeclub/OpenPype/pull/3105) +- Local Settings UI: Overlay messages on save and reset [\#3104](https://github.com/pypeclub/OpenPype/pull/3104) +- General: Remove repos related logic [\#3087](https://github.com/pypeclub/OpenPype/pull/3087) +- Standalone publisher: add support for bgeo and vdb [\#3080](https://github.com/pypeclub/OpenPype/pull/3080) +- Houdini: Fix FPS + outdated content pop-ups [\#3079](https://github.com/pypeclub/OpenPype/pull/3079) +- General: Add global log verbose arguments [\#3070](https://github.com/pypeclub/OpenPype/pull/3070) +- Flame: extract presets distribution [\#3063](https://github.com/pypeclub/OpenPype/pull/3063) +- Update collect\_render.py [\#3055](https://github.com/pypeclub/OpenPype/pull/3055) +- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) +- Maya: Implement Hardware Renderer 2.0 support for Render Products [\#2611](https://github.com/pypeclub/OpenPype/pull/2611) + +**🐛 Bug fixes** + +- nuke: use framerange issue [\#3254](https://github.com/pypeclub/OpenPype/pull/3254) +- Ftrack: Chunk sizes for queries has minimal condition [\#3244](https://github.com/pypeclub/OpenPype/pull/3244) +- Maya: renderman displays needs to be filtered [\#3242](https://github.com/pypeclub/OpenPype/pull/3242) +- Ftrack: Validate that the user exists on ftrack [\#3237](https://github.com/pypeclub/OpenPype/pull/3237) +- Maya: Fix support for multiple resolutions [\#3236](https://github.com/pypeclub/OpenPype/pull/3236) +- TVPaint: Look for more groups than 12 [\#3228](https://github.com/pypeclub/OpenPype/pull/3228) +- Hiero: debugging frame range and other 3.10 [\#3222](https://github.com/pypeclub/OpenPype/pull/3222) +- Project Manager: Fix persistent editors on project change 
[\#3218](https://github.com/pypeclub/OpenPype/pull/3218) +- Deadline: instance data overwrite fix [\#3214](https://github.com/pypeclub/OpenPype/pull/3214) +- Ftrack: Push hierarchical attributes action works [\#3210](https://github.com/pypeclub/OpenPype/pull/3210) +- Standalone Publisher: Always create new representation for thumbnail [\#3203](https://github.com/pypeclub/OpenPype/pull/3203) +- Photoshop: skip collector when automatic testing [\#3202](https://github.com/pypeclub/OpenPype/pull/3202) +- Nuke: render/workfile version sync doesn't work on farm [\#3185](https://github.com/pypeclub/OpenPype/pull/3185) +- Ftrack: Review image only if there are no mp4 reviews [\#3183](https://github.com/pypeclub/OpenPype/pull/3183) +- Ftrack: Locations deepcopy issue [\#3177](https://github.com/pypeclub/OpenPype/pull/3177) +- General: Avoid creating multiple thumbnails [\#3176](https://github.com/pypeclub/OpenPype/pull/3176) +- General/Hiero: better clip duration calculation [\#3169](https://github.com/pypeclub/OpenPype/pull/3169) +- General: Oiio conversion for ffmpeg checks for invalid characters [\#3166](https://github.com/pypeclub/OpenPype/pull/3166) +- Fix for attaching render to subset [\#3164](https://github.com/pypeclub/OpenPype/pull/3164) +- Harmony: fixed missing task name in render instance [\#3163](https://github.com/pypeclub/OpenPype/pull/3163) +- Ftrack: Action delete old versions formatting works [\#3152](https://github.com/pypeclub/OpenPype/pull/3152) +- Deadline: fix the output directory [\#3144](https://github.com/pypeclub/OpenPype/pull/3144) +- General: New Session schema [\#3141](https://github.com/pypeclub/OpenPype/pull/3141) +- General: Missing version on headless mode crash properly [\#3136](https://github.com/pypeclub/OpenPype/pull/3136) +- TVPaint: Composite layers in reversed order [\#3135](https://github.com/pypeclub/OpenPype/pull/3135) +- Nuke: fixing default settings for workfile builder loaders [\#3120](https://github.com/pypeclub/OpenPype/pull/3120) +- Nuke: fix anatomy imageio regex default [\#3119](https://github.com/pypeclub/OpenPype/pull/3119) +- General: Python 3 compatibility in queries [\#3112](https://github.com/pypeclub/OpenPype/pull/3112) +- General: TemplateResult can be copied [\#3099](https://github.com/pypeclub/OpenPype/pull/3099) +- General: Collect loaded versions skips not existing representations [\#3095](https://github.com/pypeclub/OpenPype/pull/3095) +- RoyalRender Control Submission - AVALON\_APP\_NAME default [\#3091](https://github.com/pypeclub/OpenPype/pull/3091) +- Ftrack: Update Create Folders action [\#3089](https://github.com/pypeclub/OpenPype/pull/3089) +- Maya: Collect Render fix any render cameras check [\#3088](https://github.com/pypeclub/OpenPype/pull/3088) +- Project Manager: Avoid unnecessary updates of asset documents [\#3083](https://github.com/pypeclub/OpenPype/pull/3083) +- Standalone publisher: Fix plugins install [\#3077](https://github.com/pypeclub/OpenPype/pull/3077) +- General: Extract review sequence is not converted with same names [\#3076](https://github.com/pypeclub/OpenPype/pull/3076) +- Webpublisher: Use variant value [\#3068](https://github.com/pypeclub/OpenPype/pull/3068) +- Nuke: Add aov matching even for remainder and prerender [\#3060](https://github.com/pypeclub/OpenPype/pull/3060) +- Fix support for Renderman in Maya [\#3006](https://github.com/pypeclub/OpenPype/pull/3006) + +**🔀 Refactored code** + +- Avalon repo removed from Jobs workflow [\#3193](https://github.com/pypeclub/OpenPype/pull/3193) +- General: 
Remove remaining imports from avalon [\#3130](https://github.com/pypeclub/OpenPype/pull/3130) +- General: Move mongo db logic and remove avalon repository [\#3066](https://github.com/pypeclub/OpenPype/pull/3066) +- General: Move host install [\#3009](https://github.com/pypeclub/OpenPype/pull/3009) + +**Merged pull requests:** + +- Harmony: message length in 21.1 [\#3257](https://github.com/pypeclub/OpenPype/pull/3257) +- Harmony: 21.1 fix [\#3249](https://github.com/pypeclub/OpenPype/pull/3249) +- Maya: added jpg to filter for Image Plane Loader [\#3223](https://github.com/pypeclub/OpenPype/pull/3223) +- Webpublisher: replace space by underscore in subset names [\#3160](https://github.com/pypeclub/OpenPype/pull/3160) +- StandalonePublisher: removed Extract Background plugins [\#3093](https://github.com/pypeclub/OpenPype/pull/3093) +- Nuke: added suspend\_publish knob [\#3078](https://github.com/pypeclub/OpenPype/pull/3078) +- Bump async from 2.6.3 to 2.6.4 in /website [\#3065](https://github.com/pypeclub/OpenPype/pull/3065) +- SiteSync: Download all workfile inputs [\#2966](https://github.com/pypeclub/OpenPype/pull/2966) +- Photoshop: New Publisher [\#2933](https://github.com/pypeclub/OpenPype/pull/2933) +- Bump pillow from 9.0.0 to 9.0.1 [\#2880](https://github.com/pypeclub/OpenPype/pull/2880) +- AfterEffects: Allow configuration of default variant via Settings [\#2856](https://github.com/pypeclub/OpenPype/pull/2856) ## [3.9.8](https://github.com/pypeclub/OpenPype/tree/3.9.8) (2022-05-19) @@ -158,87 +778,1085 @@ ## [3.9.5](https://github.com/pypeclub/OpenPype/tree/3.9.5) (2022-04-25) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.2...3.9.5) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.4...3.9.5) ## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.4-nightly.2...3.9.4) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.3...3.9.4) + +### 📖 Documentation + +- Documentation: more info about Tasks [\#3062](https://github.com/pypeclub/OpenPype/pull/3062) +- Documentation: Python requirements to 3.7.9 [\#3035](https://github.com/pypeclub/OpenPype/pull/3035) +- Website Docs: Remove unused pages [\#2974](https://github.com/pypeclub/OpenPype/pull/2974) + +**🆕 New features** + +- General: Local overrides for environment variables [\#3045](https://github.com/pypeclub/OpenPype/pull/3045) +- Flame: Flare integration preparation [\#2928](https://github.com/pypeclub/OpenPype/pull/2928) + +**🚀 Enhancements** + +- TVPaint: Added init file for worker to triggers missing sound file dialog [\#3053](https://github.com/pypeclub/OpenPype/pull/3053) +- Ftrack: Custom attributes can be filled in slate values [\#3036](https://github.com/pypeclub/OpenPype/pull/3036) +- Resolve environment variable in google drive credential path [\#3008](https://github.com/pypeclub/OpenPype/pull/3008) + +**🐛 Bug fixes** + +- GitHub: Updated push-protected action in github workflow [\#3064](https://github.com/pypeclub/OpenPype/pull/3064) +- Nuke: Typos in imports from Nuke implementation [\#3061](https://github.com/pypeclub/OpenPype/pull/3061) +- Hotfix: fixing deadline job publishing [\#3059](https://github.com/pypeclub/OpenPype/pull/3059) +- General: Extract Review handle invalid characters for ffmpeg [\#3050](https://github.com/pypeclub/OpenPype/pull/3050) +- Slate Review: Support to keep format on slate concatenation 
[\#3049](https://github.com/pypeclub/OpenPype/pull/3049) +- Webpublisher: fix processing of workfile [\#3048](https://github.com/pypeclub/OpenPype/pull/3048) +- Ftrack: Integrate ftrack api fix [\#3044](https://github.com/pypeclub/OpenPype/pull/3044) +- Webpublisher - removed wrong hardcoded family [\#3043](https://github.com/pypeclub/OpenPype/pull/3043) +- LibraryLoader: Use current project for asset query in families filter [\#3042](https://github.com/pypeclub/OpenPype/pull/3042) +- SiteSync: Providers ignore that site is disabled [\#3041](https://github.com/pypeclub/OpenPype/pull/3041) +- Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040) +- SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018) +- Maya: invalid review flag on rendered AOVs [\#2915](https://github.com/pypeclub/OpenPype/pull/2915) + +**Merged pull requests:** + +- Deadline: reworked pools assignment [\#3051](https://github.com/pypeclub/OpenPype/pull/3051) +- Houdini: Avoid ImportError on `hdefereval` when Houdini runs without UI [\#2987](https://github.com/pypeclub/OpenPype/pull/2987) ## [3.9.3](https://github.com/pypeclub/OpenPype/tree/3.9.3) (2022-04-07) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.3-nightly.2...3.9.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.2...3.9.3) + +### 📖 Documentation + +- Documentation: Added mention of adding My Drive as a root [\#2999](https://github.com/pypeclub/OpenPype/pull/2999) +- Website Docs: Manager Ftrack fix broken links [\#2979](https://github.com/pypeclub/OpenPype/pull/2979) +- Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951) +- Documentation: New publisher develop docs [\#2896](https://github.com/pypeclub/OpenPype/pull/2896) + +**🆕 New features** + +- Ftrack: Add description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027) +- nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992) +- Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988) +- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) +- Multiverse: Initial Support [\#2908](https://github.com/pypeclub/OpenPype/pull/2908) + +**🚀 Enhancements** + +- General: default workfile subset name for workfile [\#3011](https://github.com/pypeclub/OpenPype/pull/3011) +- Ftrack: Add more options for note text of integrate ftrack note [\#3025](https://github.com/pypeclub/OpenPype/pull/3025) +- Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) +- Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) +- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) +- Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) +- TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) +- Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) +- Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) +- General: `METADATA_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980) +- General: Tools with host filters 
[\#2975](https://github.com/pypeclub/OpenPype/pull/2975) +- Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967) +- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945) +- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943) +- TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942) +- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) +- Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) +- General: Default modules loaded dynamically [\#2923](https://github.com/pypeclub/OpenPype/pull/2923) +- CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919) +- Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916) +- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) +- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) +- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) + +**🐛 Bug fixes** + +- General: Fix validate asset docs plug-in filename and class name [\#3029](https://github.com/pypeclub/OpenPype/pull/3029) +- Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033) +- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) +- General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028) +- Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) +- AfterEffects: Added creating subset name for workfile from template [\#3023](https://github.com/pypeclub/OpenPype/pull/3023) +- General: Add example addons to ignored [\#3022](https://github.com/pypeclub/OpenPype/pull/3022) +- Maya: Remove missing import [\#3017](https://github.com/pypeclub/OpenPype/pull/3017) +- Ftrack: multiple reviewable componets [\#3012](https://github.com/pypeclub/OpenPype/pull/3012) +- Tray publisher: Fixes after code movement [\#3010](https://github.com/pypeclub/OpenPype/pull/3010) +- Hosts: Remove path existence checks in 'add\_implementation\_envs' [\#3004](https://github.com/pypeclub/OpenPype/pull/3004) +- Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) +- Fix - remove doubled dot in workfile created from template [\#2998](https://github.com/pypeclub/OpenPype/pull/2998) +- Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996) +- PS: fix renaming subset incorrectly in PS [\#2991](https://github.com/pypeclub/OpenPype/pull/2991) +- Fix: Disable setuptools auto discovery [\#2990](https://github.com/pypeclub/OpenPype/pull/2990) +- AEL: fix opening existing workfile if no scene opened [\#2989](https://github.com/pypeclub/OpenPype/pull/2989) +- Maya: Don't do hardlinks on windows for look publishing [\#2986](https://github.com/pypeclub/OpenPype/pull/2986) +- Settings UI: Fix version completer on linux [\#2981](https://github.com/pypeclub/OpenPype/pull/2981) +- Photoshop: Fix creation 
of subset names in PS review and workfile [\#2969](https://github.com/pypeclub/OpenPype/pull/2969) +- Slack: Added default for review\_upload\_limit for Slack [\#2965](https://github.com/pypeclub/OpenPype/pull/2965) +- General: OIIO conversion for ffmeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958) +- Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956) +- General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950) +- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949) +- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948) +- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947) +- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944) +- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941) +- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939) +- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936) +- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934) +- Maya: Do not pass `set` to maya commands \(fixes support for older maya versions\) [\#2932](https://github.com/pypeclub/OpenPype/pull/2932) +- General: Don't print log record on OSError [\#2926](https://github.com/pypeclub/OpenPype/pull/2926) +- Hiero: Fix import of 'register\_event\_callback' [\#2924](https://github.com/pypeclub/OpenPype/pull/2924) +- Flame: centos related debugging [\#2922](https://github.com/pypeclub/OpenPype/pull/2922) +- Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905) +- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) + +**🔀 Refactored code** + +- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935) +- General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931) +- General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927) +- General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918) +- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) +- General: Move remaining plugins from avalon [\#2912](https://github.com/pypeclub/OpenPype/pull/2912) + +**Merged pull requests:** + +- Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030) +- Bump paramiko from 2.9.2 to 2.10.1 [\#2973](https://github.com/pypeclub/OpenPype/pull/2973) +- Bump minimist from 1.2.5 to 1.2.6 in /website [\#2954](https://github.com/pypeclub/OpenPype/pull/2954) +- Bump node-forge from 1.2.1 to 1.3.0 in /website [\#2953](https://github.com/pypeclub/OpenPype/pull/2953) +- Maya - added transparency into review creator [\#2952](https://github.com/pypeclub/OpenPype/pull/2952) ## [3.9.2](https://github.com/pypeclub/OpenPype/tree/3.9.2) (2022-04-04) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.2-nightly.4...3.9.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...3.9.2) ## 
[3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.1-nightly.3...3.9.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) + +**🚀 Enhancements** + +- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) +- nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) +- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) +- Nuke: ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) +- Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) + +**🐛 Bug fixes** + +- General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904) +- Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902) +- Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) +- SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) +- Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) +- General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) +- General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) +- Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) +- Flame Babypublisher optimalization [\#2806](https://github.com/pypeclub/OpenPype/pull/2806) +- hotfix: OIIO tool path - add extension on windows [\#2618](https://github.com/pypeclub/OpenPype/pull/2618) + +**🔀 Refactored code** + +- General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) +- General: Move loader logic from avalon to openpype [\#2886](https://github.com/pypeclub/OpenPype/pull/2886) ## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.0-nightly.9...3.9.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.2...3.9.0) + +**Deprecated:** + +- Houdini: Remove unused code [\#2779](https://github.com/pypeclub/OpenPype/pull/2779) +- Loader: Remove default family states for hosts from code [\#2706](https://github.com/pypeclub/OpenPype/pull/2706) +- AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845) + +### 📖 Documentation + +- Documentation: fixed broken links [\#2799](https://github.com/pypeclub/OpenPype/pull/2799) +- Documentation: broken link fix [\#2785](https://github.com/pypeclub/OpenPype/pull/2785) +- Documentation: link fixes [\#2772](https://github.com/pypeclub/OpenPype/pull/2772) +- Update docusaurus to latest version [\#2760](https://github.com/pypeclub/OpenPype/pull/2760) +- Various testing updates [\#2726](https://github.com/pypeclub/OpenPype/pull/2726) +- documentation: add example to `repack-version` command [\#2669](https://github.com/pypeclub/OpenPype/pull/2669) +- Update docusaurus [\#2639](https://github.com/pypeclub/OpenPype/pull/2639) +- Documentation: Fixed relative links [\#2621](https://github.com/pypeclub/OpenPype/pull/2621) +- 
Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878) + +**🆕 New features** + +- Flame: loading clips to reels [\#2622](https://github.com/pypeclub/OpenPype/pull/2622) +- General: Store settings by OpenPype version [\#2570](https://github.com/pypeclub/OpenPype/pull/2570) + +**🚀 Enhancements** + +- New: Validation exceptions [\#2841](https://github.com/pypeclub/OpenPype/pull/2841) +- General: Set context environments for non host applications [\#2803](https://github.com/pypeclub/OpenPype/pull/2803) +- Houdini: Remove duplicate ValidateOutputNode plug-in [\#2780](https://github.com/pypeclub/OpenPype/pull/2780) +- Tray publisher: New Tray Publisher host \(beta\) [\#2778](https://github.com/pypeclub/OpenPype/pull/2778) +- Slack: Added regex for filtering on subset names [\#2775](https://github.com/pypeclub/OpenPype/pull/2775) +- Houdini: Implement Reset Frame Range [\#2770](https://github.com/pypeclub/OpenPype/pull/2770) +- Pyblish Pype: Remove redundant new line in installed fonts printing [\#2758](https://github.com/pypeclub/OpenPype/pull/2758) +- Flame: use Shot Name on segment for asset name [\#2751](https://github.com/pypeclub/OpenPype/pull/2751) +- Flame: adding validator source clip [\#2746](https://github.com/pypeclub/OpenPype/pull/2746) +- Work Files: Preserve subversion comment of current filename by default [\#2734](https://github.com/pypeclub/OpenPype/pull/2734) +- Maya: set Deadline job/batch name to original source workfile name instead of published workfile [\#2733](https://github.com/pypeclub/OpenPype/pull/2733) +- Ftrack: Disable ftrack module by default [\#2732](https://github.com/pypeclub/OpenPype/pull/2732) +- Project Manager: Disable add task, add asset and save button when not in a project [\#2727](https://github.com/pypeclub/OpenPype/pull/2727) +- dropbox handle big file [\#2718](https://github.com/pypeclub/OpenPype/pull/2718) +- Fusion Move PR: Minor tweaks to Fusion integration [\#2716](https://github.com/pypeclub/OpenPype/pull/2716) +- RoyalRender: Minor enhancements [\#2700](https://github.com/pypeclub/OpenPype/pull/2700) +- Nuke: prerender with review knob [\#2691](https://github.com/pypeclub/OpenPype/pull/2691) +- Maya configurable unit validator [\#2680](https://github.com/pypeclub/OpenPype/pull/2680) +- General: Add settings for CleanUpFarm and disable the plugin by default [\#2679](https://github.com/pypeclub/OpenPype/pull/2679) +- Project Manager: Only allow scroll wheel edits when spinbox is active [\#2678](https://github.com/pypeclub/OpenPype/pull/2678) +- Ftrack: Sync description to assets [\#2670](https://github.com/pypeclub/OpenPype/pull/2670) +- Houdini: Moved to OpenPype [\#2658](https://github.com/pypeclub/OpenPype/pull/2658) +- Maya: Move implementation to OpenPype [\#2649](https://github.com/pypeclub/OpenPype/pull/2649) +- General: FFmpeg conversion also check attribute string length [\#2635](https://github.com/pypeclub/OpenPype/pull/2635) +- Houdini: Load Arnold .ass procedurals into Houdini [\#2606](https://github.com/pypeclub/OpenPype/pull/2606) +- Deadline: Simplify GlobalJobPreLoad logic [\#2605](https://github.com/pypeclub/OpenPype/pull/2605) +- Houdini: Implement Arnold .ass standin extraction from Houdini \(also support .ass.gz\) [\#2603](https://github.com/pypeclub/OpenPype/pull/2603) +- New Publisher: New features and preparations for new standalone publisher [\#2556](https://github.com/pypeclub/OpenPype/pull/2556) +- Fix Maya 2022 Python 3 compatibility 
[\#2445](https://github.com/pypeclub/OpenPype/pull/2445) +- TVPaint: Use new publisher exceptions in validators [\#2435](https://github.com/pypeclub/OpenPype/pull/2435) +- Harmony: Added new style validations for New Publisher [\#2434](https://github.com/pypeclub/OpenPype/pull/2434) +- Aftereffects: New style validations for New publisher [\#2430](https://github.com/pypeclub/OpenPype/pull/2430) +- Farm publishing: New cleanup plugin for Maya renders on farm [\#2390](https://github.com/pypeclub/OpenPype/pull/2390) +- General: Subset name filtering in ExtractReview outpus [\#2872](https://github.com/pypeclub/OpenPype/pull/2872) +- NewPublisher: Descriptions and Icons in creator dialog [\#2867](https://github.com/pypeclub/OpenPype/pull/2867) +- NewPublisher: Changing task on publishing instance [\#2863](https://github.com/pypeclub/OpenPype/pull/2863) +- TrayPublisher: Choose project widget is more clear [\#2859](https://github.com/pypeclub/OpenPype/pull/2859) +- Maya: add loaded containers to published instance [\#2837](https://github.com/pypeclub/OpenPype/pull/2837) +- Ftrack: Can sync fps as string [\#2836](https://github.com/pypeclub/OpenPype/pull/2836) +- General: Custom function for find executable [\#2822](https://github.com/pypeclub/OpenPype/pull/2822) +- General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817) +- global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812) +- Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811) +- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805) +- Houdini: Move Houdini Save Current File to beginning of ExtractorOrder [\#2747](https://github.com/pypeclub/OpenPype/pull/2747) +- Global: adding studio name/code to anatomy template formatting data [\#2630](https://github.com/pypeclub/OpenPype/pull/2630) + +**🐛 Bug fixes** + +- Settings UI: Search case sensitivity [\#2810](https://github.com/pypeclub/OpenPype/pull/2810) +- resolve: fixing fusion module loading [\#2802](https://github.com/pypeclub/OpenPype/pull/2802) +- Ftrack: Unset task ids from asset versions before tasks are removed [\#2800](https://github.com/pypeclub/OpenPype/pull/2800) +- Slack: fail gracefully if slack exception [\#2798](https://github.com/pypeclub/OpenPype/pull/2798) +- Flame: Fix version string in default settings [\#2783](https://github.com/pypeclub/OpenPype/pull/2783) +- After Effects: Fix typo in name `afftereffects` -\> `aftereffects` [\#2768](https://github.com/pypeclub/OpenPype/pull/2768) +- Houdini: Fix open last workfile [\#2767](https://github.com/pypeclub/OpenPype/pull/2767) +- Avoid renaming udim indexes [\#2765](https://github.com/pypeclub/OpenPype/pull/2765) +- Maya: Fix `unique_namespace` when in an namespace that is empty [\#2759](https://github.com/pypeclub/OpenPype/pull/2759) +- Loader UI: Fix right click in representation widget [\#2757](https://github.com/pypeclub/OpenPype/pull/2757) +- Harmony: Rendering in Deadline didn't work in other machines than submitter [\#2754](https://github.com/pypeclub/OpenPype/pull/2754) +- Aftereffects 2022 and Deadline [\#2748](https://github.com/pypeclub/OpenPype/pull/2748) +- Flame: bunch of bugs [\#2745](https://github.com/pypeclub/OpenPype/pull/2745) +- Maya: Save current scene on workfile publish [\#2744](https://github.com/pypeclub/OpenPype/pull/2744) +- Version Up: Preserve parts of filename after version number \(like subversion\) on version\_up 
[\#2741](https://github.com/pypeclub/OpenPype/pull/2741) +- Loader UI: Multiple asset selection and underline colors fixed [\#2731](https://github.com/pypeclub/OpenPype/pull/2731) +- General: Fix loading of unused chars in xml format [\#2729](https://github.com/pypeclub/OpenPype/pull/2729) +- TVPaint: Set objectName with members [\#2725](https://github.com/pypeclub/OpenPype/pull/2725) +- General: Don't use 'objectName' from loaded references [\#2715](https://github.com/pypeclub/OpenPype/pull/2715) +- Settings: Studio Project anatomy is queried using right keys [\#2711](https://github.com/pypeclub/OpenPype/pull/2711) +- Local Settings: Additional applications don't break UI [\#2710](https://github.com/pypeclub/OpenPype/pull/2710) +- Maya: Remove some unused code [\#2709](https://github.com/pypeclub/OpenPype/pull/2709) +- Houdini: Fix refactor of Houdini host move for CreateArnoldAss [\#2704](https://github.com/pypeclub/OpenPype/pull/2704) +- LookAssigner: Fix imports after moving code to OpenPype repository [\#2701](https://github.com/pypeclub/OpenPype/pull/2701) +- Multiple hosts: unify menu style across hosts [\#2693](https://github.com/pypeclub/OpenPype/pull/2693) +- Maya Redshift fixes [\#2692](https://github.com/pypeclub/OpenPype/pull/2692) +- Maya: fix fps validation popup [\#2685](https://github.com/pypeclub/OpenPype/pull/2685) +- Houdini Explicitly collect correct frame name even in case of single frame render when `frameStart` is provided [\#2676](https://github.com/pypeclub/OpenPype/pull/2676) +- hiero: fix effect collector name and order [\#2673](https://github.com/pypeclub/OpenPype/pull/2673) +- Maya: Fix menu callbacks [\#2671](https://github.com/pypeclub/OpenPype/pull/2671) +- hiero: removing obsolete unsupported plugin [\#2667](https://github.com/pypeclub/OpenPype/pull/2667) +- Launcher: Fix access to 'data' attribute on actions [\#2659](https://github.com/pypeclub/OpenPype/pull/2659) +- Maya `vrscene` loader fixes [\#2633](https://github.com/pypeclub/OpenPype/pull/2633) +- Houdini: fix usd family in loader and integrators [\#2631](https://github.com/pypeclub/OpenPype/pull/2631) +- Maya: Add only reference node to look family container like with other families [\#2508](https://github.com/pypeclub/OpenPype/pull/2508) +- General: Missing time function [\#2877](https://github.com/pypeclub/OpenPype/pull/2877) +- Deadline: Fix plugin name for tile assemble [\#2868](https://github.com/pypeclub/OpenPype/pull/2868) +- Nuke: gizmo precollect fix [\#2866](https://github.com/pypeclub/OpenPype/pull/2866) +- General: Fix hardlink for windows [\#2864](https://github.com/pypeclub/OpenPype/pull/2864) +- General: ffmpeg was crashing on slate merge [\#2860](https://github.com/pypeclub/OpenPype/pull/2860) +- WebPublisher: Video file was published with one too many frame [\#2858](https://github.com/pypeclub/OpenPype/pull/2858) +- New Publisher: Error dialog got right styles [\#2857](https://github.com/pypeclub/OpenPype/pull/2857) +- General: Fix getattr clalback on dynamic modules [\#2855](https://github.com/pypeclub/OpenPype/pull/2855) +- Nuke: slate resolution to input video resolution [\#2853](https://github.com/pypeclub/OpenPype/pull/2853) +- WebPublisher: Fix username stored in DB [\#2852](https://github.com/pypeclub/OpenPype/pull/2852) +- WebPublisher: Fix wrong number of frames for video file [\#2851](https://github.com/pypeclub/OpenPype/pull/2851) +- Nuke: Fix family test in validate\_write\_legacy to work with stillImage [\#2847](https://github.com/pypeclub/OpenPype/pull/2847) +- Nuke: 
fix multiple baking profile farm publishing [\#2842](https://github.com/pypeclub/OpenPype/pull/2842) +- Blender: Fixed parameters for FBX export of the camera [\#2840](https://github.com/pypeclub/OpenPype/pull/2840) +- Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832) +- Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828) +- Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827) +- Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825) +- Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824) +- General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821) +- Settings UI: Fix "Apply from" action [\#2820](https://github.com/pypeclub/OpenPype/pull/2820) +- Ftrack: Job killer with missing user [\#2819](https://github.com/pypeclub/OpenPype/pull/2819) +- Nuke: Use AVALON\_APP to get value for "app" key [\#2818](https://github.com/pypeclub/OpenPype/pull/2818) +- StandalonePublisher: use dynamic groups in subset names [\#2816](https://github.com/pypeclub/OpenPype/pull/2816) + +**🔀 Refactored code** + +- Ftrack: Moved module one hierarchy level higher [\#2792](https://github.com/pypeclub/OpenPype/pull/2792) +- SyncServer: Moved module one hierarchy level higher [\#2791](https://github.com/pypeclub/OpenPype/pull/2791) +- Royal render: Move module one hierarchy level higher [\#2790](https://github.com/pypeclub/OpenPype/pull/2790) +- Deadline: Move module one hierarchy level higher [\#2789](https://github.com/pypeclub/OpenPype/pull/2789) +- Refactor: move webserver tool to openpype [\#2876](https://github.com/pypeclub/OpenPype/pull/2876) +- General: Move create logic from avalon to OpenPype [\#2854](https://github.com/pypeclub/OpenPype/pull/2854) +- General: Add vendors from avalon [\#2848](https://github.com/pypeclub/OpenPype/pull/2848) +- General: Basic event system [\#2846](https://github.com/pypeclub/OpenPype/pull/2846) +- General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839) +- Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829) +- Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823) +- General: Extract template formatting from anatomy [\#2766](https://github.com/pypeclub/OpenPype/pull/2766) + +**Merged pull requests:** + +- Fusion: Moved implementation into OpenPype [\#2713](https://github.com/pypeclub/OpenPype/pull/2713) +- TVPaint: Plugin build without dependencies [\#2705](https://github.com/pypeclub/OpenPype/pull/2705) +- Webpublisher: Photoshop create a beauty png [\#2689](https://github.com/pypeclub/OpenPype/pull/2689) +- Ftrack: Hierarchical attributes are queried properly [\#2682](https://github.com/pypeclub/OpenPype/pull/2682) +- Maya: Add Validate Frame Range settings [\#2661](https://github.com/pypeclub/OpenPype/pull/2661) +- Harmony: move to Openpype [\#2657](https://github.com/pypeclub/OpenPype/pull/2657) +- Maya: cleanup duplicate rendersetup code [\#2642](https://github.com/pypeclub/OpenPype/pull/2642) +- Deadline: Be able to pass Mongo url to job [\#2616](https://github.com/pypeclub/OpenPype/pull/2616) ## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07) -[Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.2-nightly.3...3.8.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.1...3.8.2) + +### 📖 Documentation + +- Cosmetics: Fix common typos in openpype/website [\#2617](https://github.com/pypeclub/OpenPype/pull/2617) + +**🚀 Enhancements** + +- TVPaint: Image loaders also work on review family [\#2638](https://github.com/pypeclub/OpenPype/pull/2638) +- General: Project backup tools [\#2629](https://github.com/pypeclub/OpenPype/pull/2629) +- nuke: adding clear button to write nodes [\#2627](https://github.com/pypeclub/OpenPype/pull/2627) +- Ftrack: Family to Asset type mapping is in settings [\#2602](https://github.com/pypeclub/OpenPype/pull/2602) +- Nuke: load color space from representation data [\#2576](https://github.com/pypeclub/OpenPype/pull/2576) + +**🐛 Bug fixes** + +- Fix pulling of cx\_freeze 6.10 [\#2628](https://github.com/pypeclub/OpenPype/pull/2628) +- Global: fix broken otio review extractor [\#2590](https://github.com/pypeclub/OpenPype/pull/2590) + +**Merged pull requests:** + +- WebPublisher: fix instance duplicates [\#2641](https://github.com/pypeclub/OpenPype/pull/2641) +- Fix - safer pulling of task name for webpublishing from PS [\#2613](https://github.com/pypeclub/OpenPype/pull/2613) ## [3.8.1](https://github.com/pypeclub/OpenPype/tree/3.8.1) (2022-02-01) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.1-nightly.3...3.8.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.0...3.8.1) + +**🚀 Enhancements** + +- Webpublisher: Thumbnail extractor [\#2600](https://github.com/pypeclub/OpenPype/pull/2600) +- Loader: Allow to toggle default family filters between "include" or "exclude" filtering [\#2541](https://github.com/pypeclub/OpenPype/pull/2541) +- Launcher: Added context menu to to skip opening last workfile [\#2536](https://github.com/pypeclub/OpenPype/pull/2536) +- Unreal: JSON Layout Loading support [\#2066](https://github.com/pypeclub/OpenPype/pull/2066) + +**🐛 Bug fixes** + +- Release/3.8.0 [\#2619](https://github.com/pypeclub/OpenPype/pull/2619) +- Settings: Enum does not store empty string if has single item to select [\#2615](https://github.com/pypeclub/OpenPype/pull/2615) +- switch distutils to sysconfig for `get_platform()` [\#2594](https://github.com/pypeclub/OpenPype/pull/2594) +- Fix poetry index and speedcopy update [\#2589](https://github.com/pypeclub/OpenPype/pull/2589) +- Webpublisher: Fix - subset names from processed .psd used wrong value for task [\#2586](https://github.com/pypeclub/OpenPype/pull/2586) +- `vrscene` creator Deadline webservice URL handling [\#2580](https://github.com/pypeclub/OpenPype/pull/2580) +- global: track name was failing if duplicated root word in name [\#2568](https://github.com/pypeclub/OpenPype/pull/2568) +- Validate Maya Rig produces no cycle errors [\#2484](https://github.com/pypeclub/OpenPype/pull/2484) + +**Merged pull requests:** + +- Bump pillow from 8.4.0 to 9.0.0 [\#2595](https://github.com/pypeclub/OpenPype/pull/2595) +- Webpublisher: Skip version collect [\#2591](https://github.com/pypeclub/OpenPype/pull/2591) +- build\(deps\): bump pillow from 8.4.0 to 9.0.0 [\#2523](https://github.com/pypeclub/OpenPype/pull/2523) ## [3.8.0](https://github.com/pypeclub/OpenPype/tree/3.8.0) (2022-01-24) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.0-nightly.7...3.8.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...3.8.0) + +### 📖 Documentation + +- Variable in docs 
renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) + +**🆕 New features** + +- Flame: extracting segments with trans-coding [\#2547](https://github.com/pypeclub/OpenPype/pull/2547) +- Maya : V-Ray Proxy - load all ABC files via proxy [\#2544](https://github.com/pypeclub/OpenPype/pull/2544) +- Maya to Unreal: Extended static mesh workflow [\#2537](https://github.com/pypeclub/OpenPype/pull/2537) +- Flame: collecting publishable instances [\#2519](https://github.com/pypeclub/OpenPype/pull/2519) +- Flame: create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495) +- Flame: OpenTimelineIO Export Modul [\#2398](https://github.com/pypeclub/OpenPype/pull/2398) + +**🚀 Enhancements** + +- Webpublisher: Moved error at the beginning of the log [\#2559](https://github.com/pypeclub/OpenPype/pull/2559) +- Ftrack: Use ApplicationManager to get DJV path [\#2558](https://github.com/pypeclub/OpenPype/pull/2558) +- Webpublisher: Added endpoint to reprocess batch through UI [\#2555](https://github.com/pypeclub/OpenPype/pull/2555) +- Settings: PathInput strip passed string [\#2550](https://github.com/pypeclub/OpenPype/pull/2550) +- Global: Exctract Review anatomy fill data with output name [\#2548](https://github.com/pypeclub/OpenPype/pull/2548) +- Cosmetics: Clean up some cosmetics / typos [\#2542](https://github.com/pypeclub/OpenPype/pull/2542) +- General: Validate if current process OpenPype version is requested version [\#2529](https://github.com/pypeclub/OpenPype/pull/2529) +- General: Be able to use anatomy data in ffmpeg output arguments [\#2525](https://github.com/pypeclub/OpenPype/pull/2525) +- Expose toggle publish plug-in settings for Maya Look Shading Engine Naming [\#2521](https://github.com/pypeclub/OpenPype/pull/2521) +- Photoshop: Move implementation to OpenPype [\#2510](https://github.com/pypeclub/OpenPype/pull/2510) +- TimersManager: Move module one hierarchy higher [\#2501](https://github.com/pypeclub/OpenPype/pull/2501) +- Slack: notifications are sent with Openpype logo and bot name [\#2499](https://github.com/pypeclub/OpenPype/pull/2499) +- Slack: Add review to notification message [\#2498](https://github.com/pypeclub/OpenPype/pull/2498) +- Ftrack: Event handlers settings [\#2496](https://github.com/pypeclub/OpenPype/pull/2496) +- Tools: Fix style and modality of errors in loader and creator [\#2489](https://github.com/pypeclub/OpenPype/pull/2489) +- Maya: Collect 'fps' animation data only for "review" instances [\#2486](https://github.com/pypeclub/OpenPype/pull/2486) +- Project Manager: Remove project button cleanup [\#2482](https://github.com/pypeclub/OpenPype/pull/2482) +- Tools: Be able to change models of tasks and assets widgets [\#2475](https://github.com/pypeclub/OpenPype/pull/2475) +- Publish pype: Reduce publish process defering [\#2464](https://github.com/pypeclub/OpenPype/pull/2464) +- Maya: Improve speed of Collect History logic [\#2460](https://github.com/pypeclub/OpenPype/pull/2460) +- Maya: Validate Rig Controllers - fix Error: in script editor [\#2459](https://github.com/pypeclub/OpenPype/pull/2459) +- Maya: Validate NGONs simplify and speed-up [\#2458](https://github.com/pypeclub/OpenPype/pull/2458) +- Maya: Optimize Validate Locked Normals speed for dense polymeshes [\#2457](https://github.com/pypeclub/OpenPype/pull/2457) +- Maya: Refactor missing \_get\_reference\_node method [\#2455](https://github.com/pypeclub/OpenPype/pull/2455) +- Houdini: Remove broken unique name counter 
[\#2450](https://github.com/pypeclub/OpenPype/pull/2450) +- Maya: Improve lib.polyConstraint performance when Select tool is not the active tool context [\#2447](https://github.com/pypeclub/OpenPype/pull/2447) +- General: Validate third party before build [\#2425](https://github.com/pypeclub/OpenPype/pull/2425) +- Maya : add option to not group reference in ReferenceLoader [\#2383](https://github.com/pypeclub/OpenPype/pull/2383) + +**🐛 Bug fixes** + +- AfterEffects: Fix - removed obsolete import [\#2577](https://github.com/pypeclub/OpenPype/pull/2577) +- General: OpenPype version updates [\#2575](https://github.com/pypeclub/OpenPype/pull/2575) +- Ftrack: Delete action revision [\#2563](https://github.com/pypeclub/OpenPype/pull/2563) +- Webpublisher: ftrack shows incorrect user names [\#2560](https://github.com/pypeclub/OpenPype/pull/2560) +- General: Do not validate version if build does not support it [\#2557](https://github.com/pypeclub/OpenPype/pull/2557) +- Webpublisher: Fixed progress reporting [\#2553](https://github.com/pypeclub/OpenPype/pull/2553) +- Fix Maya AssProxyLoader version switch [\#2551](https://github.com/pypeclub/OpenPype/pull/2551) +- General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549) +- Houdini: vdbcache family preserve frame numbers on publish integration + enable validate version for Houdini [\#2535](https://github.com/pypeclub/OpenPype/pull/2535) +- Maya: Fix Load VDB to V-Ray [\#2533](https://github.com/pypeclub/OpenPype/pull/2533) +- Maya: ReferenceLoader fix not unique group name error for attach to root [\#2532](https://github.com/pypeclub/OpenPype/pull/2532) +- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531) +- Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522) +- Maya: Fix Extract Look with space in names [\#2518](https://github.com/pypeclub/OpenPype/pull/2518) +- Fix published frame content for sequence starting with 0 [\#2513](https://github.com/pypeclub/OpenPype/pull/2513) +- Maya: reset empty string attributes correctly to "" instead of "None" [\#2506](https://github.com/pypeclub/OpenPype/pull/2506) +- Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505) +- General: Settings work if OpenPypeVersion is available [\#2494](https://github.com/pypeclub/OpenPype/pull/2494) +- General: PYTHONPATH may break OpenPype dependencies [\#2493](https://github.com/pypeclub/OpenPype/pull/2493) +- General: Modules import function output fix [\#2492](https://github.com/pypeclub/OpenPype/pull/2492) +- AE: fix hiding of alert window below Publish [\#2491](https://github.com/pypeclub/OpenPype/pull/2491) +- Workfiles tool: Files widget show files on first show [\#2488](https://github.com/pypeclub/OpenPype/pull/2488) +- General: Custom template paths filter fix [\#2483](https://github.com/pypeclub/OpenPype/pull/2483) +- Loader: Remove always on top flag in tray [\#2480](https://github.com/pypeclub/OpenPype/pull/2480) +- General: Anatomy does not return root envs as unicode [\#2465](https://github.com/pypeclub/OpenPype/pull/2465) +- Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456) + +**Merged pull requests:** + +- AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543) +- Maya: Remove Maya Look Assigner check on 
startup [\#2540](https://github.com/pypeclub/OpenPype/pull/2540) +- build\(deps\): bump shelljs from 0.8.4 to 0.8.5 in /website [\#2538](https://github.com/pypeclub/OpenPype/pull/2538) +- build\(deps\): bump follow-redirects from 1.14.4 to 1.14.7 in /website [\#2534](https://github.com/pypeclub/OpenPype/pull/2534) +- Nuke: Merge avalon's implementation into OpenPype [\#2514](https://github.com/pypeclub/OpenPype/pull/2514) +- Maya: Vray fix proxies look assignment [\#2392](https://github.com/pypeclub/OpenPype/pull/2392) +- Bump algoliasearch-helper from 3.4.4 to 3.6.2 in /website [\#2297](https://github.com/pypeclub/OpenPype/pull/2297) ## [3.7.0](https://github.com/pypeclub/OpenPype/tree/3.7.0) (2022-01-04) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.7.0-nightly.14...3.7.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.4...3.7.0) + +**Deprecated:** + +- General: Default modules hierarchy n2 [\#2368](https://github.com/pypeclub/OpenPype/pull/2368) + +### 📖 Documentation + +- docs\[website\]: Add Ellipse Studio \(logo\) as an OpenPype contributor [\#2324](https://github.com/pypeclub/OpenPype/pull/2324) + +**🆕 New features** + +- Settings UI use OpenPype styles [\#2296](https://github.com/pypeclub/OpenPype/pull/2296) +- Store typed version dependencies for workfiles [\#2192](https://github.com/pypeclub/OpenPype/pull/2192) +- OpenPypeV3: add key task type, task shortname and user to path templating construction [\#2157](https://github.com/pypeclub/OpenPype/pull/2157) +- Nuke: Alembic model workflow [\#2140](https://github.com/pypeclub/OpenPype/pull/2140) +- TVPaint: Load workfile from published. [\#1980](https://github.com/pypeclub/OpenPype/pull/1980) + +**🚀 Enhancements** + +- General: Workdir extra folders [\#2462](https://github.com/pypeclub/OpenPype/pull/2462) +- Photoshop: New style validations for New publisher [\#2429](https://github.com/pypeclub/OpenPype/pull/2429) +- General: Environment variables groups [\#2424](https://github.com/pypeclub/OpenPype/pull/2424) +- Unreal: Dynamic menu created in Python [\#2422](https://github.com/pypeclub/OpenPype/pull/2422) +- Settings UI: Hyperlinks to settings [\#2420](https://github.com/pypeclub/OpenPype/pull/2420) +- Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419) +- TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418) +- General: Run applications as separate processes under linux [\#2408](https://github.com/pypeclub/OpenPype/pull/2408) +- Ftrack: Check existence of object type on recreation [\#2404](https://github.com/pypeclub/OpenPype/pull/2404) +- Enhancement: Global cleanup plugin that explicitly remove paths from context [\#2402](https://github.com/pypeclub/OpenPype/pull/2402) +- General: MongoDB ability to specify replica set groups [\#2401](https://github.com/pypeclub/OpenPype/pull/2401) +- Flame: moving `utility_scripts` to api folder also with `scripts` [\#2385](https://github.com/pypeclub/OpenPype/pull/2385) +- Centos 7 dependency compatibility [\#2384](https://github.com/pypeclub/OpenPype/pull/2384) +- Enhancement: Settings: Use project settings values from another project [\#2382](https://github.com/pypeclub/OpenPype/pull/2382) +- Blender 3: Support auto install for new blender version [\#2377](https://github.com/pypeclub/OpenPype/pull/2377) +- Maya add render image path to settings [\#2375](https://github.com/pypeclub/OpenPype/pull/2375) +- Settings: Webpublisher in hosts enum 
[\#2367](https://github.com/pypeclub/OpenPype/pull/2367) +- Hiero: python3 compatibility [\#2365](https://github.com/pypeclub/OpenPype/pull/2365) +- Burnins: Be able recognize mxf OPAtom format [\#2361](https://github.com/pypeclub/OpenPype/pull/2361) +- Maya: Add is\_static\_image\_plane and is\_in\_all\_views option in imagePlaneLoader [\#2356](https://github.com/pypeclub/OpenPype/pull/2356) +- Local settings: Copyable studio paths [\#2349](https://github.com/pypeclub/OpenPype/pull/2349) +- Assets Widget: Clear model on project change [\#2345](https://github.com/pypeclub/OpenPype/pull/2345) +- General: OpenPype default modules hierarchy [\#2338](https://github.com/pypeclub/OpenPype/pull/2338) +- TVPaint: Move implementation to OpenPype [\#2336](https://github.com/pypeclub/OpenPype/pull/2336) +- General: FFprobe error exception contain original error message [\#2328](https://github.com/pypeclub/OpenPype/pull/2328) +- Resolve: Add experimental button to menu [\#2325](https://github.com/pypeclub/OpenPype/pull/2325) +- Hiero: Add experimental tools action [\#2323](https://github.com/pypeclub/OpenPype/pull/2323) +- Input links: Cleanup and unification of differences [\#2322](https://github.com/pypeclub/OpenPype/pull/2322) +- General: Don't validate vendor bin with executing them [\#2317](https://github.com/pypeclub/OpenPype/pull/2317) +- General: Multilayer EXRs support [\#2315](https://github.com/pypeclub/OpenPype/pull/2315) +- General: Run process log stderr as info log level [\#2309](https://github.com/pypeclub/OpenPype/pull/2309) +- General: Reduce vendor imports [\#2305](https://github.com/pypeclub/OpenPype/pull/2305) +- Tools: Cleanup of unused classes [\#2304](https://github.com/pypeclub/OpenPype/pull/2304) +- Project Manager: Added ability to delete project [\#2298](https://github.com/pypeclub/OpenPype/pull/2298) +- Ftrack: Synchronize input links [\#2287](https://github.com/pypeclub/OpenPype/pull/2287) +- StandalonePublisher: Remove unused plugin ExtractHarmonyZip [\#2277](https://github.com/pypeclub/OpenPype/pull/2277) +- Ftrack: Support multiple reviews [\#2271](https://github.com/pypeclub/OpenPype/pull/2271) +- Ftrack: Remove unused clean component plugin [\#2269](https://github.com/pypeclub/OpenPype/pull/2269) +- Royal Render: Support for rr channels in separate dirs [\#2268](https://github.com/pypeclub/OpenPype/pull/2268) +- Houdini: Add experimental tools action [\#2267](https://github.com/pypeclub/OpenPype/pull/2267) +- Nuke: extract baked review videos presets [\#2248](https://github.com/pypeclub/OpenPype/pull/2248) +- TVPaint: Workers rendering [\#2209](https://github.com/pypeclub/OpenPype/pull/2209) +- OpenPypeV3: Add key parent asset to path templating construction [\#2186](https://github.com/pypeclub/OpenPype/pull/2186) + +**🐛 Bug fixes** + +- TVPaint: Create render layer dialog is in front [\#2471](https://github.com/pypeclub/OpenPype/pull/2471) +- Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428) +- PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417) +- Settings UI: Breadcrumbs path does not create new entities [\#2416](https://github.com/pypeclub/OpenPype/pull/2416) +- AfterEffects: Variant 2022 is in defaults but missing in schemas [\#2412](https://github.com/pypeclub/OpenPype/pull/2412) +- Nuke: baking representations was not additive [\#2406](https://github.com/pypeclub/OpenPype/pull/2406) +- General: Fix access to environments from default settings 
[\#2403](https://github.com/pypeclub/OpenPype/pull/2403) +- Fix: Placeholder Input color set fix [\#2399](https://github.com/pypeclub/OpenPype/pull/2399) +- Settings: Fix state change of wrapper label [\#2396](https://github.com/pypeclub/OpenPype/pull/2396) +- Flame: fix ftrack publisher [\#2381](https://github.com/pypeclub/OpenPype/pull/2381) +- hiero: solve custom ocio path [\#2379](https://github.com/pypeclub/OpenPype/pull/2379) +- hiero: fix workio and flatten [\#2378](https://github.com/pypeclub/OpenPype/pull/2378) +- Nuke: fixing menu re-drawing during context change [\#2374](https://github.com/pypeclub/OpenPype/pull/2374) +- Webpublisher: Fix assignment of families of TVpaint instances [\#2373](https://github.com/pypeclub/OpenPype/pull/2373) +- Nuke: fixing node name based on switched asset name [\#2369](https://github.com/pypeclub/OpenPype/pull/2369) +- JobQueue: Fix loading of settings [\#2362](https://github.com/pypeclub/OpenPype/pull/2362) +- Tools: Placeholder color [\#2359](https://github.com/pypeclub/OpenPype/pull/2359) +- Launcher: Minimize button on MacOs [\#2355](https://github.com/pypeclub/OpenPype/pull/2355) +- StandalonePublisher: Fix import of constant [\#2354](https://github.com/pypeclub/OpenPype/pull/2354) +- Houdini: Fix HDA creation [\#2350](https://github.com/pypeclub/OpenPype/pull/2350) +- Adobe products show issue [\#2347](https://github.com/pypeclub/OpenPype/pull/2347) +- Maya Look Assigner: Fix Python 3 compatibility [\#2343](https://github.com/pypeclub/OpenPype/pull/2343) +- Remove wrongly used host for hook [\#2342](https://github.com/pypeclub/OpenPype/pull/2342) +- Tools: Use Qt context on tools show [\#2340](https://github.com/pypeclub/OpenPype/pull/2340) +- Flame: Fix default argument value in custom dictionary [\#2339](https://github.com/pypeclub/OpenPype/pull/2339) +- Timers Manager: Disable auto stop timer on linux platform [\#2334](https://github.com/pypeclub/OpenPype/pull/2334) +- nuke: bake preset single input exception [\#2331](https://github.com/pypeclub/OpenPype/pull/2331) +- Hiero: fixing multiple templates at a hierarchy parent [\#2330](https://github.com/pypeclub/OpenPype/pull/2330) +- Fix - provider icons are pulled from a folder [\#2326](https://github.com/pypeclub/OpenPype/pull/2326) +- InputLinks: Typo in "inputLinks" key [\#2314](https://github.com/pypeclub/OpenPype/pull/2314) +- Deadline timeout and logging [\#2312](https://github.com/pypeclub/OpenPype/pull/2312) +- nuke: do not multiply representation on class method [\#2311](https://github.com/pypeclub/OpenPype/pull/2311) +- Workfiles tool: Fix task formatting [\#2306](https://github.com/pypeclub/OpenPype/pull/2306) +- Delivery: Fix delivery paths created on windows [\#2302](https://github.com/pypeclub/OpenPype/pull/2302) +- Maya: Deadline - fix limit groups [\#2295](https://github.com/pypeclub/OpenPype/pull/2295) +- Royal Render: Fix plugin order and OpenPype auto-detection [\#2291](https://github.com/pypeclub/OpenPype/pull/2291) +- New Publisher: Fix mapping of indexes [\#2285](https://github.com/pypeclub/OpenPype/pull/2285) +- Alternate site for site sync doesnt work for sequences [\#2284](https://github.com/pypeclub/OpenPype/pull/2284) +- FFmpeg: Execute ffprobe using list of arguments instead of string command [\#2281](https://github.com/pypeclub/OpenPype/pull/2281) +- Nuke: Anatomy fill data use task as dictionary [\#2278](https://github.com/pypeclub/OpenPype/pull/2278) +- Bug: fix variable name \_asset\_id in workfiles application 
[\#2274](https://github.com/pypeclub/OpenPype/pull/2274) +- Version handling fixes [\#2272](https://github.com/pypeclub/OpenPype/pull/2272) + +**Merged pull requests:** + +- Maya: Replaced PATH usage with vendored oiio path for maketx utility [\#2405](https://github.com/pypeclub/OpenPype/pull/2405) +- \[Fix\]\[MAYA\] Handle message type attribute within CollectLook [\#2394](https://github.com/pypeclub/OpenPype/pull/2394) +- Add validator to check correct version of extension for PS and AE [\#2387](https://github.com/pypeclub/OpenPype/pull/2387) +- Maya: configurable model top level validation [\#2321](https://github.com/pypeclub/OpenPype/pull/2321) +- Create test publish class for After Effects [\#2270](https://github.com/pypeclub/OpenPype/pull/2270) ## [3.6.4](https://github.com/pypeclub/OpenPype/tree/3.6.4) (2021-11-23) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.7.0-nightly.1...3.6.4) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.3...3.6.4) + +**🐛 Bug fixes** + +- Nuke: inventory update removes all loaded read nodes [\#2294](https://github.com/pypeclub/OpenPype/pull/2294) ## [3.6.3](https://github.com/pypeclub/OpenPype/tree/3.6.3) (2021-11-19) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.3-nightly.1...3.6.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.2...3.6.3) + +**🐛 Bug fixes** + +- Deadline: Fix publish targets [\#2280](https://github.com/pypeclub/OpenPype/pull/2280) ## [3.6.2](https://github.com/pypeclub/OpenPype/tree/3.6.2) (2021-11-18) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.2-nightly.2...3.6.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.1...3.6.2) + +**🚀 Enhancements** + +- Tools: Assets widget [\#2265](https://github.com/pypeclub/OpenPype/pull/2265) +- SceneInventory: Choose loader in asset switcher [\#2262](https://github.com/pypeclub/OpenPype/pull/2262) +- Style: New fonts in OpenPype style [\#2256](https://github.com/pypeclub/OpenPype/pull/2256) +- Tools: SceneInventory in OpenPype [\#2255](https://github.com/pypeclub/OpenPype/pull/2255) +- Tools: Tasks widget [\#2251](https://github.com/pypeclub/OpenPype/pull/2251) +- Tools: Creator in OpenPype [\#2244](https://github.com/pypeclub/OpenPype/pull/2244) +- Added endpoint for configured extensions [\#2221](https://github.com/pypeclub/OpenPype/pull/2221) + +**🐛 Bug fixes** + +- Tools: Parenting of tools in Nuke and Hiero [\#2266](https://github.com/pypeclub/OpenPype/pull/2266) +- limiting validator to specific editorial hosts [\#2264](https://github.com/pypeclub/OpenPype/pull/2264) +- Tools: Select Context dialog attribute fix [\#2261](https://github.com/pypeclub/OpenPype/pull/2261) +- Maya: Render publishing fails on linux [\#2260](https://github.com/pypeclub/OpenPype/pull/2260) +- LookAssigner: Fix tool reopen [\#2259](https://github.com/pypeclub/OpenPype/pull/2259) +- Standalone: editorial not publishing thumbnails on all subsets [\#2258](https://github.com/pypeclub/OpenPype/pull/2258) +- Burnins: Support mxf metadata [\#2247](https://github.com/pypeclub/OpenPype/pull/2247) +- Maya: Support for configurable AOV separator characters [\#2197](https://github.com/pypeclub/OpenPype/pull/2197) +- Maya: texture colorspace modes in looks [\#2195](https://github.com/pypeclub/OpenPype/pull/2195) ## [3.6.1](https://github.com/pypeclub/OpenPype/tree/3.6.1) (2021-11-16) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.1-nightly.1...3.6.1) +[Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.6.0...3.6.1) + +**🐛 Bug fixes** + +- Loader doesn't allow changing of version before loading [\#2254](https://github.com/pypeclub/OpenPype/pull/2254) ## [3.6.0](https://github.com/pypeclub/OpenPype/tree/3.6.0) (2021-11-15) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.6.0-nightly.6...3.6.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.5.0...3.6.0) + +### 📖 Documentation + +- Add alternative sites for Site Sync [\#2206](https://github.com/pypeclub/OpenPype/pull/2206) +- Add command line way of running site sync server [\#2188](https://github.com/pypeclub/OpenPype/pull/2188) + +**🆕 New features** + +- Add validate active site button to sync queue on a project [\#2176](https://github.com/pypeclub/OpenPype/pull/2176) +- Maya : Colorspace configuration [\#2170](https://github.com/pypeclub/OpenPype/pull/2170) +- Blender: Added support for audio [\#2168](https://github.com/pypeclub/OpenPype/pull/2168) +- Flame: a host basic integration [\#2165](https://github.com/pypeclub/OpenPype/pull/2165) +- Houdini: simple HDA workflow [\#2072](https://github.com/pypeclub/OpenPype/pull/2072) +- Basic Royal Render Integration ✨ [\#2061](https://github.com/pypeclub/OpenPype/pull/2061) +- Camera handling between Blender and Unreal [\#1988](https://github.com/pypeclub/OpenPype/pull/1988) +- switch PyQt5 for PySide2 [\#1744](https://github.com/pypeclub/OpenPype/pull/1744) + +**🚀 Enhancements** + +- Tools: Subset manager in OpenPype [\#2243](https://github.com/pypeclub/OpenPype/pull/2243) +- General: Skip module directories without init file [\#2239](https://github.com/pypeclub/OpenPype/pull/2239) +- General: Static interfaces [\#2238](https://github.com/pypeclub/OpenPype/pull/2238) +- Style: Fix transparent image in style [\#2235](https://github.com/pypeclub/OpenPype/pull/2235) +- Add a "following workfile versioning" option on publish [\#2225](https://github.com/pypeclub/OpenPype/pull/2225) +- Modules: Module can add cli commands [\#2224](https://github.com/pypeclub/OpenPype/pull/2224) +- Webpublisher: Separate webpublisher logic [\#2222](https://github.com/pypeclub/OpenPype/pull/2222) +- Add both side availability on Site Sync sites to Loader [\#2220](https://github.com/pypeclub/OpenPype/pull/2220) +- Tools: Center loader and library loader on show [\#2219](https://github.com/pypeclub/OpenPype/pull/2219) +- Maya : Validate shape zero [\#2212](https://github.com/pypeclub/OpenPype/pull/2212) +- Maya : validate unique names [\#2211](https://github.com/pypeclub/OpenPype/pull/2211) +- Tools: OpenPype stylesheet in workfiles tool [\#2208](https://github.com/pypeclub/OpenPype/pull/2208) +- Ftrack: Replace Queue with deque in event handlers logic [\#2204](https://github.com/pypeclub/OpenPype/pull/2204) +- Tools: New select context dialog [\#2200](https://github.com/pypeclub/OpenPype/pull/2200) +- Maya : Validate mesh ngons [\#2199](https://github.com/pypeclub/OpenPype/pull/2199) +- Dirmap in Nuke [\#2198](https://github.com/pypeclub/OpenPype/pull/2198) +- Delivery: Check 'frame' key in template for sequence delivery [\#2196](https://github.com/pypeclub/OpenPype/pull/2196) +- Settings: Site sync project settings improvement [\#2193](https://github.com/pypeclub/OpenPype/pull/2193) +- Usage of tools code [\#2185](https://github.com/pypeclub/OpenPype/pull/2185) +- Settings: Dictionary based on project roots [\#2184](https://github.com/pypeclub/OpenPype/pull/2184) +- Subset name: Be able to pass asset document to get subset 
name [\#2179](https://github.com/pypeclub/OpenPype/pull/2179) +- Tools: Experimental tools [\#2167](https://github.com/pypeclub/OpenPype/pull/2167) +- Loader: Refactor and use OpenPype stylesheets [\#2166](https://github.com/pypeclub/OpenPype/pull/2166) +- Add loader for linked smart objects in photoshop [\#2149](https://github.com/pypeclub/OpenPype/pull/2149) +- Burnins: DNxHD profiles handling [\#2142](https://github.com/pypeclub/OpenPype/pull/2142) +- Tools: Single access point for host tools [\#2139](https://github.com/pypeclub/OpenPype/pull/2139) + +**🐛 Bug fixes** + +- Ftrack: Sync project ftrack id cache issue [\#2250](https://github.com/pypeclub/OpenPype/pull/2250) +- Ftrack: Session creation and Prepare project [\#2245](https://github.com/pypeclub/OpenPype/pull/2245) +- Added queue for studio processing in PS [\#2237](https://github.com/pypeclub/OpenPype/pull/2237) +- Python 2: Unicode to string conversion [\#2236](https://github.com/pypeclub/OpenPype/pull/2236) +- Fix - enum for color coding in PS [\#2234](https://github.com/pypeclub/OpenPype/pull/2234) +- Pyblish Tool: Fix targets handling [\#2232](https://github.com/pypeclub/OpenPype/pull/2232) +- Ftrack: Base event fix of 'get\_project\_from\_entity' method [\#2214](https://github.com/pypeclub/OpenPype/pull/2214) +- Maya : multiple subsets review broken [\#2210](https://github.com/pypeclub/OpenPype/pull/2210) +- Fix - different command used for Linux and Mac OS [\#2207](https://github.com/pypeclub/OpenPype/pull/2207) +- Tools: Workfiles tool don't use avalon widgets [\#2205](https://github.com/pypeclub/OpenPype/pull/2205) +- Ftrack: Fill missing ftrack id on mongo project [\#2203](https://github.com/pypeclub/OpenPype/pull/2203) +- Project Manager: Fix copying of tasks [\#2191](https://github.com/pypeclub/OpenPype/pull/2191) +- StandalonePublisher: Source validator don't expect representations [\#2190](https://github.com/pypeclub/OpenPype/pull/2190) +- Blender: Fix trying to pack an image when the shader node has no texture [\#2183](https://github.com/pypeclub/OpenPype/pull/2183) +- Maya: review viewport settings [\#2177](https://github.com/pypeclub/OpenPype/pull/2177) +- MacOS: Launching of applications may cause Permissions error [\#2175](https://github.com/pypeclub/OpenPype/pull/2175) +- Maya: Aspect ratio [\#2174](https://github.com/pypeclub/OpenPype/pull/2174) +- Blender: Fix 'Deselect All' with object not in 'Object Mode' [\#2163](https://github.com/pypeclub/OpenPype/pull/2163) +- Tools: Stylesheets are applied after tool show [\#2161](https://github.com/pypeclub/OpenPype/pull/2161) +- Maya: Collect render - fix UNC path support 🐛 [\#2158](https://github.com/pypeclub/OpenPype/pull/2158) +- Maya: Fix hotbox broken by scriptsmenu [\#2151](https://github.com/pypeclub/OpenPype/pull/2151) +- Ftrack: Ignore save warnings exception in Prepare project action [\#2150](https://github.com/pypeclub/OpenPype/pull/2150) +- Loader thumbnails with smooth edges [\#2147](https://github.com/pypeclub/OpenPype/pull/2147) +- Added validator for source files for Standalone Publisher [\#2138](https://github.com/pypeclub/OpenPype/pull/2138) + +**Merged pull requests:** + +- Bump pillow from 8.2.0 to 8.3.2 [\#2162](https://github.com/pypeclub/OpenPype/pull/2162) +- Bump axios from 0.21.1 to 0.21.4 in /website [\#2059](https://github.com/pypeclub/OpenPype/pull/2059) ## [3.5.0](https://github.com/pypeclub/OpenPype/tree/3.5.0) (2021-10-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.5.0-nightly.8...3.5.0) +[Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.1...3.5.0) + +**Deprecated:** + +- Maya: Change mayaAscii family to mayaScene [\#2106](https://github.com/pypeclub/OpenPype/pull/2106) + +**🆕 New features** + +- Added project and task into context change message in Maya [\#2131](https://github.com/pypeclub/OpenPype/pull/2131) +- Add ExtractBurnin to photoshop review [\#2124](https://github.com/pypeclub/OpenPype/pull/2124) +- PYPE-1218 - changed namespace to contain subset name in Maya [\#2114](https://github.com/pypeclub/OpenPype/pull/2114) +- Added running configurable disk mapping command before start of OP [\#2091](https://github.com/pypeclub/OpenPype/pull/2091) +- SFTP provider [\#2073](https://github.com/pypeclub/OpenPype/pull/2073) +- Maya: Validate setdress top group [\#2068](https://github.com/pypeclub/OpenPype/pull/2068) +- Maya: Enable publishing render attrib sets \(e.g. V-Ray Displacement\) with model [\#1955](https://github.com/pypeclub/OpenPype/pull/1955) + +**🚀 Enhancements** + +- Maya: make rig validators configurable in settings [\#2137](https://github.com/pypeclub/OpenPype/pull/2137) +- Settings: Updated readme for entity types in settings [\#2132](https://github.com/pypeclub/OpenPype/pull/2132) +- Nuke: unified clip loader [\#2128](https://github.com/pypeclub/OpenPype/pull/2128) +- Settings UI: Project model refreshing and sorting [\#2104](https://github.com/pypeclub/OpenPype/pull/2104) +- Create Read From Rendered - Disable Relative paths by default [\#2093](https://github.com/pypeclub/OpenPype/pull/2093) +- Added choosing different dirmap mapping if workfile synched locally [\#2088](https://github.com/pypeclub/OpenPype/pull/2088) +- General: Remove IdleManager module [\#2084](https://github.com/pypeclub/OpenPype/pull/2084) +- Tray UI: Message box about missing settings defaults [\#2080](https://github.com/pypeclub/OpenPype/pull/2080) +- Tray UI: Show menu where first click happened [\#2079](https://github.com/pypeclub/OpenPype/pull/2079) +- Global: add global validators to settings [\#2078](https://github.com/pypeclub/OpenPype/pull/2078) +- Use CRF for burnin when available [\#2070](https://github.com/pypeclub/OpenPype/pull/2070) +- Project manager: Filter first item after selection of project [\#2069](https://github.com/pypeclub/OpenPype/pull/2069) +- Nuke: Adding `still` image family workflow [\#2064](https://github.com/pypeclub/OpenPype/pull/2064) +- Maya: validate authorized loaded plugins [\#2062](https://github.com/pypeclub/OpenPype/pull/2062) +- Tools: add support for pyenv on windows [\#2051](https://github.com/pypeclub/OpenPype/pull/2051) +- SyncServer: Dropbox Provider [\#1979](https://github.com/pypeclub/OpenPype/pull/1979) +- Burnin: Get data from context with defined keys. [\#1897](https://github.com/pypeclub/OpenPype/pull/1897) +- Timers manager: Get task time [\#1896](https://github.com/pypeclub/OpenPype/pull/1896) +- TVPaint: Option to stop timer on application exit. [\#1887](https://github.com/pypeclub/OpenPype/pull/1887) + +**🐛 Bug fixes** + +- Maya: fix model publishing [\#2130](https://github.com/pypeclub/OpenPype/pull/2130) +- Fix - oiiotool wasn't recognized even if present [\#2129](https://github.com/pypeclub/OpenPype/pull/2129) +- General: Disk mapping group [\#2120](https://github.com/pypeclub/OpenPype/pull/2120) +- Hiero: publishing effect first time makes wrong resources path [\#2115](https://github.com/pypeclub/OpenPype/pull/2115) +- Add startup script for Houdini Core. 
[\#2110](https://github.com/pypeclub/OpenPype/pull/2110) +- TVPaint: Behavior name of loop also accept repeat [\#2109](https://github.com/pypeclub/OpenPype/pull/2109) +- Ftrack: Project settings save custom attributes skip unknown attributes [\#2103](https://github.com/pypeclub/OpenPype/pull/2103) +- Blender: Fix NoneType error when animation\_data is missing for a rig [\#2101](https://github.com/pypeclub/OpenPype/pull/2101) +- Fix broken import in sftp provider [\#2100](https://github.com/pypeclub/OpenPype/pull/2100) +- Global: Fix docstring on publish plugin extract review [\#2097](https://github.com/pypeclub/OpenPype/pull/2097) +- Delivery Action Files Sequence fix [\#2096](https://github.com/pypeclub/OpenPype/pull/2096) +- General: Cloud mongo ca certificate issue [\#2095](https://github.com/pypeclub/OpenPype/pull/2095) +- TVPaint: Creator use context from workfile [\#2087](https://github.com/pypeclub/OpenPype/pull/2087) +- Blender: fix texture missing when publishing blend files [\#2085](https://github.com/pypeclub/OpenPype/pull/2085) +- General: Startup validations oiio tool path fix on linux [\#2083](https://github.com/pypeclub/OpenPype/pull/2083) +- Deadline: Collect deadline server does not check existence of deadline key [\#2082](https://github.com/pypeclub/OpenPype/pull/2082) +- Blender: fixed Curves with modifiers in Rigs [\#2081](https://github.com/pypeclub/OpenPype/pull/2081) +- Nuke UI scaling [\#2077](https://github.com/pypeclub/OpenPype/pull/2077) +- Maya: Fix multi-camera renders [\#2065](https://github.com/pypeclub/OpenPype/pull/2065) +- Fix Sync Queue when project disabled [\#2063](https://github.com/pypeclub/OpenPype/pull/2063) + +**Merged pull requests:** + +- Bump pywin32 from 300 to 301 [\#2086](https://github.com/pypeclub/OpenPype/pull/2086) ## [3.4.1](https://github.com/pypeclub/OpenPype/tree/3.4.1) (2021-09-23) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.4.1-nightly.1...3.4.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.0...3.4.1) + +**🆕 New features** + +- Settings: Flag project as deactivated and hide from tools' view [\#2008](https://github.com/pypeclub/OpenPype/pull/2008) + +**🚀 Enhancements** + +- General: Startup validations [\#2054](https://github.com/pypeclub/OpenPype/pull/2054) +- Nuke: proxy mode validator [\#2052](https://github.com/pypeclub/OpenPype/pull/2052) +- Ftrack: Removed ftrack interface [\#2049](https://github.com/pypeclub/OpenPype/pull/2049) +- Settings UI: Deffered set value on entity [\#2044](https://github.com/pypeclub/OpenPype/pull/2044) +- Loader: Families filtering [\#2043](https://github.com/pypeclub/OpenPype/pull/2043) +- Settings UI: Project view enhancements [\#2042](https://github.com/pypeclub/OpenPype/pull/2042) +- Settings for Nuke IncrementScriptVersion [\#2039](https://github.com/pypeclub/OpenPype/pull/2039) +- Loader & Library loader: Use tools from OpenPype [\#2038](https://github.com/pypeclub/OpenPype/pull/2038) +- Adding predefined project folders creation in PM [\#2030](https://github.com/pypeclub/OpenPype/pull/2030) +- WebserverModule: Removed interface of webserver module [\#2028](https://github.com/pypeclub/OpenPype/pull/2028) +- TimersManager: Removed interface of timers manager [\#2024](https://github.com/pypeclub/OpenPype/pull/2024) +- Feature Maya import asset from scene inventory [\#2018](https://github.com/pypeclub/OpenPype/pull/2018) + +**🐛 Bug fixes** + +- Timers manger: Typo fix [\#2058](https://github.com/pypeclub/OpenPype/pull/2058) +- Hiero: Editorial fixes 
[\#2057](https://github.com/pypeclub/OpenPype/pull/2057) +- Differentiate jpg sequences from thumbnail [\#2056](https://github.com/pypeclub/OpenPype/pull/2056) +- FFmpeg: Split command to list does not work [\#2046](https://github.com/pypeclub/OpenPype/pull/2046) +- Removed shell flag in subprocess call [\#2045](https://github.com/pypeclub/OpenPype/pull/2045) + +**Merged pull requests:** + +- Bump prismjs from 1.24.0 to 1.25.0 in /website [\#2050](https://github.com/pypeclub/OpenPype/pull/2050) ## [3.4.0](https://github.com/pypeclub/OpenPype/tree/3.4.0) (2021-09-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.4.0-nightly.6...3.4.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...3.4.0) + +### 📖 Documentation + +- Documentation: Ftrack launch argsuments update [\#2014](https://github.com/pypeclub/OpenPype/pull/2014) +- Nuke Quick Start / Tutorial [\#1952](https://github.com/pypeclub/OpenPype/pull/1952) +- Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821) + +**🆕 New features** + +- Nuke: Compatibility with Nuke 13 [\#2003](https://github.com/pypeclub/OpenPype/pull/2003) +- Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) +- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) +- Blender: Improved assets handling [\#1615](https://github.com/pypeclub/OpenPype/pull/1615) + +**🚀 Enhancements** + +- Added possibility to configure of synchronization of workfile version… [\#2041](https://github.com/pypeclub/OpenPype/pull/2041) +- General: Task types in profiles [\#2036](https://github.com/pypeclub/OpenPype/pull/2036) +- Console interpreter: Handle invalid sizes on initialization [\#2022](https://github.com/pypeclub/OpenPype/pull/2022) +- Ftrack: Show OpenPype versions in event server status [\#2019](https://github.com/pypeclub/OpenPype/pull/2019) +- General: Staging icon [\#2017](https://github.com/pypeclub/OpenPype/pull/2017) +- Ftrack: Sync to avalon actions have jobs [\#2015](https://github.com/pypeclub/OpenPype/pull/2015) +- Modules: Connect method is not required [\#2009](https://github.com/pypeclub/OpenPype/pull/2009) +- Settings UI: Number with configurable steps [\#2001](https://github.com/pypeclub/OpenPype/pull/2001) +- Moving project folder structure creation out of ftrack module \#1989 [\#1996](https://github.com/pypeclub/OpenPype/pull/1996) +- Configurable items for providers without Settings [\#1987](https://github.com/pypeclub/OpenPype/pull/1987) +- Global: Example addons [\#1986](https://github.com/pypeclub/OpenPype/pull/1986) +- Standalone Publisher: Extract harmony zip handle workfile template [\#1982](https://github.com/pypeclub/OpenPype/pull/1982) +- Settings UI: Number sliders [\#1978](https://github.com/pypeclub/OpenPype/pull/1978) +- Workfiles: Support more workfile templates [\#1966](https://github.com/pypeclub/OpenPype/pull/1966) +- Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964) +- Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963) +- Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962) +- Global: Settings defined by Addons/Modules [\#1959](https://github.com/pypeclub/OpenPype/pull/1959) +- CI: change release numbering triggers [\#1954](https://github.com/pypeclub/OpenPype/pull/1954) +- Global: Avalon Host name 
collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949) +- Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948) +- Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) +- OpenPype: Add version validation and `--headless` mode and update progress 🔄 [\#1939](https://github.com/pypeclub/OpenPype/pull/1939) +- \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) +- Disregard publishing time. [\#1888](https://github.com/pypeclub/OpenPype/pull/1888) +- Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) + +**🐛 Bug fixes** + +- Workfiles tool: Task selection [\#2040](https://github.com/pypeclub/OpenPype/pull/2040) +- Ftrack: Delete old versions missing settings key [\#2037](https://github.com/pypeclub/OpenPype/pull/2037) +- Nuke: typo on a button [\#2034](https://github.com/pypeclub/OpenPype/pull/2034) +- Hiero: Fix "none" named tags [\#2033](https://github.com/pypeclub/OpenPype/pull/2033) +- FFmpeg: Subprocess arguments as list [\#2032](https://github.com/pypeclub/OpenPype/pull/2032) +- General: Fix Python 2 breaking line [\#2016](https://github.com/pypeclub/OpenPype/pull/2016) +- Bugfix/webpublisher task type [\#2006](https://github.com/pypeclub/OpenPype/pull/2006) +- Nuke thumbnails generated from middle of the sequence [\#1992](https://github.com/pypeclub/OpenPype/pull/1992) +- Nuke: last version from path gets correct version [\#1990](https://github.com/pypeclub/OpenPype/pull/1990) +- nuke, resolve, hiero: precollector order lest then 0.5 [\#1984](https://github.com/pypeclub/OpenPype/pull/1984) +- Last workfile with multiple work templates [\#1981](https://github.com/pypeclub/OpenPype/pull/1981) +- Collectors order [\#1977](https://github.com/pypeclub/OpenPype/pull/1977) +- Stop timer was within validator order range. 
[\#1975](https://github.com/pypeclub/OpenPype/pull/1975) +- Ftrack: arrow submodule has https url source [\#1974](https://github.com/pypeclub/OpenPype/pull/1974) +- Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972) +- Deadline: Houdini plugins in different hierarchy [\#1970](https://github.com/pypeclub/OpenPype/pull/1970) +- Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967) +- Global: ExtractJpeg can handle filepaths with spaces [\#1961](https://github.com/pypeclub/OpenPype/pull/1961) +- Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960) + +**Merged pull requests:** + +- Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958) +- Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933) ## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...3.3.1) + +**🐛 Bug fixes** + +- TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946) +- Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945) +- standalone: editorial shared object problem [\#1941](https://github.com/pypeclub/OpenPype/pull/1941) +- Bugfix nuke deadline app name [\#1928](https://github.com/pypeclub/OpenPype/pull/1928) ## [3.3.0](https://github.com/pypeclub/OpenPype/tree/3.3.0) (2021-08-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.0-nightly.11...3.3.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.2.0...3.3.0) + +### 📖 Documentation + +- Standalone Publish of textures family [\#1834](https://github.com/pypeclub/OpenPype/pull/1834) + +**🆕 New features** + +- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) +- Maya: Scene patching 🩹on submission to Deadline [\#1923](https://github.com/pypeclub/OpenPype/pull/1923) +- Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901) + +**🚀 Enhancements** + +- Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940) +- Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927) +- Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925) +- Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920) +- Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919) +- submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) +- Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900) +- Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899) +- Support nested studio plugins paths. [\#1898](https://github.com/pypeclub/OpenPype/pull/1898) +- Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892) +- Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891) +- Expose stop timer through rest api. 
[\#1886](https://github.com/pypeclub/OpenPype/pull/1886) +- TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) +- Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882) +- Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) +- Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) +- Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) +- Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) +- Anatomy schema validation [\#1864](https://github.com/pypeclub/OpenPype/pull/1864) +- Ftrack prepare project structure [\#1861](https://github.com/pypeclub/OpenPype/pull/1861) +- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) +- Independent general environments [\#1853](https://github.com/pypeclub/OpenPype/pull/1853) +- TVPaint Start Frame [\#1844](https://github.com/pypeclub/OpenPype/pull/1844) +- Ftrack push attributes action adds traceback to job [\#1843](https://github.com/pypeclub/OpenPype/pull/1843) +- Prepare project action enhance [\#1838](https://github.com/pypeclub/OpenPype/pull/1838) +- nuke: settings create missing default subsets [\#1829](https://github.com/pypeclub/OpenPype/pull/1829) +- Update poetry lock [\#1823](https://github.com/pypeclub/OpenPype/pull/1823) +- Settings: settings for plugins [\#1819](https://github.com/pypeclub/OpenPype/pull/1819) +- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) +- Maya: Deadline custom settings [\#1797](https://github.com/pypeclub/OpenPype/pull/1797) +- Maya: Shader name validation [\#1762](https://github.com/pypeclub/OpenPype/pull/1762) + +**🐛 Bug fixes** + +- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) +- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) +- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) +- Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926) +- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922) +- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) +- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) +- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) +- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) +- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) +- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) +- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) +- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) +- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) +- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) +- Normalize path 
returned from Workfiles. [\#1880](https://github.com/pypeclub/OpenPype/pull/1880) +- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) +- imageio: fix grouping [\#1856](https://github.com/pypeclub/OpenPype/pull/1856) +- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) +- publisher: missing version in subset prop [\#1849](https://github.com/pypeclub/OpenPype/pull/1849) +- Ftrack type error fix in sync to avalon event handler [\#1845](https://github.com/pypeclub/OpenPype/pull/1845) +- Nuke: updating effects subset fail [\#1841](https://github.com/pypeclub/OpenPype/pull/1841) +- nuke: write render node skipped with crop [\#1836](https://github.com/pypeclub/OpenPype/pull/1836) +- Project folder structure overrides [\#1813](https://github.com/pypeclub/OpenPype/pull/1813) +- Maya: fix yeti settings path in extractor [\#1809](https://github.com/pypeclub/OpenPype/pull/1809) +- Failsafe for cross project containers. [\#1806](https://github.com/pypeclub/OpenPype/pull/1806) +- Houdini colector formatting keys fix [\#1802](https://github.com/pypeclub/OpenPype/pull/1802) +- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) +- Application launch stdout/stderr in GUI build [\#1684](https://github.com/pypeclub/OpenPype/pull/1684) +- Nuke: re-use instance nodes output path [\#1577](https://github.com/pypeclub/OpenPype/pull/1577) + +**Merged pull requests:** + +- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) +- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) +- Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) +- Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) +- PS, AE - send actual context when another webserver is running [\#1811](https://github.com/pypeclub/OpenPype/pull/1811) ## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.4...3.2.0) + +### 📖 Documentation + +- Fix: staging and `--use-version` option [\#1786](https://github.com/pypeclub/OpenPype/pull/1786) +- Subset template and TVPaint subset template docs [\#1717](https://github.com/pypeclub/OpenPype/pull/1717) +- Overscan color extract review [\#1701](https://github.com/pypeclub/OpenPype/pull/1701) + +**🚀 Enhancements** + +- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) +- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) +- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795) +- Settings conditional dict [\#1777](https://github.com/pypeclub/OpenPype/pull/1777) +- Settings application use python 2 only where needed [\#1776](https://github.com/pypeclub/OpenPype/pull/1776) +- Settings UI copy/paste [\#1769](https://github.com/pypeclub/OpenPype/pull/1769) +- Workfile tool widths [\#1766](https://github.com/pypeclub/OpenPype/pull/1766) +- Push hierarchical attributes care about task parent changes [\#1763](https://github.com/pypeclub/OpenPype/pull/1763) +- Application executables with environment variables 
[\#1757](https://github.com/pypeclub/OpenPype/pull/1757) +- Deadline: Nuke submission additional attributes [\#1756](https://github.com/pypeclub/OpenPype/pull/1756) +- Settings schema without prefill [\#1753](https://github.com/pypeclub/OpenPype/pull/1753) +- Settings Hosts enum [\#1739](https://github.com/pypeclub/OpenPype/pull/1739) +- Validate containers settings [\#1736](https://github.com/pypeclub/OpenPype/pull/1736) +- PS - added loader from sequence [\#1726](https://github.com/pypeclub/OpenPype/pull/1726) +- Autoupdate launcher [\#1725](https://github.com/pypeclub/OpenPype/pull/1725) +- Toggle Ftrack upload in StandalonePublisher [\#1708](https://github.com/pypeclub/OpenPype/pull/1708) +- Nuke: Prerender Frame Range by default [\#1699](https://github.com/pypeclub/OpenPype/pull/1699) +- Smoother edges of color triangle [\#1695](https://github.com/pypeclub/OpenPype/pull/1695) + +**🐛 Bug fixes** + +- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) +- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) +- Invitee email can be None which break the Ftrack commit. [\#1788](https://github.com/pypeclub/OpenPype/pull/1788) +- Otio unrelated error on import [\#1782](https://github.com/pypeclub/OpenPype/pull/1782) +- FFprobe streams order [\#1775](https://github.com/pypeclub/OpenPype/pull/1775) +- Fix - single file files are str only, cast it to list to count properly [\#1772](https://github.com/pypeclub/OpenPype/pull/1772) +- Environments in app executable for MacOS [\#1768](https://github.com/pypeclub/OpenPype/pull/1768) +- Project specific environments [\#1767](https://github.com/pypeclub/OpenPype/pull/1767) +- Settings UI with refresh button [\#1764](https://github.com/pypeclub/OpenPype/pull/1764) +- Standalone publisher thumbnail extractor fix [\#1761](https://github.com/pypeclub/OpenPype/pull/1761) +- Anatomy others templates don't cause crash [\#1758](https://github.com/pypeclub/OpenPype/pull/1758) +- Backend acre module commit update [\#1745](https://github.com/pypeclub/OpenPype/pull/1745) +- hiero: precollect instances failing when audio selected [\#1743](https://github.com/pypeclub/OpenPype/pull/1743) +- Hiero: creator instance error [\#1742](https://github.com/pypeclub/OpenPype/pull/1742) +- Nuke: fixing render creator for no selection format failing [\#1741](https://github.com/pypeclub/OpenPype/pull/1741) +- StandalonePublisher: failing collector for editorial [\#1738](https://github.com/pypeclub/OpenPype/pull/1738) +- Local settings UI crash on missing defaults [\#1737](https://github.com/pypeclub/OpenPype/pull/1737) +- TVPaint white background on thumbnail [\#1735](https://github.com/pypeclub/OpenPype/pull/1735) +- Ftrack missing custom attribute message [\#1734](https://github.com/pypeclub/OpenPype/pull/1734) +- Launcher project changes [\#1733](https://github.com/pypeclub/OpenPype/pull/1733) +- Ftrack sync status [\#1732](https://github.com/pypeclub/OpenPype/pull/1732) +- TVPaint use layer name for default variant [\#1724](https://github.com/pypeclub/OpenPype/pull/1724) +- Default subset template for TVPaint review and workfile families [\#1716](https://github.com/pypeclub/OpenPype/pull/1716) +- Maya: Extract review hotfix [\#1714](https://github.com/pypeclub/OpenPype/pull/1714) +- Settings: Imageio improving granularity [\#1711](https://github.com/pypeclub/OpenPype/pull/1711) +- Application without executables [\#1679](https://github.com/pypeclub/OpenPype/pull/1679) +- 
Unreal: launching on Linux [\#1672](https://github.com/pypeclub/OpenPype/pull/1672) + +**Merged pull requests:** + +- Bump prismjs from 1.23.0 to 1.24.0 in /website [\#1773](https://github.com/pypeclub/OpenPype/pull/1773) +- TVPaint ftrack family [\#1755](https://github.com/pypeclub/OpenPype/pull/1755) ## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) @@ -246,7 +1864,7 @@ ## [2.18.3](https://github.com/pypeclub/OpenPype/tree/2.18.3) (2021-06-23) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.2...2.18.3) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.2...2.18.3) ## [2.18.2](https://github.com/pypeclub/OpenPype/tree/2.18.2) (2021-06-16) @@ -254,9 +1872,47 @@ ## [3.1.0](https://github.com/pypeclub/OpenPype/tree/3.1.0) (2021-06-15) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.1.0-nightly.4...3.1.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.0.0...3.1.0) -# Changelog +### 📖 Documentation + +- Feature Slack integration [\#1657](https://github.com/pypeclub/OpenPype/pull/1657) + +**🚀 Enhancements** + +- Log Viewer with OpenPype style [\#1703](https://github.com/pypeclub/OpenPype/pull/1703) +- Scrolling in OpenPype info widget [\#1702](https://github.com/pypeclub/OpenPype/pull/1702) +- OpenPype style in modules [\#1694](https://github.com/pypeclub/OpenPype/pull/1694) +- Sort applications and tools alphabetically in Settings UI [\#1689](https://github.com/pypeclub/OpenPype/pull/1689) +- \#683 - Validate Frame Range in Standalone Publisher [\#1683](https://github.com/pypeclub/OpenPype/pull/1683) +- Hiero: old container versions identify with red color [\#1682](https://github.com/pypeclub/OpenPype/pull/1682) +- Project Manger: Default name column width [\#1669](https://github.com/pypeclub/OpenPype/pull/1669) +- Remove outline in stylesheet [\#1667](https://github.com/pypeclub/OpenPype/pull/1667) +- TVPaint: Creator take layer name as default value for subset variant [\#1663](https://github.com/pypeclub/OpenPype/pull/1663) +- TVPaint custom subset template [\#1662](https://github.com/pypeclub/OpenPype/pull/1662) +- Editorial: conform assets validator [\#1659](https://github.com/pypeclub/OpenPype/pull/1659) +- Nuke - Publish simplification [\#1653](https://github.com/pypeclub/OpenPype/pull/1653) +- \#1333 - added tooltip hints to Pyblish buttons [\#1649](https://github.com/pypeclub/OpenPype/pull/1649) + +**🐛 Bug fixes** + +- Nuke: broken publishing rendered frames [\#1707](https://github.com/pypeclub/OpenPype/pull/1707) +- Standalone publisher Thumbnail export args [\#1705](https://github.com/pypeclub/OpenPype/pull/1705) +- Bad zip can break OpenPype start [\#1691](https://github.com/pypeclub/OpenPype/pull/1691) +- Hiero: published whole edit mov [\#1687](https://github.com/pypeclub/OpenPype/pull/1687) +- Ftrack subprocess handle of stdout/stderr [\#1675](https://github.com/pypeclub/OpenPype/pull/1675) +- Settings list race condifiton and mutable dict list conversion [\#1671](https://github.com/pypeclub/OpenPype/pull/1671) +- Mac launch arguments fix [\#1660](https://github.com/pypeclub/OpenPype/pull/1660) +- Fix missing dbm python module [\#1652](https://github.com/pypeclub/OpenPype/pull/1652) +- Transparent branches in view on Mac [\#1648](https://github.com/pypeclub/OpenPype/pull/1648) +- Add asset on task item [\#1646](https://github.com/pypeclub/OpenPype/pull/1646) +- Project manager save and queue [\#1645](https://github.com/pypeclub/OpenPype/pull/1645) +- New project anatomy 
values [\#1644](https://github.com/pypeclub/OpenPype/pull/1644) +- Farm publishing: check if published items do exist [\#1573](https://github.com/pypeclub/OpenPype/pull/1573) + +**Merged pull requests:** + +- Bump normalize-url from 4.5.0 to 4.5.1 in /website [\#1686](https://github.com/pypeclub/OpenPype/pull/1686) ## [3.0.0](https://github.com/pypeclub/openpype/tree/3.0.0) @@ -269,12 +1925,12 @@ - Easy to add Application versions. - Per Project Environment and plugin management. - Robust profile system for creating reviewables and burnins, with filtering based on Application, Task and data family. -- Configurable publish plugins. +- Configurable publish plugins. - Options to make any validator or extractor, optional or disabled. - Color Management is now unified under anatomy settings. - Subset naming and grouping is fully configurable. - All project attributes can now be set directly in OpenPype settings. -- Studio Setting can be locked to prevent unwanted artist changes. +- Studio Setting can be locked to prevent unwanted artist changes. - You can now add per project and per task type templates for workfile initialization in most hosts. - Too many other individual configurable option to list in this changelog :) @@ -1032,8 +2688,6 @@ - Standalone Publisher: getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) -# Changelog - ## [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) (2020-11-15) [Full Changelog](https://github.com/pypeclub/pype/compare/2.13.5...2.13.6) @@ -1823,9 +3477,4 @@ A large cleanup release. Most of the change are under the hood. - _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner - - - - - - +\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* From 589ba87baa0446ed745af3cc11fbcf28cfe7f05a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 12:33:20 +0200 Subject: [PATCH 1680/2550] updating worlfows --- .github/workflows/prerelease.yml | 44 +++++++++++++-------------- .github/workflows/release.yml | 52 ++++++++++++++++---------------- 2 files changed, 48 insertions(+), 48 deletions(-) diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index 81d5f05b17..078f6c85bb 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -37,27 +37,27 @@ jobs: echo ::set-output name=next_tag::$RESULT - - name: "✏️ Generate full changelog" - if: steps.version_type.outputs.type != 'skip' - id: generate-full-changelog - uses: heinrichreimer/github-changelog-generator-action@v2.3 - with: - token: ${{ secrets.ADMIN_TOKEN }} - addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' - issues: false - issuesWoLabels: false - sinceTag: "3.12.0" - maxIssues: 100 - pullRequests: true - prWoLabels: false - author: false - unreleased: true - compareLink: true - stripGeneratorNotice: true - verbose: true - unreleasedLabel: ${{ 
steps.version.outputs.next_tag }} - excludeTagsRegex: "CI/.+" - releaseBranch: "main" + # - name: "✏️ Generate full changelog" + # if: steps.version_type.outputs.type != 'skip' + # id: generate-full-changelog + # uses: heinrichreimer/github-changelog-generator-action@v2.3 + # with: + # token: ${{ secrets.ADMIN_TOKEN }} + # addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' + # issues: false + # issuesWoLabels: false + # sinceTag: "3.12.0" + # maxIssues: 100 + # pullRequests: true + # prWoLabels: false + # author: false + # unreleased: true + # compareLink: true + # stripGeneratorNotice: true + # verbose: true + # unreleasedLabel: ${{ steps.version.outputs.next_tag }} + # excludeTagsRegex: "CI/.+" + # releaseBranch: "main" - name: "🖨️ Print changelog to console" if: steps.version_type.outputs.type != 'skip' @@ -85,7 +85,7 @@ jobs: tags: true unprotect_reviews: true - - name: 🔨 Merge main back to develop + - name: 🔨 Merge main back to develop uses: everlytic/branch-merge@1.1.0 if: steps.version_type.outputs.type != 'skip' with: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cc69e1643a..754f3d32d6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,7 +2,7 @@ name: Stable Release on: release: - types: + types: - prereleased jobs: @@ -13,7 +13,7 @@ jobs: steps: - name: 🚛 Checkout Code uses: actions/checkout@v2 - with: + with: fetch-depth: 0 - name: Set up Python @@ -33,27 +33,27 @@ jobs: echo ::set-output name=last_release::$LASTRELEASE echo ::set-output name=release_tag::$RESULT - - name: "✏️ Generate full changelog" - if: steps.version.outputs.release_tag != 'skip' - id: generate-full-changelog - uses: heinrichreimer/github-changelog-generator-action@v2.3 - with: - token: ${{ secrets.ADMIN_TOKEN }} - addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' - issues: false - issuesWoLabels: false - sinceTag: "3.12.0" - maxIssues: 100 - pullRequests: true - prWoLabels: false - author: false - unreleased: true - compareLink: true - stripGeneratorNotice: true - verbose: true - futureRelease: ${{ steps.version.outputs.release_tag }} - excludeTagsRegex: "CI/.+" - releaseBranch: "main" + # - name: "✏️ Generate full changelog" + # if: steps.version.outputs.release_tag != 'skip' + # id: generate-full-changelog + # uses: heinrichreimer/github-changelog-generator-action@v2.3 + # with: + # token: ${{ secrets.ADMIN_TOKEN }} + # addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: 
documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}' + # issues: false + # issuesWoLabels: false + # sinceTag: "3.12.0" + # maxIssues: 100 + # pullRequests: true + # prWoLabels: false + # author: false + # unreleased: true + # compareLink: true + # stripGeneratorNotice: true + # verbose: true + # futureRelease: ${{ steps.version.outputs.release_tag }} + # excludeTagsRegex: "CI/.+" + # releaseBranch: "main" - name: 💾 Commit and Tag id: git_commit @@ -73,8 +73,8 @@ jobs: token: ${{ secrets.ADMIN_TOKEN }} branch: main tags: true - unprotect_reviews: true - + unprotect_reviews: true + - name: "✏️ Generate last changelog" if: steps.version.outputs.release_tag != 'skip' id: generate-last-changelog @@ -114,7 +114,7 @@ jobs: with: tag: "${{ steps.version.outputs.current_version }}" - - name: 🔁 Merge main back to develop + - name: 🔁 Merge main back to develop if: steps.version.outputs.release_tag != 'skip' uses: everlytic/branch-merge@1.1.0 with: From 268724b584f04b3861a77c8bb293b357e3127358 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 12:36:32 +0200 Subject: [PATCH 1681/2550] rename release label --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d841eb9747..c4f1dcf314 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [Unreleased](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.4](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...HEAD) From e5dacd20010a75e89007f265d6ed294443a66aef Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 13:30:50 +0200 Subject: [PATCH 1682/2550] OP-4180 - removed unneeded check Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/anatomy.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index fd32a16bb2..908dc2b187 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -333,8 +333,6 @@ class BaseAnatomy(object): Probably should fill missing keys and values. 
""" - if not project_doc: - return {} output = copy.deepcopy(project_doc["config"]) output["attributes"] = copy.deepcopy(project_doc["data"]) From 92a58ba0e5dc1122eef135862f4f5be6fc586813 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 19 Oct 2022 12:06:11 +0000 Subject: [PATCH 1683/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 3a0c538daf..6769bb8467 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.4-nightly.3" +__version__ = "3.14.4-nightly.4" From b15eb445c6336421bb8b81acda98c9ce8266c40c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 19 Oct 2022 12:09:55 +0000 Subject: [PATCH 1684/2550] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 6769bb8467..fd3606e9f2 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.4-nightly.4" +__version__ = "3.14.4" From 1054ab6010e84d82392941e932d97d979cca6c06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 19 Oct 2022 16:12:33 +0200 Subject: [PATCH 1685/2550] :bug: fix order of includes --- .../UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp index 4a53af26b5..49e805da4d 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeStyle.cpp @@ -1,5 +1,5 @@ -#include "OpenPype.h" #include "OpenPypeStyle.h" +#include "OpenPype.h" #include "Framework/Application/SlateApplication.h" #include "Styling/SlateStyleRegistry.h" #include "Slate/SlateGameResources.h" From bb25d7cfdf7555a1495b6a384ac17101de85527b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 19 Oct 2022 16:34:54 +0200 Subject: [PATCH 1686/2550] :bug: add `uproject` extension to unreal project template --- openpype/settings/defaults/project_anatomy/templates.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index caf399a903..221a87eb99 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -29,7 +29,7 @@ "delivery": {}, "unreal": { "folder": "{root[work]}/{project[name]}/unreal/{task[name]}", - "file": "{project[code]}_{asset}", + "file": "{project[code]}_{asset}.uproject", "path": "{@folder}/{@file}" }, "others": { From 232780f58abb6c90ac80d48366e9ac9a70c6c2c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 19 Oct 2022 16:47:15 +0200 Subject: [PATCH 1687/2550] :recycle: use extension defined by addon --- openpype/settings/defaults/project_anatomy/templates.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index 221a87eb99..9ac0bf2ec5 100644 --- 
a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -29,7 +29,7 @@ "delivery": {}, "unreal": { "folder": "{root[work]}/{project[name]}/unreal/{task[name]}", - "file": "{project[code]}_{asset}.uproject", + "file": "{project[code]}_{asset}{ext}", "path": "{@folder}/{@file}" }, "others": { From 48382d2bc7ef87141473e0e586f3d7f522057ca2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 19 Oct 2022 16:48:42 +0200 Subject: [PATCH 1688/2550] change import of RepairAction in houdini validator --- .../hosts/houdini/plugins/publish/validate_workfile_paths.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 0bd78ff38a..560b355e21 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- -import openpype.api import pyblish.api import hou +from openpype.pipeline.publish import RepairAction + class ValidateWorkfilePaths(pyblish.api.InstancePlugin): """Validate workfile paths so they are absolute.""" @@ -11,7 +12,7 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): families = ["workfile"] hosts = ["houdini"] label = "Validate Workfile Paths" - actions = [openpype.api.RepairAction] + actions = [RepairAction] optional = True node_types = ["file", "alembic"] From b6c3f86840ed9464f4b9def514e2b4afe670766b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Wed, 19 Oct 2022 17:02:43 +0200 Subject: [PATCH 1689/2550] Update openpype/settings/defaults/project_anatomy/templates.json Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/settings/defaults/project_anatomy/templates.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index 9ac0bf2ec5..3415c4451f 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -29,7 +29,7 @@ "delivery": {}, "unreal": { "folder": "{root[work]}/{project[name]}/unreal/{task[name]}", - "file": "{project[code]}_{asset}{ext}", + "file": "{project[code]}_{asset}.{ext}", "path": "{@folder}/{@file}" }, "others": { From b4ab7bb324133ad4cb3daabe620d5408d196c442 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 19 Oct 2022 17:31:13 +0200 Subject: [PATCH 1690/2550] change import of 'get_workfile_template_key' in unreal prelaunch hook --- openpype/hosts/unreal/hooks/pre_workfile_preparation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py index 50b34bd573..4ae72593e9 100644 --- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py @@ -8,8 +8,8 @@ from openpype.lib import ( PreLaunchHook, ApplicationLaunchFailed, ApplicationNotFound, - get_workfile_template_key ) +from openpype.pipeline.workfile import get_workfile_template_key import openpype.hosts.unreal.lib as unreal_lib From 84d03eac53ac4287a7e660fbd863f8923bb23b17 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 19 Oct 2022 17:49:37 +0200 Subject: 
[PATCH 1691/2550] removed unused imports and change imports from openpype.api --- .../hosts/maya/plugins/create/create_render.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 2b2c978d3c..a3e1272652 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -13,22 +13,14 @@ from openpype.settings import ( get_system_settings, get_project_settings, ) +from openpype.lib import requests_get +from openpype.modules import ModulesManager +from openpype.pipeline import legacy_io from openpype.hosts.maya.api import ( lib, lib_rendersettings, plugin ) -from openpype.lib import requests_get -from openpype.api import ( - get_system_settings, - get_project_settings) -from openpype.modules import ModulesManager -from openpype.pipeline import legacy_io -from openpype.pipeline import ( - CreatorError, - legacy_io, -) -from openpype.pipeline.context_tools import get_current_project_asset class CreateRender(plugin.Creator): From 321512bb0115ee38d085da753a2bb6b7e4e2a2ce Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 21:19:31 +0200 Subject: [PATCH 1692/2550] nuke: adding viewer and display exctractor --- openpype/hosts/nuke/api/lib.py | 44 ++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 1aea04d889..2691b7447a 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2930,3 +2930,47 @@ def get_nodes_by_names(names): nuke.toNode(name) for name in names ] + + +def get_viewer_config_from_string(input_string): + """Convert string to display and viewer string + + Args: + input_string (str): string with viewer + + Raises: + IndexError: if more then one slash in input string + IndexError: if missing closing bracket + + Returns: + tuple[str]: display, viewer + """ + display = None + viewer = input_string + # check if () or / or \ in name + if "/" in viewer: + split = viewer.split("/") + + # rise if more then one column + if len(split) > 2: + raise IndexError(( + "Viewer Input string is not correct. " + "more then two `/` slashes! {}" + ).format(input_string)) + + viewer = split[1] + display = split[0] + elif "(" in viewer: + pattern = r"([\w\d\s]+).*[(](.*)[)]" + result = re.findall(pattern, viewer) + try: + result = result.pop() + display = str(result[1]).rstrip() + viewer = str(result[0]).rstrip() + except IndexError: + raise IndexError(( + "Viewer Input string is not correct. " + "Missing bracket! 
{}" + ).format(input_string)) + + return (display, viewer) From babd9898d2ac5da414d5f758533e4fcd3096024c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 21:21:05 +0200 Subject: [PATCH 1693/2550] nuke: implementing display and viewer assignment --- openpype/hosts/nuke/api/plugin.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 91bb90ff99..9330309f64 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -19,7 +19,8 @@ from .lib import ( add_publish_knob, get_nuke_imageio_settings, set_node_knobs_from_settings, - get_view_process_node + get_view_process_node, + get_viewer_config_from_string ) @@ -312,7 +313,8 @@ class ExporterReviewLut(ExporterReview): dag_node.setInput(0, self.previous_node) self._temp_nodes.append(dag_node) self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + self.log.debug( + "OCIODisplay... `{}`".format(self._temp_nodes)) # GenerateLUT gen_lut_node = nuke.createNode("GenerateLUT") @@ -491,7 +493,15 @@ class ExporterReviewMov(ExporterReview): if not self.viewer_lut_raw: # OCIODisplay dag_node = nuke.createNode("OCIODisplay") - dag_node["view"].setValue(str(baking_view_profile)) + + display, viewer = get_viewer_config_from_string( + str(baking_view_profile) + ) + if display: + dag_node["display"].setValue(display) + + # assign viewer + dag_node["view"].setValue(viewer) # connect dag_node.setInput(0, self.previous_node) From 3cdad1e9677c5320951f090c9b7674863c11ee4c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 21:37:13 +0200 Subject: [PATCH 1694/2550] Nuke: add custom tags inputs to settings also implement custom tags to exctractor --- openpype/hosts/nuke/api/plugin.py | 24 ++++++++++++++++--- .../defaults/project_settings/nuke.json | 2 +- .../schemas/schema_nuke_publish.json | 4 ++-- 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 9330309f64..5981a8b386 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -191,7 +191,20 @@ class ExporterReview(object): if "#" in self.fhead: self.fhead = self.fhead.replace("#", "")[:-1] - def get_representation_data(self, tags=None, range=False): + def get_representation_data( + self, tags=None, range=False, + custom_tags=None + ): + """ Add representation data to self.data + + Args: + tags (list[str], optional): list of defined tags. + Defaults to None. + range (bool, optional): flag for adding ranges. + Defaults to False. + custom_tags (list[str], optional): user inputed custom tags. + Defaults to None. 
+ """ add_tags = tags or [] repre = { "name": self.name, @@ -201,6 +214,9 @@ class ExporterReview(object): "tags": [self.name.replace("_", "-")] + add_tags } + if custom_tags: + repre["custom_tags"] = custom_tags + if range: repre.update({ "frameStart": self.first_frame, @@ -417,6 +433,7 @@ class ExporterReviewMov(ExporterReview): return path def generate_mov(self, farm=False, **kwargs): + add_tags = [] self.publish_on_farm = farm read_raw = kwargs["read_raw"] reformat_node_add = kwargs["reformat_node_add"] @@ -435,10 +452,10 @@ class ExporterReviewMov(ExporterReview): self.log.debug(">> baking_view_profile `{}`".format( baking_view_profile)) - add_tags = kwargs.get("add_tags", []) + add_custom_tags = kwargs.get("add_custom_tags", []) self.log.info( - "__ add_tags: `{0}`".format(add_tags)) + "__ add_custom_tags: `{0}`".format(add_custom_tags)) subset = self.instance.data["subset"] self._temp_nodes[subset] = [] @@ -552,6 +569,7 @@ class ExporterReviewMov(ExporterReview): # ---------- generate representation data self.get_representation_data( tags=["review", "delete"] + add_tags, + custom_tags=add_custom_tags, range=True ) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index e5cbacbda7..57a09086ca 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -434,7 +434,7 @@ } ], "extension": "mov", - "add_tags": [] + "add_custom_tags": [] } } }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index e5827a92c4..c91d3c0e3d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -296,8 +296,8 @@ "label": "Write node file type" }, { - "key": "add_tags", - "label": "Add additional tags to representations", + "key": "add_custom_tags", + "label": "Add custom tags", "type": "list", "object_type": "text" } From 463f83a201519592f20fa54a45a329bdcd58b146 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 22:17:40 +0200 Subject: [PATCH 1695/2550] global: adding filtering custom tags to settings --- openpype/settings/defaults/project_settings/global.json | 3 ++- .../projects_schema/schemas/schema_global_publish.json | 9 +++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 1b7dc7a41a..b128564bc2 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -78,7 +78,8 @@ "review", "ftrack" ], - "subsets": [] + "subsets": [], + "custom_tags": [] }, "overscan_crop": "", "overscan_color": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 773dea1229..51fc8dedf3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -295,6 +295,15 @@ "label": "Subsets", "type": "list", "object_type": "text" + }, + { + "type": "separator" + }, + { + "key": "custom_tags", + "label": "Custom Tags", + "type": "list", + 
"object_type": "text" } ] }, From 9f05131c17849e076b41e96cd0e0ccba1abfa8f0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Oct 2022 22:19:12 +0200 Subject: [PATCH 1696/2550] global: implementing filtering by custom tags --- openpype/plugins/publish/extract_review.py | 28 +++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 27117510b2..cf8d6429fa 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1619,6 +1619,24 @@ class ExtractReview(pyblish.api.InstancePlugin): return self.profile_exclusion(matching_profiles) + def custom_tags_filter_validation( + self, repr_custom_tags, output_custom_tags_filter + ): + """Determines if entered custom tags intersect with custom tags filters. + + All cutsom tags values are lowered to avoid unexpected results. + """ + repr_custom_tags = repr_custom_tags or [] + valid = False + for tag in output_custom_tags_filter: + if tag in repr_custom_tags: + valid = True + break + + if valid: + return True + return False + def families_filter_validation(self, families, output_families_filter): """Determines if entered families intersect with families filters. @@ -1656,7 +1674,9 @@ class ExtractReview(pyblish.api.InstancePlugin): return True return False - def filter_output_defs(self, profile, subset_name, families): + def filter_output_defs( + self, profile, subset_name, families, custom_tags=None + ): """Return outputs matching input instance families. Output definitions without families filter are marked as valid. @@ -1689,6 +1709,12 @@ class ExtractReview(pyblish.api.InstancePlugin): if not self.families_filter_validation(families, families_filters): continue + custom_tags_filters = output_filters.get("custom_tags") + if custom_tags and not self.custom_tags_filter_validation( + custom_tags, custom_tags_filters + ): + continue + # Subsets name filters subset_filters = [ subset_filter From a23dfcea7909331b6db1d7b6301a7658aface47d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Oct 2022 23:19:51 +0200 Subject: [PATCH 1697/2550] implemented main function for publish report viewer --- .../tools/publisher/publish_report_viewer/__init__.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/openpype/tools/publisher/publish_report_viewer/__init__.py b/openpype/tools/publisher/publish_report_viewer/__init__.py index ce1cc3729c..bf77a6d30b 100644 --- a/openpype/tools/publisher/publish_report_viewer/__init__.py +++ b/openpype/tools/publisher/publish_report_viewer/__init__.py @@ -1,3 +1,5 @@ +from Qt import QtWidgets + from .report_items import ( PublishReport ) @@ -16,4 +18,13 @@ __all__ = ( "PublishReportViewerWidget", "PublishReportViewerWindow", + + "main", ) + + +def main(): + app = QtWidgets.QApplication([]) + window = PublishReportViewerWindow() + window.show() + return app.exec_() From a7747eb1acf121c692d17fa2ae86010e1ea4ba47 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Oct 2022 23:20:04 +0200 Subject: [PATCH 1698/2550] added launch function for publish report viewer --- openpype/cli.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/cli.py b/openpype/cli.py index 398d1a94c0..d24cd4a872 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -277,6 +277,13 @@ def projectmanager(): PypeCommands().launch_project_manager() +@main.command(context_settings={"ignore_unknown_options": True}) +def publish_report_viewer(): + from 
openpype.tools.publisher.publish_report_viewer import main + + sys.exit(main()) + + @main.command() @click.argument("output_path") @click.option("--project", help="Define project context") From 8fb9dd41d1dfd353ac906f6febcb2922202ed6c5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Oct 2022 23:20:27 +0200 Subject: [PATCH 1699/2550] added ps1 script to launch publish report viewer --- tools/run_publish_report_viewer.ps1 | 40 +++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 tools/run_publish_report_viewer.ps1 diff --git a/tools/run_publish_report_viewer.ps1 b/tools/run_publish_report_viewer.ps1 new file mode 100644 index 0000000000..3ff40e64e3 --- /dev/null +++ b/tools/run_publish_report_viewer.ps1 @@ -0,0 +1,40 @@ +<# +.SYNOPSIS + Helper script OpenPype Tray. + +.DESCRIPTION + + +.EXAMPLE + +PS> .\run_tray.ps1 + +#> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName + +# Install PSWriteColor to support colorized output to terminal +$env:PSModulePath = $env:PSModulePath + ";$($openpype_root)\tools\modules\powershell" + +$env:_INSIDE_OPENPYPE_TOOL = "1" + +# make sure Poetry is in PATH +if (-not (Test-Path 'env:POETRY_HOME')) { + $env:POETRY_HOME = "$openpype_root\.poetry" +} +$env:PATH = "$($env:PATH);$($env:POETRY_HOME)\bin" + +Set-Location -Path $openpype_root + +Write-Color -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline +if (-not (Test-Path -PathType Container -Path "$($env:POETRY_HOME)\bin")) { + Write-Color -Text "NOT FOUND" -Color Yellow + Write-Color -Text "*** ", "We need to install Poetry create virtual env first ..." -Color Yellow, Gray + & "$openpype_root\tools\create_env.ps1" +} else { + Write-Color -Text "OK" -Color Green +} + +& "$($env:POETRY_HOME)\bin\poetry" run python "$($openpype_root)\start.py" publish-report-viewer --debug +Set-Location -Path $current_dir From e35e3e0299622bd600d59574857f27ad700890dd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 12:49:39 +0200 Subject: [PATCH 1700/2550] fix python 2 compatibility of ffmpeg and oiio tools discovery --- openpype/lib/vendor_bin_utils.py | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index eb7987c8a1..099f9a34ba 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -195,6 +195,28 @@ def find_tool_in_custom_paths(paths, tool, validation_func=None): return None +def _check_args_returncode(args): + try: + # Python 2 compatibility where DEVNULL is not available + if hasattr(subprocess, "DEVNULL"): + proc = subprocess.Popen( + args, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + proc.wait() + else: + with open(os.devnull, "w") as devnull: + proc = subprocess.Popen( + args, stdout=devnull, stderr=devnull, + ) + proc.wait() + + except Exception: + return False + return proc.returncode == 0 + + def _oiio_executable_validation(filepath): """Validate oiio tool executable if can be executed. 
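A minimal usage sketch of the `_check_args_returncode` helper added above; it mirrors how the oiio and ffmpeg validators below call it. The import path follows the diff header, and the ffmpeg path is only a placeholder:

    from openpype.lib.vendor_bin_utils import _check_args_returncode

    # True only when the executable could be spawned and exited with code 0,
    # on Python 2 (os.devnull fallback) as well as Python 3 (subprocess.DEVNULL).
    if _check_args_returncode(["/usr/local/bin/ffmpeg", "-version"]):
        print("ffmpeg executable responds")
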
@@ -223,18 +245,7 @@ def _oiio_executable_validation(filepath): if not filepath: return False - try: - proc = subprocess.Popen( - [filepath, "--help"], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) - proc.wait() - return proc.returncode == 0 - - except Exception: - pass - return False + return _check_args_returncode([filepath, "--help"]) def get_oiio_tools_path(tool="oiiotool"): @@ -302,18 +313,7 @@ def _ffmpeg_executable_validation(filepath): if not filepath: return False - try: - proc = subprocess.Popen( - [filepath, "-version"], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) - proc.wait() - return proc.returncode == 0 - - except Exception: - pass - return False + return _check_args_returncode([filepath, "-version"]) def get_ffmpeg_tool_path(tool="ffmpeg"): From 5d476f8f8b8136e20c88a6b5229c74fec2f2b1d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 20 Oct 2022 12:53:25 +0200 Subject: [PATCH 1701/2550] :recycle: remove redundant Creator stub this caused empty item in creator dialog --- openpype/hosts/unreal/api/__init__.py | 7 ++----- openpype/hosts/unreal/api/plugin.py | 11 +---------- openpype/hosts/unreal/plugins/create/create_camera.py | 4 ++-- openpype/hosts/unreal/plugins/create/create_layout.py | 4 ++-- openpype/hosts/unreal/plugins/create/create_look.py | 3 ++- openpype/hosts/unreal/plugins/create/create_render.py | 4 ++-- .../unreal/plugins/create/create_staticmeshfbx.py | 4 ++-- 7 files changed, 13 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/unreal/api/__init__.py b/openpype/hosts/unreal/api/__init__.py index 870982f5f9..3f96d8ac6f 100644 --- a/openpype/hosts/unreal/api/__init__.py +++ b/openpype/hosts/unreal/api/__init__.py @@ -1,10 +1,8 @@ # -*- coding: utf-8 -*- """Unreal Editor OpenPype host API.""" -from .plugin import ( - Loader, - Creator -) +from .plugin import Loader + from .pipeline import ( install, uninstall, @@ -25,7 +23,6 @@ from .pipeline import ( __all__ = [ "install", "uninstall", - "Creator", "Loader", "ls", "publish", diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index d8d2f2420d..6fc00cb71c 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -1,16 +1,7 @@ # -*- coding: utf-8 -*- from abc import ABC -from openpype.pipeline import ( - LegacyCreator, - LoaderPlugin, -) - - -class Creator(LegacyCreator): - """This serves as skeleton for future OpenPype specific functionality""" - defaults = ['Main'] - maintain_selection = False +from openpype.pipeline import LoaderPlugin class Loader(LoaderPlugin, ABC): diff --git a/openpype/hosts/unreal/plugins/create/create_camera.py b/openpype/hosts/unreal/plugins/create/create_camera.py index 2842900834..bf1489d688 100644 --- a/openpype/hosts/unreal/plugins/create/create_camera.py +++ b/openpype/hosts/unreal/plugins/create/create_camera.py @@ -2,11 +2,11 @@ import unreal from unreal import EditorAssetLibrary as eal from unreal import EditorLevelLibrary as ell -from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api.pipeline import instantiate +from openpype.pipeline import LegacyCreator -class CreateCamera(plugin.Creator): +class CreateCamera(LegacyCreator): """Layout output for character rigs""" name = "layoutMain" diff --git a/openpype/hosts/unreal/plugins/create/create_layout.py b/openpype/hosts/unreal/plugins/create/create_layout.py index 5fef08ce2a..c1067b00d9 100644 --- a/openpype/hosts/unreal/plugins/create/create_layout.py +++ 
b/openpype/hosts/unreal/plugins/create/create_layout.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- from unreal import EditorLevelLibrary -from openpype.hosts.unreal.api import plugin +from openpype.pipeline import LegacyCreator from openpype.hosts.unreal.api.pipeline import instantiate -class CreateLayout(plugin.Creator): +class CreateLayout(LegacyCreator): """Layout output for character rigs.""" name = "layoutMain" diff --git a/openpype/hosts/unreal/plugins/create/create_look.py b/openpype/hosts/unreal/plugins/create/create_look.py index 12f6b70ae6..4abf3f6095 100644 --- a/openpype/hosts/unreal/plugins/create/create_look.py +++ b/openpype/hosts/unreal/plugins/create/create_look.py @@ -2,9 +2,10 @@ """Create look in Unreal.""" import unreal # noqa from openpype.hosts.unreal.api import pipeline, plugin +from openpype.pipeline import LegacyCreator -class CreateLook(plugin.Creator): +class CreateLook(LegacyCreator): """Shader connections defining shape look.""" name = "unrealLook" diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index 950799cc10..a85d17421b 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -1,10 +1,10 @@ import unreal from openpype.hosts.unreal.api import pipeline -from openpype.hosts.unreal.api.plugin import Creator +from openpype.pipeline import LegacyCreator -class CreateRender(Creator): +class CreateRender(LegacyCreator): """Create instance for sequence for rendering""" name = "unrealRender" diff --git a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py index 601c2fae06..45d517d27d 100644 --- a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- """Create Static Meshes as FBX geometry.""" import unreal # noqa -from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api.pipeline import ( instantiate, ) +from openpype.pipeline import LegacyCreator -class CreateStaticMeshFBX(plugin.Creator): +class CreateStaticMeshFBX(LegacyCreator): """Static FBX geometry.""" name = "unrealStaticMeshMain" From 05c87821941bf2dd51f50453fb5d2864a7419092 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 14:32:43 +0200 Subject: [PATCH 1702/2550] raise exception on collect/save/remove operations of creator --- openpype/pipeline/create/context.py | 85 +++++++++++++++++++++++++++-- 1 file changed, 80 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 4ec6d7bdad..6bc70f33ea 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -62,6 +62,22 @@ class HostMissRequiredMethod(Exception): super(HostMissRequiredMethod, self).__init__(msg) +class CreatorOperationFailed(Exception): + pass + + +class CreatorsCollectionFailed(CreatorOperationFailed): + pass + + +class CreatorsSaveFailed(CreatorOperationFailed): + pass + + +class CreatorsRemoveFailed(CreatorOperationFailed): + pass + + class InstanceMember: """Representation of instance member. 
@@ -1221,8 +1237,26 @@ class CreateContext: self._instances_by_id = {} # Collect instances + failed_creators = [] for creator in self.creators.values(): - creator.collect_instances() + try: + creator.collect_instances() + except: + failed_creators.append(creator) + self.log.warning( + "Collection of instances for creator {} ({}) failed".format( + creator.label, creator.identifier), + exc_info=True + ) + + if failed_creators: + joined_creators = ", ".join( + [creator.label for creator in failed_creators] + ) + + raise CreatorsCollectionFailed( + "Failed to collect instances of creators {}".format(joined_creators) + ) def execute_autocreators(self): """Execute discovered AutoCreator plugins. @@ -1315,16 +1349,35 @@ class CreateContext: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) - for identifier, cretor_instances in instances_by_identifier.items(): + failed_creators = [] + for identifier, creator_instances in instances_by_identifier.items(): update_list = [] - for instance in cretor_instances: + for instance in creator_instances: instance_changes = instance.changes() if instance_changes: update_list.append(UpdateData(instance, instance_changes)) creator = self.creators[identifier] if update_list: - creator.update_instances(update_list) + try: + creator.update_instances(update_list) + + except: + failed_creators.append(creator) + self.log.warning( + "Instances update of creator {} ({}) failed".format( + creator.label, creator.identifier), + exc_info=True + ) + + if failed_creators: + joined_creators = ", ".join( + [creator.label for creator in failed_creators] + ) + + raise CreatorsSaveFailed( + "Failed save changes of creators {}".format(joined_creators) + ) def remove_instances(self, instances): """Remove instances from context. @@ -1333,14 +1386,36 @@ class CreateContext: instances(list): Instances that should be removed from context. """ + instances_by_identifier = collections.defaultdict(list) for instance in instances: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) + failed_creators = [] for identifier, creator_instances in instances_by_identifier.items(): creator = self.creators.get(identifier) - creator.remove_instances(creator_instances) + try: + creator.remove_instances(creator_instances) + except: + failed_creators.append(creator) + self.log.warning( + "Instances removement of creator {} ({}) failed".format( + creator.label, creator.identifier), + exc_info=True + ) + + if failed_creators: + joined_creators = ", ".join( + [creator.label for creator in failed_creators] + ) + + raise CreatorsRemoveFailed( + "Failed to remove instances of creators {}".format( + joined_creators + ) + ) + def _get_publish_plugins_with_attr_for_family(self, family): """Publish plugin attributes for passed family. 
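With the changes above, each bulk method of `CreateContext` catches per-creator errors, keeps processing the remaining creators, and finally raises a dedicated exception that names everything that failed. A minimal sketch of how calling code might consume them; the `create_context` instance and the print reporting are assumptions, only the exception names come from the diff:

    from openpype.pipeline.create.context import (
        CreatorsCollectionFailed,
        CreatorsSaveFailed,
    )

    try:
        create_context.reset_instances()
    except CreatorsCollectionFailed as exc:
        # the message already joins the labels of the creators that failed
        print(str(exc))

    try:
        create_context.save_changes()
    except CreatorsSaveFailed as exc:
        print(str(exc))
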
From 3f54214a1113bac1ab5b53e8a0f17078eba1ce6a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 14:43:32 +0200 Subject: [PATCH 1703/2550] don't autotrigger save on controller reset --- openpype/tools/publisher/control.py | 3 --- openpype/tools/publisher/window.py | 41 ++++++++++++++++++----------- 2 files changed, 25 insertions(+), 19 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index a340f8c1d2..1a15a71040 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1662,11 +1662,8 @@ class PublisherController(BasePublisherController): def reset(self): """Reset everything related to creation and publishing.""" - # Stop publishing self.stop_publish() - self.save_changes() - self.host_is_valid = self._create_context.host_is_valid # Reset avalon context diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 39075d2489..7559b4a641 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -36,7 +36,7 @@ class PublisherWindow(QtWidgets.QDialog): footer_border = 8 publish_footer_spacer = 2 - def __init__(self, parent=None, controller=None, reset_on_show=None): + def __init__(self, parent=None, controller=None, reset_on_first_show=None): super(PublisherWindow, self).__init__(parent) self.setWindowTitle("OpenPype publisher") @@ -44,8 +44,8 @@ class PublisherWindow(QtWidgets.QDialog): icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) self.setWindowIcon(icon) - if reset_on_show is None: - reset_on_show = True + if reset_on_first_show is None: + reset_on_first_show = True if parent is None: on_top_flag = QtCore.Qt.WindowStaysOnTopHint @@ -298,7 +298,8 @@ class PublisherWindow(QtWidgets.QDialog): self._controller = controller self._first_show = True - self._reset_on_show = reset_on_show + self._reset_on_first_show = reset_on_first_show + self._reset_on_show = True self._restart_timer = None self._publish_frame_visible = None @@ -314,6 +315,18 @@ class PublisherWindow(QtWidgets.QDialog): self._first_show = False self._on_first_show() + if not self._reset_on_show: + return + + self._reset_on_show = False + # Detach showing - give OS chance to draw the window + timer = QtCore.QTimer() + timer.setSingleShot(True) + timer.setInterval(1) + timer.timeout.connect(self._on_show_restart_timer) + self._restart_timer = timer + timer.start() + def resizeEvent(self, event): super(PublisherWindow, self).resizeEvent(event) self._update_publish_frame_rect() @@ -324,16 +337,7 @@ class PublisherWindow(QtWidgets.QDialog): def _on_first_show(self): self.resize(self.default_width, self.default_height) self.setStyleSheet(style.load_stylesheet()) - if not self._reset_on_show: - return - - # Detach showing - give OS chance to draw the window - timer = QtCore.QTimer() - timer.setSingleShot(True) - timer.setInterval(1) - timer.timeout.connect(self._on_show_restart_timer) - self._restart_timer = timer - timer.start() + self._reset_on_show = self._reset_on_first_show def _on_show_restart_timer(self): """Callback for '_restart_timer' timer.""" @@ -342,9 +346,13 @@ class PublisherWindow(QtWidgets.QDialog): self.reset() def closeEvent(self, event): - self._controller.save_changes() + self.save_changes() + self._reset_on_show = True super(PublisherWindow, self).closeEvent(event) + def save_changes(self): + self._controller.save_changes() + def reset(self): self._controller.reset() @@ -436,7 +444,8 @@ class PublisherWindow(QtWidgets.QDialog): 
self._update_publish_frame_rect() def _on_reset_clicked(self): - self._controller.reset() + self.save_changes() + self.reset() def _on_stop_clicked(self): self._controller.stop_publish() From 2d92aed06e530e581229dc96c73ce562ba15ab70 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 15:08:36 +0200 Subject: [PATCH 1704/2550] handle failed collect,update and remove instances --- openpype/tools/publisher/control.py | 41 ++++++++++++++++++++++++++--- 1 file changed, 37 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 1a15a71040..2346825734 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -31,6 +31,9 @@ from openpype.pipeline.create import ( HiddenCreator, Creator, ) +from openpype.pipeline.create.context import ( + CreatorsOperationFailed, +) # Define constant for plugin orders offset PLUGIN_ORDER_OFFSET = 0.5 @@ -1708,8 +1711,18 @@ class PublisherController(BasePublisherController): self._create_context.reset_context_data() with self._create_context.bulk_instances_collection(): - self._create_context.reset_instances() - self._create_context.execute_autocreators() + try: + self._create_context.reset_instances() + self._create_context.execute_autocreators() + + except CreatorsOperationFailed as exc: + self._emit_event( + "instances.collection.failed", + { + "title": "Instance collection failed", + "message": str(exc) + } + ) self._resetting_instances = False @@ -1845,8 +1858,19 @@ class PublisherController(BasePublisherController): def save_changes(self): """Save changes happened during creation.""" - if self._create_context.host_is_valid: + if not self._create_context.host_is_valid: + return + + try: self._create_context.save_changes() + except CreatorsOperationFailed as exc: + self._emit_event( + "instances.save.failed", + { + "title": "Save failed", + "message": str(exc) + } + ) def remove_instances(self, instance_ids): """Remove instances based on instance ids. 
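The controller methods patched here turn `CreatorsOperationFailed` errors into events instead of letting them propagate, so UI code only has to register listeners. A small sketch of such a listener; `controller` stands for a PublisherController instance and the callback body is an assumption, while the event name, payload keys and `add_callback` call follow the surrounding code:

    def _on_save_failed(event):
        # payload emitted above carries a short title and the error message
        print("{}: {}".format(event["title"], event["message"]))

    controller.event_system.add_callback(
        "instances.save.failed", _on_save_failed
    )
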
@@ -1869,7 +1893,16 @@ class PublisherController(BasePublisherController): instances_by_id[instance_id] for instance_id in instance_ids ] - self._create_context.remove_instances(instances) + try: + self._create_context.remove_instances(instances) + except CreatorsOperationFailed as exc: + self._emit_event( + "instances.remove.failed", + { + "title": "Remove failed", + "message": str(exc) + } + ) def _on_create_instance_change(self): self._emit_event("instances.refresh.finished") From f22e3c3a99697f312d35b769a25126effab5c65d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 15:08:44 +0200 Subject: [PATCH 1705/2550] show dialogs when creator fails --- openpype/tools/publisher/window.py | 65 ++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 7559b4a641..a7f2ec2ce6 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -1,3 +1,4 @@ +import collections from Qt import QtWidgets, QtCore, QtGui from openpype import ( @@ -222,6 +223,10 @@ class PublisherWindow(QtWidgets.QDialog): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) + dialog_message_timer = QtCore.QTimer() + dialog_message_timer.setInterval(100) + dialog_message_timer.timeout.connect(self._on_dialog_message_timeout) + help_btn.clicked.connect(self._on_help_click) tabs_widget.tab_changed.connect(self._on_tab_change) overview_widget.active_changed.connect( @@ -259,6 +264,15 @@ class PublisherWindow(QtWidgets.QDialog): controller.event_system.add_callback( "show.card.message", self._on_overlay_message ) + controller.event_system.add_callback( + "instances.collection.failed", self._instance_collection_failed + ) + controller.event_system.add_callback( + "instances.save.failed", self._instance_save_failed + ) + controller.event_system.add_callback( + "instances.remove.failed", self._instance_remove_failed + ) # Store extra header widget for TrayPublisher # - can be used to add additional widgets to header between context @@ -303,6 +317,9 @@ class PublisherWindow(QtWidgets.QDialog): self._restart_timer = None self._publish_frame_visible = None + self._dialog_messages_to_show = collections.deque() + self._dialog_message_timer = dialog_message_timer + self._set_publish_visibility(False) @property @@ -578,3 +595,51 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_frame.move( 0, window_size.height() - height ) + + def add_message_dialog(self, message, title): + self._dialog_messages_to_show.append((message, title)) + self._dialog_message_timer.start() + + def _on_dialog_message_timeout(self): + if not self._dialog_messages_to_show: + self._dialog_message_timer.stop() + return + + item = self._dialog_messages_to_show.popleft() + message, title = item + dialog = MessageDialog(message, title) + dialog.exec_() + + def _instance_collection_failed(self, event): + self.add_message_dialog(event["message"], event["title"]) + + def _instance_save_failed(self, event): + self.add_message_dialog(event["message"], event["title"]) + + def _instance_remove_failed(self, event): + self.add_message_dialog(event["message"], event["title"]) + + +class MessageDialog(QtWidgets.QDialog): + def __init__(self, message, title, parent=None): + super(MessageDialog, self).__init__(parent) + + self.setWindowTitle(title or "Something happend") + + message_widget = QtWidgets.QLabel(message, self) + + btns_widget = QtWidgets.QWidget(self) + submit_btn = 
QtWidgets.QPushButton("OK", btns_widget) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.setContentsMargins(0, 0, 0, 0) + btns_layout.addStretch(1) + btns_layout.addWidget(submit_btn) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(message_widget, 1) + layout.addWidget(btns_widget, 0) + + def showEvent(self, event): + super(MessageDialog, self).showEvent(event) + self.resize(400, 300) From 34dcbcbf1ec987e77e7390071207b36c36aab284 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 15:17:05 +0200 Subject: [PATCH 1706/2550] renamed 'CreatorOperationFailed' to 'CreatorsOperationFailed' --- openpype/pipeline/create/context.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 6bc70f33ea..fb53b95a92 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -62,19 +62,19 @@ class HostMissRequiredMethod(Exception): super(HostMissRequiredMethod, self).__init__(msg) -class CreatorOperationFailed(Exception): +class CreatorsOperationFailed(Exception): pass -class CreatorsCollectionFailed(CreatorOperationFailed): +class CreatorsCollectionFailed(CreatorsOperationFailed): pass -class CreatorsSaveFailed(CreatorOperationFailed): +class CreatorsSaveFailed(CreatorsOperationFailed): pass -class CreatorsRemoveFailed(CreatorOperationFailed): +class CreatorsRemoveFailed(CreatorsOperationFailed): pass From 65ec73a53a8589e4d3ddff7df2d579e4236cf823 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 15:57:21 +0200 Subject: [PATCH 1707/2550] fix page change --- openpype/tools/publisher/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index a7f2ec2ce6..2199981519 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -498,7 +498,7 @@ class PublisherWindow(QtWidgets.QDialog): self._update_publish_details_widget() if ( not self._tabs_widget.is_current_tab("create") - or not self._tabs_widget.is_current_tab("publish") + and not self._tabs_widget.is_current_tab("publish") ): self._tabs_widget.set_current_tab("publish") From d04231cc6b112457b5e440b3ef6bd7e5f0bd475d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 15:57:30 +0200 Subject: [PATCH 1708/2550] fix context label before publishing start --- openpype/tools/publisher/control.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 2346825734..d402ab2434 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -302,8 +302,11 @@ class PublishReport: } def _extract_context_data(self, context): + context_label = "Context" + if context is not None: + context_label = context.data.get("label") return { - "label": context.data.get("label") + "label": context_label } def _extract_instance_data(self, instance, exists): From ab40ab6201a32142b702f7ba1ad1545631c34171 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 16:19:02 +0200 Subject: [PATCH 1709/2550] change init arg back --- openpype/tools/publisher/window.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 2199981519..0e514bd2f2 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -37,7 
+37,7 @@ class PublisherWindow(QtWidgets.QDialog): footer_border = 8 publish_footer_spacer = 2 - def __init__(self, parent=None, controller=None, reset_on_first_show=None): + def __init__(self, parent=None, controller=None, reset_on_show=None): super(PublisherWindow, self).__init__(parent) self.setWindowTitle("OpenPype publisher") @@ -45,8 +45,8 @@ class PublisherWindow(QtWidgets.QDialog): icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) self.setWindowIcon(icon) - if reset_on_first_show is None: - reset_on_first_show = True + if reset_on_show is None: + reset_on_show = True if parent is None: on_top_flag = QtCore.Qt.WindowStaysOnTopHint @@ -312,7 +312,9 @@ class PublisherWindow(QtWidgets.QDialog): self._controller = controller self._first_show = True - self._reset_on_first_show = reset_on_first_show + # This is a little bit confusing but 'reset_on_first_show' is too long + # forin init + self._reset_on_first_show = reset_on_show self._reset_on_show = True self._restart_timer = None self._publish_frame_visible = None From 0c899e601b22c6c5c9a8f084d4a7a9ad71b8104c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 17:10:09 +0200 Subject: [PATCH 1710/2550] fix attributes --- openpype/tools/publisher/control.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index bf1564597f..d2d01e7921 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1669,7 +1669,7 @@ class PublisherController(BasePublisherController): self.host_is_valid = self._create_context.host_is_valid - self.create_context.reset_preparation() + self._create_context.reset_preparation() # Reset avalon context self._create_context.reset_avalon_context() @@ -1681,7 +1681,7 @@ class PublisherController(BasePublisherController): self._reset_publish() self._reset_instances() - self.create_context.reset_finalization() + self._create_context.reset_finalization() self._emit_event("controller.reset.finished") From 2e9572aaebfac811a69a2a13b2eb2af8e094c097 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 17:21:43 +0200 Subject: [PATCH 1711/2550] use float numbers for animation --- openpype/tools/publisher/widgets/overview_widget.py | 7 ++++--- openpype/tools/publisher/widgets/publish_frame.py | 6 +++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 5bd3017c2a..4cf8ae0eed 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -93,8 +93,8 @@ class OverviewWidget(QtWidgets.QFrame): main_layout.addWidget(subset_content_widget, 1) change_anim = QtCore.QVariantAnimation() - change_anim.setStartValue(0) - change_anim.setEndValue(self.anim_end_value) + change_anim.setStartValue(float(0)) + change_anim.setEndValue(float(self.anim_end_value)) change_anim.setDuration(self.anim_duration) change_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) @@ -264,9 +264,10 @@ class OverviewWidget(QtWidgets.QFrame): + (self._subset_content_layout.spacing() * 2) ) ) - subset_attrs_width = int(float(width) / self.anim_end_value) * value + subset_attrs_width = int((float(width) / self.anim_end_value) * value) if subset_attrs_width > width: subset_attrs_width = width + create_width = width - subset_attrs_width self._create_widget.setMinimumWidth(create_width) diff --git 
a/openpype/tools/publisher/widgets/publish_frame.py b/openpype/tools/publisher/widgets/publish_frame.py index e6333a104f..00597451a9 100644 --- a/openpype/tools/publisher/widgets/publish_frame.py +++ b/openpype/tools/publisher/widgets/publish_frame.py @@ -248,13 +248,13 @@ class PublishFrame(QtWidgets.QWidget): hint = self._top_content_widget.minimumSizeHint() end = hint.height() - self._shrunk_anim.setStartValue(start) - self._shrunk_anim.setEndValue(end) + self._shrunk_anim.setStartValue(float(start)) + self._shrunk_anim.setEndValue(float(end)) if not anim_is_running: self._shrunk_anim.start() def _on_shrunk_anim(self, value): - diff = self._top_content_widget.height() - value + diff = self._top_content_widget.height() - int(value) if not self._top_content_widget.isVisible(): diff -= self._content_layout.spacing() From 8049bda095ca1290443e1ebcabb1d9a9a54caeca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 20 Oct 2022 18:38:00 +0200 Subject: [PATCH 1712/2550] fix message box --- openpype/tools/publisher/window.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 0e514bd2f2..4f0b81fa85 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -609,8 +609,9 @@ class PublisherWindow(QtWidgets.QDialog): item = self._dialog_messages_to_show.popleft() message, title = item - dialog = MessageDialog(message, title) + dialog = MessageDialog(message, title, self) dialog.exec_() + dialog.deleteLater() def _instance_collection_failed(self, event): self.add_message_dialog(event["message"], event["title"]) @@ -629,6 +630,7 @@ class MessageDialog(QtWidgets.QDialog): self.setWindowTitle(title or "Something happend") message_widget = QtWidgets.QLabel(message, self) + message_widget.setWordWrap(True) btns_widget = QtWidgets.QWidget(self) submit_btn = QtWidgets.QPushButton("OK", btns_widget) @@ -639,9 +641,15 @@ class MessageDialog(QtWidgets.QDialog): btns_layout.addWidget(submit_btn) layout = QtWidgets.QVBoxLayout(self) - layout.addWidget(message_widget, 1) + layout.addWidget(message_widget, 0) + layout.addStretch(1) layout.addWidget(btns_widget, 0) + submit_btn.clicked.connect(self._on_submit_click) + + def _on_submit_click(self): + self.close() + def showEvent(self, event): super(MessageDialog, self).showEvent(event) - self.resize(400, 300) + self.resize(400, 200) From 29a50cc280c13a3be9d6c9971d91e494ac8711d7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Oct 2022 21:23:46 +0200 Subject: [PATCH 1713/2550] global: exctract review custom tag filtering fix --- openpype/plugins/publish/extract_review.py | 95 +++++++++++----------- 1 file changed, 46 insertions(+), 49 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index cf8d6429fa..431ddcc3b4 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -128,6 +128,7 @@ class ExtractReview(pyblish.api.InstancePlugin): for repre in instance.data["representations"]: repre_name = str(repre.get("name")) tags = repre.get("tags") or [] + custom_tags = repre.get("custom_tags") if "review" not in tags: self.log.debug(( "Repre: {} - Didn't found \"review\" in tags. 
Skipping" @@ -158,15 +159,18 @@ class ExtractReview(pyblish.api.InstancePlugin): ) continue - # Filter output definition by representation tags (optional) - outputs = self.filter_outputs_by_tags(profile_outputs, tags) + # Filter output definition by representation's + # custom tags (optional) + outputs = self.filter_outputs_by_custom_tags( + profile_outputs, custom_tags) if not outputs: self.log.info(( "Skipped representation. All output definitions from" " selected profile does not match to representation's" - " tags. \"{}\"" + " custom tags. \"{}\"" ).format(str(tags))) continue + outputs_per_representations.append((repre, outputs)) return outputs_per_representations @@ -1619,24 +1623,6 @@ class ExtractReview(pyblish.api.InstancePlugin): return self.profile_exclusion(matching_profiles) - def custom_tags_filter_validation( - self, repr_custom_tags, output_custom_tags_filter - ): - """Determines if entered custom tags intersect with custom tags filters. - - All cutsom tags values are lowered to avoid unexpected results. - """ - repr_custom_tags = repr_custom_tags or [] - valid = False - for tag in output_custom_tags_filter: - if tag in repr_custom_tags: - valid = True - break - - if valid: - return True - return False - def families_filter_validation(self, families, output_families_filter): """Determines if entered families intersect with families filters. @@ -1675,7 +1661,7 @@ class ExtractReview(pyblish.api.InstancePlugin): return False def filter_output_defs( - self, profile, subset_name, families, custom_tags=None + self, profile, subset_name, families ): """Return outputs matching input instance families. @@ -1684,6 +1670,7 @@ class ExtractReview(pyblish.api.InstancePlugin): Args: profile (dict): Profile from presets matching current context. families (list): All families of current instance. + subset_name (str): name of subset Returns: list: Containg all output definitions matching entered families. @@ -1709,12 +1696,6 @@ class ExtractReview(pyblish.api.InstancePlugin): if not self.families_filter_validation(families, families_filters): continue - custom_tags_filters = output_filters.get("custom_tags") - if custom_tags and not self.custom_tags_filter_validation( - custom_tags, custom_tags_filters - ): - continue - # Subsets name filters subset_filters = [ subset_filter @@ -1737,39 +1718,55 @@ class ExtractReview(pyblish.api.InstancePlugin): return filtered_outputs - def filter_outputs_by_tags(self, outputs, tags): - """Filter output definitions by entered representation tags. + def filter_outputs_by_custom_tags(self, outputs, custom_tags): + """Filter output definitions by entered representation custom_tags. - Output definitions without tags filter are marked as valid. + Output definitions without custom_tags filter are marked as invalid, + only in case representation is having any custom_tags defined. Args: outputs (list): Contain list of output definitions from presets. - tags (list): Tags of processed representation. + custom_tags (list): Custom Tags of processed representation. Returns: list: Containg all output definitions matching entered tags. 
""" filtered_outputs = [] - repre_tags_low = [tag.lower() for tag in tags] + repre_c_tags_low = [tag.lower() for tag in (custom_tags or [])] for output_def in outputs: - valid = True - output_filters = output_def.get("filter") - if output_filters: - # Check tag filters - tag_filters = output_filters.get("tags") - if tag_filters: - tag_filters_low = [tag.lower() for tag in tag_filters] - valid = False - for tag in repre_tags_low: - if tag in tag_filters_low: - valid = True - break + valid = False + tag_filters = output_def.get("filter", {}).get("custom_tags") - if not valid: - continue + if ( + # if any of tag filter is empty, skip + custom_tags and not tag_filters + or not custom_tags and tag_filters + ): + continue + elif not custom_tags and not tag_filters: + valid = True - if valid: - filtered_outputs.append(output_def) + # lower all filter tags + tag_filters_low = [tag.lower() for tag in tag_filters] + + self.log.debug("__ tag_filters: {}".format(tag_filters)) + self.log.debug("__ repre_c_tags_low: {}".format( + repre_c_tags_low)) + + # check if any repre tag is not in filter tags + for tag in repre_c_tags_low: + if tag in tag_filters_low: + valid = True + break + + if not valid: + continue + + filtered_outputs.append(output_def) + + self.log.debug("__ filtered_outputs: {}".format( + [_o["filename_suffix"] for _o in filtered_outputs] + )) return filtered_outputs From 8e3b2ab933c11ef37678da0b784b971c861afdf6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 10:26:39 +0200 Subject: [PATCH 1714/2550] raise specific exceptions when creators fail to run their methods --- openpype/pipeline/create/context.py | 258 ++++++++++++++++++++++------ 1 file changed, 204 insertions(+), 54 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index fb53b95a92..dfa9049601 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1,6 +1,8 @@ import os +import sys import copy import logging +import traceback import collections import inspect from uuid import uuid4 @@ -22,6 +24,7 @@ from .creator_plugins import ( Creator, AutoCreator, discover_creator_plugins, + CreatorError, ) UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) @@ -63,19 +66,76 @@ class HostMissRequiredMethod(Exception): class CreatorsOperationFailed(Exception): - pass + """Raised when a creator process crashes in 'CreateContext'. + + The exception contains information about the creator and error. The data + are prepared using 'prepare_failed_creator_operation_info' and can be + serialized using json. + + Usage is for UI purposes which may not have access to exceptions directly + and would not have ability to catch exceptions 'per creator'. + + Args: + msg (str): General error message. + failed_info (list[dict[str, Any]]): List of failed creators with + exception message and optionally formatted traceback. 
+ """ + + def __init__(self, msg, failed_info): + super(CreatorsOperationFailed, self).__init__(msg) + self.failed_info = failed_info class CreatorsCollectionFailed(CreatorsOperationFailed): - pass + def __init__(self, failed_info): + msg = "Failed to collect instances" + super(CreatorsCollectionFailed, self).__init__( + msg, failed_info + ) class CreatorsSaveFailed(CreatorsOperationFailed): - pass + def __init__(self, failed_info): + msg = "Failed update instance changes" + super(CreatorsSaveFailed, self).__init__( + msg, failed_info + ) class CreatorsRemoveFailed(CreatorsOperationFailed): - pass + def __init__(self, failed_info): + msg = "Failed to remove instances" + super(CreatorsRemoveFailed, self).__init__( + msg, failed_info + ) + + +class CreatorsCreateFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Faled to create instances" + super(CreatorsCreateFailed, self).__init__( + msg, failed_info + ) + + +def prepare_failed_creator_operation_info( + identifier, label, exc_info, add_traceback=True +): + formatted_traceback = None + exc_type, exc_value, exc_traceback = exc_info + error_msg = str(exc_value) + + if add_traceback: + formatted_traceback = "".join(traceback.format_exception( + exc_type, exc_value, exc_traceback + )) + + return { + "creator_identifier": identifier, + "creator_label": label, + "message": error_msg, + "traceback": formatted_traceback + } class InstanceMember: @@ -1202,7 +1262,67 @@ class CreateContext: with self.bulk_instances_collection(): self._bulk_instances_to_process.append(instance) + def create(self, identifier, *args, **kwargs): + """Wrapper for creators to trigger created. + + Different types of creators may expect different arguments thus the + hints for args are blind. + + Args: + identifier (str): Creator's identifier. + *args (Tuple[Any]): Arguments for create method. + **kwargs (Dict[Any, Any]): Keyword argument for create method. + """ + + creator = self.creators.get(identifier) + label = getattr(creator, "label", None) + failed = False + add_traceback = False + try: + # Fake CreatorError (Could be maybe specific exception?) + if creator is None: + raise CreatorError( + "Creator {} was not found".format(identifier) + ) + + creator.create(*args, **kwargs) + + except CreatorError: + failed = True + exc_info = sys.exc_info() + + except: + failed = True + add_traceback = True + exc_info = sys.exc_info() + + if not failed: + return + + self.log.warning( + ( + "Failed to run Creator with identifier \"{}\"." + ).format(identifier), + exc_info=add_traceback + ) + + raise CreatorsCreateFailed([ + prepare_failed_creator_operation_info( + identifier, label, exc_info, add_traceback + ) + ]) + def creator_removed_instance(self, instance): + """When creator removes instance context should be acknowledged. + + If creator removes instance conext should know about it to avoid + possible issues in the session. + + Args: + instance (CreatedInstance): Object of instance which was removed + from scene metadata. 
+ """ + self._instances_by_id.pop(instance.id, None) @contextmanager @@ -1237,42 +1357,72 @@ class CreateContext: self._instances_by_id = {} # Collect instances - failed_creators = [] + failed_info = [] for creator in self.creators.values(): + label = creator.label try: creator.collect_instances() except: - failed_creators.append(creator) + exc_info = sys.exc_info() + identifier = creator.identifier self.log.warning( - "Collection of instances for creator {} ({}) failed".format( - creator.label, creator.identifier), + ( + "Collection of instances for creator {} failed" + ).format(identifier), exc_info=True ) + failed_info.append( + prepare_failed_creator_operation_info( + identifier, label, exc_info + ) + ) - if failed_creators: - joined_creators = ", ".join( - [creator.label for creator in failed_creators] - ) - - raise CreatorsCollectionFailed( - "Failed to collect instances of creators {}".format(joined_creators) - ) + if failed_info: + raise CreatorsCollectionFailed(failed_info) def execute_autocreators(self): """Execute discovered AutoCreator plugins. Reset instances if any autocreator executed properly. """ + + failed_info = [] for identifier, creator in self.autocreators.items(): + label = creator.label + failed = False + add_traceback = False try: creator.create() - except Exception: - # TODO raise report exception if any crashed - msg = ( - "Failed to run AutoCreator with identifier \"{}\" ({})." - ).format(identifier, inspect.getfile(creator.__class__)) - self.log.warning(msg, exc_info=True) + except CreatorError: + failed = True + exc_info = sys.exc_info() + + # Use bare except because some hosts raise their exceptions that + # do not inherit from python's `BaseException` + except: + failed = True + add_traceback = True + exc_info = sys.exc_info() + + if not failed: + continue + + failed_info.append( + prepare_failed_creator_operation_info( + identifier, label, exc_info, add_traceback + ) + ) + + self.log.warning( + ( + "Failed to run AutoCreator with identifier \"{}\"." 
+ ).format(identifier), + exc_info=exc_info + ) + + if failed_info: + raise CreatorsCreateFailed(failed_info) def validate_instances_context(self, instances=None): """Validate 'asset' and 'task' instance context.""" @@ -1349,7 +1499,7 @@ class CreateContext: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) - failed_creators = [] + failed_info = [] for identifier, creator_instances in instances_by_identifier.items(): update_list = [] for instance in creator_instances: @@ -1358,26 +1508,29 @@ class CreateContext: update_list.append(UpdateData(instance, instance_changes)) creator = self.creators[identifier] - if update_list: - try: - creator.update_instances(update_list) + if not update_list: + continue - except: - failed_creators.append(creator) - self.log.warning( - "Instances update of creator {} ({}) failed".format( - creator.label, creator.identifier), - exc_info=True + label = creator.label + try: + creator.update_instances(update_list) + + except: + exc_info = sys.exc_info() + self.log.warning( + "Instances update of creator \"{}\" failed".format( + identifier), + exc_info=True + ) + + failed_info.append( + prepare_failed_creator_operation_info( + identifier, label, exc_info ) + ) - if failed_creators: - joined_creators = ", ".join( - [creator.label for creator in failed_creators] - ) - - raise CreatorsSaveFailed( - "Failed save changes of creators {}".format(joined_creators) - ) + if failed_info: + raise CreatorsSaveFailed(failed_info) def remove_instances(self, instances): """Remove instances from context. @@ -1392,30 +1545,27 @@ class CreateContext: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) - failed_creators = [] + failed_info = [] for identifier, creator_instances in instances_by_identifier.items(): creator = self.creators.get(identifier) + label = creator.label try: creator.remove_instances(creator_instances) except: - failed_creators.append(creator) + exc_info = sys.exc_info() self.log.warning( - "Instances removement of creator {} ({}) failed".format( - creator.label, creator.identifier), + "Instances removement of creator \"{}\" failed".format( + identifier), exc_info=True ) - - if failed_creators: - joined_creators = ", ".join( - [creator.label for creator in failed_creators] - ) - - raise CreatorsRemoveFailed( - "Failed to remove instances of creators {}".format( - joined_creators + failed_info.append( + prepare_failed_creator_operation_info( + identifier, label, exc_info + ) ) - ) + if failed_info: + raise CreatorsRemoveFailed(failed_info) def _get_publish_plugins_with_attr_for_family(self, family): """Publish plugin attributes for passed family. From 91fa300d997abe7f483965c1d342949d6460e61e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 10:57:45 +0200 Subject: [PATCH 1715/2550] implementaed separator widget --- openpype/tools/utils/widgets.py | 54 +++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index c8133b3359..ca65182124 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -448,3 +448,57 @@ class OptionDialog(QtWidgets.QDialog): def parse(self): return self._options.copy() + + +class SeparatorWidget(QtWidgets.QFrame): + """Prepared widget that can be used as separator with predefined color. + + Args: + size (int): Size of separator (width or height). + orientation (Qt.Horizontal|Qt.Vertical): Orintation of widget. 
+ parent (QtWidgets.QWidget): Parent widget. + """ + + def __init__(self, size=2, orientation=QtCore.Qt.Horizontal, parent=None): + super(SeparatorWidget, self).__init__(parent) + + self.setObjectName("Separator") + + maximum_width = self.maximumWidth() + maximum_height = self.maximumHeight() + + self._size = None + self._orientation = orientation + self._maximum_width = maximum_width + self._maximum_height = maximum_height + self.set_size(size) + + def set_size(self, size): + if size == self._size: + return + if self._orientation == QtCore.Qt.Vertical: + self.setMinimumWidth(size) + self.setMaximumWidth(size) + else: + self.setMinimumHeight(size) + self.setMaximumHeight(size) + + self._size = size + + def set_orientation(self, orientation): + if self._orientation == orientation: + return + + # Reset min/max sizes in opossite direction + if self._orientation == QtCore.Qt.Vertical: + self.setMinimumHeight(0) + self.setMaximumHeight(self._maximum_height) + else: + self.setMinimumWidth(0) + self.setMaximumWidth(self._maximum_width) + + self._orientation = orientation + + size = self._size + self._size = None + self.set_size(size) From 20dd53a830b6745a89b4203c681e8eea54b079cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 10:58:08 +0200 Subject: [PATCH 1716/2550] use separator widget in error dialog --- openpype/tools/utils/error_dialog.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/tools/utils/error_dialog.py b/openpype/tools/utils/error_dialog.py index f7b12bb69f..30cba56416 100644 --- a/openpype/tools/utils/error_dialog.py +++ b/openpype/tools/utils/error_dialog.py @@ -1,6 +1,6 @@ from Qt import QtWidgets, QtCore -from .widgets import ClickableFrame, ExpandBtn +from .widgets import ClickableFrame, ExpandBtn, SeparatorWidget def convert_text_for_html(text): @@ -139,12 +139,10 @@ class ErrorMessageBox(QtWidgets.QDialog): mime_data ) - def _create_line(self): - line = QtWidgets.QFrame(self) - line.setObjectName("Separator") - line.setMinimumHeight(2) - line.setMaximumHeight(2) - return line + def _create_line(self, parent=None): + if parent is None: + parent = self + return SeparatorWidget(2, parent=parent) def _create_traceback_widget(self, traceback_text, parent=None): if parent is None: From 9ae99eb4badd874b1181697179fc9747963a2c58 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 10:58:34 +0200 Subject: [PATCH 1717/2550] change function name 'convert_text_for_html' to 'escape_text_for_html' --- openpype/tools/utils/error_dialog.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/utils/error_dialog.py b/openpype/tools/utils/error_dialog.py index 30cba56416..d4ca91848c 100644 --- a/openpype/tools/utils/error_dialog.py +++ b/openpype/tools/utils/error_dialog.py @@ -3,7 +3,7 @@ from Qt import QtWidgets, QtCore from .widgets import ClickableFrame, ExpandBtn, SeparatorWidget -def convert_text_for_html(text): +def escape_text_for_html(text): return ( text .replace("<", "<") @@ -19,7 +19,7 @@ class TracebackWidget(QtWidgets.QWidget): # Modify text to match html # - add more replacements when needed - tb_text = convert_text_for_html(tb_text) + tb_text = escape_text_for_html(tb_text) expand_btn = ExpandBtn(self) clickable_frame = ClickableFrame(self) @@ -110,7 +110,7 @@ class ErrorMessageBox(QtWidgets.QDialog): @staticmethod def convert_text_for_html(text): - return convert_text_for_html(text) + return escape_text_for_html(text) def _create_top_widget(self, parent_widget): 
label_widget = QtWidgets.QLabel(parent_widget) From ad28ea8121dc62f7dbe708ef3f2bbbe21d111810 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 10:58:55 +0200 Subject: [PATCH 1718/2550] top widget is optional --- openpype/tools/utils/error_dialog.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/tools/utils/error_dialog.py b/openpype/tools/utils/error_dialog.py index d4ca91848c..9c9015c00f 100644 --- a/openpype/tools/utils/error_dialog.py +++ b/openpype/tools/utils/error_dialog.py @@ -91,11 +91,12 @@ class ErrorMessageBox(QtWidgets.QDialog): footer_layout.addWidget(ok_btn, 0) bottom_line = self._create_line() - body_layout = QtWidgets.QVBoxLayout(self) - body_layout.addWidget(top_widget, 0) - body_layout.addWidget(content_scroll, 1) - body_layout.addWidget(bottom_line, 0) - body_layout.addLayout(footer_layout, 0) + main_layout = QtWidgets.QVBoxLayout(self) + if top_widget is not None: + main_layout.addWidget(top_widget, 0) + main_layout.addWidget(content_scroll, 1) + main_layout.addWidget(bottom_line, 0) + main_layout.addWidget(footer_widget, 0) copy_report_btn.clicked.connect(self._on_copy_report) ok_btn.clicked.connect(self._on_ok_clicked) From 280e4fe0ca3d32ae506969a249034bfd720976f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:00:59 +0200 Subject: [PATCH 1719/2550] change separator of report copy --- openpype/tools/utils/error_dialog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/error_dialog.py b/openpype/tools/utils/error_dialog.py index 9c9015c00f..6973fda8c4 100644 --- a/openpype/tools/utils/error_dialog.py +++ b/openpype/tools/utils/error_dialog.py @@ -132,7 +132,8 @@ class ErrorMessageBox(QtWidgets.QDialog): self.close() def _on_copy_report(self): - report_text = (10 * "*").join(self._report_data) + sep = "\n{}\n".format(10 * "*") + report_text = sep.join(self._report_data) mime_data = QtCore.QMimeData() mime_data.setText(report_text) From 239d2f90bfd729f1a6fe58883b6fd992e9242de2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:01:41 +0200 Subject: [PATCH 1720/2550] footer layout is under widget and store the widget to self --- openpype/tools/utils/error_dialog.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/error_dialog.py b/openpype/tools/utils/error_dialog.py index 6973fda8c4..5fe49a53af 100644 --- a/openpype/tools/utils/error_dialog.py +++ b/openpype/tools/utils/error_dialog.py @@ -85,7 +85,9 @@ class ErrorMessageBox(QtWidgets.QDialog): copy_report_btn = QtWidgets.QPushButton("Copy report", self) ok_btn = QtWidgets.QPushButton("OK", self) - footer_layout = QtWidgets.QHBoxLayout() + footer_widget = QtWidgets.QWidget(self) + footer_layout = QtWidgets.QHBoxLayout(footer_widget) + footer_layout.setContentsMargins(0, 0, 0, 0) footer_layout.addWidget(copy_report_btn, 0) footer_layout.addStretch(1) footer_layout.addWidget(ok_btn, 0) @@ -107,6 +109,8 @@ class ErrorMessageBox(QtWidgets.QDialog): if not report_data: copy_report_btn.setVisible(False) + self._content_scroll = content_scroll + self._footer_widget = footer_widget self._report_data = report_data @staticmethod From f4b123d65a5a10e871dfdd2adf1fe6c92a2a6727 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:01:52 +0200 Subject: [PATCH 1721/2550] added separator widget to public widgets --- openpype/tools/utils/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/utils/__init__.py 
b/openpype/tools/utils/__init__.py index 5ccc1b40b3..019ea16391 100644 --- a/openpype/tools/utils/__init__.py +++ b/openpype/tools/utils/__init__.py @@ -7,6 +7,7 @@ from .widgets import ( ExpandBtn, PixmapLabel, IconButton, + SeparatorWidget, ) from .views import DeselectableTreeView from .error_dialog import ErrorMessageBox @@ -37,6 +38,7 @@ __all__ = ( "ExpandBtn", "PixmapLabel", "IconButton", + "SeparatorWidget", "DeselectableTreeView", From 36a9aa2da61d583e9ddd8209b92614f751a42bbf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:02:57 +0200 Subject: [PATCH 1722/2550] modified error dialog to show more information by creator --- openpype/tools/publisher/window.py | 153 +++++++++++++++++++++-------- 1 file changed, 114 insertions(+), 39 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 4f0b81fa85..1bbc0d0cf4 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -6,6 +6,7 @@ from openpype import ( style ) from openpype.tools.utils import ( + ErrorMessageBox, PlaceholderLineEdit, MessageOverlayObject, PixmapLabel, @@ -223,9 +224,11 @@ class PublisherWindow(QtWidgets.QDialog): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) - dialog_message_timer = QtCore.QTimer() - dialog_message_timer.setInterval(100) - dialog_message_timer.timeout.connect(self._on_dialog_message_timeout) + creators_dialog_message_timer = QtCore.QTimer() + creators_dialog_message_timer.setInterval(100) + creators_dialog_message_timer.timeout.connect( + self._on_creators_message_timeout + ) help_btn.clicked.connect(self._on_help_click) tabs_widget.tab_changed.connect(self._on_tab_change) @@ -273,6 +276,9 @@ class PublisherWindow(QtWidgets.QDialog): controller.event_system.add_callback( "instances.remove.failed", self._instance_remove_failed ) + controller.event_system.add_callback( + "instances.create.failed", self._instance_create_failed + ) # Store extra header widget for TrayPublisher # - can be used to add additional widgets to header between context @@ -319,8 +325,8 @@ class PublisherWindow(QtWidgets.QDialog): self._restart_timer = None self._publish_frame_visible = None - self._dialog_messages_to_show = collections.deque() - self._dialog_message_timer = dialog_message_timer + self._creators_messages_to_show = collections.deque() + self._creators_dialog_message_timer = creators_dialog_message_timer self._set_publish_visibility(False) @@ -598,58 +604,127 @@ class PublisherWindow(QtWidgets.QDialog): 0, window_size.height() - height ) - def add_message_dialog(self, message, title): - self._dialog_messages_to_show.append((message, title)) - self._dialog_message_timer.start() + def add_message_dialog(self, title, failed_info): + self._creators_messages_to_show.append((title, failed_info)) + self._creators_dialog_message_timer.start() - def _on_dialog_message_timeout(self): - if not self._dialog_messages_to_show: - self._dialog_message_timer.stop() + def _on_creators_message_timeout(self): + if not self._creators_messages_to_show: + self._creators_dialog_message_timer.stop() return - item = self._dialog_messages_to_show.popleft() - message, title = item - dialog = MessageDialog(message, title, self) + item = self._creators_messages_to_show.popleft() + title, failed_info = item + dialog = CreatorsErrorMessageBox(title, failed_info, self) dialog.exec_() dialog.deleteLater() def _instance_collection_failed(self, event): - self.add_message_dialog(event["message"], 
event["title"]) + self.add_message_dialog(event["title"], event["failed_info"]) def _instance_save_failed(self, event): - self.add_message_dialog(event["message"], event["title"]) + self.add_message_dialog(event["title"], event["failed_info"]) def _instance_remove_failed(self, event): - self.add_message_dialog(event["message"], event["title"]) + self.add_message_dialog(event["title"], event["failed_info"]) + + def _instance_create_failed(self, event): + self.add_message_dialog(event["title"], event["failed_info"]) -class MessageDialog(QtWidgets.QDialog): - def __init__(self, message, title, parent=None): - super(MessageDialog, self).__init__(parent) +class CreatorsErrorMessageBox(ErrorMessageBox): + def __init__(self, error_title, failed_info, parent): + self._failed_info = failed_info + self._info_with_id = [ + {"id": idx, "info": info} + for idx, info in enumerate(failed_info) + ] + self._widgets_by_id = {} + self._tabs_widget = None + self._stack_layout = None - self.setWindowTitle(title or "Something happend") + super(CreatorsErrorMessageBox, self).__init__(error_title, parent) - message_widget = QtWidgets.QLabel(message, self) - message_widget.setWordWrap(True) + layout = self.layout() + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(0) - btns_widget = QtWidgets.QWidget(self) - submit_btn = QtWidgets.QPushButton("OK", btns_widget) + footer_layout = self._footer_widget.layout() + footer_layout.setContentsMargins(5, 5, 5, 5) - btns_layout = QtWidgets.QHBoxLayout(btns_widget) - btns_layout.setContentsMargins(0, 0, 0, 0) - btns_layout.addStretch(1) - btns_layout.addWidget(submit_btn) + def _create_top_widget(self, parent_widget): + return None - layout = QtWidgets.QVBoxLayout(self) - layout.addWidget(message_widget, 0) - layout.addStretch(1) - layout.addWidget(btns_widget, 0) + def _get_report_data(self): + output = [] + for info in self._failed_info: + creator_label = info["creator_label"] + creator_identifier = info["creator_identifier"] + report_message = "Creator:" + if creator_label: + report_message += " {} ({})".format( + creator_label, creator_identifier) + else: + report_message += " {}".format(creator_identifier) - submit_btn.clicked.connect(self._on_submit_click) + report_message += "\n\nError: {}".format(info["message"]) + formatted_traceback = info["traceback"] + if formatted_traceback: + report_message += "\n\n{}".format(formatted_traceback) + output.append(report_message) + return output - def _on_submit_click(self): - self.close() + def _create_content(self, content_layout): + tabs_widget = PublisherTabsWidget(self) - def showEvent(self, event): - super(MessageDialog, self).showEvent(event) - self.resize(400, 200) + stack_widget = QtWidgets.QFrame(self._content_widget) + stack_layout = QtWidgets.QStackedLayout(stack_widget) + + first = True + for item in self._info_with_id: + item_id = item["id"] + info = item["info"] + message = info["message"] + formatted_traceback = info["traceback"] + creator_label = info["creator_label"] + creator_identifier = info["creator_identifier"] + if not creator_label: + creator_label = creator_identifier + + msg_widget = QtWidgets.QWidget(stack_widget) + msg_layout = QtWidgets.QVBoxLayout(msg_widget) + + exc_msg_template = "{}" + message_label_widget = QtWidgets.QLabel(msg_widget) + message_label_widget.setText( + exc_msg_template.format(self.convert_text_for_html(message)) + ) + msg_layout.addWidget(message_label_widget, 0) + + if formatted_traceback: + line_widget = self._create_line(msg_widget) + tb_widget = 
self._create_traceback_widget(formatted_traceback) + msg_layout.addWidget(line_widget, 0) + msg_layout.addWidget(tb_widget, 0) + + msg_layout.addStretch(1) + + tabs_widget.add_tab(creator_label, item_id) + stack_layout.addWidget(msg_widget) + if first: + first = False + stack_layout.setCurrentWidget(msg_widget) + + self._widgets_by_id[item_id] = msg_widget + + content_layout.addWidget(tabs_widget, 0) + content_layout.addWidget(stack_widget, 1) + + tabs_widget.tab_changed.connect(self._on_tab_change) + + self._tabs_widget = tabs_widget + self._stack_layout = stack_layout + + def _on_tab_change(self, identifier): + widget = self._widgets_by_id[identifier] + self._stack_layout.setCurrentWidget(widget) From 098903260c79be64c58a07eb7616e24c04c41de9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:03:15 +0200 Subject: [PATCH 1723/2550] controller is triggering event on creator operation fail --- openpype/tools/publisher/control.py | 45 +++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 9 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index d402ab2434..280d2cf0a0 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1107,6 +1107,8 @@ class AbstractPublisherController(object): options (Dict[str, Any]): Data from pre-create attributes. """ + pass + def save_changes(self): """Save changes in create context.""" @@ -1716,14 +1718,24 @@ class PublisherController(BasePublisherController): with self._create_context.bulk_instances_collection(): try: self._create_context.reset_instances() - self._create_context.execute_autocreators() - except CreatorsOperationFailed as exc: self._emit_event( "instances.collection.failed", { "title": "Instance collection failed", - "message": str(exc) + "failed_info": exc.failed_info + } + ) + + try: + self._create_context.execute_autocreators() + + except CreatorsOperationFailed as exc: + self._emit_event( + "instances.create.failed", + { + "title": "AutoCreation failed", + "failed_info": exc.failed_info } ) @@ -1854,10 +1866,24 @@ class PublisherController(BasePublisherController): self, creator_identifier, subset_name, instance_data, options ): """Trigger creation and refresh of instances in UI.""" - creator = self._creators[creator_identifier] - creator.create(subset_name, instance_data, options) + + success = True + try: + self._create_context.create( + creator_identifier, subset_name, instance_data, options + ) + except CreatorsOperationFailed as exc: + success = False + self._emit_event( + "instances.create.failed", + { + "title": "Creation failed", + "failed_info": exc.failed_info + } + ) self._on_create_instance_change() + return success def save_changes(self): """Save changes happened during creation.""" @@ -1866,12 +1892,13 @@ class PublisherController(BasePublisherController): try: self._create_context.save_changes() + except CreatorsOperationFailed as exc: self._emit_event( "instances.save.failed", { - "title": "Save failed", - "message": str(exc) + "title": "Instances save failed", + "failed_info": exc.failed_info } ) @@ -1902,8 +1929,8 @@ class PublisherController(BasePublisherController): self._emit_event( "instances.remove.failed", { - "title": "Remove failed", - "message": str(exc) + "title": "Instance removement failed", + "failed_info": exc.failed_info } ) From accab0ca5f17996ff0db8e178dae1782eec31c4a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:03:30 +0200 Subject: [PATCH 1724/2550] create widget does not 
handle failed creation on it's own --- .../tools/publisher/widgets/create_widget.py | 118 ++---------------- 1 file changed, 7 insertions(+), 111 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 10cf39675e..910b2adfc7 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -9,7 +9,6 @@ from openpype.pipeline.create import ( SUBSET_NAME_ALLOWED_SYMBOLS, TaskNotSetError, ) -from openpype.tools.utils import ErrorMessageBox from .widgets import ( IconValuePixmapLabel, @@ -35,79 +34,6 @@ class VariantInputsWidget(QtWidgets.QWidget): self.resized.emit() -class CreateErrorMessageBox(ErrorMessageBox): - def __init__( - self, - creator_label, - subset_name, - asset_name, - exc_msg, - formatted_traceback, - parent - ): - self._creator_label = creator_label - self._subset_name = subset_name - self._asset_name = asset_name - self._exc_msg = exc_msg - self._formatted_traceback = formatted_traceback - super(CreateErrorMessageBox, self).__init__("Creation failed", parent) - - def _create_top_widget(self, parent_widget): - label_widget = QtWidgets.QLabel(parent_widget) - label_widget.setText( - "Failed to create" - ) - return label_widget - - def _get_report_data(self): - report_message = ( - "{creator}: Failed to create Subset: \"{subset}\"" - " in Asset: \"{asset}\"" - "\n\nError: {message}" - ).format( - creator=self._creator_label, - subset=self._subset_name, - asset=self._asset_name, - message=self._exc_msg, - ) - if self._formatted_traceback: - report_message += "\n\n{}".format(self._formatted_traceback) - return [report_message] - - def _create_content(self, content_layout): - item_name_template = ( - "Creator: {}
    " - "Subset: {}
    " - "Asset: {}
    " - ) - exc_msg_template = "{}" - - line = self._create_line() - content_layout.addWidget(line) - - item_name_widget = QtWidgets.QLabel(self) - item_name_widget.setText( - item_name_template.format( - self._creator_label, self._subset_name, self._asset_name - ) - ) - content_layout.addWidget(item_name_widget) - - message_label_widget = QtWidgets.QLabel(self) - message_label_widget.setText( - exc_msg_template.format(self.convert_text_for_html(self._exc_msg)) - ) - content_layout.addWidget(message_label_widget) - - if self._formatted_traceback: - line_widget = self._create_line() - tb_widget = self._create_traceback_widget( - self._formatted_traceback - ) - content_layout.addWidget(line_widget) - content_layout.addWidget(tb_widget) - - # TODO add creator identifier/label to details class CreatorShortDescWidget(QtWidgets.QWidget): def __init__(self, parent=None): @@ -178,8 +104,6 @@ class CreateWidget(QtWidgets.QWidget): self._prereq_available = False - self._message_dialog = None - name_pattern = "^[{}]*$".format(SUBSET_NAME_ALLOWED_SYMBOLS) self._name_pattern = name_pattern self._compiled_name_pattern = re.compile(name_pattern) @@ -769,7 +693,6 @@ class CreateWidget(QtWidgets.QWidget): return index = indexes[0] - creator_label = index.data(QtCore.Qt.DisplayRole) creator_identifier = index.data(CREATOR_IDENTIFIER_ROLE) family = index.data(FAMILY_ROLE) variant = self.variant_input.text() @@ -792,40 +715,13 @@ class CreateWidget(QtWidgets.QWidget): "family": family } - error_msg = None - formatted_traceback = None - try: - self._controller.create( - creator_identifier, - subset_name, - instance_data, - pre_create_data - ) + success = self._controller.create( + creator_identifier, + subset_name, + instance_data, + pre_create_data + ) - except CreatorError as exc: - error_msg = str(exc) - - # Use bare except because some hosts raise their exceptions that - # do not inherit from python's `BaseException` - except: - exc_type, exc_value, exc_traceback = sys.exc_info() - formatted_traceback = "".join(traceback.format_exception( - exc_type, exc_value, exc_traceback - )) - error_msg = str(exc_value) - - if error_msg is None: + if success: self._set_creator(self._selected_creator) self._controller.emit_card_message("Creation finished...") - else: - box = CreateErrorMessageBox( - creator_label, - subset_name, - asset_name, - error_msg, - formatted_traceback, - parent=self - ) - box.show() - # Store dialog so is not garbage collected before is shown - self._message_dialog = box From 21e98faef021b83fbd961a63d6398795b9db119d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 21 Oct 2022 11:07:04 +0200 Subject: [PATCH 1725/2550] :sparkles: cache collected instances --- openpype/hosts/houdini/api/pipeline.py | 15 +++++++-------- openpype/hosts/houdini/api/plugin.py | 9 +++++++-- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index d64479fc14..f15cd6f2d5 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -435,10 +435,13 @@ def list_instances(creator_id=None): """ instance_signature = { - "id": "pyblish.avalon.instance", - "identifier": creator_id + "id": "pyblish.avalon.instance" } - return lib.lsattrs(instance_signature) + + return [ + i for i in lib.lsattrs(instance_signature) + if i.paramEval("creator_identifier") == creator_id + ] def remove_instance(instance): @@ -448,12 +451,8 @@ def remove_instance(instance): because it might 
contain valuable data for artist. """ - nodes = instance.get("members") - if not nodes: - return - # Assume instance node is first node - instance_node = hou.node(nodes[0]) + instance_node = hou.node(instance.data.get("instance_node")) to_delete = None for parameter in instance_node.spareParms(): if parameter.name() == "id" and \ diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 897696533f..fa56b2cb8d 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -133,7 +133,7 @@ class HoudiniCreator(NewCreator): # wondering if we'll ever need more than one member here # in Houdini - instance_data["members"] = [instance_node.path()] + # instance_data["members"] = [instance_node.path()] instance_data["instance_node"] = instance_node.path() instance = CreatedInstance( @@ -167,7 +167,12 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - for instance in list_instances(creator_id=self.identifier): + instances = [i for i in self.collection_shared_data.get( + "houdini_cached_instances", []) if i.paramEval("creator_identifier") == self.identifier] + if not instances: + print("not using cached instances") + instances = list_instances(creator_id=self.identifier) + for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self ) From 19d237323d628bd4e656bf379be30ef3f1df6be1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 21 Oct 2022 11:07:23 +0200 Subject: [PATCH 1726/2550] :bug: fix multiple selection --- .../hosts/houdini/plugins/create/create_alembic_camera.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 183ab28b26..481c6bea77 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin -from openpype.pipeline import CreatedInstance +from openpype.pipeline import CreatedInstance, CreatorError class CreateAlembicCamera(plugin.HoudiniCreator): @@ -30,7 +30,9 @@ class CreateAlembicCamera(plugin.HoudiniCreator): } if self.selected_nodes: - path = self.selected_nodes.path() + if len(self.selected_nodes) > 1: + raise CreatorError("More than one item selected.") + path = self.selected_nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) From bda1bb3f292c05004546870ddd39d4c4df8bd384 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:46:02 +0200 Subject: [PATCH 1727/2550] unify the error handling in create context --- openpype/pipeline/create/context.py | 123 +++++++++++++++++----------- 1 file changed, 76 insertions(+), 47 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index dfa9049601..2dfdfc142f 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -123,8 +123,6 @@ def prepare_failed_creator_operation_info( ): formatted_traceback = None exc_type, exc_value, exc_traceback = exc_info - error_msg = str(exc_value) - if add_traceback: formatted_traceback = 
"".join(traceback.format_exception( exc_type, exc_value, exc_traceback @@ -133,7 +131,7 @@ def prepare_failed_creator_operation_info( return { "creator_identifier": identifier, "creator_label": label, - "message": error_msg, + "message": str(exc_value), "traceback": formatted_traceback } @@ -1274,10 +1272,12 @@ class CreateContext: **kwargs (Dict[Any, Any]): Keyword argument for create method. """ + error_message = "Failed to run Creator with identifier \"{}\". {}" creator = self.creators.get(identifier) label = getattr(creator, "label", None) failed = False add_traceback = False + exc_info = None try: # Fake CreatorError (Could be maybe specific exception?) if creator is None: @@ -1290,27 +1290,23 @@ class CreateContext: except CreatorError: failed = True exc_info = sys.exc_info() + self.log.warning(error_message.format(identifier, exc_info[1])) except: failed = True add_traceback = True exc_info = sys.exc_info() - - if not failed: - return - - self.log.warning( - ( - "Failed to run Creator with identifier \"{}\"." - ).format(identifier), - exc_info=add_traceback - ) - - raise CreatorsCreateFailed([ - prepare_failed_creator_operation_info( - identifier, label, exc_info, add_traceback + self.log.warning( + error_message.format(identifier, ""), + exc_info=True ) - ]) + + if failed: + raise CreatorsCreateFailed([ + prepare_failed_creator_operation_info( + identifier, label, exc_info, add_traceback + ) + ]) def creator_removed_instance(self, instance): """When creator removes instance context should be acknowledged. @@ -1357,23 +1353,35 @@ class CreateContext: self._instances_by_id = {} # Collect instances + error_message = "Collection of instances for creator {} failed. {}" failed_info = [] for creator in self.creators.values(): label = creator.label + identifier = creator.identifier + failed = False + add_traceback = False + exc_info = None try: creator.collect_instances() - except: + + except CreatorError: + failed = True + exc_info = sys.exc_info() + self.log.warning(error_message.format(identifier, exc_info[1])) + + except: + failed = True + add_traceback = True exc_info = sys.exc_info() - identifier = creator.identifier self.log.warning( - ( - "Collection of instances for creator {} failed" - ).format(identifier), + error_message.format(identifier, ""), exc_info=True ) + + if failed: failed_info.append( prepare_failed_creator_operation_info( - identifier, label, exc_info + identifier, label, exc_info, add_traceback ) ) @@ -1386,6 +1394,7 @@ class CreateContext: Reset instances if any autocreator executed properly. """ + error_message = "Failed to run AutoCreator with identifier \"{}\". {}" failed_info = [] for identifier, creator in self.autocreators.items(): label = creator.label @@ -1397,6 +1406,7 @@ class CreateContext: except CreatorError: failed = True exc_info = sys.exc_info() + self.log.warning(error_message.format(identifier, exc_info[1])) # Use bare except because some hosts raise their exceptions that # do not inherit from python's `BaseException` @@ -1404,22 +1414,17 @@ class CreateContext: failed = True add_traceback = True exc_info = sys.exc_info() - - if not failed: - continue - - failed_info.append( - prepare_failed_creator_operation_info( - identifier, label, exc_info, add_traceback + self.log.warning( + error_message.format(identifier, ""), + exc_info=True ) - ) - self.log.warning( - ( - "Failed to run AutoCreator with identifier \"{}\"." 
- ).format(identifier), - exc_info=exc_info - ) + if failed: + failed_info.append( + prepare_failed_creator_operation_info( + identifier, label, exc_info, add_traceback + ) + ) if failed_info: raise CreatorsCreateFailed(failed_info) @@ -1499,6 +1504,7 @@ class CreateContext: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) + error_message = "Instances update of creator \"{}\" failed. {}" failed_info = [] for identifier, creator_instances in instances_by_identifier.items(): update_list = [] @@ -1512,20 +1518,28 @@ class CreateContext: continue label = creator.label + failed = False + add_traceback = False + exc_info = None try: creator.update_instances(update_list) + except CreatorError: + failed = True + exc_info = sys.exc_info() + self.log.warning(error_message.format(identifier, exc_info[1])) + except: + failed = True + add_traceback = True exc_info = sys.exc_info() self.log.warning( - "Instances update of creator \"{}\" failed".format( - identifier), - exc_info=True - ) + error_message.format(identifier, ""), exc_info=True) + if failed: failed_info.append( prepare_failed_creator_operation_info( - identifier, label, exc_info + identifier, label, exc_info, add_traceback ) ) @@ -1545,22 +1559,37 @@ class CreateContext: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) + error_message = "Instances removement of creator \"{}\" failed. {}" failed_info = [] for identifier, creator_instances in instances_by_identifier.items(): creator = self.creators.get(identifier) label = creator.label + failed = False + add_traceback = False + exc_info = None try: creator.remove_instances(creator_instances) - except: + + except CreatorError: + failed = True exc_info = sys.exc_info() self.log.warning( - "Instances removement of creator \"{}\" failed".format( - identifier), + error_message.format(identifier, exc_info[1]) + ) + + except: + failed = True + add_traceback = True + exc_info = sys.exc_info() + self.log.warning( + error_message.format(identifier, ""), exc_info=True ) + + if failed: failed_info.append( prepare_failed_creator_operation_info( - identifier, label, exc_info + identifier, label, exc_info, add_traceback ) ) From 1f2e54c2c385af56c5d61d2976e8db434d9cb4ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 11:46:21 +0200 Subject: [PATCH 1728/2550] fix tab switch --- openpype/tools/publisher/window.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 1bbc0d0cf4..b6bd506c18 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -636,7 +636,8 @@ class CreatorsErrorMessageBox(ErrorMessageBox): def __init__(self, error_title, failed_info, parent): self._failed_info = failed_info self._info_with_id = [ - {"id": idx, "info": info} + # Id must be string when used in tab widget + {"id": str(idx), "info": info} for idx, info in enumerate(failed_info) ] self._widgets_by_id = {} @@ -725,6 +726,6 @@ class CreatorsErrorMessageBox(ErrorMessageBox): self._tabs_widget = tabs_widget self._stack_layout = stack_layout - def _on_tab_change(self, identifier): + def _on_tab_change(self, old_identifier, identifier): widget = self._widgets_by_id[identifier] self._stack_layout.setCurrentWidget(widget) From b54333086be343fc1524861f69bdd050e81caa8a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 12:41:13 +0200 Subject: [PATCH 1729/2550] :bug: fix wrong path in 
loader --- .../hosts/houdini/plugins/load/load_ass.py | 74 +++++++------------ 1 file changed, 28 insertions(+), 46 deletions(-) diff --git a/openpype/hosts/houdini/plugins/load/load_ass.py b/openpype/hosts/houdini/plugins/load/load_ass.py index 0144bbaefd..0e23259079 100644 --- a/openpype/hosts/houdini/plugins/load/load_ass.py +++ b/openpype/hosts/houdini/plugins/load/load_ass.py @@ -32,7 +32,12 @@ class AssLoader(load.LoaderPlugin): # Create a new geo node procedural = obj.createNode("arnold::procedural", node_name=node_name) - procedural.setParms({"ar_filename": self.get_path(self.fname)}) + + procedural.setParms( + { + "ar_filename": self.format_path( + self.fname, context["representation"]) + }) nodes = [procedural] self[:] = nodes @@ -46,57 +51,14 @@ class AssLoader(load.LoaderPlugin): suffix="", ) - def get_path(self, path): - - # Find all frames in the folder - ext = ".ass.gz" if path.endswith(".ass.gz") else ".ass" - folder = os.path.dirname(path) - frames = [f for f in os.listdir(folder) if f.endswith(ext)] - - # Get the collection of frames to detect frame padding - patterns = [clique.PATTERNS["frames"]] - collections, remainder = clique.assemble(frames, - minimum_items=1, - patterns=patterns) - self.log.debug("Detected collections: {}".format(collections)) - self.log.debug("Detected remainder: {}".format(remainder)) - - if not collections and remainder: - if len(remainder) != 1: - raise ValueError("Frames not correctly detected " - "in: {}".format(remainder)) - - # A single frame without frame range detected - filepath = remainder[0] - return os.path.normpath(filepath).replace("\\", "/") - - # Frames detected with a valid "frame" number pattern - # Then we don't want to have any remainder files found - assert len(collections) == 1 and not remainder - collection = collections[0] - - num_frames = len(collection.indexes) - if num_frames == 1: - # Return the input path without dynamic $F variable - result = path - else: - # More than a single frame detected - use $F{padding} - fname = "{}$F{}{}".format(collection.head, - collection.padding, - collection.tail) - result = os.path.join(folder, fname) - - # Format file name, Houdini only wants forward slashes - return os.path.normpath(result).replace("\\", "/") - def update(self, container, representation): # Update the file path file_path = get_representation_path(representation) - file_path = file_path.replace("\\", "/") + file_path = self.format_path(file_path, representation) procedural = container["node"] - procedural.setParms({"ar_filename": self.get_path(file_path)}) + procedural.setParms({"ar_filename": file_path}) # Update attribute procedural.setParms({"representation": str(representation["_id"])}) @@ -105,3 +67,23 @@ class AssLoader(load.LoaderPlugin): node = container["node"] node.destroy() + + @staticmethod + def format_path(path, representation): + """Format file path correctly for single bgeo or bgeo sequence.""" + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) + + is_sequence = bool(representation["context"].get("frame")) + # The path is either a single file or sequence in a folder. 
+ if not is_sequence: + filename = path + else: + filename = re.sub(r"(.*)\.(\d+)\.(ass.*)", "\\1.$F4.\\3", path) + + filename = os.path.join(path, filename) + + filename = os.path.normpath(filename) + filename = filename.replace("\\", "/") + + return filename \ No newline at end of file From 2c3e66c18a23f345ea877d16b104c4ae714cfe2a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 12:45:56 +0200 Subject: [PATCH 1730/2550] :dog: fix hound --- openpype/hosts/houdini/plugins/load/load_ass.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/load/load_ass.py b/openpype/hosts/houdini/plugins/load/load_ass.py index 0e23259079..57e2d34d7c 100644 --- a/openpype/hosts/houdini/plugins/load/load_ass.py +++ b/openpype/hosts/houdini/plugins/load/load_ass.py @@ -1,6 +1,6 @@ import os -import clique +import re from openpype.pipeline import ( load, get_representation_path, @@ -86,4 +86,4 @@ class AssLoader(load.LoaderPlugin): filename = os.path.normpath(filename) filename = filename.replace("\\", "/") - return filename \ No newline at end of file + return filename From 250cdd32cec889336565a831d247471894c9a941 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 14:13:36 +0200 Subject: [PATCH 1731/2550] import lib functions from 'openpype.lib' --- .../plugins/publish/extract_thumbnail.py | 4 ++-- openpype/plugins/publish/collect_scene_version.py | 5 +++-- openpype/plugins/publish/extract_otio_audio_tracks.py | 11 ++++------- openpype/plugins/publish/extract_review.py | 7 +++---- openpype/plugins/publish/extract_scanline_exr.py | 8 ++++---- .../tools/settings/local_settings/mongo_widget.py | 2 +- 6 files changed, 17 insertions(+), 20 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py index 3ee2f70809..1e894f9dbb 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py @@ -1,11 +1,11 @@ import os import tempfile import pyblish.api -import openpype.api from openpype.lib import ( get_ffmpeg_tool_path, get_ffprobe_streams, path_to_subprocess_arg, + run_subprocess, ) @@ -96,7 +96,7 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin): # run subprocess self.log.debug("Executing: {}".format(subprocess_jpeg)) - openpype.api.run_subprocess( + run_subprocess( subprocess_jpeg, shell=True, logger=self.log ) diff --git a/openpype/plugins/publish/collect_scene_version.py b/openpype/plugins/publish/collect_scene_version.py index 917647c61a..a7cea6093a 100644 --- a/openpype/plugins/publish/collect_scene_version.py +++ b/openpype/plugins/publish/collect_scene_version.py @@ -1,6 +1,7 @@ import os import pyblish.api -import openpype.api as pype + +from openpype.lib import get_version_from_path class CollectSceneVersion(pyblish.api.ContextPlugin): @@ -46,7 +47,7 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): if '' in filename: return - version = pype.get_version_from_path(filename) + version = get_version_from_path(filename) assert version, "Cannot determine version" rootVersion = int(version) diff --git a/openpype/plugins/publish/extract_otio_audio_tracks.py b/openpype/plugins/publish/extract_otio_audio_tracks.py index ed30a2f0f5..e19b7eeb13 100644 --- a/openpype/plugins/publish/extract_otio_audio_tracks.py +++ b/openpype/plugins/publish/extract_otio_audio_tracks.py @@ -1,9 +1,8 @@ import os import pyblish 
-import openpype.api from openpype.lib import ( get_ffmpeg_tool_path, - path_to_subprocess_arg + run_subprocess ) import tempfile import opentimelineio as otio @@ -102,9 +101,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): # run subprocess self.log.debug("Executing: {}".format(" ".join(cmd))) - openpype.api.run_subprocess( - cmd, logger=self.log - ) + run_subprocess(cmd, logger=self.log) else: audio_fpath = recycling_file.pop() @@ -225,7 +222,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): # run subprocess self.log.debug("Executing: {}".format(" ".join(cmd))) - openpype.api.run_subprocess( + run_subprocess( cmd, logger=self.log ) @@ -308,7 +305,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): # run subprocess self.log.debug("Executing: {}".format(args)) - openpype.api.run_subprocess(args, logger=self.log) + run_subprocess(args, logger=self.log) os.remove(filters_tmp_filepath) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 27117510b2..1e46b47c5f 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -10,12 +10,13 @@ import six import clique import pyblish.api -import openpype.api + from openpype.lib import ( get_ffmpeg_tool_path, get_ffprobe_streams, path_to_subprocess_arg, + run_subprocess, should_convert_for_ffmpeg, convert_input_paths_for_ffmpeg, @@ -350,9 +351,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # run subprocess self.log.debug("Executing: {}".format(subprcs_cmd)) - openpype.api.run_subprocess( - subprcs_cmd, shell=True, logger=self.log - ) + run_subprocess(subprcs_cmd, shell=True, logger=self.log) # delete files added to fill gaps if files_to_clean: diff --git a/openpype/plugins/publish/extract_scanline_exr.py b/openpype/plugins/publish/extract_scanline_exr.py index a7f7de5188..0e4c0ca65f 100644 --- a/openpype/plugins/publish/extract_scanline_exr.py +++ b/openpype/plugins/publish/extract_scanline_exr.py @@ -4,8 +4,8 @@ import os import shutil import pyblish.api -import openpype.api -import openpype.lib + +from openpype.lib import run_subprocess, get_oiio_tools_path class ExtractScanlineExr(pyblish.api.InstancePlugin): @@ -45,7 +45,7 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin): stagingdir = os.path.normpath(repre.get("stagingDir")) - oiio_tool_path = openpype.lib.get_oiio_tools_path() + oiio_tool_path = get_oiio_tools_path() if not os.path.exists(oiio_tool_path): self.log.error( "OIIO tool not found in {}".format(oiio_tool_path)) @@ -65,7 +65,7 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin): subprocess_exr = " ".join(oiio_cmd) self.log.info(f"running: {subprocess_exr}") - openpype.api.run_subprocess(subprocess_exr, logger=self.log) + run_subprocess(subprocess_exr, logger=self.log) # raise error if there is no ouptput if not os.path.exists(os.path.join(stagingdir, original_name)): diff --git a/openpype/tools/settings/local_settings/mongo_widget.py b/openpype/tools/settings/local_settings/mongo_widget.py index 3d3dbd0a5d..600ab79242 100644 --- a/openpype/tools/settings/local_settings/mongo_widget.py +++ b/openpype/tools/settings/local_settings/mongo_widget.py @@ -5,7 +5,7 @@ import traceback from Qt import QtWidgets from pymongo.errors import ServerSelectionTimeoutError -from openpype.api import change_openpype_mongo_url +from openpype.lib import change_openpype_mongo_url from openpype.tools.utils import PlaceholderLineEdit From 694bc49305d015ee0e773895541e3850695dce2f Mon Sep 17 00:00:00 2001 From: 
Ondrej Samohel Date: Fri, 21 Oct 2022 14:16:16 +0200 Subject: [PATCH 1732/2550] :bug: fix caching --- openpype/hosts/houdini/api/plugin.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index fa56b2cb8d..679f7b0d0f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -167,11 +167,13 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - instances = [i for i in self.collection_shared_data.get( - "houdini_cached_instances", []) if i.paramEval("creator_identifier") == self.identifier] + cached_instances = self.collection_shared_data.get( + "houdini_cached_instances") + instances = cached_instances.get(self.identifier) if not instances: print("not using cached instances") instances = list_instances(creator_id=self.identifier) + self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self From d29cd8edcdd678370b18ae57815942c82d7e6611 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 21 Oct 2022 14:32:16 +0200 Subject: [PATCH 1733/2550] workflows: adding milestone creator and assigner --- .github/workflows/milestone_assign.yml | 28 ++++++++++++ .github/workflows/milestone_create.yml | 62 ++++++++++++++++++++++++++ 2 files changed, 90 insertions(+) create mode 100644 .github/workflows/milestone_assign.yml create mode 100644 .github/workflows/milestone_create.yml diff --git a/.github/workflows/milestone_assign.yml b/.github/workflows/milestone_assign.yml new file mode 100644 index 0000000000..b41886816b --- /dev/null +++ b/.github/workflows/milestone_assign.yml @@ -0,0 +1,28 @@ +name: Milestone - assign to PRs + +on: + pull_request_target: + types: [opened, reopened, edited] + +jobs: + run_if_release: + if: startsWith(github.base_ref, 'release/') + runs-on: ubuntu-latest + steps: + - name: 'Assign Milestone [next-minor]' + if: github.event.pull_request.milestone == null + uses: zoispag/action-assign-milestone@v1 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + milestone: 'next-minor' + + run_if_develop: + if: ${{ github.base_ref == 'develop' }} + runs-on: ubuntu-latest + steps: + - name: 'Assign Milestone [next-patch]' + if: github.event.pull_request.milestone == null + uses: zoispag/action-assign-milestone@v1 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + milestone: 'next-patch' \ No newline at end of file diff --git a/.github/workflows/milestone_create.yml b/.github/workflows/milestone_create.yml new file mode 100644 index 0000000000..b56ca81dc1 --- /dev/null +++ b/.github/workflows/milestone_create.yml @@ -0,0 +1,62 @@ +name: Milestone - create default + +on: + milestone: + types: [closed, edited] + +jobs: + generate-next-patch: + runs-on: ubuntu-latest + steps: + - name: 'Get Milestones' + uses: "WyriHaximus/github-action-get-milestones@master" + id: milestones + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + + - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number') + id: querymilestone + env: + MILESTONES: ${{ steps.milestones.outputs.milestones }} + MILESTONE: "next-patch" + + - name: Read output + run: | + echo "${{ steps.querymilestone.outputs.number }}" + + - name: 'Create `next-patch` milestone' + if: steps.querymilestone.outputs.number 
== '' + id: createmilestone + uses: "WyriHaximus/github-action-create-milestone@v1" + with: + title: 'next-patch' + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + + generate-next-minor: + runs-on: ubuntu-latest + steps: + - name: 'Get Milestones' + uses: "WyriHaximus/github-action-get-milestones@master" + id: milestones + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + + - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number') + id: querymilestone + env: + MILESTONES: ${{ steps.milestones.outputs.milestones }} + MILESTONE: "next-minor" + + - name: Read output + run: | + echo "${{ steps.querymilestone.outputs.number }}" + + - name: 'Create `next-minor` milestone' + if: steps.querymilestone.outputs.number == '' + id: createmilestone + uses: "WyriHaximus/github-action-create-milestone@v1" + with: + title: 'next-minor' + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file From 6ee68861a8bfa06f346c6f899bc26b5f8d29e670 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 14:40:33 +0200 Subject: [PATCH 1734/2550] :bug: fix missing keys --- openpype/hosts/houdini/api/plugin.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 679f7b0d0f..2a16b08908 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -168,11 +168,14 @@ class HoudiniCreator(NewCreator): def collect_instances(self): cached_instances = self.collection_shared_data.get( - "houdini_cached_instances") + "houdini_cached_instances", {}) instances = cached_instances.get(self.identifier) if not instances: - print("not using cached instances") instances = list_instances(creator_id=self.identifier) + if not self.collection_shared_data.get( + "houdini_cached_instances"): + self.collection_shared_data["houdini_cached_instances"] = {} + self.log.info("Caching instances for {}".format(self.identifier)) self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 for instance in instances: created_instance = CreatedInstance.from_existing( From 260573506b56d83d73ea785b335aa9134d652d96 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 14:56:09 +0200 Subject: [PATCH 1735/2550] Created simple item representing conversion requirement --- openpype/pipeline/create/context.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 3e09ff287d..918bc66cb0 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -852,6 +852,29 @@ class CreatedInstance: self[key] = new_value +class LegacyInstancesItem(object): + """Item representing convertor for legacy instances. + + Args: + identifier (str): Identifier of convertor. + label (str): Label which will be shown in UI. + """ + + def __init__(self, identifier, label): + self.identifier = identifier + self.label = label + + def to_data(self): + return { + "identifier": self.identifier, + "label": self.label + } + + @classmethod + def from_data(cls, data): + return cls(data["identifier"], data["label"]) + + class CreateContext: """Context of instance creation. 
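For reference, a short round trip of the item class introduced in the patch above, assuming it is imported from the patched module; the identifier and label values are placeholders, not taken from any host:

    from openpype.pipeline.create.context import LegacyInstancesItem

    # Build an item, serialize it, then restore it again.
    item = LegacyInstancesItem("example.convertor", "Legacy example instances")
    data = item.to_data()
    # data == {"identifier": "example.convertor", "label": "Legacy example instances"}
    restored = LegacyInstancesItem.from_data(data)
    assert restored.identifier == item.identifier
    assert restored.label == item.label
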
From 8e99d9128a622956299e9dfdd5e22f22460e63d5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 14:56:56 +0200 Subject: [PATCH 1736/2550] implemented basic of convertor --- openpype/pipeline/create/creator_plugins.py | 90 +++++++++++++++++++++ 1 file changed, 90 insertions(+) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 97ee94c449..62562e4428 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -33,6 +33,96 @@ class CreatorError(Exception): super(CreatorError, self).__init__(message) +@six.add_metaclass(ABCMeta) +class LegacyInstanceConvertor(object): + """Helper for conversion of instances created using legacy creators. + + Conversion from legacy creators would mean to loose legacy instances, + convert them automatically or write a script which must user run. All of + these solutions are workign but will happen without asking or user must + know about them. This plugin can be used to show legacy instances in + Publisher and give user ability to run conversion script. + + Convertor logic should be very simple. Method 'find_instances' is to + look for legacy instances in scene a possibly call + pre-implemented 'add_legacy_item'. + + User will have ability to trigger conversion which is executed by calling + 'convert' which should call 'remove_legacy_item' when is done. + + It does make sense to add only one or none legacy item to create context + for convertor as it's not possible to choose which instace are converted + and which are not. + + Convertor can use 'collection_shared_data' property like creators. Also + can store any information to it's object for conversion purposes. + + Args: + create_context + """ + + def __init__(self, create_context): + self._create_context = create_context + + @abstractproperty + def identifier(self): + """Converted identifier. + + Returns: + str: Converted identifier unique for all converters in host. + """ + + pass + + @abstractmethod + def find_instances(self): + """Look for legacy instances in the scene. + + Should call 'add_legacy_item' if there is at least one item. + """ + + pass + + @abstractmethod + def convert(self): + """Conversion code.""" + + pass + + @property + def create_context(self): + """Quick access to create context.""" + + return self._create_context + + @property + def collection_shared_data(self): + """Access to shared data that can be used during 'find_instances'. + + Retruns: + Dict[str, Any]: Shared data. + + Raises: + UnavailableSharedData: When called out of collection phase. + """ + + return self._create_context.collection_shared_data + + def add_legacy_item(self, label): + """Add item to CreateContext. + + Args: + label (str): Label of item which will show in UI. + """ + + self._create_context.add_legacy_item(self.identifier, label) + + def remove_legacy_item(self): + """Remove legacy item from create context when conversion finished.""" + + self._create_context.remove_legacy_item(self.identifier) + + @six.add_metaclass(ABCMeta) class BaseCreator: """Plugin that create and modify instance data before publishing process. 
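A minimal sketch of what a concrete convertor built on the base class above could look like; `_find_legacy_nodes` and `_convert_node` are hypothetical stand-ins for host-specific logic and are not part of this patch:

    from openpype.pipeline.create.creator_plugins import LegacyInstanceConvertor


    def _find_legacy_nodes():
        # Hypothetical host lookup; a real convertor would query the scene here.
        return []


    def _convert_node(node):
        # Hypothetical conversion of a single legacy node to new instance data.
        pass


    class ExampleLegacyConvertor(LegacyInstanceConvertor):
        # Must be unique among convertors in the host.
        identifier = "example.legacy.instances"

        def find_instances(self):
            # Expose a single item in the Publisher UI if any legacy instances exist.
            if _find_legacy_nodes():
                self.add_legacy_item("Convert legacy example instances")

        def convert(self):
            # Convert everything that was found, then drop the item from the UI.
            for node in _find_legacy_nodes():
                _convert_node(node)
            self.remove_legacy_item()
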
From 971e4a23bd67fb4ec214bed4b39f32e9f0943715 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 15:19:40 +0200 Subject: [PATCH 1737/2550] split reset of plugins to more methods --- openpype/pipeline/create/context.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 918bc66cb0..565fdbdf89 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1074,6 +1074,11 @@ class CreateContext: Reloads creators from preregistered paths and can load publish plugins if it's enabled on context. """ + + self._reset_publish_plugins(discover_publish_plugins) + self._reset_creator_plugins() + + def _reset_publish_plugins(self, discover_publish_plugins): import pyblish.logic from openpype.pipeline import OpenPypePyblishPluginMixin @@ -1115,6 +1120,7 @@ class CreateContext: self.publish_plugins = plugins_by_targets self.plugins_with_defs = plugins_with_defs + def _reset_creator_plugins(self): # Prepare settings system_settings = get_system_settings() project_settings = get_project_settings(self.project_name) From cff9990c6fc59ee5d142ce14db206951a5620fdf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 15:20:07 +0200 Subject: [PATCH 1738/2550] added logic to discover convertors and find legacy items --- openpype/pipeline/create/context.py | 51 +++++++++++++++++++++ openpype/pipeline/create/creator_plugins.py | 12 +++++ 2 files changed, 63 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 565fdbdf89..783b599aef 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -22,6 +22,7 @@ from .creator_plugins import ( Creator, AutoCreator, discover_creator_plugins, + discover_legacy_convertor_plugins, ) UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) @@ -940,6 +941,9 @@ class CreateContext: # Manual creators self.manual_creators = {} + self.legacy_convertors = {} + self.legacy_items_by_id = {} + self.publish_discover_result = None self.publish_plugins_mismatch_targets = [] self.publish_plugins = [] @@ -1020,6 +1024,7 @@ class CreateContext: with self.bulk_instances_collection(): self.reset_instances() + self.find_legacy_items() self.execute_autocreators() self.reset_finalization() @@ -1077,6 +1082,7 @@ class CreateContext: self._reset_publish_plugins(discover_publish_plugins) self._reset_creator_plugins() + self._reset_legacy_convertor_plugins() def _reset_publish_plugins(self, discover_publish_plugins): import pyblish.logic @@ -1172,6 +1178,29 @@ class CreateContext: self.creators = creators + def _reset_legacy_convertor_plugins(self): + legacy_convertors = {} + for convertor_class in discover_legacy_convertor_plugins(): + if inspect.isabstract(convertor_class): + self.log.info( + "Skipping abstract Creator {}".format(str(convertor_class)) + ) + continue + + convertor_identifier = convertor_class.identifier + if convertor_identifier in legacy_convertors: + self.log.warning(( + "Duplicated Converter identifier. " + "Using first and skipping following" + )) + continue + + legacy_convertors[convertor_identifier] = ( + convertor_identifier(self) + ) + + self.legacy_convertors = legacy_convertors + def reset_context_data(self): """Reload context data using host implementation. 
@@ -1243,6 +1272,14 @@ class CreateContext: def creator_removed_instance(self, instance): self._instances_by_id.pop(instance.id, None) + def add_legacy_item(self, convertor_identifier, label): + self.legacy_items_by_id[convertor_identifier] = ( + LegacyInstancesItem(convertor_identifier, label) + ) + + def remove_legacy_item(self, convertor_identifier): + self.legacy_items_by_id.pop(convertor_identifier, None) + @contextmanager def bulk_instances_collection(self): """Validate context of instances in bulk. @@ -1278,6 +1315,20 @@ class CreateContext: for creator in self.creators.values(): creator.collect_instances() + def find_legacy_items(self): + self.legacy_items_by_id = {} + + for convertor in self.legacy_convertors.values(): + try: + convertor.find_instances() + except: + self.log.warning( + "Failed to find instances of convertor \"{}\"".format( + convertor.identifier + ), + exc_info=True + ) + def execute_autocreators(self): """Execute discovered AutoCreator plugins. diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 62562e4428..ff9326693e 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -559,6 +559,10 @@ def discover_creator_plugins(): return discover(BaseCreator) +def discover_legacy_convertor_plugins(): + return discover(LegacyInstanceConvertor) + + def discover_legacy_creator_plugins(): from openpype.lib import Logger @@ -616,6 +620,9 @@ def register_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): register_plugin(LegacyCreator, plugin) + elif issubclass(plugin, LegacyInstanceConvertor): + register_plugin(LegacyInstanceConvertor, plugin) + def deregister_creator_plugin(plugin): if issubclass(plugin, BaseCreator): @@ -624,12 +631,17 @@ def deregister_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): deregister_plugin(LegacyCreator, plugin) + elif issubclass(plugin, LegacyInstanceConvertor): + deregister_plugin(LegacyInstanceConvertor, plugin) + def register_creator_plugin_path(path): register_plugin_path(BaseCreator, path) register_plugin_path(LegacyCreator, path) + register_plugin_path(LegacyInstanceConvertor, path) def deregister_creator_plugin_path(path): deregister_plugin_path(BaseCreator, path) deregister_plugin_path(LegacyCreator, path) + deregister_plugin_path(LegacyInstanceConvertor, path) From fe0ab169f7e8c25c6dcaf47323fd76078062170d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 15:34:43 +0200 Subject: [PATCH 1739/2550] import 'ApplicationManager' from lib --- openpype/modules/ftrack/ftrack_module.py | 2 +- openpype/plugins/load/open_djv.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 75ffd7f864..678af0e577 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -195,7 +195,7 @@ class FtrackModule( app_definitions_from_app_manager, tool_definitions_from_app_manager ) - from openpype.api import ApplicationManager + from openpype.lib import ApplicationManager query_keys = [ "id", "key", diff --git a/openpype/plugins/load/open_djv.py b/openpype/plugins/load/open_djv.py index 273c77c93f..bc5fd64b87 100644 --- a/openpype/plugins/load/open_djv.py +++ b/openpype/plugins/load/open_djv.py @@ -1,5 +1,5 @@ import os -from openpype.api import ApplicationManager +from openpype.lib import ApplicationManager from openpype.pipeline import load From 
b5503372c0d41b6b4d3fb841112307b03d1955a7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 15:34:57 +0200 Subject: [PATCH 1740/2550] fix docstring import --- .../ftrack/event_handlers_server/event_user_assigment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py index 88d252e8cf..c4e48b92f0 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py @@ -132,7 +132,7 @@ class UserAssigmentEvent(BaseEvent): """ Get data to fill template from task - .. seealso:: :mod:`openpype.api.Anatomy` + .. seealso:: :mod:`openpype.pipeline.Anatomy` :param task: Task entity :type task: dict From 3de3d303895cfe4bb92aaf373613d9d54871b432 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 16:47:06 +0200 Subject: [PATCH 1741/2550] pass creator to cache function --- openpype/hosts/traypublisher/api/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 2cb5a8729f..555041d389 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -37,7 +37,7 @@ class HiddenTrayPublishCreator(HiddenCreator): host_name = "traypublisher" def collect_instances(self): - for instance_data in _cache_and_get_instances(): + for instance_data in _cache_and_get_instances(self): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: instance = CreatedInstance.from_existing( @@ -74,7 +74,7 @@ class TrayPublishCreator(Creator): host_name = "traypublisher" def collect_instances(self): - for instance_data in _cache_and_get_instances(): + for instance_data in _cache_and_get_instances(self): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: instance = CreatedInstance.from_existing( From 696dc78be74dc8d48da411335c5e906db4c669ef Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:26:03 +0200 Subject: [PATCH 1742/2550] =?UTF-8?q?=F0=9F=A5=85=20catch=20edge=20case=20?= =?UTF-8?q?data=20flow?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 3426040d65..ceb3b753e0 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -310,6 +310,9 @@ def imprint(node, data, update=False): """ if not data: return + if not node: + self.log.error("Node is not set, calling imprint on invalid data.") + return current_parms = {p.name(): p for p in node.spareParms()} update_parms = [] From 4fe053b109d892a5b5f3770be693ae72d1c19967 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:32:27 +0200 Subject: [PATCH 1743/2550] :recycle: refactor the use of `members` --- .../plugins/publish/collect_active_state.py | 3 ++- .../houdini/plugins/publish/collect_frames.py | 2 +- .../plugins/publish/collect_instances.py | 2 +- .../publish/collect_members_as_nodes.py | 21 ------------------- .../plugins/publish/collect_output_node.py | 2 +- .../plugins/publish/collect_redshift_rop.py | 2 +- .../publish/collect_render_products.py | 2 +- .../plugins/publish/collect_usd_layers.py | 6 ++++-- 
.../plugins/publish/extract_alembic.py | 4 +++- .../houdini/plugins/publish/extract_ass.py | 4 +++- .../plugins/publish/extract_composite.py | 4 +++- .../plugins/publish/extract_redshift_proxy.py | 4 +++- .../houdini/plugins/publish/extract_usd.py | 3 ++- .../plugins/publish/extract_usd_layered.py | 2 +- .../plugins/publish/extract_vdb_cache.py | 4 +++- .../validate_abc_primitive_to_detail.py | 17 +++++++-------- .../publish/validate_alembic_face_sets.py | 4 ++-- .../publish/validate_alembic_input_node.py | 3 ++- .../publish/validate_animation_settings.py | 3 ++- .../plugins/publish/validate_bypass.py | 3 ++- .../publish/validate_cop_output_node.py | 15 +++++++++++-- .../publish/validate_file_extension.py | 4 +++- .../plugins/publish/validate_frame_token.py | 3 ++- .../plugins/publish/validate_no_errors.py | 2 +- .../validate_primitive_hierarchy_paths.py | 14 ++++++------- .../publish/validate_sop_output_node.py | 2 +- .../validate_usd_layer_path_backslashes.py | 2 +- .../publish/validate_usd_model_and_shade.py | 4 +++- .../publish/validate_usd_output_node.py | 2 +- .../plugins/publish/validate_usd_setdress.py | 3 ++- .../publish/validate_usd_shade_workspace.py | 2 +- .../publish/validate_vdb_output_node.py | 2 +- 32 files changed, 81 insertions(+), 69 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index dd83721358..cc3f2e7fae 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -1,4 +1,5 @@ import pyblish.api +import hou class CollectInstanceActiveState(pyblish.api.InstancePlugin): @@ -24,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9108432384..531cdf1249 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -18,7 +18,7 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) frame_data = lib.get_frame_data(ropnode) instance.data.update(frame_data) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 0582ee154c..bb85630552 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -84,7 +84,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["families"] = [instance.data["family"]] instance[:] = [node] - instance.data["members"] = [node] + instance.data["instance_node"] = node.path() instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py deleted file mode 100644 index 07d71c6605..0000000000 --- a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -import 
pyblish.api -import hou - - -class CollectMembersAsNodes(pyblish.api.InstancePlugin): - """Collects instance members as Houdini nodes.""" - - order = pyblish.api.CollectorOrder - 0.01 - hosts = ["houdini"] - label = "Collect Members as Nodes" - - def process(self, instance): - if not instance.data.get("creator_identifier"): - return - - nodes = [ - hou.node(member) for member in instance.data.get("members", []) - ] - - instance.data["members"] = nodes diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index a3989dc776..601ed17b39 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): import hou - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 33bf74610a..346bdf3421 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index e88c5ea0e6..fcd80e0082 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: - rop_path = instance.data["members"][0].path() + rop_path = instance.data["instance_node"].path() raise RuntimeError( "No output node found. 
Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index c21b336403..833add854b 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -3,6 +3,8 @@ import os import pyblish.api import openpype.hosts.houdini.api.usd as usdlib +import hou + class CollectUsdLayers(pyblish.api.InstancePlugin): """Collect the USD Layers that have configured save paths.""" @@ -19,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = instance.data["members"][0] + rop_node = hou.node(instance.get("instance_node")) save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -55,7 +57,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] # include same USD ROP - layer_inst.append(instance.data["members"][0]) + layer_inst.append(rop_node) # include layer data layer_inst.append((layer, save_path)) diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 0ad7a5069f..cb2d4ef424 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAlembic(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAlembic(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index 864b8d5252..c6417ce18a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAss(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAss(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 1042dda8f0..7a1ab36b93 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -4,6 +4,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop, splitext +import hou + class ExtractComposite(publish.Extractor): @@ -14,7 +16,7 @@ class ExtractComposite(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py 
b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index 4d32b6f97e..29ede98a52 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractRedshiftProxy(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 4f471af597..cbeb5add71 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -5,6 +5,7 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou class ExtractUSD(publish.Extractor): @@ -17,7 +18,7 @@ class ExtractUSD(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 7ce51c441b..0288b7363a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 8a6d3b578a..434d6a2160 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractVDBCache(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractVDBCache(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 55c705c65b..86e92a052f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -32,19 +32,18 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - + import hou # noqa output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) if output_node is None: - node = instance.data["members"][0] 
cls.log.error( "SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." % node.path() + "Ensure a valid SOP output path is set." % rop_node.path() ) - return [node.path()] + return [rop_node.path()] - rop = instance.data["members"][0] - pattern = rop.parm("prim_to_detail_pattern").eval().strip() + pattern = rop_node.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( "Alembic ROP has no 'Primitive to Detail' pattern. " @@ -52,7 +51,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -60,14 +59,14 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." ) - return [rop.path()] + return [rop_node.path()] # Let's assume each attribute is explicitly named for now and has no # wildcards for Primitive to Detail. This simplifies the check. diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index 10681e4b72..44d58cfa36 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api - +import hou class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. @@ -24,7 +24,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.data["instance_node"]) facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 4355bc7921..bafb206bd3 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline import PublishValidationError +import hou class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -33,7 +34,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): output_node = instance.data.get("output_node") if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 32c5078b9f..f11f9c0c62 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateAnimationSettings(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 59ab2d2b1b..1bf51a986c 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -2,6 +2,7 @@ import pyblish.api from openpype.pipeline import PublishValidationError +import hou class ValidateBypassed(pyblish.api.InstancePlugin): """Validate all primitives build hierarchy from attribute when enabled. @@ -36,6 +37,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 2e99e5fb41..600dad8161 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- +import sys import pyblish.api +import six from openpype.pipeline import PublishValidationError @@ -34,10 +36,19 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + try: + output_node = instance.data["output_node"] + except KeyError as e: + six.reraise( + PublishValidationError, + PublishValidationError( + "Can't determine COP output node.", + title=cls.__name__), + sys.exc_info()[2] + ) if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index 5211cdb919..4584e78f4f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline import PublishValidationError +import hou + class ValidateFileExtension(pyblish.api.InstancePlugin): """Validate the output file extension fits the output family. 
@@ -40,7 +42,7 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): def get_invalid(cls, instance): # Get ROP node from instance - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) # Create lookup for current family in instance families = [] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index b65e9ef62e..b5f6ba71e1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateFrameToken(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index fd396ad8c9..f7c95aaf4e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -38,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance.data["members"][0]) + validate_nodes.append(hou.node(instance.get("instance_node"))) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index e1f1dc116e..d3a4c0cfbf 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -2,6 +2,7 @@ import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder from openpype.pipeline import PublishValidationError +import hou class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -30,18 +31,17 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def get_invalid(cls, instance): output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) if output_node is None: - node = instance.data["members"][0] cls.log.error( "SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." % node.path() + "Ensure a valid SOP output path is set." % rop_node.path() ) - return [node.path()] + return [rop_node.path()] - rop = instance.data["members"][0] - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -49,14 +49,14 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." 
) - return [rop.path()] + return [rop_node.path()] cls.log.debug("Checking for attribute: %s" % path_attr) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a1a96120e2..ed7f438729 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -37,7 +37,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data.get("output_node") if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 3e593a9508..972ac59f49 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -26,7 +26,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 3ca0fd0298..a55eb70cb2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -6,6 +6,8 @@ from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux +import hou + def fullname(o): """Get fully qualified class name""" @@ -38,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 9a4d292778..af21efcafc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -36,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 89ae8b8ad9..01ebc0e828 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -22,8 +22,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): def process(self, instance): from pxr import UsdGeom + import hou - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 2ff2702061..bd3366a424 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -20,7 +20,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) workspace = rop.parent() definition = workspace.type().definition() diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index a9f8b38e7e..61c1209fc9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -38,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." % instance.data["members"][0].path() + "ROP node '%s'." 
% instance.get("instance_node") ) return [instance] From d6826524949c471472d0b655931b78f44bdb55e2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:33:16 +0200 Subject: [PATCH 1744/2550] :recycle: absolute paths by default --- .../houdini/plugins/create/create_alembic_camera.py | 3 ++- .../hosts/houdini/plugins/create/create_arnold_ass.py | 11 +++++------ .../hosts/houdini/plugins/create/create_composite.py | 8 +++++++- .../hosts/houdini/plugins/create/create_pointcache.py | 9 ++++++++- 4 files changed, 22 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 481c6bea77..fec64eb4a1 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -25,7 +25,8 @@ class CreateAlembicCamera(plugin.HoudiniCreator): instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/{}.abc".format(subset_name), + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)), "use_sop_path": False, } diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index 40b253d1aa..8b310753d0 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -28,23 +28,22 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance_node = hou.node(instance.get("instance_node")) - basename = instance_node.name() - instance_node.setName(basename + "_ASS", unique_name=True) - # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export parm_template_group = instance_node.parmTemplateGroup() parm_template_group.hideFolder("Properties", True) instance_node.setParmTemplateGroup(parm_template_group) - filepath = "$HIP/pyblish/{}.$F4{}".format(subset_name, self.ext) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { # Render frame range "trange": 1, # Arnold ROP settings "ar_ass_file": filepath, - "ar_ass_export_enable": 1, - "filename": filepath + "ar_ass_export_enable": 1 } instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 1a9c56571a..45af2b0630 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -12,6 +12,8 @@ class CreateCompositeSequence(plugin.HoudiniCreator): family = "imagesequence" icon = "gears" + ext = ".exr" + def create(self, subset_name, instance_data, pre_create_data): import hou # noqa @@ -24,8 +26,12 @@ class CreateCompositeSequence(plugin.HoudiniCreator): pre_create_data) # type: CreatedInstance instance_node = hou.node(instance.get("instance_node")) - filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { + "trange": 1, "copoutput": filepath } diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 124936d285..6b6b277422 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ 
-30,12 +30,19 @@ class CreatePointCache(plugin.HoudiniCreator): "prim_to_detail_pattern": "cbId", "format": 2, "facesets": 0, - "filename": "$HIP/pyblish/{}.abc".format(subset_name) + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)) } if self.selected_nodes: parms["sop_path"] = self.selected_nodes[0].path() + # try to find output node + for child in self.selected_nodes[0].children(): + if child.type().name() == "output": + parms["sop_path"] = child.path() + break + instance_node.setParms(parms) instance_node.parm("trange").set(1) From 822f8f4bbc60c419e5f46fc7b4e7f205291951d9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:33:42 +0200 Subject: [PATCH 1745/2550] :art: check for missing files --- openpype/hosts/houdini/plugins/publish/extract_ass.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index c6417ce18a..0d246625ba 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -35,8 +35,12 @@ class ExtractAss(publish.Extractor): # error and thus still continues to the integrator. To capture that # we make sure all files exist files = instance.data["frames"] - missing = [fname for fname in files - if not os.path.exists(os.path.join(staging_dir, fname))] + missing = [] + for file_name in files: + full_path = os.path.normpath(os.path.join(staging_dir, file_name)) + if not os.path.exists(full_path): + missing.append(full_path) + if missing: raise RuntimeError("Failed to complete Arnold ass extraction. " "Missing output files: {}".format(missing)) From 0e0920336b9d821857d0128101df82759f3f7ae3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:34:06 +0200 Subject: [PATCH 1746/2550] =?UTF-8?q?=F0=9F=A9=B9=20parameter=20access?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/pipeline.py | 2 +- openpype/hosts/houdini/api/plugin.py | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f15cd6f2d5..689d4d711c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -440,7 +440,7 @@ def list_instances(creator_id=None): return [ i for i in lib.lsattrs(instance_signature) - if i.paramEval("creator_identifier") == creator_id + if i.parm("creator_identifier").eval() == creator_id ] diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2a16b08908..560aeec6ea 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -131,11 +131,7 @@ class HoudiniCreator(NewCreator): instance_node = self._create_instance_node( subset_name, "/out", node_type) - # wondering if we'll ever need more than one member here - # in Houdini - # instance_data["members"] = [instance_node.path()] instance_data["instance_node"] = instance_node.path() - instance = CreatedInstance( self.family, subset_name, From f4b92f4d1daa67243369440aa6a4339c6c646f1b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:51:30 +0200 Subject: [PATCH 1747/2550] :art: improve imprinting --- openpype/hosts/houdini/api/lib.py | 10 ++++++---- openpype/hosts/houdini/api/plugin.py | 9 +++++---- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git 
a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index ceb3b753e0..2452ceef62 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -324,14 +324,16 @@ def imprint(node, data, update=False): parm = get_template_from_value(key, value) - if key in current_parms.keys(): + if key in current_parms: + if node.evalParm(key) == data[key]: + continue if not update: - log.debug("{} already exists on {}".format(key, node)) + log.debug(f"{key} already exists on {node}") else: - log.debug("replacing {}".format(key)) + log.debug(f"replacing {key}") update_parms.append(parm) continue - # parm.hide(True) + templates.append(parm) parm_group = node.parmTemplateGroup() diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 560aeec6ea..51476fef52 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -184,12 +184,13 @@ class HoudiniCreator(NewCreator): instance_node = hou.node(created_inst.get("instance_node")) current_data = read(instance_node) + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } imprint( instance_node, - { - key: value[1] for key, value in _changes.items() - if current_data.get(key) != value[1] - }, + new_values, update=True ) From 021800d1dd72fe65039c2bf427e67b76fdc239f6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:52:00 +0200 Subject: [PATCH 1748/2550] :coffin: remove unused code --- .../hosts/houdini/hooks/set_operators_path.py | 25 ------------------ openpype/hosts/houdini/otls/OpenPype.hda | Bin 8238 -> 0 bytes 2 files changed, 25 deletions(-) delete mode 100644 openpype/hosts/houdini/hooks/set_operators_path.py delete mode 100644 openpype/hosts/houdini/otls/OpenPype.hda diff --git a/openpype/hosts/houdini/hooks/set_operators_path.py b/openpype/hosts/houdini/hooks/set_operators_path.py deleted file mode 100644 index 6f26baaa78..0000000000 --- a/openpype/hosts/houdini/hooks/set_operators_path.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -from openpype.lib import PreLaunchHook -import os - - -class SetOperatorsPath(PreLaunchHook): - """Set path to OpenPype assets folder.""" - - app_groups = ["houdini"] - - def execute(self): - hou_path = self.launch_context.env.get("HOUDINIPATH") - - openpype_assets = os.path.join( - os.getenv("OPENPYPE_REPOS_ROOT"), - "openpype", "hosts", "houdini", "hda" - ) - - if not hou_path: - self.launch_context.env["HOUDINIPATH"] = openpype_assets - return - - self.launch_context.env["HOUDINIPATH"] = "{}{}{}".format( - hou_path, os.pathsep, openpype_assets - ) diff --git a/openpype/hosts/houdini/otls/OpenPype.hda b/openpype/hosts/houdini/otls/OpenPype.hda deleted file mode 100644 index b34418d422b69282353dc134b1c4855e377c1039..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8238 zcmcgx?`{)E5O)fq!ceJHszg;pmjj7bs$&9tK*0%eY=@*xYzsR92_frzx3(9bcTc-} zi38#R`WF4rZ@d7{)fZ@Ib}x?4PMkQ{wiLyloj<>s{W~+;<>H&v$>$1u{cgKlEWK&e zN`?A%r5ulaX;wS`!uKCKBJvq$%N^ehSW~+42&i9>E9SUeX}+hP&U%u%nl?hgxb|GH zLoMIk|6;x+__-ghnQ!maM0DCufw`+w) zc%(am!_VW-H7j!bbM(Ijy#%2d4%fH9cC*ObK(uR~WTCcVw~Mil>8deP5Tct(-7ey2 zJn~~5oU2L^QmGkLl~6Omm1SC5j+w4*(I8Bvev(6iH|jzJYFTw?(6U2Ut^xaJV7a*& zaS!#B-5x~yP9JEuVpZRl`dYf1ET98Zcm7JHmj1@^`^5S{lyQQzgd}6LLflA;o~xPX z2Eh?&Q%)sJu%AwUOcVHUFnWDV$_!bxXAA~zlLptF2@~$5jTZ1YBp=h)9mo9qWT|Z_ zA|xXO{2&bc?)~GATMRpOAeI3!l@zP7rB76jB2a!RcV&)8N}A$Z|@^ zuY`tKJ`FDy#;<`@^z?Kez5=dJ#^?g!4FGOZXy%|sCT=n)8^AduQc3-j5!GM^&pSln 
zG=Qq?Kxkri$UV;R1S1QXP)Qs8IY;ZG2O2ZmyJcaql~%1PCglxxP@$z?qAlf>(=!1qLNs_jxhAyNP- z$`xG5g3lWzJWb&B0534r7&SI|0hG84_bFf&(sE~To=lU^Hdao64wB1S)Z}z% z`UkUj;dIuh8d81!18f=?C+@aZKY_;f)4wa-)-xJT1KES@GZSpI`GhLbVta>{(sensI#L>jgxJV2%i zWW@;COnf}oJ3XRbfiW((0Z4d|O_j3k+d>^NsSu=UNhfCxG-O_PEE$}9@a!{sXo_?- z8i02oO>8nWQZSqgR$FjQ24yl_ixMgcSjA2X&Kx0j1E!3oDg4LJ;)N~GNYMr)=fP;I za9$)edCeqk;rkCV-!bu-$8&m&v&9E5mrvUU!7F>vn08w%jPtF_Y1;DMfF; zWe_}io`%X(i}j1pX9=l~+NVdAz2H6rUC^3+186b-;s=PkBIJ2;)c+9^Grqq zddfmmu+ecf(T8Fb1oA5kDV%_apFpULL1-X}L(s{1nz%%P4R8hhStgmxI<{VNh}-m8 z)|>}h#eAb!+RX3m)Eo6mWyi4%m3U+)zfl4bgHXhj?LwvOV6b96yOc*}@$_}9@&FEJ zXunt<;KDFMkEKjCuFv(##vi%t2+gX?BCa8Q6Rp5&{7}g5n3+mwtQf!Q`Hh`YBVR5y z%K6>Wz-r8Lu2FdNLu8}%B5N}Z`!25()hcIT9*GTL;+4TOHR|k$?yN9l9!}5A)+(9QE{`T(OdM;}%nKf(Rz-rEp zEa$OqAv7$%N&SLXCzV^UBm)W+NxWCn~DxWr^b_1jAtUekt0V_L%HcEnK0m%mAKJ%z@ yR^PsZqcL^YdQ`(s+F1_$gAOU=NcdwZ33n_h;f*Cta^_fQ#1~6WxME4Cd-6X`t From 24ebd76bd90ef5705434b6ff26c34f294ce96dc5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 17:55:56 +0200 Subject: [PATCH 1749/2550] fix convertor creation --- openpype/pipeline/create/context.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 783b599aef..5f39d7a0d0 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1195,9 +1195,7 @@ class CreateContext: )) continue - legacy_convertors[convertor_identifier] = ( - convertor_identifier(self) - ) + legacy_convertors[convertor_identifier] = convertor_class(self) self.legacy_convertors = legacy_convertors From 3bdaf89a791a88e0a8fed5f3938aad697b7d08d2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 17:56:03 +0200 Subject: [PATCH 1750/2550] added id to legacy item --- openpype/pipeline/create/context.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 5f39d7a0d0..e0c5e49e40 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -862,18 +862,26 @@ class LegacyInstancesItem(object): """ def __init__(self, identifier, label): + self._id = str(uuid4()) self.identifier = identifier self.label = label + @property + def id(self): + return self._id + def to_data(self): return { + "id": self.id, "identifier": self.identifier, "label": self.label } @classmethod def from_data(cls, data): - return cls(data["identifier"], data["label"]) + obj = cls(data["identifier"], data["label"]) + obj._id = data["id"] + return obj class CreateContext: From 4ec0035ed593dd626d350f1c0fec768b176abf5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:56:18 +0200 Subject: [PATCH 1751/2550] =?UTF-8?q?=F0=9F=A6=AE=20hound=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/plugin.py | 3 +-- .../hosts/houdini/plugins/publish/validate_cop_output_node.py | 2 +- .../plugins/publish/validate_usd_layer_path_backslashes.py | 2 ++ 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 51476fef52..95e7add54f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -172,7 +172,7 @@ class HoudiniCreator(NewCreator): "houdini_cached_instances"): self.collection_shared_data["houdini_cached_instances"] = {} self.log.info("Caching instances for 
{}".format(self.identifier)) - self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 + self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E501 for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self @@ -182,7 +182,6 @@ class HoudiniCreator(NewCreator): def update_instances(self, update_list): for created_inst, _changes in update_list: instance_node = hou.node(created_inst.get("instance_node")) - current_data = read(instance_node) new_values = { key: new_value diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 600dad8161..1d0377c818 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -38,7 +38,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): try: output_node = instance.data["output_node"] - except KeyError as e: + except KeyError: six.reraise( PublishValidationError, PublishValidationError( diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 972ac59f49..a0e2302495 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -4,6 +4,8 @@ import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.pipeline import PublishValidationError +import hou + class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): """Validate USD loaded paths have no backslashes. From e57b932cf835887726e4711003b7459a0319540a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 18:09:55 +0200 Subject: [PATCH 1752/2550] :recycle: move methods around --- openpype/hosts/houdini/api/pipeline.py | 28 -------------------------- openpype/hosts/houdini/api/plugin.py | 24 ++++++++++++++++++---- 2 files changed, 20 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 689d4d711c..c1a5936415 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -430,32 +430,4 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): log.warning("%s - %s", instance_node.path(), exc) -def list_instances(creator_id=None): - """List all publish instances in the scene. - """ - instance_signature = { - "id": "pyblish.avalon.instance" - } - - return [ - i for i in lib.lsattrs(instance_signature) - if i.parm("creator_identifier").eval() == creator_id - ] - - -def remove_instance(instance): - """Remove specified instance from the scene. - - This is only removing `id` parameter so instance is no longer instance, - because it might contain valuable data for artist. 
- - """ - # Assume instance node is first node - instance_node = hou.node(instance.data.get("instance_node")) - to_delete = None - for parameter in instance_node.spareParms(): - if parameter.name() == "id" and \ - parameter.eval() == "pyblish.avalon.instance": - to_delete = parameter - instance_node.removeSpareParmTuple(to_delete) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 95e7add54f..ee508f0df4 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -13,8 +13,7 @@ from openpype.pipeline import ( CreatedInstance ) from openpype.lib import BoolDef -from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read +from .lib import imprint, read, lsattr class OpenPypeCreatorError(CreatorError): @@ -167,7 +166,11 @@ class HoudiniCreator(NewCreator): "houdini_cached_instances", {}) instances = cached_instances.get(self.identifier) if not instances: - instances = list_instances(creator_id=self.identifier) + instances = [ + i for i in lsattr("id", "pyblish.avalon.instance") + if i.parm("creator_identifier").eval() == self.identifier + ] + if not self.collection_shared_data.get( "houdini_cached_instances"): self.collection_shared_data["houdini_cached_instances"] = {} @@ -194,8 +197,21 @@ class HoudiniCreator(NewCreator): ) def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, + because it might contain valuable data for artist. + + """ for instance in instances: - remove_instance(instance) + instance_node = hou.node(instance.data.get("instance_node")) + to_delete = None + for parameter in instance_node.spareParms(): + if parameter.name() == "id" and \ + parameter.eval() == "pyblish.avalon.instance": + to_delete = parameter + instance_node.removeSpareParmTuple(to_delete) self._remove_instance_from_context(instance) def get_pre_create_attr_defs(self): From 7b5abe1770bc2736f0b8f09998b8a85889274e5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 18:11:44 +0200 Subject: [PATCH 1753/2550] :rotating_light: remove empty lines --- openpype/hosts/houdini/api/pipeline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index c1a5936415..88c9029141 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -428,6 +428,3 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): instance_node.bypass(not new_value) except hou.PermissionError as exc: log.warning("%s - %s", instance_node.path(), exc) - - - From e484df219d6e9cf8031a6f1268575cc2060b75d1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 19:28:12 +0200 Subject: [PATCH 1754/2550] Define constant for context group --- openpype/tools/publisher/constants.py | 3 +++ .../tools/publisher/widgets/card_view_widgets.py | 15 ++++++++------- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index dc44aade45..866792aa32 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -3,6 +3,9 @@ from Qt import QtCore # ID of context item in instance view CONTEXT_ID = "context" CONTEXT_LABEL = "Options" +# Not showed anywhere - used as identifier +CONTEXT_GROUP = "__ContextGroup__" + # Allowed symbols for subset name (and variant) # - 
characters, numbers, unsercore and dash diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 5daf8059b0..55e2249496 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -37,7 +37,8 @@ from .widgets import ( ) from ..constants import ( CONTEXT_ID, - CONTEXT_LABEL + CONTEXT_LABEL, + CONTEXT_GROUP, ) @@ -284,7 +285,7 @@ class ContextCardWidget(CardWidget): super(ContextCardWidget, self).__init__(parent) self._id = CONTEXT_ID - self._group_identifier = "" + self._group_identifier = CONTEXT_GROUP icon_widget = PublishPixmapLabel(None, self) icon_widget.setObjectName("FamilyIconLabel") @@ -595,7 +596,7 @@ class InstanceCardView(AbstractInstanceView): instances_by_group[group_name] ) - ordered_group_names = [""] + ordered_group_names = [CONTEXT_GROUP] for idx in range(self._content_layout.count()): if idx > 0: item = self._content_layout.itemAt(idx) @@ -749,7 +750,7 @@ class InstanceCardView(AbstractInstanceView): # If start group is not set then use context item group name if start_group is None: - start_group = "" + start_group = CONTEXT_GROUP # If start instance id is not filled then use context id (similar to # group) @@ -777,7 +778,7 @@ class InstanceCardView(AbstractInstanceView): # Go through ordered groups (from top to bottom) and change selection for name in self._ordered_groups: # Prepare sorted instance widgets - if name == "": + if name == CONTEXT_GROUP: sorted_widgets = [self._context_widget] else: group_widget = self._widgets_by_group[name] @@ -916,13 +917,13 @@ class InstanceCardView(AbstractInstanceView): selected_groups = [] selected_instances = [] if context_selected: - selected_groups.append("") + selected_groups.append(CONTEXT_GROUP) selected_instances.append(CONTEXT_ID) self._context_widget.set_selected(context_selected) for group_name in self._ordered_groups: - if group_name == "": + if group_name == CONTEXT_GROUP: continue group_widget = self._widgets_by_group[group_name] From 3a6bc00a5344c1e0a2124e5a62bda8bfa4d96a2d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 19:30:42 +0200 Subject: [PATCH 1755/2550] controller has access to convertor items --- openpype/tools/publisher/control.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index d2d01e7921..9abc53675d 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1234,6 +1234,14 @@ class AbstractPublisherController(object): pass + @abstractproperty + def legacy_items(self): + pass + + @abstractmethod + def convert_legacy_items(self, convertor_identifiers): + pass + @abstractmethod def set_comment(self, comment): """Set comment on pyblish context. 
@@ -1598,6 +1606,10 @@ class PublisherController(BasePublisherController): """Current instances in create context.""" return self._create_context.instances_by_id + @property + def legacy_items(self): + return self._create_context.legacy_items_by_id + @property def _creators(self): """All creators loaded in create context.""" @@ -1716,6 +1728,7 @@ class PublisherController(BasePublisherController): self._create_context.reset_context_data() with self._create_context.bulk_instances_collection(): self._create_context.reset_instances() + self._create_context.find_legacy_items() self._create_context.execute_autocreators() self._resetting_instances = False @@ -1841,6 +1854,12 @@ class PublisherController(BasePublisherController): variant, task_name, asset_doc, project_name, instance=instance ) + def convert_legacy_items(self, convertor_identifiers): + for convertor_identifier in convertor_identifiers: + self._create_context.run_convertor(convertor_identifier) + self._on_create_instance_change() + self.emit_card_message("Conversion finished") + def create( self, creator_identifier, subset_name, instance_data, options ): From b8e5e5e75f7ce5c85c702c757a65b2f6d9ed5e56 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 19:31:31 +0200 Subject: [PATCH 1756/2550] create context has function to run convertor --- openpype/pipeline/create/context.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index e0c5e49e40..250193f511 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1500,3 +1500,8 @@ class CreateContext: "Accessed Collection shared data out of collection phase" ) return self._collection_shared_data + + def run_convertor(self, convertor_identifier): + convertor = self.legacy_convertors.get(convertor_identifier) + if convertor is not None: + convertor.convert() From e19268c4a1606cff38ab018556bc63a261624578 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 19:31:57 +0200 Subject: [PATCH 1757/2550] implemented basic implementation of converter --- openpype/style/data.json | 5 +- openpype/style/style.css | 12 + openpype/tools/publisher/constants.py | 3 + .../publisher/widgets/card_view_widgets.py | 292 ++++++++++++---- .../publisher/widgets/list_view_widgets.py | 312 +++++++++++++----- .../publisher/widgets/overview_widget.py | 23 +- openpype/tools/publisher/widgets/widgets.py | 60 +++- 7 files changed, 538 insertions(+), 169 deletions(-) diff --git a/openpype/style/data.json b/openpype/style/data.json index fef69071ed..44c0d51999 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -100,7 +100,10 @@ "bg-expander": "#2C313A", "bg-expander-hover": "#2d6c9f", "bg-expander-selected-hover": "#3784c5" - } + }, + "bg-legacy": "rgb(17, 17, 17)", + "bg-legacy-hover": "rgb(41, 41, 41)", + "bg-legacy-selected": "rgba(42, 123, 174, .4)" }, "settings": { "invalid-light": "#C93636", diff --git a/openpype/style/style.css b/openpype/style/style.css index a6818a5792..983f2c886f 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -965,6 +965,18 @@ VariantInputsWidget QToolButton { background: {color:bg-view-selection}; } +#CardViewLegacyItemWidget { + background: {color:publisher:bg-legacy}; + border-radius: 0.2em; + +} +#CardViewLegacyItemWidget:hover { + background: {color:publisher:bg-legacy-hover}; +} +#CardViewLegacyItemWidget[state="selected"] { + background: {color:publisher:bg-legacy-selected}; +} + 
#ListViewSubsetName[state="invalid"] { color: {color:publisher:error}; } diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index 866792aa32..3c192bf8a3 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -6,6 +6,7 @@ CONTEXT_LABEL = "Options" # Not showed anywhere - used as identifier CONTEXT_GROUP = "__ContextGroup__" +LEGACY_ITEM_GROUP = "Legacy instances" # Allowed symbols for subset name (and variant) # - characters, numbers, unsercore and dash @@ -20,6 +21,8 @@ SORT_VALUE_ROLE = QtCore.Qt.UserRole + 2 IS_GROUP_ROLE = QtCore.Qt.UserRole + 3 CREATOR_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 4 FAMILY_ROLE = QtCore.Qt.UserRole + 5 +GROUP_ROLE = QtCore.Qt.UserRole + 6 +LEGACY_CONVERTER_IDENTIFIER = QtCore.Qt.UserRole + 7 __all__ = ( diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 55e2249496..58a7bbc509 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -39,6 +39,7 @@ from ..constants import ( CONTEXT_ID, CONTEXT_LABEL, CONTEXT_GROUP, + LEGACY_ITEM_GROUP, ) @@ -58,15 +59,12 @@ class SelectionTypes: extend_to = SelectionType("extend_to") -class GroupWidget(QtWidgets.QWidget): - """Widget wrapping instances under group.""" - +class BaseGroupWidget(QtWidgets.QWidget): selected = QtCore.Signal(str, str, SelectionType) - active_changed = QtCore.Signal() removed_selected = QtCore.Signal() - def __init__(self, group_name, group_icons, parent): - super(GroupWidget, self).__init__(parent) + def __init__(self, group_name, parent): + super(BaseGroupWidget, self).__init__(parent) label_widget = QtWidgets.QLabel(group_name, self) @@ -87,10 +85,9 @@ class GroupWidget(QtWidgets.QWidget): layout.addLayout(label_layout, 0) self._group = group_name - self._group_icons = group_icons self._widgets_by_id = {} - self._ordered_instance_ids = [] + self._ordered_item_ids = [] self._label_widget = label_widget self._content_layout = layout @@ -105,7 +102,12 @@ class GroupWidget(QtWidgets.QWidget): return self._group - def get_selected_instance_ids(self): + def get_widget_by_item_id(self, item_id): + """Get instance widget by it's id.""" + + return self._widgets_by_id.get(item_id) + + def get_selected_item_ids(self): """Selected instance ids. 
Returns: @@ -140,13 +142,80 @@ class GroupWidget(QtWidgets.QWidget): return [ self._widgets_by_id[instance_id] - for instance_id in self._ordered_instance_ids + for instance_id in self._ordered_item_ids ] - def get_widget_by_instance_id(self, instance_id): - """Get instance widget by it's id.""" + def _remove_all_except(self, item_ids): + item_ids = set(item_ids) + # Remove instance widgets that are not in passed instances + for item_id in tuple(self._widgets_by_id.keys()): + if item_id in item_ids: + continue - return self._widgets_by_id.get(instance_id) + widget = self._widgets_by_id.pop(item_id) + if widget.is_selected: + self.removed_selected.emit() + + widget.setVisible(False) + self._content_layout.removeWidget(widget) + widget.deleteLater() + + def _update_ordered_item_ids(self): + ordered_item_ids = [] + for idx in range(self._content_layout.count()): + if idx > 0: + item = self._content_layout.itemAt(idx) + widget = item.widget() + if widget is not None: + ordered_item_ids.append(widget.id) + + self._ordered_item_ids = ordered_item_ids + + def _on_widget_selection(self, instance_id, group_id, selection_type): + self.selected.emit(instance_id, group_id, selection_type) + + +class LegacyItemsGroupWidget(BaseGroupWidget): + def update_items(self, items_by_id): + items_by_label = collections.defaultdict(list) + for item_id, item in items_by_id.items(): + items_by_label[item.label].append(item) + + # Remove instance widgets that are not in passed instances + self._remove_all_except(items_by_id.keys()) + + # Sort instances by subset name + sorted_labels = list(sorted(items_by_label.keys())) + + # Add new instances to widget + widget_idx = 1 + for label in sorted_labels: + for item in items_by_label[label]: + if item.id in self._widgets_by_id: + widget = self._widgets_by_id[item.id] + widget.update_item(item) + else: + widget = LegacyItemCardWidget(item, self) + widget.selected.connect(self._on_widget_selection) + self._widgets_by_id[item.id] = widget + self._content_layout.insertWidget(widget_idx, widget) + widget_idx += 1 + + self._update_ordered_item_ids() + + +class InstanceGroupWidget(BaseGroupWidget): + """Widget wrapping instances under group.""" + + active_changed = QtCore.Signal() + + def __init__(self, group_icons, *args, **kwargs): + super(InstanceGroupWidget, self).__init__(*args, **kwargs) + + self._group_icons = group_icons + + def update_icons(self, group_icons): + self._group_icons = group_icons def update_instance_values(self): """Trigger update on instance widgets.""" @@ -154,14 +223,6 @@ class GroupWidget(QtWidgets.QWidget): for widget in self._widgets_by_id.values(): widget.update_instance_values() - def confirm_remove_instance_id(self, instance_id): - """Delete widget by instance id.""" - - widget = self._widgets_by_id.pop(instance_id) - widget.setVisible(False) - self._content_layout.removeWidget(widget) - widget.deleteLater() - def update_instances(self, instances): """Update instances for the group. 
@@ -179,17 +240,7 @@ class GroupWidget(QtWidgets.QWidget):
             instances_by_subset_name[subset_name].append(instance)
 
         # Remove instance widgets that are not in passed instances
-        for instance_id in tuple(self._widgets_by_id.keys()):
-            if instance_id in instances_by_id:
-                continue
-
-            widget = self._widgets_by_id.pop(instance_id)
-            if widget.is_selected:
-                self.removed_selected.emit()
-
-            widget.setVisible(False)
-            self._content_layout.removeWidget(widget)
-            widget.deleteLater()
+        self._remove_all_except(instances_by_id.keys())
 
         # Sort instances by subset name
         sorted_subset_names = list(sorted(instances_by_subset_name.keys()))
@@ -212,18 +263,7 @@ class GroupWidget(QtWidgets.QWidget):
                 self._content_layout.insertWidget(widget_idx, widget)
                 widget_idx += 1
 
-        ordered_instance_ids = []
-        for idx in range(self._content_layout.count()):
-            if idx > 0:
-                item = self._content_layout.itemAt(idx)
-                widget = item.widget()
-                if widget is not None:
-                    ordered_instance_ids.append(widget.id)
-
-        self._ordered_instance_ids = ordered_instance_ids
-
-    def _on_widget_selection(self, instance_id, group_id, selection_type):
-        self.selected.emit(instance_id, group_id, selection_type)
+        self._update_ordered_item_ids()
 
 
 class CardWidget(BaseClickableFrame):
@@ -305,6 +345,41 @@ class ContextCardWidget(CardWidget):
         self._label_widget = label_widget
 
 
+class LegacyItemCardWidget(CardWidget):
+    """Card representing an instance created with a legacy creator.
+
+    Is shown under the legacy items group and can be selected for conversion.
+    """
+
+    def __init__(self, item, parent):
+        super(LegacyItemCardWidget, self).__init__(parent)
+        self.setObjectName("CardViewLegacyItemWidget")
+
+        self._id = item.id
+        self.identifier = item.identifier
+        self._group_identifier = LEGACY_ITEM_GROUP
+
+        icon_widget = PublishPixmapLabel(None, self)
+        icon_widget.setObjectName("FamilyIconLabel")
+
+        label_widget = QtWidgets.QLabel(item.label, self)
+
+        icon_layout = QtWidgets.QHBoxLayout()
+        icon_layout.setContentsMargins(5, 5, 5, 5)
+        icon_layout.addWidget(icon_widget)
+
+        layout = QtWidgets.QHBoxLayout(self)
+        layout.setContentsMargins(0, 5, 10, 5)
+        layout.addLayout(icon_layout, 0)
+        layout.addWidget(label_widget, 1)
+
+        self._icon_widget = icon_widget
+        self._label_widget = label_widget
+
+    def update_instance_values(self):
+        pass
+
+
 class InstanceCardWidget(CardWidget):
     """Card widget representing instance."""
 
@@ -482,6 +557,7 @@ class InstanceCardView(AbstractInstanceView):
         self._content_widget = content_widget
 
         self._context_widget = None
+        self._legacy_items_group = None
         self._widgets_by_group = {}
         self._ordered_groups = []
 
@@ -514,6 +590,9 @@ class InstanceCardView(AbstractInstanceView):
         ):
             output.append(self._context_widget)
 
+        if self._legacy_items_group is not None:
+            output.extend(self._legacy_items_group.get_selected_widgets())
+
         for group_widget in self._widgets_by_group.values():
             for widget in group_widget.get_selected_widgets():
                 output.append(widget)
@@ -527,23 +606,19 @@ class InstanceCardView(AbstractInstanceView):
         ):
             output.append(CONTEXT_ID)
 
+        if self._legacy_items_group is not None:
+            output.extend(self._legacy_items_group.get_selected_item_ids())
+
         for group_widget in self._widgets_by_group.values():
-            output.extend(group_widget.get_selected_instance_ids())
+            output.extend(group_widget.get_selected_item_ids())
 
         return output
 
     def refresh(self):
        """Refresh instances in view based on CreatedContext."""
-        # Create context item if it is not already existing
-        # - this must be as first thing to do as context item should be at the
-        #   top
-        if self._context_widget is None:
-            widget = ContextCardWidget(self._content_widget)
-            widget.selected.connect(self._on_widget_selection)
-            self._context_widget = widget
+        self._make_sure_context_widget_exists()
 
-            self.selection_changed.emit()
-            self._content_layout.insertWidget(0, widget)
+        self._update_legacy_items_group()
 
         # Prepare instances by group and identifiers by group
         instances_by_group = collections.defaultdict(list)
@@ -574,17 +649,21 @@ class InstanceCardView(AbstractInstanceView):
         # Keep track of widget indexes
         # - we start with 1 because Context item is at the top
         widget_idx = 1
+        if self._legacy_items_group is not None:
+            widget_idx += 1
+
         for group_name in sorted_group_names:
+            group_icons = {
+                identifier: self._controller.get_creator_icon(identifier)
+                for identifier in identifiers_by_group[group_name]
+            }
             if group_name in self._widgets_by_group:
                 group_widget = self._widgets_by_group[group_name]
-            else:
-                group_icons = {
-                    identifier: self._controller.get_creator_icon(identifier)
-                    for identifier in identifiers_by_group[group_name]
-                }
+                group_widget.update_icons(group_icons)
 
-                group_widget = GroupWidget(
-                    group_name, group_icons, self._content_widget
+            else:
+                group_widget = InstanceGroupWidget(
+                    group_icons, group_name, self._content_widget
                 )
                 group_widget.active_changed.connect(self._on_active_changed)
                 group_widget.selected.connect(self._on_widget_selection)
@@ -596,6 +675,9 @@ class InstanceCardView(AbstractInstanceView):
                 instances_by_group[group_name]
             )
+        self._update_ordered_group_names()
+
+    def _update_ordered_group_names(self):
         ordered_group_names = [CONTEXT_GROUP]
         for idx in range(self._content_layout.count()):
             if idx > 0:
@@ -606,6 +688,43 @@ class InstanceCardView(AbstractInstanceView):
 
         self._ordered_groups = ordered_group_names
 
+    def _make_sure_context_widget_exists(self):
+        # Create context item if it is not already existing
+        # - this must be as first thing to do as context item should be at the
+        #   top
+        if self._context_widget is not None:
+            return
+
+        widget = ContextCardWidget(self._content_widget)
+        widget.selected.connect(self._on_widget_selection)
+
+        self._context_widget = widget
+
+        self.selection_changed.emit()
+        self._content_layout.insertWidget(0, widget)
+
+    def _update_legacy_items_group(self):
+        legacy_items = self._controller.legacy_items
+        if not legacy_items and self._legacy_items_group is None:
+            return
+
+        if not legacy_items:
+            self._legacy_items_group.setVisible(False)
+            self._content_layout.removeWidget(self._legacy_items_group)
+            self._legacy_items_group.deleteLater()
+            self._legacy_items_group = None
+            return
+
+        if self._legacy_items_group is None:
+            group_widget = LegacyItemsGroupWidget(
+                LEGACY_ITEM_GROUP, self._content_widget
+            )
+            group_widget.selected.connect(self._on_widget_selection)
+            self._content_layout.insertWidget(1, group_widget)
+            self._legacy_items_group = group_widget
+
+        self._legacy_items_group.update_items(legacy_items)
+
     def refresh_instance_states(self):
         """Trigger update of instances on group widgets."""
         for widget in self._widgets_by_group.values():
@@ -622,9 +741,13 @@ class InstanceCardView(AbstractInstanceView):
         """
         if instance_id == CONTEXT_ID:
             new_widget = self._context_widget
+
         else:
-            group_widget = self._widgets_by_group[group_name]
-            new_widget = group_widget.get_widget_by_instance_id(instance_id)
+            if group_name == LEGACY_ITEM_GROUP:
+                group_widget = self._legacy_items_group
+            else:
+                group_widget = self._widgets_by_group[group_name]
+            new_widget = group_widget.get_widget_by_item_id(instance_id)
 
         if selection_type is SelectionTypes.clear:
self._select_item_clear(instance_id, group_name, new_widget) @@ -669,7 +792,10 @@ class InstanceCardView(AbstractInstanceView): if instance_id == CONTEXT_ID: remove_group = True else: - group_widget = self._widgets_by_group[group_name] + if group_name == LEGACY_ITEM_GROUP: + group_widget = self._legacy_items_group + else: + group_widget = self._widgets_by_group[group_name] if not group_widget.get_selected_widgets(): remove_group = True @@ -781,7 +907,10 @@ class InstanceCardView(AbstractInstanceView): if name == CONTEXT_GROUP: sorted_widgets = [self._context_widget] else: - group_widget = self._widgets_by_group[name] + if name == LEGACY_ITEM_GROUP: + group_widget = self._legacy_items_group + else: + group_widget = self._widgets_by_group[name] sorted_widgets = group_widget.get_ordered_widgets() # Change selection based on explicit selection if start group @@ -893,6 +1022,8 @@ class InstanceCardView(AbstractInstanceView): def get_selected_items(self): """Get selected instance ids and context.""" + + convertor_identifiers = [] instances = [] selected_widgets = self._get_selected_widgets() @@ -900,17 +1031,27 @@ class InstanceCardView(AbstractInstanceView): for widget in selected_widgets: if widget is self._context_widget: context_selected = True - else: + + elif isinstance(widget, InstanceCardWidget): instances.append(widget.id) - return instances, context_selected + elif isinstance(widget, LegacyItemCardWidget): + convertor_identifiers.append(widget.identifier) - def set_selected_items(self, instance_ids, context_selected): + return instances, context_selected, convertor_identifiers + + def set_selected_items( + self, instance_ids, context_selected, convertor_identifiers + ): s_instance_ids = set(instance_ids) - cur_ids, cur_context = self.get_selected_items() + s_convertor_identifiers = set(convertor_identifiers) + cur_ids, cur_context, cur_convertor_identifiers = ( + self.get_selected_items() + ) if ( set(cur_ids) == s_instance_ids and cur_context == context_selected + and set(cur_convertor_identifiers) == s_convertor_identifiers ): return @@ -926,11 +1067,20 @@ class InstanceCardView(AbstractInstanceView): if group_name == CONTEXT_GROUP: continue - group_widget = self._widgets_by_group[group_name] + legacy_group = group_name == LEGACY_ITEM_GROUP + if legacy_group: + group_widget = self._legacy_items_group + else: + group_widget = self._widgets_by_group[group_name] + group_selected = False for widget in group_widget.get_ordered_widgets(): select = False - if widget.id in s_instance_ids: + if legacy_group: + is_in = widget.identifier in s_convertor_identifiers + else: + is_in = widget.id in s_instance_ids + if is_in: selected_instances.append(widget.id) group_selected = True select = True diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index c329ca0e8c..df07470f1d 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -35,7 +35,10 @@ from ..constants import ( SORT_VALUE_ROLE, IS_GROUP_ROLE, CONTEXT_ID, - CONTEXT_LABEL + CONTEXT_LABEL, + GROUP_ROLE, + LEGACY_CONVERTER_IDENTIFIER, + LEGACY_ITEM_GROUP, ) @@ -330,6 +333,9 @@ class InstanceTreeView(QtWidgets.QTreeView): """Ids of selected instances.""" instance_ids = set() for index in self.selectionModel().selectedIndexes(): + if index.data(LEGACY_CONVERTER_IDENTIFIER) is not None: + continue + instance_id = index.data(INSTANCE_ID_ROLE) if instance_id is not None: instance_ids.add(instance_id) @@ 
-439,26 +445,36 @@ class InstanceListView(AbstractInstanceView): self._group_items = {} self._group_widgets = {} self._widgets_by_id = {} + # Group by instance id for handling of active state self._group_by_instance_id = {} self._context_item = None self._context_widget = None + self._legacy_group_item = None + self._legacy_group_widget = None + self._legacy_widgets_by_id = {} + self._legacy_items_by_id = {} + self._instance_view = instance_view self._instance_delegate = instance_delegate self._instance_model = instance_model self._proxy_model = proxy_model def _on_expand(self, index): - group_name = index.data(SORT_VALUE_ROLE) - group_widget = self._group_widgets.get(group_name) - if group_widget: - group_widget.set_expanded(True) + self._update_widget_expand_state(index, True) def _on_collapse(self, index): - group_name = index.data(SORT_VALUE_ROLE) - group_widget = self._group_widgets.get(group_name) + self._update_widget_expand_state(index, False) + + def _update_widget_expand_state(self, index, expanded): + group_name = index.data(GROUP_ROLE) + if group_name == LEGACY_ITEM_GROUP: + group_widget = self._legacy_group_widget + else: + group_widget = self._group_widgets.get(group_name) + if group_widget: - group_widget.set_expanded(False) + group_widget.set_expanded(expanded) def _on_toggle_request(self, toggle): selected_instance_ids = self._instance_view.get_selected_instance_ids() @@ -517,6 +533,16 @@ class InstanceListView(AbstractInstanceView): def refresh(self): """Refresh instances in the view.""" + # Sort view at the end of refresh + # - is turned off until any change in view happens + sort_at_the_end = False + # Create or use already existing context item + # - context widget does not change so we don't have to update anything + if self._make_sure_context_item_exists(): + sort_at_the_end = True + + self._update_legacy_items_group() + # Prepare instances by their groups instances_by_group_name = collections.defaultdict(list) group_names = set() @@ -525,75 +551,12 @@ class InstanceListView(AbstractInstanceView): group_names.add(group_label) instances_by_group_name[group_label].append(instance) - # Sort view at the end of refresh - # - is turned off until any change in view happens - sort_at_the_end = False - - # Access to root item of main model - root_item = self._instance_model.invisibleRootItem() - - # Create or use already existing context item - # - context widget does not change so we don't have to update anything - context_item = None - if self._context_item is None: - sort_at_the_end = True - context_item = QtGui.QStandardItem() - context_item.setData(0, SORT_VALUE_ROLE) - context_item.setData(CONTEXT_ID, INSTANCE_ID_ROLE) - - root_item.appendRow(context_item) - - index = self._instance_model.index( - context_item.row(), context_item.column() - ) - proxy_index = self._proxy_model.mapFromSource(index) - widget = ListContextWidget(self._instance_view) - self._instance_view.setIndexWidget(proxy_index, widget) - - self._context_widget = widget - self._context_item = context_item - # Create new groups based on prepared `instances_by_group_name` - new_group_items = [] - for group_name in group_names: - if group_name in self._group_items: - continue - - group_item = QtGui.QStandardItem() - group_item.setData(group_name, SORT_VALUE_ROLE) - group_item.setData(True, IS_GROUP_ROLE) - group_item.setFlags(QtCore.Qt.ItemIsEnabled) - self._group_items[group_name] = group_item - new_group_items.append(group_item) - - # Add new group items to root item if there are any - if new_group_items: 
- # Trigger sort at the end + if self._make_sure_groups_exists(group_names): sort_at_the_end = True - root_item.appendRows(new_group_items) - - # Create widget for each new group item and store it for future usage - for group_item in new_group_items: - index = self._instance_model.index( - group_item.row(), group_item.column() - ) - proxy_index = self._proxy_model.mapFromSource(index) - group_name = group_item.data(SORT_VALUE_ROLE) - widget = InstanceListGroupWidget(group_name, self._instance_view) - widget.expand_changed.connect(self._on_group_expand_request) - widget.toggle_requested.connect(self._on_group_toggle_request) - self._group_widgets[group_name] = widget - self._instance_view.setIndexWidget(proxy_index, widget) # Remove groups that are not available anymore - for group_name in tuple(self._group_items.keys()): - if group_name in group_names: - continue - - group_item = self._group_items.pop(group_name) - root_item.removeRow(group_item.row()) - widget = self._group_widgets.pop(group_name) - widget.deleteLater() + self._remove_groups_except(group_names) # Store which groups should be expanded at the end expand_groups = set() @@ -652,6 +615,7 @@ class InstanceListView(AbstractInstanceView): # Create new item and store it as new item = QtGui.QStandardItem() item.setData(instance["subset"], SORT_VALUE_ROLE) + item.setData(instance["subset"], GROUP_ROLE) item.setData(instance_id, INSTANCE_ID_ROLE) new_items.append(item) new_items_with_instance.append((item, instance)) @@ -717,13 +681,147 @@ class InstanceListView(AbstractInstanceView): self._instance_view.expand(proxy_index) + def _make_sure_context_item_exists(self): + if self._context_item is not None: + return False + + root_item = self._instance_model.invisibleRootItem() + context_item = QtGui.QStandardItem() + context_item.setData(0, SORT_VALUE_ROLE) + context_item.setData(CONTEXT_ID, INSTANCE_ID_ROLE) + + root_item.appendRow(context_item) + + index = self._instance_model.index( + context_item.row(), context_item.column() + ) + proxy_index = self._proxy_model.mapFromSource(index) + widget = ListContextWidget(self._instance_view) + self._instance_view.setIndexWidget(proxy_index, widget) + + self._context_widget = widget + self._context_item = context_item + return True + + def _update_legacy_items_group(self): + created_new_items = False + legacy_items_by_id = self._controller.legacy_items + group_item = self._legacy_group_item + if not legacy_items_by_id and group_item is None: + return created_new_items + + root_item = self._instance_model.invisibleRootItem() + if not legacy_items_by_id: + root_item.removeRow(group_item.row()) + self._legacy_group_widget.deleteLater() + self._legacy_group_widget = None + return created_new_items + + if group_item is None: + created_new_items = True + group_item = QtGui.QStandardItem() + group_item.setData(LEGACY_ITEM_GROUP, GROUP_ROLE) + group_item.setData(1, SORT_VALUE_ROLE) + group_item.setData(True, IS_GROUP_ROLE) + group_item.setFlags(QtCore.Qt.ItemIsEnabled) + + root_item.appendRow(group_item) + + index = self._instance_model.index( + group_item.row(), group_item.column() + ) + proxy_index = self._proxy_model.mapFromSource(index) + widget = InstanceListGroupWidget( + LEGACY_ITEM_GROUP, self._instance_view + ) + widget.toggle_checkbox.setVisible(False) + widget.expand_changed.connect(self._on_legacy_group_expand_request) + self._instance_view.setIndexWidget(proxy_index, widget) + + self._legacy_group_item = group_item + self._legacy_group_widget = widget + + for row in 
reversed(range(group_item.rowCount())): + child_item = group_item.child(row) + child_identifier = child_item.data(LEGACY_CONVERTER_IDENTIFIER) + if child_identifier not in legacy_items_by_id: + group_item.removeRows(row, 1) + + new_items = [] + for identifier, convertor_item in legacy_items_by_id.items(): + item = self._legacy_items_by_id.get(identifier) + if item is None: + created_new_items = True + item = QtGui.QStandardItem(convertor_item.label) + new_items.append(item) + item.setData(convertor_item.id, INSTANCE_ID_ROLE) + item.setData(convertor_item.label, SORT_VALUE_ROLE) + item.setData(LEGACY_ITEM_GROUP, GROUP_ROLE) + item.setData( + convertor_item.identifier, LEGACY_CONVERTER_IDENTIFIER + ) + + if new_items: + group_item.appendRows(new_items) + + return created_new_items + + def _make_sure_groups_exists(self, group_names): + new_group_items = [] + for group_name in group_names: + if group_name in self._group_items: + continue + + group_item = QtGui.QStandardItem() + group_item.setData(group_name, GROUP_ROLE) + group_item.setData(group_name, SORT_VALUE_ROLE) + group_item.setData(True, IS_GROUP_ROLE) + group_item.setFlags(QtCore.Qt.ItemIsEnabled) + self._group_items[group_name] = group_item + new_group_items.append(group_item) + + # Add new group items to root item if there are any + if not new_group_items: + return False + + # Access to root item of main model + root_item = self._instance_model.invisibleRootItem() + root_item.appendRows(new_group_items) + + # Create widget for each new group item and store it for future usage + for group_item in new_group_items: + index = self._instance_model.index( + group_item.row(), group_item.column() + ) + proxy_index = self._proxy_model.mapFromSource(index) + group_name = group_item.data(GROUP_ROLE) + widget = InstanceListGroupWidget(group_name, self._instance_view) + widget.expand_changed.connect(self._on_group_expand_request) + widget.toggle_requested.connect(self._on_group_toggle_request) + self._group_widgets[group_name] = widget + self._instance_view.setIndexWidget(proxy_index, widget) + + return True + + def _remove_groups_except(self, group_names): + # Remove groups that are not available anymore + root_item = self._instance_model.invisibleRootItem() + for group_name in tuple(self._group_items.keys()): + if group_name in group_names: + continue + + group_item = self._group_items.pop(group_name) + root_item.removeRow(group_item.row()) + widget = self._group_widgets.pop(group_name) + widget.deleteLater() + def refresh_instance_states(self): """Trigger update of all instances.""" for widget in self._widgets_by_id.values(): widget.update_instance_values() def _on_active_changed(self, changed_instance_id, new_value): - selected_instance_ids, _ = self.get_selected_items() + selected_instance_ids, _, _ = self.get_selected_items() selected_ids = set() found = False @@ -774,6 +872,16 @@ class InstanceListView(AbstractInstanceView): proxy_index = self._proxy_model.mapFromSource(group_index) self._instance_view.setExpanded(proxy_index, expanded) + def _on_legacy_group_expand_request(self, _, expanded): + group_item = self._legacy_group_item + if not group_item: + return + group_index = self._instance_model.index( + group_item.row(), group_item.column() + ) + proxy_index = self._proxy_model.mapFromSource(group_index) + self._instance_view.setExpanded(proxy_index, expanded) + def _on_group_toggle_request(self, group_name, state): if state == QtCore.Qt.PartiallyChecked: return @@ -807,10 +915,17 @@ class InstanceListView(AbstractInstanceView): tuple: 
Selected instance ids and boolean if context is selected. """ + instance_ids = [] + convertor_identifiers = [] context_selected = False for index in self._instance_view.selectionModel().selectedIndexes(): + convertor_identifier = index.data(LEGACY_CONVERTER_IDENTIFIER) + if convertor_identifier is not None: + convertor_identifiers.append(convertor_identifier) + continue + instance_id = index.data(INSTANCE_ID_ROLE) if not context_selected and instance_id == CONTEXT_ID: context_selected = True @@ -818,14 +933,20 @@ class InstanceListView(AbstractInstanceView): elif instance_id is not None: instance_ids.append(instance_id) - return instance_ids, context_selected + return instance_ids, context_selected, convertor_identifiers - def set_selected_items(self, instance_ids, context_selected): + def set_selected_items( + self, instance_ids, context_selected, convertor_identifiers + ): s_instance_ids = set(instance_ids) - cur_ids, cur_context = self.get_selected_items() + s_convertor_identifiers = set(convertor_identifiers) + cur_ids, cur_context, cur_convertor_identifiers = ( + self.get_selected_items() + ) if ( set(cur_ids) == s_instance_ids and cur_context == context_selected + and set(cur_convertor_identifiers) == s_convertor_identifiers ): return @@ -851,20 +972,35 @@ class InstanceListView(AbstractInstanceView): (item.child(row), list(new_parent_items)) ) - instance_id = item.data(INSTANCE_ID_ROLE) - if not instance_id: + convertor_identifier = item.data(LEGACY_CONVERTER_IDENTIFIER) + + select = False + expand_parent = True + if convertor_identifier is not None: + if convertor_identifier in s_convertor_identifiers: + select = True + else: + instance_id = item.data(INSTANCE_ID_ROLE) + if instance_id == CONTEXT_ID: + if context_selected: + select = True + expand_parent = False + + elif instance_id in s_instance_ids: + select = True + + if not select: continue - if instance_id in s_instance_ids: - select_indexes.append(item.index()) - for parent_item in parent_items: - index = parent_item.index() - proxy_index = proxy_model.mapFromSource(index) - if not view.isExpanded(proxy_index): - view.expand(proxy_index) + select_indexes.append(item.index()) + if not expand_parent: + continue - elif context_selected and instance_id == CONTEXT_ID: - select_indexes.append(item.index()) + for parent_item in parent_items: + index = parent_item.index() + proxy_index = proxy_model.mapFromSource(index) + if not view.isExpanded(proxy_index): + view.expand(proxy_index) selection_model = view.selectionModel() if not select_indexes: diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 5bd3017c2a..e208786fc7 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -124,6 +124,9 @@ class OverviewWidget(QtWidgets.QFrame): subset_attributes_widget.instance_context_changed.connect( self._on_instance_context_change ) + subset_attributes_widget.convert_requested.connect( + self._on_convert_requested + ) # --- Controller callbacks --- controller.event_system.add_callback( @@ -201,7 +204,7 @@ class OverviewWidget(QtWidgets.QFrame): self.create_requested.emit() def _on_delete_clicked(self): - instance_ids, _ = self.get_selected_items() + instance_ids, _, _ = self.get_selected_items() # Ask user if he really wants to remove instances dialog = QtWidgets.QMessageBox(self) @@ -235,7 +238,9 @@ class OverviewWidget(QtWidgets.QFrame): if self._refreshing_instances: return - instance_ids, 
context_selected = self.get_selected_items()
+        instance_ids, context_selected, convertor_identifiers = (
+            self.get_selected_items()
+        )
 
         # Disable delete button if nothing is selected
         self._delete_btn.setEnabled(len(instance_ids) > 0)
@@ -246,7 +251,7 @@ class OverviewWidget(QtWidgets.QFrame):
             for instance_id in instance_ids
         ]
         self._subset_attributes_widget.set_current_instances(
-            instances, context_selected
+            instances, context_selected, convertor_identifiers
         )
 
     def _on_active_changed(self):
@@ -314,6 +319,10 @@ class OverviewWidget(QtWidgets.QFrame):
 
         self.instance_context_changed.emit()
 
+    def _on_convert_requested(self):
+        _, _, convertor_identifiers = self.get_selected_items()
+        self._controller.convert_legacy_items(convertor_identifiers)
+
     def get_selected_items(self):
         view = self._subset_views_layout.currentWidget()
         return view.get_selected_items()
@@ -331,8 +340,12 @@ class OverviewWidget(QtWidgets.QFrame):
         else:
             new_view.refresh_instance_states()
 
-        instance_ids, context_selected = old_view.get_selected_items()
-        new_view.set_selected_items(instance_ids, context_selected)
+        instance_ids, context_selected, convertor_identifiers = (
+            old_view.get_selected_items()
+        )
+        new_view.set_selected_items(
+            instance_ids, context_selected, convertor_identifiers
+        )
 
         self._subset_views_layout.setCurrentIndex(new_idx)
 
diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py
index ddbe1eb6b7..b01fed25a5 100644
--- a/openpype/tools/publisher/widgets/widgets.py
+++ b/openpype/tools/publisher/widgets/widgets.py
@@ -1461,6 +1461,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget):
     └───────────────────────────────┘
     """
     instance_context_changed = QtCore.Signal()
+    convert_requested = QtCore.Signal()
 
     def __init__(self, controller, parent):
         super(SubsetAttributesWidget, self).__init__(parent)
@@ -1479,9 +1480,48 @@ class SubsetAttributesWidget(QtWidgets.QWidget):
 
         # BOTTOM PART
         bottom_widget = QtWidgets.QWidget(self)
-        creator_attrs_widget = CreatorAttrsWidget(
-            controller, bottom_widget
+
+        # Wrap Creator attributes to widget to be able to add convert button
+        creator_widget = QtWidgets.QWidget(bottom_widget)
+
+        # Convert button widget (with layout to handle stretch)
+        convert_widget = QtWidgets.QWidget(creator_widget)
+        convert_label = QtWidgets.QLabel(
+            (
+                "Found instances created with legacy creators."
+                "\nDo you wish to convert them?"
+ ), + creator_widget ) + convert_label.setWordWrap(True) + convert_label.setAlignment(QtCore.Qt.AlignCenter) + + convert_btn = QtWidgets.QPushButton( + "Convert legacy instances", convert_widget + ) + convert_separator = QtWidgets.QFrame(convert_widget) + convert_separator.setObjectName("Separator") + convert_separator.setMinimumHeight(2) + convert_separator.setMaximumHeight(2) + + convert_layout = QtWidgets.QGridLayout(convert_widget) + convert_layout.setContentsMargins(0, 0, 0, 0) + convert_layout.addWidget(convert_label, 0, 0, 1, 3) + convert_layout.addWidget(convert_btn, 1, 1) + convert_layout.addWidget(convert_separator, 2, 0, 1, 3) + convert_layout.setColumnStretch(0, 1) + convert_layout.setColumnStretch(1, 0) + convert_layout.setColumnStretch(2, 1) + + # Creator attributes widget + creator_attrs_widget = CreatorAttrsWidget( + controller, creator_widget + ) + creator_layout = QtWidgets.QVBoxLayout(creator_widget) + creator_layout.setContentsMargins(0, 0, 0, 0) + creator_layout.addWidget(convert_widget, 0) + creator_layout.addWidget(creator_attrs_widget, 1) + publish_attrs_widget = PublishPluginAttrsWidget( controller, bottom_widget ) @@ -1492,7 +1532,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): bottom_layout = QtWidgets.QHBoxLayout(bottom_widget) bottom_layout.setContentsMargins(0, 0, 0, 0) - bottom_layout.addWidget(creator_attrs_widget, 1) + bottom_layout.addWidget(creator_widget, 1) bottom_layout.addWidget(bottom_separator, 0) bottom_layout.addWidget(publish_attrs_widget, 1) @@ -1505,6 +1545,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): layout.addWidget(top_bottom, 0) layout.addWidget(bottom_widget, 1) + self._convertor_identifiers = None self._current_instances = None self._context_selected = False self._all_instances_valid = True @@ -1512,9 +1553,12 @@ class SubsetAttributesWidget(QtWidgets.QWidget): global_attrs_widget.instance_context_changed.connect( self._on_instance_context_changed ) + convert_btn.clicked.connect(self._on_convert_click) self._controller = controller + self._convert_widget = convert_widget + self.global_attrs_widget = global_attrs_widget self.creator_attrs_widget = creator_attrs_widget @@ -1537,7 +1581,12 @@ class SubsetAttributesWidget(QtWidgets.QWidget): self.instance_context_changed.emit() - def set_current_instances(self, instances, context_selected): + def _on_convert_click(self): + self.convert_requested.emit() + + def set_current_instances( + self, instances, context_selected, convertor_identifiers + ): """Change currently selected items. 
Args: @@ -1551,10 +1600,13 @@ class SubsetAttributesWidget(QtWidgets.QWidget): all_valid = False break + s_convertor_identifiers = set(convertor_identifiers) + self._convertor_identifiers = s_convertor_identifiers self._current_instances = instances self._context_selected = context_selected self._all_instances_valid = all_valid + self._convert_widget.setVisible(len(s_convertor_identifiers) > 0) self.global_attrs_widget.set_current_instances(instances) self.creator_attrs_widget.set_current_instances(instances) self.publish_attrs_widget.set_current_instances( From 45c944816c42d2593b61fc18f78ca321e6b3d120 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 21 Oct 2022 19:45:17 +0200 Subject: [PATCH 1758/2550] removed unused variable --- openpype/tools/publisher/widgets/card_view_widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 58a7bbc509..96802087ee 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -178,7 +178,7 @@ class BaseGroupWidget(QtWidgets.QWidget): class LegacyItemsGroupWidget(BaseGroupWidget): def update_items(self, items_by_id): items_by_label = collections.defaultdict(list) - for item_id, item in items_by_id.items(): + for item in items_by_id.values(): items_by_label[item.label].append(item) # Remove instance widgets that are not in passed instances From ba621ee54a9f7bc318dd3701ec80b3ee18354f55 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 22 Oct 2022 04:02:46 +0000 Subject: [PATCH 1759/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index fd3606e9f2..cda0a98ef3 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.4" +__version__ = "3.14.5-nightly.1" From 7a2e6bdf780f50d2680edf770955ae2db1cff1cd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 24 Oct 2022 00:10:04 +0200 Subject: [PATCH 1760/2550] :bug: fix caching --- openpype/hosts/houdini/api/__init__.py | 6 +----- openpype/hosts/houdini/api/plugin.py | 29 +++++++++++++------------- 2 files changed, 16 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index f29df021e1..2663a55f6f 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,9 +1,7 @@ from .pipeline import ( HoudiniHost, ls, - containerise, - list_instances, - remove_instance + containerise ) from .plugin import ( @@ -24,8 +22,6 @@ __all__ = [ "ls", "containerise", - "list_instances", - "remove_instance", "Creator", diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ee508f0df4..b7eda7f635 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -162,21 +162,22 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - cached_instances = self.collection_shared_data.get( - "houdini_cached_instances", {}) - instances = cached_instances.get(self.identifier) - if not instances: - instances = [ - i for i in lsattr("id", "pyblish.avalon.instance") - if i.parm("creator_identifier").eval() == self.identifier - ] + # cache instances if missing + if 
self.collection_shared_data.get("houdini_cached_instances") is None: + self.log.info("Caching instances ...") + self.collection_shared_data["houdini_cached_instances"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.parm("creator_identifier").eval() + if creator_id not in self.collection_shared_data[ + "houdini_cached_instances"]: + self.collection_shared_data["houdini_cached_instances"][ + creator_id] = [i] + else: + self.collection_shared_data["houdini_cached_instances"][ + creator_id].append(i) - if not self.collection_shared_data.get( - "houdini_cached_instances"): - self.collection_shared_data["houdini_cached_instances"] = {} - self.log.info("Caching instances for {}".format(self.identifier)) - self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E501 - for instance in instances: + for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self ) From d0d8c8958ce81c17f72d1717822699c28a6ba04c Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 24 Oct 2022 11:29:58 +0200 Subject: [PATCH 1761/2550] fix obj extractor --- openpype/hosts/maya/plugins/load/actions.py | 2 +- .../hosts/maya/plugins/publish/extract_obj.py | 51 ++++++++++++------- .../defaults/project_settings/maya.json | 24 +++++---- .../schemas/schema_maya_publish.json | 19 +++++++ 4 files changed, 67 insertions(+), 29 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/actions.py b/openpype/hosts/maya/plugins/load/actions.py index 253dae1e43..eca1b27f34 100644 --- a/openpype/hosts/maya/plugins/load/actions.py +++ b/openpype/hosts/maya/plugins/load/actions.py @@ -90,7 +90,7 @@ class ImportMayaLoader(load.LoaderPlugin): so you could also use it as a new base. """ - representations = ["ma", "mb"] + representations = ["ma", "mb", "obj"] families = ["*"] label = "Import" diff --git a/openpype/hosts/maya/plugins/publish/extract_obj.py b/openpype/hosts/maya/plugins/publish/extract_obj.py index 7c915a80d8..59f11a4aa9 100644 --- a/openpype/hosts/maya/plugins/publish/extract_obj.py +++ b/openpype/hosts/maya/plugins/publish/extract_obj.py @@ -2,15 +2,12 @@ import os from maya import cmds -import maya.mel as mel +# import maya.mel as mel import pyblish.api -import openpype.api -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline import publish +from openpype.hosts.maya.api import lib -from openpype.hosts.maya.api import obj - - -class ExtractObj(openpype.api.Extractor): +class ExtractObj(publish.Extractor): """Extract OBJ from Maya. 
This extracts reproducible OBJ exports ignoring any of the settings @@ -18,42 +15,60 @@ class ExtractObj(openpype.api.Extractor): """ order = pyblish.api.ExtractorOrder + hosts = ["maya"] label = "Extract OBJ" - families = ["obj"] + families = ["model"] def process(self, instance): - obj_exporter = obj.OBJExtractor(log=self.log) # Define output path staging_dir = self.staging_dir(instance) - filename = "{0}.fbx".format(instance.name) + filename = "{0}.obj".format(instance.name) path = os.path.join(staging_dir, filename) # The export requires forward slashes because we need to # format it into a string in a mel expression - path = path.replace('\\', '/') self.log.info("Extracting OBJ to: {0}".format(path)) - members = instance.data["setMembners"] + members = instance.data("setMembers") + members = cmds.ls(members, + dag=True, + shapes=True, + type=("mesh", "nurbsCurve"), + noIntermediate=True, + long=True) self.log.info("Members: {0}".format(members)) self.log.info("Instance: {0}".format(instance[:])) - obj_exporter.set_options_from_instance(instance) + if not cmds.pluginInfo('objExport', query=True, loaded=True): + cmds.loadPlugin('objExport') # Export - with maintained_selection(): - obj_exporter.export(members, path) - cmds.select(members, r=1, noExpand=True) - mel.eval('file -force -options "{0};{1};{2};{3};{4}" -typ "OBJexport" -pr -es "{5}";'.format(grp_flag, ptgrp_flag, mats_flag, smooth_flag, normals_flag, path)) # noqa + with lib.no_display_layers(instance): + with lib.displaySmoothness(members, + divisionsU=0, + divisionsV=0, + pointsWire=4, + pointsShaded=1, + polygonObject=1): + with lib.shader(members, + shadingEngine="initialShadingGroup"): + with lib.maintained_selection(): + cmds.select(members, noExpand=True) + cmds.file(path, + exportSelected=True, + type='OBJexport', + preserveReferences=True, + force=True) if "representation" not in instance.data: instance.data["representation"] = [] representation = { 'name':'obj', - 'ext':'obx', + 'ext':'obj', 'files': filename, "stagingDir": staging_dir, } diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 86815b8fc4..b0bef4943b 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -131,6 +131,16 @@ "Main" ] }, + "CreateModel": { + "enabled": true, + "write_color_sets": false, + "write_face_sets": false, + "defaults": [ + "Main", + "Proxy", + "Sculpt" + ] + }, "CreatePointCache": { "enabled": true, "write_color_sets": false, @@ -187,16 +197,6 @@ "Main" ] }, - "CreateModel": { - "enabled": true, - "write_color_sets": false, - "write_face_sets": false, - "defaults": [ - "Main", - "Proxy", - "Sculpt" - ] - }, "CreateRenderSetup": { "enabled": true, "defaults": [ @@ -577,6 +577,10 @@ "vrayproxy" ] }, + "ExtractObj": { + "enabled": true, + "optional": true + }, "ValidateRigContents": { "enabled": false, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 53247f6bd4..ab8c6b885e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -657,6 +657,25 @@ "object_type": "text" } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractObj", + "label": "Extract OBJ", + "checkbox_key": "enabled", + 
"children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + } + ] } ] }, From 96af8158796dad310feae7f96f74677e4311710f Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 24 Oct 2022 11:36:43 +0200 Subject: [PATCH 1762/2550] turn off OBJ by default --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index b0bef4943b..988c0e777a 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -578,7 +578,7 @@ ] }, "ExtractObj": { - "enabled": true, + "enabled": false, "optional": true }, "ValidateRigContents": { From f0a394bfd9e749f3bde00fd4d43ba5921192fe7e Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 24 Oct 2022 11:37:02 +0200 Subject: [PATCH 1763/2550] =?UTF-8?q?=F0=9F=90=95=E2=80=8D=F0=9F=A6=BA=20s?= =?UTF-8?q?hut=20up=20hound?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../hosts/maya/plugins/publish/extract_obj.py | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_obj.py b/openpype/hosts/maya/plugins/publish/extract_obj.py index 59f11a4aa9..edfe0b9439 100644 --- a/openpype/hosts/maya/plugins/publish/extract_obj.py +++ b/openpype/hosts/maya/plugins/publish/extract_obj.py @@ -7,6 +7,7 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.maya.api import lib + class ExtractObj(publish.Extractor): """Extract OBJ from Maya. @@ -34,11 +35,11 @@ class ExtractObj(publish.Extractor): members = instance.data("setMembers") members = cmds.ls(members, - dag=True, - shapes=True, - type=("mesh", "nurbsCurve"), - noIntermediate=True, - long=True) + dag=True, + shapes=True, + type=("mesh", "nurbsCurve"), + noIntermediate=True, + long=True) self.log.info("Members: {0}".format(members)) self.log.info("Instance: {0}".format(instance[:])) @@ -58,17 +59,17 @@ class ExtractObj(publish.Extractor): with lib.maintained_selection(): cmds.select(members, noExpand=True) cmds.file(path, - exportSelected=True, - type='OBJexport', - preserveReferences=True, - force=True) + exportSelected=True, + type='OBJexport', + preserveReferences=True, + force=True) if "representation" not in instance.data: instance.data["representation"] = [] representation = { - 'name':'obj', - 'ext':'obj', + 'name': 'obj', + 'ext': 'obj', 'files': filename, "stagingDir": staging_dir, } From aefb6163ee35e428facb1a8044c33a6cfdc3b372 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 24 Oct 2022 09:40:21 +0000 Subject: [PATCH 1764/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index cda0a98ef3..079822029e 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.5-nightly.1" +__version__ = "3.14.5-nightly.2" From 5f3312af04155a0d34d2a7a4ccd23a1e3c8eee1d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 24 Oct 2022 13:38:42 +0200 Subject: [PATCH 1765/2550] change log update --- CHANGELOG.md | 53 +++++++++++++++++++++++++++++++++------------------- 1 file changed, 34 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index c4f1dcf314..5464c390ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,40 @@ # Changelog -## [3.14.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.5](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...HEAD) + +**🚀 Enhancements** + +- Maya: add OBJ extractor to model family [\#4021](https://github.com/pypeclub/OpenPype/pull/4021) +- Publish report viewer tool [\#4010](https://github.com/pypeclub/OpenPype/pull/4010) +- Nuke | Global: adding custom tags representation filtering [\#4009](https://github.com/pypeclub/OpenPype/pull/4009) +- Publisher: Create context has shared data for collection phase [\#3995](https://github.com/pypeclub/OpenPype/pull/3995) +- Resolve: updating to v18 compatibility [\#3986](https://github.com/pypeclub/OpenPype/pull/3986) + +**🐛 Bug fixes** + +- TrayPublisher: Fix missing argument [\#4019](https://github.com/pypeclub/OpenPype/pull/4019) +- General: Fix python 2 compatibility of ffmpeg and oiio tools discovery [\#4011](https://github.com/pypeclub/OpenPype/pull/4011) + +**🔀 Refactored code** + +- Maya: Removed unused imports [\#4008](https://github.com/pypeclub/OpenPype/pull/4008) +- Unreal: Fix import of moved function [\#4007](https://github.com/pypeclub/OpenPype/pull/4007) +- Houdini: Change import of RepairAction [\#4005](https://github.com/pypeclub/OpenPype/pull/4005) +- Nuke/Hiero: Refactor openpype.api imports [\#4000](https://github.com/pypeclub/OpenPype/pull/4000) +- TVPaint: Defined with HostBase [\#3994](https://github.com/pypeclub/OpenPype/pull/3994) + +**Merged pull requests:** + +- Unreal: Remove redundant Creator stub [\#4012](https://github.com/pypeclub/OpenPype/pull/4012) +- Unreal: add `uproject` extension to Unreal project template [\#4004](https://github.com/pypeclub/OpenPype/pull/4004) +- Unreal: fix order of includes [\#4002](https://github.com/pypeclub/OpenPype/pull/4002) +- Fusion: Implement backwards compatibility \(+/- Fusion 17.2\) [\#3958](https://github.com/pypeclub/OpenPype/pull/3958) + +## [3.14.4](https://github.com/pypeclub/OpenPype/tree/3.14.4) (2022-10-19) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...3.14.4) **🆕 New features** @@ -27,7 +59,6 @@ - Maya: Moved plugin from global to maya [\#3939](https://github.com/pypeclub/OpenPype/pull/3939) - Publisher: Create dialog is part of main window [\#3936](https://github.com/pypeclub/OpenPype/pull/3936) - Fusion: Implement Alembic and FBX mesh loader [\#3927](https://github.com/pypeclub/OpenPype/pull/3927) -- Maya: Remove hardcoded requirement for maya/ start for image file prefix [\#3873](https://github.com/pypeclub/OpenPype/pull/3873) **🐛 Bug fixes** @@ -71,14 +102,6 @@ **🚀 Enhancements** - Publisher: Enhancement proposals [\#3897](https://github.com/pypeclub/OpenPype/pull/3897) -- Maya: better logging in Maketx [\#3886](https://github.com/pypeclub/OpenPype/pull/3886) -- Photoshop: review can be turned off [\#3885](https://github.com/pypeclub/OpenPype/pull/3885) -- TrayPublisher: added persisting of last selected project [\#3871](https://github.com/pypeclub/OpenPype/pull/3871) -- TrayPublisher: added text filter on project name to Tray Publisher [\#3867](https://github.com/pypeclub/OpenPype/pull/3867) -- Github issues adding `running version` section [\#3864](https://github.com/pypeclub/OpenPype/pull/3864) -- Publisher: Increase size of main window 
[\#3862](https://github.com/pypeclub/OpenPype/pull/3862) -- Flame: make migratable projects after creation [\#3860](https://github.com/pypeclub/OpenPype/pull/3860) -- Photoshop: synchronize image version with workfile [\#3854](https://github.com/pypeclub/OpenPype/pull/3854) **🐛 Bug fixes** @@ -86,12 +109,6 @@ - Flame: loading multilayer exr to batch/reel is working [\#3901](https://github.com/pypeclub/OpenPype/pull/3901) - Hiero: Fix inventory check on launch [\#3895](https://github.com/pypeclub/OpenPype/pull/3895) - WebPublisher: Fix import after refactor [\#3891](https://github.com/pypeclub/OpenPype/pull/3891) -- TVPaint: Fix renaming of rendered files [\#3882](https://github.com/pypeclub/OpenPype/pull/3882) -- Publisher: Nice checkbox visible in Python 2 [\#3877](https://github.com/pypeclub/OpenPype/pull/3877) -- Settings: Add missing default settings [\#3870](https://github.com/pypeclub/OpenPype/pull/3870) -- General: Copy of workfile does not use 'copy' function but 'copyfile' [\#3869](https://github.com/pypeclub/OpenPype/pull/3869) -- Tray Publisher: skip plugin if otioTimeline is missing [\#3856](https://github.com/pypeclub/OpenPype/pull/3856) -- Flame: retimed attributes are integrated with settings [\#3855](https://github.com/pypeclub/OpenPype/pull/3855) **🔀 Refactored code** @@ -105,8 +122,6 @@ **Merged pull requests:** - Maya: Fix Scene Inventory possibly starting off-screen due to maya preferences [\#3923](https://github.com/pypeclub/OpenPype/pull/3923) -- Maya: RenderSettings set default image format for V-Ray+Redshift to exr [\#3879](https://github.com/pypeclub/OpenPype/pull/3879) -- Remove lockfile during publish [\#3874](https://github.com/pypeclub/OpenPype/pull/3874) ## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) From 74ebf90046a442f905966a4fdd77f419b208f6e8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 24 Oct 2022 13:51:20 +0200 Subject: [PATCH 1766/2550] Removed submodule vendor/configs/OpenColorIO-Configs --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 0ccbcbce93f789572bbdb424bdfdf02940d40abb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 24 Oct 2022 13:54:55 +0200 Subject: [PATCH 1767/2550] Removed submodule vendor/configs/OpenColorIO-Configs --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 883f035361cee56d4e297271d178be20b40f2557 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 14:08:24 +0200 Subject: [PATCH 1768/2550] extract review does not crash with old settings overrides --- openpype/plugins/publish/extract_review.py | 41 ++++++++++------------ 1 file changed, 18 insertions(+), 23 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 431ddcc3b4..5e8f85ab86 100644 --- a/openpype/plugins/publish/extract_review.py +++ 
b/openpype/plugins/publish/extract_review.py @@ -1731,38 +1731,33 @@ class ExtractReview(pyblish.api.InstancePlugin): Returns: list: Containg all output definitions matching entered tags. """ + filtered_outputs = [] repre_c_tags_low = [tag.lower() for tag in (custom_tags or [])] for output_def in outputs: - valid = False tag_filters = output_def.get("filter", {}).get("custom_tags") - if ( - # if any of tag filter is empty, skip - custom_tags and not tag_filters - or not custom_tags and tag_filters - ): - continue - elif not custom_tags and not tag_filters: + if not custom_tags and not tag_filters: + # Definition is valid if both tags are empty valid = True - # lower all filter tags - tag_filters_low = [tag.lower() for tag in tag_filters] + elif not custom_tags or not tag_filters: + # Invalid if one is empty + valid = False - self.log.debug("__ tag_filters: {}".format(tag_filters)) - self.log.debug("__ repre_c_tags_low: {}".format( - repre_c_tags_low)) + else: + # Check if output definition tags are in representation tags + valid = False + # lower all filter tags + tag_filters_low = [tag.lower() for tag in tag_filters] + # check if any repre tag is not in filter tags + for tag in repre_c_tags_low: + if tag in tag_filters_low: + valid = True + break - # check if any repre tag is not in filter tags - for tag in repre_c_tags_low: - if tag in tag_filters_low: - valid = True - break - - if not valid: - continue - - filtered_outputs.append(output_def) + if valid: + filtered_outputs.append(output_def) self.log.debug("__ filtered_outputs: {}".format( [_o["filename_suffix"] for _o in filtered_outputs] From c27f4cbbf4b671980759d8ae520b2fc724deb9cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 14:48:30 +0200 Subject: [PATCH 1769/2550] :art: workfile auto-creator --- openpype/hosts/houdini/api/plugin.py | 56 +++++++++----- .../houdini/plugins/create/create_workfile.py | 76 +++++++++++++++++++ .../plugins/publish/collect_current_file.py | 38 +++------- 3 files changed, 124 insertions(+), 46 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/create/create_workfile.py diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index b7eda7f635..aae6d137ac 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -35,6 +35,9 @@ class Creator(LegacyCreator): when hovering over a node. The information is visible under the name of the node. + Deprecated: + This creator is deprecated and will be removed in future version. + """ defaults = ['Main'] @@ -91,12 +94,35 @@ class Creator(LegacyCreator): sys.exc_info()[2]) -@six.add_metaclass(ABCMeta) -class HoudiniCreator(NewCreator): - selected_nodes = [] +class HoudiniCreatorBase(object): + @staticmethod + def cache_instances(shared_data): + """Cache instances for Creators to shared data. + + Create `houdini_cached_instances` key when needed in shared data and + fill it with all collected instances from the scene under its + respective creator identifiers. + + Args: + Dict[str, Any]: Shared data. + + Return: + Dict[str, Any]: Shared data dictionary. 
+ + """ + if shared_data.get("houdini_cached_instances") is None: + shared_data["houdini_cached_instances"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.parm("creator_identifier").eval() + if creator_id not in shared_data["houdini_cached_instances"]: + shared_data["houdini_cached_instances"][creator_id] = [i] + else: + shared_data["houdini_cached_instances"][creator_id].append(i) # noqa + return shared_data @staticmethod - def _create_instance_node( + def create_instance_node( node_name, parent, node_type="geometry"): # type: (str, str, str) -> hou.Node @@ -117,6 +143,11 @@ class HoudiniCreator(NewCreator): instance_node.moveToGoodPosition() return instance_node + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator, HoudiniCreatorBase): + selected_nodes = [] + def create(self, subset_name, instance_data, pre_create_data): try: if pre_create_data.get("use_selection"): @@ -127,7 +158,7 @@ class HoudiniCreator(NewCreator): if node_type is None: node_type = "geometry" - instance_node = self._create_instance_node( + instance_node = self.create_instance_node( subset_name, "/out", node_type) instance_data["instance_node"] = instance_node.path() @@ -163,20 +194,7 @@ class HoudiniCreator(NewCreator): def collect_instances(self): # cache instances if missing - if self.collection_shared_data.get("houdini_cached_instances") is None: - self.log.info("Caching instances ...") - self.collection_shared_data["houdini_cached_instances"] = {} - cached_instances = lsattr("id", "pyblish.avalon.instance") - for i in cached_instances: - creator_id = i.parm("creator_identifier").eval() - if creator_id not in self.collection_shared_data[ - "houdini_cached_instances"]: - self.collection_shared_data["houdini_cached_instances"][ - creator_id] = [i] - else: - self.collection_shared_data["houdini_cached_instances"][ - creator_id].append(i) - + self.cache_instances(self.collection_shared_data) for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py new file mode 100644 index 0000000000..2a7cb14d68 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" +from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api.lib import read +from openpype.pipeline import CreatedInstance, AutoCreator +from openpype.pipeline.legacy_io import Session +from openpype.client import get_asset_by_name + + +class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): + """Workfile auto-creator.""" + identifier = "io.openpype.creators.houdini.workfile" + label = "Workfile" + family = "workfile" + icon = "gears" + + default_variant = "Main" + + def create(self): + variant = self.default_variant + current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + + project_name = self.project_name + asset_name = Session["AVALON_ASSET"] + task_name = Session["AVALON_TASK"] + host_name = Session["AVALON_APP"] + + if current_instance is None: + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": 
asset_name, + "task": task_name, + "variant": variant + } + data.update( + self.get_dynamic_data( + variant, task_name, asset_doc, + project_name, host_name, current_instance) + ) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + + # Update instance context if is not the same + elif ( + current_instance["asset"] != asset_name + or current_instance["task"] != task_name + ): + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + current_instance["asset"] = asset_name + current_instance["task"] = task_name + current_instance["subset"] = subset_name + + def collect_instances(self): + self.cache_instances(self.collection_shared_data) + for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + pass + diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index 1383c274a2..9cca07fdc7 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -5,19 +5,20 @@ from openpype.pipeline import legacy_io import pyblish.api -class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): +class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin): """Inject the current working file into context""" order = pyblish.api.CollectorOrder - 0.01 label = "Houdini Current File" hosts = ["houdini"] + family = ["workfile"] - def process(self, context): + def process(self, instance): """Inject the current working file""" current_file = hou.hipFile.path() if not os.path.exists(current_file): - # By default Houdini will even point a new scene to a path. + # By default, Houdini will even point a new scene to a path. # However if the file is not saved at all and does not exist, # we assume the user never set it. filepath = "" @@ -34,43 +35,26 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): "saved correctly." 
) - context.data["currentFile"] = current_file + instance.context.data["currentFile"] = current_file folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - task = legacy_io.Session["AVALON_TASK"] - - data = {} - - # create instance - instance = context.create_instance(name=filename) - subset = 'workfile' + task.capitalize() - - data.update({ - "subset": subset, - "asset": os.getenv("AVALON_ASSET", None), - "label": subset, - "publish": True, - "family": 'workfile', - "families": ['workfile'], + instance.data.update({ "setMembers": [current_file], - "frameStart": context.data['frameStart'], - "frameEnd": context.data['frameEnd'], - "handleStart": context.data['handleStart'], - "handleEnd": context.data['handleEnd'] + "frameStart": instance.context.data['frameStart'], + "frameEnd": instance.context.data['frameEnd'], + "handleStart": instance.context.data['handleStart'], + "handleEnd": instance.context.data['handleEnd'] }) - data['representations'] = [{ + instance.data['representations'] = [{ 'name': ext.lstrip("."), 'ext': ext.lstrip("."), 'files': file, "stagingDir": folder, }] - instance.data.update(data) - self.log.info('Collected instance: {}'.format(file)) self.log.info('Scene path: {}'.format(current_file)) self.log.info('staging Dir: {}'.format(folder)) - self.log.info('subset: {}'.format(subset)) From 32d1e572d7f869b596597cdc340f787f16858b92 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 24 Oct 2022 12:52:58 +0000 Subject: [PATCH 1770/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 079822029e..d1ba207aa3 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.5-nightly.2" +__version__ = "3.14.5-nightly.3" From 5b154d7a19d66f2e6d5b4f8567f38b441eae9066 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 15:00:17 +0200 Subject: [PATCH 1771/2550] :bug: fix HDA creation --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_hda.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 67c05b1634..5bb5786a40 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -43,7 +43,7 @@ class CreateHDA(plugin.HoudiniCreator): # if we have `use selection` enabled, and we have some # selected nodes ... 
subnet = parent_node.collapseIntoSubnet( - self._nodes, + self.selected_nodes, subnet_name="{}_subnet".format(node_name)) subnet.moveToGoodPosition() to_hda = subnet diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index a92d000457..8b97bf364f 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -1,11 +1,9 @@ # -*- coding: utf-8 -*- import os - from pprint import pformat - import pyblish.api - from openpype.pipeline import publish +import hou class ExtractHDA(publish.Extractor): @@ -17,7 +15,7 @@ class ExtractHDA(publish.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance.data.get("members")[0] + hda_node = hou.node(instance.data.get("instance_node")) hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) From 2e818a44041e806bc503594c211ad04bd3b4a29d Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 24 Oct 2022 13:11:22 +0000 Subject: [PATCH 1772/2550] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index d1ba207aa3..b1e4227030 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.5-nightly.3" +__version__ = "3.14.5" From 245c5e9afb81f231bcc884f5c503ae6c812421b8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 17:16:45 +0200 Subject: [PATCH 1773/2550] changed label of legacy group --- openpype/tools/publisher/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index 3c192bf8a3..e5969160c1 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -6,7 +6,7 @@ CONTEXT_LABEL = "Options" # Not showed anywhere - used as identifier CONTEXT_GROUP = "__ContextGroup__" -LEGACY_ITEM_GROUP = "Legacy instances" +LEGACY_ITEM_GROUP = "Incompatible subsets" # Allowed symbols for subset name (and variant) # - characters, numbers, unsercore and dash From 080deda3167c2b40fcd10582b6c4e99498cf2ff1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 17:18:06 +0200 Subject: [PATCH 1774/2550] fix list view update --- openpype/tools/publisher/widgets/list_view_widgets.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index df07470f1d..53951e3cba 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -452,7 +452,6 @@ class InstanceListView(AbstractInstanceView): self._legacy_group_item = None self._legacy_group_widget = None - self._legacy_widgets_by_id = {} self._legacy_items_by_id = {} self._instance_view = instance_view @@ -715,6 +714,7 @@ class InstanceListView(AbstractInstanceView): root_item.removeRow(group_item.row()) self._legacy_group_widget.deleteLater() self._legacy_group_widget = None + self._legacy_items_by_id = {} return created_new_items if group_item is None: @@ -745,6 +745,7 @@ class InstanceListView(AbstractInstanceView): child_item = group_item.child(row) child_identifier = child_item.data(LEGACY_CONVERTER_IDENTIFIER) if child_identifier not in 
legacy_items_by_id: + self._legacy_items_by_id.pop(child_identifier, None) group_item.removeRows(row, 1) new_items = [] @@ -760,6 +761,7 @@ class InstanceListView(AbstractInstanceView): item.setData( convertor_item.identifier, LEGACY_CONVERTER_IDENTIFIER ) + self._legacy_items_by_id[identifier] = item if new_items: group_item.appendRows(new_items) From 2787351f03aa7eb7c0220a8f60ba85e4b6a91166 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 17:18:23 +0200 Subject: [PATCH 1775/2550] change labels of the message for user --- openpype/tools/publisher/widgets/widgets.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index b01fed25a5..ec63509dfa 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1486,18 +1486,22 @@ class SubsetAttributesWidget(QtWidgets.QWidget): # Convert button widget (with layout to handle stretch) convert_widget = QtWidgets.QWidget(creator_widget) - convert_label = QtWidgets.QLabel( + convert_label = QtWidgets.QLabel(creator_widget) + # Set the label text with 'setText' to apply html + convert_label.setText( ( - "Found instances created with legacy creators." - "\nDo you with to convert them?" - ), - creator_widget + "Found old publishable subsets" + " incompatible with new publisher." + "

    Press the update subsets button" + " to automatically update them" + " to be able to publish again." + ) ) convert_label.setWordWrap(True) convert_label.setAlignment(QtCore.Qt.AlignCenter) convert_btn = QtWidgets.QPushButton( - "Convert legacy instances", convert_widget + "Update subsets", convert_widget ) convert_separator = QtWidgets.QFrame(convert_widget) convert_separator.setObjectName("Separator") From e94cd00ad7adc36adf48f9c05a752e94611778d3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 17:18:31 +0200 Subject: [PATCH 1776/2550] change separator size --- openpype/tools/publisher/widgets/widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ec63509dfa..e091e76fab 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1505,8 +1505,8 @@ class SubsetAttributesWidget(QtWidgets.QWidget): ) convert_separator = QtWidgets.QFrame(convert_widget) convert_separator.setObjectName("Separator") - convert_separator.setMinimumHeight(2) - convert_separator.setMaximumHeight(2) + convert_separator.setMinimumHeight(1) + convert_separator.setMaximumHeight(1) convert_layout = QtWidgets.QGridLayout(convert_widget) convert_layout.setContentsMargins(0, 0, 0, 0) From a98085704ff5a2ebaa205b715bf72024bea0e6bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 17:21:39 +0200 Subject: [PATCH 1777/2550] added some padding and spacing --- openpype/tools/publisher/widgets/widgets.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index e091e76fab..d4c2623790 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1509,7 +1509,8 @@ class SubsetAttributesWidget(QtWidgets.QWidget): convert_separator.setMaximumHeight(1) convert_layout = QtWidgets.QGridLayout(convert_widget) - convert_layout.setContentsMargins(0, 0, 0, 0) + convert_layout.setContentsMargins(5, 0, 5, 0) + convert_layout.setVerticalSpacing(10) convert_layout.addWidget(convert_label, 0, 0, 1, 3) convert_layout.addWidget(convert_btn, 1, 1) convert_layout.addWidget(convert_separator, 2, 0, 1, 3) From 271a0056bcd988a2371124879e86805cc379cbca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 17:31:38 +0200 Subject: [PATCH 1778/2550] change the item look --- openpype/style/data.json | 5 +---- openpype/style/style.css | 12 ------------ .../tools/publisher/widgets/card_view_widgets.py | 5 ++--- 3 files changed, 3 insertions(+), 19 deletions(-) diff --git a/openpype/style/data.json b/openpype/style/data.json index 44c0d51999..fef69071ed 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -100,10 +100,7 @@ "bg-expander": "#2C313A", "bg-expander-hover": "#2d6c9f", "bg-expander-selected-hover": "#3784c5" - }, - "bg-legacy": "rgb(17, 17, 17)", - "bg-legacy-hover": "rgb(41, 41, 41)", - "bg-legacy-selected": "rgba(42, 123, 174, .4)" + } }, "settings": { "invalid-light": "#C93636", diff --git a/openpype/style/style.css b/openpype/style/style.css index 983f2c886f..a6818a5792 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -965,18 +965,6 @@ VariantInputsWidget QToolButton { background: {color:bg-view-selection}; } -#CardViewLegacyItemWidget { - background: {color:publisher:bg-legacy}; - border-radius: 0.2em; - -} 
-#CardViewLegacyItemWidget:hover { - background: {color:publisher:bg-legacy-hover}; -} -#CardViewLegacyItemWidget[state="selected"] { - background: {color:publisher:bg-legacy-selected}; -} - #ListViewSubsetName[state="invalid"] { color: {color:publisher:error}; } diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 96802087ee..95fa8cd5d2 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -353,19 +353,18 @@ class LegacyItemCardWidget(CardWidget): def __init__(self, item, parent): super(LegacyItemCardWidget, self).__init__(parent) - self.setObjectName("CardViewLegacyItemWidget") self._id = item.id self.identifier = item.identifier self._group_identifier = LEGACY_ITEM_GROUP - icon_widget = PublishPixmapLabel(None, self) + icon_widget = IconValuePixmapLabel("fa.magic", self) icon_widget.setObjectName("FamilyIconLabel") label_widget = QtWidgets.QLabel(item.label, self) icon_layout = QtWidgets.QHBoxLayout() - icon_layout.setContentsMargins(5, 5, 5, 5) + icon_layout.setContentsMargins(10, 5, 5, 5) icon_layout.addWidget(icon_widget) layout = QtWidgets.QHBoxLayout(self) From 7afb2b2e9fea0ca8cc4fd3d48c16069d052c50df Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 17:56:27 +0200 Subject: [PATCH 1779/2550] change variable to use convertor instead of legacy --- openpype/tools/publisher/constants.py | 4 +- openpype/tools/publisher/control.py | 4 +- .../publisher/widgets/card_view_widgets.py | 74 +++++++++---------- .../publisher/widgets/list_view_widgets.py | 70 +++++++++--------- 4 files changed, 77 insertions(+), 75 deletions(-) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index e5969160c1..8bea69c812 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -6,7 +6,7 @@ CONTEXT_LABEL = "Options" # Not showed anywhere - used as identifier CONTEXT_GROUP = "__ContextGroup__" -LEGACY_ITEM_GROUP = "Incompatible subsets" +CONVERTOR_ITEM_GROUP = "Incompatible subsets" # Allowed symbols for subset name (and variant) # - characters, numbers, unsercore and dash @@ -22,7 +22,7 @@ IS_GROUP_ROLE = QtCore.Qt.UserRole + 3 CREATOR_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 4 FAMILY_ROLE = QtCore.Qt.UserRole + 5 GROUP_ROLE = QtCore.Qt.UserRole + 6 -LEGACY_CONVERTER_IDENTIFIER = QtCore.Qt.UserRole + 7 +CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 7 __all__ = ( diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 9abc53675d..b867bddc9d 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1235,7 +1235,7 @@ class AbstractPublisherController(object): pass @abstractproperty - def legacy_items(self): + def convertor_items(self): pass @abstractmethod @@ -1607,7 +1607,7 @@ class PublisherController(BasePublisherController): return self._create_context.instances_by_id @property - def legacy_items(self): + def convertor_items(self): return self._create_context.legacy_items_by_id @property diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 95fa8cd5d2..9fd2bf0824 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -39,7 +39,7 @@ from ..constants import ( CONTEXT_ID, CONTEXT_LABEL, CONTEXT_GROUP, - LEGACY_ITEM_GROUP, + CONVERTOR_ITEM_GROUP, ) 
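# --- Editorial sketch, not part of the patch above ---------------------------
# The pattern the renamed publisher widgets build on: a convertor row is told
# apart from an instance row by a custom Qt item-data role, so selection
# handling can branch on it. The identifier value below is illustrative only.
from Qt import QtCore, QtGui

CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 7  # mirrors constants.py

item = QtGui.QStandardItem("Incompatible subset")
item.setData("example.convertor", CONVERTER_IDENTIFIER_ROLE)
is_convertor_row = item.data(CONVERTER_IDENTIFIER_ROLE) is not None
# ------------------------------------------------------------------------------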
@@ -175,7 +175,7 @@ class BaseGroupWidget(QtWidgets.QWidget): self.selected.emit(instance_id, group_id, selection_type) -class LegacyItemsGroupWidget(BaseGroupWidget): +class ConvertorItemsGroupWidget(BaseGroupWidget): def update_items(self, items_by_id): items_by_label = collections.defaultdict(list) for item in items_by_id.values(): @@ -195,7 +195,7 @@ class LegacyItemsGroupWidget(BaseGroupWidget): widget = self._widgets_by_id[item.id] widget.update_item(item) else: - widget = LegacyItemCardWidget(item, self) + widget = ConvertorItemCardWidget(item, self) widget.selected.connect(self._on_widget_selection) self._widgets_by_id[item.id] = widget self._content_layout.insertWidget(widget_idx, widget) @@ -345,18 +345,18 @@ class ContextCardWidget(CardWidget): self._label_widget = label_widget -class LegacyItemCardWidget(CardWidget): +class ConvertorItemCardWidget(CardWidget): """Card for global context. Is not visually under group widget and is always at the top of card view. """ def __init__(self, item, parent): - super(LegacyItemCardWidget, self).__init__(parent) + super(ConvertorItemCardWidget, self).__init__(parent) self._id = item.id self.identifier = item.identifier - self._group_identifier = LEGACY_ITEM_GROUP + self._group_identifier = CONVERTOR_ITEM_GROUP icon_widget = IconValuePixmapLabel("fa.magic", self) icon_widget.setObjectName("FamilyIconLabel") @@ -556,7 +556,7 @@ class InstanceCardView(AbstractInstanceView): self._content_widget = content_widget self._context_widget = None - self._legacy_items_group = None + self._convertor_items_group = None self._widgets_by_group = {} self._ordered_groups = [] @@ -589,8 +589,8 @@ class InstanceCardView(AbstractInstanceView): ): output.append(self._context_widget) - if self._legacy_items_group is not None: - output.extend(self._legacy_items_group.get_selected_widgets()) + if self._convertor_items_group is not None: + output.extend(self._convertor_items_group.get_selected_widgets()) for group_widget in self._widgets_by_group.values(): for widget in group_widget.get_selected_widgets(): @@ -605,8 +605,8 @@ class InstanceCardView(AbstractInstanceView): ): output.append(CONTEXT_ID) - if self._legacy_items_group is not None: - output.extend(self._legacy_items_group.get_selected_item_ids()) + if self._convertor_items_group is not None: + output.extend(self._convertor_items_group.get_selected_item_ids()) for group_widget in self._widgets_by_group.values(): output.extend(group_widget.get_selected_item_ids()) @@ -617,7 +617,7 @@ class InstanceCardView(AbstractInstanceView): self._make_sure_context_widget_exists() - self._update_legacy_items_group() + self._update_convertor_items_group() # Prepare instances by group and identifiers by group instances_by_group = collections.defaultdict(list) @@ -648,7 +648,7 @@ class InstanceCardView(AbstractInstanceView): # Keep track of widget indexes # - we start with 1 because Context item as at the top widget_idx = 1 - if self._legacy_items_group is not None: + if self._convertor_items_group is not None: widget_idx += 1 for group_name in sorted_group_names: @@ -702,27 +702,27 @@ class InstanceCardView(AbstractInstanceView): self.selection_changed.emit() self._content_layout.insertWidget(0, widget) - def _update_legacy_items_group(self): - legacy_items = self._controller.legacy_items - if not legacy_items and self._legacy_items_group is None: + def _update_convertor_items_group(self): + convertor_items = self._controller.convertor_items + if not convertor_items and self._convertor_items_group is None: return - if not 
legacy_items: - self._legacy_items_group.setVisible(False) - self._content_layout.removeWidget(self._legacy_items_group) - self._legacy_items_group.deleteLater() - self._legacy_items_group = None + if not convertor_items: + self._convertor_items_group.setVisible(False) + self._content_layout.removeWidget(self._convertor_items_group) + self._convertor_items_group.deleteLater() + self._convertor_items_group = None return - if self._legacy_items_group is None: - group_widget = LegacyItemsGroupWidget( - LEGACY_ITEM_GROUP, self._content_widget + if self._convertor_items_group is None: + group_widget = ConvertorItemsGroupWidget( + CONVERTOR_ITEM_GROUP, self._content_widget ) group_widget.selected.connect(self._on_widget_selection) self._content_layout.insertWidget(1, group_widget) - self._legacy_items_group = group_widget + self._convertor_items_group = group_widget - self._legacy_items_group.update_items(legacy_items) + self._convertor_items_group.update_items(convertor_items) def refresh_instance_states(self): """Trigger update of instances on group widgets.""" @@ -742,8 +742,8 @@ class InstanceCardView(AbstractInstanceView): new_widget = self._context_widget else: - if group_name == LEGACY_ITEM_GROUP: - group_widget = self._legacy_items_group + if group_name == CONVERTOR_ITEM_GROUP: + group_widget = self._convertor_items_group else: group_widget = self._widgets_by_group[group_name] new_widget = group_widget.get_widget_by_item_id(instance_id) @@ -791,8 +791,8 @@ class InstanceCardView(AbstractInstanceView): if instance_id == CONTEXT_ID: remove_group = True else: - if group_name == LEGACY_ITEM_GROUP: - group_widget = self._legacy_items_group + if group_name == CONVERTOR_ITEM_GROUP: + group_widget = self._convertor_items_group else: group_widget = self._widgets_by_group[group_name] if not group_widget.get_selected_widgets(): @@ -906,8 +906,8 @@ class InstanceCardView(AbstractInstanceView): if name == CONTEXT_GROUP: sorted_widgets = [self._context_widget] else: - if name == LEGACY_ITEM_GROUP: - group_widget = self._legacy_items_group + if name == CONVERTOR_ITEM_GROUP: + group_widget = self._convertor_items_group else: group_widget = self._widgets_by_group[name] sorted_widgets = group_widget.get_ordered_widgets() @@ -1034,7 +1034,7 @@ class InstanceCardView(AbstractInstanceView): elif isinstance(widget, InstanceCardWidget): instances.append(widget.id) - elif isinstance(widget, LegacyItemCardWidget): + elif isinstance(widget, ConvertorItemCardWidget): convertor_identifiers.append(widget.identifier) return instances, context_selected, convertor_identifiers @@ -1066,16 +1066,16 @@ class InstanceCardView(AbstractInstanceView): if group_name == CONTEXT_GROUP: continue - legacy_group = group_name == LEGACY_ITEM_GROUP - if legacy_group: - group_widget = self._legacy_items_group + is_convertor_group = group_name == CONVERTOR_ITEM_GROUP + if is_convertor_group: + group_widget = self._convertor_items_group else: group_widget = self._widgets_by_group[group_name] group_selected = False for widget in group_widget.get_ordered_widgets(): select = False - if legacy_group: + if is_convertor_group: is_in = widget.identifier in s_convertor_identifiers else: is_in = widget.id in s_instance_ids diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 53951e3cba..32d84862f0 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -37,8 +37,8 @@ from ..constants import ( 
CONTEXT_ID, CONTEXT_LABEL, GROUP_ROLE, - LEGACY_CONVERTER_IDENTIFIER, - LEGACY_ITEM_GROUP, + CONVERTER_IDENTIFIER_ROLE, + CONVERTOR_ITEM_GROUP, ) @@ -333,7 +333,7 @@ class InstanceTreeView(QtWidgets.QTreeView): """Ids of selected instances.""" instance_ids = set() for index in self.selectionModel().selectedIndexes(): - if index.data(LEGACY_CONVERTER_IDENTIFIER) is not None: + if index.data(CONVERTER_IDENTIFIER_ROLE) is not None: continue instance_id = index.data(INSTANCE_ID_ROLE) @@ -450,9 +450,9 @@ class InstanceListView(AbstractInstanceView): self._context_item = None self._context_widget = None - self._legacy_group_item = None - self._legacy_group_widget = None - self._legacy_items_by_id = {} + self._convertor_group_item = None + self._convertor_group_widget = None + self._convertor_items_by_id = {} self._instance_view = instance_view self._instance_delegate = instance_delegate @@ -467,8 +467,8 @@ class InstanceListView(AbstractInstanceView): def _update_widget_expand_state(self, index, expanded): group_name = index.data(GROUP_ROLE) - if group_name == LEGACY_ITEM_GROUP: - group_widget = self._legacy_group_widget + if group_name == CONVERTOR_ITEM_GROUP: + group_widget = self._convertor_group_widget else: group_widget = self._group_widgets.get(group_name) @@ -540,7 +540,7 @@ class InstanceListView(AbstractInstanceView): if self._make_sure_context_item_exists(): sort_at_the_end = True - self._update_legacy_items_group() + self._update_convertor_items_group() # Prepare instances by their groups instances_by_group_name = collections.defaultdict(list) @@ -702,25 +702,25 @@ class InstanceListView(AbstractInstanceView): self._context_item = context_item return True - def _update_legacy_items_group(self): + def _update_convertor_items_group(self): created_new_items = False - legacy_items_by_id = self._controller.legacy_items - group_item = self._legacy_group_item - if not legacy_items_by_id and group_item is None: + convertor_items_by_id = self._controller.convertor_items + group_item = self._convertor_group_item + if not convertor_items_by_id and group_item is None: return created_new_items root_item = self._instance_model.invisibleRootItem() - if not legacy_items_by_id: + if not convertor_items_by_id: root_item.removeRow(group_item.row()) - self._legacy_group_widget.deleteLater() - self._legacy_group_widget = None - self._legacy_items_by_id = {} + self._convertor_group_widget.deleteLater() + self._convertor_group_widget = None + self._convertor_items_by_id = {} return created_new_items if group_item is None: created_new_items = True group_item = QtGui.QStandardItem() - group_item.setData(LEGACY_ITEM_GROUP, GROUP_ROLE) + group_item.setData(CONVERTOR_ITEM_GROUP, GROUP_ROLE) group_item.setData(1, SORT_VALUE_ROLE) group_item.setData(True, IS_GROUP_ROLE) group_item.setFlags(QtCore.Qt.ItemIsEnabled) @@ -732,36 +732,38 @@ class InstanceListView(AbstractInstanceView): ) proxy_index = self._proxy_model.mapFromSource(index) widget = InstanceListGroupWidget( - LEGACY_ITEM_GROUP, self._instance_view + CONVERTOR_ITEM_GROUP, self._instance_view ) widget.toggle_checkbox.setVisible(False) - widget.expand_changed.connect(self._on_legacy_group_expand_request) + widget.expand_changed.connect( + self._on_convertor_group_expand_request + ) self._instance_view.setIndexWidget(proxy_index, widget) - self._legacy_group_item = group_item - self._legacy_group_widget = widget + self._convertor_group_item = group_item + self._convertor_group_widget = widget for row in reversed(range(group_item.rowCount())): child_item = 
group_item.child(row) - child_identifier = child_item.data(LEGACY_CONVERTER_IDENTIFIER) - if child_identifier not in legacy_items_by_id: - self._legacy_items_by_id.pop(child_identifier, None) + child_identifier = child_item.data(CONVERTER_IDENTIFIER_ROLE) + if child_identifier not in convertor_items_by_id: + self._convertor_items_by_id.pop(child_identifier, None) group_item.removeRows(row, 1) new_items = [] - for identifier, convertor_item in legacy_items_by_id.items(): - item = self._legacy_items_by_id.get(identifier) + for identifier, convertor_item in convertor_items_by_id.items(): + item = self._convertor_items_by_id.get(identifier) if item is None: created_new_items = True item = QtGui.QStandardItem(convertor_item.label) new_items.append(item) item.setData(convertor_item.id, INSTANCE_ID_ROLE) item.setData(convertor_item.label, SORT_VALUE_ROLE) - item.setData(LEGACY_ITEM_GROUP, GROUP_ROLE) + item.setData(CONVERTOR_ITEM_GROUP, GROUP_ROLE) item.setData( - convertor_item.identifier, LEGACY_CONVERTER_IDENTIFIER + convertor_item.identifier, CONVERTER_IDENTIFIER_ROLE ) - self._legacy_items_by_id[identifier] = item + self._convertor_items_by_id[identifier] = item if new_items: group_item.appendRows(new_items) @@ -874,8 +876,8 @@ class InstanceListView(AbstractInstanceView): proxy_index = self._proxy_model.mapFromSource(group_index) self._instance_view.setExpanded(proxy_index, expanded) - def _on_legacy_group_expand_request(self, _, expanded): - group_item = self._legacy_group_item + def _on_convertor_group_expand_request(self, _, expanded): + group_item = self._convertor_group_item if not group_item: return group_index = self._instance_model.index( @@ -923,7 +925,7 @@ class InstanceListView(AbstractInstanceView): context_selected = False for index in self._instance_view.selectionModel().selectedIndexes(): - convertor_identifier = index.data(LEGACY_CONVERTER_IDENTIFIER) + convertor_identifier = index.data(CONVERTER_IDENTIFIER_ROLE) if convertor_identifier is not None: convertor_identifiers.append(convertor_identifier) continue @@ -974,7 +976,7 @@ class InstanceListView(AbstractInstanceView): (item.child(row), list(new_parent_items)) ) - convertor_identifier = item.data(LEGACY_CONVERTER_IDENTIFIER) + convertor_identifier = item.data(CONVERTER_IDENTIFIER_ROLE) select = False expand_parent = True From be54ff4d27978079855c99e5f8f9f1d188742b53 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 18:00:26 +0200 Subject: [PATCH 1780/2550] rename 'convert_legacy_items' to 'trigger_convertor_items' --- openpype/tools/publisher/control.py | 4 ++-- openpype/tools/publisher/widgets/overview_widget.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index b867bddc9d..245d328be4 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1239,7 +1239,7 @@ class AbstractPublisherController(object): pass @abstractmethod - def convert_legacy_items(self, convertor_identifiers): + def trigger_convertor_items(self, convertor_identifiers): pass @abstractmethod @@ -1854,7 +1854,7 @@ class PublisherController(BasePublisherController): variant, task_name, asset_doc, project_name, instance=instance ) - def convert_legacy_items(self, convertor_identifiers): + def trigger_convertor_items(self, convertor_identifiers): for convertor_identifier in convertor_identifiers: self._create_context.run_convertor(convertor_identifier) self._on_create_instance_change() diff --git 
a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index e208786fc7..7c1755b3eb 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -321,7 +321,7 @@ class OverviewWidget(QtWidgets.QFrame): def _on_convert_requested(self): _, _, convertor_identifiers = self.get_selected_items() - self._controller.convert_legacy_items(convertor_identifiers) + self._controller.trigger_convertor_items(convertor_identifiers) def get_selected_items(self): view = self._subset_views_layout.currentWidget() From a8f1e95696b005cb8466e67ab67d176ac60b1f2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 18:11:06 +0200 Subject: [PATCH 1781/2550] :bug: workfile instance changes are now persisted --- openpype/hosts/houdini/api/pipeline.py | 8 +-- .../houdini/plugins/create/create_workfile.py | 55 ++++++++++++------- 2 files changed, 40 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 88c9029141..6106dd4a6f 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -136,7 +136,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): ) @staticmethod - def _create_context_node(): + def create_context_node(): """Helper for creating context holding node. Returns: @@ -151,20 +151,20 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx.setCreatorState("OpenPype") op_ctx.setComment("OpenPype node to hold context metadata") op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) - op_ctx.hide(True) + # op_ctx.hide(True) return op_ctx def update_context_data(self, data, changes): op_ctx = hou.node(CONTEXT_CONTAINER) if not op_ctx: - op_ctx = self._create_context_node() + op_ctx = self.create_context_node() lib.imprint(op_ctx, data) def get_context_data(self): op_ctx = hou.node(CONTEXT_CONTAINER) if not op_ctx: - op_ctx = self._create_context_node() + op_ctx = self.create_context_node() return lib.read(op_ctx) def save_file(self, dst_path=None): diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 2a7cb14d68..0c6d840810 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -1,10 +1,12 @@ # -*- coding: utf-8 -*- """Creator plugin for creating workfiles.""" from openpype.hosts.houdini.api import plugin -from openpype.hosts.houdini.api.lib import read +from openpype.hosts.houdini.api.lib import read, imprint +from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER from openpype.pipeline import CreatedInstance, AutoCreator -from openpype.pipeline.legacy_io import Session +from openpype.pipeline import legacy_io from openpype.client import get_asset_by_name +import hou class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): @@ -12,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): identifier = "io.openpype.creators.houdini.workfile" label = "Workfile" family = "workfile" - icon = "gears" + icon = "document" default_variant = "Main" @@ -25,9 +27,9 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): ), None) project_name = self.project_name - asset_name = Session["AVALON_ASSET"] - task_name = Session["AVALON_TASK"] - host_name = Session["AVALON_APP"] + asset_name = legacy_io.Session["AVALON_ASSET"] + 
task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if current_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) @@ -44,17 +46,16 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): variant, task_name, asset_doc, project_name, host_name, current_instance) ) - - new_instance = CreatedInstance( + self.log.info("Auto-creating workfile instance...") + current_instance = CreatedInstance( self.family, subset_name, data, self ) - self._add_instance_to_context(new_instance) - - # Update instance context if is not the same + self._add_instance_to_context(current_instance) elif ( current_instance["asset"] != asset_name or current_instance["task"] != task_name ): + # Update instance context if is not the same asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name @@ -63,14 +64,30 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): current_instance["task"] = task_name current_instance["subset"] = subset_name + # write workfile information to context container. + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + + workfile_data = {"workfile": current_instance.data_to_store()} + imprint(op_ctx, workfile_data) + def collect_instances(self): - self.cache_instances(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa - created_instance = CreatedInstance.from_existing( - read(instance), self - ) - self._add_instance_to_context(created_instance) + op_ctx = hou.node(CONTEXT_CONTAINER) + instance = read(op_ctx) + if not instance: + return + workfile = instance.get("workfile") + if not workfile: + return + created_instance = CreatedInstance.from_existing( + workfile, self + ) + self._add_instance_to_context(created_instance) def update_instances(self, update_list): - pass - + op_ctx = hou.node(CONTEXT_CONTAINER) + for created_inst, _changes in update_list: + if created_inst["creator_identifier"] == self.identifier: + workfile_data = {"workfile": created_inst.data_to_store()} + imprint(op_ctx, workfile_data, update=True) From 81f7aa5525e52f229cf4ec340f8a125358d0afeb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 18:15:24 +0200 Subject: [PATCH 1782/2550] get rid of 'legacy' from variables --- openpype/pipeline/create/context.py | 44 ++++++++++----------- openpype/pipeline/create/creator_plugins.py | 33 ++++++++-------- openpype/tools/publisher/control.py | 4 +- 3 files changed, 41 insertions(+), 40 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 250193f511..56d7447a0b 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -22,7 +22,7 @@ from .creator_plugins import ( Creator, AutoCreator, discover_creator_plugins, - discover_legacy_convertor_plugins, + discover_convertor_plugins, ) UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) @@ -853,8 +853,8 @@ class CreatedInstance: self[key] = new_value -class LegacyInstancesItem(object): - """Item representing convertor for legacy instances. +class ConvertorItem(object): + """Item representing convertor plugin. Args: identifier (str): Identifier of convertor. 
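# --- Editorial sketch, not part of the patch above ---------------------------
# Back-reference to the Houdini workfile auto-creator earlier in this series:
# a rough outline of the persistence round-trip it relies on. The helpers are
# assumed to behave exactly as that creator uses them; the stored values are
# illustrative only.
import hou
from openpype.hosts.houdini.api.lib import imprint, read
from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER

op_ctx = hou.node(CONTEXT_CONTAINER)  # the creator makes this node if missing
imprint(op_ctx, {"workfile": {"variant": "Main"}})    # store on create/update
workfile_data = read(op_ctx).get("workfile")          # read back on collect
# ------------------------------------------------------------------------------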
@@ -949,8 +949,8 @@ class CreateContext: # Manual creators self.manual_creators = {} - self.legacy_convertors = {} - self.legacy_items_by_id = {} + self.convertors_plugins = {} + self.convertor_items_by_id = {} self.publish_discover_result = None self.publish_plugins_mismatch_targets = [] @@ -1032,7 +1032,7 @@ class CreateContext: with self.bulk_instances_collection(): self.reset_instances() - self.find_legacy_items() + self.find_convertor_items() self.execute_autocreators() self.reset_finalization() @@ -1090,7 +1090,7 @@ class CreateContext: self._reset_publish_plugins(discover_publish_plugins) self._reset_creator_plugins() - self._reset_legacy_convertor_plugins() + self._reset_convertor_plugins() def _reset_publish_plugins(self, discover_publish_plugins): import pyblish.logic @@ -1186,9 +1186,9 @@ class CreateContext: self.creators = creators - def _reset_legacy_convertor_plugins(self): - legacy_convertors = {} - for convertor_class in discover_legacy_convertor_plugins(): + def _reset_convertor_plugins(self): + convertors_plugins = {} + for convertor_class in discover_convertor_plugins(): if inspect.isabstract(convertor_class): self.log.info( "Skipping abstract Creator {}".format(str(convertor_class)) @@ -1196,16 +1196,16 @@ class CreateContext: continue convertor_identifier = convertor_class.identifier - if convertor_identifier in legacy_convertors: + if convertor_identifier in convertors_plugins: self.log.warning(( "Duplicated Converter identifier. " "Using first and skipping following" )) continue - legacy_convertors[convertor_identifier] = convertor_class(self) + convertors_plugins[convertor_identifier] = convertor_class(self) - self.legacy_convertors = legacy_convertors + self.convertors_plugins = convertors_plugins def reset_context_data(self): """Reload context data using host implementation. 
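# --- Editorial sketch, not part of the patch above ---------------------------
# A minimal convertor as the CreateContext plumbing around this point expects
# it: discovered and instantiated by _reset_convertor_plugins(), asked to
# report incompatible subsets via find_instances(), and run on demand through
# run_convertor(). The base class ends up named SubsetConvertorPlugin after a
# later rename in this series; the scene query below is a placeholder, not a
# real host API.
from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin


def _find_legacy_nodes():
    """Placeholder for a host-specific scene query."""
    return []


class ExampleSubsetConvertor(SubsetConvertorPlugin):
    identifier = "example.legacy.subsets"

    def find_instances(self):
        # Show one "Incompatible subsets" entry when anything was found.
        if _find_legacy_nodes():
            self.add_convertor_item("Example legacy subsets")

    def convert(self):
        # A real convertor would re-imprint new creator metadata here,
        # then drop its entry from the create context.
        self.remove_convertor_item()
# ------------------------------------------------------------------------------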
@@ -1278,13 +1278,13 @@ class CreateContext: def creator_removed_instance(self, instance): self._instances_by_id.pop(instance.id, None) - def add_legacy_item(self, convertor_identifier, label): - self.legacy_items_by_id[convertor_identifier] = ( - LegacyInstancesItem(convertor_identifier, label) + def add_convertor_item(self, convertor_identifier, label): + self.convertor_items_by_id[convertor_identifier] = ConvertorItem( + convertor_identifier, label ) - def remove_legacy_item(self, convertor_identifier): - self.legacy_items_by_id.pop(convertor_identifier, None) + def remove_convertor_item(self, convertor_identifier): + self.convertor_items_by_id.pop(convertor_identifier, None) @contextmanager def bulk_instances_collection(self): @@ -1321,10 +1321,10 @@ class CreateContext: for creator in self.creators.values(): creator.collect_instances() - def find_legacy_items(self): - self.legacy_items_by_id = {} + def find_convertor_items(self): + self.convertor_items_by_id = {} - for convertor in self.legacy_convertors.values(): + for convertor in self.convertors_plugins.values(): try: convertor.find_instances() except: @@ -1502,6 +1502,6 @@ class CreateContext: return self._collection_shared_data def run_convertor(self, convertor_identifier): - convertor = self.legacy_convertors.get(convertor_identifier) + convertor = self.convertors_plugins.get(convertor_identifier) if convertor is not None: convertor.convert() diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index ff9326693e..2e7d8709a2 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -34,7 +34,7 @@ class CreatorError(Exception): @six.add_metaclass(ABCMeta) -class LegacyInstanceConvertor(object): +class LegacySubsetConvertor(object): """Helper for conversion of instances created using legacy creators. Conversion from legacy creators would mean to loose legacy instances, @@ -45,10 +45,10 @@ class LegacyInstanceConvertor(object): Convertor logic should be very simple. Method 'find_instances' is to look for legacy instances in scene a possibly call - pre-implemented 'add_legacy_item'. + pre-implemented 'add_convertor_item'. User will have ability to trigger conversion which is executed by calling - 'convert' which should call 'remove_legacy_item' when is done. + 'convert' which should call 'remove_convertor_item' when is done. It does make sense to add only one or none legacy item to create context for convertor as it's not possible to choose which instace are converted @@ -78,7 +78,8 @@ class LegacyInstanceConvertor(object): def find_instances(self): """Look for legacy instances in the scene. - Should call 'add_legacy_item' if there is at least one item. + Should call 'add_convertor_item' if there is at least one instance to + convert. """ pass @@ -108,19 +109,19 @@ class LegacyInstanceConvertor(object): return self._create_context.collection_shared_data - def add_legacy_item(self, label): + def add_convertor_item(self, label): """Add item to CreateContext. Args: label (str): Label of item which will show in UI. 
""" - self._create_context.add_legacy_item(self.identifier, label) + self._create_context.add_convertor_item(self.identifier, label) - def remove_legacy_item(self): + def remove_convertor_item(self): """Remove legacy item from create context when conversion finished.""" - self._create_context.remove_legacy_item(self.identifier) + self._create_context.remove_convertor_item(self.identifier) @six.add_metaclass(ABCMeta) @@ -559,8 +560,8 @@ def discover_creator_plugins(): return discover(BaseCreator) -def discover_legacy_convertor_plugins(): - return discover(LegacyInstanceConvertor) +def discover_convertor_plugins(): + return discover(LegacySubsetConvertor) def discover_legacy_creator_plugins(): @@ -620,8 +621,8 @@ def register_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): register_plugin(LegacyCreator, plugin) - elif issubclass(plugin, LegacyInstanceConvertor): - register_plugin(LegacyInstanceConvertor, plugin) + elif issubclass(plugin, LegacySubsetConvertor): + register_plugin(LegacySubsetConvertor, plugin) def deregister_creator_plugin(plugin): @@ -631,17 +632,17 @@ def deregister_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): deregister_plugin(LegacyCreator, plugin) - elif issubclass(plugin, LegacyInstanceConvertor): - deregister_plugin(LegacyInstanceConvertor, plugin) + elif issubclass(plugin, LegacySubsetConvertor): + deregister_plugin(LegacySubsetConvertor, plugin) def register_creator_plugin_path(path): register_plugin_path(BaseCreator, path) register_plugin_path(LegacyCreator, path) - register_plugin_path(LegacyInstanceConvertor, path) + register_plugin_path(LegacySubsetConvertor, path) def deregister_creator_plugin_path(path): deregister_plugin_path(BaseCreator, path) deregister_plugin_path(LegacyCreator, path) - deregister_plugin_path(LegacyInstanceConvertor, path) + deregister_plugin_path(LegacySubsetConvertor, path) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 245d328be4..107ddbbb93 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1608,7 +1608,7 @@ class PublisherController(BasePublisherController): @property def convertor_items(self): - return self._create_context.legacy_items_by_id + return self._create_context.convertor_items_by_id @property def _creators(self): @@ -1728,7 +1728,7 @@ class PublisherController(BasePublisherController): self._create_context.reset_context_data() with self._create_context.bulk_instances_collection(): self._create_context.reset_instances() - self._create_context.find_legacy_items() + self._create_context.find_convertor_items() self._create_context.execute_autocreators() self._resetting_instances = False From 4f70a58d5c7e9c604c1d6dabbeb80c4b74ab83b3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 18:17:27 +0200 Subject: [PATCH 1783/2550] renamed 'LegacySubsetConvertor' to 'SubsetConvertorPlugin' --- openpype/pipeline/create/creator_plugins.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 2e7d8709a2..584e082221 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -34,7 +34,7 @@ class CreatorError(Exception): @six.add_metaclass(ABCMeta) -class LegacySubsetConvertor(object): +class SubsetConvertorPlugin(object): """Helper for conversion of instances created using legacy creators. 
Conversion from legacy creators would mean to loose legacy instances, @@ -561,7 +561,7 @@ def discover_creator_plugins(): def discover_convertor_plugins(): - return discover(LegacySubsetConvertor) + return discover(SubsetConvertorPlugin) def discover_legacy_creator_plugins(): @@ -621,8 +621,8 @@ def register_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): register_plugin(LegacyCreator, plugin) - elif issubclass(plugin, LegacySubsetConvertor): - register_plugin(LegacySubsetConvertor, plugin) + elif issubclass(plugin, SubsetConvertorPlugin): + register_plugin(SubsetConvertorPlugin, plugin) def deregister_creator_plugin(plugin): @@ -632,17 +632,17 @@ def deregister_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): deregister_plugin(LegacyCreator, plugin) - elif issubclass(plugin, LegacySubsetConvertor): - deregister_plugin(LegacySubsetConvertor, plugin) + elif issubclass(plugin, SubsetConvertorPlugin): + deregister_plugin(SubsetConvertorPlugin, plugin) def register_creator_plugin_path(path): register_plugin_path(BaseCreator, path) register_plugin_path(LegacyCreator, path) - register_plugin_path(LegacySubsetConvertor, path) + register_plugin_path(SubsetConvertorPlugin, path) def deregister_creator_plugin_path(path): deregister_plugin_path(BaseCreator, path) deregister_plugin_path(LegacyCreator, path) - deregister_plugin_path(LegacySubsetConvertor, path) + deregister_plugin_path(SubsetConvertorPlugin, path) From 87671bcfd6905e7e1bf729c6aa0fef42f47d6d9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 19:16:54 +0200 Subject: [PATCH 1784/2550] added style for errored card message --- openpype/style/data.json | 4 +++- openpype/style/style.css | 21 +++++++++++---------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/openpype/style/data.json b/openpype/style/data.json index fef69071ed..146af84663 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -64,7 +64,9 @@ "overlay-messages": { "close-btn": "#D3D8DE", "bg-success": "#458056", - "bg-success-hover": "#55a066" + "bg-success-hover": "#55a066", + "bg-error": "#AD2E2E", + "bg-error-hover": "#C93636" }, "tab-widget": { "bg": "#21252B", diff --git a/openpype/style/style.css b/openpype/style/style.css index a6818a5792..9919973b06 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -688,22 +688,23 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { } /* Messages overlay */ -#OverlayMessageWidget { +OverlayMessageWidget { border-radius: 0.2em; - background: {color:bg-buttons}; -} - -#OverlayMessageWidget:hover { - background: {color:bg-button-hover}; -} -#OverlayMessageWidget { background: {color:overlay-messages:bg-success}; } -#OverlayMessageWidget:hover { + +OverlayMessageWidget:hover { background: {color:overlay-messages:bg-success-hover}; } -#OverlayMessageWidget QWidget { +OverlayMessageWidget[type="error"] { + background: {color:overlay-messages:bg-error}; +} +OverlayMessageWidget[type="error"]:hover { + background: {color:overlay-messages:bg-error-hover}; +} + +OverlayMessageWidget QWidget { background: transparent; } From 0fd54454192ffec16170b1cca574825955f7397f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 19:25:35 +0200 Subject: [PATCH 1785/2550] wrap convertor callbacks by custom exceptions --- openpype/pipeline/create/context.py | 94 +++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 
b6dce4c03d..c87803c5c4 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -71,6 +71,41 @@ class HostMissRequiredMethod(Exception): super(HostMissRequiredMethod, self).__init__(msg) +class ConvertorsOperationFailed(Exception): + def __init__(self, msg, failed_info): + super(ConvertorsOperationFailed, self).__init__(msg) + self.failed_info = failed_info + + +class ConvertorsFindFailed(ConvertorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to find incompatible subsets" + super(ConvertorsFindFailed, self).__init__( + msg, failed_info + ) + + +class ConvertorsConversionFailed(ConvertorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to convert incompatible subsets" + super(ConvertorsConversionFailed, self).__init__( + msg, failed_info + ) + + +def prepare_failed_convertor_operation_info(identifier, exc_info): + exc_type, exc_value, exc_traceback = exc_info + formatted_traceback = "".join(traceback.format_exception( + exc_type, exc_value, exc_traceback + )) + + return { + "convertor_identifier": identifier, + "message": str(exc_value), + "traceback": formatted_traceback + } + + class CreatorsOperationFailed(Exception): """Raised when a creator process crashes in 'CreateContext'. @@ -1486,12 +1521,26 @@ class CreateContext: raise CreatorsCollectionFailed(failed_info) def find_convertor_items(self): + """Go through convertor plugins to look for items to convert. + + Raises: + ConvertorsFindFailed: When one or more convertors fails during + finding. + """ + self.convertor_items_by_id = {} + failed_info = [] for convertor in self.convertors_plugins.values(): try: convertor.find_instances() + except: + failed_info.append( + prepare_failed_convertor_operation_info( + convertor.identifier, sys.exc_info() + ) + ) self.log.warning( "Failed to find instances of convertor \"{}\"".format( convertor.identifier @@ -1499,6 +1548,9 @@ class CreateContext: exc_info=True ) + if failed_info: + raise ConvertorsFindFailed(failed_info) + def execute_autocreators(self): """Execute discovered AutoCreator plugins. @@ -1756,6 +1808,48 @@ class CreateContext: return self._collection_shared_data def run_convertor(self, convertor_identifier): + """Run convertor plugin by it's idenfitifier. + + Conversion is skipped if convertor is not available. + + Args: + convertor_identifier (str): Identifier of convertor. + """ + convertor = self.convertors_plugins.get(convertor_identifier) if convertor is not None: convertor.convert() + + def run_convertors(self, convertor_identifiers): + """Run convertor plugins by idenfitifiers. + + Conversion is skipped if convertor is not available. + + Args: + convertor_identifiers (Iterator[str]): Identifiers of convertors + to run. + + Raises: + ConvertorsConversionFailed: When one or more convertors fails. 
+ """ + + failed_info = [] + for convertor_identifier in convertor_identifiers: + try: + self.run_convertor(convertor_identifier) + + except: + failed_info.append( + prepare_failed_convertor_operation_info( + convertor_identifier, sys.exc_info() + ) + ) + self.log.warning( + "Failed to convert instances of convertor \"{}\"".format( + convertor_identifier + ), + exc_info=True + ) + + if failed_info: + raise ConvertorsConversionFailed(failed_info) From 9774c507f20623697dbeae1de747ca99d990fded Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 19:26:27 +0200 Subject: [PATCH 1786/2550] Error message box is less creator's specific --- openpype/tools/publisher/window.py | 105 ++++++++++++++++------------- 1 file changed, 57 insertions(+), 48 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index b6bd506c18..58c73f4821 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -1,4 +1,5 @@ import collections +import copy from Qt import QtWidgets, QtCore, QtGui from openpype import ( @@ -224,10 +225,10 @@ class PublisherWindow(QtWidgets.QDialog): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) - creators_dialog_message_timer = QtCore.QTimer() - creators_dialog_message_timer.setInterval(100) - creators_dialog_message_timer.timeout.connect( - self._on_creators_message_timeout + errors_dialog_message_timer = QtCore.QTimer() + errors_dialog_message_timer.setInterval(100) + errors_dialog_message_timer.timeout.connect( + self._on_errors_message_timeout ) help_btn.clicked.connect(self._on_help_click) @@ -268,16 +269,16 @@ class PublisherWindow(QtWidgets.QDialog): "show.card.message", self._on_overlay_message ) controller.event_system.add_callback( - "instances.collection.failed", self._instance_collection_failed + "instances.collection.failed", self._on_creator_error ) controller.event_system.add_callback( - "instances.save.failed", self._instance_save_failed + "instances.save.failed", self._on_creator_error ) controller.event_system.add_callback( - "instances.remove.failed", self._instance_remove_failed + "instances.remove.failed", self._on_creator_error ) controller.event_system.add_callback( - "instances.create.failed", self._instance_create_failed + "instances.create.failed", self._on_creator_error ) # Store extra header widget for TrayPublisher @@ -325,8 +326,8 @@ class PublisherWindow(QtWidgets.QDialog): self._restart_timer = None self._publish_frame_visible = None - self._creators_messages_to_show = collections.deque() - self._creators_dialog_message_timer = creators_dialog_message_timer + self._error_messages_to_show = collections.deque() + self._errors_dialog_message_timer = errors_dialog_message_timer self._set_publish_visibility(False) @@ -357,7 +358,10 @@ class PublisherWindow(QtWidgets.QDialog): self._update_publish_frame_rect() def _on_overlay_message(self, event): - self._overlay_object.add_message(event["message"]) + self._overlay_object.add_message( + event["message"], + event.get("message_type") + ) def _on_first_show(self): self.resize(self.default_width, self.default_height) @@ -604,37 +608,39 @@ class PublisherWindow(QtWidgets.QDialog): 0, window_size.height() - height ) - def add_message_dialog(self, title, failed_info): - self._creators_messages_to_show.append((title, failed_info)) - self._creators_dialog_message_timer.start() + def add_error_message_dialog(self, title, failed_info, message_start=None): + self._error_messages_to_show.append( + 
(title, failed_info, message_start) + ) + self._errors_dialog_message_timer.start() - def _on_creators_message_timeout(self): - if not self._creators_messages_to_show: - self._creators_dialog_message_timer.stop() + def _on_errors_message_timeout(self): + if not self._error_messages_to_show: + self._errors_dialog_message_timer.stop() return - item = self._creators_messages_to_show.popleft() - title, failed_info = item - dialog = CreatorsErrorMessageBox(title, failed_info, self) + item = self._error_messages_to_show.popleft() + title, failed_info, message_start = item + dialog = ErrorsMessageBox( + title, failed_info, message_start, self + ) dialog.exec_() dialog.deleteLater() - def _instance_collection_failed(self, event): - self.add_message_dialog(event["title"], event["failed_info"]) - - def _instance_save_failed(self, event): - self.add_message_dialog(event["title"], event["failed_info"]) - - def _instance_remove_failed(self, event): - self.add_message_dialog(event["title"], event["failed_info"]) - - def _instance_create_failed(self, event): - self.add_message_dialog(event["title"], event["failed_info"]) + def _on_creator_error(self, event): + new_failed_info = [] + for item in event["failed_info"]: + new_item = copy.deepcopy(item) + new_item["label"] = new_item.pop("creator_label") + new_item["identifier"] = new_item.pop("creator_identifier") + new_failed_info.append(new_item) + self.add_error_message_dialog(event["title"], new_failed_info, "Creator:") -class CreatorsErrorMessageBox(ErrorMessageBox): - def __init__(self, error_title, failed_info, parent): +class ErrorsMessageBox(ErrorMessageBox): + def __init__(self, error_title, failed_info, message_start, parent): self._failed_info = failed_info + self._message_start = message_start self._info_with_id = [ # Id must be string when used in tab widget {"id": str(idx), "info": info} @@ -644,7 +650,7 @@ class CreatorsErrorMessageBox(ErrorMessageBox): self._tabs_widget = None self._stack_layout = None - super(CreatorsErrorMessageBox, self).__init__(error_title, parent) + super(ErrorsMessageBox, self).__init__(error_title, parent) layout = self.layout() layout.setContentsMargins(0, 0, 0, 0) @@ -659,17 +665,21 @@ class CreatorsErrorMessageBox(ErrorMessageBox): def _get_report_data(self): output = [] for info in self._failed_info: - creator_label = info["creator_label"] - creator_identifier = info["creator_identifier"] - report_message = "Creator:" - if creator_label: - report_message += " {} ({})".format( - creator_label, creator_identifier) + item_label = info.get("label") + item_identifier = info["identifier"] + if item_label: + report_message = "{} ({})".format( + item_label, item_identifier) else: - report_message += " {}".format(creator_identifier) + report_message = "{}".format(item_identifier) + + if self._message_start: + report_message = "{} {}".format( + self._message_start, report_message + ) report_message += "\n\nError: {}".format(info["message"]) - formatted_traceback = info["traceback"] + formatted_traceback = info.get("traceback") if formatted_traceback: report_message += "\n\n{}".format(formatted_traceback) output.append(report_message) @@ -686,11 +696,10 @@ class CreatorsErrorMessageBox(ErrorMessageBox): item_id = item["id"] info = item["info"] message = info["message"] - formatted_traceback = info["traceback"] - creator_label = info["creator_label"] - creator_identifier = info["creator_identifier"] - if not creator_label: - creator_label = creator_identifier + formatted_traceback = info.get("traceback") + item_label = 
info.get("label") + if not item_label: + item_label = info["identifier"] msg_widget = QtWidgets.QWidget(stack_widget) msg_layout = QtWidgets.QVBoxLayout(msg_widget) @@ -710,7 +719,7 @@ class CreatorsErrorMessageBox(ErrorMessageBox): msg_layout.addStretch(1) - tabs_widget.add_tab(creator_label, item_id) + tabs_widget.add_tab(item_label, item_id) stack_layout.addWidget(msg_widget) if first: first = False From 3ab3582b0a260bb9008a4138e8c3edc1c8f67ac1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 19:26:48 +0200 Subject: [PATCH 1787/2550] prepare to handle convertor errors --- openpype/tools/publisher/window.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 58c73f4821..a3387043b8 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -280,6 +280,12 @@ class PublisherWindow(QtWidgets.QDialog): controller.event_system.add_callback( "instances.create.failed", self._on_creator_error ) + controller.event_system.add_callback( + "convertors.convert.failed", self._on_convertor_error + ) + controller.event_system.add_callback( + "convertors.find.failed", self._on_convertor_error + ) # Store extra header widget for TrayPublisher # - can be used to add additional widgets to header between context @@ -636,6 +642,16 @@ class PublisherWindow(QtWidgets.QDialog): new_failed_info.append(new_item) self.add_error_message_dialog(event["title"], new_failed_info, "Creator:") + def _on_convertor_error(self, event): + new_failed_info = [] + for item in event["failed_info"]: + new_item = copy.deepcopy(item) + new_item["identifier"] = new_item.pop("convertor_identifier") + new_failed_info.append(new_item) + self.add_error_message_dialog( + event["title"], new_failed_info, "Convertor:" + ) + class ErrorsMessageBox(ErrorMessageBox): def __init__(self, error_title, failed_info, message_start, parent): From f9a75ea240e1c1c9c5e9213dbbb32d4cbf354067 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 19:27:21 +0200 Subject: [PATCH 1788/2550] handle ConvertorsOperationFailed in controller --- openpype/tools/publisher/control.py | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 482227e708..7cfc89f59e 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -33,6 +33,7 @@ from openpype.pipeline.create import ( ) from openpype.pipeline.create.context import ( CreatorsOperationFailed, + ConvertorsOperationFailed, ) # Define constant for plugin orders offset @@ -1743,7 +1744,16 @@ class PublisherController(BasePublisherController): } ) - self._create_context.find_convertor_items() + try: + self._create_context.find_convertor_items() + except ConvertorsOperationFailed as exc: + self._emit_event( + "convertors.find.failed", + { + "title": "Collection of unsupported subset failed", + "failed_info": exc.failed_info + } + ) try: self._create_context.execute_autocreators() @@ -1881,8 +1891,19 @@ class PublisherController(BasePublisherController): ) def trigger_convertor_items(self, convertor_identifiers): - for convertor_identifier in convertor_identifiers: - self._create_context.run_convertor(convertor_identifier) + success = True + try: + self._create_context.run_convertors(convertor_identifiers) + + except ConvertorsOperationFailed as exc: + success = False + self._emit_event( + "convertors.convert.failed", + 
{ + "title": "Conversion failed", + "failed_info": exc.failed_info + } + ) self._on_create_instance_change() self.emit_card_message("Conversion finished") From 22a1191ab1a4e8ce516aef216e18f0f5a0817c68 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 19:27:34 +0200 Subject: [PATCH 1789/2550] emit card message can accept message types --- openpype/tools/publisher/control.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 7cfc89f59e..d4dddb75d5 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1264,7 +1264,7 @@ class AbstractPublisherController(object): pass @abstractmethod - def emit_card_message(self, message): + def emit_card_message(self, message, message_type=None): """Emit a card message which can have a lifetime. This is for UI purposes. Method can be extended to more arguments @@ -1771,8 +1771,14 @@ class PublisherController(BasePublisherController): self._on_create_instance_change() - def emit_card_message(self, message): - self._emit_event("show.card.message", {"message": message}) + def emit_card_message(self, message, message_type=None): + self._emit_event( + "show.card.message", + { + "message": message, + "message_type": message_type + } + ) def get_creator_attribute_definitions(self, instances): """Collect creator attribute definitions for multuple instances. From 12a272a8eec1c63bc2aece3c5a9acbb56cee0867 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 24 Oct 2022 19:32:06 +0200 Subject: [PATCH 1790/2550] added different types of card messages --- openpype/tools/publisher/control.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index d4dddb75d5..18d1a5b083 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -40,6 +40,11 @@ from openpype.pipeline.create.context import ( PLUGIN_ORDER_OFFSET = 0.5 +class CardMessageTypes: + standard = None + error = "error" + + class MainThreadItem: """Callback with args and kwargs.""" @@ -1264,7 +1269,9 @@ class AbstractPublisherController(object): pass @abstractmethod - def emit_card_message(self, message, message_type=None): + def emit_card_message( + self, message, message_type=CardMessageTypes.standard + ): """Emit a card message which can have a lifetime. This is for UI purposes. Method can be extended to more arguments @@ -1771,7 +1778,9 @@ class PublisherController(BasePublisherController): self._on_create_instance_change() - def emit_card_message(self, message, message_type=None): + def emit_card_message( + self, message, message_type=CardMessageTypes.standard + ): self._emit_event( "show.card.message", { @@ -1910,8 +1919,12 @@ class PublisherController(BasePublisherController): "failed_info": exc.failed_info } ) + + if success: + self.emit_card_message("Conversion finished") + else: + self.emit_card_message("Conversion failed", CardMessageTypes.error) self._on_create_instance_change() - self.emit_card_message("Conversion finished") def create( self, creator_identifier, subset_name, instance_data, options From 754cebb06fbf0a01d63d33c8b1bda918c48b28b5 Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Mon, 24 Oct 2022 13:13:22 -0700 Subject: [PATCH 1791/2550] Update dev_requirements.md Small typo and grammar fixes. 
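The card-message changes above (PATCH 1789 and 1790) turn the plain message into a (message, message_type) pair, with CardMessageTypes.standard mapping to None so older emitters keep working unchanged. A minimal, self-contained sketch of how a consumer of the "show.card.message" event payload might branch on that type; the show_card callback below is a hypothetical stand-in for the publisher overlay widget, not part of the patches:

    class CardMessageTypes:
        standard = None
        error = "error"


    def handle_card_message(event_data, show_card):
        # "message" is always present, "message_type" may be missing
        # when the event comes from an older emitter.
        message = event_data["message"]
        message_type = event_data.get("message_type")
        if message_type == CardMessageTypes.error:
            # An error card could be styled or prefixed differently.
            show_card("ERROR: {}".format(message))
        else:
            show_card(message)


    # Payloads mirroring the controller's 'emit_card_message' calls:
    handle_card_message({"message": "Conversion finished"}, print)
    handle_card_message(
        {"message": "Conversion failed", "message_type": "error"}, print)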
--- website/docs/dev_requirements.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index eb4b132297..1c8958d1c0 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -39,13 +39,13 @@ Pype needs site-wide installation of **MongoDB**. It should be installed on reliable server, that all workstations (and possibly render nodes) can connect. This server holds **Avalon** database that is at the core of everything -Depending on project size and number of artists working connection speed and +Depending on project size and number of artists working, connection speed and latency influence performance experienced by artists. If remote working is required, this mongodb server must be accessible from Internet or cloud solution can be used. Reasonable backup plan or high availability options are recommended. *Replication* feature of MongoDB should be considered. This is beyond the scope of this documentation, please refer to [MongoDB Documentation](https://docs.mongodb.com/manual/replication/). -Pype can run it's own instance of mongodb, mostly for testing and development purposes. +Pype can run its own instance of mongodb, mostly for testing and development purposes. For that it uses locally installed MongoDB. Download it from [mognoDB website](https://www.mongodb.com/download-center/community), install it and @@ -69,7 +69,7 @@ the major DCCs, it most probably can run openPYPE. Installed, it takes around 400MB of space, depending on the platform -For well functioning ftrack event server, we recommend a linux virtual server with Ubuntu or CentOS. CPU and RAM allocation needs differ based on the studio size, but a 2GB of ram, with a dual core CPU and around 4GB of storage should suffice +For a well functioning ftrack event server, we recommend a linux virtual server with Ubuntu or CentOS. CPU and RAM allocation needs differ based on the studio size, but a 2GB of ram, with a dual core CPU and around 4GB of storage should suffice ## Deployment From 72287eb3d375d6c999c961296ecb1cf3b34a6761 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 10:23:15 +0200 Subject: [PATCH 1792/2550] validate source streams before otio burnins super is called --- openpype/scripts/otio_burnin.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/scripts/otio_burnin.py b/openpype/scripts/otio_burnin.py index 4c3a5de2ec..3520d8668c 100644 --- a/openpype/scripts/otio_burnin.py +++ b/openpype/scripts/otio_burnin.py @@ -113,11 +113,20 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if not ffprobe_data: ffprobe_data = _get_ffprobe_data(source) + # Validate 'streams' before calling super to raise more specific + # error + source_streams = ffprobe_data.get("streams") + if not source_streams: + raise ValueError(( + "Input file \"{}\" does not contain any streams" + " with image/video content." 
+ ).format(source)) + self.ffprobe_data = ffprobe_data self.first_frame = first_frame self.input_args = [] - super().__init__(source, ffprobe_data["streams"]) + super().__init__(source, source_streams) if options_init: self.options_init.update(options_init) From 438922ccb0e7bd12c559e61c07f782c6d3537929 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 10:31:53 +0200 Subject: [PATCH 1793/2550] ffprobe run as list of args instead of string --- openpype/scripts/otio_burnin.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/openpype/scripts/otio_burnin.py b/openpype/scripts/otio_burnin.py index 3520d8668c..7223e8d4de 100644 --- a/openpype/scripts/otio_burnin.py +++ b/openpype/scripts/otio_burnin.py @@ -22,10 +22,6 @@ FFMPEG = ( '"{}"%(input_args)s -i "%(input)s" %(filters)s %(args)s%(output)s' ).format(ffmpeg_path) -FFPROBE = ( - '"{}" -v quiet -print_format json -show_format -show_streams "%(source)s"' -).format(ffprobe_path) - DRAWTEXT = ( "drawtext=fontfile='%(font)s':text=\\'%(text)s\\':" "x=%(x)s:y=%(y)s:fontcolor=%(color)s@%(opacity).1f:fontsize=%(size)d" @@ -48,8 +44,15 @@ def _get_ffprobe_data(source): :param str source: source media file :rtype: [{}, ...] """ - command = FFPROBE % {'source': source} - proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) + command = [ + ffprobe_path, + "-v", "quiet", + "-print_format", "json", + "-show_format", + "-show_streams", + source + ] + proc = subprocess.Popen(command, stdout=subprocess.PIPE) out = proc.communicate()[0] if proc.returncode != 0: raise RuntimeError("Failed to run: %s" % command) From 52bb4a0d40ba62f7be6c8e589bd36537571897e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 10:33:10 +0200 Subject: [PATCH 1794/2550] fix publisher import in experimental tools --- openpype/tools/experimental_tools/tools_def.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/experimental_tools/tools_def.py b/openpype/tools/experimental_tools/tools_def.py index fa2971dc1d..d3a1caa60e 100644 --- a/openpype/tools/experimental_tools/tools_def.py +++ b/openpype/tools/experimental_tools/tools_def.py @@ -164,9 +164,9 @@ class ExperimentalTools: def _show_publisher(self): if self._publisher_tool is None: - from openpype.tools import publisher + from openpype.tools.publisher.window import PublisherWindow - self._publisher_tool = publisher.PublisherWindow( + self._publisher_tool = PublisherWindow( parent=self._parent_widget ) From c90e8fed53c2f6c50346684c90b728b990ff25b5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 11:56:52 +0200 Subject: [PATCH 1795/2550] fix thumbnail publishing from standalone publisher --- .../standalonepublisher/plugins/publish/extract_thumbnail.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py index 3ee2f70809..8d7ea07f42 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_thumbnail.py @@ -118,6 +118,7 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin): 'files': filename, "stagingDir": staging_dir, "tags": ["thumbnail", "delete"], + "thumbnail": True } if width and height: representation["width"] = width From 051189bbca25f08fa1a1403809e92b0a80d49e18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:36:09 +0200 
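PATCH 1793 above replaces the shell-formatted ffprobe command with an argument list, which removes the quoting problems around source paths containing spaces or quotes. A standalone sketch of the same call pattern, assuming a plain "ffprobe" executable on PATH, whereas the patch resolves OpenPype's vendored ffprobe_path:

    import json
    import subprocess


    def get_ffprobe_data(source, ffprobe_path="ffprobe"):
        # No shell is involved, so 'source' needs no manual quoting.
        cmd = [
            ffprobe_path,
            "-v", "quiet",
            "-print_format", "json",
            "-show_format",
            "-show_streams",
            source,
        ]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        out, _ = proc.communicate()
        if proc.returncode != 0:
            raise RuntimeError("Failed to run: {}".format(cmd))
        return json.loads(out)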
Subject: [PATCH 1796/2550] :bug: fix creator id --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 5bb5786a40..590c8f97fd 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -11,7 +11,7 @@ from openpype.hosts.houdini.api import plugin class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - identifier = "hda" + identifier = "io.openpype.creators.houdini.hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" From 6db2c8e33f78d2e6751665c3e22bb8c91b4329ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:36:54 +0200 Subject: [PATCH 1797/2550] :recycle: refactor name, collect legacy subsets --- openpype/hosts/houdini/api/plugin.py | 31 ++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index aae6d137ac..4dc6641ac9 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -96,13 +96,15 @@ class Creator(LegacyCreator): class HoudiniCreatorBase(object): @staticmethod - def cache_instances(shared_data): + def cache_subsets(shared_data): """Cache instances for Creators to shared data. - Create `houdini_cached_instances` key when needed in shared data and + Create `houdini_cached_subsets` key when needed in shared data and fill it with all collected instances from the scene under its respective creator identifiers. + U + Args: Dict[str, Any]: Shared data. @@ -110,15 +112,26 @@ class HoudiniCreatorBase(object): Dict[str, Any]: Shared data dictionary. 
""" - if shared_data.get("houdini_cached_instances") is None: - shared_data["houdini_cached_instances"] = {} + if shared_data.get("houdini_cached_subsets") is None: + shared_data["houdini_cached_subsets"] = {} + if shared_data.get("houdini_cached_legacy_subsets") is None: + shared_data["houdini_cached_legacy_subsets"] = {} cached_instances = lsattr("id", "pyblish.avalon.instance") for i in cached_instances: + if not i.parm("creator_identifier"): + # we have legacy instance + family = i.parm("family").eval() + if family not in shared_data["houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][family] = [i] + else: + shared_data["houdini_cached_legacy_subsets"][family].append(i) + continue + creator_id = i.parm("creator_identifier").eval() - if creator_id not in shared_data["houdini_cached_instances"]: - shared_data["houdini_cached_instances"][creator_id] = [i] + if creator_id not in shared_data["houdini_cached_subsets"]: + shared_data["houdini_cached_subsets"][creator_id] = [i] else: - shared_data["houdini_cached_instances"][creator_id].append(i) # noqa + shared_data["houdini_cached_subsets"][creator_id].append(i) # noqa return shared_data @staticmethod @@ -194,8 +207,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def collect_instances(self): # cache instances if missing - self.cache_instances(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data["houdini_cached_subsets"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self ) From 0fa86d5ce4fd772dfa37fb54eea1dc438680a471 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:37:15 +0200 Subject: [PATCH 1798/2550] :bug: fix lost pointer issue --- openpype/hosts/houdini/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 2452ceef62..13f5a62ec3 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -348,6 +348,9 @@ def imprint(node, data, update=False): else: for template in templates: parm_group.appendToFolder(parm_folder, template) + # this is needed because the pointer to folder + # is for some reason lost every call to `appendToFolder()` + parm_folder = parm_group.findFolder("Extra") node.setParmTemplateGroup(parm_group) From 1dcd49576b1c98d200c494fe4cd8658468bca4d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:37:37 +0200 Subject: [PATCH 1799/2550] :bug: hide context node by default --- openpype/hosts/houdini/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 6106dd4a6f..b0791fcb6c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -151,7 +151,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx.setCreatorState("OpenPype") op_ctx.setComment("OpenPype node to hold context metadata") op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) - # op_ctx.hide(True) + op_ctx.hide(True) return op_ctx def update_context_data(self, data, changes): From 20d111d60a1c0ac431adfc8567eeac87679b144a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:38:02 +0200 Subject: 
[PATCH 1800/2550] :sparkles: add legacy subset converter --- .../houdini/plugins/create/convert_legacy.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 openpype/hosts/houdini/plugins/create/convert_legacy.py diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py new file mode 100644 index 0000000000..be7ef714ba --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin +from openpype.hosts.houdini.api.lib import imprint + + +class HoudiniLegacyConvertor(SubsetConvertorPlugin): + identifier = "io.openpype.creators.houdini.legacy" + family_to_id = { + "camera": "io.openpype.creators.houdini.camera", + "ass": "io.openpype.creators.houdini.ass", + "imagesequence": "io.openpype.creators.houdini.imagesequence", + "hda": "io.openpype.creators.houdini.hda", + "pointcache": "io.openpype.creators.houdini.pointcache", + "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy", + "redshift_rop": "io.openpype.creators.houdini.redshift_rop", + "usd": "io.openpype.creators.houdini.usd", + "usdrender": "io.openpype.creators.houdini.usdrender", + "vdbcache": "io.openpype.creators.houdini.vdbcache" + } + + def __init__(self, *args, **kwargs): + super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs) + self.legacy_subsets = {} + + def find_instances(self): + self.legacy_subsets = self.collection_shared_data.get( + "houdini_cached_legacy_subsets") + if not self.legacy_subsets: + return + self.add_convertor_item("Found {} incompatible subset{}.".format( + len(self.legacy_subsets), "s" if len(self.legacy_subsets) > 1 else "") + ) + + def convert(self): + if not self.legacy_subsets: + return + + for family, subsets in self.legacy_subsets.items(): + if family in self.family_to_id: + for subset in subsets: + data = { + "creator_identifier": self.family_to_id[family], + "instance_node": subset.path() + } + print("Converting {} to {}".format( + subset.path(), self.family_to_id[family])) + imprint(subset, data) From 6f642ab34c09c617b0a0a10adc6d1821b901f337 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:56:28 +0200 Subject: [PATCH 1801/2550] trigger reset of controller when conversion finishes --- openpype/pipeline/create/context.py | 3 ++- openpype/tools/publisher/control.py | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index c87803c5c4..52a1729233 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1823,7 +1823,8 @@ class CreateContext: def run_convertors(self, convertor_identifiers): """Run convertor plugins by idenfitifiers. - Conversion is skipped if convertor is not available. + Conversion is skipped if convertor is not available. It is recommended + to trigger reset after conversion to reload instances. 
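PATCH 1800 above maps each legacy family to a creator identifier and then only needs to imprint two values, creator_identifier and instance_node, onto every legacy node. A host-agnostic sketch of that conversion step; the imprint callback and the node objects exposing path() are simplified stand-ins for the Houdini API used by the plugin, and the mapping is shortened:

    FAMILY_TO_ID = {
        "camera": "io.openpype.creators.houdini.camera",
        "pointcache": "io.openpype.creators.houdini.pointcache",
    }


    def convert_legacy_subsets(legacy_subsets_by_family, imprint):
        # Keys are family names, values are lists of legacy subset nodes.
        for family, subsets in legacy_subsets_by_family.items():
            creator_id = FAMILY_TO_ID.get(family)
            if creator_id is None:
                # Unknown family: leave the node untouched.
                continue
            for subset in subsets:
                imprint(subset, {
                    "creator_identifier": creator_id,
                    "instance_node": subset.path(),
                })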
Args: convertor_identifiers (Iterator[str]): Identifiers of convertors diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 18d1a5b083..e05cffe20e 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1906,6 +1906,8 @@ class PublisherController(BasePublisherController): ) def trigger_convertor_items(self, convertor_identifiers): + self.save_changes() + success = True try: self._create_context.run_convertors(convertor_identifiers) @@ -1924,7 +1926,8 @@ class PublisherController(BasePublisherController): self.emit_card_message("Conversion finished") else: self.emit_card_message("Conversion failed", CardMessageTypes.error) - self._on_create_instance_change() + + self.reset() def create( self, creator_identifier, subset_name, instance_data, options @@ -1972,7 +1975,6 @@ class PublisherController(BasePublisherController): Args: instance_ids (List[str]): List of instance ids to remove. """ - # TODO expect instance ids instead of instances # QUESTION Expect that instances are really removed? In that case save # reset is not required and save changes too. self.save_changes() From 698fe8379ea78901418f4cc4f1d6f8fc941c40ae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Oct 2022 16:57:33 +0200 Subject: [PATCH 1802/2550] added logger to convertor --- openpype/pipeline/create/creator_plugins.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 584e082221..c69abb8861 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -61,9 +61,23 @@ class SubsetConvertorPlugin(object): create_context """ + _log = None + def __init__(self, create_context): self._create_context = create_context + @property + def log(self): + """Logger of the plugin. + + Returns: + logging.Logger: Logger with name of the plugin. + """ + + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + @abstractproperty def identifier(self): """Converted identifier. From 8a1040aa7495aa6c3578033c5f6bad0321ec209d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 18:26:36 +0200 Subject: [PATCH 1803/2550] :rotating_light: various :dog: fixes and docstrings --- openpype/hosts/houdini/api/plugin.py | 27 ++++++++++++------- .../houdini/plugins/create/convert_legacy.py | 27 +++++++++++++++++++ 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 4dc6641ac9..b5f79838d1 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -103,7 +103,9 @@ class HoudiniCreatorBase(object): fill it with all collected instances from the scene under its respective creator identifiers. - U + If legacy instances are detected in the scene, create + `houdini_cached_legacy_subsets` there and fill it with + all legacy subsets under family as a key. Args: Dict[str, Any]: Shared data. 
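PATCH 1802 above gives convertor plugins a lazily created logger named after the concrete subclass, so every plugin logs under its own name without extra boilerplate. The same pattern sketched with the standard library logger; the patch itself routes through OpenPype's Logger.get_logger wrapper instead:

    import logging


    class PluginBase(object):
        _log = None

        @property
        def log(self):
            # Created on first access and named after the subclass.
            if self._log is None:
                self._log = logging.getLogger(type(self).__name__)
            return self._log


    class MyConvertor(PluginBase):
        pass


    MyConvertor().log.warning("emitted under the 'MyConvertor' logger")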
@@ -121,17 +123,21 @@ class HoudiniCreatorBase(object): if not i.parm("creator_identifier"): # we have legacy instance family = i.parm("family").eval() - if family not in shared_data["houdini_cached_legacy_subsets"]: - shared_data["houdini_cached_legacy_subsets"][family] = [i] + if family not in shared_data[ + "houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][ + family] = [i] else: - shared_data["houdini_cached_legacy_subsets"][family].append(i) + shared_data[ + "houdini_cached_legacy_subsets"][family].append(i) continue creator_id = i.parm("creator_identifier").eval() if creator_id not in shared_data["houdini_cached_subsets"]: shared_data["houdini_cached_subsets"][creator_id] = [i] else: - shared_data["houdini_cached_subsets"][creator_id].append(i) # noqa + shared_data[ + "houdini_cached_subsets"][creator_id].append(i) # noqa return shared_data @staticmethod @@ -159,6 +165,7 @@ class HoudiniCreatorBase(object): @six.add_metaclass(ABCMeta) class HoudiniCreator(NewCreator, HoudiniCreatorBase): + """Base class for most of the Houdini creator plugins.""" selected_nodes = [] def create(self, subset_name, instance_data, pre_create_data): @@ -208,7 +215,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def collect_instances(self): # cache instances if missing self.cache_subsets(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_subsets"].get(self.identifier, []): # noqa + for instance in self.collection_shared_data[ + "houdini_cached_subsets"].get(self.identifier, []): created_instance = CreatedInstance.from_existing( read(instance), self ) @@ -231,11 +239,10 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def remove_instances(self, instances): """Remove specified instance from the scene. - This is only removing `id` parameter so instance is no longer - instance, - because it might contain valuable data for artist. + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. - """ + """ for instance in instances: instance_node = hou.node(instance.data.get("instance_node")) to_delete = None diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py index be7ef714ba..2f3d1ef708 100644 --- a/openpype/hosts/houdini/plugins/create/convert_legacy.py +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -1,9 +1,22 @@ # -*- coding: utf-8 -*- +"""Convertor for legacy Houdini subsets.""" from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin from openpype.hosts.houdini.api.lib import imprint class HoudiniLegacyConvertor(SubsetConvertorPlugin): + """Find and convert any legacy subsets in the scene. + + This Convertor will find all legacy subsets in the scene and will + transform them to the current system. Since the old subsets doesn't + retain any information about their original creators, the only mapping + we can do is based on their families. + + Its limitation is that you can have multiple creators creating subset + of the same family and there is no way to handle it. This code should + nevertheless cover all creators that came with OpenPype. + + """ identifier = "io.openpype.creators.houdini.legacy" family_to_id = { "camera": "io.openpype.creators.houdini.camera", @@ -23,6 +36,15 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): self.legacy_subsets = {} def find_instances(self): + """Find legacy subsets in the scene. 
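The cache_subsets refactor above (PATCH 1797, revisited in PATCH 1803) builds the shared-data cache once per collection pass and sorts scene nodes into two buckets: by creator_identifier when that parameter exists, and by family for legacy nodes. The same grouping reduced to plain dictionaries; the node objects here are simplified dict stand-ins for Houdini nodes and their parms:

    from collections import defaultdict


    def cache_subsets(nodes, shared_data):
        # Fill the cache only once; later creators reuse it as-is.
        if "cached_subsets" not in shared_data:
            by_creator = defaultdict(list)
            legacy_by_family = defaultdict(list)
            for node in nodes:
                creator_id = node.get("creator_identifier")
                if creator_id:
                    by_creator[creator_id].append(node)
                else:
                    legacy_by_family[node["family"]].append(node)
            shared_data["cached_subsets"] = dict(by_creator)
            shared_data["cached_legacy_subsets"] = dict(legacy_by_family)
        return shared_data

    # A creator would then read:
    #   shared_data["cached_subsets"].get(self.identifier, [])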
+ + Legacy subsets are the ones that doesn't have `creator_identifier` + parameter on them. + + This is using cached entries done in + :py:meth:`~HoudiniCreatorBase.cache_subsets()` + + """ self.legacy_subsets = self.collection_shared_data.get( "houdini_cached_legacy_subsets") if not self.legacy_subsets: @@ -32,6 +54,11 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): ) def convert(self): + """Convert all legacy subsets to current. + + It is enough to add `creator_identifier` and `instance_node`. + + """ if not self.legacy_subsets: return From 4be13d4324cbf7efc9128cb613f4fe3456e1416e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 22:55:09 +0200 Subject: [PATCH 1804/2550] :recycle: switch print for log --- openpype/hosts/houdini/plugins/create/convert_legacy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py index 2f3d1ef708..4b8041b4f5 100644 --- a/openpype/hosts/houdini/plugins/create/convert_legacy.py +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -69,6 +69,6 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): "creator_identifier": self.family_to_id[family], "instance_node": subset.path() } - print("Converting {} to {}".format( + self.log.info("Converting {} to {}".format( subset.path(), self.family_to_id[family])) imprint(subset, data) From 9b74287bb2d746da8f128a707d7e80785888d571 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 26 Oct 2022 03:58:16 +0000 Subject: [PATCH 1805/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index b1e4227030..bf36fc4b10 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.5" +__version__ = "3.14.6-nightly.1" From 82ded66bd620eccaa2951d92e7de961aac121c4e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 11:37:53 +0200 Subject: [PATCH 1806/2550] add thumbnal if there are not reviewables --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 53c6e69ac0..2d06e2ab02 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -151,7 +151,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): first_thumbnail_component = None first_thumbnail_component_repre = None - if has_movie_review: + if not review_representations or has_movie_review: for repre in thumbnail_representations: repre_path = self._get_repre_path(instance, repre, False) if not repre_path: From 3eda8aa64bb2f975a5577344fc065aec5130e725 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 26 Oct 2022 11:46:52 +0200 Subject: [PATCH 1807/2550] add milestone worlfow to main --- .github/workflows/milestone_assign.yml | 28 ++++++++++++ .github/workflows/milestone_create.yml | 62 ++++++++++++++++++++++++++ 2 files changed, 90 insertions(+) create mode 100644 .github/workflows/milestone_assign.yml create mode 100644 .github/workflows/milestone_create.yml diff --git a/.github/workflows/milestone_assign.yml b/.github/workflows/milestone_assign.yml new 
file mode 100644 index 0000000000..b41886816b --- /dev/null +++ b/.github/workflows/milestone_assign.yml @@ -0,0 +1,28 @@ +name: Milestone - assign to PRs + +on: + pull_request_target: + types: [opened, reopened, edited] + +jobs: + run_if_release: + if: startsWith(github.base_ref, 'release/') + runs-on: ubuntu-latest + steps: + - name: 'Assign Milestone [next-minor]' + if: github.event.pull_request.milestone == null + uses: zoispag/action-assign-milestone@v1 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + milestone: 'next-minor' + + run_if_develop: + if: ${{ github.base_ref == 'develop' }} + runs-on: ubuntu-latest + steps: + - name: 'Assign Milestone [next-patch]' + if: github.event.pull_request.milestone == null + uses: zoispag/action-assign-milestone@v1 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + milestone: 'next-patch' \ No newline at end of file diff --git a/.github/workflows/milestone_create.yml b/.github/workflows/milestone_create.yml new file mode 100644 index 0000000000..b56ca81dc1 --- /dev/null +++ b/.github/workflows/milestone_create.yml @@ -0,0 +1,62 @@ +name: Milestone - create default + +on: + milestone: + types: [closed, edited] + +jobs: + generate-next-patch: + runs-on: ubuntu-latest + steps: + - name: 'Get Milestones' + uses: "WyriHaximus/github-action-get-milestones@master" + id: milestones + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + + - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number') + id: querymilestone + env: + MILESTONES: ${{ steps.milestones.outputs.milestones }} + MILESTONE: "next-patch" + + - name: Read output + run: | + echo "${{ steps.querymilestone.outputs.number }}" + + - name: 'Create `next-patch` milestone' + if: steps.querymilestone.outputs.number == '' + id: createmilestone + uses: "WyriHaximus/github-action-create-milestone@v1" + with: + title: 'next-patch' + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + + generate-next-minor: + runs-on: ubuntu-latest + steps: + - name: 'Get Milestones' + uses: "WyriHaximus/github-action-get-milestones@master" + id: milestones + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + + - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number') + id: querymilestone + env: + MILESTONES: ${{ steps.milestones.outputs.milestones }} + MILESTONE: "next-minor" + + - name: Read output + run: | + echo "${{ steps.querymilestone.outputs.number }}" + + - name: 'Create `next-minor` milestone' + if: steps.querymilestone.outputs.number == '' + id: createmilestone + uses: "WyriHaximus/github-action-create-milestone@v1" + with: + title: 'next-minor' + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" \ No newline at end of file From 3ce9bd26ffea646dd3373af039f1bba8fd0c18fa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 26 Oct 2022 11:53:32 +0200 Subject: [PATCH 1808/2550] adding synchronized to workflow --- .github/workflows/milestone_assign.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/milestone_assign.yml b/.github/workflows/milestone_assign.yml index b41886816b..c5a231e59e 100644 --- a/.github/workflows/milestone_assign.yml +++ b/.github/workflows/milestone_assign.yml @@ -2,7 +2,7 @@ name: Milestone - assign to PRs on: pull_request_target: - types: [opened, reopened, edited] + types: [opened, reopened, edited, synchronize] jobs: run_if_release: From 
3ae02cfb2b570070d5b987969d4aa1667bbacbfa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 26 Oct 2022 13:51:03 +0200 Subject: [PATCH 1809/2550] :bug: handle missing directory --- igniter/bootstrap_repos.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index ccc9d4ac52..addcbed24c 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -815,6 +815,13 @@ class BootstrapRepos: except Exception as e: self._print(str(e), LOG_ERROR, exc_info=True) return None + if not destination_dir.exists(): + destination_dir.mkdir(parents=True) + elif not destination_dir.is_dir(): + self._print( + "Destination exists but is not directory.", LOG_ERROR) + return None + try: shutil.move(zip_file.as_posix(), destination_dir.as_posix()) except shutil.Error as e: From b6a2be53d4f1e7d2caf827ad4d6b45e366e9b1b0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 17:40:35 +0200 Subject: [PATCH 1810/2550] removed unused imports --- openpype/tools/publisher/widgets/create_widget.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 910b2adfc7..c7d001e92e 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -1,11 +1,8 @@ -import sys import re -import traceback from Qt import QtWidgets, QtCore, QtGui from openpype.pipeline.create import ( - CreatorError, SUBSET_NAME_ALLOWED_SYMBOLS, TaskNotSetError, ) From 2a91415b42400d24f77dc2bff008d5e29e387114 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 19:01:34 +0200 Subject: [PATCH 1811/2550] Create widget has thumbnail --- .../tools/publisher/widgets/create_widget.py | 40 +++++++++++++++---- 1 file changed, 33 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index c7d001e92e..a8ca9af17d 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -10,6 +10,7 @@ from openpype.pipeline.create import ( from .widgets import ( IconValuePixmapLabel, CreateBtn, + ThumbnailWidget, ) from .assets_widget import CreateWidgetAssetsWidget from .tasks_widget import CreateWidgetTasksWidget @@ -23,11 +24,11 @@ from ..constants import ( SEPARATORS = ("---separator---", "---") -class VariantInputsWidget(QtWidgets.QWidget): +class ResizeControlWidget(QtWidgets.QWidget): resized = QtCore.Signal() def resizeEvent(self, event): - super(VariantInputsWidget, self).resizeEvent(event) + super(ResizeControlWidget, self).resizeEvent(event) self.resized.emit() @@ -150,13 +151,19 @@ class CreateWidget(QtWidgets.QWidget): # --- Creator attr defs --- creators_attrs_widget = QtWidgets.QWidget(creators_splitter) + # Top part - variant / subset name + thumbnail + creators_attrs_top = QtWidgets.QWidget(creators_attrs_widget) + + # Basics - variant / subset name + creator_basics_widget = ResizeControlWidget(creators_attrs_top) + variant_subset_label = QtWidgets.QLabel( - "Create options", creators_attrs_widget + "Create options", creator_basics_widget ) - variant_subset_widget = QtWidgets.QWidget(creators_attrs_widget) + variant_subset_widget = QtWidgets.QWidget(creator_basics_widget) # Variant and subset input - variant_widget = VariantInputsWidget(creators_attrs_widget) + variant_widget = ResizeControlWidget(variant_subset_widget) 
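In PATCH 1811 above, the variant/subset block is wrapped in ResizeControlWidget purely so that its resize events become a Qt signal; further down in this same patch the signal is used to keep the thumbnail at the height of the basics block. A reduced sketch of that wiring, assuming the Qt.py binding the repository already uses (the connect lines are shown as comments because widgets need a running QApplication):

    from Qt import QtCore, QtWidgets


    class ResizeControlWidget(QtWidgets.QWidget):
        resized = QtCore.Signal()

        def resizeEvent(self, event):
            # Re-emit the resize as a signal so siblings can react to it.
            super(ResizeControlWidget, self).resizeEvent(event)
            self.resized.emit()


    # Typical wiring: mirror the basics block height onto a sibling widget.
    #   basics = ResizeControlWidget(parent)
    #   basics.resized.connect(
    #       lambda: thumbnail.set_height(basics.sizeHint().height()))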
variant_input = QtWidgets.QLineEdit(variant_widget) variant_input.setObjectName("VariantInput") @@ -183,6 +190,18 @@ class CreateWidget(QtWidgets.QWidget): variant_subset_layout.addRow("Variant", variant_widget) variant_subset_layout.addRow("Subset", subset_name_input) + creator_basics_layout = QtWidgets.QVBoxLayout(creator_basics_widget) + creator_basics_layout.setContentsMargins(0, 0, 0, 0) + creator_basics_layout.addWidget(variant_subset_label, 0) + creator_basics_layout.addWidget(variant_subset_widget, 0) + + thumbnail_widget = ThumbnailWidget(creators_attrs_top) + + creators_attrs_top_layout = QtWidgets.QHBoxLayout(creators_attrs_top) + creators_attrs_top_layout.setContentsMargins(0, 0, 0, 0) + creators_attrs_top_layout.addWidget(creator_basics_widget, 1) + creators_attrs_top_layout.addWidget(thumbnail_widget, 0) + # Precreate attributes widget pre_create_widget = PreCreateWidget(creators_attrs_widget) @@ -198,8 +217,7 @@ class CreateWidget(QtWidgets.QWidget): creators_attrs_layout = QtWidgets.QVBoxLayout(creators_attrs_widget) creators_attrs_layout.setContentsMargins(0, 0, 0, 0) - creators_attrs_layout.addWidget(variant_subset_label, 0) - creators_attrs_layout.addWidget(variant_subset_widget, 0) + creators_attrs_layout.addWidget(creators_attrs_top, 0) creators_attrs_layout.addWidget(pre_create_widget, 1) creators_attrs_layout.addWidget(create_btn_wrapper, 0) @@ -237,6 +255,7 @@ class CreateWidget(QtWidgets.QWidget): create_btn.clicked.connect(self._on_create) variant_widget.resized.connect(self._on_variant_widget_resize) + creator_basics_widget.resized.connect(self._on_creator_basics_resize) variant_input.returnPressed.connect(self._on_create) variant_input.textChanged.connect(self._on_variant_change) creators_view.selectionModel().currentChanged.connect( @@ -275,6 +294,8 @@ class CreateWidget(QtWidgets.QWidget): self._create_btn = create_btn self._creator_short_desc_widget = creator_short_desc_widget + self._creator_basics_widget = creator_basics_widget + self._thumbnail_widget = thumbnail_widget self._pre_create_widget = pre_create_widget self._attr_separator_widget = attr_separator_widget @@ -681,6 +702,11 @@ class CreateWidget(QtWidgets.QWidget): self._first_show = False self._on_first_show() + def _on_creator_basics_resize(self): + self._thumbnail_widget.set_height( + self._creator_basics_widget.sizeHint().height() + ) + def _on_create(self): indexes = self._creators_view.selectedIndexes() if not indexes or len(indexes) > 1: From 2afc5315778a300e118d8acbb9f63581c934147d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 19:03:42 +0200 Subject: [PATCH 1812/2550] modified thumbnail to paint the content on own --- openpype/tools/publisher/widgets/widgets.py | 169 +++++++++++++++++--- 1 file changed, 149 insertions(+), 20 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d4c2623790..23ddeee2de 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -16,6 +16,7 @@ from openpype.tools.utils import ( PixmapLabel, BaseClickableFrame, set_style_property, + paint_image_with_color, ) from openpype.style import get_objected_colors from openpype.pipeline.create import ( @@ -26,6 +27,7 @@ from .assets_widget import AssetsDialog from .tasks_widget import TasksModel from .icons import ( get_pixmap, + get_image, get_icon_path ) @@ -1622,33 +1624,160 @@ class SubsetAttributesWidget(QtWidgets.QWidget): class ThumbnailWidget(QtWidgets.QWidget): - """Instance 
thumbnail widget. + """Instance thumbnail widget.""" + + width_ratio = 3.0 + height_ratio = 2.0 + border_width = 1 + offset_sep = 4 - Logic implementation of this widget is missing but widget is used - to offset `GlobalAttrsWidget` inputs visually. - """ def __init__(self, parent): - super(ThumbnailWidget, self).__init__(parent) - # Missing implementation for thumbnail # - widget kept to make a visial offset of global attr widget offset - # default_pix = get_pixmap("thumbnail") - default_pix = QtGui.QPixmap(10, 10) - default_pix.fill(QtCore.Qt.transparent) + super(ThumbnailWidget, self).__init__(parent) - thumbnail_label = QtWidgets.QLabel(self) - thumbnail_label.setPixmap( - default_pix.scaled( - 200, 100, + # TODO remove hardcoded colors + border_color = QtGui.QColor(67, 74, 86) + thumbnail_bg_color = QtGui.QColor(54, 61, 72) + + default_image = get_image("thumbnail") + default_pix = paint_image_with_color(default_image, border_color) + + self.border_color = border_color + self.thumbnail_bg_color = thumbnail_bg_color + self._default_pix = default_pix + self._current_pixes = None + self._cached_pix = None + self._height = None + self._width = None + + def set_width(self, width): + if self._width == width: + return + + self._width = width + self._cached_pix = None + self.setMinimumHeight(int( + (width / self.width_ratio) * self.height_ratio + )) + if self._height is not None: + self.setMinimumWidth(0) + + def set_height(self, height): + if self._height == height: + return + + self._height = height + self._cached_pix = None + self.setMinimumWidth(int( + (height / self.height_ratio) * self.width_ratio + )) + if self._width is not None: + self.setMinimumHeight(0) + + def _get_current_pixes(self): + if self._current_pixes is None: + return [self._default_pix] + return self._current_pixes + + def _cache_pix(self): + rect = self.rect() + rect_width = rect.width() + rect_height = rect.height() + + pix_x_offset = 0 + pix_y_offset = 0 + expected_height = int( + (rect_width / self.width_ratio) * self.height_ratio + ) + if expected_height > rect_height: + expected_height = rect_height + expected_width = int( + (rect_height / self.height_ratio) * self.width_ratio + ) + pix_x_offset = (rect_width - expected_width) / 2 + else: + expected_width = rect_width + pix_y_offset = (rect_height - expected_height) / 2 + + pixes_to_draw = self._get_current_pixes() + max_pix = 3 + if len(pixes_to_draw) > max_pix: + pixes_to_draw = pixes_to_draw[:-max_pix] + pixes_len = len(pixes_to_draw) + + width_offset, height_offset = self._get_pix_offset_size( + expected_width, expected_height, pixes_len + ) + pix_width = expected_width - width_offset + pix_height = expected_height - height_offset + full_border_width = 2 * self.border_width + + pix_bg_brush = QtGui.QBrush(self.thumbnail_bg_color) + + pix_pen = QtGui.QPen() + pix_pen.setWidth(self.border_width) + pix_pen.setColor(self.border_color) + + backgrounded_images = [] + for src_pix in pixes_to_draw: + scaled_pix = src_pix.scaled( + pix_width - full_border_width, + pix_height - full_border_width, QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation ) - ) + pos_x = int( + (pix_width - scaled_pix.width()) / 2 + ) + self.border_width + pos_y = int( + (pix_height - scaled_pix.height()) / 2 + ) + self.border_width - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(thumbnail_label, alignment=QtCore.Qt.AlignCenter) + new_pix = QtGui.QPixmap(pix_width, pix_height) + pix_painter = QtGui.QPainter() + pix_painter.begin(new_pix) + 
pix_painter.setBrush(pix_bg_brush) + pix_painter.setPen(pix_pen) + pix_painter.drawRect(0, 0, pix_width - 1, pix_height - 1) + pix_painter.drawPixmap(pos_x, pos_y, scaled_pix) + pix_painter.end() + backgrounded_images.append(new_pix) - self.thumbnail_label = thumbnail_label - self.default_pix = default_pix - self.current_pix = None + if pixes_len == 1: + width_offset_part = 0 + height_offset_part = 0 + else: + width_offset_part = int(float(width_offset) / (pixes_len - 1)) + height_offset_part = int(float(height_offset) / (pixes_len - 1)) + full_width_offset = width_offset + pix_x_offset + + final_pix = QtGui.QPixmap(rect_width, rect_height) + final_pix.fill(QtCore.Qt.transparent) + + final_painter = QtGui.QPainter() + final_painter.begin(final_pix) + for idx, pix in enumerate(backgrounded_images): + x_offset = full_width_offset - (width_offset_part * idx) + y_offset = (height_offset_part * idx) + pix_y_offset + final_painter.drawPixmap(x_offset, y_offset, pix) + final_painter.end() + + self._cached_pix = final_pix + + def _get_pix_offset_size(self, width, height, image_count): + if image_count == 1: + return 0, 0 + + part_width = width / self.offset_sep + part_height = height / self.offset_sep + return part_width, part_height + + def paintEvent(self, event): + if self._cached_pix is None: + self._cache_pix() + + painter = QtGui.QPainter() + painter.begin(self) + painter.drawPixmap(0, 0, self._cached_pix) + painter.end() From 89edb5cb9bfad4bc5fddfa04334e5e6444c0a1b1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 19:04:16 +0200 Subject: [PATCH 1813/2550] use private attribute --- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 23ddeee2de..58a023d5f4 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1570,7 +1570,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): self.creator_attrs_widget = creator_attrs_widget self.publish_attrs_widget = publish_attrs_widget - self.thumbnail_widget = thumbnail_widget + self._thumbnail_widget = thumbnail_widget self.top_bottom = top_bottom self.bottom_separator = bottom_separator From b6a2b51dad47efce305719f26caa112b3e42b7f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 26 Oct 2022 19:20:24 +0200 Subject: [PATCH 1814/2550] thumbnail widget can adapt to size changes --- openpype/tools/publisher/widgets/widgets.py | 38 +++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 58a023d5f4..95ba321a63 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1650,11 +1650,25 @@ class ThumbnailWidget(QtWidgets.QWidget): self._cached_pix = None self._height = None self._width = None + self._adapted_to_size = True + self._last_width = None + self._last_height = None + + def set_adapted_to_hint(self, enabled): + self._adapted_to_size = enabled + if self._width is not None: + self.setMinimumHeight(0) + self._width = None + + if self._height is not None: + self.setMinimumWidth(0) + self._height = None def set_width(self, width): if self._width == width: return + self._adapted_to_size = False self._width = width self._cached_pix = None self.setMinimumHeight(int( @@ -1662,18 +1676,21 @@ class ThumbnailWidget(QtWidgets.QWidget): )) if self._height is not None: 
self.setMinimumWidth(0) + self._height = None def set_height(self, height): if self._height == height: return self._height = height + self._adapted_to_size = False self._cached_pix = None self.setMinimumWidth(int( (height / self.height_ratio) * self.width_ratio )) if self._width is not None: self.setMinimumHeight(0) + self._width = None def _get_current_pixes(self): if self._current_pixes is None: @@ -1781,3 +1798,24 @@ class ThumbnailWidget(QtWidgets.QWidget): painter.begin(self) painter.drawPixmap(0, 0, self._cached_pix) painter.end() + + def _adapt_to_size(self): + if not self._adapted_to_size: + return + + width = self.width() + height = self.height() + if width == self._last_width and height == self._last_height: + return + + self._last_width = width + self._last_height = height + self._cached_pix = None + + def resizeEvent(self, event): + super(ThumbnailWidget, self).resizeEvent(event) + self._adapt_to_size() + + def showEvent(self, event): + super(ThumbnailWidget, self).showEvent(event) + self._adapt_to_size() From 5d1fa90fccc1bd261c2af61eb639d677f372c363 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 27 Oct 2022 12:40:44 +0200 Subject: [PATCH 1815/2550] :recycle: soft fail when applying preset --- .../maya/plugins/publish/extract_playblast.py | 2 +- openpype/vendor/python/common/capture.py | 15 ++++++++++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 1b5b8d34e4..b19d24fad7 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -133,7 +133,7 @@ class ExtractPlayblast(publish.Extractor): preset.update(panel_preset) cmds.setFocus(panel) - path = capture.capture(**preset) + path = capture.capture(log=self.log, **preset) self.log.debug("playblast path {}".format(path)) diff --git a/openpype/vendor/python/common/capture.py b/openpype/vendor/python/common/capture.py index 86c1c60e56..09a42d84d1 100644 --- a/openpype/vendor/python/common/capture.py +++ b/openpype/vendor/python/common/capture.py @@ -7,6 +7,7 @@ Playblasting with independent viewport, camera and display options import re import sys import contextlib +import logging from maya import cmds from maya import mel @@ -21,6 +22,7 @@ version_info = (2, 3, 0) __version__ = "%s.%s.%s" % version_info __license__ = "MIT" +logger = logging.getLogger("capture") def capture(camera=None, @@ -46,7 +48,8 @@ def capture(camera=None, display_options=None, viewport_options=None, viewport2_options=None, - complete_filename=None): + complete_filename=None, + log=None): """Playblast in an independent panel Arguments: @@ -91,6 +94,7 @@ def capture(camera=None, options, using `Viewport2Options` complete_filename (str, optional): Exact name of output file. Use this to override the output of `filename` so it excludes frame padding. + log (logger, optional): pass logger for logging messages. 
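PATCH 1815 above makes the viewport-option step non-fatal: the modelEditor call is wrapped in try/except and an unsupported option only produces an error through the injected logger instead of aborting the playblast. The same defensive pattern in isolation; apply_options is a stand-in for cmds.modelEditor, which only exists inside Maya:

    import logging

    logger = logging.getLogger("capture")


    def apply_viewport_options(apply_options, panel, options, log=None):
        # Callers may inject their own logger, mirroring capture(log=...).
        active_log = log or logger
        try:
            apply_options(panel, edit=True, **options)
        except TypeError as exc:
            # Unsupported keyword: report it and keep the playblast going.
            active_log.error("Cannot apply options {}".format(exc))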
Example: >>> # Launch default capture @@ -109,7 +113,9 @@ def capture(camera=None, """ - + global logger + if log: + logger = log camera = camera or "persp" # Ensure camera exists @@ -736,7 +742,10 @@ def _applied_viewport_options(options, panel): plugin_options[plugin] = options.pop(plugin) # default options - cmds.modelEditor(panel, edit=True, **options) + try: + cmds.modelEditor(panel, edit=True, **options) + except TypeError as e: + logger.error("Cannot apply options {}".format(e)) # plugin display filter options for plugin, state in plugin_options.items(): From cc4d158c785d8a413076830aa556445d9292b234 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 15:12:51 +0200 Subject: [PATCH 1816/2550] moved thumbnail widget to separated file --- .../tools/publisher/widgets/create_widget.py | 2 +- .../publisher/widgets/thumbnail_widget.py | 312 ++++++++++++++++++ openpype/tools/publisher/widgets/widgets.py | 201 +---------- 3 files changed, 314 insertions(+), 201 deletions(-) create mode 100644 openpype/tools/publisher/widgets/thumbnail_widget.py diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index a8ca9af17d..7695101ad1 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -7,10 +7,10 @@ from openpype.pipeline.create import ( TaskNotSetError, ) +from .thumbnail_widget import ThumbnailWidget from .widgets import ( IconValuePixmapLabel, CreateBtn, - ThumbnailWidget, ) from .assets_widget import CreateWidgetAssetsWidget from .tasks_widget import CreateWidgetTasksWidget diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py new file mode 100644 index 0000000000..29bb6fb62f --- /dev/null +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -0,0 +1,312 @@ +import os +import tempfile +import uuid +from Qt import QtWidgets, QtCore, QtGui + +from openpype.lib import ( + run_subprocess, + is_oiio_supported, + get_oiio_tools_path, + get_ffmpeg_tool_path, +) +from openpype.lib.transcoding import ( + IMAGE_EXTENSIONS, + VIDEO_EXTENSIONS, +) + +from openpype.tools.utils import ( + paint_image_with_color, +) +from .icons import get_image + + +class ThumbnailWidget(QtWidgets.QWidget): + """Instance thumbnail widget.""" + + thumbnail_created = QtCore.Signal(str) + + width_ratio = 3.0 + height_ratio = 2.0 + border_width = 1 + offset_sep = 4 + + def __init__(self, parent): + # Missing implementation for thumbnail + # - widget kept to make a visial offset of global attr widget offset + super(ThumbnailWidget, self).__init__(parent) + self.setAcceptDrops(True) + + # TODO remove hardcoded colors + border_color = QtGui.QColor(67, 74, 86) + thumbnail_bg_color = QtGui.QColor(54, 61, 72) + + default_image = get_image("thumbnail") + default_pix = paint_image_with_color(default_image, border_color) + + self.border_color = border_color + self.thumbnail_bg_color = thumbnail_bg_color + self._default_pix = default_pix + self._current_pixes = None + self._cached_pix = None + self._height = None + self._width = None + self._adapted_to_size = True + self._last_width = None + self._last_height = None + self._review_extensions = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) + + def _get_filepath_from_event(self, event): + mime_data = event.mimeData() + if not mime_data.hasUrls(): + return None + + filepaths = [] + for url in mime_data.urls(): + filepath = url.toLocalFile() + if os.path.exists(filepath): + 
filepaths.append(filepath) + + if len(filepaths) == 1: + filepath = filepaths[0] + ext = os.path.splitext(filepath)[-1] + if ext in self._review_extensions: + return filepath + return None + + def dragEnterEvent(self, event): + filepath = self._get_filepath_from_event(event) + if filepath: + event.setDropAction(QtCore.Qt.CopyAction) + event.accept() + + def dragLeaveEvent(self, event): + event.accept() + + def dropEvent(self, event): + filepath = self._get_filepath_from_event(event) + if filepath: + output = export_thumbnail(filepath) + if output: + self.thumbnail_created.emit(output) + + def set_adapted_to_hint(self, enabled): + self._adapted_to_size = enabled + if self._width is not None: + self.setMinimumHeight(0) + self._width = None + + if self._height is not None: + self.setMinimumWidth(0) + self._height = None + + def set_width(self, width): + if self._width == width: + return + + self._adapted_to_size = False + self._width = width + self._cached_pix = None + self.setMinimumHeight(int( + (width / self.width_ratio) * self.height_ratio + )) + if self._height is not None: + self.setMinimumWidth(0) + self._height = None + + def set_height(self, height): + if self._height == height: + return + + self._height = height + self._adapted_to_size = False + self._cached_pix = None + self.setMinimumWidth(int( + (height / self.height_ratio) * self.width_ratio + )) + if self._width is not None: + self.setMinimumHeight(0) + self._width = None + + def _get_current_pixes(self): + if self._current_pixes is None: + return [self._default_pix] + return self._current_pixes + + def _cache_pix(self): + rect = self.rect() + rect_width = rect.width() + rect_height = rect.height() + + pix_x_offset = 0 + pix_y_offset = 0 + expected_height = int( + (rect_width / self.width_ratio) * self.height_ratio + ) + if expected_height > rect_height: + expected_height = rect_height + expected_width = int( + (rect_height / self.height_ratio) * self.width_ratio + ) + pix_x_offset = (rect_width - expected_width) / 2 + else: + expected_width = rect_width + pix_y_offset = (rect_height - expected_height) / 2 + + pixes_to_draw = self._get_current_pixes() + max_pix = 3 + if len(pixes_to_draw) > max_pix: + pixes_to_draw = pixes_to_draw[:-max_pix] + pixes_len = len(pixes_to_draw) + + width_offset, height_offset = self._get_pix_offset_size( + expected_width, expected_height, pixes_len + ) + pix_width = expected_width - width_offset + pix_height = expected_height - height_offset + full_border_width = 2 * self.border_width + + pix_bg_brush = QtGui.QBrush(self.thumbnail_bg_color) + + pix_pen = QtGui.QPen() + pix_pen.setWidth(self.border_width) + pix_pen.setColor(self.border_color) + + backgrounded_images = [] + for src_pix in pixes_to_draw: + scaled_pix = src_pix.scaled( + pix_width - full_border_width, + pix_height - full_border_width, + QtCore.Qt.KeepAspectRatio, + QtCore.Qt.SmoothTransformation + ) + pos_x = int( + (pix_width - scaled_pix.width()) / 2 + ) + self.border_width + pos_y = int( + (pix_height - scaled_pix.height()) / 2 + ) + self.border_width + + new_pix = QtGui.QPixmap(pix_width, pix_height) + pix_painter = QtGui.QPainter() + pix_painter.begin(new_pix) + pix_painter.setBrush(pix_bg_brush) + pix_painter.setPen(pix_pen) + pix_painter.drawRect(0, 0, pix_width - 1, pix_height - 1) + pix_painter.drawPixmap(pos_x, pos_y, scaled_pix) + pix_painter.end() + backgrounded_images.append(new_pix) + + if pixes_len == 1: + width_offset_part = 0 + height_offset_part = 0 + else: + width_offset_part = int(float(width_offset) / (pixes_len - 
1)) + height_offset_part = int(float(height_offset) / (pixes_len - 1)) + full_width_offset = width_offset + pix_x_offset + + final_pix = QtGui.QPixmap(rect_width, rect_height) + final_pix.fill(QtCore.Qt.transparent) + + final_painter = QtGui.QPainter() + final_painter.begin(final_pix) + for idx, pix in enumerate(backgrounded_images): + x_offset = full_width_offset - (width_offset_part * idx) + y_offset = (height_offset_part * idx) + pix_y_offset + final_painter.drawPixmap(x_offset, y_offset, pix) + final_painter.end() + + self._cached_pix = final_pix + + def _get_pix_offset_size(self, width, height, image_count): + if image_count == 1: + return 0, 0 + + part_width = width / self.offset_sep + part_height = height / self.offset_sep + return part_width, part_height + + def paintEvent(self, event): + if self._cached_pix is None: + self._cache_pix() + + painter = QtGui.QPainter() + painter.begin(self) + painter.drawPixmap(0, 0, self._cached_pix) + painter.end() + + def _adapt_to_size(self): + if not self._adapted_to_size: + return + + width = self.width() + height = self.height() + if width == self._last_width and height == self._last_height: + return + + self._last_width = width + self._last_height = height + self._cached_pix = None + + def resizeEvent(self, event): + super(ThumbnailWidget, self).resizeEvent(event) + self._adapt_to_size() + + def showEvent(self, event): + super(ThumbnailWidget, self).showEvent(event) + self._adapt_to_size() + + +def _run_silent_subprocess(args): + with open(os.devnull, "w") as devnull: + run_subprocess(args, stdout=devnull, stderr=devnull) + + +def _convert_thumbnail_oiio(src_path, dst_path): + if not is_oiio_supported(): + return None + + oiio_cmd = [ + get_oiio_tools_path(), + "-i", src_path, + "--subimage", "0", + "-o", dst_path + ] + try: + _run_silent_subprocess(oiio_cmd) + except Exception: + return None + return dst_path + + +def _convert_thumbnail_ffmpeg(src_path, dst_path): + ffmpeg_cmd = [ + get_ffmpeg_tool_path(), + "-y", + "-i", src_path, + dst_path + ] + try: + _run_silent_subprocess(ffmpeg_cmd) + except Exception: + return None + return dst_path + + +def export_thumbnail(src_path): + root_dir = os.path.join( + tempfile.gettempdir(), + "publisher_thumbnails" + ) + if not os.path.exists(root_dir): + os.makedirs(root_dir) + + ext = os.path.splitext(src_path)[-1] + if ext not in (".jpeg", ".jpg", ".png"): + ext = ".jpeg" + filename = str(uuid.uuid4()) + ext + dst_path = os.path.join(root_dir, filename) + + output_path = _convert_thumbnail_oiio(src_path, dst_path) + if not output_path: + output_path = _convert_thumbnail_ffmpeg(src_path, dst_path) + return output_path diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 95ba321a63..290f69f280 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -16,18 +16,17 @@ from openpype.tools.utils import ( PixmapLabel, BaseClickableFrame, set_style_property, - paint_image_with_color, ) from openpype.style import get_objected_colors from openpype.pipeline.create import ( SUBSET_NAME_ALLOWED_SYMBOLS, TaskNotSetError, ) +from .thumbnail_widget import ThumbnailWidget from .assets_widget import AssetsDialog from .tasks_widget import TasksModel from .icons import ( get_pixmap, - get_image, get_icon_path ) @@ -1621,201 +1620,3 @@ class SubsetAttributesWidget(QtWidgets.QWidget): ) self.creator_attrs_widget.set_instances_valid(all_valid) self.publish_attrs_widget.set_instances_valid(all_valid) - - -class 
ThumbnailWidget(QtWidgets.QWidget): - """Instance thumbnail widget.""" - - width_ratio = 3.0 - height_ratio = 2.0 - border_width = 1 - offset_sep = 4 - - def __init__(self, parent): - # Missing implementation for thumbnail - # - widget kept to make a visial offset of global attr widget offset - super(ThumbnailWidget, self).__init__(parent) - - # TODO remove hardcoded colors - border_color = QtGui.QColor(67, 74, 86) - thumbnail_bg_color = QtGui.QColor(54, 61, 72) - - default_image = get_image("thumbnail") - default_pix = paint_image_with_color(default_image, border_color) - - self.border_color = border_color - self.thumbnail_bg_color = thumbnail_bg_color - self._default_pix = default_pix - self._current_pixes = None - self._cached_pix = None - self._height = None - self._width = None - self._adapted_to_size = True - self._last_width = None - self._last_height = None - - def set_adapted_to_hint(self, enabled): - self._adapted_to_size = enabled - if self._width is not None: - self.setMinimumHeight(0) - self._width = None - - if self._height is not None: - self.setMinimumWidth(0) - self._height = None - - def set_width(self, width): - if self._width == width: - return - - self._adapted_to_size = False - self._width = width - self._cached_pix = None - self.setMinimumHeight(int( - (width / self.width_ratio) * self.height_ratio - )) - if self._height is not None: - self.setMinimumWidth(0) - self._height = None - - def set_height(self, height): - if self._height == height: - return - - self._height = height - self._adapted_to_size = False - self._cached_pix = None - self.setMinimumWidth(int( - (height / self.height_ratio) * self.width_ratio - )) - if self._width is not None: - self.setMinimumHeight(0) - self._width = None - - def _get_current_pixes(self): - if self._current_pixes is None: - return [self._default_pix] - return self._current_pixes - - def _cache_pix(self): - rect = self.rect() - rect_width = rect.width() - rect_height = rect.height() - - pix_x_offset = 0 - pix_y_offset = 0 - expected_height = int( - (rect_width / self.width_ratio) * self.height_ratio - ) - if expected_height > rect_height: - expected_height = rect_height - expected_width = int( - (rect_height / self.height_ratio) * self.width_ratio - ) - pix_x_offset = (rect_width - expected_width) / 2 - else: - expected_width = rect_width - pix_y_offset = (rect_height - expected_height) / 2 - - pixes_to_draw = self._get_current_pixes() - max_pix = 3 - if len(pixes_to_draw) > max_pix: - pixes_to_draw = pixes_to_draw[:-max_pix] - pixes_len = len(pixes_to_draw) - - width_offset, height_offset = self._get_pix_offset_size( - expected_width, expected_height, pixes_len - ) - pix_width = expected_width - width_offset - pix_height = expected_height - height_offset - full_border_width = 2 * self.border_width - - pix_bg_brush = QtGui.QBrush(self.thumbnail_bg_color) - - pix_pen = QtGui.QPen() - pix_pen.setWidth(self.border_width) - pix_pen.setColor(self.border_color) - - backgrounded_images = [] - for src_pix in pixes_to_draw: - scaled_pix = src_pix.scaled( - pix_width - full_border_width, - pix_height - full_border_width, - QtCore.Qt.KeepAspectRatio, - QtCore.Qt.SmoothTransformation - ) - pos_x = int( - (pix_width - scaled_pix.width()) / 2 - ) + self.border_width - pos_y = int( - (pix_height - scaled_pix.height()) / 2 - ) + self.border_width - - new_pix = QtGui.QPixmap(pix_width, pix_height) - pix_painter = QtGui.QPainter() - pix_painter.begin(new_pix) - pix_painter.setBrush(pix_bg_brush) - pix_painter.setPen(pix_pen) - 
pix_painter.drawRect(0, 0, pix_width - 1, pix_height - 1) - pix_painter.drawPixmap(pos_x, pos_y, scaled_pix) - pix_painter.end() - backgrounded_images.append(new_pix) - - if pixes_len == 1: - width_offset_part = 0 - height_offset_part = 0 - else: - width_offset_part = int(float(width_offset) / (pixes_len - 1)) - height_offset_part = int(float(height_offset) / (pixes_len - 1)) - full_width_offset = width_offset + pix_x_offset - - final_pix = QtGui.QPixmap(rect_width, rect_height) - final_pix.fill(QtCore.Qt.transparent) - - final_painter = QtGui.QPainter() - final_painter.begin(final_pix) - for idx, pix in enumerate(backgrounded_images): - x_offset = full_width_offset - (width_offset_part * idx) - y_offset = (height_offset_part * idx) + pix_y_offset - final_painter.drawPixmap(x_offset, y_offset, pix) - final_painter.end() - - self._cached_pix = final_pix - - def _get_pix_offset_size(self, width, height, image_count): - if image_count == 1: - return 0, 0 - - part_width = width / self.offset_sep - part_height = height / self.offset_sep - return part_width, part_height - - def paintEvent(self, event): - if self._cached_pix is None: - self._cache_pix() - - painter = QtGui.QPainter() - painter.begin(self) - painter.drawPixmap(0, 0, self._cached_pix) - painter.end() - - def _adapt_to_size(self): - if not self._adapted_to_size: - return - - width = self.width() - height = self.height() - if width == self._last_width and height == self._last_height: - return - - self._last_width = width - self._last_height = height - self._cached_pix = None - - def resizeEvent(self, event): - super(ThumbnailWidget, self).resizeEvent(event) - self._adapt_to_size() - - def showEvent(self, event): - super(ThumbnailWidget, self).showEvent(event) - self._adapt_to_size() From c4432bf6ea127cf6bf113b6ae67d98c16cdc7b24 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 15:13:02 +0200 Subject: [PATCH 1817/2550] fix variant input style --- openpype/style/style.css | 4 ++-- openpype/tools/publisher/widgets/create_widget.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 9919973b06..585adceb26 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -911,11 +911,11 @@ PublisherTabBtn[active="1"]:hover { #PublishLogConsole { font-family: "Noto Sans Mono"; } -VariantInputsWidget QLineEdit { +#VariantInputsWidget QLineEdit { border-bottom-right-radius: 0px; border-top-right-radius: 0px; } -VariantInputsWidget QToolButton { +#VariantInputsWidget QToolButton { border-bottom-left-radius: 0px; border-top-left-radius: 0px; padding-top: 0.5em; diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 7695101ad1..d47c2a07e0 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -164,6 +164,7 @@ class CreateWidget(QtWidgets.QWidget): variant_subset_widget = QtWidgets.QWidget(creator_basics_widget) # Variant and subset input variant_widget = ResizeControlWidget(variant_subset_widget) + variant_widget.setObjectName("VariantInputsWidget") variant_input = QtWidgets.QLineEdit(variant_widget) variant_input.setObjectName("VariantInput") From bd5121b17ba468c35c466fae2908b65aca94ba4a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 15:13:32 +0200 Subject: [PATCH 1818/2550] traypublisher has REVIEW_EXTENSIONS as set --- openpype/hosts/traypublisher/api/plugin.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 555041d389..f6dcce800d 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -14,7 +14,7 @@ from .pipeline import ( from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS -REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS +REVIEW_EXTENSIONS = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) def _cache_and_get_instances(creator): From c4fdf28d345170b2c972d922b4bde771f7de4e64 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 15:24:57 +0200 Subject: [PATCH 1819/2550] nuke: fixing loader hash convertor --- openpype/hosts/nuke/plugins/load/load_clip.py | 41 ++++++++++--------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 654ea367c8..aa5b1dfed1 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -1,3 +1,4 @@ +import os import nuke import qargparse @@ -84,6 +85,16 @@ class LoadClip(plugin.NukeLoader): + plugin.get_review_presets_config() ) + def _fix_path_for_knob(self, filepath, repre_cont): + basename = os.path.basename(filepath) + dirname = os.path.dirname(filepath) + frame = repre_cont.get("frame") + assert frame, "Representation is not sequence" + + padding = len(frame) + basename = basename.replace(frame, "#" * padding) + return os.path.join(dirname, basename).replace("\\", "/") + def load(self, context, name, namespace, options): repre = context["representation"] # reste container id so it is always unique for each instance @@ -91,7 +102,7 @@ class LoadClip(plugin.NukeLoader): is_sequence = len(repre["files"]) > 1 - file = self.fname.replace("\\", "/") + filepath = self.fname.replace("\\", "/") start_at_workfile = options.get( "start_at_workfile", self.options_defaults["start_at_workfile"]) @@ -121,18 +132,14 @@ class LoadClip(plugin.NukeLoader): duration = last - first first = 1 last = first + duration - elif "#" not in file: - frame = repre_cont.get("frame") - assert frame, "Representation is not sequence" - - padding = len(frame) - file = file.replace(frame, "#" * padding) + elif "#" not in filepath: + filepath = self._fix_path_for_knob(filepath, repre_cont) # Fallback to asset name when namespace is None if namespace is None: namespace = context['asset']['name'] - if not file: + if not filepath: self.log.warning( "Representation id `{}` is failing to load".format(repre_id)) return @@ -147,7 +154,7 @@ class LoadClip(plugin.NukeLoader): # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): - read_node["file"].setValue(file) + read_node["file"].setValue(filepath) used_colorspace = self._set_colorspace( read_node, version_data, repre["data"]) @@ -218,7 +225,7 @@ class LoadClip(plugin.NukeLoader): is_sequence = len(representation["files"]) > 1 read_node = nuke.toNode(container['objectName']) - file = get_representation_path(representation).replace("\\", "/") + filepath = get_representation_path(representation).replace("\\", "/") start_at_workfile = "start at" in read_node['frame_mode'].value() @@ -251,14 +258,10 @@ class LoadClip(plugin.NukeLoader): duration = last - first first = 1 last = first + duration - elif "#" not in file: - frame = repre_cont.get("frame") - assert frame, "Representation is not sequence" + elif "#" not in 
filepath: + filepath = self._fix_path_for_knob(filepath, repre_cont) - padding = len(frame) - file = file.replace(frame, "#" * padding) - - if not file: + if not filepath: self.log.warning( "Representation id `{}` is failing to load".format(repre_id)) return @@ -266,14 +269,14 @@ class LoadClip(plugin.NukeLoader): read_name = self._get_node_name(representation) read_node["name"].setValue(read_name) - read_node["file"].setValue(file) + read_node["file"].setValue(filepath) # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): used_colorspace = self._set_colorspace( read_node, version_data, representation["data"], - path=file) + path=filepath) self._set_range_to_node(read_node, first, last, start_at_workfile) From 733e3be8c469afc0b00782853bbfb17a6ccd324c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 27 Oct 2022 16:15:49 +0200 Subject: [PATCH 1820/2550] :recycle: optimize calls, fix representation dirname --- .../hosts/houdini/plugins/load/load_ass.py | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/houdini/plugins/load/load_ass.py b/openpype/hosts/houdini/plugins/load/load_ass.py index 57e2d34d7c..daabd1e405 100644 --- a/openpype/hosts/houdini/plugins/load/load_ass.py +++ b/openpype/hosts/houdini/plugins/load/load_ass.py @@ -1,11 +1,10 @@ import os - import re + from openpype.pipeline import ( load, get_representation_path, ) - from openpype.hosts.houdini.api import pipeline @@ -20,7 +19,6 @@ class AssLoader(load.LoaderPlugin): color = "orange" def load(self, context, name=None, namespace=None, data=None): - import hou # Get the root node @@ -35,8 +33,7 @@ class AssLoader(load.LoaderPlugin): procedural.setParms( { - "ar_filename": self.format_path( - self.fname, context["representation"]) + "ar_filename": self.format_path(context["representation"]) }) nodes = [procedural] @@ -52,10 +49,8 @@ class AssLoader(load.LoaderPlugin): ) def update(self, container, representation): - # Update the file path - file_path = get_representation_path(representation) - file_path = self.format_path(file_path, representation) + file_path = self.format_path(representation) procedural = container["node"] procedural.setParms({"ar_filename": file_path}) @@ -64,26 +59,31 @@ class AssLoader(load.LoaderPlugin): procedural.setParms({"representation": str(representation["_id"])}) def remove(self, container): - node = container["node"] node.destroy() @staticmethod - def format_path(path, representation): - """Format file path correctly for single bgeo or bgeo sequence.""" + def format_path(representation): + """Format file path correctly for single ass.* or ass.* sequence. + + Args: + representation (dict): representation to be loaded. + + Returns: + str: Formatted path to be used by the input node. + + """ + path = get_representation_path(representation) if not os.path.exists(path): - raise RuntimeError("Path does not exist: %s" % path) + raise RuntimeError("Path does not exist: {}".format(path)) is_sequence = bool(representation["context"].get("frame")) # The path is either a single file or sequence in a folder. 
- if not is_sequence: - filename = path - else: - filename = re.sub(r"(.*)\.(\d+)\.(ass.*)", "\\1.$F4.\\3", path) + if is_sequence: + dir_path, file_name = os.path.split(path) + path = os.path.join( + dir_path, + re.sub(r"(.*)\.(\d+)\.(ass.*)", "\\1.$F4.\\3", file_name) + ) - filename = os.path.join(path, filename) - - filename = os.path.normpath(filename) - filename = filename.replace("\\", "/") - - return filename + return os.path.normpath(path).replace("\\", "/") From 1b79f3162be3e7e827b0c6e95ac2412c0e26aa95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 27 Oct 2022 16:17:32 +0200 Subject: [PATCH 1821/2550] :recycle: unify the calls --- openpype/hosts/houdini/plugins/load/load_ass.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/load/load_ass.py b/openpype/hosts/houdini/plugins/load/load_ass.py index daabd1e405..710cd09c23 100644 --- a/openpype/hosts/houdini/plugins/load/load_ass.py +++ b/openpype/hosts/houdini/plugins/load/load_ass.py @@ -50,10 +50,8 @@ class AssLoader(load.LoaderPlugin): def update(self, container, representation): # Update the file path - file_path = self.format_path(representation) - procedural = container["node"] - procedural.setParms({"ar_filename": file_path}) + procedural.setParms({"ar_filename": self.format_path(representation)}) # Update attribute procedural.setParms({"representation": str(representation["_id"])}) From e13e59c6dc2bd9288d48f119aacb94893d46fa75 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 27 Oct 2022 17:10:01 +0200 Subject: [PATCH 1822/2550] hiero: fix effect collection --- .../hiero/plugins/publish/precollect_instances.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 84f2927fc7..1fc4b1f696 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -326,8 +326,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): return hiero_export.create_otio_time_range( frame_start, frame_duration, fps) - @staticmethod - def collect_sub_track_items(tracks): + def collect_sub_track_items(self, tracks): """ Returns dictionary with track index as key and list of subtracks """ @@ -336,8 +335,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): for track in tracks: items = track.items() + effet_items = track.subTrackItems() + # skip if no clips on track > need track with effect only - if items: + if not effet_items: continue # skip all disabled tracks @@ -345,10 +346,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin): continue track_index = track.trackIndex() - _sub_track_items = phiero.flatten(track.subTrackItems()) + _sub_track_items = phiero.flatten(effet_items) + _sub_track_items = list(_sub_track_items) # continue only if any subtrack items are collected - if not list(_sub_track_items): + if not _sub_track_items: continue enabled_sti = [] From 48c4c238f55bad7ae773abcf0c13d818180f0512 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:38:28 +0200 Subject: [PATCH 1823/2550] added helper function to get fake process id --- openpype/pipeline/__init__.py | 1 + openpype/pipeline/context_tools.py | 17 +++++++++++++++++ openpype/pipeline/workfile/lock_workfile.py | 14 +++----------- 3 files changed, 21 insertions(+), 11 deletions(-) diff --git a/openpype/pipeline/__init__.py 
b/openpype/pipeline/__init__.py index 2cf785d981..f5319c5a48 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -85,6 +85,7 @@ from .context_tools import ( register_host, registered_host, deregister_host, + get_process_id, ) install = install_host uninstall = uninstall_host diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index af0ee79f47..0ec19d50fe 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -5,6 +5,7 @@ import json import types import logging import platform +import uuid import pyblish.api from pyblish.lib import MessageHandler @@ -37,6 +38,7 @@ from . import ( _is_installed = False +_process_id = None _registered_root = {"_": ""} _registered_host = {"_": None} # Keep modules manager (and it's modules) in memory @@ -546,3 +548,18 @@ def change_current_context(asset_doc, task_name, template_key=None): emit_event("taskChanged", data) return changes + + +def get_process_id(): + """Fake process id created on demand using uuid. + + Can be used to create process specific folders in temp directory. + + Returns: + str: Process id. + """ + + global _process_id + if _process_id is None: + _process_id = str(uuid.uuid4()) + return _process_id diff --git a/openpype/pipeline/workfile/lock_workfile.py b/openpype/pipeline/workfile/lock_workfile.py index fbec44247a..579840c07d 100644 --- a/openpype/pipeline/workfile/lock_workfile.py +++ b/openpype/pipeline/workfile/lock_workfile.py @@ -1,9 +1,9 @@ import os import json -from uuid import uuid4 from openpype.lib import Logger, filter_profiles from openpype.lib.pype_info import get_workstation_info from openpype.settings import get_project_settings +from openpype.pipeline import get_process_id def _read_lock_file(lock_filepath): @@ -37,7 +37,7 @@ def is_workfile_locked_for_current_process(filepath): lock_filepath = _get_lock_file(filepath) data = _read_lock_file(lock_filepath) - return data["process_id"] == _get_process_id() + return data["process_id"] == get_process_id() def delete_workfile_lock(filepath): @@ -49,7 +49,7 @@ def delete_workfile_lock(filepath): def create_workfile_lock(filepath): lock_filepath = _get_lock_file(filepath) info = get_workstation_info() - info["process_id"] = _get_process_id() + info["process_id"] = get_process_id() with open(lock_filepath, "w") as stream: json.dump(info, stream) @@ -59,14 +59,6 @@ def remove_workfile_lock(filepath): delete_workfile_lock(filepath) -def _get_process_id(): - process_id = os.environ.get("OPENPYPE_PROCESS_ID") - if not process_id: - process_id = str(uuid4()) - os.environ["OPENPYPE_PROCESS_ID"] = process_id - return process_id - - def is_workfile_lock_enabled(host_name, project_name, project_setting=None): if project_setting is None: project_setting = get_project_settings(project_name) From 8cf23ec864a3bffca1f264b2bf0709862edc2807 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:40:18 +0200 Subject: [PATCH 1824/2550] create context can store thumbnails --- openpype/pipeline/create/context.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 52a1729233..71338f96e0 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1077,6 +1077,8 @@ class CreateContext: # Shared data across creators during collection phase self._collection_shared_data = None + self.thumbnail_paths_by_instance_id = {} + # Trigger reset if was enabled 
if reset: self.reset(discover_publish_plugins) @@ -1146,11 +1148,37 @@ class CreateContext: self.reset_finalization() + def refresh_thumbnails(self): + """Cleanup thumbnail paths. + + Remove all thumbnail filepaths that are empty or lead to files which + does not exists or of instances that are not available anymore. + """ + + invalid = set() + for instance_id, path in self.thumbnail_paths_by_instance_id.items(): + instance_available = True + if instance_id is not None: + instance_available = ( + instance_id not in self._instances_by_id + ) + + if ( + not instance_available + or not path + or not os.path.exists(path) + ): + invalid.add(instance_id) + + for instance_id in invalid: + self.thumbnail_paths_by_instance_id.pop(instance_id) + def reset_preparation(self): """Prepare attributes that must be prepared/cleaned before reset.""" # Give ability to store shared data for collection phase self._collection_shared_data = {} + self.refresh_thumbnails() def reset_finalization(self): """Cleanup of attributes after reset.""" From d71a1f8d524e34a47058110590cea38c7a4659a9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:41:11 +0200 Subject: [PATCH 1825/2550] creators can set thumbnail path and allow to pass thumbnail in precreation part --- openpype/pipeline/create/__init__.py | 2 ++ openpype/pipeline/create/constants.py | 2 ++ openpype/pipeline/create/creator_plugins.py | 11 +++++++++++ 3 files changed, 15 insertions(+) diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index 4b91951a08..89b876e6de 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -1,6 +1,7 @@ from .constants import ( SUBSET_NAME_ALLOWED_SYMBOLS, DEFAULT_SUBSET_TEMPLATE, + PRE_CREATE_THUMBNAIL_KEY, ) from .subset_name import ( @@ -40,6 +41,7 @@ from .legacy_create import ( __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", "DEFAULT_SUBSET_TEMPLATE", + "PRE_CREATE_THUMBNAIL_KEY", "TaskNotSetError", "get_subset_name", diff --git a/openpype/pipeline/create/constants.py b/openpype/pipeline/create/constants.py index 3af9651947..375cfc4a12 100644 --- a/openpype/pipeline/create/constants.py +++ b/openpype/pipeline/create/constants.py @@ -1,8 +1,10 @@ SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_." DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}" +PRE_CREATE_THUMBNAIL_KEY = "thumbnail_source" __all__ = ( "SUBSET_NAME_ALLOWED_SYMBOLS", "DEFAULT_SUBSET_TEMPLATE", + "PRE_CREATE_THUMBNAIL_KEY", ) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index c69abb8861..1e8423e48b 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -442,6 +442,13 @@ class BaseCreator: return self.create_context.collection_shared_data + def set_instance_thumbnail_path(self, instance_id, thumbnail_path=None): + """Set path to thumbnail for instance.""" + + self.create_context.thumbnail_paths_by_instance_id[instance_id] = ( + thumbnail_path + ) + class Creator(BaseCreator): """Creator that has more information for artist to show in UI. @@ -468,6 +475,10 @@ class Creator(BaseCreator): # - in some cases it may confuse artists because it would not be used # e.g. 
for buld creators create_allow_context_change = True + # A thumbnail can be passed in precreate attributes + # - if is set to True is should expect that a thumbnail path under key + # PRE_CREATE_THUMBNAIL_KEY can be sent in data with precreate data + create_allow_thumbnail = False # Precreate attribute definitions showed before creation # - similar to instance attribute definitions From f5c73f5a948ea52fbfe18d0a38f27d5506f7ad4e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:41:53 +0200 Subject: [PATCH 1826/2550] create context collector also adds thumbnail source to instances --- .../plugins/publish/collect_movie_batch.py | 3 ++- .../publish/collect_simple_instances.py | 3 ++- .../plugins/publish/extract_thumbnail.py | 13 +++++---- .../publish/collect_from_create_context.py | 27 ++++++++++++++++--- 4 files changed, 36 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py index 3d93e2c927..5f8b2878b7 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_movie_batch.py @@ -40,7 +40,8 @@ class CollectMovieBatch( if creator_attributes["add_review_family"]: repre["tags"].append("review") instance.data["families"].append("review") - instance.data["thumbnailSource"] = file_url + if not instance.data.get("thumbnailSource"): + instance.data["thumbnailSource"] = file_url instance.data["source"] = file_url diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py index 7035a61d7b..183195a515 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -188,7 +188,8 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): if "review" not in instance.data["families"]: instance.data["families"].append("review") - instance.data["thumbnailSource"] = first_filepath + if not instance.data.get("thumbnailSource"): + instance.data["thumbnailSource"] = first_filepath review_representation["tags"].append("review") self.log.debug("Representation {} was marked for review. {}".format( diff --git a/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py index 7781bb7b3e..96aefe0043 100644 --- a/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py @@ -42,7 +42,15 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): "Processing instance with subset name {}".format(subset_name) ) + # Check if already has thumbnail created + if self._already_has_thumbnail(instance): + self.log.info("Thumbnail representation already present.") + return + thumbnail_source = instance.data.get("thumbnailSource") + if not thumbnail_source: + thumbnail_source = instance.context.data.get("thumbnailSource") + if not thumbnail_source: self.log.debug("Thumbnail source not filled. 
Skipping.") return @@ -53,11 +61,6 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): thumbnail_source)) return - # Check if already has thumbnail created - if self._already_has_thumbnail(instance): - self.log.info("Thumbnail representation already present.") - return - # Create temp directory for thumbnail # - this is to avoid "override" of source file dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_") diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index fc0f97b187..ddb6908a4c 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -19,14 +19,28 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): if not create_context: return + thumbnail_paths_by_instance_id = ( + create_context.thumbnail_paths_by_instance_id + ) + context.data["thumbnailSource"] = ( + thumbnail_paths_by_instance_id.get(None) + ) + project_name = create_context.project_name if project_name: context.data["projectName"] = project_name + for created_instance in create_context.instances: instance_data = created_instance.data_to_store() if instance_data["active"]: + thumbnail_path = thumbnail_paths_by_instance_id.get( + created_instance.id + ) self.create_instance( - context, instance_data, created_instance.transient_data + context, + instance_data, + created_instance.transient_data, + thumbnail_path ) # Update global data to context @@ -39,7 +53,13 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): legacy_io.Session[key] = value os.environ[key] = value - def create_instance(self, context, in_data, transient_data): + def create_instance( + self, + context, + in_data, + transient_data, + thumbnail_path + ): subset = in_data["subset"] # If instance data already contain families then use it instance_families = in_data.get("families") or [] @@ -53,7 +73,8 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): "name": subset, "family": in_data["family"], "families": instance_families, - "representations": [] + "representations": [], + "thumbnailSource": thumbnail_path }) for key, value in in_data.items(): if key not in instance.data: From 52c83227a42e6e8a37fb831e494933a4cb0e6ee1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:42:26 +0200 Subject: [PATCH 1827/2550] enhanced caching of instance data in tray publisher --- openpype/hosts/traypublisher/api/plugin.py | 34 ++++++++++++---------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index f6dcce800d..0b62492477 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,3 +1,5 @@ +import collections + from openpype.lib.attribute_definitions import FileDef from openpype.pipeline.create import ( Creator, @@ -29,7 +31,11 @@ def _cache_and_get_instances(creator): shared_key = "openpype.traypublisher.instances" if shared_key not in creator.collection_shared_data: - creator.collection_shared_data[shared_key] = list_instances() + instances_by_creator_id = collections.defaultdict(list) + for instance_data in list_instances(): + creator_id = instance_data.get("creator_identifier") + instances_by_creator_id[creator_id].append(instance_data) + creator.collection_shared_data[shared_key] = instances_by_creator_id return creator.collection_shared_data[shared_key] @@ -37,13 +43,12 @@ class 
HiddenTrayPublishCreator(HiddenCreator): host_name = "traypublisher" def collect_instances(self): - for instance_data in _cache_and_get_instances(self): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) + instance_data_by_identifier = _cache_and_get_instances(self) + for instance_data in instance_data_by_identifier[self.identifier]: + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) def update_instances(self, update_list): update_instances(update_list) @@ -74,13 +79,12 @@ class TrayPublishCreator(Creator): host_name = "traypublisher" def collect_instances(self): - for instance_data in _cache_and_get_instances(self): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) + instance_data_by_identifier = _cache_and_get_instances(self) + for instance_data in instance_data_by_identifier[self.identifier]: + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) def update_instances(self, update_list): update_instances(update_list) From 3489a71b08d10478541b2e831f622eae81846620 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:42:43 +0200 Subject: [PATCH 1828/2550] settings creator allows thumbnail in precreation --- openpype/hosts/traypublisher/api/plugin.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 0b62492477..40877968e9 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -4,7 +4,8 @@ from openpype.lib.attribute_definitions import FileDef from openpype.pipeline.create import ( Creator, HiddenCreator, - CreatedInstance + CreatedInstance, + PRE_CREATE_THUMBNAIL_KEY, ) from .pipeline import ( @@ -114,11 +115,14 @@ class TrayPublishCreator(Creator): class SettingsCreator(TrayPublishCreator): create_allow_context_change = True + create_allow_thumbnail = True extensions = [] def create(self, subset_name, data, pre_create_data): # Pass precreate data to creator attributes + thumbnail_path = pre_create_data.pop(PRE_CREATE_THUMBNAIL_KEY, None) + data["creator_attributes"] = pre_create_data data["settings_creator"] = True # Create new instance @@ -126,6 +130,9 @@ class SettingsCreator(TrayPublishCreator): self._store_new_instance(new_instance) + if thumbnail_path: + self.set_instance_thumbnail_path(new_instance.id, thumbnail_path) + def get_instance_attr_defs(self): return [ FileDef( From 9c048478bb3870a1d50eb4e669fd6436218925a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:44:34 +0200 Subject: [PATCH 1829/2550] added small comment --- openpype/pipeline/create/creator_plugins.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 1e8423e48b..ef92b7ccc4 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -478,6 +478,9 @@ class Creator(BaseCreator): # A thumbnail can be passed in precreate attributes # - if is set to True is should expect that a thumbnail path under key # PRE_CREATE_THUMBNAIL_KEY can be sent in data with 
precreate data + # - is disabled by default because the feature was added in later stages + # and creators who would not expect PRE_CREATE_THUMBNAIL_KEY could + # cause issues with instance data create_allow_thumbnail = False # Precreate attribute definitions showed before creation From 90222b1b3fa6beddaf67f0989aa86a20074b6300 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:46:09 +0200 Subject: [PATCH 1830/2550] publisher has temp dir for thumbnails which is cleared up on publisher close --- openpype/tools/publisher/control.py | 39 +++++++++++++++++++++++++++++ openpype/tools/publisher/window.py | 1 + 2 files changed, 40 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index e05cffe20e..7a2f2bbb82 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -4,6 +4,8 @@ import logging import traceback import collections import uuid +import tempfile +import shutil from abc import ABCMeta, abstractmethod, abstractproperty import six @@ -24,6 +26,7 @@ from openpype.pipeline import ( KnownPublishError, registered_host, legacy_io, + get_process_id, ) from openpype.pipeline.create import ( CreateContext, @@ -1283,6 +1286,22 @@ class AbstractPublisherController(object): pass + @abstractmethod + def get_thumbnail_temp_dir_path(self): + """Return path to directory where thumbnails can be temporary stored. + + Returns: + str: Path to a directory. + """ + + pass + + @abstractmethod + def clear_thumbnail_temp_dir_path(self): + """Remove content of thumbnail temp directory.""" + + pass + class BasePublisherController(AbstractPublisherController): """Implement common logic for controllers. @@ -1523,6 +1542,26 @@ class BasePublisherController(AbstractPublisherController): return creator_item.icon return None + def get_thumbnail_temp_dir_path(self): + """Return path to directory where thumbnails can be temporary stored. + + Returns: + str: Path to a directory. + """ + + return os.path.join( + tempfile.gettempdir(), + "publisher_thumbnails", + get_process_id() + ) + + def clear_thumbnail_temp_dir_path(self): + """Remove content of thumbnail temp directory.""" + + dirpath = self.get_thumbnail_temp_dir_path() + if os.path.exists(dirpath): + shutil.rmtree(dirpath) + class PublisherController(BasePublisherController): """Middleware between UI, CreateContext and publish Context. 
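
The two controller helpers added above pair `get_process_id()` with a fixed "publisher_thumbnails" folder, so each publisher process gets its own scratch directory that can be wiped on close without touching other running sessions. Below is a minimal, standalone sketch of how those pieces combine; the helper names `make_thumbnail_scratch_dir` and `clear_thumbnail_scratch_dir` are illustrative only and not part of the OpenPype API, while the uuid/tempfile/shutil logic follows the diff:

    import os
    import shutil
    import tempfile
    import uuid

    _process_id = None


    def get_process_id():
        # Lazily created "fake" process id, stable for the lifetime
        # of this Python process.
        global _process_id
        if _process_id is None:
            _process_id = str(uuid.uuid4())
        return _process_id


    def make_thumbnail_scratch_dir():
        # Process-specific folder, e.g. /tmp/publisher_thumbnails/<uuid>
        path = os.path.join(
            tempfile.gettempdir(), "publisher_thumbnails", get_process_id()
        )
        if not os.path.exists(path):
            os.makedirs(path)
        return path


    def clear_thumbnail_scratch_dir():
        # Remove only this process's scratch folder; other processes
        # keep their own uuid-named folders untouched.
        path = make_thumbnail_scratch_dir()
        shutil.rmtree(path, ignore_errors=True)

A uuid is presumably used instead of `os.getpid()` so that leftover folders from crashed or stale sessions can never collide with a reused PID.
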
diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index a3387043b8..77d4339052 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -383,6 +383,7 @@ class PublisherWindow(QtWidgets.QDialog): def closeEvent(self, event): self.save_changes() self._reset_on_show = True + self._controller.clear_thumbnail_temp_dir_path() super(PublisherWindow, self).closeEvent(event) def save_changes(self): From 7c09494ad0d04a377d93e35ada9399370ba0a45a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:46:22 +0200 Subject: [PATCH 1831/2550] implemented getter and setters for thumbnails --- openpype/tools/publisher/control.py | 31 +++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 7a2f2bbb82..eb6425b820 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1259,6 +1259,14 @@ class AbstractPublisherController(object): def trigger_convertor_items(self, convertor_identifiers): pass + @abstractmethod + def get_thumbnail_paths_for_instances(self, instance_ids): + pass + + @abstractmethod + def set_thumbnail_paths_for_instances(self, thumbnail_path_mapping): + pass + @abstractmethod def set_comment(self, comment): """Set comment on pyblish context. @@ -1817,6 +1825,29 @@ class PublisherController(BasePublisherController): self._on_create_instance_change() + def get_thumbnail_paths_for_instances(self, instance_ids): + thumbnail_paths_by_instance_id = ( + self._create_context.thumbnail_paths_by_instance_id + ) + return { + instance_id: thumbnail_paths_by_instance_id.get(instance_id) + for instance_id in instance_ids + } + + def set_thumbnail_paths_for_instances(self, thumbnail_path_mapping): + thumbnail_paths_by_instance_id = ( + self._create_context.thumbnail_paths_by_instance_id + ) + for instance_id, thumbnail_path in thumbnail_path_mapping.items(): + thumbnail_paths_by_instance_id[instance_id] = thumbnail_path + + self._emit_event( + "instance.thumbnail.changed", + { + "mapping": thumbnail_path_mapping + } + ) + def emit_card_message( self, message, message_type=CardMessageTypes.standard ): From 82aea56768145fc5978a45cf65f3c2f2a26e1684 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:46:35 +0200 Subject: [PATCH 1832/2550] added forgotter abstract methods --- openpype/tools/publisher/control.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index eb6425b820..b113c9316a 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -1118,11 +1118,13 @@ class AbstractPublisherController(object): pass + @abstractmethod def save_changes(self): """Save changes in create context.""" pass + @abstractmethod def remove_instances(self, instance_ids): """Remove list of instances from create context.""" # TODO expect instance ids From 334ec3310fd586b0361b4237f58c509f833b4af9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:46:58 +0200 Subject: [PATCH 1833/2550] added potential implementation of remote qt publisher controller --- openpype/tools/publisher/control_qt.py | 46 ++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/openpype/tools/publisher/control_qt.py b/openpype/tools/publisher/control_qt.py index 56132a4046..8b5856f234 100644 --- a/openpype/tools/publisher/control_qt.py +++ b/openpype/tools/publisher/control_qt.py @@ 
-115,6 +115,11 @@ class QtRemotePublishController(BasePublisherController): super().__init__(*args, **kwargs) self._created_instances = {} + self._thumbnail_paths_by_instance_id = None + + def _reset_attributes(self): + super()._reset_attributes() + self._thumbnail_paths_by_instance_id = None @abstractmethod def _get_serialized_instances(self): @@ -180,6 +185,11 @@ class QtRemotePublishController(BasePublisherController): self.host_is_valid = event["value"] return + # Don't skip because UI want know about it too + if event.topic == "instance.thumbnail.changed": + for instance_id, path in event["mapping"].items(): + self.thumbnail_paths_by_instance_id[instance_id] = path + # Topics that can be just passed by because are not affecting # controller itself # - "show.card.message" @@ -256,6 +266,42 @@ class QtRemotePublishController(BasePublisherController): def get_existing_subset_names(self, asset_name): pass + @property + def thumbnail_paths_by_instance_id(self): + if self._thumbnail_paths_by_instance_id is None: + self._thumbnail_paths_by_instance_id = ( + self._collect_thumbnail_paths_by_instance_id() + ) + return self._thumbnail_paths_by_instance_id + + def get_thumbnail_path_for_instance(self, instance_id): + return self.thumbnail_paths_by_instance_id.get(instance_id) + + def set_thumbnail_path_for_instance(self, instance_id, thumbnail_path): + self._set_thumbnail_path_on_context(self, instance_id, thumbnail_path) + + @abstractmethod + def _collect_thumbnail_paths_by_instance_id(self): + """Collect thumbnail paths by instance id in remote controller. + + These should be collected from 'CreatedContext' there. + + Returns: + Dict[str, str]: Mapping of thumbnail path by instance id. + """ + + pass + + @abstractmethod + def _set_thumbnail_path_on_context(self, instance_id, thumbnail_path): + """Send change of thumbnail path in remote controller. + + That should trigger event 'instance.thumbnail.changed' which is + captured and handled in default implementation in this class. 
+ """ + + pass + @abstractmethod def get_subset_name( self, From 7a21dc8812c4c7720d2e12d67a9b27761c0f39bb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:47:30 +0200 Subject: [PATCH 1834/2550] thumbnail widget is using potential of controller --- .../tools/publisher/widgets/create_widget.py | 2 +- .../publisher/widgets/thumbnail_widget.py | 43 +++++++++++++------ openpype/tools/publisher/widgets/widgets.py | 2 +- 3 files changed, 32 insertions(+), 15 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index d47c2a07e0..fc35cd31cd 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -196,7 +196,7 @@ class CreateWidget(QtWidgets.QWidget): creator_basics_layout.addWidget(variant_subset_label, 0) creator_basics_layout.addWidget(variant_subset_widget, 0) - thumbnail_widget = ThumbnailWidget(creators_attrs_top) + thumbnail_widget = ThumbnailWidget(controller, creators_attrs_top) creators_attrs_top_layout = QtWidgets.QHBoxLayout(creators_attrs_top) creators_attrs_top_layout.setContentsMargins(0, 0, 0, 0) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 29bb6fb62f..c93b555d5b 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -1,5 +1,4 @@ import os -import tempfile import uuid from Qt import QtWidgets, QtCore, QtGui @@ -17,6 +16,8 @@ from openpype.lib.transcoding import ( from openpype.tools.utils import ( paint_image_with_color, ) +from openpype.tools.publisher.control import CardMessageTypes + from .icons import get_image @@ -30,7 +31,7 @@ class ThumbnailWidget(QtWidgets.QWidget): border_width = 1 offset_sep = 4 - def __init__(self, parent): + def __init__(self, controller, parent): # Missing implementation for thumbnail # - widget kept to make a visial offset of global attr widget offset super(ThumbnailWidget, self).__init__(parent) @@ -55,6 +56,9 @@ class ThumbnailWidget(QtWidgets.QWidget): self._last_height = None self._review_extensions = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) + self._controller = controller + self._output_dir = controller.get_thumbnail_temp_dir_path() + def _get_filepath_from_event(self, event): mime_data = event.mimeData() if not mime_data.hasUrls(): @@ -84,10 +88,17 @@ class ThumbnailWidget(QtWidgets.QWidget): def dropEvent(self, event): filepath = self._get_filepath_from_event(event) - if filepath: - output = export_thumbnail(filepath) - if output: - self.thumbnail_created.emit(output) + if not filepath: + return + + output = export_thumbnail(filepath, self._output_dir) + if output: + self.thumbnail_created.emit(output) + else: + self._controller.emit_card_message( + "Couldn't convert the source for thumbnail", + CardMessageTypes.error + ) def set_adapted_to_hint(self, enabled): self._adapted_to_size = enabled @@ -127,6 +138,16 @@ class ThumbnailWidget(QtWidgets.QWidget): self.setMinimumHeight(0) self._width = None + def set_current_thumbnails(self, thumbnail_paths=None): + pixes = [] + if thumbnail_paths: + for thumbnail_path in thumbnail_paths: + pixes.append(QtGui.QPixmap(thumbnail_path)) + + self._current_pixes = pixes or None + self._cached_pix = None + self.repaint() + def _get_current_pixes(self): if self._current_pixes is None: return [self._default_pix] @@ -181,10 +202,10 @@ class ThumbnailWidget(QtWidgets.QWidget): ) pos_x = int( 
(pix_width - scaled_pix.width()) / 2 - ) + self.border_width + ) pos_y = int( (pix_height - scaled_pix.height()) / 2 - ) + self.border_width + ) new_pix = QtGui.QPixmap(pix_width, pix_height) pix_painter = QtGui.QPainter() @@ -292,11 +313,7 @@ def _convert_thumbnail_ffmpeg(src_path, dst_path): return dst_path -def export_thumbnail(src_path): - root_dir = os.path.join( - tempfile.gettempdir(), - "publisher_thumbnails" - ) +def export_thumbnail(src_path, root_dir): if not os.path.exists(root_dir): os.makedirs(root_dir) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 290f69f280..ae32e5f42d 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1472,7 +1472,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): # Global attributes global_attrs_widget = GlobalAttrsWidget(controller, top_widget) - thumbnail_widget = ThumbnailWidget(top_widget) + thumbnail_widget = ThumbnailWidget(controller, top_widget) top_layout = QtWidgets.QHBoxLayout(top_widget) top_layout.setContentsMargins(0, 0, 0, 0) From 25d8139df229eb2c1f30d1219764cd2ff1cb9643 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:48:02 +0200 Subject: [PATCH 1835/2550] creator adds thumbnail to creators create --- .../tools/publisher/widgets/create_widget.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index fc35cd31cd..a610c405a4 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -1,9 +1,11 @@ +import os import re from Qt import QtWidgets, QtCore, QtGui from openpype.pipeline.create import ( SUBSET_NAME_ALLOWED_SYMBOLS, + PRE_CREATE_THUMBNAIL_KEY, TaskNotSetError, ) @@ -269,6 +271,7 @@ class CreateWidget(QtWidgets.QWidget): self._on_current_session_context_request ) tasks_widget.task_changed.connect(self._on_task_change) + thumbnail_widget.thumbnail_created.connect(self._on_thumbnail_create) controller.event_system.add_callback( "plugins.refresh.finished", self._on_plugins_refresh @@ -302,6 +305,7 @@ class CreateWidget(QtWidgets.QWidget): self._prereq_timer = prereq_timer self._first_show = True + self._last_thumbnail_path = None @property def current_asset_name(self): @@ -492,6 +496,14 @@ class CreateWidget(QtWidgets.QWidget): if self._context_change_is_enabled(): self._invalidate_prereq_deffered() + def _on_thumbnail_create(self, thumbnail_path): + last_path = self._last_thumbnail_path + if last_path and os.path.exists(last_path): + os.remove(last_path) + + self._last_thumbnail_path = thumbnail_path + self._thumbnail_widget.set_current_thumbnails([thumbnail_path]) + def _on_current_session_context_request(self): self._assets_widget.set_current_session_asset() task_name = self.current_task_name @@ -730,6 +742,8 @@ class CreateWidget(QtWidgets.QWidget): task_name = self._get_task_name() pre_create_data = self._pre_create_widget.current_value() + pre_create_data[PRE_CREATE_THUMBNAIL_KEY] = self._last_thumbnail_path + # Where to define these data? # - what data show be stored? 
instance_data = { @@ -749,3 +763,5 @@ class CreateWidget(QtWidgets.QWidget): if success: self._set_creator(self._selected_creator) self._controller.emit_card_message("Creation finished...") + self._last_thumbnail_path = None + self._thumbnail_widget.set_current_thumbnails() From e537d2d83c3249178bdd217776245ce73546210f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:48:23 +0200 Subject: [PATCH 1836/2550] handle thumbnail changes in subset widget --- openpype/tools/publisher/widgets/widgets.py | 65 ++++++++++++++++++++- 1 file changed, 64 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ae32e5f42d..1682e3e047 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -3,6 +3,8 @@ import os import re import copy import functools +import uuid +import shutil import collections from Qt import QtWidgets, QtCore, QtGui import qtawesome @@ -1064,6 +1066,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): def _on_submit(self): """Commit changes for selected instances.""" + variant_value = None asset_name = None task_name = None @@ -1132,6 +1135,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): def _on_cancel(self): """Cancel changes and set back to their irigin value.""" + self.variant_input.reset_to_origin() self.asset_value_widget.reset_to_origin() self.task_value_widget.reset_to_origin() @@ -1257,6 +1261,7 @@ class CreatorAttrsWidget(QtWidgets.QWidget): def set_instances_valid(self, valid): """Change valid state of current instances.""" + if ( self._content_widget is not None and self._content_widget.isEnabled() != valid @@ -1265,6 +1270,7 @@ class CreatorAttrsWidget(QtWidgets.QWidget): def set_current_instances(self, instances): """Set current instances for which are attribute definitions shown.""" + prev_content_widget = self._scroll_area.widget() if prev_content_widget: self._scroll_area.takeWidget() @@ -1354,6 +1360,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): families. Similar definitions are merged into one (different label does not count). """ + def __init__(self, controller, parent): super(PublishPluginAttrsWidget, self).__init__(parent) @@ -1387,6 +1394,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): def set_current_instances(self, instances, context_selected): """Set current instances for which are attribute definitions shown.""" + prev_content_widget = self._scroll_area.widget() if prev_content_widget: self._scroll_area.takeWidget() @@ -1560,6 +1568,11 @@ class SubsetAttributesWidget(QtWidgets.QWidget): self._on_instance_context_changed ) convert_btn.clicked.connect(self._on_convert_click) + thumbnail_widget.thumbnail_created.connect(self._on_thumbnail_create) + + controller.event_system.add_callback( + "instance.thumbnail.changed", self._on_thumbnail_changed + ) self._controller = controller @@ -1596,10 +1609,11 @@ class SubsetAttributesWidget(QtWidgets.QWidget): """Change currently selected items. Args: - instances(list): List of currently selected + instances(List[CreatedInstance]): List of currently selected instances. context_selected(bool): Is context selected. 
""" + all_valid = True for instance in instances: if not instance.has_valid_context: @@ -1620,3 +1634,52 @@ class SubsetAttributesWidget(QtWidgets.QWidget): ) self.creator_attrs_widget.set_instances_valid(all_valid) self.publish_attrs_widget.set_instances_valid(all_valid) + + self._update_thumbnails() + + def _on_thumbnail_create(self, path): + instance_ids = [ + instance.id + for instance in self._current_instances + ] + if self._context_selected: + instance_ids.append(None) + + if not instance_ids: + return + + mapping = {} + if len(instance_ids) == 1: + mapping[instance_ids[0]] = path + + else: + for instance_id in range(len(instance_ids)): + root = os.path.dirname(path) + ext = os.path.splitext(path)[-1] + dst_path = os.path.join(root, str(uuid.uuid4()) + ext) + shutil.copy(path, dst_path) + mapping[instance_id] = dst_path + + self._controller.set_thumbnail_paths_for_instances(mapping) + + def _on_thumbnail_changed(self, event): + self._update_thumbnails() + + def _update_thumbnails(self): + instance_ids = [ + instance.id + for instance in self._current_instances + ] + if self._context_selected: + instance_ids.append(None) + + mapping = self._controller.get_thumbnail_paths_for_instances( + instance_ids + ) + thumbnail_paths = [] + for instance_id in instance_ids: + path = mapping[instance_id] + if path: + thumbnail_paths.append(path) + + self._thumbnail_widget.set_current_thumbnails(thumbnail_paths) From eff9b5710e78b3c38948aab48d3d63cbec41964a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:56:31 +0200 Subject: [PATCH 1837/2550] CreateItem knows if support drop of thumbnails in create page --- openpype/tools/publisher/control.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index b113c9316a..10734a69f4 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -828,6 +828,7 @@ class CreatorItem: default_variant, default_variants, create_allow_context_change, + create_allow_thumbnail, pre_create_attributes_defs ): self.identifier = identifier @@ -841,6 +842,7 @@ class CreatorItem: self.default_variant = default_variant self.default_variants = default_variants self.create_allow_context_change = create_allow_context_change + self.create_allow_thumbnail = create_allow_thumbnail self.instance_attributes_defs = instance_attributes_defs self.pre_create_attributes_defs = pre_create_attributes_defs @@ -867,6 +869,7 @@ class CreatorItem: default_variants = None pre_create_attr_defs = None create_allow_context_change = None + create_allow_thumbnail = None if creator_type is CreatorTypes.artist: description = creator.get_description() detail_description = creator.get_detail_description() @@ -874,6 +877,7 @@ class CreatorItem: default_variants = creator.get_default_variants() pre_create_attr_defs = creator.get_pre_create_attr_defs() create_allow_context_change = creator.create_allow_context_change + create_allow_thumbnail = creator.create_allow_thumbnail identifier = creator.identifier return cls( @@ -889,6 +893,7 @@ class CreatorItem: default_variant, default_variants, create_allow_context_change, + create_allow_thumbnail, pre_create_attr_defs ) @@ -917,6 +922,7 @@ class CreatorItem: "default_variant": self.default_variant, "default_variants": self.default_variants, "create_allow_context_change": self.create_allow_context_change, + "create_allow_thumbnail": self.create_allow_thumbnail, "instance_attributes_defs": instance_attributes_defs, 
"pre_create_attributes_defs": pre_create_attributes_defs, } From ba849905c63fcef2aa630c5ab3a00be9b7f0ef17 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:56:51 +0200 Subject: [PATCH 1838/2550] thumbnail widdget can disable dropping --- .../publisher/widgets/thumbnail_widget.py | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index c93b555d5b..8c43602147 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -44,20 +44,25 @@ class ThumbnailWidget(QtWidgets.QWidget): default_image = get_image("thumbnail") default_pix = paint_image_with_color(default_image, border_color) + self._controller = controller + self._output_dir = controller.get_thumbnail_temp_dir_path() + self.border_color = border_color self.thumbnail_bg_color = thumbnail_bg_color self._default_pix = default_pix + + self._drop_enabled = True + self._current_pixes = None self._cached_pix = None + + self._review_extensions = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) + self._height = None self._width = None self._adapted_to_size = True self._last_width = None self._last_height = None - self._review_extensions = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) - - self._controller = controller - self._output_dir = controller.get_thumbnail_temp_dir_path() def _get_filepath_from_event(self, event): mime_data = event.mimeData() @@ -78,6 +83,10 @@ class ThumbnailWidget(QtWidgets.QWidget): return None def dragEnterEvent(self, event): + if not self._drop_enabled: + event.ignore() + return + filepath = self._get_filepath_from_event(event) if filepath: event.setDropAction(QtCore.Qt.CopyAction) @@ -87,6 +96,9 @@ class ThumbnailWidget(QtWidgets.QWidget): event.accept() def dropEvent(self, event): + if not self._drop_enabled: + return + filepath = self._get_filepath_from_event(event) if not filepath: return @@ -100,6 +112,13 @@ class ThumbnailWidget(QtWidgets.QWidget): CardMessageTypes.error ) + def set_drop_enabled(self, enabled): + if self._drop_enabled is enabled: + return + self._drop_enabled = enabled + self._cached_pix = None + self.repaint() + def set_adapted_to_hint(self, enabled): self._adapted_to_size = enabled if self._width is not None: @@ -149,6 +168,10 @@ class ThumbnailWidget(QtWidgets.QWidget): self.repaint() def _get_current_pixes(self): + if not self._drop_enabled: + # TODO different image for disabled drop + return [self._default_pix] + if self._current_pixes is None: return [self._default_pix] return self._current_pixes From 40fbc3a7e21fbc5f34bdcf275dfd3c189bc70392 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 27 Oct 2022 18:57:07 +0200 Subject: [PATCH 1839/2550] create widget is handling enabled dropping of thumbnails --- openpype/tools/publisher/constants.py | 7 ++++--- .../tools/publisher/widgets/create_widget.py | 16 ++++++++++++++-- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index 8bea69c812..74337ea1ab 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -20,9 +20,10 @@ INSTANCE_ID_ROLE = QtCore.Qt.UserRole + 1 SORT_VALUE_ROLE = QtCore.Qt.UserRole + 2 IS_GROUP_ROLE = QtCore.Qt.UserRole + 3 CREATOR_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 4 -FAMILY_ROLE = QtCore.Qt.UserRole + 5 -GROUP_ROLE = QtCore.Qt.UserRole + 6 
-CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 7 +CREATOR_THUMBNAIL_ENABLED_ROLE = QtCore.Qt.UserRole + 5 +FAMILY_ROLE = QtCore.Qt.UserRole + 6 +GROUP_ROLE = QtCore.Qt.UserRole + 7 +CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 8 __all__ = ( diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index a610c405a4..f0db132d98 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -20,7 +20,8 @@ from .precreate_widget import PreCreateWidget from ..constants import ( VARIANT_TOOLTIP, CREATOR_IDENTIFIER_ROLE, - FAMILY_ROLE + FAMILY_ROLE, + CREATOR_THUMBNAIL_ENABLED_ROLE, ) SEPARATORS = ("---separator---", "---") @@ -457,6 +458,10 @@ class CreateWidget(QtWidgets.QWidget): item.setData(creator_item.label, QtCore.Qt.DisplayRole) item.setData(identifier, CREATOR_IDENTIFIER_ROLE) + item.setData( + creator_item.create_allow_thumbnail, + CREATOR_THUMBNAIL_ENABLED_ROLE + ) item.setData(creator_item.family, FAMILY_ROLE) # Remove families that are no more available @@ -558,6 +563,10 @@ class CreateWidget(QtWidgets.QWidget): self._set_context_enabled(creator_item.create_allow_context_change) self._refresh_asset() + self._thumbnail_widget.set_drop_enabled( + creator_item.create_allow_thumbnail + ) + default_variants = creator_item.default_variants if not default_variants: default_variants = ["Main"] @@ -742,7 +751,10 @@ class CreateWidget(QtWidgets.QWidget): task_name = self._get_task_name() pre_create_data = self._pre_create_widget.current_value() - pre_create_data[PRE_CREATE_THUMBNAIL_KEY] = self._last_thumbnail_path + if index.data(CREATOR_THUMBNAIL_ENABLED_ROLE): + pre_create_data[PRE_CREATE_THUMBNAIL_KEY] = ( + self._last_thumbnail_path + ) # Where to define these data? # - what data show be stored? From 33416b4658a4df85dc1dd44cb2199bfed5615dbc Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Thu, 27 Oct 2022 13:59:35 -0700 Subject: [PATCH 1840/2550] Formatting and wording changes to admin_settings --- website/docs/admin_settings.md | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/website/docs/admin_settings.md b/website/docs/admin_settings.md index 9b00e6c612..d27ffe8d4c 100644 --- a/website/docs/admin_settings.md +++ b/website/docs/admin_settings.md @@ -11,7 +11,7 @@ OpenPype stores all of it's settings and configuration in the mongo database. To **Settings** GUI can be started from the tray menu Admin -> Studio Settings. -Please keep in mind that these settings are set-up for the full studio and not per-individual. If you're looking for individual artist settings, you can head to +Please keep in mind that these settings are set-up for the full studio and not per-individual. If you're looking for individual artist settings, you can head to [Local Settings](admin_settings_local.md) section in the artist documentation. ## Categories @@ -31,32 +31,32 @@ You'll find that settings are split into categories: System sections contains all settings that can be configured on a studio level, but cannot be changed on a per-project basis. These include mostly high level options like path to mongo database, toggling major modules on and off and configuring studio wide application -availability. +availability. ### Project -Project tab contains most of OpenPype settings and all of them can be configured and overridden on a per-project basis if need be. 
This includes most of the workflow behaviors -like what formats to export, naming conventions, publishing validations, automatic assets loaders and a lot more. +Project tab contains most of OpenPype settings and all of them can be configured and overridden on a per-project basis if need be. This includes most of the workflow behaviors +like what formats to export, naming conventions, publishing validations, automatic assets loaders and a lot more. We recommend to try to keep as many configurations as possible on a studio level and only override selectively, because micromanaging all of the project settings might become cumbersome down the line. Most of the settings can be safely adjusted and locked on a project after the production started. ## Understanding Overrides -Most of the individual settings can be set and overridden on multiple levels. +Most of the individual settings can be set and overridden on multiple levels. ### OpenPype defaults -When you first open settings all of the values and categories will be marked with a -light **grey labels** or a **grey vertical bar** on the left edge of the expandable category. +When you first open settings, all of the values and categories will be marked with either +light **grey labels** or a **grey vertical bar** on the left edge of the expandable category. -That means, the value has been left at OpenPype Default. If the default changes in future +The grey colouring signifies the value has been left at OpenPype Default. If the default changes in future OpenPype versions, these values will be reflect the change after you deploy the new version. ### Studio defaults Any values that you change and then press save in the bottom right corner, will be saved -as studio defaults. This means they will stay at those values even if you update your pype. -To make it clear which settings are set by you specifically, they are marked with a **green +as studio defaults. This means they will stay at those values even if you update your pype. +To make it clear which settings are set by you specifically, they are marked with a **green edge** and **green labels**, once set. To set studio default, just change the value in the system tab and press save. If you want @@ -76,10 +76,13 @@ You can also reset any settings to OpenPype default by doing `right click` and ` Many settings are useful to be adjusted on a per-project basis. To identify project overrides, they are marked with **orange edge** and **orange labels** in the settings GUI. -To set project overrides proceed the same way as with the Studio defaults, but first select -a particular project you want to be configuring on the left hand side of the Project Settings tab. +The process of settting project overrides is similar to setting the Studio defaults. The key difference is to select a particular project you want to be configure. Those projects can be found on the left hand side of the Project Settings tab. + +In the image below you can see all three overrides at the same time. +1. Deadline has **no changes to the OpenPype defaults** at all — **grey** colour of left bar. +2. Maya has **studio-wide defaults configured**, which are inherited in the particular project - **green** colour of left bar. +3. Nuke contains **project specific overrides** - **orange** colour of left bar. -Here you can see all three overrides at the same time. Deadline has not studio changes at all, Maya has some studio defaults configures and Nuke also contains project specific overrides. 
![colours_01](assets/settings/colours_02.png) Override colours work as breadcrumbs to allow quick identification of what was changed and where. As you can see on this image, Orange colour is propagated up the hierarchy even though only a single value (sync render version with workfile toggle), was changed. From 2c0e59da6af61567487a49326aa9afda65b175dd Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Thu, 27 Oct 2022 14:02:06 -0700 Subject: [PATCH 1841/2550] Italicized menu path --- website/docs/admin_settings.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/admin_settings.md b/website/docs/admin_settings.md index d27ffe8d4c..8626ef16ba 100644 --- a/website/docs/admin_settings.md +++ b/website/docs/admin_settings.md @@ -9,7 +9,7 @@ import TabItem from '@theme/TabItem'; OpenPype stores all of it's settings and configuration in the mongo database. To make the configuration as easy as possible we provide a robust GUI where you can access and change everything that is configurable -**Settings** GUI can be started from the tray menu Admin -> Studio Settings. +**Settings** GUI can be started from the tray menu *Admin -> Studio Settings*. Please keep in mind that these settings are set-up for the full studio and not per-individual. If you're looking for individual artist settings, you can head to [Local Settings](admin_settings_local.md) section in the artist documentation. From 5a3856b5654d75553b8abfe6e989995e8a17dd6c Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Thu, 27 Oct 2022 14:06:34 -0700 Subject: [PATCH 1842/2550] Fixed spelling and wording --- website/docs/admin_settings_system.md | 42 +++++++++++++-------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index 66715e7288..92f522104a 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -19,11 +19,11 @@ Settings applicable to the full studio. **`Admin Password`** - After setting admin password, normal user won't have access to OpenPype settings and Project Manager GUI. Please keep in mind that this is a studio wide password and it is meant purely -as a naive barier to prevent artists from accidental setting changes. +as a simple barrier to prevent artists from accidental setting changes. **`Environment`** - Globally applied environment variables that will be appended to any OpenPype process in the studio. -**`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up. +**`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up. Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume). ### FFmpeg and OpenImageIO tools @@ -58,10 +58,10 @@ their own attributes that need to be set, before they become fully functional. ### Avalon -**`Avalon Mongo Timeout`** - You might need to change this if your mongo connection is a bit slow. Making the +**`Avalon Mongo Timeout`** - You might need to change this if your mongo connection is a bit slow. Making the timeout longer will give Avalon better chance to connect. -**`Thumbnail Storage Location`** - simple disk storage path, where all thumbnails will be stored. +**`Thumbnail Storage Location`** - simple disk storage path, where all thumbnails will be stored. 
### Ftrack @@ -89,15 +89,15 @@ Disable/Enable Standalone Publisher option ### Deadline -**`Deadline Rest URL`** - URL to deadline webservice that. This URL must be reachable from every +**`Deadline Rest URL`** - URL to deadline webservice that. This URL must be reachable from every workstation that should be submitting render jobs to deadline via OpenPype. ### Muster -**`Muster Rest URL`** - URL to Muster webservice that. This URL must be reachable from every +**`Muster Rest URL`** - URL to Muster webservice that. This URL must be reachable from every workstation that should be submitting render jobs to muster via OpenPype. -**`templates mapping`** - you can customize Muster templates to match your existing setup here. +**`templates mapping`** - you can customize Muster templates to match your existing setup here. ### Clockify @@ -107,36 +107,36 @@ workstation that should be submitting render jobs to muster via OpenPype. **`Max Idle Time`** - Duration (minutes) of inactivity, after which currently running timer will be stopped. -**`Dialog popup time`** - Time in minutes, before the end of Max Idle ti, when a notification will alert +**`Dialog popup time`** - Time in minutes, before the end of Max Idle ti, when a notification will alert the user that their timer is about to be stopped. ### Idle Manager Service monitoring the activity, which triggers the Timers Manager timeouts. -### Logging +### Logging Module that allows storing all logging into the database for easier retrieval and support. ## Applications -In this section you can manage what Applications are available to your studio, locations of their -executables and their additional environments. In OpenPype context each application that is integrated is +In this section you can manage what Applications are available to your studio, locations of their +executables and their additional environments. In OpenPype context each application that is integrated is also called a `Host` and these two terms might be used interchangeably in the documentation. -Each Host is made of two levels. +Each Host is made of two levels. 1. **Application group** - This is the main name of the application and you can define extra environments that are applicable to all versions of the given application. For example any extra Maya scripts that are not version dependent, can be added to `Maya` environment here. -2. **Application versions** - Here you can define executables (per platform) for each supported version of -the DCC and any default arguments (`--nukex` for instance). You can also further extend it's environment. +2. **Application versions** - Here you can define executables (per platform) for each supported version of +the DCC and any default arguments (`--nukex` for instance). You can also further extend it's environment. ![settings_applications](assets/settings/applications_01.png) ### Environments -Please keep in mind that the environments are not additive by default, so if you are extending variables like -`PYTHONPATH`, or `PATH` make sure that you add themselves to the end of the list. +Please keep in mind that the environments are not additive by default, so if you are extending variables like +`PYTHONPATH`, or `PATH` make sure that you add themselves to the end of the list. For instance: @@ -151,7 +151,7 @@ For instance: ### Adding versions -It is possible to add new version for any supported application. There are two ways of doing it. +It is possible to add new version for any supported application. There are two ways of doing it. 1. 
**Add new executable** to an existing application version. This is a good way if you have multiple fully compatible versions of your DCC across the studio. Nuke is a typical example where multiple artists might have different `v#` releases of the same minor Nuke release. For example `12.2v3` and `12.3v6`. When you add both to `12.2` Nuke executables they will be treated the same in OpenPype and the system will automatically pick the first that it finds on an artist machine when launching. Their order is also the order of their priority when choosing which version to run if multiple are present. ![settings_applications](assets/settings/settings_addapplication.gif) @@ -161,16 +161,16 @@ It is possible to add new version for any supported application. There are two w ## Tools -A tool in openPype is anything that needs to be selectively added to your DCC applications. Most often these are plugins, modules, extensions or similar depending on what your package happens to call it. +A tool in openPype is anything that needs to be selectively added to your DCC applications. Most often these are plugins, modules, extensions or similar depending on what your package happens to call it. OpenPype comes with some major CG renderers pre-configured as an example, but these and any others will need to be changed to match your particular environment. -Their environment settings are split to two levels just like applications to allow more flexibility when setting them up. +Their environment settings are split to two levels just like applications to allow more flexibility when setting them up. -In the image before you can see that we set most of the environment variables in the general MTOA level, and only specify the version variable in the individual versions below. Because all environments within pype setting will resolve any cross references, this is enough to get a fully dynamic plugin loading as far as your folder structure where you store the plugins is nicely organized. +In the image before you can see that we set most of the environment variables in the general MTOA level, and only specify the version variable in the individual versions below. Because all environments within pype setting will resolve any cross references, this is enough to get a fully dynamic plugin loading as far as your folder structure where you store the plugins is nicely organized. -In this example MTOA will automatically will the `MAYA_VERSION`(which is set by Maya Application environment) and `MTOA_VERSION` into the `MTOA` variable. We then use the `MTOA` to set all the other variables needed for it to function within Maya. +In this example MTOA will automatically will the `MAYA_VERSION`(which is set by Maya Application environment) and `MTOA_VERSION` into the `MTOA` variable. We then use the `MTOA` to set all the other variables needed for it to function within Maya. ![tools](assets/settings/tools_01.png) All of the tools defined in here can then be assigned to projects. You can also change the tools versions on any project level all the way down to individual asset or shot overrides. So if you just need to upgrade you render plugin for a single shot, while not risking the incompatibilities on the rest of the project, it is possible. \ No newline at end of file From 3a08324a879471afa5d5916a60227c948f9e3df2 Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Thu, 27 Oct 2022 14:27:13 -0700 Subject: [PATCH 1843/2550] Fixed some wording and grammar. 
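The reworded FFmpeg/OpenImageIO paragraph in this patch describes `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` as lists of directories rather than explicit paths to the binaries. As a minimal sketch of that convention — the helper name, lookup order, and fallback below are assumptions for illustration, not OpenPype's actual implementation — resolving a tool executable from such a variable could look roughly like this:

```python
import os


def find_tool_executable(env_var, tool_name):
    """Return the first executable named ``tool_name`` found in the
    directories listed in ``env_var``, or None when nothing matches.

    Hypothetical helper for illustration only.
    """
    raw_value = os.environ.get(env_var) or ""
    # The documentation example uses colon-separated POSIX paths;
    # ``os.pathsep`` covers the platform-native separator as well.
    for directory in raw_value.split(os.pathsep):
        directory = directory.strip()
        if not directory:
            continue
        candidate = os.path.join(directory, tool_name)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None


# Example: pick up ffmpeg from the override variable described in the docs.
ffmpeg_executable = find_tool_executable("OPENPYPE_FFMPEG_PATHS", "ffmpeg")
```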
--- website/docs/admin_settings_system.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index 92f522104a..f03a4e8c7b 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -27,7 +27,8 @@ as a simple barrier to prevent artists from accidental setting changes. Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume). ### FFmpeg and OpenImageIO tools -We bundle FFmpeg tools for all platforms and OpenImageIO tools for Windows and Linux. By default are used bundled tools but it is possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings environments to look for them in different directory e.g. for different linux distributions or to add oiio support for MacOs. Values of both environment variables should lead to directory where tool executables are located (multiple paths are supported). +We bundle FFmpeg tools for all platforms and OpenImageIO tools for Windows and Linux. By default, bundled tools are used, but it is possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings environments to look for them in different directory. +For example—when using different Linux distributions in a facility that do not have a consistent install location or to add OIIO support for MacOS. Values of both environment variables should lead to directory where tool executables are located instead of an explicit path to the binary executable. Using multiple paths are supported, separated by colons, is supported—e.g. */usr/local/bin:$HOME/.local/bin* ### OpenPype deployment control **`Versions Repository`** - Location where automatic update mechanism searches for zip files with @@ -41,11 +42,11 @@ For more information about Production and Staging go to [Distribute](admin_distr **Production version** and **Staging version** fields will define which version will be used in studio. Filling explicit version will force new OpenPype processes to use it. That gives more control over studio deployment especially when some workstations don't have access to version repository (e.g. remote users). It can be also used to downgrade studio version when newer version have production breaking bug. -When fields are not filled the latest version in versions repository is used as studio version. That makes updating easier as it is not needed to modify settings but workstations without access to versions repository can't find out which OpenPype version should be used. +When fields are not filled, the latest version in the versions repository is used as studio version. That makes updating easier as it is not needed to modify settings, though workstations without access to versions repository can't find out which OpenPype version should be used. -If version repository is not set or is not accessible for workstation the latest available version on workstation is used or version inside build. +If **`Version Repository`** is not set or is not accessible for workstation, the latest available version on workstation is used or the version inside build. -**`Version check interval`** - OpenPype tray application check if currently used OpenPype version is up to date with production/staging version. It is possible to modify how often the validation is triggered in minutes. It is possible to set the interval to `0`. 
That will turn off version validations but it is not recommend. +**`Version check interval`** - The OpenPype tray application has the ability to check if its version currently in use is up to date with the Studio's production/staging version. It is possible to modify how often the validation is triggered in minutes. The interval can also be set to `0`, which will turn off version validations, but it is not recommend. A dialog asking for restart is shown when OpenPype tray application detect that different version should be used. ![general_settings](assets/settings/settings_system_version_update.png) From fae3f14af49f59e3ddc945dfe25a2450f8e45c9f Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Thu, 27 Oct 2022 14:31:15 -0700 Subject: [PATCH 1844/2550] Modified working --- website/docs/admin_settings_system.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index f03a4e8c7b..00936d724c 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -54,15 +54,13 @@ A dialog asking for restart is shown when OpenPype tray application detect that ## Modules -Configuration of OpenPype modules. Some can only be turned on and off, others have -their own attributes that need to be set, before they become fully functional. +Configuration of OpenPype's various modules. Some can only be toggled on or off, while others have their own attributes that need to be set before they become fully functional. ### Avalon -**`Avalon Mongo Timeout`** - You might need to change this if your mongo connection is a bit slow. Making the -timeout longer will give Avalon better chance to connect. +**`Avalon Mongo Timeout`** - This might need to be changed if your mongo connection is a bit slow. Making the timeout longer will give Avalon better chance to connect. -**`Thumbnail Storage Location`** - simple disk storage path, where all thumbnails will be stored. +**`Thumbnail Storage Location`** - simple disk storage path where all thumbnails will be stored. ### Ftrack From 4f07ddae9d0cfe5a64d212af6f98180c60e42c12 Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Thu, 27 Oct 2022 14:34:27 -0700 Subject: [PATCH 1845/2550] Modified wording for clarity. --- website/docs/admin_settings_system.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index 00936d724c..c1d6c0a9ef 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -75,8 +75,7 @@ Additional Action paths **`Intent`** - Special ftrack attribute that mark the intention of individual publishes. This setting will be reflected in publisher as well as ftrack custom attributes -**`Custom Attributes`** - Write and Read permissions for all OpenPype required ftrack custom attributes. The values should be -ftrack roles names. +**`Custom Attributes`** - Write and Read permissions for all OpenPype required ftrack custom attributes. Each values needs to be name of an ftrack role. ### Sync Server From c81b42d6a6d86d62e5c829b32d3093a1e4d502a7 Mon Sep 17 00:00:00 2001 From: "Ryan J. Quinlan" Date: Thu, 27 Oct 2022 14:37:25 -0700 Subject: [PATCH 1846/2550] Added punctuation for clarity. 
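The Applications section adjusted by this patch covers per-host executables and their environments; the same docs page stresses that environment lists are not additive by default, so a variable such as `PYTHONPATH` must reference itself at the end of the list to keep its existing value. A rough sketch of how such a self-reference could be expanded — the function below is illustrative only and not OpenPype's real environment merging code:

```python
import os


def expand_self_reference(variable, values):
    """Join settings-style values into one environment string, expanding a
    literal "{VAR}" token to the variable's current value.

    Illustrative only; real environment merging is more involved.
    """
    current = os.environ.get(variable, "")
    parts = []
    for value in values:
        expanded = value.replace("{%s}" % variable, current)
        if expanded:
            parts.append(expanded)
    return os.pathsep.join(parts)


# Mirrors the documentation's advice: reference the variable itself at the
# end of the list so the existing value is appended rather than replaced.
print(expand_self_reference("PYTHONPATH", ["/studio/scripts", "{PYTHONPATH}"]))
```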
--- website/docs/admin_settings_system.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md index c1d6c0a9ef..8aeb281109 100644 --- a/website/docs/admin_settings_system.md +++ b/website/docs/admin_settings_system.md @@ -119,7 +119,7 @@ Module that allows storing all logging into the database for easier retrieval an ## Applications In this section you can manage what Applications are available to your studio, locations of their -executables and their additional environments. In OpenPype context each application that is integrated is +executables, and their additional environments. In OpenPype context, each application that is integrated is also called a `Host` and these two terms might be used interchangeably in the documentation. Each Host is made of two levels. From f8cf9ce3a13ae012bcc30d3d0b2c83070ceb5052 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 10:15:20 +0200 Subject: [PATCH 1847/2550] store loaded reports locally on machine to specific directory and allow label changes --- .../publisher/publish_report_viewer/window.py | 341 +++++++++++++----- 1 file changed, 245 insertions(+), 96 deletions(-) diff --git a/openpype/tools/publisher/publish_report_viewer/window.py b/openpype/tools/publisher/publish_report_viewer/window.py index 678884677c..2c249d058c 100644 --- a/openpype/tools/publisher/publish_report_viewer/window.py +++ b/openpype/tools/publisher/publish_report_viewer/window.py @@ -1,11 +1,12 @@ import os import json import six +import uuid + import appdirs from Qt import QtWidgets, QtCore, QtGui from openpype import style -from openpype.lib import JSONSettingRegistry from openpype.resources import get_openpype_icon_filepath from openpype.tools import resources from openpype.tools.utils import ( @@ -23,38 +24,198 @@ else: from report_items import PublishReport -FILEPATH_ROLE = QtCore.Qt.UserRole + 1 -MODIFIED_ROLE = QtCore.Qt.UserRole + 2 +ITEM_ID_ROLE = QtCore.Qt.UserRole + 1 -class PublisherReportRegistry(JSONSettingRegistry): - """Class handling storing publish report tool. - - Attributes: - vendor (str): Name used for path construction. - product (str): Additional name used for path construction. +def get_reports_dir(): + """Root directory where publish reports are stored for next session. + Returns: + str: Path to directory where reports are stored. 
""" + report_dir = os.path.join( + appdirs.user_data_dir("openpype", "pypeclub"), + "publish_report_viewer" + ) + if not os.path.exists(report_dir): + os.makedirs(report_dir) + return report_dir + + +class PublishReportItem: + """Report item representing one file in report directory.""" + + def __init__(self, content): + item_id = content.get("id") + changed = False + if not item_id: + item_id = str(uuid.uuid4()) + changed = True + content["id"] = item_id + + if not content.get("report_version"): + changed = True + content["report_version"] = "0.0.1" + + report_path = os.path.join(get_reports_dir(), item_id) + file_modified = None + if os.path.exists(report_path): + file_modified = os.path.getmtime(report_path) + self.content = content + self.report_path = report_path + self.file_modified = file_modified + self._loaded_label = content.get("label") + self._changed = changed + self.publish_report = PublishReport(content) + + @property + def version(self): + return self.content["report_version"] + + @property + def id(self): + return self.content["id"] + + def get_label(self): + return self.content.get("label") or "Unfilled label" + + def set_label(self, label): + if not label: + self.content.pop("label", None) + self.content["label"] = label + + label = property(get_label, set_label) + + def save(self): + save = False + if ( + self._changed + or self._loaded_label != self.label + or not os.path.exists(self.report_path) + or self.file_modified != os.path.getmtime(self.report_path) + ): + save = True + + if not save: + return + + with open(self.report_path, "w") as stream: + json.dump(self.content, stream) + + self._loaded_label = self.content.get("label") + self._changed = False + self.file_modified = os.path.getmtime(self.report_path) + + @classmethod + def from_filepath(cls, filepath): + if not os.path.exists(filepath): + return None + + try: + with open(filepath, "r") as stream: + content = json.load(stream) + + return cls(content) + except Exception: + return None + + def remove_file(self): + if os.path.exists(self.report_path): + os.remove(self.report_path) + + def update_file_content(self): + if not os.path.exists(self.report_path): + return + + file_modified = os.path.getmtime(self.report_path) + if file_modified == self.file_modified: + return + + with open(self.report_path, "r") as stream: + content = json.load(self.content, stream) + + item_id = content.get("id") + version = content.get("report_version") + if not item_id: + item_id = str(uuid.uuid4()) + content["id"] = item_id + + if not version: + version = "0.0.1" + content["report_version"] = version + + self.content = content + self.file_modified = file_modified + + +class PublisherReportHandler: + """Class handling storing publish report tool.""" + def __init__(self): - self.vendor = "pypeclub" - self.product = "openpype" - name = "publish_report_viewer" - path = appdirs.user_data_dir(self.product, self.vendor) - super(PublisherReportRegistry, self).__init__(name, path) + self._reports = None + self._reports_by_id = {} + + def reset(self): + self._reports = None + self._reports_by_id = {} + + def list_reports(self): + if self._reports is not None: + return self._reports + + reports = [] + reports_by_id = {} + report_dir = get_reports_dir() + for filename in os.listdir(report_dir): + ext = os.path.splitext(filename)[-1] + if ext == ".json": + continue + filepath = os.path.join(report_dir, filename) + item = PublishReportItem.from_filepath(filepath) + reports.append(item) + reports_by_id[item.id] = item + + self._reports = 
reports + self._reports_by_id = reports_by_id + return reports + + def remove_report_items(self, item_id): + item = self._reports_by_id.get(item_id) + if item: + try: + item.remove_file() + self._reports_by_id.get(item_id) + except Exception: + pass -class LoadedFilesMopdel(QtGui.QStandardItemModel): +class LoadedFilesModel(QtGui.QStandardItemModel): def __init__(self, *args, **kwargs): - super(LoadedFilesMopdel, self).__init__(*args, **kwargs) - self.setColumnCount(2) - self._items_by_filepath = {} - self._reports_by_filepath = {} + super(LoadedFilesModel, self).__init__(*args, **kwargs) - self._registry = PublisherReportRegistry() + self._items_by_id = {} + self._report_items_by_id = {} + + self._handler = PublisherReportHandler() self._loading_registry = False - self._load_registry() + + def refresh(self): + self._handler.reset() + self._items_by_id = {} + self._report_items_by_id = {} + + new_items = [] + for report_item in self._handler.list_reports(): + item = self._create_item(report_item) + self._report_items_by_id[report_item.id] = report_item + self._items_by_id[report_item.id] = item + new_items.append(item) + + if new_items: + root_item = self.invisibleRootItem() + root_item.appendRows(new_items) def headerData(self, section, orientation, role): if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole): @@ -63,22 +224,7 @@ class LoadedFilesMopdel(QtGui.QStandardItemModel): if section == 1: return "Modified" return "" - super(LoadedFilesMopdel, self).headerData(section, orientation, role) - - def _load_registry(self): - self._loading_registry = True - try: - filepaths = self._registry.get_item("filepaths") - self.add_filepaths(filepaths) - except ValueError: - pass - self._loading_registry = False - - def _store_registry(self): - if self._loading_registry: - return - filepaths = list(self._items_by_filepath.keys()) - self._registry.set_item("filepaths", filepaths) + super(LoadedFilesModel, self).headerData(section, orientation, role) def data(self, index, role=None): if role is None: @@ -88,17 +234,28 @@ class LoadedFilesMopdel(QtGui.QStandardItemModel): if col != 0: index = self.index(index.row(), 0, index.parent()) - if role == QtCore.Qt.ToolTipRole: - if col == 0: - role = FILEPATH_ROLE - elif col == 1: - return "File modified" + return super(LoadedFilesModel, self).data(index, role) + + def setData(self, index, value, role): + if role == QtCore.Qt.EditRole: + item_id = index.data(ITEM_ID_ROLE) + report_item = self._report_items_by_id.get(item_id) + if report_item is not None: + report_item.label = value + report_item.save() + value = report_item.label + + return super(LoadedFilesModel, self).setData(index, value, role) + + def _create_item(self, report_item): + if report_item.id in self._items_by_id: return None - elif role == QtCore.Qt.DisplayRole: - if col == 1: - role = MODIFIED_ROLE - return super(LoadedFilesMopdel, self).data(index, role) + item = QtGui.QStandardItem(report_item.label) + item.setColumnCount(self.columnCount()) + item.setData(report_item.id, ITEM_ID_ROLE) + + return item def add_filepaths(self, filepaths): if not filepaths: @@ -110,9 +267,6 @@ class LoadedFilesMopdel(QtGui.QStandardItemModel): filtered_paths = [] for filepath in filepaths: normalized_path = os.path.normpath(filepath) - if normalized_path in self._items_by_filepath: - continue - if ( os.path.exists(normalized_path) and normalized_path not in filtered_paths @@ -127,54 +281,46 @@ class LoadedFilesMopdel(QtGui.QStandardItemModel): try: with open(normalized_path, "r") as stream: data = 
json.load(stream) - report = PublishReport(data) + report_item = PublishReportItem(data) except Exception: # TODO handle errors continue - modified = os.path.getmtime(normalized_path) - item = QtGui.QStandardItem(os.path.basename(normalized_path)) - item.setColumnCount(self.columnCount()) - item.setData(normalized_path, FILEPATH_ROLE) - item.setData(modified, MODIFIED_ROLE) + label = data.get("label") + if not label: + report_item.label = ( + os.path.splitext(os.path.basename(filepath))[0] + ) + + item = self._create_item(report_item) + if item is None: + continue + new_items.append(item) - self._items_by_filepath[normalized_path] = item - self._reports_by_filepath[normalized_path] = report + report_item.save() + self._items_by_id[report_item.id] = item + self._report_items_by_id[report_item.id] = report_item - if not new_items: + if new_items: + root_item = self.invisibleRootItem() + root_item.appendRows(new_items) + + def remove_item_by_id(self, item_id): + report_item = self._report_items_by_id.get(item_id) + if not report_item: return + self._handler.remove_report_items(item_id) + item = self._items_by_id.get(item_id) + parent = self.invisibleRootItem() - parent.appendRows(new_items) + parent.removeRow(item.row()) - self._store_registry() - - def remove_filepaths(self, filepaths): - if not filepaths: - return - - if isinstance(filepaths, six.string_types): - filepaths = [filepaths] - - filtered_paths = [] - for filepath in filepaths: - normalized_path = os.path.normpath(filepath) - if normalized_path in self._items_by_filepath: - filtered_paths.append(normalized_path) - - if not filtered_paths: - return - - parent = self.invisibleRootItem() - for filepath in filtered_paths: - self._reports_by_filepath.pop(normalized_path) - item = self._items_by_filepath.pop(filepath) - parent.removeRow(item.row()) - - self._store_registry() - - def get_report_by_filepath(self, filepath): - return self._reports_by_filepath.get(filepath) + def get_report_by_id(self, item_id): + report_item = self._report_items_by_id.get(item_id) + if report_item: + return report_item.publish_report + return None class LoadedFilesView(QtWidgets.QTreeView): @@ -182,11 +328,13 @@ class LoadedFilesView(QtWidgets.QTreeView): def __init__(self, *args, **kwargs): super(LoadedFilesView, self).__init__(*args, **kwargs) - self.setEditTriggers(self.NoEditTriggers) + self.setEditTriggers( + self.EditKeyPressed | self.SelectedClicked | self.DoubleClicked + ) self.setIndentation(0) self.setAlternatingRowColors(True) - model = LoadedFilesMopdel() + model = LoadedFilesModel() self.setModel(model) time_delegate = PrettyTimeDelegate() @@ -226,9 +374,10 @@ class LoadedFilesView(QtWidgets.QTreeView): def showEvent(self, event): super(LoadedFilesView, self).showEvent(event) - self._update_remove_btn() + self._model.refresh() header = self.header() header.resizeSections(header.ResizeToContents) + self._update_remove_btn() def _on_selection_change(self): self.selection_changed.emit() @@ -237,14 +386,14 @@ class LoadedFilesView(QtWidgets.QTreeView): self._model.add_filepaths(filepaths) self._fill_selection() - def remove_filepaths(self, filepaths): - self._model.remove_filepaths(filepaths) + def remove_item_by_id(self, item_id): + self._model.remove_item_by_id(item_id) self._fill_selection() def _on_remove_clicked(self): index = self.currentIndex() - filepath = index.data(FILEPATH_ROLE) - self.remove_filepaths(filepath) + item_id = index.data(ITEM_ID_ROLE) + self.remove_item_by_id(item_id) def _fill_selection(self): index = self.currentIndex() 
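# --- Illustration (not part of this patch): the classes added above persist
# each publish report as a JSON file named by its "id" inside the
# publish_report_viewer user-data directory and reload it on the next
# session.  The sample "label" value and the use of exist_ok are assumptions
# made only for this sketch.
import json
import os
import uuid

import appdirs

report_dir = os.path.join(
    appdirs.user_data_dir("openpype", "pypeclub"), "publish_report_viewer"
)
os.makedirs(report_dir, exist_ok=True)

content = {
    "id": str(uuid.uuid4()),
    "label": "My publish",
    "report_version": "0.0.1",
}
report_path = os.path.join(report_dir, content["id"])

# Save for the next session ...
with open(report_path, "w") as stream:
    json.dump(content, stream)

# ... and load it back, the way PublisherReportHandler.list_reports() would.
with open(report_path, "r") as stream:
    loaded_content = json.load(stream)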
@@ -257,8 +406,8 @@ class LoadedFilesView(QtWidgets.QTreeView): def get_current_report(self): index = self.currentIndex() - filepath = index.data(FILEPATH_ROLE) - return self._model.get_report_by_filepath(filepath) + item_id = index.data(ITEM_ID_ROLE) + return self._model.get_report_by_id(item_id) class LoadedFilesWidget(QtWidgets.QWidget): From f18e5c5896b8dc1fdcc9c1b08044d1a31a240d89 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 10:31:30 +0200 Subject: [PATCH 1848/2550] moved extract thumbnail from tray publisher to global plugins --- .../publish/extract_thumbnail_from_source.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/{hosts/traypublisher/plugins/publish/extract_thumbnail.py => plugins/publish/extract_thumbnail_from_source.py} (100%) diff --git a/openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail_from_source.py similarity index 100% rename from openpype/hosts/traypublisher/plugins/publish/extract_thumbnail.py rename to openpype/plugins/publish/extract_thumbnail_from_source.py From b42346e187cb5b18b6903074c0a31245c5d416fc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 10:31:49 +0200 Subject: [PATCH 1849/2550] use faster checks first --- .../publish/extract_thumbnail_from_source.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 96aefe0043..eaf48df5cb 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -41,12 +41,6 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): self.log.info( "Processing instance with subset name {}".format(subset_name) ) - - # Check if already has thumbnail created - if self._already_has_thumbnail(instance): - self.log.info("Thumbnail representation already present.") - return - thumbnail_source = instance.data.get("thumbnailSource") if not thumbnail_source: thumbnail_source = instance.context.data.get("thumbnailSource") @@ -55,10 +49,15 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): self.log.debug("Thumbnail source not filled. Skipping.") return - elif not os.path.exists(thumbnail_source): - self.log.debug( - "Thumbnail source file was not found {}. Skipping.".format( - thumbnail_source)) + # Check if already has thumbnail created + if self._already_has_thumbnail(instance): + self.log.info("Thumbnail representation already present.") + return + + if not os.path.exists(thumbnail_source): + self.log.debug(( + "Thumbnail source is set but file was not found {}. Skipping." 
+ ).format(thumbnail_source)) return # Create temp directory for thumbnail From 7a18d3d85efe1c55d0413768ade1a230fb00cb4f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 10:31:58 +0200 Subject: [PATCH 1850/2550] removed hosts filter --- openpype/plugins/publish/extract_thumbnail_from_source.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index eaf48df5cb..1d75b6c381 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -34,7 +34,6 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): label = "Extract Thumbnail (from source)" # Before 'ExtractThumbnail' in global plugins order = pyblish.api.ExtractorOrder - 0.00001 - hosts = ["traypublisher"] def process(self, instance): subset_name = instance.data["subset"] From c5790fa896f646401e765e2d77527f71e03c7d0a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 28 Oct 2022 10:50:56 +0200 Subject: [PATCH 1851/2550] :art: add switch method --- openpype/hosts/houdini/plugins/load/load_ass.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/plugins/load/load_ass.py b/openpype/hosts/houdini/plugins/load/load_ass.py index 710cd09c23..557d601677 100644 --- a/openpype/hosts/houdini/plugins/load/load_ass.py +++ b/openpype/hosts/houdini/plugins/load/load_ass.py @@ -85,3 +85,6 @@ class AssLoader(load.LoaderPlugin): ) return os.path.normpath(path).replace("\\", "/") + + def switch(self, container, representation): + self.update(container, representation) From a45be7d4b57c1108059b3d7887a21fb0afd8dd7f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 11:23:25 +0200 Subject: [PATCH 1852/2550] nuke: add 13.2 variant --- .../system_settings/applications.json | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 42eeb06191..03499a8567 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -192,6 +192,24 @@ ] }, "variants": { + "13-2": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" + ], + "darwin": [], + "linux": [ + "/usr/local/Nuke13.2v1/Nuke13.2" + ] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": {} + }, "13-0": { "use_python_2": false, "executables": { @@ -281,6 +299,7 @@ "environment": {} }, "__dynamic_keys_labels__": { + "13-2": "13.2", "13-0": "13.0", "12-2": "12.2", "12-0": "12.0", @@ -301,6 +320,30 @@ ] }, "variants": { + "13-2": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" + ], + "darwin": [], + "linux": [ + "/usr/local/Nuke13.2v1/Nuke13.2" + ] + }, + "arguments": { + "windows": [ + "--nukex" + ], + "darwin": [ + "--nukex" + ], + "linux": [ + "--nukex" + ] + }, + "environment": {} + }, "13-0": { "use_python_2": false, "executables": { @@ -420,6 +463,7 @@ "environment": {} }, "__dynamic_keys_labels__": { + "13-2": "13.2", "13-0": "13.0", "12-2": "12.2", "12-0": "12.0", @@ -438,6 +482,30 @@ "TAG_ASSETBUILD_STARTUP": "0" }, "variants": { + "13-2": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" + ], + "darwin": [], + "linux": 
[ + "/usr/local/Nuke13.2v1/Nuke13.2" + ] + }, + "arguments": { + "windows": [ + "--studio" + ], + "darwin": [ + "--studio" + ], + "linux": [ + "--studio" + ] + }, + "environment": {} + }, "13-0": { "use_python_2": false, "executables": { @@ -555,6 +623,7 @@ "environment": {} }, "__dynamic_keys_labels__": { + "13-2": "13.2", "13-0": "13.0", "12-2": "12.2", "12-0": "12.0", @@ -573,6 +642,30 @@ "TAG_ASSETBUILD_STARTUP": "0" }, "variants": { + "13-2": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.2v1\\Nuke13.2.exe" + ], + "darwin": [], + "linux": [ + "/usr/local/Nuke13.2v1/Nuke13.2" + ] + }, + "arguments": { + "windows": [ + "--hiero" + ], + "darwin": [ + "--hiero" + ], + "linux": [ + "--hiero" + ] + }, + "environment": {} + }, "13-0": { "use_python_2": false, "executables": { @@ -692,6 +785,7 @@ "environment": {} }, "__dynamic_keys_labels__": { + "13-2": "13.2", "13-0": "13.0", "12-2": "12.2", "12-0": "12.0", From fec7df18f12334a11ff0cb61177993d5dc89f626 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 12:07:20 +0200 Subject: [PATCH 1853/2550] use colors from style data --- openpype/tools/publisher/widgets/thumbnail_widget.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 8c43602147..dcb18d9cb7 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -2,6 +2,7 @@ import os import uuid from Qt import QtWidgets, QtCore, QtGui +from openpype.style import get_objected_colors from openpype.lib import ( run_subprocess, is_oiio_supported, @@ -37,9 +38,8 @@ class ThumbnailWidget(QtWidgets.QWidget): super(ThumbnailWidget, self).__init__(parent) self.setAcceptDrops(True) - # TODO remove hardcoded colors - border_color = QtGui.QColor(67, 74, 86) - thumbnail_bg_color = QtGui.QColor(54, 61, 72) + border_color = get_objected_colors("bg-buttons").get_qcolor() + thumbnail_bg_color = get_objected_colors("border").get_qcolor() default_image = get_image("thumbnail") default_pix = paint_image_with_color(default_image, border_color) From f7dec32e5c50836273d1c294b1d1733ff1671e1b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 12:08:49 +0200 Subject: [PATCH 1854/2550] draw disabled drop with slashed circle --- .../publisher/widgets/thumbnail_widget.py | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index dcb18d9cb7..48f40f7b5b 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -1,5 +1,7 @@ import os import uuid +import math + from Qt import QtWidgets, QtCore, QtGui from openpype.style import get_objected_colors @@ -40,6 +42,7 @@ class ThumbnailWidget(QtWidgets.QWidget): border_color = get_objected_colors("bg-buttons").get_qcolor() thumbnail_bg_color = get_objected_colors("border").get_qcolor() + overlay_color = get_objected_colors("font").get_qcolor() default_image = get_image("thumbnail") default_pix = paint_image_with_color(default_image, border_color) @@ -49,6 +52,7 @@ class ThumbnailWidget(QtWidgets.QWidget): self.border_color = border_color self.thumbnail_bg_color = thumbnail_bg_color + self.overlay_color = overlay_color self._default_pix = default_pix self._drop_enabled = True @@ -231,8 +235,14 @@ class 
ThumbnailWidget(QtWidgets.QWidget): ) new_pix = QtGui.QPixmap(pix_width, pix_height) + new_pix.fill(QtCore.Qt.transparent) pix_painter = QtGui.QPainter() pix_painter.begin(new_pix) + pix_painter.setRenderHints( + pix_painter.Antialiasing + | pix_painter.SmoothPixmapTransform + | pix_painter.HighQualityAntialiasing + ) pix_painter.setBrush(pix_bg_brush) pix_painter.setPen(pix_pen) pix_painter.drawRect(0, 0, pix_width - 1, pix_height - 1) @@ -253,14 +263,63 @@ class ThumbnailWidget(QtWidgets.QWidget): final_painter = QtGui.QPainter() final_painter.begin(final_pix) + final_painter.setRenderHints( + final_painter.Antialiasing + | final_painter.SmoothPixmapTransform + | final_painter.HighQualityAntialiasing + ) for idx, pix in enumerate(backgrounded_images): x_offset = full_width_offset - (width_offset_part * idx) y_offset = (height_offset_part * idx) + pix_y_offset final_painter.drawPixmap(x_offset, y_offset, pix) + + if not self._drop_enabled: + overlay = self._get_drop_disabled_overlay(rect_width, rect_height) + final_painter.drawPixmap(0, 0, overlay) + final_painter.end() self._cached_pix = final_pix + def _get_drop_disabled_overlay(self, width, height): + min_size = min(width, height) + circle_size = int(min_size * 0.8) + pen_width = int(circle_size * 0.1) + if pen_width < 1: + pen_width = 1 + + x_offset = int((width - circle_size) / 2) + y_offset = int((height - circle_size) / 2) + half_size = int(circle_size / 2) + angle = math.radians(45) + line_offset_p = QtCore.QPoint( + half_size * math.cos(angle), + half_size * math.sin(angle) + ) + overlay_pix = QtGui.QPixmap(width, height) + overlay_pix.fill(QtCore.Qt.transparent) + + painter = QtGui.QPainter() + painter.begin(overlay_pix) + painter.setRenderHints( + painter.Antialiasing + | painter.SmoothPixmapTransform + | painter.HighQualityAntialiasing + ) + painter.setBrush(QtCore.Qt.transparent) + pen = QtGui.QPen(self.overlay_color) + pen.setWidth(pen_width) + painter.setPen(pen) + rect = QtCore.QRect(x_offset, y_offset, circle_size, circle_size) + painter.drawEllipse(rect) + painter.drawLine( + rect.center() - line_offset_p, + rect.center() + line_offset_p + ) + painter.end() + + return overlay_pix + def _get_pix_offset_size(self, width, height, image_count): if image_count == 1: return 0, 0 From 4cf0fe9145f67d17e19232ba4b0ae1f769a1cb3d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 12:14:36 +0200 Subject: [PATCH 1855/2550] disable drop when no instance is selected --- openpype/tools/publisher/widgets/widgets.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 1682e3e047..96addb70a3 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1673,6 +1673,11 @@ class SubsetAttributesWidget(QtWidgets.QWidget): if self._context_selected: instance_ids.append(None) + if not instance_ids: + self._thumbnail_widget.set_drop_enabled(False) + self._thumbnail_widget.set_current_thumbnails(None) + return + mapping = self._controller.get_thumbnail_paths_for_instances( instance_ids ) From b3ca4abf6d0b191e57f001cbfc89a69baa7741e9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 12:45:07 +0200 Subject: [PATCH 1856/2550] allow the drop if instances are selected --- openpype/tools/publisher/widgets/widgets.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 96addb70a3..fb9bd761f4 
100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1687,4 +1687,5 @@ class SubsetAttributesWidget(QtWidgets.QWidget): if path: thumbnail_paths.append(path) + self._thumbnail_widget.set_drop_enabled(True) self._thumbnail_widget.set_current_thumbnails(thumbnail_paths) From 6c80a7f81f1e4ea5df27480ed0a2d7bb60fe165b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 13:07:44 +0200 Subject: [PATCH 1857/2550] context thumbnail is not used directly in extract thumbnail from source but creates thumbnail elsewhere and store it to "thumbnailPath" key on context --- .../publish/extract_thumbnail_from_source.py | 66 ++++++++++++------- 1 file changed, 43 insertions(+), 23 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 1d75b6c381..df877cec29 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -36,23 +36,49 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder - 0.00001 def process(self, instance): + self._create_context_thumbnail(instance.context) + subset_name = instance.data["subset"] self.log.info( "Processing instance with subset name {}".format(subset_name) ) thumbnail_source = instance.data.get("thumbnailSource") - if not thumbnail_source: - thumbnail_source = instance.context.data.get("thumbnailSource") - if not thumbnail_source: self.log.debug("Thumbnail source not filled. Skipping.") return # Check if already has thumbnail created - if self._already_has_thumbnail(instance): + if self._instance_has_thumbnail(instance): self.log.info("Thumbnail representation already present.") return + dst_filepath = self._create_thumbnail( + instance.context, thumbnail_source + ) + if not dst_filepath: + return + + dst_staging, dst_filename = os.path.split(dst_filepath) + new_repre = { + "name": "thumbnail", + "ext": "jpg", + "files": dst_filename, + "stagingDir": dst_staging, + "thumbnail": True, + "tags": ["thumbnail"] + } + + # adding representation + self.log.debug( + "Adding thumbnail representation: {}".format(new_repre) + ) + instance.data["representations"].append(new_repre) + + def _create_thumbnail(self, context, thumbnail_source): + if not thumbnail_source: + self.log.debug("Thumbnail source not filled. Skipping.") + return + if not os.path.exists(thumbnail_source): self.log.debug(( "Thumbnail source is set but file was not found {}. Skipping." 
@@ -66,7 +92,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): "Create temp directory {} for thumbnail".format(dst_staging) ) # Store new staging to cleanup paths - instance.context.data["cleanupFullPaths"].append(dst_staging) + context.data["cleanupFullPaths"].append(dst_staging) thumbnail_created = False oiio_supported = is_oiio_supported() @@ -98,26 +124,12 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): ) # Skip representation and try next one if wasn't created - if not thumbnail_created: - self.log.warning("Thumbanil has not been created.") - return + if thumbnail_created: + return full_output_path - new_repre = { - "name": "thumbnail", - "ext": "jpg", - "files": dst_filename, - "stagingDir": dst_staging, - "thumbnail": True, - "tags": ["thumbnail"] - } + self.log.warning("Thumbanil has not been created.") - # adding representation - self.log.debug( - "Adding thumbnail representation: {}".format(new_repre) - ) - instance.data["representations"].append(new_repre) - - def _already_has_thumbnail(self, instance): + def _instance_has_thumbnail(self, instance): if "representations" not in instance.data: self.log.warning( "Instance does not have 'representations' key filled" @@ -172,3 +184,11 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): exc_info=True ) return False + + def _create_context_thumbnail(self, context): + if "thumbnailPath" in context.data: + return + + thumbnail_source = context.data.get("thumbnailSource") + thumbnail_path = self._create_thumbnail(thumbnail_source) + context.data["thumbnailPath"] = thumbnail_path From 831023b3029a554ef9b390c2a5659186bd10ee57 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 13:08:01 +0200 Subject: [PATCH 1858/2550] integrate thumbnail can use context thumbnail (if is available) --- .../plugins/publish/integrate_thumbnail.py | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index e7046ba2ea..d8a3a00041 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -33,6 +33,8 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ] def process(self, instance): + context_thumbnail_path = instance.context.get("thumbnailPath") + env_key = "AVALON_THUMBNAIL_ROOT" thumbnail_root_format_key = "{thumbnail_root}" thumbnail_root = os.environ.get(env_key) or "" @@ -66,37 +68,43 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ).format(env_key)) return + version_id = None thumb_repre = None thumb_repre_anatomy_data = None for repre_info in published_repres.values(): repre = repre_info["representation"] + if version_id is None: + version_id = repre["parent"] + if repre["name"].lower() == "thumbnail": thumb_repre = repre thumb_repre_anatomy_data = repre_info["anatomy_data"] break + # Use context thumbnail (if is available) if not thumb_repre: self.log.debug( "There is not representation with name \"thumbnail\"" ) - return + src_full_path = context_thumbnail_path + else: + # Get full path to thumbnail file from representation + src_full_path = os.path.normpath(thumb_repre["data"]["path"]) - version = get_version_by_id(project_name, thumb_repre["parent"]) - if not version: - raise AssertionError( - "There does not exist version with id {}".format( - str(thumb_repre["parent"]) - ) - ) - - # Get full path to thumbnail file from representation - src_full_path = 
os.path.normpath(thumb_repre["data"]["path"]) if not os.path.exists(src_full_path): self.log.warning("Thumbnail file was not found. Path: {}".format( src_full_path )) return + version = get_version_by_id(project_name, version_id) + if not version: + raise AssertionError( + "There does not exist version with id {}".format( + str(version_id) + ) + ) + filename, file_extension = os.path.splitext(src_full_path) # Create id for mongo entity now to fill anatomy template thumbnail_doc = new_thumbnail_doc() From b8719c6cd2906021aadb359572a178238b24cd58 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 13:45:58 +0200 Subject: [PATCH 1859/2550] don't remove last path --- openpype/tools/publisher/widgets/create_widget.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index f0db132d98..e3c171912f 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -502,10 +502,6 @@ class CreateWidget(QtWidgets.QWidget): self._invalidate_prereq_deffered() def _on_thumbnail_create(self, thumbnail_path): - last_path = self._last_thumbnail_path - if last_path and os.path.exists(last_path): - os.remove(last_path) - self._last_thumbnail_path = thumbnail_path self._thumbnail_widget.set_current_thumbnails([thumbnail_path]) From 5861480c77725d11f87d312e964321b9d4b889fb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 13:46:38 +0200 Subject: [PATCH 1860/2550] add missing argument --- openpype/plugins/publish/extract_thumbnail_from_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index df877cec29..8da1213807 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -190,5 +190,5 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): return thumbnail_source = context.data.get("thumbnailSource") - thumbnail_path = self._create_thumbnail(thumbnail_source) + thumbnail_path = self._create_thumbnail(context, thumbnail_source) context.data["thumbnailPath"] = thumbnail_path From 1b040af666233794f78751f0061b8b6bbbe3c81f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 28 Oct 2022 13:50:00 +0200 Subject: [PATCH 1861/2550] removed unused import --- openpype/tools/publisher/widgets/create_widget.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index e3c171912f..a57a8791a8 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -1,4 +1,3 @@ -import os import re from Qt import QtWidgets, QtCore, QtGui From 57b81b4b5b5cd4ab98bfb9d73a8a69ba208bb061 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 14:41:58 +0200 Subject: [PATCH 1862/2550] hiero: loading effects --- .../hosts/hiero/plugins/load/load_effects.py | 259 ++++++++++++++++++ 1 file changed, 259 insertions(+) create mode 100644 openpype/hosts/hiero/plugins/load/load_effects.py diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py new file mode 100644 index 0000000000..40f8d66d0c --- /dev/null +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -0,0 +1,259 @@ +import json +from collections import 
OrderedDict +from pprint import pprint +import six + +from openpype.pipeline import ( + AVALON_CONTAINER_ID, + load +) +from openpype.hosts.hiero import api as phiero +from openpype.hosts.hiero.api import tags + + +class LoadEffects(load.LoaderPlugin): + """Loading colorspace soft effect exported from nukestudio""" + + representations = ["effectJson"] + families = ["effect"] + + label = "Load Effects" + order = 0 + icon = "cc" + color = "white" + ignore_attr = ["useLifetime"] + + def load(self, context, name, namespace, data): + """ + Loading function to get the soft effects to particular read node + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + active_sequence = phiero.get_current_sequence() + active_track = phiero.get_current_track( + active_sequence, "LoadedEffects") + + # get main variables + version = context['version'] + version_data = version.get("data", {}) + vname = version.get("name", None) + namespace = namespace or context['asset']['name'] + object_name = "{}_{}".format(name, namespace) + + data_imprint = { + "source": version_data["source"], + "version": vname, + "author": version_data["author"], + } + + # getting file path + file = self.fname.replace("\\", "/") + + # getting data from json file with unicode conversion + with open(file, "r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).items()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f) + + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + + for ef_name, ef_val in nodes_order.items(): + pprint("_" * 100) + pprint(ef_name) + pprint(ef_val) + new_name = "{}_loaded".format(ef_name) + if new_name not in used_subtracks: + effect_track_item = active_track.createEffect( + effectType=ef_val["class"], + timelineIn=ef_val["timelineIn"], + timelineOut=ef_val["timelineOut"] + ) + effect_track_item.setName(new_name) + node = effect_track_item.node() + for knob_name, knob_value in ef_val["node"].items(): + if ( + not knob_value + or knob_name == "name" + ): + continue + node[knob_name].setValue(knob_value) + + self.containerise( + active_track, + name=name, + namespace=namespace, + object_name=object_name, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + return + + def update(self, container, representation): + """Update the Loader's path + + Nuke automatically tries to reset some variables when changing + the loader's path to a new file. 
These automatic changes are to its + inputs: + + """ + pass + + def reorder_nodes(self, data): + new_order = OrderedDict() + trackNums = [v["trackIndex"] for k, v in data.items() + if isinstance(v, dict)] + subTrackNums = [v["subTrackIndex"] for k, v in data.items() + if isinstance(v, dict)] + + for trackIndex in range( + min(trackNums), max(trackNums) + 1): + for subTrackIndex in range( + min(subTrackNums), max(subTrackNums) + 1): + item = self.get_item(data, trackIndex, subTrackIndex) + if item is not {}: + new_order.update(item) + return new_order + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if isinstance(val, dict) + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings + It goes through all dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.items()} + elif isinstance(input, list): + return [self.byteify(element) for element in input] + elif isinstance(input, six.text_type): + return str(input) + else: + return input + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + pass + + def containerise( + self, + track, + name, + namespace, + object_name, + context, + loader=None, + data=None + ): + """Bundle Hiero's object into an assembly and imprint it with metadata + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + track_item (hiero.core.TrackItem): object to imprint as container + name (str): Name of resulting assembly + namespace (str): Namespace under which to host container + context (dict): Asset information + loader (str, optional): Name of node used to produce this container. + + Returns: + track_item (hiero.core.TrackItem): containerised object + + """ + + data_imprint = { + object_name: { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": str(name), + "namespace": str(namespace), + "loader": str(loader), + "representation": str(context["representation"]["_id"]), + } + } + + if data: + for k, v in data.items(): + data_imprint[object_name].update({k: v}) + + self.log.debug("_ data_imprint: {}".format(data_imprint)) + self.set_track_openpype_tag(track, data_imprint) + + def set_track_openpype_tag(self, track, data=None): + """ + Set pype track item tag to input track_item. + + Attributes: + trackItem (hiero.core.TrackItem): hiero object + + Returns: + hiero.core.Tag + """ + data = data or {} + + # basic Tag's attribute + tag_data = { + "editable": "0", + "note": "OpenPype data container", + "icon": "openpype_icon.png", + "metadata": dict(data.items()) + } + # get available pype tag if any + _tag = self.get_track_openpype_tag(track) + + if _tag: + # it not tag then create one + tag = tags.update_tag(_tag, tag_data) + else: + # if pype tag available then update with input data + tag = tags.create_tag(phiero.pype_tag_name, tag_data) + # add it to the input track item + track.addTag(tag) + + return tag + + def get_track_openpype_tag(self, track): + """ + Get pype track item tag created by creator or loader plugin. 
+ + Attributes: + trackItem (hiero.core.TrackItem): hiero object + + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if tag.name() == phiero.pype_tag_name: + return tag From 9e0993a53dc0e2aba4afd208bb977e8647e6d2ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 28 Oct 2022 14:44:14 +0200 Subject: [PATCH 1863/2550] Update openpype/hosts/nuke/plugins/load/load_clip.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/nuke/plugins/load/load_clip.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index aa5b1dfed1..666312167f 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -91,7 +91,7 @@ class LoadClip(plugin.NukeLoader): frame = repre_cont.get("frame") assert frame, "Representation is not sequence" - padding = len(frame) + padding = len(str(frame)) basename = basename.replace(frame, "#" * padding) return os.path.join(dirname, basename).replace("\\", "/") From 302af6bc3a738351e12992fd260a8092cf2c4d47 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 15:07:43 +0200 Subject: [PATCH 1864/2550] update changelog --- CHANGELOG.md | 39 +++++++++++++++- HISTORY.md | 123 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 160 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5464c390ce..707b61676f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,43 @@ # Changelog -## [3.14.5](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.6](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...HEAD) + +### 📖 Documentation + +- Documentation: Minor updates to dev\_requirements.md [\#4025](https://github.com/pypeclub/OpenPype/pull/4025) + +**🆕 New features** + +- Nuke: add 13.2 variant [\#4041](https://github.com/pypeclub/OpenPype/pull/4041) + +**🚀 Enhancements** + +- Publish Report Viewer: Store reports locally on machine [\#4040](https://github.com/pypeclub/OpenPype/pull/4040) +- General: More specific error in burnins script [\#4026](https://github.com/pypeclub/OpenPype/pull/4026) +- General: Extract review does not crash with old settings overrides [\#4023](https://github.com/pypeclub/OpenPype/pull/4023) +- Publisher: Convertors for legacy instances [\#4020](https://github.com/pypeclub/OpenPype/pull/4020) +- workflows: adding milestone creator and assigner [\#4018](https://github.com/pypeclub/OpenPype/pull/4018) +- Publisher: Catch creator errors [\#4015](https://github.com/pypeclub/OpenPype/pull/4015) + +**🐛 Bug fixes** + +- Hiero - effect collection fixes [\#4038](https://github.com/pypeclub/OpenPype/pull/4038) +- Nuke - loader clip correct hash conversion in path [\#4037](https://github.com/pypeclub/OpenPype/pull/4037) +- Maya: Soft fail when applying capture preset [\#4034](https://github.com/pypeclub/OpenPype/pull/4034) +- Igniter: handle missing directory [\#4032](https://github.com/pypeclub/OpenPype/pull/4032) +- StandalonePublisher: Fix thumbnail publishing [\#4029](https://github.com/pypeclub/OpenPype/pull/4029) +- Experimental Tools: Fix publisher import 
[\#4027](https://github.com/pypeclub/OpenPype/pull/4027) +- Houdini: fix wrong path in ASS loader [\#4016](https://github.com/pypeclub/OpenPype/pull/4016) + +**🔀 Refactored code** + +- General: Import lib functions from lib [\#4017](https://github.com/pypeclub/OpenPype/pull/4017) + +## [3.14.5](https://github.com/pypeclub/OpenPype/tree/3.14.5) (2022-10-24) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...3.14.5) **🚀 Enhancements** diff --git a/HISTORY.md b/HISTORY.md index ca54c60273..f6cc74e114 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,128 @@ # Changelog +## [3.14.5](https://github.com/pypeclub/OpenPype/tree/3.14.5) (2022-10-24) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...3.14.5) + +**🚀 Enhancements** + +- Maya: add OBJ extractor to model family [\#4021](https://github.com/pypeclub/OpenPype/pull/4021) +- Publish report viewer tool [\#4010](https://github.com/pypeclub/OpenPype/pull/4010) +- Nuke | Global: adding custom tags representation filtering [\#4009](https://github.com/pypeclub/OpenPype/pull/4009) +- Publisher: Create context has shared data for collection phase [\#3995](https://github.com/pypeclub/OpenPype/pull/3995) +- Resolve: updating to v18 compatibility [\#3986](https://github.com/pypeclub/OpenPype/pull/3986) + +**🐛 Bug fixes** + +- TrayPublisher: Fix missing argument [\#4019](https://github.com/pypeclub/OpenPype/pull/4019) +- General: Fix python 2 compatibility of ffmpeg and oiio tools discovery [\#4011](https://github.com/pypeclub/OpenPype/pull/4011) + +**🔀 Refactored code** + +- Maya: Removed unused imports [\#4008](https://github.com/pypeclub/OpenPype/pull/4008) +- Unreal: Fix import of moved function [\#4007](https://github.com/pypeclub/OpenPype/pull/4007) +- Houdini: Change import of RepairAction [\#4005](https://github.com/pypeclub/OpenPype/pull/4005) +- Nuke/Hiero: Refactor openpype.api imports [\#4000](https://github.com/pypeclub/OpenPype/pull/4000) +- TVPaint: Defined with HostBase [\#3994](https://github.com/pypeclub/OpenPype/pull/3994) + +**Merged pull requests:** + +- Unreal: Remove redundant Creator stub [\#4012](https://github.com/pypeclub/OpenPype/pull/4012) +- Unreal: add `uproject` extension to Unreal project template [\#4004](https://github.com/pypeclub/OpenPype/pull/4004) +- Unreal: fix order of includes [\#4002](https://github.com/pypeclub/OpenPype/pull/4002) +- Fusion: Implement backwards compatibility \(+/- Fusion 17.2\) [\#3958](https://github.com/pypeclub/OpenPype/pull/3958) + +## [3.14.4](https://github.com/pypeclub/OpenPype/tree/3.14.4) (2022-10-19) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.3...3.14.4) + +**🆕 New features** + +- Webpublisher: use max next published version number for all items in batch [\#3961](https://github.com/pypeclub/OpenPype/pull/3961) +- General: Control Thumbnail integration via explicit configuration profiles [\#3951](https://github.com/pypeclub/OpenPype/pull/3951) + +**🚀 Enhancements** + +- Publisher: Multiselection in card view [\#3993](https://github.com/pypeclub/OpenPype/pull/3993) +- TrayPublisher: Original Basename cause crash too early [\#3990](https://github.com/pypeclub/OpenPype/pull/3990) +- Tray Publisher: add `originalBasename` data to simple creators [\#3988](https://github.com/pypeclub/OpenPype/pull/3988) +- General: Custom paths to ffmpeg and OpenImageIO tools [\#3982](https://github.com/pypeclub/OpenPype/pull/3982) +- Integrate: Preserve existing subset group if instance does not set it for new version 
[\#3976](https://github.com/pypeclub/OpenPype/pull/3976) +- Publisher: Prepare publisher controller for remote publishing [\#3972](https://github.com/pypeclub/OpenPype/pull/3972) +- Maya: new style dataclasses in maya deadline submitter plugin [\#3968](https://github.com/pypeclub/OpenPype/pull/3968) +- Maya: Define preffered Qt bindings for Qt.py and qtpy [\#3963](https://github.com/pypeclub/OpenPype/pull/3963) +- Settings: Move imageio from project anatomy to project settings \[pypeclub\] [\#3959](https://github.com/pypeclub/OpenPype/pull/3959) +- TrayPublisher: Extract thumbnail for other families [\#3952](https://github.com/pypeclub/OpenPype/pull/3952) +- Publisher: Pass instance to subset name method on update [\#3949](https://github.com/pypeclub/OpenPype/pull/3949) +- General: Set root environments before DCC launch [\#3947](https://github.com/pypeclub/OpenPype/pull/3947) +- Refactor: changed legacy way to update database for Hero version integrate [\#3941](https://github.com/pypeclub/OpenPype/pull/3941) +- Maya: Moved plugin from global to maya [\#3939](https://github.com/pypeclub/OpenPype/pull/3939) +- Publisher: Create dialog is part of main window [\#3936](https://github.com/pypeclub/OpenPype/pull/3936) +- Fusion: Implement Alembic and FBX mesh loader [\#3927](https://github.com/pypeclub/OpenPype/pull/3927) + +**🐛 Bug fixes** + +- TrayPublisher: Disable sequences in batch mov creator [\#3996](https://github.com/pypeclub/OpenPype/pull/3996) +- Fix - tags might be missing on representation [\#3985](https://github.com/pypeclub/OpenPype/pull/3985) +- Resolve: Fix usage of functions from lib [\#3983](https://github.com/pypeclub/OpenPype/pull/3983) +- Maya: remove invalid prefix token for non-multipart outputs [\#3981](https://github.com/pypeclub/OpenPype/pull/3981) +- Ftrack: Fix schema cache for Python 2 [\#3980](https://github.com/pypeclub/OpenPype/pull/3980) +- Maya: add object to attr.s declaration [\#3973](https://github.com/pypeclub/OpenPype/pull/3973) +- Maya: Deadline OutputFilePath hack regression for Renderman [\#3950](https://github.com/pypeclub/OpenPype/pull/3950) +- Houdini: Fix validate workfile paths for non-parm file references [\#3948](https://github.com/pypeclub/OpenPype/pull/3948) +- Photoshop: missed sync published version of workfile with workfile [\#3946](https://github.com/pypeclub/OpenPype/pull/3946) +- Maya: Set default value for RenderSetupIncludeLights option [\#3944](https://github.com/pypeclub/OpenPype/pull/3944) +- Maya: fix regression of Renderman Deadline hack [\#3943](https://github.com/pypeclub/OpenPype/pull/3943) +- Kitsu: 2 fixes, nb\_frames and Shot type error [\#3940](https://github.com/pypeclub/OpenPype/pull/3940) +- Tray: Change order of attribute changes [\#3938](https://github.com/pypeclub/OpenPype/pull/3938) +- AttributeDefs: Fix crashing multivalue of files widget [\#3937](https://github.com/pypeclub/OpenPype/pull/3937) +- General: Fix links query on hero version [\#3900](https://github.com/pypeclub/OpenPype/pull/3900) +- Publisher: Files Drag n Drop cleanup [\#3888](https://github.com/pypeclub/OpenPype/pull/3888) + +**🔀 Refactored code** + +- Flame: Import lib functions from lib [\#3992](https://github.com/pypeclub/OpenPype/pull/3992) +- General: Fix deprecated warning in legacy creator [\#3978](https://github.com/pypeclub/OpenPype/pull/3978) +- Blender: Remove openpype api imports [\#3977](https://github.com/pypeclub/OpenPype/pull/3977) +- General: Use direct import of resources [\#3964](https://github.com/pypeclub/OpenPype/pull/3964) +- 
General: Direct settings imports [\#3934](https://github.com/pypeclub/OpenPype/pull/3934) +- General: import 'Logger' from 'openpype.lib' [\#3926](https://github.com/pypeclub/OpenPype/pull/3926) +- General: Remove deprecated functions from lib [\#3907](https://github.com/pypeclub/OpenPype/pull/3907) + +**Merged pull requests:** + +- Maya + Yeti: Load Yeti Cache fix frame number recognition [\#3942](https://github.com/pypeclub/OpenPype/pull/3942) +- Fusion: Implement callbacks to Fusion's event system thread [\#3928](https://github.com/pypeclub/OpenPype/pull/3928) +- Photoshop: create single frame image in Ftrack as review [\#3908](https://github.com/pypeclub/OpenPype/pull/3908) + +## [3.14.3](https://github.com/pypeclub/OpenPype/tree/3.14.3) (2022-10-03) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...3.14.3) + +**🚀 Enhancements** + +- Publisher: Enhancement proposals [\#3897](https://github.com/pypeclub/OpenPype/pull/3897) + +**🐛 Bug fixes** + +- Maya: Fix Render single camera validator [\#3929](https://github.com/pypeclub/OpenPype/pull/3929) +- Flame: loading multilayer exr to batch/reel is working [\#3901](https://github.com/pypeclub/OpenPype/pull/3901) +- Hiero: Fix inventory check on launch [\#3895](https://github.com/pypeclub/OpenPype/pull/3895) +- WebPublisher: Fix import after refactor [\#3891](https://github.com/pypeclub/OpenPype/pull/3891) + +**🔀 Refactored code** + +- Maya: Remove unused 'openpype.api' imports in plugins [\#3925](https://github.com/pypeclub/OpenPype/pull/3925) +- Resolve: Use new Extractor location [\#3918](https://github.com/pypeclub/OpenPype/pull/3918) +- Unreal: Use new Extractor location [\#3917](https://github.com/pypeclub/OpenPype/pull/3917) +- Flame: Use new Extractor location [\#3916](https://github.com/pypeclub/OpenPype/pull/3916) +- Houdini: Use new Extractor location [\#3894](https://github.com/pypeclub/OpenPype/pull/3894) +- Harmony: Use new Extractor location [\#3893](https://github.com/pypeclub/OpenPype/pull/3893) + +**Merged pull requests:** + +- Maya: Fix Scene Inventory possibly starting off-screen due to maya preferences [\#3923](https://github.com/pypeclub/OpenPype/pull/3923) + ## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) From 4651627041c1b634814724f8db13348dd21f93cd Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 28 Oct 2022 13:17:15 +0000 Subject: [PATCH 1865/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index bf36fc4b10..838f935069 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.6-nightly.1" +__version__ = "3.14.6-nightly.2" From 77d84f42fae88adb2a17d0a1baae12e2f85e7997 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 28 Oct 2022 13:21:14 +0000 Subject: [PATCH 1866/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 838f935069..cc78495ea2 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.6-nightly.2" +__version__ = "3.14.6-nightly.3" From d30f5e61cabf31e8a9075a55b3edb0b8b003f92a Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 28 Oct 2022 13:25:35 +0000 
Subject: [PATCH 1867/2550] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index cc78495ea2..e464d6787d 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.6-nightly.3" +__version__ = "3.14.6" From b04fc48fbc475f671c0876c6d05cfca79c6d95c0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 16:49:38 +0200 Subject: [PATCH 1868/2550] hiero: fix - skip audio in collect effects --- openpype/hosts/hiero/plugins/publish/collect_clip_effects.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py index 8d2ed9a9c2..9489b1c4fb 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -16,6 +16,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin): review_track_index = instance.context.data.get("reviewTrackIndex") item = instance.data["item"] + if "audio" in instance.data["family"]: + return + # frame range self.handle_start = instance.data["handleStart"] self.handle_end = instance.data["handleEnd"] From 21a3d2067e1732a14c3273a8ba6c2429ac8f7a19 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 16:50:30 +0200 Subject: [PATCH 1869/2550] hiero: load effects update - adding order - adding clip in out definition --- .../hosts/hiero/plugins/load/load_effects.py | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 40f8d66d0c..3158f29d93 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -41,11 +41,13 @@ class LoadEffects(load.LoaderPlugin): active_sequence, "LoadedEffects") # get main variables - version = context['version'] + version = context["version"] version_data = version.get("data", {}) vname = version.get("name", None) - namespace = namespace or context['asset']['name'] + namespace = namespace or context["asset"]["name"] object_name = "{}_{}".format(name, namespace) + clip_in = context["asset"]["data"]["clipIn"] + clip_out = context["asset"]["data"]["clipOut"] data_imprint = { "source": version_data["source"], @@ -69,7 +71,8 @@ class LoadEffects(load.LoaderPlugin): for stitem in phiero.flatten(active_track.subTrackItems()) } - for ef_name, ef_val in nodes_order.items(): + loaded = False + for index_order, (ef_name, ef_val) in enumerate(nodes_order.items()): pprint("_" * 100) pprint(ef_name) pprint(ef_val) @@ -77,8 +80,10 @@ class LoadEffects(load.LoaderPlugin): if new_name not in used_subtracks: effect_track_item = active_track.createEffect( effectType=ef_val["class"], - timelineIn=ef_val["timelineIn"], - timelineOut=ef_val["timelineOut"] + timelineIn=clip_in, + timelineOut=clip_out, + subTrackIndex=index_order + ) effect_track_item.setName(new_name) node = effect_track_item.node() @@ -90,6 +95,12 @@ class LoadEffects(load.LoaderPlugin): continue node[knob_name].setValue(knob_value) + # make sure containerisation will happen + loaded = True + + if not loaded: + return + self.containerise( active_track, name=name, @@ -98,7 +109,6 @@ class LoadEffects(load.LoaderPlugin): context=context, loader=self.__class__.__name__, data=data_imprint) - return 
def update(self, container, representation): """Update the Loader's path From 6ecbf122e46d08e0f04879b74be6c1ef7e909a66 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 29 Oct 2022 03:47:38 +0000 Subject: [PATCH 1870/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index e464d6787d..442c5f033b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.6" +__version__ = "3.14.7-nightly.1" From 49ebb5aa0118a8535250743400789efdf952ba90 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:05:19 +0100 Subject: [PATCH 1871/2550] hiero: abstraction for effect loader tag operations --- openpype/hosts/hiero/api/lib.py | 62 +++++++++++++++++-- openpype/hosts/hiero/api/pipeline.py | 18 ++++-- .../hosts/hiero/plugins/load/load_effects.py | 52 ---------------- 3 files changed, 70 insertions(+), 62 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index e5d35945af..9e626270f8 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -321,13 +321,67 @@ def get_track_item_pype_tag(track_item): return tag -def set_track_item_pype_tag(track_item, data=None): +def set_track_openpype_tag(track, data=None): """ - Set pype track item tag to input track_item. + Set openpype track tag to input track object. + + Attributes: + track (hiero.core.VideoTrack): hiero object + + Returns: + hiero.core.Tag + """ + data = data or {} + + # basic Tag's attribute + tag_data = { + "editable": "0", + "note": "OpenPype data container", + "icon": "openpype_icon.png", + "metadata": dict(data.items()) + } + # get available pype tag if any + _tag = get_track_openpype_tag(track) + + if _tag: + # it not tag then create one + tag = tags.update_tag(_tag, tag_data) + else: + # if pype tag available then update with input data + tag = tags.create_tag(self.pype_tag_name, tag_data) + # add it to the input track item + track.addTag(tag) + + return tag + + +def get_track_openpype_tag(track): + """ + Get pype track item tag created by creator or loader plugin. Attributes: trackItem (hiero.core.TrackItem): hiero object + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if tag.name() == self.pype_tag_name: + return tag + + +def set_trackitem_openpype_tag(track_item, data=None): + """ + Set openpype track tag to input track object. 
+ + Attributes: + track (hiero.core.VideoTrack): hiero object + Returns: hiero.core.Tag """ @@ -1083,10 +1137,10 @@ def check_inventory_versions(track_items=None): project_name = legacy_io.active_project() filter_result = filter_containers(containers, project_name) for container in filter_result.latest: - set_track_color(container["_track_item"], clip_color) + set_track_color(container["_item"], clip_color) for container in filter_result.outdated: - set_track_color(container["_track_item"], clip_color_last) + set_track_color(container["_item"], clip_color_last) def selection_changed_timeline(event): diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index ea61dc4785..1b78159e04 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -17,6 +17,7 @@ from openpype.pipeline import ( ) from openpype.tools.utils import host_tools from . import lib, menu, events +import hiero log = Logger.get_logger(__name__) @@ -131,11 +132,12 @@ def ls(): yield container -def parse_container(track_item, validate=True): +def parse_container(item, validate=True): """Return container data from track_item's pype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. validate (bool)[optional]: validating with avalon scheme Returns: @@ -143,7 +145,11 @@ def parse_container(track_item, validate=True): """ # convert tag metadata to normal keys names - data = lib.get_track_item_pype_data(track_item) + if type(item) == hiero.core.VideoTrack: + data = lib.set_track_openpype_data(item) + else: + data = lib.set_track_item_pype_data(item) + if ( not data or data.get("id") != "pyblish.avalon.container" @@ -160,15 +166,15 @@ def parse_container(track_item, validate=True): required = ['schema', 'id', 'name', 'namespace', 'loader', 'representation'] - if not all(key in data for key in required): + if any(key not in data for key in required): return container = {key: data[key] for key in required} - container["objectName"] = track_item.name() + container["objectName"] = item.name() # Store reference to the node object - container["_track_item"] = track_item + container["_item"] = item return container diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 3158f29d93..947655b4c8 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -215,55 +215,3 @@ class LoadEffects(load.LoaderPlugin): self.log.debug("_ data_imprint: {}".format(data_imprint)) self.set_track_openpype_tag(track, data_imprint) - - def set_track_openpype_tag(self, track, data=None): - """ - Set pype track item tag to input track_item. 
- - Attributes: - trackItem (hiero.core.TrackItem): hiero object - - Returns: - hiero.core.Tag - """ - data = data or {} - - # basic Tag's attribute - tag_data = { - "editable": "0", - "note": "OpenPype data container", - "icon": "openpype_icon.png", - "metadata": dict(data.items()) - } - # get available pype tag if any - _tag = self.get_track_openpype_tag(track) - - if _tag: - # it not tag then create one - tag = tags.update_tag(_tag, tag_data) - else: - # if pype tag available then update with input data - tag = tags.create_tag(phiero.pype_tag_name, tag_data) - # add it to the input track item - track.addTag(tag) - - return tag - - def get_track_openpype_tag(self, track): - """ - Get pype track item tag created by creator or loader plugin. - - Attributes: - trackItem (hiero.core.TrackItem): hiero object - - Returns: - hiero.core.Tag: hierarchy, orig clip attributes - """ - # get all tags from track item - _tags = track.tags() - if not _tags: - return None - for tag in _tags: - # return only correct tag defined by global name - if tag.name() == phiero.pype_tag_name: - return tag From dcf4688e1c8802510e04ac95f74c0968500a8c52 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:16:47 +0100 Subject: [PATCH 1872/2550] hiero: renaming functions, with backward compatibility --- openpype/hosts/hiero/api/__init__.py | 12 ++-- openpype/hosts/hiero/api/lib.py | 69 +++++++++++-------- openpype/hosts/hiero/api/pipeline.py | 10 +-- .../plugins/publish/precollect_instances.py | 2 +- 4 files changed, 54 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index 781f846bbe..d0fb24b654 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,9 +30,9 @@ from .lib import ( get_timeline_selection, get_current_track, get_track_item_tags, - get_track_item_pype_tag, - set_track_item_pype_tag, - get_track_item_pype_data, + get_trackitem_openpype_tag, + set_trackitem_openpype_tag, + get_trackitem_openpype_data, set_publish_attribute, get_publish_attribute, imprint, @@ -85,9 +85,9 @@ __all__ = [ "get_timeline_selection", "get_current_track", "get_track_item_tags", - "get_track_item_pype_tag", - "set_track_item_pype_tag", - "get_track_item_pype_data", + "get_trackitem_openpype_tag", + "set_trackitem_openpype_tag", + "get_trackitem_openpype_data", "set_publish_attribute", "get_publish_attribute", "imprint", diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 9e626270f8..b0da4ce7b3 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -301,26 +301,6 @@ def get_track_item_tags(track_item): return returning_tag_data -def get_track_item_pype_tag(track_item): - """ - Get pype track item tag created by creator or loader plugin. - - Attributes: - trackItem (hiero.core.TrackItem): hiero object - - Returns: - hiero.core.Tag: hierarchy, orig clip attributes - """ - # get all tags from track item - _tags = track_item.tags() - if not _tags: - return None - for tag in _tags: - # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: - return tag - - def set_track_openpype_tag(track, data=None): """ Set openpype track tag to input track object. 
@@ -375,6 +355,41 @@ def get_track_openpype_tag(track): return tag +def get_track_item_pype_tag(track_item): + # backward compatibility alias + return get_trackitem_openpype_tag(track_item) + + +def set_track_item_pype_tag(track_item, data=None): + # backward compatibility alias + return set_trackitem_openpype_tag(track_item, data) + + +def get_track_item_pype_data(track_item): + # backward compatibility alias + return get_trackitem_openpype_data(track_item) + + +def get_trackitem_openpype_tag(track_item): + """ + Get pype track item tag created by creator or loader plugin. + + Attributes: + trackItem (hiero.core.TrackItem): hiero object + + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track_item.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if tag.name() == self.pype_tag_name: + return tag + + def set_trackitem_openpype_tag(track_item, data=None): """ Set openpype track tag to input track object. @@ -395,7 +410,7 @@ def set_trackitem_openpype_tag(track_item, data=None): "metadata": dict(data.items()) } # get available pype tag if any - _tag = get_track_item_pype_tag(track_item) + _tag = get_trackitem_openpype_tag(track_item) if _tag: # it not tag then create one @@ -409,7 +424,7 @@ def set_trackitem_openpype_tag(track_item, data=None): return tag -def get_track_item_pype_data(track_item): +def get_trackitem_openpype_data(track_item): """ Get track item's pype tag data. @@ -421,7 +436,7 @@ def get_track_item_pype_data(track_item): """ data = {} # get pype data tag from track item - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) if not tag: return None @@ -474,7 +489,7 @@ def imprint(track_item, data=None): """ data = data or {} - tag = set_track_item_pype_tag(track_item, data) + tag = set_trackitem_openpype_tag(track_item, data) # add publish attribute set_publish_attribute(tag, True) @@ -1084,7 +1099,7 @@ def sync_clip_name_to_data_asset(track_items_list): # get name and data ti_name = track_item.name() - data = get_track_item_pype_data(track_item) + data = get_trackitem_openpype_data(track_item) # ignore if no data on the clip or not publish instance if not data: @@ -1096,10 +1111,10 @@ def sync_clip_name_to_data_asset(track_items_list): if data["asset"] != ti_name: data["asset"] = ti_name # remove the original tag - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) track_item.removeTag(tag) # create new tag with updated data - set_track_item_pype_tag(track_item, data) + set_trackitem_openpype_tag(track_item, data) print("asset was changed in clip: {}".format(ti_name)) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 1b78159e04..0c11f7072f 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -107,7 +107,7 @@ def containerise(track_item, data_imprint.update({k: v}) log.debug("_ data_imprint: {}".format(data_imprint)) - lib.set_track_item_pype_tag(track_item, data_imprint) + lib.set_trackitem_openpype_tag(track_item, data_imprint) return track_item @@ -192,7 +192,7 @@ def update_container(track_item, data=None): """ data = data or dict() - container = lib.get_track_item_pype_data(track_item) + container = lib.get_trackitem_openpype_data(track_item) for _key, _value in container.items(): try: @@ -201,7 +201,7 @@ def update_container(track_item, data=None): pass log.info("Updating container: 
`{}`".format(track_item.name())) - return bool(lib.set_track_item_pype_tag(track_item, container)) + return bool(lib.set_trackitem_openpype_tag(track_item, container)) def launch_workfiles_app(*args): @@ -278,11 +278,11 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): instance, old_value, new_value)) from openpype.hosts.hiero.api import ( - get_track_item_pype_tag, + get_trackitem_openpype_tag, set_publish_attribute ) # Whether instances should be passthrough based on new value track_item = instance.data["item"] - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) set_publish_attribute(tag, new_value) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 1fc4b1f696..bb02919b35 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -48,7 +48,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): self.log.debug("clip_name: {}".format(clip_name)) # get openpype tag data - tag_data = phiero.get_track_item_pype_data(track_item) + tag_data = phiero.get_trackitem_openpype_data(track_item) self.log.debug("__ tag_data: {}".format(pformat(tag_data))) if not tag_data: From a9ab5baac9903c5a307d737201b12e14ecdbbf85 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:26:48 +0100 Subject: [PATCH 1873/2550] hiero: improving bckw compatibility after rename --- openpype/hosts/hiero/api/__init__.py | 7 +++ openpype/hosts/hiero/api/lib.py | 75 +++++++++++++++++++++------- 2 files changed, 65 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index d0fb24b654..f457d791f5 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,6 +30,9 @@ from .lib import ( get_timeline_selection, get_current_track, get_track_item_tags, + get_track_item_pype_tag, + set_track_item_pype_tag, + get_track_item_pype_data, get_trackitem_openpype_tag, set_trackitem_openpype_tag, get_trackitem_openpype_data, @@ -99,6 +102,10 @@ __all__ = [ "apply_colorspace_project", "apply_colorspace_clips", "get_sequence_pattern_and_padding", + # depricated + "get_track_item_pype_tag", + "set_track_item_pype_tag", + "get_track_item_pype_data", # plugins "CreatorWidget", diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index b0da4ce7b3..f4b80aea4e 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -7,11 +7,13 @@ import os import re import sys import platform +import functools +import warnings import ast import shutil import hiero -from Qt import QtWidgets +from Qt import QtWidgets, QtCore, QtXml from openpype.client import get_project from openpype.settings import get_project_settings @@ -20,15 +22,51 @@ from openpype.pipeline.load import filter_containers from openpype.lib import Logger from . import tags -try: - from PySide.QtCore import QFile, QTextStream - from PySide.QtXml import QDomDocument -except ImportError: - from PySide2.QtCore import QFile, QTextStream - from PySide2.QtXml import QDomDocument -# from opentimelineio import opentime -# from pprint import pformat +class DeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. 
+ """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + log = Logger.get_logger(__name__) @@ -355,16 +393,19 @@ def get_track_openpype_tag(track): return tag +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") def get_track_item_pype_tag(track_item): # backward compatibility alias return get_trackitem_openpype_tag(track_item) +@deprecated("openpype.hosts.hiero.api.lib.set_trackitem_openpype_tag") def set_track_item_pype_tag(track_item, data=None): # backward compatibility alias return set_trackitem_openpype_tag(track_item, data) +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_data") def get_track_item_pype_data(track_item): # backward compatibility alias return get_trackitem_openpype_data(track_item) @@ -901,22 +942,22 @@ def set_selected_track_items(track_items_list, sequence=None): def _read_doc_from_path(path): - # reading QDomDocument from HROX path - hrox_file = QFile(path) - if not hrox_file.open(QFile.ReadOnly): + # reading QtXml.QDomDocument from HROX path + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.ReadOnly): raise RuntimeError("Failed to open file for reading") - doc = QDomDocument() + doc = QtXml.QDomDocument() doc.setContent(hrox_file) hrox_file.close() return doc def _write_doc_to_path(doc, path): - # write QDomDocument to path as HROX - hrox_file = QFile(path) - if not hrox_file.open(QFile.WriteOnly): + # write QtXml.QDomDocument to path as HROX + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.WriteOnly): raise RuntimeError("Failed to open file for writing") - stream = QTextStream(hrox_file) + stream = QtCore.QTextStream(hrox_file) doc.save(stream, 1) hrox_file.close() From 04d1016dfa71d5630e7b920371d3b4ea42e2fcff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:43:34 +0100 Subject: [PATCH 1874/2550] hiero: update api --- openpype/hosts/hiero/api/__init__.py | 6 ++++ openpype/hosts/hiero/api/lib.py | 51 ++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index f457d791f5..1fa40c9f74 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,6 +30,9 @@ from .lib import ( get_timeline_selection, get_current_track, get_track_item_tags, + get_track_openpype_tag, + set_track_openpype_tag, + get_track_openpype_data, get_track_item_pype_tag, set_track_item_pype_tag, get_track_item_pype_data, @@ -88,6 +91,9 @@ __all__ = [ "get_timeline_selection", "get_current_track", "get_track_item_tags", + "get_track_openpype_tag", + "set_track_openpype_tag", + "get_track_openpype_data", "get_trackitem_openpype_tag", "set_trackitem_openpype_tag", "get_trackitem_openpype_data", diff --git 
a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index f4b80aea4e..3c1d500e46 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -393,6 +393,57 @@ def get_track_openpype_tag(track): return tag +def get_track_openpype_data(track): + """ + Get track's openpype tag data. + + Attributes: + trackItem (hiero.core.VideoTrack): hiero object + + Returns: + dict: data found on pype tag + """ + return_data = {} + # get pype data tag from track item + tag = get_track_openpype_tag(track) + + if not tag: + return None + + # get tag metadata attribute + tag_data = deepcopy(dict(tag.metadata())) + + for obj_name, obj_data in tag_data.items(): + return_data[obj_name] = {} + + # convert tag metadata to normal keys names and values to correct types + for k, v in obj_data.items(): + + key = k.replace("tag.", "") + + try: + # capture exceptions which are related to strings only + if re.match(r"^[\d]+$", v): + value = int(v) + elif re.match(r"^True$", v): + value = True + elif re.match(r"^False$", v): + value = False + elif re.match(r"^None$", v): + value = None + elif re.match(r"^[\w\d_]+$", v): + value = v + else: + value = ast.literal_eval(v) + except (ValueError, SyntaxError) as msg: + log.warning(msg) + value = v + + return_data[obj_name][key] = value + + return return_data + + @deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") def get_track_item_pype_tag(track_item): # backward compatibility alias From 00c2ac36c5c90181db330fba8f10ca6b094c96db Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 11:50:11 +0100 Subject: [PATCH 1875/2550] Fix enable state of "no registered families" item --- openpype/tools/creator/model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/creator/model.py b/openpype/tools/creator/model.py index d3d60b96f2..307993103b 100644 --- a/openpype/tools/creator/model.py +++ b/openpype/tools/creator/model.py @@ -36,7 +36,7 @@ class CreatorsModel(QtGui.QStandardItemModel): if not items: item = QtGui.QStandardItem("No registered families") item.setEnabled(False) - item.setData(QtCore.Qt.ItemIsEnabled, False) + item.setData(False, QtCore.Qt.ItemIsEnabled) items.append(item) self.invisibleRootItem().appendRows(items) From 66571cc8cded1b6329f839cd9425da2631531a67 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 12:16:08 +0100 Subject: [PATCH 1876/2550] hiero: update parse_container and ls to new functionality accepting track containers --- openpype/hosts/hiero/api/pipeline.py | 85 +++++++++++++++++----------- 1 file changed, 51 insertions(+), 34 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 0c11f7072f..1ce8e4e1c5 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -124,11 +124,20 @@ def ls(): """ # get all track items from current timeline - all_track_items = lib.get_track_items() + all_items = lib.get_track_items() - for track_item in all_track_items: - container = parse_container(track_item) - if container: + # append all video tracks + for track in lib.get_current_sequence(): + if type(track) != hiero.core.VideoTrack: + continue + all_items.append(track) + + for item in all_items: + container = parse_container(item) + if isinstance(container, list): + for _c in container: + yield _c + elif container: yield container @@ -144,39 +153,47 @@ def parse_container(item, validate=True): dict: The container schema data for input containerized track item. 
""" + def data_to_container(item, data): + if ( + not data + or data.get("id") != "pyblish.avalon.container" + ): + return + + if validate and data and data.get("schema"): + schema.validate(data) + + if not isinstance(data, dict): + return + + # If not all required data return the empty container + required = ['schema', 'id', 'name', + 'namespace', 'loader', 'representation'] + + if any(key not in data for key in required): + return + + container = {key: data[key] for key in required} + + container["objectName"] = item.name() + + # Store reference to the node object + container["_item"] = item + + return container + # convert tag metadata to normal keys names if type(item) == hiero.core.VideoTrack: - data = lib.set_track_openpype_data(item) + return_list = [] + _data = lib.get_track_openpype_data(item) + # convert the data to list and validate them + for _, obj_data in _data.items(): + cotnainer = data_to_container(item, obj_data) + return_list.append(cotnainer) + return return_list else: - data = lib.set_track_item_pype_data(item) - - if ( - not data - or data.get("id") != "pyblish.avalon.container" - ): - return - - if validate and data and data.get("schema"): - schema.validate(data) - - if not isinstance(data, dict): - return - - # If not all required data return the empty container - required = ['schema', 'id', 'name', - 'namespace', 'loader', 'representation'] - - if any(key not in data for key in required): - return - - container = {key: data[key] for key in required} - - container["objectName"] = item.name() - - # Store reference to the node object - container["_item"] = item - - return container + _data = lib.get_track_item_pype_data(item) + return data_to_container(item, _data) def update_container(track_item, data=None): From 25b61d3fdf657f38db35c741456d680cb1c24b59 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 12:17:34 +0100 Subject: [PATCH 1877/2550] hiero: refactor plugin to new abstracted functionality --- openpype/hosts/hiero/plugins/load/load_effects.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 947655b4c8..fa78684838 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -8,7 +8,6 @@ from openpype.pipeline import ( load ) from openpype.hosts.hiero import api as phiero -from openpype.hosts.hiero.api import tags class LoadEffects(load.LoaderPlugin): @@ -53,6 +52,7 @@ class LoadEffects(load.LoaderPlugin): "source": version_data["source"], "version": vname, "author": version_data["author"], + "children_names": [] } # getting file path @@ -95,6 +95,8 @@ class LoadEffects(load.LoaderPlugin): continue node[knob_name].setValue(knob_value) + # register all loaded children + data_imprint["children_names"].append(new_name) # make sure containerisation will happen loaded = True @@ -187,11 +189,13 @@ class LoadEffects(load.LoaderPlugin): for loaded assets. Arguments: - track_item (hiero.core.TrackItem): object to imprint as container + track (hiero.core.VideoTrack): object to imprint as container name (str): Name of resulting assembly namespace (str): Namespace under which to host container + object_name (str): name of container context (dict): Asset information - loader (str, optional): Name of node used to produce this container. + loader (str, optional): Name of node used to produce this + container. 
Returns: track_item (hiero.core.TrackItem): containerised object @@ -214,4 +218,4 @@ class LoadEffects(load.LoaderPlugin): data_imprint[object_name].update({k: v}) self.log.debug("_ data_imprint: {}".format(data_imprint)) - self.set_track_openpype_tag(track, data_imprint) + phiero.set_track_openpype_tag(track, data_imprint) From 3d55d4d9554c5b844da7697e7f1efea6f3ffa303 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 31 Oct 2022 19:36:44 +0800 Subject: [PATCH 1878/2550] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 115 ++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 openpype/hosts/maya/plugins/load/load_abc_to_standin.py diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py new file mode 100644 index 0000000000..defed4bd73 --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -0,0 +1,115 @@ +import os +import clique + +from openpype.pipeline import ( + load, + get_representation_path +) +from openpype.settings import get_project_settings + + +class AlembicStandinLoader(load.LoaderPlugin): + """Load Alembic as Arnold Standin""" + + families = ["model", "pointcache"] + representations = ["abc"] + + label = "Import Alembic as Standin" + order = -5 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, options): + + import maya.cmds as cmds + import pymel.core as pm + import mtoa.ui.arnoldmenu + from openpype.hosts.maya.api.pipeline import containerise + from openpype.hosts.maya.api.lib import unique_namespace + + version = context["version"] + version_data = version.get("data", {}) + + self.log.info("version_data: {}\n".format(version_data)) + + frameStart = version_data.get("frameStart", None) + + asset = context["asset"]["name"] + namespace = namespace or unique_namespace( + asset + "_", + prefix="_" if asset[0].isdigit() else "", + suffix="_", + ) + + #Root group + label = "{}:{}".format(namespace, name) + root = pm.group(name=label, empty=True) + + settings = get_project_settings(os.environ['AVALON_PROJECT']) + colors = settings["maya"]["load"]["colors"] + + c = colors.get('ass') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) + + transform_name = label + "_ABC" + + standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn()) + standin = standinShape.getParent() + standin.rename(transform_name) + + pm.parent(standin, root) + + # Set the standin filepath + standinShape.dso.set(self.fname) + if frameStart is not None: + standinShape.useFrameExtension.set(1) + + nodes = [root, standin] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, representation): + + import pymel.core as pm + + path = get_representation_path(representation) + + # Update the standin + standins = list() + members = pm.sets(container['objectName'], query=True) + for member in members: + shape = member.getShape() + if (shape and shape.type() == "aiStandIn"): + standins.append(shape) + + for standin in standins: + standin.dso.set(path) + standin.useFrameExtension.set(1) + + container = pm.PyNode(container["objectName"]) + container.representation.set(str(representation["_id"])) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + import maya.cmds as cmds + 
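        # Query the set members, unlock them, then delete them together with the container set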
members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass From 7f88049d2a38e46fb933cbf23859529d46976915 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 31 Oct 2022 19:56:10 +0800 Subject: [PATCH 1879/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index defed4bd73..f39aa56650 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -1,5 +1,4 @@ import os -import clique from openpype.pipeline import ( load, @@ -41,7 +40,7 @@ class AlembicStandinLoader(load.LoaderPlugin): suffix="_", ) - #Root group + # Root group label = "{}:{}".format(namespace, name) root = pm.group(name=label, empty=True) From ac2f268575327c2e82d1cc9ca0a231caf54ea322 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Mon, 31 Oct 2022 13:28:50 +0100 Subject: [PATCH 1880/2550] Feature: Auto download last published workfile as first workfile --- .../hooks/pre_copy_last_published_workfile.py | 124 ++++++++++++++++++ openpype/modules/sync_server/sync_server.py | 9 +- 2 files changed, 132 insertions(+), 1 deletion(-) create mode 100644 openpype/hooks/pre_copy_last_published_workfile.py diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py new file mode 100644 index 0000000000..004f9d25e7 --- /dev/null +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -0,0 +1,124 @@ +import gc +import os +import shutil +from openpype.client.entities import ( + get_last_version_by_subset_id, + get_representations, + get_subsets, +) +from openpype.lib import PreLaunchHook +from openpype.modules.base import ModulesManager +from openpype.pipeline.load.utils import get_representation_path + + +class CopyLastPublishedWorkfile(PreLaunchHook): + """Copy last published workfile as first workfile. + + Prelaunch hook works only if last workfile leads to not existing file. + - That is possible only if it's first version. + """ + + # Before `AddLastWorkfileToLaunchArgs` + order = -1 + app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"] + + def execute(self): + """Check if local workfile doesn't exist, else copy it. + + 1- Check if setting for this feature is enabled + 2- Check if workfile in work area doesn't exist + 3- Check if published workfile exists and is copied locally in publish + + Returns: + None: This is a void method. + """ + # TODO setting + self.log.info("Trying to fetch last published workfile...") + + last_workfile = self.data.get("last_workfile_path") + if os.path.exists(last_workfile): + self.log.debug( + "Last workfile exists. 
Skipping {} process.".format( + self.__class__.__name__ + ) + ) + return + + project_name = self.data["project_name"] + task_name = self.data["task_name"] + + project_doc = self.data.get("project_doc") + asset_doc = self.data.get("asset_doc") + anatomy = self.data.get("anatomy") + if project_doc and asset_doc: + # Get subset id + subset_id = next( + ( + subset["_id"] + for subset in get_subsets( + project_name, + asset_ids=[asset_doc["_id"]], + fields=["_id", "data.family"], + ) + if subset["data"]["family"] == "workfile" + ), + None, + ) + if not subset_id: + return + + # Get workfile representation + workfile_representation = next( + ( + representation + for representation in get_representations( + project_name, + version_ids=[ + get_last_version_by_subset_id( + project_name, subset_id, fields=["_id"] + )["_id"] + ], + ) + if representation["context"]["task"]["name"] == task_name + ), + None, + ) + + if workfile_representation: # TODO add setting + # Get sync server from Tray, which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) + + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() + + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() + + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) + + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 8b11055e65..def9e6cfd8 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -236,7 +236,11 @@ class SyncServerThread(threading.Thread): """ def __init__(self, module): self.log = Logger.get_logger(self.__class__.__name__) - super(SyncServerThread, self).__init__() + + # Event to trigger files have been processed + self.files_processed = threading.Event() + + super(SyncServerThread, self).__init__(args=(self.files_processed,)) self.module = module self.loop = None self.is_running = False @@ -396,6 +400,8 @@ class SyncServerThread(threading.Thread): representation, site, error) + # Trigger files are processed + self.files_processed.set() duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -454,6 +460,7 @@ class SyncServerThread(threading.Thread): async def run_timer(self, delay): """Wait for 'delay' seconds to start next loop""" + self.files_processed.clear() await asyncio.sleep(delay) def reset_timer(self): From 7647176173d3e4d28290650761370efeede53505 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 13:37:07 +0100 Subject: [PATCH 1881/2550] hide thumbnail widget if drop is disabled --- .../tools/publisher/widgets/create_widget.py | 2 +- .../publisher/widgets/thumbnail_widget.py | 62 ------------------- openpype/tools/publisher/widgets/widgets.py | 4 +- 3 files changed, 3 insertions(+), 65 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 
a57a8791a8..4540e70eb8 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -558,7 +558,7 @@ class CreateWidget(QtWidgets.QWidget): self._set_context_enabled(creator_item.create_allow_context_change) self._refresh_asset() - self._thumbnail_widget.set_drop_enabled( + self._thumbnail_widget.setVisible( creator_item.create_allow_thumbnail ) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 48f40f7b5b..53152f488f 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -55,8 +55,6 @@ class ThumbnailWidget(QtWidgets.QWidget): self.overlay_color = overlay_color self._default_pix = default_pix - self._drop_enabled = True - self._current_pixes = None self._cached_pix = None @@ -87,10 +85,6 @@ class ThumbnailWidget(QtWidgets.QWidget): return None def dragEnterEvent(self, event): - if not self._drop_enabled: - event.ignore() - return - filepath = self._get_filepath_from_event(event) if filepath: event.setDropAction(QtCore.Qt.CopyAction) @@ -100,9 +94,6 @@ class ThumbnailWidget(QtWidgets.QWidget): event.accept() def dropEvent(self, event): - if not self._drop_enabled: - return - filepath = self._get_filepath_from_event(event) if not filepath: return @@ -116,13 +107,6 @@ class ThumbnailWidget(QtWidgets.QWidget): CardMessageTypes.error ) - def set_drop_enabled(self, enabled): - if self._drop_enabled is enabled: - return - self._drop_enabled = enabled - self._cached_pix = None - self.repaint() - def set_adapted_to_hint(self, enabled): self._adapted_to_size = enabled if self._width is not None: @@ -172,10 +156,6 @@ class ThumbnailWidget(QtWidgets.QWidget): self.repaint() def _get_current_pixes(self): - if not self._drop_enabled: - # TODO different image for disabled drop - return [self._default_pix] - if self._current_pixes is None: return [self._default_pix] return self._current_pixes @@ -273,53 +253,11 @@ class ThumbnailWidget(QtWidgets.QWidget): y_offset = (height_offset_part * idx) + pix_y_offset final_painter.drawPixmap(x_offset, y_offset, pix) - if not self._drop_enabled: - overlay = self._get_drop_disabled_overlay(rect_width, rect_height) - final_painter.drawPixmap(0, 0, overlay) final_painter.end() self._cached_pix = final_pix - def _get_drop_disabled_overlay(self, width, height): - min_size = min(width, height) - circle_size = int(min_size * 0.8) - pen_width = int(circle_size * 0.1) - if pen_width < 1: - pen_width = 1 - - x_offset = int((width - circle_size) / 2) - y_offset = int((height - circle_size) / 2) - half_size = int(circle_size / 2) - angle = math.radians(45) - line_offset_p = QtCore.QPoint( - half_size * math.cos(angle), - half_size * math.sin(angle) - ) - overlay_pix = QtGui.QPixmap(width, height) - overlay_pix.fill(QtCore.Qt.transparent) - - painter = QtGui.QPainter() - painter.begin(overlay_pix) - painter.setRenderHints( - painter.Antialiasing - | painter.SmoothPixmapTransform - | painter.HighQualityAntialiasing - ) - painter.setBrush(QtCore.Qt.transparent) - pen = QtGui.QPen(self.overlay_color) - pen.setWidth(pen_width) - painter.setPen(pen) - rect = QtCore.QRect(x_offset, y_offset, circle_size, circle_size) - painter.drawEllipse(rect) - painter.drawLine( - rect.center() - line_offset_p, - rect.center() + line_offset_p - ) - painter.end() - - return overlay_pix - def _get_pix_offset_size(self, width, height, image_count): if image_count == 1: return 0, 0 diff --git 
a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index fb9bd761f4..9af9595a97 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1674,7 +1674,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): instance_ids.append(None) if not instance_ids: - self._thumbnail_widget.set_drop_enabled(False) + self._thumbnail_widget.setVisible(False) self._thumbnail_widget.set_current_thumbnails(None) return @@ -1687,5 +1687,5 @@ class SubsetAttributesWidget(QtWidgets.QWidget): if path: thumbnail_paths.append(path) - self._thumbnail_widget.set_drop_enabled(True) + self._thumbnail_widget.setVisible(True) self._thumbnail_widget.set_current_thumbnails(thumbnail_paths) From 0a7c20398cc985f050a8399fb6cced87c98f3b78 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 13:41:13 +0100 Subject: [PATCH 1882/2550] draw dashes if user can drop thumbnails --- .../publisher/widgets/thumbnail_widget.py | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 53152f488f..808210a673 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -155,11 +155,6 @@ class ThumbnailWidget(QtWidgets.QWidget): self._cached_pix = None self.repaint() - def _get_current_pixes(self): - if self._current_pixes is None: - return [self._default_pix] - return self._current_pixes - def _cache_pix(self): rect = self.rect() rect_width = rect.width() @@ -180,7 +175,13 @@ class ThumbnailWidget(QtWidgets.QWidget): expected_width = rect_width pix_y_offset = (rect_height - expected_height) / 2 - pixes_to_draw = self._get_current_pixes() + if self._current_pixes is None: + draw_dashes = True + pixes_to_draw = [self._default_pix] + else: + draw_dashes = False + pixes_to_draw = self._current_pixes + max_pix = 3 if len(pixes_to_draw) > max_pix: pixes_to_draw = pixes_to_draw[:-max_pix] @@ -253,6 +254,15 @@ class ThumbnailWidget(QtWidgets.QWidget): y_offset = (height_offset_part * idx) + pix_y_offset final_painter.drawPixmap(x_offset, y_offset, pix) + # Draw drop enabled dashes + if draw_dashes: + pen = QtGui.QPen() + pen.setWidth(1) + pen.setBrush(QtCore.Qt.darkGray) + pen.setStyle(QtCore.Qt.DashLine) + final_painter.setPen(pen) + final_painter.setBrush(QtCore.Qt.transparent) + final_painter.drawRect(rect) final_painter.end() From 1c604ee1be458d4b65bbc41999396902572d9afc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 13:41:39 +0100 Subject: [PATCH 1883/2550] define max thumbnails in class variable --- openpype/tools/publisher/widgets/thumbnail_widget.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 808210a673..e119d640b4 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -33,6 +33,7 @@ class ThumbnailWidget(QtWidgets.QWidget): height_ratio = 2.0 border_width = 1 offset_sep = 4 + max_thumbnails = 3 def __init__(self, controller, parent): # Missing implementation for thumbnail @@ -182,9 +183,8 @@ class ThumbnailWidget(QtWidgets.QWidget): draw_dashes = False pixes_to_draw = self._current_pixes - max_pix = 3 - if len(pixes_to_draw) > max_pix: - pixes_to_draw = pixes_to_draw[:-max_pix] + if 
len(pixes_to_draw) > self.max_thumbnails: + pixes_to_draw = pixes_to_draw[:-self.max_thumbnails] pixes_len = len(pixes_to_draw) width_offset, height_offset = self._get_pix_offset_size( From b12bb8723040d3c76cff92f6fd1c7b1bef9a5549 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 13:57:03 +0100 Subject: [PATCH 1884/2550] hiero: refactor update container function --- openpype/hosts/hiero/api/pipeline.py | 41 ++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 1ce8e4e1c5..1e4158261c 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -196,29 +196,46 @@ def parse_container(item, validate=True): return data_to_container(item, _data) -def update_container(track_item, data=None): - """Update container data to input track_item's pype tag. +def update_container(item, data=None): + """Update container data to input track_item or track's + openpype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. data (dict)[optional]: dictionery with data to be updated Returns: bool: True if container was updated correctly """ - data = data or dict() + def update_container_data(container, data): + for key in container: + try: + container[key] = data[key] + except KeyError: + pass + return container - container = lib.get_trackitem_openpype_data(track_item) + data = data or {} - for _key, _value in container.items(): - try: - container[_key] = data[_key] - except KeyError: - pass + if type(item) == hiero.core.VideoTrack: + object_name = "{}_{}".format( + data["name"], data["namespace"]) + containers = lib.get_track_openpype_data(item) + for obj_name, container in containers.items(): + if object_name != obj_name: + continue + updated_container = update_container_data(container, data) + containers.update(updated_container) - log.info("Updating container: `{}`".format(track_item.name())) - return bool(lib.set_trackitem_openpype_tag(track_item, container)) + return bool(lib.set_track_openpype_tag(item, containers)) + else: + container = lib.get_trackitem_openpype_data(item) + updated_container = update_container_data(container, data) + + log.info("Updating container: `{}`".format(item.name())) + return bool(lib.set_trackitem_openpype_tag(item, updated_container)) def launch_workfiles_app(*args): From 4ab8fd1a822d6c2e9f60d3eb4933eee61e381208 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 14:01:44 +0100 Subject: [PATCH 1885/2550] hiero: updating doc strings --- openpype/hosts/hiero/api/pipeline.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 1e4158261c..e9e16ef5b1 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -220,13 +220,19 @@ def update_container(item, data=None): data = data or {} if type(item) == hiero.core.VideoTrack: + # form object data for test object_name = "{}_{}".format( data["name"], data["namespace"]) + + # get all available containers containers = lib.get_track_openpype_data(item) for obj_name, container in containers.items(): + # ignore all which are not the same object if object_name != obj_name: continue + # update data in container updated_container = update_container_data(container, data) + # merge updated container back to containers 
containers.update(updated_container) return bool(lib.set_track_openpype_tag(item, containers)) From 72e729f59f5276ecb752fc59d01921b7815656de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 15:07:57 +0100 Subject: [PATCH 1886/2550] fix mapping on multiselection --- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 9af9595a97..c7b6965991 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1653,7 +1653,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): mapping[instance_ids[0]] = path else: - for instance_id in range(len(instance_ids)): + for instance_id in instance_ids: root = os.path.dirname(path) ext = os.path.splitext(path)[-1] dst_path = os.path.join(root, str(uuid.uuid4()) + ext) From c9d255ce59719ed2282f6528c1bd32c8bbbc5df2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 15:18:07 +0100 Subject: [PATCH 1887/2550] separated thumbnail painter widget and thumbnail widget to be able handle buttons overlay --- .../tools/publisher/widgets/create_widget.py | 4 + .../publisher/widgets/thumbnail_widget.py | 288 +++++++++++------- openpype/tools/publisher/widgets/widgets.py | 18 ++ 3 files changed, 198 insertions(+), 112 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 4540e70eb8..7bdac46273 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -272,6 +272,7 @@ class CreateWidget(QtWidgets.QWidget): ) tasks_widget.task_changed.connect(self._on_task_change) thumbnail_widget.thumbnail_created.connect(self._on_thumbnail_create) + thumbnail_widget.thumbnail_cleared.connect(self._on_thumbnail_clear) controller.event_system.add_callback( "plugins.refresh.finished", self._on_plugins_refresh @@ -504,6 +505,9 @@ class CreateWidget(QtWidgets.QWidget): self._last_thumbnail_path = thumbnail_path self._thumbnail_widget.set_current_thumbnails([thumbnail_path]) + def _on_thumbnail_clear(self): + self._last_thumbnail_path = None + def _on_current_session_context_request(self): self._assets_widget.set_current_session_asset() task_name = self.current_task_name diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index e119d640b4..b45d61623e 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -1,6 +1,5 @@ import os import uuid -import math from Qt import QtWidgets, QtCore, QtGui @@ -24,22 +23,15 @@ from openpype.tools.publisher.control import CardMessageTypes from .icons import get_image -class ThumbnailWidget(QtWidgets.QWidget): - """Instance thumbnail widget.""" - - thumbnail_created = QtCore.Signal(str) - +class ThumbnailPainterWidget(QtWidgets.QWidget): width_ratio = 3.0 height_ratio = 2.0 border_width = 1 - offset_sep = 4 max_thumbnails = 3 + offset_sep = 4 - def __init__(self, controller, parent): - # Missing implementation for thumbnail - # - widget kept to make a visial offset of global attr widget offset - super(ThumbnailWidget, self).__init__(parent) - self.setAcceptDrops(True) + def __init__(self, parent): + super(ThumbnailPainterWidget, self).__init__(parent) border_color = get_objected_colors("bg-buttons").get_qcolor() thumbnail_bg_color = 
get_objected_colors("border").get_qcolor() @@ -48,103 +40,22 @@ class ThumbnailWidget(QtWidgets.QWidget): default_image = get_image("thumbnail") default_pix = paint_image_with_color(default_image, border_color) - self._controller = controller - self._output_dir = controller.get_thumbnail_temp_dir_path() - self.border_color = border_color self.thumbnail_bg_color = thumbnail_bg_color self.overlay_color = overlay_color self._default_pix = default_pix + self._cached_pix = None self._current_pixes = None + self._has_pixes = False + + @property + def has_pixes(self): + return self._has_pixes + + def clear_cache(self): self._cached_pix = None - - self._review_extensions = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) - - self._height = None - self._width = None - self._adapted_to_size = True - self._last_width = None - self._last_height = None - - def _get_filepath_from_event(self, event): - mime_data = event.mimeData() - if not mime_data.hasUrls(): - return None - - filepaths = [] - for url in mime_data.urls(): - filepath = url.toLocalFile() - if os.path.exists(filepath): - filepaths.append(filepath) - - if len(filepaths) == 1: - filepath = filepaths[0] - ext = os.path.splitext(filepath)[-1] - if ext in self._review_extensions: - return filepath - return None - - def dragEnterEvent(self, event): - filepath = self._get_filepath_from_event(event) - if filepath: - event.setDropAction(QtCore.Qt.CopyAction) - event.accept() - - def dragLeaveEvent(self, event): - event.accept() - - def dropEvent(self, event): - filepath = self._get_filepath_from_event(event) - if not filepath: - return - - output = export_thumbnail(filepath, self._output_dir) - if output: - self.thumbnail_created.emit(output) - else: - self._controller.emit_card_message( - "Couldn't convert the source for thumbnail", - CardMessageTypes.error - ) - - def set_adapted_to_hint(self, enabled): - self._adapted_to_size = enabled - if self._width is not None: - self.setMinimumHeight(0) - self._width = None - - if self._height is not None: - self.setMinimumWidth(0) - self._height = None - - def set_width(self, width): - if self._width == width: - return - - self._adapted_to_size = False - self._width = width - self._cached_pix = None - self.setMinimumHeight(int( - (width / self.width_ratio) * self.height_ratio - )) - if self._height is not None: - self.setMinimumWidth(0) - self._height = None - - def set_height(self, height): - if self._height == height: - return - - self._height = height - self._adapted_to_size = False - self._cached_pix = None - self.setMinimumWidth(int( - (height / self.height_ratio) * self.width_ratio - )) - if self._width is not None: - self.setMinimumHeight(0) - self._width = None + self.repaint() def set_current_thumbnails(self, thumbnail_paths=None): pixes = [] @@ -153,8 +64,17 @@ class ThumbnailWidget(QtWidgets.QWidget): pixes.append(QtGui.QPixmap(thumbnail_path)) self._current_pixes = pixes or None - self._cached_pix = None - self.repaint() + self._has_pixes = self._current_pixes is not None + self.clear_cache() + + def paintEvent(self, event): + if self._cached_pix is None: + self._cache_pix() + + painter = QtGui.QPainter() + painter.begin(self) + painter.drawPixmap(0, 0, self._cached_pix) + painter.end() def _cache_pix(self): rect = self.rect() @@ -276,14 +196,146 @@ class ThumbnailWidget(QtWidgets.QWidget): part_height = height / self.offset_sep return part_width, part_height - def paintEvent(self, event): - if self._cached_pix is None: - self._cache_pix() - painter = QtGui.QPainter() - painter.begin(self) - 
painter.drawPixmap(0, 0, self._cached_pix) - painter.end() +class ThumbnailWidget(QtWidgets.QWidget): + """Instance thumbnail widget.""" + + thumbnail_created = QtCore.Signal(str) + thumbnail_cleared = QtCore.Signal() + + def __init__(self, controller, parent): + # Missing implementation for thumbnail + # - widget kept to make a visial offset of global attr widget offset + super(ThumbnailWidget, self).__init__(parent) + self.setAcceptDrops(True) + + thumbnail_painter = ThumbnailPainterWidget(self) + + buttons_widget = QtWidgets.QWidget(self) + buttons_widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) + + clear_button = QtWidgets.QPushButton("x", buttons_widget) + + buttons_layout = QtWidgets.QHBoxLayout(buttons_widget) + buttons_layout.setContentsMargins(3, 3, 3, 3) + buttons_layout.addStretch(1) + buttons_layout.addWidget(clear_button, 0) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(thumbnail_painter) + + clear_button.clicked.connect(self._on_clear_clicked) + + self._controller = controller + self._output_dir = controller.get_thumbnail_temp_dir_path() + + self._review_extensions = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) + + self._height = None + self._width = None + self._adapted_to_size = True + self._last_width = None + self._last_height = None + + self._buttons_widget = buttons_widget + self._thumbnail_painter = thumbnail_painter + + @property + def width_ratio(self): + return self._thumbnail_painter.width_ratio + + @property + def height_ratio(self): + return self._thumbnail_painter.height_ratio + + def _get_filepath_from_event(self, event): + mime_data = event.mimeData() + if not mime_data.hasUrls(): + return None + + filepaths = [] + for url in mime_data.urls(): + filepath = url.toLocalFile() + if os.path.exists(filepath): + filepaths.append(filepath) + + if len(filepaths) == 1: + filepath = filepaths[0] + ext = os.path.splitext(filepath)[-1] + if ext in self._review_extensions: + return filepath + return None + + def dragEnterEvent(self, event): + filepath = self._get_filepath_from_event(event) + if filepath: + event.setDropAction(QtCore.Qt.CopyAction) + event.accept() + + def dragLeaveEvent(self, event): + event.accept() + + def dropEvent(self, event): + filepath = self._get_filepath_from_event(event) + if not filepath: + return + + output = export_thumbnail(filepath, self._output_dir) + if output: + self.thumbnail_created.emit(output) + else: + self._controller.emit_card_message( + "Couldn't convert the source for thumbnail", + CardMessageTypes.error + ) + + def set_adapted_to_hint(self, enabled): + self._adapted_to_size = enabled + if self._width is not None: + self.setMinimumHeight(0) + self._width = None + + if self._height is not None: + self.setMinimumWidth(0) + self._height = None + + def set_width(self, width): + if self._width == width: + return + + self._adapted_to_size = False + self._width = width + self.setMinimumHeight(int( + (width / self.width_ratio) * self.height_ratio + )) + if self._height is not None: + self.setMinimumWidth(0) + self._height = None + self._thumbnail_painter.clear_cache() + + def set_height(self, height): + if self._height == height: + return + + self._height = height + self._adapted_to_size = False + self.setMinimumWidth(int( + (height / self.height_ratio) * self.width_ratio + )) + if self._width is not None: + self.setMinimumHeight(0) + self._width = None + + self._thumbnail_painter.clear_cache() + + def set_current_thumbnails(self, thumbnail_paths=None): + 
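        # Hand the thumbnail paths to the painter widget, then reposition the clear-button overlay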
self._thumbnail_painter.set_current_thumbnails(thumbnail_paths) + self._update_buttons_position() + + def _on_clear_clicked(self): + self.set_current_thumbnails() + self.thumbnail_cleared.emit() def _adapt_to_size(self): if not self._adapted_to_size: @@ -296,15 +348,27 @@ class ThumbnailWidget(QtWidgets.QWidget): self._last_width = width self._last_height = height - self._cached_pix = None + self._thumbnail_painter.clear_cache() + + def _update_buttons_position(self): + self._buttons_widget.setVisible(self._thumbnail_painter.has_pixes) + size = self.size() + my_height = size.height() + height = self._buttons_widget.sizeHint().height() + self._buttons_widget.setGeometry( + 0, my_height - height, + size.width(), height + ) def resizeEvent(self, event): super(ThumbnailWidget, self).resizeEvent(event) self._adapt_to_size() + self._update_buttons_position() def showEvent(self, event): super(ThumbnailWidget, self).showEvent(event) self._adapt_to_size() + self._update_buttons_position() def _run_silent_subprocess(args): diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index c7b6965991..744c51ce07 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1569,6 +1569,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): ) convert_btn.clicked.connect(self._on_convert_click) thumbnail_widget.thumbnail_created.connect(self._on_thumbnail_create) + thumbnail_widget.thumbnail_cleared.connect(self._on_thumbnail_clear) controller.event_system.add_callback( "instance.thumbnail.changed", self._on_thumbnail_changed @@ -1662,6 +1663,23 @@ class SubsetAttributesWidget(QtWidgets.QWidget): self._controller.set_thumbnail_paths_for_instances(mapping) + def _on_thumbnail_clear(self): + instance_ids = [ + instance.id + for instance in self._current_instances + ] + if self._context_selected: + instance_ids.append(None) + + if not instance_ids: + return + + mapping = { + instance_id: None + for instance_id in instance_ids + } + self._controller.set_thumbnail_paths_for_instances(mapping) + def _on_thumbnail_changed(self, event): self._update_thumbnails() From 8935e221e63c0e34d882dff1fdad0024b4c345b8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 31 Oct 2022 23:36:31 +0800 Subject: [PATCH 1888/2550] remove underscore from subset name --- .../deadline/plugins/publish/submit_publish_job.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index aba505b3c6..35f2532c16 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -457,9 +457,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): cam = [c for c in cameras if c in col.head] if cam: - subset_name = '{}_{}_{}'.format(group_name, cam, aov) + if aov: + subset_name = '{}_{}_{}'.format(group_name, cam, aov) + else: + subset_name = '{}_{}'.format(group_name, cam) else: - subset_name = '{}_{}'.format(group_name, aov) + if aov: + subset_name = '{}_{}'.format(group_name, aov) + else: + subset_name = '{}'.format(group_name) if isinstance(col, (list, tuple)): staging = os.path.dirname(col[0]) From 1f2ad7c304f3d8e623bfac657d01d2ebe22c790f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 16:45:54 +0100 Subject: [PATCH 1889/2550] don't use alpha on button hover color --- openpype/style/data.json 
| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/style/data.json b/openpype/style/data.json index 146af84663..404ca6944c 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -27,7 +27,7 @@ "bg": "#2C313A", "bg-inputs": "#21252B", "bg-buttons": "#434a56", - "bg-button-hover": "rgba(168, 175, 189, 0.3)", + "bg-button-hover": "rgb(81, 86, 97)", "bg-inputs-disabled": "#2C313A", "bg-buttons-disabled": "#434a56", From 65e7c45e94ed1fb19dc512ce8ced91506dd2efec Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 17:13:09 +0100 Subject: [PATCH 1890/2550] hiero: wip updating effect containers --- openpype/hosts/hiero/api/lib.py | 32 ++++--------------- openpype/hosts/hiero/api/pipeline.py | 12 ++++--- openpype/hosts/hiero/api/tags.py | 22 ++++++++----- .../hosts/hiero/plugins/load/load_effects.py | 1 + 4 files changed, 29 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 3c1d500e46..d04a710df1 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -9,6 +9,7 @@ import sys import platform import functools import warnings +import json import ast import shutil import hiero @@ -414,32 +415,11 @@ def get_track_openpype_data(track): tag_data = deepcopy(dict(tag.metadata())) for obj_name, obj_data in tag_data.items(): - return_data[obj_name] = {} - - # convert tag metadata to normal keys names and values to correct types - for k, v in obj_data.items(): - - key = k.replace("tag.", "") - - try: - # capture exceptions which are related to strings only - if re.match(r"^[\d]+$", v): - value = int(v) - elif re.match(r"^True$", v): - value = True - elif re.match(r"^False$", v): - value = False - elif re.match(r"^None$", v): - value = None - elif re.match(r"^[\w\d_]+$", v): - value = v - else: - value = ast.literal_eval(v) - except (ValueError, SyntaxError) as msg: - log.warning(msg) - value = v - - return_data[obj_name][key] = value + obj_name = obj_name.replace("tag.", "") + print(obj_name) + if obj_name in ["applieswhole", "note", "label"]: + continue + return_data[obj_name] = json.loads(obj_data) return return_data diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index e9e16ef5b1..26c8ebe6d3 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -133,12 +133,13 @@ def ls(): all_items.append(track) for item in all_items: - container = parse_container(item) + container_data = parse_container(item) if isinstance(container, list): - for _c in container: + if isinstance(container_data, list): + for _c in container_data: yield _c - elif container: - yield container + elif container_data: + yield container_data def parse_container(item, validate=True): @@ -186,6 +187,9 @@ def parse_container(item, validate=True): if type(item) == hiero.core.VideoTrack: return_list = [] _data = lib.get_track_openpype_data(item) + log.info("_data: {}".format(_data)) + if not _data: + return # convert the data to list and validate them for _, obj_data in _data.items(): cotnainer = data_to_container(item, obj_data) diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index fac26da03a..918af3dc1f 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -1,3 +1,4 @@ +import json import re import os import hiero @@ -85,17 +86,22 @@ def update_tag(tag, data): # get metadata key from data data_mtd = data.get("metadata", {}) - # due to hiero bug we have 
to make sure keys which are not existent in - # data are cleared of value by `None` - for _mk in mtd.dict().keys(): - if _mk.replace("tag.", "") not in data_mtd.keys(): - mtd.setValue(_mk, str(None)) + # # due to hiero bug we have to make sure keys which are not existent in + # # data are cleared of value by `None` + # for _mk in mtd.dict().keys(): + # if _mk.replace("tag.", "") not in data_mtd.keys(): + # mtd.setValue(_mk, str(None)) # set all data metadata to tag metadata - for k, v in data_mtd.items(): + for _k, _v in data_mtd.items(): + value = str(_v) + if type(_v) == dict: + value = json.dumps(_v) + + # set the value mtd.setValue( - "tag.{}".format(str(k)), - str(v) + "tag.{}".format(str(_k)), + value ) # set note description of tag diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index fa78684838..16c9187ad9 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -52,6 +52,7 @@ class LoadEffects(load.LoaderPlugin): "source": version_data["source"], "version": vname, "author": version_data["author"], + "objectName": object_name, "children_names": [] } From 44f6d1c724ce5312dc42b044088f99870bf19494 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:10:44 +0100 Subject: [PATCH 1891/2550] set render hint for paint image with color --- openpype/tools/utils/lib.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index d8dd80046a..5302946c28 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -79,6 +79,11 @@ def paint_image_with_color(image, color): pixmap.fill(QtCore.Qt.transparent) painter = QtGui.QPainter(pixmap) + painter.setRenderHints( + painter.Antialiasing + | painter.SmoothPixmapTransform + | painter.HighQualityAntialiasing + ) painter.setClipRegion(alpha_region) painter.setPen(QtCore.Qt.NoPen) painter.setBrush(color) From 1d827f997fa36127c3627c0199cd9aa16e8f2e5d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:12:29 +0100 Subject: [PATCH 1892/2550] draw backgroup only final image --- .../tools/publisher/widgets/thumbnail_widget.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index b45d61623e..69161a7bd7 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -114,11 +114,6 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): pix_height = expected_height - height_offset full_border_width = 2 * self.border_width - pix_bg_brush = QtGui.QBrush(self.thumbnail_bg_color) - - pix_pen = QtGui.QPen() - pix_pen.setWidth(self.border_width) - pix_pen.setColor(self.border_color) backgrounded_images = [] for src_pix in pixes_to_draw: @@ -144,9 +139,6 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): | pix_painter.SmoothPixmapTransform | pix_painter.HighQualityAntialiasing ) - pix_painter.setBrush(pix_bg_brush) - pix_painter.setPen(pix_pen) - pix_painter.drawRect(0, 0, pix_width - 1, pix_height - 1) pix_painter.drawPixmap(pos_x, pos_y, scaled_pix) pix_painter.end() backgrounded_images.append(new_pix) @@ -162,6 +154,10 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): final_pix = QtGui.QPixmap(rect_width, rect_height) final_pix.fill(QtCore.Qt.transparent) + bg_pen = QtGui.QPen() + bg_pen.setWidth(self.border_width) + 
bg_pen.setColor(self.border_color) + final_painter = QtGui.QPainter() final_painter.begin(final_pix) final_painter.setRenderHints( @@ -169,6 +165,10 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): | final_painter.SmoothPixmapTransform | final_painter.HighQualityAntialiasing ) + final_painter.setBrush(QtGui.QBrush(self.thumbnail_bg_color)) + final_painter.setPen(bg_pen) + final_painter.drawRect(rect) + for idx, pix in enumerate(backgrounded_images): x_offset = full_width_offset - (width_offset_part * idx) y_offset = (height_offset_part * idx) + pix_y_offset From 53c3ae8e5614c0b939ba1875499f58ffdf1bb811 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:13:03 +0100 Subject: [PATCH 1893/2550] added helper function to draw checker --- .../publisher/widgets/thumbnail_widget.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 69161a7bd7..3f159d5812 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -29,6 +29,7 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): border_width = 1 max_thumbnails = 3 offset_sep = 4 + checker_boxes_count = 20 def __init__(self, parent): super(ThumbnailPainterWidget, self).__init__(parent) @@ -76,6 +77,43 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): painter.drawPixmap(0, 0, self._cached_pix) painter.end() + def _draw_empty_checker(self, width, height): + checker_size = int(float(width) / self.checker_boxes_count) + if checker_size < 1: + checker_size = 1 + + single_checker_pix = QtGui.QPixmap(checker_size * 2, checker_size * 2) + single_checker_pix.fill(QtCore.Qt.transparent) + single_checker_painter = QtGui.QPainter() + single_checker_painter.begin(single_checker_pix) + single_checker_painter.setPen(QtCore.Qt.NoPen) + single_checker_painter.setBrush(QtGui.QColor(89, 89, 89)) + single_checker_painter.drawRect( + 0, 0, single_checker_pix.width(), single_checker_pix.height() + ) + single_checker_painter.setBrush(QtGui.QColor(188, 187, 187)) + single_checker_painter.drawRect( + 0, 0, checker_size, checker_size + ) + single_checker_painter.drawRect( + checker_size, checker_size, checker_size, checker_size + ) + single_checker_painter.end() + x_offset = (width % checker_size) * -0.5 + y_offset = (height % checker_size) * -0.5 + + empty_pix = QtGui.QPixmap(width, height) + empty_pix.fill(QtCore.Qt.transparent) + empty_painter = QtGui.QPainter() + empty_painter.begin(empty_pix) + empty_painter.drawTiledPixmap( + QtCore.QRectF(0, 0, width, height), + single_checker_pix, + QtCore.QPointF(x_offset, y_offset) + ) + empty_painter.end() + return empty_pix + def _cache_pix(self): rect = self.rect() rect_width = rect.width() From 2d4e13ae5655c6ef2bc8f7c69b6c465c8916037d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:44:32 +0100 Subject: [PATCH 1894/2550] implemented new pixmap button which is not pushbutton based --- openpype/style/style.css | 12 ++++ openpype/tools/utils/__init__.py | 2 + openpype/tools/utils/widgets.py | 94 ++++++++++++++++++++++++++++++-- 3 files changed, 103 insertions(+), 5 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 585adceb26..15abb6130b 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -884,6 +884,18 @@ PublisherTabBtn[active="1"]:hover { background: {color:bg}; } +PixmapButton{ + border: 0px solid transparent; + border-radius: 0.2em; + 
background: {color:bg-buttons}; +} +PixmapButton:hover { + background: {color:bg-button-hover}; +} +PixmapButton:disabled { + background: {color:bg-buttons-disabled}; +} + #CreatorDetailedDescription { padding-left: 5px; padding-right: 5px; diff --git a/openpype/tools/utils/__init__.py b/openpype/tools/utils/__init__.py index 019ea16391..31c8232f47 100644 --- a/openpype/tools/utils/__init__.py +++ b/openpype/tools/utils/__init__.py @@ -7,6 +7,7 @@ from .widgets import ( ExpandBtn, PixmapLabel, IconButton, + PixmapButton, SeparatorWidget, ) from .views import DeselectableTreeView @@ -38,6 +39,7 @@ __all__ = ( "ExpandBtn", "PixmapLabel", "IconButton", + "PixmapButton", "SeparatorWidget", "DeselectableTreeView", diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index ca65182124..13225081ed 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -252,6 +252,90 @@ class PixmapLabel(QtWidgets.QLabel): super(PixmapLabel, self).resizeEvent(event) +class PixmapButtonPainter(QtWidgets.QWidget): + def __init__(self, pixmap, parent): + super(PixmapButtonPainter, self).__init__(parent) + + self._pixmap = pixmap + self._cached_pixmap = None + + def set_pixmap(self, pixmap): + self._pixmap = pixmap + self._cached_pixmap = None + + self.repaint() + + def _cache_pixmap(self): + size = self.size() + self._cached_pixmap = self._pixmap.scaled( + size.width(), + size.height(), + QtCore.Qt.KeepAspectRatio, + QtCore.Qt.SmoothTransformation + ) + + def paintEvent(self, event): + painter = QtGui.QPainter() + painter.begin(self) + if self._pixmap is None: + painter.end() + return + + painter.setRenderHints( + painter.Antialiasing + | painter.SmoothPixmapTransform + | painter.HighQualityAntialiasing + ) + if self._cached_pixmap is None: + self._cache_pixmap() + + painter.drawPixmap(0, 0, self._cached_pixmap) + + painter.end() + + +class PixmapButton(ClickableFrame): + def __init__(self, pixmap=None, parent=None): + super(PixmapButton, self).__init__(parent) + + button_painter = PixmapButtonPainter(pixmap, self) + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(2, 2, 2, 2) + + self._button_painter = button_painter + + def setContentsMargins(self, *args): + layout = self.layout() + layout.setContentsMargins(*args) + self._update_painter_geo() + + def set_pixmap(self, pixmap): + self._button_painter.set_pixmap(pixmap) + + def sizeHint(self): + font_height = self.fontMetrics().height() + return QtCore.QSize(font_height, font_height) + + def resizeEvent(self, event): + super(PixmapButton, self).resizeEvent(event) + self._update_painter_geo() + + def showEvent(self, event): + super(PixmapButton, self).showEvent(event) + self._update_painter_geo() + + def _update_painter_geo(self): + size = self.size() + layout = self.layout() + left, top, right, bottom = layout.getContentsMargins() + self._button_painter.setGeometry( + left, + top, + size.width() - (left + right), + size.height() - (top + bottom) + ) + + class OptionalMenu(QtWidgets.QMenu): """A subclass of `QtWidgets.QMenu` to work with `OptionalAction` @@ -474,8 +558,10 @@ class SeparatorWidget(QtWidgets.QFrame): self.set_size(size) def set_size(self, size): - if size == self._size: - return + if size != self._size: + self._set_size(size) + + def _set_size(self, size): if self._orientation == QtCore.Qt.Vertical: self.setMinimumWidth(size) self.setMaximumWidth(size) @@ -499,6 +585,4 @@ class SeparatorWidget(QtWidgets.QFrame): self._orientation = orientation - size = self._size - self._size = None 
- self.set_size(size) + self._set_size(self._size) From 2ed10e4f0fb9baf5006f87ee056619bf7a9f997b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:45:56 +0100 Subject: [PATCH 1895/2550] separated painting into smaller methods --- .../publisher/widgets/thumbnail_widget.py | 166 +++++++++++------- 1 file changed, 103 insertions(+), 63 deletions(-) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 3f159d5812..d0ac83d6eb 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -77,84 +77,70 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): painter.drawPixmap(0, 0, self._cached_pix) painter.end() - def _draw_empty_checker(self, width, height): + def _paint_checker(self, width, height): checker_size = int(float(width) / self.checker_boxes_count) if checker_size < 1: checker_size = 1 - single_checker_pix = QtGui.QPixmap(checker_size * 2, checker_size * 2) - single_checker_pix.fill(QtCore.Qt.transparent) - single_checker_painter = QtGui.QPainter() - single_checker_painter.begin(single_checker_pix) - single_checker_painter.setPen(QtCore.Qt.NoPen) - single_checker_painter.setBrush(QtGui.QColor(89, 89, 89)) - single_checker_painter.drawRect( - 0, 0, single_checker_pix.width(), single_checker_pix.height() + checker_pix = QtGui.QPixmap(checker_size * 2, checker_size * 2) + checker_pix.fill(QtCore.Qt.transparent) + checker_painter = QtGui.QPainter() + checker_painter.begin(checker_pix) + checker_painter.setPen(QtCore.Qt.NoPen) + checker_painter.setBrush(QtGui.QColor(89, 89, 89)) + checker_painter.drawRect( + 0, 0, checker_pix.width(), checker_pix.height() ) - single_checker_painter.setBrush(QtGui.QColor(188, 187, 187)) - single_checker_painter.drawRect( + checker_painter.setBrush(QtGui.QColor(188, 187, 187)) + checker_painter.drawRect( 0, 0, checker_size, checker_size ) - single_checker_painter.drawRect( + checker_painter.drawRect( checker_size, checker_size, checker_size, checker_size ) - single_checker_painter.end() - x_offset = (width % checker_size) * -0.5 - y_offset = (height % checker_size) * -0.5 + checker_painter.end() + return checker_pix - empty_pix = QtGui.QPixmap(width, height) - empty_pix.fill(QtCore.Qt.transparent) - empty_painter = QtGui.QPainter() - empty_painter.begin(empty_pix) - empty_painter.drawTiledPixmap( - QtCore.QRectF(0, 0, width, height), - single_checker_pix, - QtCore.QPointF(x_offset, y_offset) + def _paint_default_pix(self, pix_width, pix_height): + full_border_width = 2 * self.border_width + width = pix_width - full_border_width + height = pix_height - full_border_width + if width > 100: + width = int(width * 0.6) + height = int(height * 0.6) + + scaled_pix = self._default_pix.scaled( + width, + height, + QtCore.Qt.KeepAspectRatio, + QtCore.Qt.SmoothTransformation ) - empty_painter.end() - return empty_pix - - def _cache_pix(self): - rect = self.rect() - rect_width = rect.width() - rect_height = rect.height() - - pix_x_offset = 0 - pix_y_offset = 0 - expected_height = int( - (rect_width / self.width_ratio) * self.height_ratio + pos_x = int( + (pix_width - scaled_pix.width()) / 2 ) - if expected_height > rect_height: - expected_height = rect_height - expected_width = int( - (rect_height / self.height_ratio) * self.width_ratio - ) - pix_x_offset = (rect_width - expected_width) / 2 - else: - expected_width = rect_width - pix_y_offset = (rect_height - expected_height) / 2 - - if self._current_pixes is None: - 
draw_dashes = True - pixes_to_draw = [self._default_pix] - else: - draw_dashes = False - pixes_to_draw = self._current_pixes - - if len(pixes_to_draw) > self.max_thumbnails: - pixes_to_draw = pixes_to_draw[:-self.max_thumbnails] - pixes_len = len(pixes_to_draw) - - width_offset, height_offset = self._get_pix_offset_size( - expected_width, expected_height, pixes_len + pos_y = int( + (pix_height - scaled_pix.height()) / 2 ) - pix_width = expected_width - width_offset - pix_height = expected_height - height_offset + new_pix = QtGui.QPixmap(pix_width, pix_height) + new_pix.fill(QtCore.Qt.transparent) + pix_painter = QtGui.QPainter() + pix_painter.begin(new_pix) + pix_painter.setRenderHints( + pix_painter.Antialiasing + | pix_painter.SmoothPixmapTransform + | pix_painter.HighQualityAntialiasing + ) + pix_painter.drawPixmap(pos_x, pos_y, scaled_pix) + pix_painter.end() + return new_pix + + def _draw_thumbnails(self, thumbnails, pix_width, pix_height): full_border_width = 2 * self.border_width + checker_pix = self._paint_checker(pix_width, pix_height) backgrounded_images = [] - for src_pix in pixes_to_draw: + for src_pix in thumbnails: scaled_pix = src_pix.scaled( pix_width - full_border_width, pix_height - full_border_width, @@ -177,9 +163,63 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): | pix_painter.SmoothPixmapTransform | pix_painter.HighQualityAntialiasing ) + + tiled_rect = QtCore.QRectF( + pos_x, pos_y, scaled_pix.width(), scaled_pix.height() + ) + pix_painter.drawTiledPixmap( + tiled_rect, + checker_pix, + QtCore.QPointF(0.0, 0.0) + ) pix_painter.drawPixmap(pos_x, pos_y, scaled_pix) pix_painter.end() backgrounded_images.append(new_pix) + return backgrounded_images + + def _cache_pix(self): + rect = self.rect() + rect_width = rect.width() + rect_height = rect.height() + + pix_x_offset = 0 + pix_y_offset = 0 + expected_height = int( + (rect_width / self.width_ratio) * self.height_ratio + ) + if expected_height > rect_height: + expected_height = rect_height + expected_width = int( + (rect_height / self.height_ratio) * self.width_ratio + ) + pix_x_offset = (rect_width - expected_width) / 2 + else: + expected_width = rect_width + pix_y_offset = (rect_height - expected_height) / 2 + + if self._current_pixes is None: + used_default_pix = True + pixes_to_draw = None + pixes_len = 1 + else: + used_default_pix = False + pixes_to_draw = self._current_pixes + if len(pixes_to_draw) > self.max_thumbnails: + pixes_to_draw = pixes_to_draw[:-self.max_thumbnails] + pixes_len = len(pixes_to_draw) + + width_offset, height_offset = self._get_pix_offset_size( + expected_width, expected_height, pixes_len + ) + pix_width = expected_width - width_offset + pix_height = expected_height - height_offset + + if used_default_pix: + thumbnail_images = [self._paint_default_pix(pix_width, pix_height)] + else: + thumbnail_images = self._draw_thumbnails( + pixes_to_draw, pix_width, pix_height + ) if pixes_len == 1: width_offset_part = 0 @@ -207,13 +247,13 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): final_painter.setPen(bg_pen) final_painter.drawRect(rect) - for idx, pix in enumerate(backgrounded_images): + for idx, pix in enumerate(thumbnail_images): x_offset = full_width_offset - (width_offset_part * idx) y_offset = (height_offset_part * idx) + pix_y_offset final_painter.drawPixmap(x_offset, y_offset, pix) # Draw drop enabled dashes - if draw_dashes: + if used_default_pix: pen = QtGui.QPen() pen.setWidth(1) pen.setBrush(QtCore.Qt.darkGray) From 01279cc6fd97017abb8b8adf3fa3aeadc23147e4 Mon Sep 17 00:00:00 2001 
From: Jakub Trllo Date: Mon, 31 Oct 2022 18:46:08 +0100 Subject: [PATCH 1896/2550] change thumbnail bg color --- openpype/tools/publisher/widgets/thumbnail_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index d0ac83d6eb..53e0891623 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -35,7 +35,7 @@ class ThumbnailPainterWidget(QtWidgets.QWidget): super(ThumbnailPainterWidget, self).__init__(parent) border_color = get_objected_colors("bg-buttons").get_qcolor() - thumbnail_bg_color = get_objected_colors("border").get_qcolor() + thumbnail_bg_color = get_objected_colors("bg-view").get_qcolor() overlay_color = get_objected_colors("font").get_qcolor() default_image = get_image("thumbnail") From bbaf811df913818b062c64bae7a75a68d6944edd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:46:36 +0100 Subject: [PATCH 1897/2550] added image for clear thumbnail button and use pixmap button --- .../widgets/images/clear_thumbnail.png | Bin 0 -> 15872 bytes .../tools/publisher/widgets/thumbnail_widget.py | 9 ++++++++- openpype/tools/publisher/widgets/widgets.py | 1 + 3 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 openpype/tools/publisher/widgets/images/clear_thumbnail.png diff --git a/openpype/tools/publisher/widgets/images/clear_thumbnail.png b/openpype/tools/publisher/widgets/images/clear_thumbnail.png new file mode 100644 index 0000000000000000000000000000000000000000..406328cb51319160db0e722e673b7cac0449eee3 GIT binary patch literal 15872 zcmZX5by$;O)b}=0Qba&$Nona0B?l6Y5F`}^L3*^r5K$1RQPL^{K~j;DngXIo$N*`W z)T9|5V|>r>eZRN<;DU?$Jm;MI-1l>y6TcH@W};6|!%hPNf#{(IIxr9j0{jU9QBwlH zc7lhFKp=3Go3^$Y9HxJR7pkkRAg!#RAR!|u4FX+F2}o%)(!PHFd7FTtK>q8PSf|bR z7WbaMPSDjUe9bET8K=gayN zmx^v2v3A@?=QbL~ulBy=j$SYrAyP1FTZAbLoKeB%`j3uvzxh}tfv}JROZPx#| zgVv<{Ep*ql{H6AT@{U?mb#w0eeZI1b8_umLNo00G>Zr(cVj>TZL+(xKzDwMcms7^H zHN7bB>&ArL)`-mbG%qSsHvh|7P%oD*{H;q-sLnl$o!FP;$~C8v$M5x@PsS_*n*nwt^MMAa+G8;XNuQ@XtNwJ3Pa$S$m8W zd>_ftc)v4iqwZD2T1)PfH}wA?BmccnR(aQ(Cv+ifu3IWK#`S1{M#~L2lXE@>HgFI~ zlArVg4ldFR1c7)#P@U@*!MSTwA)g*thHRX8%U={s`1F-t*DYP>79VfPXa|!He1qj8 zfAZrGU6a(WN}wHVJ_7vidiThq=f)LF6q(*C-ng)l!1aw%SL@>LFsKR>YjK6z;ELzK z#o^}2o*LzMpTh%XM?x2~SB@{wDfWSHew*yKd#t`)-m+)M3q^*yUf%%;B-|IN-oLq* z{bSmeFnM{@SIDUDps^yd{{G_Wz6$n@Ct-5B&6ai&Ts}67!|uzYqv?mVV0w}uly!X6 z0-DBYp{Jh(UG*UZJ|lA9Keef#myN0(ROaH>-qT+|wxX>k{DT59R84^<_9*O2M6NT6 zzqov-fgWw#BS~%5dwB9J>$Fb9fb_xfJ7%quW!Wk)OX&AmrLu&~TS659|+=E~5Jb z!-2HVWuK*+R=NxZqIt9+H+X!M_AAeWe()YGSBlUstqU7jCPedV4nvWC1ITPz8+%bwT571L@_>quCU|F1;fh;lm$Dk`Q@5q&ZGaUOQE6jUw2HJv! 
[ base85-encoded binary payload of clear_thumbnail.png (GIT binary patch, literal 15872) omitted ]
zF*0_#;=_bbb-eL=E+Gmzj#nY{L?(OB62RH$Ac;So<5Y9k}$Qt)p%T}ycL^#?0< zROPwFCCJeK51I)s4EVwdVLKn0oR(n=+ynrrbK*rx-0P-rjWGIZ*TERC! zFknmuqs)5*Adn;lch3imMgSg>=VsAjAohD)nOJFjqCm@jVZUb4q7`hzjDkVf6MH_~ zEIm8nAO$f*6%WlM0O+r25qZYc&}X84G(NY36&KD2u^vdES$H-@x$2U{H{TG6M@_+&6T+9VPCH6Ym53jL##l4pd(*bj z^lAgyw~MjbBd_Eo@~++cohn(jrVuzRm~c-K)0Msw7ugmheF|Qc4jdj9JBEy$kE{-q zuLo6eHZ3Ecg|IBOu$&Or-76{gE;lVN7o9|L#A-fl%B77W`zFzh>g7aCWSZu1_~@G_ zES&>D9z4zVeJYqj{hsv5DdEGALAGwKgUk*R321I>Zd6 zF@VRKrnZtFB~3C=rwpJ_?mM*_dmb;nP0bJADnI{PrK z4^uLL)U2WiyyVP{lAHi5F#({Ujkat*%_U9=`9*4=&Ky@xIZZ9j6(WT4vY{cI^Wnz> zEd)QQrAYt@Y7^}2WZp=jL_hjqek{4Wj@C!TM+|?46pdJ{%HVq9$mAgtlb`gavA2WM*uz1-TCdBS8qK#F5+)IcrP^}Tjcrn}~E_Q)lLiN=_EJ4o)$P>0V$s3~aI zxL^m*e)!KID9*anT9vFonAnTyQUm2Oa?<}d`0rd`%dLOdUvH*OFbN{u=ZVUpEKupU zgHCMAVT{9p_e5=9d4r0zPxdy<*95F^{*-e0aV2E>$qer=LPknrdReTd*Ks~QI_H^b zrbV4I{?YZK`(&Gz*x_L1TQz|~T+EK06pux0XgOs)FsRC4N(Vi0iG2tT)E1w~cKq;) z+j{>O*T;Zoi)yda41n#IlLPN<&QFg#U>by;yJ2d~T!KNMx0Z2UL{^j1%ENmLI99R) zO1!sB%}KeCr<^~XMsf*d6LN9O12d_| zBpDPQc*TvOFo|C_vhPYBN0Eb4%%>dt9ghB&`5aK&R_>&kt5<)>U~lG=eftnL{$aPuJ6j~t+V2$|xrj4?R zSN3$1)twuf!s_tW`$o`ycSq$^7F)vliR6htnGfe?;oqo-8MFwQwR2(YN;09mqJ4L; z3{Jht1#Nux4}u>Gt<-HFJ4t_-%M~B%+c1K5Eli^wv?D%ueysBcZyI9zfXCs$j6m)A z10$>_s$XBQ)JOpoN#9J?#$_(R23mNmu;h*ayzBCd0&o@I)LPYz!}Dod_LOQA-nl75 z^f=`DgO7)7ePW=gEg39-gJT_PYm$P%m4+YBOiBhz=LZzqMk9m!yzn2t~4$Z+># zT7nb*4JUDfJeQuM;Zbv&@av^3m3%FkN$5o>H#*v=Oz`d0e2~k_B|Xx}(+R&R#)37) z>(b}^1LhoQm*Yc`rIzIrd*sJEH$$&}cb_%&|MqIET&P8njUP&X2ht2~$U0yyI2*TG zfs)3pdSRHsuU>*gorgCI{KS?jiA4s?TaOUkcSV-KeBPA#7ypfNoiZQjW4~ZO7-0BW zK~h5DIHe;g0cZDa#i&Ajk^+=#(;q8~4xKg42d2ar$xd}brJ!St*%pJ``bXtkI!>9u zllDGRo9D3MsHWp0JzxgVHjvDLiXc)g9xM)v^P-$)%%z{?Qe)jk(1L0`rI$3Kn4|c8 z-^uJw=wW}NyP~9$HDprn0CRRJ8Dx=4sa?Qiko#Z)N_hK3-NzglR1@jBff+_blAsma z>C(pcxt1>NcW1r;+Dhpzj#znA5_JSWAc6R1u7$@_|Yig(=!F1|^<@{~G8U5vNFmI2ev!_QZ|; zDZ%|lu1{++Hh}7Jmd}CRWv-Do$9}pLd}Xr)SQX|5wHh25oyMb2KD4I@%|WzkN3%Y) zp+BG_!F#b>iK1{}z`dV>3WQG_5J$p_x`B=7%QdU^Z^D-D}BMd3pJ{?QZx)574gCWQa&uUipNcSRD#1_f#KD1 z3e;n2D~MPf9v+DarwaeX-Egh}Y%QP4u}Emn+6fPX())mUJMY}QLtgF_c&i$6AdiWl zc-&7|9am)j4GBtawT-0mp|w>!%-Jkf6M0DsD&@%dc=*I=eFM&RP8I28#lChSQV(2Rlpr+yDRo literal 0 HcmV?d00001 diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index 53e0891623..aef5035603 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -17,6 +17,7 @@ from openpype.lib.transcoding import ( from openpype.tools.utils import ( paint_image_with_color, + PixmapButton, ) from openpype.tools.publisher.control import CardMessageTypes @@ -292,7 +293,13 @@ class ThumbnailWidget(QtWidgets.QWidget): buttons_widget = QtWidgets.QWidget(self) buttons_widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) - clear_button = QtWidgets.QPushButton("x", buttons_widget) + icon_color = get_objected_colors("bg-view-selection").get_qcolor() + icon_color.setAlpha(255) + clear_image = get_image("clear_thumbnail") + clear_pix = paint_image_with_color(clear_image, icon_color) + + clear_button = PixmapButton(clear_pix, buttons_widget) + clear_button.setObjectName("PixmapHoverButton") buttons_layout = QtWidgets.QHBoxLayout(buttons_widget) buttons_layout.setContentsMargins(3, 3, 3, 3) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 744c51ce07..f0c1a6df80 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -127,6 +127,7 @@ class PublishIconBtn(IconButton): - error : 
other error happened - success : publishing finished """ + def __init__(self, pixmap_path, *args, **kwargs): super(PublishIconBtn, self).__init__(*args, **kwargs) From 33178d15702500715c8400039742d6e5da3e4774 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:46:47 +0100 Subject: [PATCH 1898/2550] add different styles for button --- openpype/style/style.css | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/style/style.css b/openpype/style/style.css index 15abb6130b..0a703d1170 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -896,6 +896,14 @@ PixmapButton:disabled { background: {color:bg-buttons-disabled}; } +#PixmapHoverButton { + font-size: 11pt; + background: {color:bg-view}; +} +#PixmapHoverButton:hover { + background: {color:bg-button-hover}; +} + #CreatorDetailedDescription { padding-left: 5px; padding-right: 5px; From 78a725ca26a02e88e2361ccaff1287e7ce6bb8ea Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 18:47:39 +0100 Subject: [PATCH 1899/2550] chnage the object name --- openpype/style/style.css | 4 ++-- openpype/tools/publisher/widgets/thumbnail_widget.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 0a703d1170..887c044dae 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -896,11 +896,11 @@ PixmapButton:disabled { background: {color:bg-buttons-disabled}; } -#PixmapHoverButton { +#ThumbnailPixmapHoverButton { font-size: 11pt; background: {color:bg-view}; } -#PixmapHoverButton:hover { +#ThumbnailPixmapHoverButton:hover { background: {color:bg-button-hover}; } diff --git a/openpype/tools/publisher/widgets/thumbnail_widget.py b/openpype/tools/publisher/widgets/thumbnail_widget.py index aef5035603..035ec4b04b 100644 --- a/openpype/tools/publisher/widgets/thumbnail_widget.py +++ b/openpype/tools/publisher/widgets/thumbnail_widget.py @@ -299,7 +299,7 @@ class ThumbnailWidget(QtWidgets.QWidget): clear_pix = paint_image_with_color(clear_image, icon_color) clear_button = PixmapButton(clear_pix, buttons_widget) - clear_button.setObjectName("PixmapHoverButton") + clear_button.setObjectName("ThumbnailPixmapHoverButton") buttons_layout = QtWidgets.QHBoxLayout(buttons_widget) buttons_layout.setContentsMargins(3, 3, 3, 3) From 645971c07e3df0d4a5dc1cdc564f5d147f592f56 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:04:27 +0100 Subject: [PATCH 1900/2550] better deffer reset of publisher --- openpype/tools/publisher/window.py | 40 ++++++++++++++++++------------ 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index a3387043b8..d8a69bbeb0 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -225,6 +225,12 @@ class PublisherWindow(QtWidgets.QDialog): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) + # Timer started on show -> connected to timer counter + # - helps to deffer on show logic by 3 event loops + show_timer = QtCore.QTimer() + show_timer.setInterval(1) + show_timer.timeout.connect(self._on_show_timer) + errors_dialog_message_timer = QtCore.QTimer() errors_dialog_message_timer.setInterval(100) errors_dialog_message_timer.timeout.connect( @@ -329,7 +335,6 @@ class PublisherWindow(QtWidgets.QDialog): # forin init self._reset_on_first_show = reset_on_show self._reset_on_show = True - self._restart_timer = None 
self._publish_frame_visible = None self._error_messages_to_show = collections.deque() @@ -337,6 +342,9 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) + self._show_timer = show_timer + self._show_counter = 0 + @property def controller(self): return self._controller @@ -347,17 +355,7 @@ class PublisherWindow(QtWidgets.QDialog): self._first_show = False self._on_first_show() - if not self._reset_on_show: - return - - self._reset_on_show = False - # Detach showing - give OS chance to draw the window - timer = QtCore.QTimer() - timer.setSingleShot(True) - timer.setInterval(1) - timer.timeout.connect(self._on_show_restart_timer) - self._restart_timer = timer - timer.start() + self._show_timer.start() def resizeEvent(self, event): super(PublisherWindow, self).resizeEvent(event) @@ -374,11 +372,21 @@ class PublisherWindow(QtWidgets.QDialog): self.setStyleSheet(style.load_stylesheet()) self._reset_on_show = self._reset_on_first_show - def _on_show_restart_timer(self): - """Callback for '_restart_timer' timer.""" + def _on_show_timer(self): + # Add 1 to counter until hits 2 + if self._show_counter < 3: + self._show_counter += 1 + return - self._restart_timer = None - self.reset() + # Stop the timer + self._show_timer.stop() + # Reset counter when done for next show event + self._show_counter = 0 + + # Reset if requested + if self._reset_on_show: + self._reset_on_show = False + self.reset() def closeEvent(self, event): self.save_changes() From 2c4d37d1bfef0b83134bd00775b00815493366cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:23:22 +0100 Subject: [PATCH 1901/2550] added create next overlay widget --- openpype/tools/publisher/widgets/__init__.py | 2 + openpype/tools/publisher/widgets/widgets.py | 201 +++++++++++++++++++ 2 files changed, 203 insertions(+) diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index a02c69d5e0..042985b007 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -8,6 +8,7 @@ from .widgets import ( ResetBtn, ValidateBtn, PublishBtn, + CreateNextPageOverlay, ) from .help_widget import ( HelpButton, @@ -28,6 +29,7 @@ __all__ = ( "ResetBtn", "ValidateBtn", "PublishBtn", + "CreateNextPageOverlay", "HelpButton", "HelpDialog", diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d4c2623790..507ecedb0f 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1652,3 +1652,204 @@ class ThumbnailWidget(QtWidgets.QWidget): self.thumbnail_label = thumbnail_label self.default_pix = default_pix self.current_pix = None + + +class CreateNextPageOverlay(QtWidgets.QWidget): + max_value = 100.0 + clicked = QtCore.Signal() + + def __init__(self, parent): + super(CreateNextPageOverlay, self).__init__(parent) + + self._bg_color = QtGui.QColor(127, 127, 255) + self._arrow_color = QtGui.QColor(255, 255, 255) + + change_anim = QtCore.QVariantAnimation() + change_anim.setStartValue(0.0) + change_anim.setEndValue(self.max_value) + change_anim.setDuration(200) + change_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) + + change_anim.valueChanged.connect(self._on_anim) + + self._change_anim = change_anim + self._is_visible = None + self._anim_value = 0.0 + self._increasing = False + self._under_mouse = None + self._handle_show_on_own = True + self._mouse_pressed = False + self.set_visible(True) + + def set_increasing(self, 
increasing): + if self._increasing is increasing: + return + self._increasing = increasing + if increasing: + self._change_anim.setDirection(self._change_anim.Forward) + else: + self._change_anim.setDirection(self._change_anim.Backward) + + if self._change_anim.state() != self._change_anim.Running: + self._change_anim.start() + + def set_visible(self, visible): + if self._is_visible is visible: + return + + self._is_visible = visible + if not visible: + self.set_increasing(False) + if not self._is_anim_finished(): + return + + self.setVisible(visible) + self._check_anim_timer() + + def _is_anim_finished(self): + if self._increasing: + return self._anim_value == self.max_value + return self._anim_value == 0.0 + + def _on_anim(self, value): + self._check_anim_timer() + + self._anim_value = value + + self.update() + + if not self._is_anim_finished(): + return + + if not self._is_visible: + self.setVisible(False) + + def set_handle_show_on_own(self, handle): + if self._handle_show_on_own is handle: + return + self._handle_show_on_own = handle + self._under_mouse = None + self._check_anim_timer() + + def set_under_mouse(self, under_mouse): + if self._under_mouse is under_mouse: + return + + if self._handle_show_on_own: + self._handle_show_on_own = False + self._under_mouse = under_mouse + self.set_increasing(under_mouse) + + def _is_under_mouse(self): + mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) + under_mouse = self.rect().contains(mouse_pos) + return under_mouse + + def _check_anim_timer(self): + if not self.isVisible(): + return + + if self._handle_show_on_own: + under_mouse = self._is_under_mouse() + else: + under_mouse = self._under_mouse + + self.set_increasing(under_mouse) + + def enterEvent(self, event): + super(CreateNextPageOverlay, self).enterEvent(event) + if self._handle_show_on_own: + self._check_anim_timer() + + def leaveEvent(self, event): + super(CreateNextPageOverlay, self).leaveEvent(event) + if self._handle_show_on_own: + self._check_anim_timer() + + def mousePressEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self._mouse_pressed = True + super(CreateNextPageOverlay, self).mousePressEvent(event) + + def mouseReleaseEvent(self, event): + if self._mouse_pressed: + self._mouse_pressed = False + if self.rect().contains(event.pos()): + self.clicked.emit() + + super(CreateNextPageOverlay, self).mouseReleaseEvent(event) + + def paintEvent(self, event): + painter = QtGui.QPainter() + painter.begin(self) + if self._anim_value == 0.0: + painter.end() + return + painter.setRenderHints( + painter.Antialiasing + | painter.SmoothPixmapTransform + ) + + pen = QtGui.QPen() + pen.setWidth(0) + painter.setPen(pen) + rect = QtCore.QRect(self.rect()) + + offset = rect.width() - int( + float(rect.width()) * 0.01 * self._anim_value + ) + + pos_y = rect.center().y() + left = rect.left() + offset + right = rect.right() + top = rect.top() + bottom = rect.bottom() + width = right - left + height = bottom - top + + q_height = height * 0.15 + + arrow_half_height = width * 0.2 + arrow_x_start = left + (width * 0.4) + arrow_x_end = arrow_x_start + arrow_half_height + arrow_top_y_boundry = arrow_half_height + q_height + arrow_bottom_y_boundry = height - (arrow_half_height + q_height) + offset = 0 + if pos_y < arrow_top_y_boundry: + pos_y = arrow_top_y_boundry + elif pos_y > arrow_bottom_y_boundry: + pos_y = arrow_bottom_y_boundry + + top_cubic_y = pos_y - q_height + bottom_cubic_y = pos_y + q_height + + path = QtGui.QPainterPath() + path.moveTo(right, top) + path.lineTo(right, 
bottom) + + path.cubicTo( + right, bottom, + left, bottom_cubic_y, + left, pos_y + ) + path.cubicTo( + left, top_cubic_y, + right, top, + right, top + ) + path.closeSubpath() + + painter.fillPath(path, self._bg_color) + + src_arrow_path = QtGui.QPainterPath() + src_arrow_path.moveTo(arrow_x_start, pos_y - arrow_half_height) + src_arrow_path.lineTo(arrow_x_end, pos_y) + src_arrow_path.lineTo(arrow_x_start, pos_y + arrow_half_height) + + arrow_stroker = QtGui.QPainterPathStroker() + arrow_stroker.setWidth(min(4, arrow_half_height * 0.2)) + arrow_path = arrow_stroker.createStroke(src_arrow_path) + + painter.fillPath(arrow_path, self._arrow_color) + + painter.end() From 30789058b34e0445da3c6a4a1bb12fafb073c3b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:23:38 +0100 Subject: [PATCH 1902/2550] overview widget can return global geo of subset view widget --- openpype/tools/publisher/widgets/overview_widget.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index be3839b90b..1c924d1631 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -195,6 +195,16 @@ class OverviewWidget(QtWidgets.QFrame): self._subset_views_widget.setMaximumWidth(view_width) self._change_anim.start() + def get_subset_views_geo(self): + parent = self._subset_views_widget.parent() + global_pos = parent.mapToGlobal(self._subset_views_widget.pos()) + return QtCore.QRect( + global_pos.x(), + global_pos.y(), + self._subset_views_widget.width(), + self._subset_views_widget.height() + ) + def _on_create_clicked(self): """Pass signal to parent widget which should care about changing state. 
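
The `get_subset_views_geo` helper added in the patch above follows the usual Qt recipe for turning a child widget's local position into screen coordinates. A minimal standalone sketch of that recipe follows; it is illustrative only (the helper names are made up here, not part of any patch in this series) and assumes the same `Qt` binding wrapper the repository already imports.

from Qt import QtCore, QtGui


def global_geometry(widget):
    """Return the widget's geometry in global (screen) coordinates."""
    parent = widget.parent()
    if parent is None:
        # A top-level widget's pos() is already in screen coordinates
        global_pos = widget.pos()
    else:
        # pos() is relative to the parent, so let the parent map it
        global_pos = parent.mapToGlobal(widget.pos())
    return QtCore.QRect(
        global_pos.x(),
        global_pos.y(),
        widget.width(),
        widget.height()
    )


def cursor_in_right_half(widget):
    """Hypothetical helper: True when the mouse is over the widget's right half."""
    geo = global_geometry(widget)
    center_x = geo.left() + (geo.width() * 0.5)
    return QtGui.QCursor.pos().x() > center_x
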
From 90d0dd718bce3a4537ffe8d2301484369cd67e84 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:24:35 +0100 Subject: [PATCH 1903/2550] prepared methods for set/check current tab --- openpype/tools/publisher/window.py | 31 +++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index d8a69bbeb0..7a0c34e298 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -403,7 +403,7 @@ class PublisherWindow(QtWidgets.QDialog): self._context_label.setText(label) def _update_publish_details_widget(self, force=False): - if not force and self._tabs_widget.current_tab() != "details": + if not force and not self._is_current_tab("details"): return report_data = self.controller.get_publish_report() @@ -434,7 +434,7 @@ class PublisherWindow(QtWidgets.QDialog): ) def _on_tab_change(self, old_tab, new_tab): - if old_tab == "details": + if old_tab != "details": self._publish_details_widget.close_details_popup() if new_tab in ("create", "publish"): @@ -463,14 +463,23 @@ class PublisherWindow(QtWidgets.QDialog): def _on_create_request(self): self._go_to_create_tab() + def _set_current_tab(self, identifier): + self._tabs_widget.set_current_tab(identifier) + + def _is_current_tab(self, identifier): + return self._tabs_widget.is_current_tab(identifier) + def _go_to_create_tab(self): - self._tabs_widget.set_current_tab("create") + self._set_current_tab("create") + + def _go_to_publish_tab(self): + self._set_current_tab("publish") def _go_to_details_tab(self): - self._tabs_widget.set_current_tab("details") + self._set_current_tab("details") def _go_to_report_tab(self): - self._tabs_widget.set_current_tab("report") + self._set_current_tab("report") def _set_publish_overlay_visibility(self, visible): if visible: @@ -523,10 +532,10 @@ class PublisherWindow(QtWidgets.QDialog): self._set_footer_enabled(False) self._update_publish_details_widget() if ( - not self._tabs_widget.is_current_tab("create") - and not self._tabs_widget.is_current_tab("publish") + not self._is_current_tab("create") + and not self._is_current_tab("publish") ): - self._tabs_widget.set_current_tab("publish") + self._set_current_tab("publish") def _on_publish_start(self): self._create_tab.setEnabled(False) @@ -542,8 +551,8 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_details_widget.close_details_popup() - if self._tabs_widget.is_current_tab(self._create_tab): - self._tabs_widget.set_current_tab("publish") + if self._is_current_tab(self._create_tab): + self._set_current_tab("publish") def _on_publish_validated_change(self, event): if event["value"]: @@ -556,7 +565,7 @@ class PublisherWindow(QtWidgets.QDialog): publish_has_crashed = self._controller.publish_has_crashed validate_enabled = not publish_has_crashed publish_enabled = not publish_has_crashed - if self._tabs_widget.is_current_tab("publish"): + if self._is_current_tab("publish"): self._go_to_report_tab() if validate_enabled: From ee94f7c46c707846277a8faf4fb3bcf6087f1edf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:24:56 +0100 Subject: [PATCH 1904/2550] added overlay widget and necessary parts to window --- openpype/tools/publisher/window.py | 85 +++++++++++++++++++++++++++--- 1 file changed, 78 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 7a0c34e298..ddac19f2e5 100644 --- a/openpype/tools/publisher/window.py +++ 
b/openpype/tools/publisher/window.py @@ -29,6 +29,8 @@ from .widgets import ( HelpButton, HelpDialog, + + CreateNextPageOverlay, ) @@ -225,8 +227,9 @@ class PublisherWindow(QtWidgets.QDialog): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) - # Timer started on show -> connected to timer counter - # - helps to deffer on show logic by 3 event loops + create_overlay_button = CreateNextPageOverlay(self) + create_overlay_button.set_handle_show_on_own(False) + show_timer = QtCore.QTimer() show_timer.setInterval(1) show_timer.timeout.connect(self._on_show_timer) @@ -255,6 +258,7 @@ class PublisherWindow(QtWidgets.QDialog): publish_btn.clicked.connect(self._on_publish_clicked) publish_frame.details_page_requested.connect(self._go_to_details_tab) + create_overlay_button.clicked.connect(self._go_to_publish_tab) controller.event_system.add_callback( "instances.refresh.finished", self._on_instances_refresh @@ -310,6 +314,7 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_overlay = publish_overlay self._publish_frame = publish_frame + self._content_widget = content_widget self._content_stacked_layout = content_stacked_layout self._overview_widget = overview_widget @@ -342,6 +347,9 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) + self._create_overlay_button = create_overlay_button + self._app_event_listener_installed = False + self._show_timer = show_timer self._show_counter = 0 @@ -355,11 +363,38 @@ class PublisherWindow(QtWidgets.QDialog): self._first_show = False self._on_first_show() + self._show_counter = 0 self._show_timer.start() def resizeEvent(self, event): super(PublisherWindow, self).resizeEvent(event) self._update_publish_frame_rect() + self._update_create_overlay_size() + + def closeEvent(self, event): + self._uninstall_app_event_listener() + self.save_changes() + self._reset_on_show = True + super(PublisherWindow, self).closeEvent(event) + + def eventFilter(self, obj, event): + if event.type() == QtCore.QEvent.MouseMove: + self._update_create_overlay_visibility(event.globalPos()) + return super(PublisherWindow, self).eventFilter(obj, event) + + def _install_app_event_listener(self): + if self._app_event_listener_installed: + return + self._app_event_listener_installed = True + app = QtWidgets.QApplication.instance() + app.installEventFilter(self) + + def _uninstall_app_event_listener(self): + if not self._app_event_listener_installed: + return + self._app_event_listener_installed = False + app = QtWidgets.QApplication.instance() + app.removeEventFilter(self) def _on_overlay_message(self, event): self._overlay_object.add_message( @@ -383,16 +418,16 @@ class PublisherWindow(QtWidgets.QDialog): # Reset counter when done for next show event self._show_counter = 0 + self._update_create_overlay_size() + self._update_create_overlay_visibility() + if self._is_current_tab("create"): + self._install_app_event_listener() + # Reset if requested if self._reset_on_show: self._reset_on_show = False self.reset() - def closeEvent(self, event): - self.save_changes() - self._reset_on_show = True - super(PublisherWindow, self).closeEvent(event) - def save_changes(self): self._controller.save_changes() @@ -457,6 +492,13 @@ class PublisherWindow(QtWidgets.QDialog): self._report_widget ) + is_create = new_tab == "create" + if is_create: + self._install_app_event_listener() + else: + self._uninstall_app_event_listener() + self._create_overlay_button.set_visible(is_create) + def _on_context_or_active_change(self): 
self._validate_create_instances() @@ -669,6 +711,35 @@ class PublisherWindow(QtWidgets.QDialog): event["title"], new_failed_info, "Convertor:" ) + def _update_create_overlay_size(self): + height = self._content_widget.height() + metrics = self._create_overlay_button.fontMetrics() + width = int(metrics.height() * 3) + pos_x = self.width() - width + + tab_pos = self._tabs_widget.parent().mapTo( + self, self._tabs_widget.pos() + ) + tab_height = self._tabs_widget.height() + pos_y = tab_pos.y() + tab_height + + self._create_overlay_button.setGeometry( + pos_x, pos_y, + width, height + ) + + def _update_create_overlay_visibility(self, global_pos=None): + if global_pos is None: + global_pos = QtGui.QCursor.pos() + + under_mouse = False + my_pos = self.mapFromGlobal(global_pos) + if self.rect().contains(my_pos): + widget_geo = self._overview_widget.get_subset_views_geo() + widget_x = widget_geo.left() + (widget_geo.width() * 0.5) + under_mouse = widget_x < global_pos.x() + self._create_overlay_button.set_under_mouse(under_mouse) + class ErrorsMessageBox(ErrorMessageBox): def __init__(self, error_title, failed_info, message_start, parent): From ea6e924dd95b86053092af0f08790b8e8a77be83 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:31:53 +0100 Subject: [PATCH 1905/2550] use gradient and different color --- openpype/tools/publisher/widgets/widgets.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 507ecedb0f..975a1faa06 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1661,7 +1661,6 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def __init__(self, parent): super(CreateNextPageOverlay, self).__init__(parent) - self._bg_color = QtGui.QColor(127, 127, 255) self._arrow_color = QtGui.QColor(255, 255, 255) change_anim = QtCore.QVariantAnimation() @@ -1839,7 +1838,11 @@ class CreateNextPageOverlay(QtWidgets.QWidget): ) path.closeSubpath() - painter.fillPath(path, self._bg_color) + gradient = QtGui.QLinearGradient(left, pos_y, right, pos_y) + gradient.setColorAt(0, QtGui.QColor(22, 25, 29)) + gradient.setColorAt(1, QtGui.QColor(33, 37, 43)) + + painter.fillPath(path, gradient) src_arrow_path = QtGui.QPainterPath() src_arrow_path.moveTo(arrow_x_start, pos_y - arrow_half_height) From 42b1012e7c320ec783df7ce5c76b76a24e18896e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:41:12 +0100 Subject: [PATCH 1906/2550] use radial gradient --- openpype/tools/publisher/widgets/widgets.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 975a1faa06..c4481d4d9d 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1838,7 +1838,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): ) path.closeSubpath() - gradient = QtGui.QLinearGradient(left, pos_y, right, pos_y) + radius = height * 0.7 + focal = QtCore.QPointF(left, pos_y) + start_p = QtCore.QPointF(right - (width * 0.5), pos_y) + gradient = QtGui.QRadialGradient(start_p, radius, focal) gradient.setColorAt(0, QtGui.QColor(22, 25, 29)) gradient.setColorAt(1, QtGui.QColor(33, 37, 43)) From 17125a62edec43fe4c144485e584be964398aa41 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 21:03:16 +0100 Subject: [PATCH 1907/2550] hiero: adding fallback if incompatible knobs 
from version to version --- openpype/hosts/hiero/api/pipeline.py | 6 +++--- openpype/hosts/hiero/plugins/load/load_effects.py | 7 ++++++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 26c8ebe6d3..c48d404ede 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -134,7 +134,7 @@ def ls(): for item in all_items: container_data = parse_container(item) - if isinstance(container, list): + if isinstance(container_data, list): for _c in container_data: yield _c @@ -187,7 +187,7 @@ def parse_container(item, validate=True): if type(item) == hiero.core.VideoTrack: return_list = [] _data = lib.get_track_openpype_data(item) - log.info("_data: {}".format(_data)) + if not _data: return # convert the data to list and validate them @@ -196,7 +196,7 @@ def parse_container(item, validate=True): return_list.append(cotnainer) return return_list else: - _data = lib.get_track_item_pype_data(item) + _data = lib.get_trackitem_openpype_data(item) return data_to_container(item, _data) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 16c9187ad9..d8a388c6ed 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -94,7 +94,12 @@ class LoadEffects(load.LoaderPlugin): or knob_name == "name" ): continue - node[knob_name].setValue(knob_value) + + try: + node[knob_name].setValue(knob_value) + except NameError: + self.log.warning("Knob: {} cannot be set".format( + knob_name)) # register all loaded children data_imprint["children_names"].append(new_name) From 576903575e78e20ba0c7401061741b3c4cc50218 Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:00:26 +0100 Subject: [PATCH 1908/2550] Project setting --- .../hooks/pre_copy_last_published_workfile.py | 119 ++++++++++++------ .../defaults/project_settings/global.json | 3 +- .../schemas/schema_global_tools.json | 5 + 3 files changed, 88 insertions(+), 39 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 004f9d25e7..312548d2db 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -7,8 +7,10 @@ from openpype.client.entities import ( get_subsets, ) from openpype.lib import PreLaunchHook +from openpype.lib.profiles_filtering import filter_profiles from openpype.modules.base import ModulesManager from openpype.pipeline.load.utils import get_representation_path +from openpype.settings.lib import get_project_settings class CopyLastPublishedWorkfile(PreLaunchHook): @@ -32,9 +34,45 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. 
""" - # TODO setting + project_name = self.data["project_name"] + task_name = self.data["task_name"] + task_type = self.data["task_type"] + host_name = self.application.host_name + + # Check settings has enabled it + project_settings = get_project_settings(project_name) + profiles = project_settings["global"]["tools"]["Workfiles"][ + "last_workfile_on_startup" + ] + filter_data = { + "tasks": task_name, + "task_types": task_type, + "hosts": host_name, + } + last_workfile_settings = filter_profiles(profiles, filter_data) + use_last_published_workfile = last_workfile_settings.get( + "use_last_published_workfile" + ) + if use_last_published_workfile is None: + self.log.info( + ( + "Seems like old version of settings is used." + ' Can\'t access custom templates in host "{}".' + ).format(host_name) + ) + return + elif use_last_published_workfile is False: + self.log.info( + ( + 'Project "{}" has turned off to use last published workfile' + ' as first workfile for host "{}"' + ).format(project_name, host_name) + ) + return + self.log.info("Trying to fetch last published workfile...") + # Check there is no workfile available last_workfile = self.data.get("last_workfile_path") if os.path.exists(last_workfile): self.log.debug( @@ -44,9 +82,6 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return - project_name = self.data["project_name"] - task_name = self.data["task_name"] - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") @@ -65,6 +100,9 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) if not subset_id: + self.log.debug('No any workfile for asset "{}".').format( + asset_doc["name"] + ) return # Get workfile representation @@ -84,41 +122,46 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) - if workfile_representation: # TODO add setting - # Get sync server from Tray, which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if t["sync_server"].sync_server_thread - ), - None, - ) + if not workfile_representation: + self.log.debug( + 'No published workfile for task "{}" and host "{}".' 
+ ).format(task_name, host_name) + return - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, - ) - sync_server.reset_timer() + # Get sync server from Tray, which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) - # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots - ) - local_workfile_dir = os.path.dirname(last_workfile) + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() - # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir - ) + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) + + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index b128564bc2..5b1c750bf4 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -401,7 +401,8 @@ "hosts": [], "task_types": [], "tasks": [], - "enabled": true + "enabled": true, + "use_last_published_workfile": false } ], "open_workfile_tool_on_startup": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index ba446135e2..962008d476 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -149,6 +149,11 @@ "type": "boolean", "key": "enabled", "label": "Enabled" + }, + { + "type": "boolean", + "key": "use_last_published_workfile", + "label": "Use last published workfile" } ] } From 10fb9a141159302d4321d1acb8409fa5f341d7c9 Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:10:12 +0100 Subject: [PATCH 1909/2550] docstring --- openpype/hooks/pre_copy_last_published_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 312548d2db..b1b2fe2366 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -30,6 +30,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): 1- Check if setting for this feature is enabled 2- Check if workfile in work area doesn't exist 3- Check if published workfile exists and is copied locally in publish + 4- Substitute copied published workfile as first workfile Returns: None: This is a void method. 
From bebb9031c18534d06a3e7236f950c16c6cc88c02 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 10:12:26 +0100 Subject: [PATCH 1910/2550] change type of 'IMAGE_EXTENSIONS' and 'VIDEO_EXTENSIONS' to set --- openpype/lib/transcoding.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index e736ba8ef0..0bfccd3443 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -42,7 +42,7 @@ XML_CHAR_REF_REGEX_HEX = re.compile(r"&#x?[0-9a-fA-F]+;") # Regex to parse array attributes ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$") -IMAGE_EXTENSIONS = [ +IMAGE_EXTENSIONS = { ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", @@ -54,15 +54,15 @@ IMAGE_EXTENSIONS = [ ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep", ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", ".xpm", ".xwd" -] +} -VIDEO_EXTENSIONS = [ +VIDEO_EXTENSIONS = { ".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b", ".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v", ".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg", ".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb", ".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv" -] +} def get_transcode_temp_directory(): From 895bfbaae5aaebaf97c30233ae407f12ad52ca7d Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:14:58 +0100 Subject: [PATCH 1911/2550] comment length --- openpype/hooks/pre_copy_last_published_workfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index b1b2fe2366..d342151823 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -129,7 +129,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # Get sync server from Tray, which handles the asynchronous thread instance + # Get sync server from Tray, + # which handles the asynchronous thread instance sync_server = next( ( t["sync_server"] From c49017e6718ee169f052a75689b5624dc36705dc Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:50:30 +0100 Subject: [PATCH 1912/2550] lint --- openpype/hooks/pre_copy_last_published_workfile.py | 4 ++-- openpype/modules/sync_server/sync_server.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index d342151823..cf4edeac9b 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -65,8 +65,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): elif use_last_published_workfile is False: self.log.info( ( - 'Project "{}" has turned off to use last published workfile' - ' as first workfile for host "{}"' + 'Project "{}" has turned off to use last published' + ' workfile as first workfile for host "{}"' ).format(project_name, host_name) ) return diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index def9e6cfd8..353b39c4e1 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -239,7 +239,7 @@ class 
SyncServerThread(threading.Thread): # Event to trigger files have been processed self.files_processed = threading.Event() - + super(SyncServerThread, self).__init__(args=(self.files_processed,)) self.module = module self.loop = None From 32b91ef39feeb4e68eb05b2b4fe060fcf8902884 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 11:32:12 +0100 Subject: [PATCH 1913/2550] Integrate thumbnails plugin is context plugin without family filters --- .../plugins/publish/integrate_thumbnail.py | 345 ++++++++++++------ 1 file changed, 225 insertions(+), 120 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index d8a3a00041..f74c3d9609 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -13,174 +13,279 @@ import sys import errno import shutil import copy +import collections import six import pyblish.api -from openpype.client import get_version_by_id +from openpype.client import get_versions from openpype.client.operations import OperationsSession, new_thumbnail_doc +InstanceFilterResult = collections.namedtuple( + "InstanceFilterResult", + ["instance", "thumbnail_path", "version_id"] +) -class IntegrateThumbnails(pyblish.api.InstancePlugin): + +class IntegrateThumbnails(pyblish.api.ContextPlugin): """Integrate Thumbnails for Openpype use in Loaders.""" label = "Integrate Thumbnails" order = pyblish.api.IntegratorOrder + 0.01 - families = ["review"] required_context_keys = [ "project", "asset", "task", "subset", "version" ] - def process(self, instance): - context_thumbnail_path = instance.context.get("thumbnailPath") + def process(self, context): + # Filter instances which can be used for integration + filtered_instance_items = self._prepare_instances(context) + if not filtered_instance_items: + self.log.info( + "All instances were filtered. Thumbnail integration skipped." + ) + return + # Initial validation of available templated and required keys env_key = "AVALON_THUMBNAIL_ROOT" thumbnail_root_format_key = "{thumbnail_root}" thumbnail_root = os.environ.get(env_key) or "" - published_repres = instance.data.get("published_representations") - if not published_repres: - self.log.debug( - "There are no published representations on the instance." - ) - return - - anatomy = instance.context.data["anatomy"] + anatomy = context.data["anatomy"] project_name = anatomy.project_name if "publish" not in anatomy.templates: - self.log.warning("Anatomy is missing the \"publish\" key!") + self.log.warning( + "Anatomy is missing the \"publish\" key. Skipping." + ) return if "thumbnail" not in anatomy.templates["publish"]: self.log.warning(( - "There is no \"thumbnail\" template set for the project \"{}\"" + "There is no \"thumbnail\" template set for the project" + " \"{}\". Skipping." ).format(project_name)) return thumbnail_template = anatomy.templates["publish"]["thumbnail"] + if not thumbnail_template: + self.log.info("Thumbnail template is not filled. Skipping.") + return + if ( not thumbnail_root and thumbnail_root_format_key in thumbnail_template ): - self.log.warning(( - "{} is not set. Skipping thumbnail integration." - ).format(env_key)) + self.log.warning(("{} is not set. 
Skipping.").format(env_key)) return - version_id = None - thumb_repre = None - thumb_repre_anatomy_data = None - for repre_info in published_repres.values(): - repre = repre_info["representation"] - if version_id is None: - version_id = repre["parent"] + # Collect verion ids from all filtered instance + version_ids = { + instance_items.version_id + for instance_items in filtered_instance_items + } + # Query versions + version_docs = get_versions( + project_name, + version_ids=version_ids, + hero=True, + fields=["_id", "type", "name"] + ) + # Store version by their id (converted to string) + version_docs_by_str_id = { + str(version_doc["_id"]): version_doc + for version_doc in version_docs + } + self._integrate_thumbnails( + filtered_instance_items, + version_docs_by_str_id, + anatomy, + thumbnail_root + ) - if repre["name"].lower() == "thumbnail": - thumb_repre = repre - thumb_repre_anatomy_data = repre_info["anatomy_data"] + def _prepare_instances(self, context): + context_thumbnail_path = context.get("thumbnailPath") + valid_context_thumbnail = False + if context_thumbnail_path and os.path.exists(context_thumbnail_path): + valid_context_thumbnail = True + + filtered_instances = [] + for instance in context: + instance_label = self._get_instance_label(instance) + # Skip instances without published representations + # - there is no place where to put the thumbnail + published_repres = instance.data.get("published_representations") + if not published_repres: + self.log.debug(( + "There are no published representations" + " on the instance {}." + ).format(instance_label)) + continue + + # Find thumbnail path on instance + thumbnail_path = self._get_instance_thumbnail_path( + published_repres) + if thumbnail_path: + self.log.debug(( + "Found thumbnail path for instance \"{}\"." + " Thumbnail path: {}" + ).format(instance_label, thumbnail_path)) + + elif valid_context_thumbnail: + # Use context thumbnail path if is available + thumbnail_path = context_thumbnail_path + self.log.debug(( + "Using context thumbnail path for instance \"{}\"." + " Thumbnail path: {}" + ).format(instance_label, thumbnail_path)) + + # Skip instance if thumbnail path is not available for it + if not thumbnail_path: + self.log.info(( + "Skipping thumbnail integration for instance \"{}\"." + " Instance and context" + " thumbnail paths are not available." + ).format(instance_label)) + continue + + version_id = str(self._get_version_id(published_repres)) + filtered_instances.append( + InstanceFilterResult(instance, thumbnail_path, version_id) + ) + return filtered_instances + + def _get_version_id(self, published_representations): + for repre_info in published_representations.values(): + return repre_info["representation"]["parent"] + + def _get_instance_thumbnail_path(self, published_representations): + thumb_repre_doc = None + for repre_info in published_representations.values(): + repre_doc = repre_info["representation"] + if repre_doc["name"].lower() == "thumbnail": + thumb_repre_doc = repre_doc break - # Use context thumbnail (if is available) - if not thumb_repre: + if thumb_repre_doc is None: self.log.debug( "There is not representation with name \"thumbnail\"" ) - src_full_path = context_thumbnail_path - else: - # Get full path to thumbnail file from representation - src_full_path = os.path.normpath(thumb_repre["data"]["path"]) + return None - if not os.path.exists(src_full_path): - self.log.warning("Thumbnail file was not found. 
Path: {}".format( - src_full_path + path = thumb_repre_doc["data"]["path"] + if not os.path.exists(path): + self.log.warning( + "Thumbnail file cannot be found. Path: {}".format(path) + ) + return None + return os.path.normpath(path) + + def _integrate_thumbnails( + self, + filtered_instance_items, + version_docs_by_str_id, + anatomy, + thumbnail_root + ): + op_session = OperationsSession() + project_name = anatomy.project_name + + for instance_item in filtered_instance_items: + instance, thumbnail_path, version_id = instance_item + instance_label = self._get_instance_label(instance) + version_doc = version_docs_by_str_id.get(version_id) + if not version_doc: + self.log.warning(( + "Version entity for instance \"{}\" was not found." + ).format(instance_label)) + continue + + filename, file_extension = os.path.splitext(thumbnail_path) + # Create id for mongo entity now to fill anatomy template + thumbnail_doc = new_thumbnail_doc() + thumbnail_id = thumbnail_doc["_id"] + + # Prepare anatomy template fill data + template_data = copy.deepcopy(instance.data["anatomyData"]) + template_data.update({ + "_id": str(thumbnail_id), + "ext": file_extension[1:], + "name": "thumbnail", + "thumbnail_root": thumbnail_root, + "thumbnail_type": "thumbnail" + }) + + anatomy_filled = anatomy.format(template_data) + thumbnail_template = anatomy.templates["publish"]["thumbnail"] + template_filled = anatomy_filled["publish"]["thumbnail"] + + dst_full_path = os.path.normpath(str(template_filled)) + self.log.debug("Copying file .. {} -> {}".format( + thumbnail_path, dst_full_path )) - return + dirname = os.path.dirname(dst_full_path) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno != errno.EEXIST: + tp, value, tb = sys.exc_info() + six.reraise(tp, value, tb) - version = get_version_by_id(project_name, version_id) - if not version: - raise AssertionError( - "There does not exist version with id {}".format( - str(version_id) - ) + shutil.copy(thumbnail_path, dst_full_path) + + # Clean template data from keys that are dynamic + for key in ("_id", "thumbnail_root"): + template_data.pop(key, None) + + repre_context = template_filled.used_values + for key in self.required_context_keys: + value = template_data.get(key) + if not value: + continue + repre_context[key] = template_data[key] + + thumbnail_doc["data"] = { + "template": thumbnail_template, + "template_data": repre_context + } + op_session.create_entity( + project_name, thumbnail_doc["type"], thumbnail_doc + ) + # Create thumbnail entity + self.log.debug( + "Creating entity in database {}".format(str(thumbnail_doc)) ) - filename, file_extension = os.path.splitext(src_full_path) - # Create id for mongo entity now to fill anatomy template - thumbnail_doc = new_thumbnail_doc() - thumbnail_id = thumbnail_doc["_id"] + # Set thumbnail id for version + op_session.update_entity( + project_name, + version_doc["type"], + version_doc["_id"], + {"data.thumbnail_id": thumbnail_id} + ) + if version_doc["type"] == "hero_version": + version_name = "Hero" + else: + version_name = version_doc["name"] + self.log.debug("Setting thumbnail for version \"{}\" <{}>".format( + version_name, version_id + )) - # Prepare anatomy template fill data - template_data = copy.deepcopy(thumb_repre_anatomy_data) - template_data.update({ - "_id": str(thumbnail_id), - "ext": file_extension[1:], - "thumbnail_root": thumbnail_root, - "thumbnail_type": "thumbnail" - }) - - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["thumbnail"] - - 
dst_full_path = os.path.normpath(str(template_filled)) - self.log.debug( - "Copying file .. {} -> {}".format(src_full_path, dst_full_path) - ) - dirname = os.path.dirname(dst_full_path) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno != errno.EEXIST: - tp, value, tb = sys.exc_info() - six.reraise(tp, value, tb) - - shutil.copy(src_full_path, dst_full_path) - - # Clean template data from keys that are dynamic - for key in ("_id", "thumbnail_root"): - template_data.pop(key, None) - - repre_context = template_filled.used_values - for key in self.required_context_keys: - value = template_data.get(key) - if not value: - continue - repre_context[key] = template_data[key] - - op_session = OperationsSession() - - thumbnail_doc["data"] = { - "template": thumbnail_template, - "template_data": repre_context - } - op_session.create_entity( - project_name, thumbnail_doc["type"], thumbnail_doc - ) - # Create thumbnail entity - self.log.debug( - "Creating entity in database {}".format(str(thumbnail_doc)) - ) - - # Set thumbnail id for version - op_session.update_entity( - project_name, - version["type"], - version["_id"], - {"data.thumbnail_id": thumbnail_id} - ) - self.log.debug("Setting thumbnail for version \"{}\" <{}>".format( - version["name"], str(version["_id"]) - )) - - asset_entity = instance.data["assetEntity"] - op_session.update_entity( - project_name, - asset_entity["type"], - asset_entity["_id"], - {"data.thumbnail_id": thumbnail_id} - ) - self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format( - asset_entity["name"], str(version["_id"]) - )) + asset_entity = instance.data["assetEntity"] + op_session.update_entity( + project_name, + asset_entity["type"], + asset_entity["_id"], + {"data.thumbnail_id": thumbnail_id} + ) + self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format( + asset_entity["name"], version_id + )) op_session.commit() + + def _get_instance_label(self, instance): + return ( + instance.data.get("label") + or instance.data.get("name") + or "N/A" + ) From e4e6044198a7240e21387c2931926f7d0cffdbc2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 11:56:27 +0100 Subject: [PATCH 1914/2550] fix last pixel --- openpype/tools/publisher/widgets/widgets.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index c4481d4d9d..b8fb2d38b9 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1800,9 +1800,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): pos_y = rect.center().y() left = rect.left() + offset - right = rect.right() top = rect.top() - bottom = rect.bottom() + # Right and bootm is pixel index + right = rect.right() + 1 + bottom = rect.bottom() + 1 width = right - left height = bottom - top From 049de296240198cdf296d0ff411c2601f1568589 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 11:57:32 +0100 Subject: [PATCH 1915/2550] handle leave event --- openpype/tools/publisher/window.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index ddac19f2e5..2063cdab96 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -377,6 +377,10 @@ class PublisherWindow(QtWidgets.QDialog): self._reset_on_show = True super(PublisherWindow, self).closeEvent(event) + def leaveEvent(self, event): + super(PublisherWindow, self).leaveEvent(event) + 
self._update_create_overlay_visibility() + def eventFilter(self, obj, event): if event.type() == QtCore.QEvent.MouseMove: self._update_create_overlay_visibility(event.globalPos()) From 638f8238250cdd4ce7935c9c637dd0c3e80746ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 12:07:03 +0100 Subject: [PATCH 1916/2550] run openpype subprocess using 'Popen' instead of 'check_output' function --- .../repository/custom/plugins/GlobalJobPreLoad.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 61b95cf06d..b8d90cdf69 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -193,10 +193,17 @@ def inject_openpype_environment(deadlinePlugin): env["AVALON_TIMEOUT"] = "5000" print(">>> Executing: {}".format(" ".join(args))) - std_output = subprocess.check_output(args, - cwd=os.path.dirname(exe), - env=env) - print(">>> Process result {}".format(std_output)) + proc = subprocess.Popen( + args, + cwd=os.path.dirname(exe), + env=env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + std_output, std_err = proc.communicate() + print(">>> Process result {}\n".format(std_output, std_err)) + if proc.returncode != 0: + raise RuntimeError("OpenPype process failed.") print(">>> Loading file ...") with open(export_url) as fp: From 298fbcae701f2bcc2833e8b5c4310c40c05fe380 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 12:07:30 +0100 Subject: [PATCH 1917/2550] removed unnecessary headless environment (handled by '--headless' arg) --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index b8d90cdf69..a25a1b7e93 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -189,7 +189,6 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Missing OPENPYPE_MONGO env var, process won't work") env = os.environ - env["OPENPYPE_HEADLESS_MODE"] = "1" env["AVALON_TIMEOUT"] = "5000" print(">>> Executing: {}".format(" ".join(args))) From c178d6e2a1f9251ba9cfb09d1ad62cf62a30e411 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 12:07:37 +0100 Subject: [PATCH 1918/2550] formatting changes --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index a25a1b7e93..80f91607bc 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -164,7 +164,7 @@ def inject_openpype_environment(deadlinePlugin): args = [ exe, "--headless", - 'extractenvironments', + "extractenvironments", export_url ] From 4161b3b48db137a7bdd1b998612ea6450cc3ea54 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 12:56:53 +0100 Subject: [PATCH 1919/2550] use Deadline 'ProcessUtils' to run openpype process --- .../custom/plugins/GlobalJobPreLoad.py | 31 +++++++++---------- 
1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 80f91607bc..61d0c8eb86 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -7,7 +7,12 @@ import json import platform import uuid import re -from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils +from Deadline.Scripting import ( + RepositoryUtils, + FileUtils, + DirectoryUtils, + ProcessUtils, +) def get_openpype_version_from_path(path, build=True): @@ -162,7 +167,6 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Temporary path: {}".format(export_url)) args = [ - exe, "--headless", "extractenvironments", export_url @@ -188,21 +192,16 @@ def inject_openpype_environment(deadlinePlugin): if not os.environ.get("OPENPYPE_MONGO"): print(">>> Missing OPENPYPE_MONGO env var, process won't work") - env = os.environ - env["AVALON_TIMEOUT"] = "5000" + os.environ["AVALON_TIMEOUT"] = "5000" - print(">>> Executing: {}".format(" ".join(args))) - proc = subprocess.Popen( - args, - cwd=os.path.dirname(exe), - env=env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - std_output, std_err = proc.communicate() - print(">>> Process result {}\n".format(std_output, std_err)) - if proc.returncode != 0: - raise RuntimeError("OpenPype process failed.") + args_str = subprocess.list2cmdline(args) + print(">>> Executing: {} {}".format(exe, args_str)) + process = ProcessUtils.SpawnProcess(exe, args_str, os.path.dirname(exe)) + ProcessUtils.WaitForExit(process, -1) + if process.ExitCode != 0: + raise RuntimeError( + "Failed to run OpenPype process to extract environments." 
+ ) print(">>> Loading file ...") with open(export_url) as fp: From f1e198ea94b0c13d90a7419e2b5d3a4dc1769b60 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 12:58:22 +0100 Subject: [PATCH 1920/2550] fix too long line --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 61d0c8eb86..9b35c9502d 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -196,7 +196,9 @@ def inject_openpype_environment(deadlinePlugin): args_str = subprocess.list2cmdline(args) print(">>> Executing: {} {}".format(exe, args_str)) - process = ProcessUtils.SpawnProcess(exe, args_str, os.path.dirname(exe)) + process = ProcessUtils.SpawnProcess( + exe, args_str, os.path.dirname(exe) + ) ProcessUtils.WaitForExit(process, -1) if process.ExitCode != 0: raise RuntimeError( From 02fb9561d7f25a1146d9c59dd2306bce7e166edf Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 1 Nov 2022 20:36:58 +0800 Subject: [PATCH 1921/2550] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index f39aa56650..68aeb24069 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -13,7 +13,7 @@ class AlembicStandinLoader(load.LoaderPlugin): families = ["model", "pointcache"] representations = ["abc"] - label = "Import Alembic as Standin" + label = "Import Alembic as Arnold Standin" order = -5 icon = "code-fork" color = "orange" @@ -21,7 +21,6 @@ class AlembicStandinLoader(load.LoaderPlugin): def load(self, context, name, namespace, options): import maya.cmds as cmds - import pymel.core as pm import mtoa.ui.arnoldmenu from openpype.hosts.maya.api.pipeline import containerise from openpype.hosts.maya.api.lib import unique_namespace @@ -42,7 +41,7 @@ class AlembicStandinLoader(load.LoaderPlugin): # Root group label = "{}:{}".format(namespace, name) - root = pm.group(name=label, empty=True) + root = cmds.group(name=label, empty=True) settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] @@ -55,16 +54,17 @@ class AlembicStandinLoader(load.LoaderPlugin): transform_name = label + "_ABC" - standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn()) - standin = standinShape.getParent() - standin.rename(transform_name) + standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0] + standin = cmds.listRelatives(standinShape, parent=True, typ="transform") + standin = cmds.rename(standin, transform_name) + standinShape = cmds.listRelatives(standin, children=True)[0] - pm.parent(standin, root) + cmds.parent(standin, root) # Set the standin filepath - standinShape.dso.set(self.fname) + cmds.setAttr(standinShape + ".dso", self.fname, type="string") if frameStart is not None: - standinShape.useFrameExtension.set(1) + cmds.setAttr(standinShape + ".useFrameExtension", 1) nodes = [root, standin] self[:] = nodes From 6b0d25cb7c6f3c0dd084bbccf0b5f06fe8fe1341 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 1 Nov 2022 20:38:51 +0800 Subject: [PATCH 1922/2550] 
Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 68aeb24069..5d6c52eac9 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -55,7 +55,8 @@ class AlembicStandinLoader(load.LoaderPlugin): transform_name = label + "_ABC" standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0] - standin = cmds.listRelatives(standinShape, parent=True, typ="transform") + standin = cmds.listRelatives(standinShape, parent=True, + typ="transform") standin = cmds.rename(standin, transform_name) standinShape = cmds.listRelatives(standin, children=True)[0] From f2a2cc1c38304d33eb1ebf410961bc9bf4f23da6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 14:13:15 +0100 Subject: [PATCH 1923/2550] change import of 'AttributeDefinitionsWidget' --- openpype/tools/workfile_template_build/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/workfile_template_build/window.py b/openpype/tools/workfile_template_build/window.py index ea4e2fec5a..22e26be451 100644 --- a/openpype/tools/workfile_template_build/window.py +++ b/openpype/tools/workfile_template_build/window.py @@ -3,7 +3,7 @@ from Qt import QtWidgets from openpype import style from openpype.lib import Logger from openpype.pipeline import legacy_io -from openpype.widgets.attribute_defs import AttributeDefinitionsWidget +from openpype.tools.attribute_defs import AttributeDefinitionsWidget class WorkfileBuildPlaceholderDialog(QtWidgets.QDialog): From 990647ef4f7c585b3a5e4c4fec6c5cef66e8b7f7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 14:36:19 +0100 Subject: [PATCH 1924/2550] 'AbtractAttrDef' inherit from 'object' --- openpype/lib/attribute_definitions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index bb0b07948f..589a4ef9ab 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -91,7 +91,7 @@ class AbstractAttrDefMeta(ABCMeta): @six.add_metaclass(AbstractAttrDefMeta) -class AbtractAttrDef: +class AbtractAttrDef(object): """Abstraction of attribute definiton. 
Each attribute definition must have implemented validation and From 563447e1a4b61cc404fbda6e92a0eec5acbe10c3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 14:36:31 +0100 Subject: [PATCH 1925/2550] add stretch to dialog --- openpype/tools/attribute_defs/dialog.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/attribute_defs/dialog.py b/openpype/tools/attribute_defs/dialog.py index e6c11516c8..69923d54e5 100644 --- a/openpype/tools/attribute_defs/dialog.py +++ b/openpype/tools/attribute_defs/dialog.py @@ -20,7 +20,8 @@ class AttributeDefinitionsDialog(QtWidgets.QDialog): btns_layout.addWidget(cancel_btn, 0) main_layout = QtWidgets.QVBoxLayout(self) - main_layout.addWidget(attrs_widget, 1) + main_layout.addWidget(attrs_widget, 0) + main_layout.addStretch(1) main_layout.addWidget(btns_widget, 0) ok_btn.clicked.connect(self.accept) From cec37f0101037840ba29a8f2daf7caf8a961a0c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 14:36:50 +0100 Subject: [PATCH 1926/2550] loader can handle both qargparse and attribute definitions --- openpype/tools/loader/lib.py | 25 ++++++++++++++++++------- openpype/tools/utils/widgets.py | 24 ++++++++++++++++++++++-- 2 files changed, 40 insertions(+), 9 deletions(-) diff --git a/openpype/tools/loader/lib.py b/openpype/tools/loader/lib.py index 28e94237ec..78a25d8d85 100644 --- a/openpype/tools/loader/lib.py +++ b/openpype/tools/loader/lib.py @@ -2,6 +2,8 @@ import inspect from Qt import QtGui import qtawesome +from openpype.lib.attribute_definitions import AbtractAttrDef +from openpype.tools.attribute_defs import AttributeDefinitionsDialog from openpype.tools.utils.widgets import ( OptionalAction, OptionDialog @@ -34,21 +36,30 @@ def get_options(action, loader, parent, repre_contexts): None when dialog was closed or cancelled, in all other cases {} if no options """ + # Pop option dialog options = {} loader_options = loader.get_options(repre_contexts) - if getattr(action, "optioned", False) and loader_options: + if not getattr(action, "optioned", False) or not loader_options: + return options + + if isinstance(loader_options[0], AbtractAttrDef): + qargparse_options = False + dialog = AttributeDefinitionsDialog(loader_options, parent) + else: + qargparse_options = True dialog = OptionDialog(parent) - dialog.setWindowTitle(action.label + " Options") dialog.create(loader_options) - if not dialog.exec_(): - return None + dialog.setWindowTitle(action.label + " Options") - # Get option - options = dialog.parse() + if not dialog.exec_(): + return None - return options + # Get option + if qargparse_options: + return dialog.parse() + return dialog.get_values() def add_representation_loaders_to_menu(loaders, menu, repre_contexts): diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index ca65182124..05513bc96e 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -3,10 +3,12 @@ import logging from Qt import QtWidgets, QtCore, QtGui import qargparse import qtawesome + from openpype.style import ( get_objected_colors, get_style_image_path ) +from openpype.lib.attribute_definitions import AbtractAttrDef log = logging.getLogger(__name__) @@ -317,8 +319,26 @@ class OptionalAction(QtWidgets.QWidgetAction): def set_option_tip(self, options): sep = "\n\n" - mak = (lambda opt: opt["name"] + " :\n " + opt["help"]) - self.option_tip = sep.join(mak(opt) for opt in options) + if not options or not isinstance(options[0], AbtractAttrDef): + mak = (lambda opt: 
opt["name"] + " :\n " + opt["help"]) + self.option_tip = sep.join(mak(opt) for opt in options) + return + + option_items = [] + for option in options: + option_lines = [] + if option.label: + option_lines.append( + "{} ({}) :".format(option.label, option.key) + ) + else: + option_lines.append("{} :".format(option.key)) + + if option.tooltip: + option_lines.append(" - {}".format(option.tooltip)) + option_items.append("\n".join(option_lines)) + + self.option_tip = sep.join(option_items) def on_option(self): self.optioned = True From 1738a177d2adaaaa8fa3cc95806c327673d0a866 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 14:43:34 +0100 Subject: [PATCH 1927/2550] tvpaint loaders are using attribute definitions instead of qargparse --- .../hosts/tvpaint/plugins/load/load_image.py | 44 +++++++++--------- .../plugins/load/load_reference_image.py | 45 ++++++++++--------- 2 files changed, 46 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/load/load_image.py b/openpype/hosts/tvpaint/plugins/load/load_image.py index 151db94135..5283d04355 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_image.py +++ b/openpype/hosts/tvpaint/plugins/load/load_image.py @@ -1,4 +1,4 @@ -import qargparse +from openpype.lib.attribute_definitions import BoolDef from openpype.hosts.tvpaint.api import plugin from openpype.hosts.tvpaint.api.lib import execute_george_through_file @@ -27,26 +27,28 @@ class ImportImage(plugin.Loader): "preload": True } - options = [ - qargparse.Boolean( - "stretch", - label="Stretch to project size", - default=True, - help="Stretch loaded image/s to project resolution?" - ), - qargparse.Boolean( - "timestretch", - label="Stretch to timeline length", - default=True, - help="Clip loaded image/s to timeline length?" - ), - qargparse.Boolean( - "preload", - label="Preload loaded image/s", - default=True, - help="Preload image/s?" - ) - ] + @classmethod + def get_options(cls, contexts): + return [ + BoolDef( + "stretch", + label="Stretch to project size", + default=cls.defaults["stretch"], + tooltip="Stretch loaded image/s to project resolution?" + ), + BoolDef( + "timestretch", + label="Stretch to timeline length", + default=cls.defaults["timestretch"], + tooltip="Clip loaded image/s to timeline length?" + ), + BoolDef( + "preload", + label="Preload loaded image/s", + default=cls.defaults["preload"], + tooltip="Preload image/s?" + ) + ] def load(self, context, name, namespace, options): stretch = options.get("stretch", self.defaults["stretch"]) diff --git a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py index 393236fba6..7f7a68cc41 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_reference_image.py +++ b/openpype/hosts/tvpaint/plugins/load/load_reference_image.py @@ -1,7 +1,6 @@ import collections -import qargparse - +from openpype.lib.attribute_definitions import BoolDef from openpype.pipeline import ( get_representation_context, register_host, @@ -42,26 +41,28 @@ class LoadImage(plugin.Loader): "preload": True } - options = [ - qargparse.Boolean( - "stretch", - label="Stretch to project size", - default=True, - help="Stretch loaded image/s to project resolution?" - ), - qargparse.Boolean( - "timestretch", - label="Stretch to timeline length", - default=True, - help="Clip loaded image/s to timeline length?" - ), - qargparse.Boolean( - "preload", - label="Preload loaded image/s", - default=True, - help="Preload image/s?" 
- ) - ] + @classmethod + def get_options(cls, contexts): + return [ + BoolDef( + "stretch", + label="Stretch to project size", + default=cls.defaults["stretch"], + tooltip="Stretch loaded image/s to project resolution?" + ), + BoolDef( + "timestretch", + label="Stretch to timeline length", + default=cls.defaults["timestretch"], + tooltip="Clip loaded image/s to timeline length?" + ), + BoolDef( + "preload", + label="Preload loaded image/s", + default=cls.defaults["preload"], + tooltip="Preload image/s?" + ) + ] def load(self, context, name, namespace, options): stretch = options.get("stretch", self.defaults["stretch"]) From 72ce97a6285e1e31782b8c9a3c5e0d6bb49ab56c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:07:06 +0100 Subject: [PATCH 1928/2550] general: fixing loader for multiselection --- openpype/tools/loader/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index d37ce500e0..826c7110da 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -515,7 +515,7 @@ class SubsetWidget(QtWidgets.QWidget): if not one_item_selected: # Filter loaders from first subset by intersected combinations for repre, loader in first_loaders: - if (repre["name"], loader) not in found_combinations: + if (repre["name"].lower(), loader) not in found_combinations: continue loaders.append((repre, loader)) From 761d624b2e40901be713068157ad7d7aeaeb3bb4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 16:07:50 +0100 Subject: [PATCH 1929/2550] fix comparison of repre name --- openpype/tools/loader/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index d37ce500e0..826c7110da 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -515,7 +515,7 @@ class SubsetWidget(QtWidgets.QWidget): if not one_item_selected: # Filter loaders from first subset by intersected combinations for repre, loader in first_loaders: - if (repre["name"], loader) not in found_combinations: + if (repre["name"].lower(), loader) not in found_combinations: continue loaders.append((repre, loader)) From 3dd115feef6c02e0effe7b44874c63056ed8a775 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:38:41 +0100 Subject: [PATCH 1930/2550] hiero: return specific container name --- openpype/hosts/hiero/api/lib.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index d04a710df1..e340209207 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -394,7 +394,7 @@ def get_track_openpype_tag(track): return tag -def get_track_openpype_data(track): +def get_track_openpype_data(track, container_name=None): """ Get track's openpype tag data. 
@@ -416,12 +416,16 @@ def get_track_openpype_data(track): for obj_name, obj_data in tag_data.items(): obj_name = obj_name.replace("tag.", "") - print(obj_name) + if obj_name in ["applieswhole", "note", "label"]: continue return_data[obj_name] = json.loads(obj_data) - return return_data + return ( + return_data[container_name] + if container_name + else return_data + ) @deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") From 393692559e3f57b5ed4db333e8c5e2c997801437 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:39:08 +0100 Subject: [PATCH 1931/2550] hiero: deep copy dicts --- openpype/hosts/hiero/api/pipeline.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index c48d404ede..3475bc62e4 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -1,6 +1,7 @@ """ Basic avalon integration """ +from copy import deepcopy import os import contextlib from collections import OrderedDict @@ -225,19 +226,19 @@ def update_container(item, data=None): if type(item) == hiero.core.VideoTrack: # form object data for test - object_name = "{}_{}".format( - data["name"], data["namespace"]) + object_name = data["objectName"] # get all available containers containers = lib.get_track_openpype_data(item) - for obj_name, container in containers.items(): - # ignore all which are not the same object - if object_name != obj_name: - continue - # update data in container - updated_container = update_container_data(container, data) - # merge updated container back to containers - containers.update(updated_container) + container = lib.get_track_openpype_data(item, object_name) + + containers = deepcopy(containers) + container = deepcopy(container) + + # update data in container + updated_container = update_container_data(container, data) + # merge updated container back to containers + containers.update({object_name: updated_container}) return bool(lib.set_track_openpype_tag(item, containers)) else: From 5b77f92d0bbf9cedb2f6c7b2a81964c45ccabd73 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:39:23 +0100 Subject: [PATCH 1932/2550] hiero: removing obsolete code --- openpype/hosts/hiero/api/tags.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index 918af3dc1f..cb7bc14edb 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -86,12 +86,6 @@ def update_tag(tag, data): # get metadata key from data data_mtd = data.get("metadata", {}) - # # due to hiero bug we have to make sure keys which are not existent in - # # data are cleared of value by `None` - # for _mk in mtd.dict().keys(): - # if _mk.replace("tag.", "") not in data_mtd.keys(): - # mtd.setValue(_mk, str(None)) - # set all data metadata to tag metadata for _k, _v in data_mtd.items(): value = str(_v) From 8c715a98aaa8bf4343b35f565400197ced021b0a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:39:59 +0100 Subject: [PATCH 1933/2550] hiero: update effects finish --- .../hosts/hiero/plugins/load/load_effects.py | 145 ++++++++++++------ 1 file changed, 101 insertions(+), 44 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index d8a388c6ed..3e5225ba22 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ 
b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -1,11 +1,16 @@ import json from collections import OrderedDict -from pprint import pprint import six +from openpype.client import ( + get_version_by_id +) + from openpype.pipeline import ( AVALON_CONTAINER_ID, - load + load, + legacy_io, + get_representation_path ) from openpype.hosts.hiero import api as phiero @@ -40,18 +45,12 @@ class LoadEffects(load.LoaderPlugin): active_sequence, "LoadedEffects") # get main variables - version = context["version"] - version_data = version.get("data", {}) - vname = version.get("name", None) namespace = namespace or context["asset"]["name"] object_name = "{}_{}".format(name, namespace) clip_in = context["asset"]["data"]["clipIn"] clip_out = context["asset"]["data"]["clipOut"] data_imprint = { - "source": version_data["source"], - "version": vname, - "author": version_data["author"], "objectName": object_name, "children_names": [] } @@ -59,6 +58,31 @@ class LoadEffects(load.LoaderPlugin): # getting file path file = self.fname.replace("\\", "/") + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint + ): + self.containerise( + active_track, + name=name, + namespace=namespace, + object_name=object_name, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def _shared_loading( + self, + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=False + ): # getting data from json file with unicode conversion with open(file, "r") as f: json_f = {self.byteify(key): self.byteify(value) @@ -74,9 +98,6 @@ class LoadEffects(load.LoaderPlugin): loaded = False for index_order, (ef_name, ef_val) in enumerate(nodes_order.items()): - pprint("_" * 100) - pprint(ef_name) - pprint(ef_val) new_name = "{}_loaded".format(ef_name) if new_name not in used_subtracks: effect_track_item = active_track.createEffect( @@ -87,46 +108,82 @@ class LoadEffects(load.LoaderPlugin): ) effect_track_item.setName(new_name) - node = effect_track_item.node() - for knob_name, knob_value in ef_val["node"].items(): - if ( - not knob_value - or knob_name == "name" - ): - continue + else: + effect_track_item = used_subtracks[new_name] - try: - node[knob_name].setValue(knob_value) - except NameError: - self.log.warning("Knob: {} cannot be set".format( - knob_name)) + node = effect_track_item.node() + for knob_name, knob_value in ef_val["node"].items(): + if ( + not knob_value + or knob_name == "name" + ): + continue - # register all loaded children - data_imprint["children_names"].append(new_name) - # make sure containerisation will happen - loaded = True + try: + node[knob_name].setValue(knob_value) + except NameError: + self.log.warning("Knob: {} cannot be set".format( + knob_name)) - if not loaded: - return + # register all loaded children + data_imprint["children_names"].append(new_name) - self.containerise( - active_track, - name=name, - namespace=namespace, - object_name=object_name, - context=context, - loader=self.__class__.__name__, - data=data_imprint) + # make sure containerisation will happen + loaded = True + + return loaded def update(self, container, representation): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - + """ Updating previously loaded effects """ - pass + active_track = container["_item"] + file = get_representation_path(representation).replace("\\", "/") + + # get main variables + name = container['name'] + namespace = container['namespace'] + + # get timeline in out data + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + version_data = version_doc["data"] + clip_in = version_data["clipIn"] + clip_out = version_data["clipOut"] + + object_name = "{}_{}".format(name, namespace) + + # Disable previously created nodes + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + container = phiero.get_track_openpype_data( + active_track, object_name + ) + + loaded_subtrack_items = container["children_names"] + for loaded_stitem in loaded_subtrack_items: + if loaded_stitem not in used_subtracks: + continue + item_to_remove = used_subtracks.pop(loaded_stitem) + item_to_remove.node()["enable"].setValue(0) + + data_imprint = { + "objectName": object_name, + "name": name, + "representation": str(representation["_id"]), + "children_names": [] + } + + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=True + ): + return phiero.update_container(active_track, data_imprint) def reorder_nodes(self, data): new_order = OrderedDict() From f3b038ec7df4e77be2a251d3c84722736dc832cc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:45:27 +0100 Subject: [PATCH 1934/2550] hiero: removing unused attribute --- openpype/hosts/hiero/plugins/load/load_effects.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 3e5225ba22..fab426e58d 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -25,7 +25,6 @@ class LoadEffects(load.LoaderPlugin): order = 0 icon = "cc" color = "white" - ignore_attr = ["useLifetime"] def load(self, context, name, namespace, data): """ From 7df622df0cfd2af6092faf5918e75e97e1658473 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 18:29:21 +0100 Subject: [PATCH 1935/2550] fix thumbnail refreshing --- openpype/pipeline/create/context.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 71338f96e0..4fd460ffea 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1159,9 +1159,7 @@ class CreateContext: for instance_id, path in self.thumbnail_paths_by_instance_id.items(): instance_available = True if instance_id is not None: - instance_available = ( - instance_id not in self._instances_by_id - ) + instance_available = instance_id in self._instances_by_id if ( not instance_available @@ -1178,13 +1176,13 @@ class CreateContext: # Give ability to store shared data for collection phase self._collection_shared_data = {} - self.refresh_thumbnails() def reset_finalization(self): """Cleanup of attributes after reset.""" # Stop access to collection shared data self._collection_shared_data = None + self.refresh_thumbnails() def reset_avalon_context(self): """Give ability to reset avalon context. 
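Note on the "fix thumbnail refreshing" patch above: the two hunks only flip a membership test and move the refresh call, so the intent is easy to misread in the diff alone. Below is a minimal standalone sketch of the corrected behaviour; the function signature and parameter names are invented for illustration and the cleanup of entries that fail the check is assumed from surrounding code not shown in the hunk.

def refresh_thumbnails(thumbnail_paths_by_id, existing_instance_ids):
    """Simplified stand-in for CreateContext.refresh_thumbnails().

    Keep the context thumbnail (stored under key None) and thumbnails of
    instances that were actually collected; drop the rest.
    """
    kept = {}
    for instance_id, path in thumbnail_paths_by_id.items():
        # The fix: an instance thumbnail is available when its id IS among
        # the collected instances (the previous test used "not in").
        if instance_id is None or instance_id in existing_instance_ids:
            kept[instance_id] = path
    return kept

Moving the real refresh call into reset_finalization() means the collected instance ids are already known by the time this check runs, which is presumably why the call was relocated out of reset_instances().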
From d655a53136e724179da0889d0e508b607d9d173c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 18:45:07 +0100 Subject: [PATCH 1936/2550] use objected colors from styles --- openpype/tools/publisher/widgets/widgets.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index b8fb2d38b9..444ad4c7dc 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1661,7 +1661,15 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def __init__(self, parent): super(CreateNextPageOverlay, self).__init__(parent) - self._arrow_color = QtGui.QColor(255, 255, 255) + self._arrow_color = ( + get_objected_colors("bg-buttons").get_qcolor() + ) + self._gradient_start_color = ( + get_objected_colors("publisher", "tab-bg").get_qcolor() + ) + self._gradient_end_color = ( + get_objected_colors("bg-inputs").get_qcolor() + ) change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) @@ -1843,8 +1851,8 @@ class CreateNextPageOverlay(QtWidgets.QWidget): focal = QtCore.QPointF(left, pos_y) start_p = QtCore.QPointF(right - (width * 0.5), pos_y) gradient = QtGui.QRadialGradient(start_p, radius, focal) - gradient.setColorAt(0, QtGui.QColor(22, 25, 29)) - gradient.setColorAt(1, QtGui.QColor(33, 37, 43)) + gradient.setColorAt(0, self._gradient_start_color) + gradient.setColorAt(1, self._gradient_end_color) painter.fillPath(path, gradient) From 2e1f2fd912b9ceee565b489eb71c120e8228f107 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 21:21:46 +0100 Subject: [PATCH 1937/2550] flame: adding menu loader with selection --- openpype/hosts/flame/api/menu.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index f72a352bba..319ed7afb6 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -225,7 +225,8 @@ class FlameMenuUniversal(_FlameMenuApp): menu['actions'].append({ "name": "Load...", - "execute": lambda x: self.tools_helper.show_loader() + "execute": lambda x: callback_selection( + x, self.tools_helper.show_loader) }) menu['actions'].append({ "name": "Manage...", From 51a7dea7209559bce178d53c776cc340185d2973 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 21:22:09 +0100 Subject: [PATCH 1938/2550] flame: adding batch name to name of openclip --- openpype/hosts/flame/plugins/load/load_clip_batch.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 3b049b861b..4b510c9ce8 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -1,3 +1,4 @@ +from copy import deepcopy import os import flame from pprint import pformat @@ -22,7 +23,7 @@ class LoadClipBatch(opfapi.ClipLoader): # settings reel_name = "OP_LoadedReel" - clip_name_template = "{asset}_{subset}<_{output}>" + clip_name_template = "{batch}_{asset}_{subset}<_{output}>" def load(self, context, name, namespace, options): @@ -40,6 +41,9 @@ class LoadClipBatch(opfapi.ClipLoader): if not context["representation"]["context"].get("output"): self.clip_name_template.replace("output", "representation") + formating_data = deepcopy(context["representation"]["context"]) + formating_data["batch"] = self.batch.name.get_value() + clip_name = 
StringTemplate(self.clip_name_template).format( context["representation"]["context"]) @@ -56,6 +60,7 @@ class LoadClipBatch(opfapi.ClipLoader): openclip_path = os.path.join( openclip_dir, clip_name + ".clip" ) + if not os.path.exists(openclip_dir): os.makedirs(openclip_dir) From bae7a9960a4f28e23aab10f959789eaa00c20c87 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 2 Nov 2022 04:02:57 +0000 Subject: [PATCH 1939/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 442c5f033b..46bb4b1cd0 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.1" +__version__ = "3.14.7-nightly.2" From 7cf4e085f7c00ff8a9af2fdf538d7b0aed88f566 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:19:02 +0100 Subject: [PATCH 1940/2550] handle more types --- openpype/lib/transcoding.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index e736ba8ef0..4fc3a7ce94 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -111,6 +111,7 @@ def get_oiio_info_for_input(filepath, logger=None): class RationalToInt: """Rational value stored as division of 2 integers using string.""" + def __init__(self, string_value): parts = string_value.split("/") top = float(parts[0]) @@ -157,16 +158,16 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "int": return int(value) - if value_type == "float": + if value_type in ("float", "double"): return float(value) # Vectors will probably have more types - if value_type in ("vec2f", "float2"): + if value_type in ("vec2f", "float2", "float2d"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 # - are returned as list of lists - if value_type == "matrix": + if value_type in ("matrix", "matrixd"): output = [] current_index = -1 parts = value.split(",") @@ -198,7 +199,7 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "rational2i": return RationalToInt(value) - if value_type == "vector": + if value_type in ("vector", "vectord"): parts = [part.strip() for part in value.split(",")] output = [] for part in parts: From 7aca8136f5ba0ab22fd0e6084d5cc2901ac791a1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:19:32 +0100 Subject: [PATCH 1941/2550] 'get_oiio_info_for_input' can return information about all subimages --- openpype/lib/transcoding.py | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 4fc3a7ce94..9d87818508 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -77,26 +77,38 @@ def get_transcode_temp_directory(): ) -def get_oiio_info_for_input(filepath, logger=None): +def get_oiio_info_for_input(filepath, logger=None, subimages=False): """Call oiiotool to get information about input and return stdout. Stdout should contain xml format string. 
""" args = [ - get_oiio_tools_path(), "--info", "-v", "-i:infoformat=xml", filepath + get_oiio_tools_path(), + "--info", + "-v" ] + if subimages: + args.append("-a") + + args.extend(["-i:infoformat=xml", filepath]) + output = run_subprocess(args, logger=logger) output = output.replace("\r\n", "\n") xml_started = False + subimages = [] lines = [] for line in output.split("\n"): if not xml_started: if not line.startswith("<"): continue xml_started = True + if xml_started: lines.append(line) + if line == "": + subimages.append(lines) + lines = [] if not xml_started: raise ValueError( @@ -105,8 +117,14 @@ def get_oiio_info_for_input(filepath, logger=None): ) ) - xml_text = "\n".join(lines) - return parse_oiio_xml_output(xml_text, logger=logger) + output = [] + for subimage in subimages: + xml_text = "\n".join(subimage) + output.append(parse_oiio_xml_output(xml_text, logger=logger)) + + if subimages: + return output + return output[0] class RationalToInt: From 61d9657ce16e9eb3b4a434368cb316d2bf8ac05a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:20:58 +0100 Subject: [PATCH 1942/2550] subimages are reason for conversion and skip definition of input channels if there are subimages --- openpype/lib/transcoding.py | 39 +++++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 8 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 9d87818508..32c71fee7e 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -399,6 +399,10 @@ def should_convert_for_ffmpeg(src_filepath): if not input_info: return None + subimages = input_info.get("subimages") + if subimages is not None and subimages > 1: + return True + # Check compression compression = input_info["attribs"].get("compression") if compression in ("dwaa", "dwab"): @@ -507,13 +511,23 @@ def convert_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) - oiio_cmd.extend([ + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - "-i:ch={}".format(input_channels_str), first_input_path, + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + + oiio_cmd.extend([ + input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + # TODO we should look for all subimages and try (somehow) find the + # best candidate for output + "--subimage", "0" ]) # Add frame definitions to arguments @@ -631,6 +645,15 @@ def convert_input_paths_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: + # Tell oiiotool which channels should be loaded + # - other channels are not loaded to memory so helps to avoid memory + # leak issues + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + for input_path in input_paths: # Prepare subprocess arguments oiio_cmd = [ @@ -644,13 +667,13 @@ def convert_input_paths_for_ffmpeg( oiio_cmd.extend(["--compression", compression]) oiio_cmd.extend([ - # Tell oiiotool which channels should be loaded - # - other channels are not loaded to memory so helps to - # avoid memory leak 
issues - "-i:ch={}".format(input_channels_str), input_path, + input_arg, input_path, # Tell oiiotool which channels should be put to top stack # (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + # TODO we should look for all subimages and try (somehow) find the + "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From f437ce7c983cd30a37c3ed697e73d670a79fa87f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:37:26 +0100 Subject: [PATCH 1943/2550] fix variable names --- openpype/lib/transcoding.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 32c71fee7e..1ab3cb4081 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -96,7 +96,7 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): output = output.replace("\r\n", "\n") xml_started = False - subimages = [] + subimages_lines = [] lines = [] for line in output.split("\n"): if not xml_started: @@ -107,7 +107,7 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): if xml_started: lines.append(line) if line == "": - subimages.append(lines) + subimages_lines.append(lines) lines = [] if not xml_started: @@ -118,8 +118,8 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): ) output = [] - for subimage in subimages: - xml_text = "\n".join(subimage) + for subimage_lines in subimages_lines: + xml_text = "\n".join(subimage_lines) output.append(parse_oiio_xml_output(xml_text, logger=logger)) if subimages: @@ -651,7 +651,7 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - # - this option is crashing if used on multipart/subimages exrs + # - this option is crashing if used on multipart exrs input_arg += ":ch={}".format(input_channels_str) for input_path in input_paths: From 560f327de1cbbff29db576c382f2191844855338 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:38:57 +0100 Subject: [PATCH 1944/2550] comment out subimage --- openpype/lib/transcoding.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 1ab3cb4081..af40fa752c 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -524,10 +524,10 @@ def convert_for_ffmpeg( input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) "--ch", channels_arg, + # WARNING: This is commented out because ffmpeg won't be able to + # render proper output when only one subimage is outputed with oiio # Use first subimage - # TODO we should look for all subimages and try (somehow) find the - # best candidate for output - "--subimage", "0" + # "--subimage", "0" ]) # Add frame definitions to arguments @@ -671,9 +671,10 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be put to top stack # (and output) "--ch", channels_arg, + # WARNING: This is commented out because ffmpeg won't be able to + # render proper output when only one subimage is outputed with oiio # Use first subimage - # TODO we should look for all subimages and try (somehow) find the - "--subimage", "0" + # "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From ebbf827f0866b05d3d0915a6cb7f86f1bf814fa6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 
2022 16:43:11 +0100 Subject: [PATCH 1945/2550] fix line length --- openpype/lib/transcoding.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index af40fa752c..5a57026496 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -672,7 +672,8 @@ def convert_input_paths_for_ffmpeg( # (and output) "--ch", channels_arg, # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed with oiio + # render proper output when only one subimage is outputed + # with oiiotool # Use first subimage # "--subimage", "0" ]) From ea5a1f8e525a92af8a8c071bce7e55c92a99f451 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 19:43:37 +0100 Subject: [PATCH 1946/2550] added new settings for filtering of single frame output for review --- .../settings/defaults/project_settings/global.json | 3 ++- .../schemas/schema_global_publish.json | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index b128564bc2..8c129eeff5 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -79,7 +79,8 @@ "ftrack" ], "subsets": [], - "custom_tags": [] + "custom_tags": [], + "single_frame_filter": "everytime" }, "overscan_crop": "", "overscan_color": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 51fc8dedf3..a39ae9eff4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -304,6 +304,20 @@ "label": "Custom Tags", "type": "list", "object_type": "text" + }, + { + "type": "label", + "label": "Use output always / only if input is 1 frame image / only if has 2+ frames or is video" + }, + { + "type": "enum", + "key": "single_frame_filter", + "default": "everytime", + "enum_items": [ + {"everytime": "Always"}, + {"single_frame": "On 1 frame input"}, + {"multi_frame": "On 2+ frame input"} + ] } ] }, From 2c0ea81df64ea1958bbf377923ae4f7b3ed51eee Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 19:44:04 +0100 Subject: [PATCH 1947/2550] added single frame filtering logic into extract review --- openpype/plugins/publish/extract_review.py | 56 ++++++++++++++++------ 1 file changed, 42 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index d457bdc988..0c902cb568 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -3,26 +3,26 @@ import re import copy import json import shutil - from abc import ABCMeta, abstractmethod + import six - import clique - +import speedcopy import pyblish.api from openpype.lib import ( get_ffmpeg_tool_path, - get_ffprobe_streams, path_to_subprocess_arg, run_subprocess, - +) +from openpype.lib.transcoding import ( + IMAGE_EXTENSIONS, + get_ffprobe_streams, should_convert_for_ffmpeg, convert_input_paths_for_ffmpeg, - get_transcode_temp_directory + get_transcode_temp_directory, ) -import speedcopy class ExtractReview(pyblish.api.InstancePlugin): @@ -175,6 +175,26 @@ class 
ExtractReview(pyblish.api.InstancePlugin): outputs_per_representations.append((repre, outputs)) return outputs_per_representations + def _single_frame_filter(self, input_filepaths, output_defs): + single_frame_image = False + if len(input_filepaths) == 1: + ext = os.path.splitext(input_filepaths[0])[-1] + single_frame_image = ext in IMAGE_EXTENSIONS + + filtered_defs = [] + for output_def in output_defs: + output_filters = output_def.get("filter") or {} + frame_filter = output_filters.get("single_frame_filter") + if ( + (not single_frame_image and frame_filter == "single_frame") + or (single_frame_image and frame_filter == "multi_frame") + ): + continue + + filtered_defs.append(output_def) + + return filtered_defs + @staticmethod def get_instance_label(instance): return ( @@ -195,7 +215,7 @@ class ExtractReview(pyblish.api.InstancePlugin): outputs_per_repres = self._get_outputs_per_representations( instance, profile_outputs ) - for repre, outpu_defs in outputs_per_repres: + for repre, output_defs in outputs_per_repres: # Check if input should be preconverted before processing # Store original staging dir (it's value may change) src_repre_staging_dir = repre["stagingDir"] @@ -216,6 +236,16 @@ class ExtractReview(pyblish.api.InstancePlugin): if first_input_path is None: first_input_path = filepath + filtered_output_defs = self._single_frame_filter( + input_filepaths, output_defs + ) + if not filtered_output_defs: + self.log.debug(( + "Repre: {} - All output definitions were filtered" + " out by single frame filter. Skipping" + ).format(repre["name"])) + continue + # Skip if file is not set if first_input_path is None: self.log.warning(( @@ -249,7 +279,7 @@ class ExtractReview(pyblish.api.InstancePlugin): try: self._render_output_definitions( - instance, repre, src_repre_staging_dir, outpu_defs + instance, repre, src_repre_staging_dir, filtered_output_defs ) finally: @@ -263,10 +293,10 @@ class ExtractReview(pyblish.api.InstancePlugin): shutil.rmtree(new_staging_dir) def _render_output_definitions( - self, instance, repre, src_repre_staging_dir, outpu_defs + self, instance, repre, src_repre_staging_dir, output_defs ): fill_data = copy.deepcopy(instance.data["anatomyData"]) - for _output_def in outpu_defs: + for _output_def in output_defs: output_def = copy.deepcopy(_output_def) # Make sure output definition has "tags" key if "tags" not in output_def: @@ -1659,9 +1689,7 @@ class ExtractReview(pyblish.api.InstancePlugin): return True return False - def filter_output_defs( - self, profile, subset_name, families - ): + def filter_output_defs(self, profile, subset_name, families): """Return outputs matching input instance families. Output definitions without families filter are marked as valid. 
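Because the new single-frame logic only appears as a diff hunk, here is a self-contained sketch of the rule it applies. The helper name keep_output_def and the reduced IMAGE_EXTENSIONS set are invented for this illustration and are not the plugin's actual API; the decision logic mirrors _single_frame_filter() from the patch above.

import os

IMAGE_EXTENSIONS = {".exr", ".png", ".jpg"}  # placeholder subset for this sketch


def keep_output_def(input_filepaths, output_def):
    """Return True when an output definition passes the single-frame filter.

    'everytime' (or no filter at all) always passes, 'single_frame' passes
    only when the input is a single still image, 'multi_frame' only when the
    input is a sequence or a video file.
    """
    is_single_image = (
        len(input_filepaths) == 1
        and os.path.splitext(input_filepaths[0])[-1] in IMAGE_EXTENSIONS
    )
    frame_filter = (output_def.get("filter") or {}).get("single_frame_filter")
    if frame_filter == "single_frame":
        return is_single_image
    if frame_filter == "multi_frame":
        return not is_single_image
    return True

For example, keep_output_def(["preview.png"], {"filter": {"single_frame_filter": "multi_frame"}}) returns False, so a video-oriented output definition is skipped when the input is a single still image.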
From e5205f5c81b677209e1626866c100ffd169ac414 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 19:53:03 +0100 Subject: [PATCH 1948/2550] prepared common function to cache instances during collection phase --- openpype/pipeline/create/__init__.py | 2 ++ openpype/pipeline/create/creator_plugins.py | 32 +++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/openpype/pipeline/create/__init__.py b/openpype/pipeline/create/__init__.py index 4b91951a08..9e858151fd 100644 --- a/openpype/pipeline/create/__init__.py +++ b/openpype/pipeline/create/__init__.py @@ -24,6 +24,8 @@ from .creator_plugins import ( deregister_creator_plugin, register_creator_plugin_path, deregister_creator_plugin_path, + + cache_and_get_instances, ) from .context import ( diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index c69abb8861..0f9c346966 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -1,5 +1,6 @@ import os import copy +import collections from abc import ( ABCMeta, @@ -660,3 +661,34 @@ def deregister_creator_plugin_path(path): deregister_plugin_path(BaseCreator, path) deregister_plugin_path(LegacyCreator, path) deregister_plugin_path(SubsetConvertorPlugin, path) + + +def cache_and_get_instances(creator, shared_key, list_instances_func): + """Common approach to cache instances in shared data. + + This is helper function which does not handle cases when a 'shared_key' is + used for different list instances functions. The same approach of caching + instances into 'collection_shared_data' is not required but is so common + we've decided to unify it to some degree. + + Function 'list_instances_func' is called only if 'shared_key' is not + available in 'collection_shared_data' on creator. + + Args: + creator (Creator): Plugin which would like to get instance data. + shared_key (str): Key under which output of function will be stored. + list_instances_func (Function): Function that will return instance data + if data were not yet stored under 'shared_key'. + + Returns: + Dict[str, Dict[str, Any]]: Cached instances by creator identifier from + result of passed function. + """ + + if shared_key not in creator.collection_shared_data: + value = collections.defaultdict(list) + for instance in list_instances_func(): + identifier = instance.get("creator_identifier") + value[identifier].append(instance) + creator.collection_shared_data[shared_key] = value + return creator.collection_shared_data[shared_key] From 53467f97f941eaeb60651f177c5639d4b45f314d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 19:54:01 +0100 Subject: [PATCH 1949/2550] use new function in tray publisher --- openpype/hosts/traypublisher/api/plugin.py | 45 ++++++++-------------- 1 file changed, 15 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 555041d389..24d7004e84 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -2,7 +2,8 @@ from openpype.lib.attribute_definitions import FileDef from openpype.pipeline.create import ( Creator, HiddenCreator, - CreatedInstance + CreatedInstance, + cache_and_get_instances, ) from .pipeline import ( @@ -16,34 +17,19 @@ from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS - -def _cache_and_get_instances(creator): - """Cache instances in shared data. 
- - Args: - creator (Creator): Plugin which would like to get instances from host. - - Returns: - List[Dict[str, Any]]: Cached instances list from host implementation. - """ - - shared_key = "openpype.traypublisher.instances" - if shared_key not in creator.collection_shared_data: - creator.collection_shared_data[shared_key] = list_instances() - return creator.collection_shared_data[shared_key] +SHARED_DATA_KEY = "openpype.traypublisher.instances" class HiddenTrayPublishCreator(HiddenCreator): host_name = "traypublisher" def collect_instances(self): - for instance_data in _cache_and_get_instances(self): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) + instances_by_identifier = cache_and_get_instances( + self, SHARED_DATA_KEY, list_instances + ) + for instance_data in instances_by_identifier[self.identifier]: + instance = CreatedInstance.from_existing(instance_data, self) + self._add_instance_to_context(instance) def update_instances(self, update_list): update_instances(update_list) @@ -74,13 +60,12 @@ class TrayPublishCreator(Creator): host_name = "traypublisher" def collect_instances(self): - for instance_data in _cache_and_get_instances(self): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) + instances_by_identifier = cache_and_get_instances( + self, SHARED_DATA_KEY, list_instances + ) + for instance_data in instances_by_identifier[self.identifier]: + instance = CreatedInstance.from_existing(instance_data, self) + self._add_instance_to_context(instance) def update_instances(self, update_list): update_instances(update_list) From eaa097d513ba5ec55e9b80267f634ca77d39fe9b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 19:54:13 +0100 Subject: [PATCH 1950/2550] change 'REVIEW_EXTENSIONS' to set instead of list --- openpype/hosts/traypublisher/api/plugin.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 24d7004e84..6b95379cf2 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,22 +1,19 @@ from openpype.lib.attribute_definitions import FileDef +from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS from openpype.pipeline.create import ( Creator, HiddenCreator, CreatedInstance, cache_and_get_instances, ) - from .pipeline import ( list_instances, update_instances, remove_instances, HostContext, ) -from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS - - -REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS +REVIEW_EXTENSIONS = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS) SHARED_DATA_KEY = "openpype.traypublisher.instances" From dbc4b64ca138d5b78326b27d3dd1fe986e3e4223 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 2 Nov 2022 20:39:04 +0100 Subject: [PATCH 1951/2550] Hiero: adding formating dict with batch --- openpype/hosts/flame/plugins/load/load_clip_batch.py | 2 +- openpype/settings/defaults/project_settings/flame.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 4b510c9ce8..17ad8075e4 100644 --- 
a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -45,7 +45,7 @@ class LoadClipBatch(opfapi.ClipLoader): formating_data["batch"] = self.batch.name.get_value() clip_name = StringTemplate(self.clip_name_template).format( - context["representation"]["context"]) + formating_data) # TODO: settings in imageio # convert colorspace with ocio to flame mapping diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index 0f3080ad64..34baf9ba06 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -142,7 +142,7 @@ "exr16fpdwaa" ], "reel_name": "OP_LoadedReel", - "clip_name_template": "{asset}_{subset}<_{output}>" + "clip_name_template": "{batch}_{asset}_{subset}<_{output}>" } } } \ No newline at end of file From e3c339b1537a3afdde9b694c825a8ab71ad5abe2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 11:27:53 +0100 Subject: [PATCH 1952/2550] added information about additional filtering --- website/docs/project_settings/settings_project_global.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/website/docs/project_settings/settings_project_global.md b/website/docs/project_settings/settings_project_global.md index 24ea09b6fb..9666c6568a 100644 --- a/website/docs/project_settings/settings_project_global.md +++ b/website/docs/project_settings/settings_project_global.md @@ -135,6 +135,12 @@ Profile may generate multiple outputs from a single input. Each output must defi - set alpha to `0` to not use this option at all (in most of cases background stays black) - other than `0` alpha will draw color as background +- **`Additional filtering`** + - Profile filtering defines which group of output definitions is used but output definitions may require more specific filters on their own. + - They may filter by subset name (regex can be used) or publish families. Publish families are more complex as are based on knowing code base. + - Filtering by custom tags -> this is used for targeting to output definitions from other extractors using settings (at this moment only Nuke bake extractor can target using custom tags). + - Nuke extractor settings path: `project_settings/nuke/publish/ExtractReviewDataMov/outputs/baking/add_custom_tags` + - Filtering by input length. Input may be video, sequence or single image. It is possible that `.mp4` should be created only when input is video or sequence and to create review `.png` when input is single frame. In some cases the output should be created even if it's single frame or multi frame input. 
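[Editorial aside, not part of the patch] For illustration, a trimmed sketch of two output definitions using these filters, written as the Python data the settings resolve to. The values mirror the project defaults added later in this series (a png review only for single-frame inputs, an mp4 review only for video or sequence inputs); keys unrelated to filtering are omitted.

    # Two Extract Review output definitions, abbreviated to the filter part.
    outputs = {
        "png": {
            "ext": "png",
            "filter": {
                "families": ["render", "review", "ftrack"],
                "subsets": [],       # optional regex filters on subset name
                "custom_tags": [],   # e.g. tags targeted from Nuke baking
                "single_frame_filter": "single_frame",  # 1-frame input only
            },
        },
        "h264": {
            "ext": "mp4",
            "filter": {
                "families": ["render", "review", "ftrack"],
                "subsets": [],
                "custom_tags": [],
                "single_frame_filter": "multi_frame",   # video/sequence only
            },
        },
    }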
### IntegrateAssetNew From f694b433a581e45a371c99b960d905dbe7925efe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 11:32:53 +0100 Subject: [PATCH 1953/2550] fix too long line --- openpype/plugins/publish/extract_review.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 0c902cb568..1f9b30fba3 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -279,7 +279,10 @@ class ExtractReview(pyblish.api.InstancePlugin): try: self._render_output_definitions( - instance, repre, src_repre_staging_dir, filtered_output_defs + instance, + repre, + src_repre_staging_dir, + filtered_output_defs ) finally: From ed96f1d5b33649e8f5f21e5598e4ee56436f63df Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 11:41:59 +0100 Subject: [PATCH 1954/2550] requested cosmetic changes --- .../hooks/pre_copy_last_published_workfile.py | 172 +++++++++--------- 1 file changed, 90 insertions(+), 82 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index cf4edeac9b..7a835507f7 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -35,6 +35,17 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ + # Check there is no workfile available + last_workfile = self.data.get("last_workfile_path") + if os.path.exists(last_workfile): + self.log.debug( + "Last workfile exists. Skipping {} process.".format( + self.__class__.__name__ + ) + ) + return + + # Get data project_name = self.data["project_name"] task_name = self.data["task_name"] task_type = self.data["task_type"] @@ -73,97 +84,94 @@ class CopyLastPublishedWorkfile(PreLaunchHook): self.log.info("Trying to fetch last published workfile...") - # Check there is no workfile available - last_workfile = self.data.get("last_workfile_path") - if os.path.exists(last_workfile): - self.log.debug( - "Last workfile exists. 
Skipping {} process.".format( - self.__class__.__name__ - ) - ) - return - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") - if project_doc and asset_doc: - # Get subset id - subset_id = next( - ( - subset["_id"] - for subset in get_subsets( - project_name, - asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family"], - ) - if subset["data"]["family"] == "workfile" - ), - None, - ) - if not subset_id: - self.log.debug('No any workfile for asset "{}".').format( - asset_doc["name"] - ) - return - # Get workfile representation - workfile_representation = next( - ( - representation - for representation in get_representations( - project_name, - version_ids=[ + # Check it can proceed + if not project_doc and not asset_doc: + return + + # Get subset id + subset_id = next( + ( + subset["_id"] + for subset in get_subsets( + project_name, + asset_ids=[asset_doc["_id"]], + fields=["_id", "data.family"], + ) + if subset["data"]["family"] == "workfile" + ), + None, + ) + if not subset_id: + self.log.debug('No any workfile for asset "{}".').format( + asset_doc["name"] + ) + return + + # Get workfile representation + workfile_representation = next( + ( + representation + for representation in get_representations( + project_name, + version_ids=[ + ( get_last_version_by_subset_id( project_name, subset_id, fields=["_id"] - )["_id"] - ], - ) - if representation["context"]["task"]["name"] == task_name - ), - None, - ) + ) + or {} + ).get("_id") + ], + ) + if representation["context"]["task"]["name"] == task_name + ), + None, + ) - if not workfile_representation: - self.log.debug( - 'No published workfile for task "{}" and host "{}".' - ).format(task_name, host_name) - return + if not workfile_representation: + self.log.debug( + 'No published workfile for task "{}" and host "{}".' 
+ ).format(task_name, host_name) + return - # Get sync server from Tray, - # which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if t["sync_server"].sync_server_thread - ), - None, - ) + # Get sync server from Tray, + # which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, - ) - sync_server.reset_timer() + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() - # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots - ) - local_workfile_dir = os.path.dirname(last_workfile) + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) - # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir - ) + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) From 3c4476c2335ce760d2a49a80531f09705af79c68 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 12:00:44 +0100 Subject: [PATCH 1955/2550] added default png output definition --- .../defaults/project_settings/global.json | 58 ++++++++++++++++++- 1 file changed, 57 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 8c129eeff5..9c3f2f1e1b 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -53,6 +53,62 @@ "families": [], "hosts": [], "outputs": { + "png": { + "ext": "png", + "tags": [ + "ftrackreview" + ], + "burnins": [], + "ffmpeg_args": { + "video_filters": [], + "audio_filters": [], + "input": [], + "output": [] + }, + "filter": { + "families": [ + "render", + "review", + "ftrack" + ], + "subsets": [], + "custom_tags": [], + "single_frame_filter": "single_frame" + }, + "overscan_crop": "", + "overscan_color": [ + 0, + 0, + 0, + 255 + ], + "width": 1920, + "height": 1080, + "scale_pixel_aspect": true, + "bg_color": [ + 0, + 0, + 0, + 0 + ], + "letter_box": { + "enabled": false, + "ratio": 0.0, + "fill_color": [ + 0, + 0, + 0, + 255 + ], + "line_thickness": 0, + "line_color": [ + 255, + 0, + 0, + 255 + ] + } + }, "h264": { "ext": "mp4", "tags": [ @@ -80,7 +136,7 @@ ], "subsets": [], "custom_tags": [], - "single_frame_filter": "everytime" + "single_frame_filter": "multi_frame" }, "overscan_crop": "", "overscan_color": [ From 0b201e4f48175a72431460d80b1e9f337267b10d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 13:23:17 +0100 Subject: [PATCH 1956/2550] change labels in 
schema --- .../projects_schema/schemas/schema_global_publish.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index a39ae9eff4..742437fbde 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -315,8 +315,8 @@ "default": "everytime", "enum_items": [ {"everytime": "Always"}, - {"single_frame": "On 1 frame input"}, - {"multi_frame": "On 2+ frame input"} + {"single_frame": "Only if input has 1 image frame"}, + {"multi_frame": "Only if input is video or sequence of frames"} ] } ] From 646a289860cfce3d352a7f8b06d1a264086cbc1c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:15:35 +0100 Subject: [PATCH 1957/2550] interfaces are in init of modules --- openpype/modules/__init__.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 02e7dc13ab..1f345feea9 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -1,4 +1,14 @@ # -*- coding: utf-8 -*- +from .interfaces import ( + ILaunchHookPaths, + IPluginPaths, + ITrayModule, + ITrayAction, + ITrayService, + ISettingsChangeListener, + IHostAddon, +) + from .base import ( OpenPypeModule, OpenPypeAddOn, @@ -17,6 +27,14 @@ from .base import ( __all__ = ( + "ILaunchHookPaths", + "IPluginPaths", + "ITrayModule", + "ITrayAction", + "ITrayService", + "ISettingsChangeListener", + "IHostAddon", + "OpenPypeModule", "OpenPypeAddOn", From 0478d3ea59db98e732c13891d42e93676a11c0d8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:21:58 +0100 Subject: [PATCH 1958/2550] use new import of interfaces --- openpype/modules/avalon_apps/avalon_app.py | 3 +-- openpype/modules/clockify/clockify_module.py | 11 ++++++----- openpype/modules/deadline/deadline_module.py | 3 +-- .../modules/example_addons/example_addon/addon.py | 5 +---- openpype/modules/kitsu/kitsu_module.py | 7 +++++-- openpype/modules/launcher_action.py | 6 ++++-- openpype/modules/log_viewer/log_view_module.py | 3 +-- openpype/modules/muster/muster.py | 3 +-- openpype/modules/project_manager_action.py | 3 +-- .../modules/python_console_interpreter/module.py | 3 +-- openpype/modules/royalrender/royal_render_module.py | 3 +-- openpype/modules/settings_action.py | 3 +-- openpype/modules/shotgrid/shotgrid_module.py | 5 ++--- openpype/modules/sync_server/sync_server_module.py | 12 ++++++------ openpype/modules/timers_manager/timers_manager.py | 4 ++-- openpype/modules/webserver/host_console_listener.py | 2 +- openpype/modules/webserver/webserver_module.py | 3 +-- openpype/settings/lib.py | 9 +++------ openpype/tools/tray/pype_tray.py | 2 +- 19 files changed, 40 insertions(+), 50 deletions(-) diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/avalon_apps/avalon_app.py index 1d21de129b..f9085522b0 100644 --- a/openpype/modules/avalon_apps/avalon_app.py +++ b/openpype/modules/avalon_apps/avalon_app.py @@ -1,7 +1,6 @@ import os -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayModule +from openpype.modules import OpenPypeModule, ITrayModule class AvalonModule(OpenPypeModule, ITrayModule): diff --git a/openpype/modules/clockify/clockify_module.py 
b/openpype/modules/clockify/clockify_module.py index 932ce87c36..14fcb01f67 100644 --- a/openpype/modules/clockify/clockify_module.py +++ b/openpype/modules/clockify/clockify_module.py @@ -2,16 +2,17 @@ import os import threading import time +from openpype.modules import ( + OpenPypeModule, + ITrayModule, + IPluginPaths +) + from .clockify_api import ClockifyAPI from .constants import ( CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH ) -from openpype.modules import OpenPypeModule -from openpype_interfaces import ( - ITrayModule, - IPluginPaths -) class ClockifyModule( diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index bbd0f74e8a..9855f8c1b1 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -4,8 +4,7 @@ import six import sys from openpype.lib import requests_get, Logger -from openpype.modules import OpenPypeModule -from openpype_interfaces import IPluginPaths +from openpype.modules import OpenPypeModule, IPluginPaths class DeadlineWebserviceError(Exception): diff --git a/openpype/modules/example_addons/example_addon/addon.py b/openpype/modules/example_addons/example_addon/addon.py index 50554b1e43..ead647b41d 100644 --- a/openpype/modules/example_addons/example_addon/addon.py +++ b/openpype/modules/example_addons/example_addon/addon.py @@ -13,10 +13,7 @@ import click from openpype.modules import ( JsonFilesSettingsDef, OpenPypeAddOn, - ModulesManager -) -# Import interface defined by this addon to be able find other addons using it -from openpype_interfaces import ( + ModulesManager, IPluginPaths, ITrayAction ) diff --git a/openpype/modules/kitsu/kitsu_module.py b/openpype/modules/kitsu/kitsu_module.py index 23c032715b..b91373af20 100644 --- a/openpype/modules/kitsu/kitsu_module.py +++ b/openpype/modules/kitsu/kitsu_module.py @@ -3,8 +3,11 @@ import click import os -from openpype.modules import OpenPypeModule -from openpype_interfaces import IPluginPaths, ITrayAction +from openpype.modules import ( + OpenPypeModule, + IPluginPaths, + ITrayAction, +) class KitsuModule(OpenPypeModule, IPluginPaths, ITrayAction): diff --git a/openpype/modules/launcher_action.py b/openpype/modules/launcher_action.py index e3252e3842..c4331b6094 100644 --- a/openpype/modules/launcher_action.py +++ b/openpype/modules/launcher_action.py @@ -1,5 +1,7 @@ -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules import ( + OpenPypeModule, + ITrayAction, +) class LauncherAction(OpenPypeModule, ITrayAction): diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index da1628b71f..31e954fadd 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -1,5 +1,4 @@ -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayModule +from openpype.modules import OpenPypeModule, ITrayModule class LogViewModule(OpenPypeModule, ITrayModule): diff --git a/openpype/modules/muster/muster.py b/openpype/modules/muster/muster.py index 6e26ad2d7b..8d395d16e8 100644 --- a/openpype/modules/muster/muster.py +++ b/openpype/modules/muster/muster.py @@ -2,8 +2,7 @@ import os import json import appdirs import requests -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayModule +from openpype.modules import OpenPypeModule, ITrayModule class MusterModule(OpenPypeModule, ITrayModule): diff --git 
a/openpype/modules/project_manager_action.py b/openpype/modules/project_manager_action.py index 251964a059..5f74dd9ee5 100644 --- a/openpype/modules/project_manager_action.py +++ b/openpype/modules/project_manager_action.py @@ -1,5 +1,4 @@ -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules import OpenPypeModule, ITrayAction class ProjectManagerAction(OpenPypeModule, ITrayAction): diff --git a/openpype/modules/python_console_interpreter/module.py b/openpype/modules/python_console_interpreter/module.py index 8c4a2fba73..cb99c05e37 100644 --- a/openpype/modules/python_console_interpreter/module.py +++ b/openpype/modules/python_console_interpreter/module.py @@ -1,5 +1,4 @@ -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules import OpenPypeModule, ITrayAction class PythonInterpreterAction(OpenPypeModule, ITrayAction): diff --git a/openpype/modules/royalrender/royal_render_module.py b/openpype/modules/royalrender/royal_render_module.py index 4f72860ad6..10d74d01d1 100644 --- a/openpype/modules/royalrender/royal_render_module.py +++ b/openpype/modules/royalrender/royal_render_module.py @@ -2,8 +2,7 @@ """Module providing support for Royal Render.""" import os import openpype.modules -from openpype.modules import OpenPypeModule -from openpype_interfaces import IPluginPaths +from openpype.modules import OpenPypeModule, IPluginPaths class RoyalRenderModule(OpenPypeModule, IPluginPaths): diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 1e7eca4dec..1902caff1d 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -1,5 +1,4 @@ -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayAction +from openpype.modules import OpenPypeModule, ITrayAction class SettingsAction(OpenPypeModule, ITrayAction): diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py index 281c6fdcad..d26647d06a 100644 --- a/openpype/modules/shotgrid/shotgrid_module.py +++ b/openpype/modules/shotgrid/shotgrid_module.py @@ -1,12 +1,11 @@ import os -from openpype_interfaces import ( +from openpype.modules import ( + OpenPypeModule, ITrayModule, IPluginPaths, ) -from openpype.modules import OpenPypeModule - SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index a478faa9ef..e84c333a58 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -11,9 +11,12 @@ from collections import deque, defaultdict import click from bson.objectid import ObjectId -from openpype.client import get_projects -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayModule +from openpype.client import ( + get_projects, + get_representations, + get_representation_by_id, +) +from openpype.modules import OpenPypeModule, ITrayModule from openpype.settings import ( get_project_settings, get_system_settings, @@ -30,9 +33,6 @@ from .providers import lib from .utils import time_function, SyncStatus, SiteAlreadyPresentError -from openpype.client import get_representations, get_representation_by_id - - log = Logger.get_logger("SyncServer") diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 
c168e9534d..27f9a4f68b 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -3,8 +3,8 @@ import platform from openpype.client import get_asset_by_name -from openpype.modules import OpenPypeModule -from openpype_interfaces import ( +from openpype.modules.interfaces import ( + OpenPypeModule, ITrayService, IPluginPaths ) diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/webserver/host_console_listener.py index 6138f9f097..fdfe1ba688 100644 --- a/openpype/modules/webserver/host_console_listener.py +++ b/openpype/modules/webserver/host_console_listener.py @@ -5,7 +5,7 @@ import logging from concurrent.futures import CancelledError from Qt import QtWidgets -from openpype_interfaces import ITrayService +from openpype.modules import ITrayService log = logging.getLogger(__name__) diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index 16861abd29..354ab1e4f9 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -24,8 +24,7 @@ import os import socket from openpype import resources -from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayService +from openpype.modules import OpenPypeModule, ITrayService class WebServerModule(OpenPypeModule, ITrayService): diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 5eaddf6e6e..288c587d03 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -138,8 +138,7 @@ def save_studio_settings(data): SaveWarningExc: If any module raises the exception. """ # Notify Pype modules - from openpype.modules import ModulesManager - from openpype_interfaces import ISettingsChangeListener + from openpype.modules import ModulesManager, ISettingsChangeListener old_data = get_system_settings() default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] @@ -186,8 +185,7 @@ def save_project_settings(project_name, overrides): SaveWarningExc: If any module raises the exception. """ # Notify Pype modules - from openpype.modules import ModulesManager - from openpype_interfaces import ISettingsChangeListener + from openpype.modules import ModulesManager, ISettingsChangeListener default_values = get_default_settings()[PROJECT_SETTINGS_KEY] if project_name: @@ -248,8 +246,7 @@ def save_project_anatomy(project_name, anatomy_data): SaveWarningExc: If any module raises the exception. 
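[Editorial aside, not part of the patch] The pattern the surrounding commits converge on is a single import that serves both the base class and the interfaces. A minimal sketch of a custom tray addon written against the new location; the addon name, label and settings handling are illustrative assumptions, and it presumes ITrayAction keeps providing default tray hooks so only the action trigger needs implementing.

    from openpype.modules import OpenPypeModule, ITrayAction


    class MyStudioToolAction(OpenPypeModule, ITrayAction):
        # Illustrative addon exposing a single tray action.
        name = "my_studio_tool"
        label = "My Studio Tool"

        def initialize(self, modules_settings):
            # Real addons usually read their enabled state from settings.
            self.enabled = True

        def on_action_trigger(self):
            # Called when the tray menu action is clicked.
            print("My Studio Tool triggered")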
""" # Notify Pype modules - from openpype.modules import ModulesManager - from openpype_interfaces import ISettingsChangeListener + from openpype.modules import ModulesManager, ISettingsChangeListener default_values = get_default_settings()[PROJECT_ANATOMY_KEY] if project_name: diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 3842a4e216..d4189af4d8 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -401,7 +401,7 @@ class TrayManager: def initialize_modules(self): """Add modules to tray.""" - from openpype_interfaces import ( + from openpype.modules import ( ITrayAction, ITrayService ) From 19c8fae79912424f9a4897f3d29181078e9c33b7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:26:18 +0100 Subject: [PATCH 1959/2550] fix timers manager --- openpype/modules/timers_manager/timers_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 27f9a4f68b..0ba68285a4 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -3,7 +3,7 @@ import platform from openpype.client import get_asset_by_name -from openpype.modules.interfaces import ( +from openpype.modules import ( OpenPypeModule, ITrayService, IPluginPaths From 81017f90cf9844c70bf8feb3dd45804f2aa954d7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:26:26 +0100 Subject: [PATCH 1960/2550] added forgotten modules --- openpype/modules/ftrack/ftrack_module.py | 4 ++-- openpype/modules/slack/slack_module.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 678af0e577..6f14f8428d 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -5,8 +5,8 @@ import platform import click -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import ( +from openpype.modules import ( + OpenPypeModule, ITrayModule, IPluginPaths, ISettingsChangeListener diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/slack/slack_module.py index 499c1c19ce..797ae19f4a 100644 --- a/openpype/modules/slack/slack_module.py +++ b/openpype/modules/slack/slack_module.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IPluginPaths +from openpype.modules import OpenPypeModule, IPluginPaths SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) From 12c28fe4668985e0e990921537a06e85e239003e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:27:38 +0100 Subject: [PATCH 1961/2550] change imports in hosts --- openpype/hosts/aftereffects/addon.py | 3 +-- openpype/hosts/blender/addon.py | 3 +-- openpype/hosts/flame/addon.py | 3 +-- openpype/hosts/fusion/addon.py | 3 +-- openpype/hosts/harmony/addon.py | 3 +-- openpype/hosts/hiero/addon.py | 3 +-- openpype/hosts/houdini/addon.py | 3 +-- openpype/hosts/maya/addon.py | 3 +-- openpype/hosts/nuke/addon.py | 3 +-- openpype/hosts/photoshop/addon.py | 3 +-- openpype/hosts/resolve/addon.py | 3 +-- openpype/hosts/standalonepublisher/addon.py | 3 +-- openpype/hosts/traypublisher/addon.py | 3 +-- openpype/hosts/tvpaint/addon.py | 3 +-- openpype/hosts/unreal/addon.py | 3 +-- openpype/hosts/webpublisher/addon.py | 3 +-- 16 files changed, 16 insertions(+), 32 deletions(-) diff 
--git a/openpype/hosts/aftereffects/addon.py b/openpype/hosts/aftereffects/addon.py index 94843e7dc5..79df550312 100644 --- a/openpype/hosts/aftereffects/addon.py +++ b/openpype/hosts/aftereffects/addon.py @@ -1,5 +1,4 @@ -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon class AfterEffectsAddon(OpenPypeModule, IHostAddon): diff --git a/openpype/hosts/blender/addon.py b/openpype/hosts/blender/addon.py index 3ee638a5bb..f1da9b808c 100644 --- a/openpype/hosts/blender/addon.py +++ b/openpype/hosts/blender/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/flame/addon.py b/openpype/hosts/flame/addon.py index 5a34413bb0..d9359fc5bf 100644 --- a/openpype/hosts/flame/addon.py +++ b/openpype/hosts/flame/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon HOST_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py index 1913cc2e30..d1bd1566b7 100644 --- a/openpype/hosts/fusion/addon.py +++ b/openpype/hosts/fusion/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/harmony/addon.py b/openpype/hosts/harmony/addon.py index 872a7490b5..efef40ab92 100644 --- a/openpype/hosts/harmony/addon.py +++ b/openpype/hosts/harmony/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py index 3523e9aed7..f5bb94dbaa 100644 --- a/openpype/hosts/hiero/addon.py +++ b/openpype/hosts/hiero/addon.py @@ -1,7 +1,6 @@ import os import platform -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon HIERO_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/houdini/addon.py b/openpype/hosts/houdini/addon.py index 8d88e83c56..80856b0624 100644 --- a/openpype/hosts/houdini/addon.py +++ b/openpype/hosts/houdini/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon HOUDINI_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/maya/addon.py b/openpype/hosts/maya/addon.py index cdd2bc1667..b9ecb8279f 100644 --- a/openpype/hosts/maya/addon.py +++ b/openpype/hosts/maya/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py index 54e4da5195..1c5d5c4005 100644 --- 
a/openpype/hosts/nuke/addon.py +++ b/openpype/hosts/nuke/addon.py @@ -1,7 +1,6 @@ import os import platform -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/photoshop/addon.py b/openpype/hosts/photoshop/addon.py index a41d91554b..965a545ac5 100644 --- a/openpype/hosts/photoshop/addon.py +++ b/openpype/hosts/photoshop/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/resolve/addon.py b/openpype/hosts/resolve/addon.py index a31da52a6d..02c1d7957f 100644 --- a/openpype/hosts/resolve/addon.py +++ b/openpype/hosts/resolve/addon.py @@ -1,7 +1,6 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon from .utils import RESOLVE_ROOT_DIR diff --git a/openpype/hosts/standalonepublisher/addon.py b/openpype/hosts/standalonepublisher/addon.py index 98ec44d4e2..65a4226664 100644 --- a/openpype/hosts/standalonepublisher/addon.py +++ b/openpype/hosts/standalonepublisher/addon.py @@ -4,8 +4,7 @@ import click from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import ITrayAction, IHostAddon +from openpype.modules import OpenPypeModule, ITrayAction, IHostAddon STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/traypublisher/addon.py b/openpype/hosts/traypublisher/addon.py index c86c835ed9..c157799898 100644 --- a/openpype/hosts/traypublisher/addon.py +++ b/openpype/hosts/traypublisher/addon.py @@ -4,8 +4,7 @@ import click from openpype.lib import get_openpype_execute_args from openpype.lib.execute import run_detached_process -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import ITrayAction, IHostAddon +from openpype.modules import OpenPypeModule, ITrayAction, IHostAddon TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/tvpaint/addon.py b/openpype/hosts/tvpaint/addon.py index d710e63f93..b695bf8ecc 100644 --- a/openpype/hosts/tvpaint/addon.py +++ b/openpype/hosts/tvpaint/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/unreal/addon.py b/openpype/hosts/unreal/addon.py index 16736214c5..e2c8484651 100644 --- a/openpype/hosts/unreal/addon.py +++ b/openpype/hosts/unreal/addon.py @@ -1,6 +1,5 @@ import os -from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py index a64d74e62b..eb7fced2e6 100644 --- a/openpype/hosts/webpublisher/addon.py +++ b/openpype/hosts/webpublisher/addon.py @@ -2,8 +2,7 @@ import os import click -from openpype.modules import OpenPypeModule -from 
openpype.modules.interfaces import IHostAddon +from openpype.modules import OpenPypeModule, IHostAddon WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) From b716eb18cfa46bf9d527a76bf66feb7de14132fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:36:59 +0100 Subject: [PATCH 1962/2550] added deprecation warning --- openpype/modules/base.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 09aea50424..4761462df0 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -9,6 +9,7 @@ import logging import platform import threading import collections +import traceback from uuid import uuid4 from abc import ABCMeta, abstractmethod import six @@ -139,6 +140,15 @@ class _InterfacesClass(_ModuleClass): "cannot import name '{}' from 'openpype_interfaces'" ).format(attr_name)) + if _LoadCache.interfaces_loaded and attr_name != "log": + stack = list(traceback.extract_stack()) + stack.pop(-1) + self.log.warning(( + "Using deprecated import of \"{}\" from 'openpype_interfaces'." + " Please switch to use import" + " from 'openpype.modules.interfaces'" + " (will be removed after 3.16.x).{}" + ).format(attr_name, "".join(traceback.format_list(stack)))) return self.__attributes__[attr_name] From 406a7e0e2e0e39811f9c8d6b28e2aea28896da40 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:42:35 +0100 Subject: [PATCH 1963/2550] project list does not trigger unsaved changes dialog if is not in edit node --- openpype/tools/settings/settings/categories.py | 4 ++++ openpype/tools/settings/settings/widgets.py | 7 ++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index f4b2c13a12..e1b3943317 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -892,6 +892,10 @@ class ProjectWidget(SettingsCategoryWidget): def __init__(self, *args, **kwargs): super(ProjectWidget, self).__init__(*args, **kwargs) + def set_edit_mode(self, enabled): + super(ProjectWidget, self).set_edit_mode(enabled) + self.project_list_widget.set_edit_mode(enabled) + def _check_last_saved_info(self): if self.is_modifying_defaults: return True diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 722717df89..183b5b7b40 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -1009,6 +1009,7 @@ class ProjectListWidget(QtWidgets.QWidget): self._entity = None self.current_project = None + self._edit_mode = True super(ProjectListWidget, self).__init__(parent) self.setObjectName("ProjectListWidget") @@ -1061,6 +1062,10 @@ class ProjectListWidget(QtWidgets.QWidget): self.project_model = project_model self.inactive_chk = inactive_chk + def set_edit_mode(self, enabled): + if self._edit_mode is not enabled: + self._edit_mode = enabled + def set_entity(self, entity): self._entity = entity @@ -1112,7 +1117,7 @@ class ProjectListWidget(QtWidgets.QWidget): save_changes = False change_project = False - if self.validate_context_change(): + if not self._edit_mode or self.validate_context_change(): change_project = True else: From 040ba43d97f64dc0c8da4959539f8fc0f1137990 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Nov 2022 16:42:45 +0100 Subject: [PATCH 1964/2550] added title to unsaved changes dialog --- 
openpype/tools/settings/settings/widgets.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 183b5b7b40..b8ad21e7e4 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -646,6 +646,9 @@ class UnsavedChangesDialog(QtWidgets.QDialog): def __init__(self, parent=None): super(UnsavedChangesDialog, self).__init__(parent) + + self.setWindowTitle("Unsaved changes") + message_label = QtWidgets.QLabel(self.message) btns_widget = QtWidgets.QWidget(self) From 4874f5f79bb0b209302a48e8998ca6aa4caef376 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Nov 2022 17:58:10 +0100 Subject: [PATCH 1965/2550] OP-4371 - remove families from PreIntegrateThumbnail It should handle all instance which has thumbnail representations, not only specifically for image and render. There might be many more. --- .../plugins/publish/preintegrate_thumbnail_representation.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/plugins/publish/preintegrate_thumbnail_representation.py b/openpype/plugins/publish/preintegrate_thumbnail_representation.py index f9e23223e6..b88ccee9dc 100644 --- a/openpype/plugins/publish/preintegrate_thumbnail_representation.py +++ b/openpype/plugins/publish/preintegrate_thumbnail_representation.py @@ -21,9 +21,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): label = "Override Integrate Thumbnail Representations" order = pyblish.api.IntegratorOrder - 0.1 - families = ["review"] - integrate_profiles = {} + integrate_profiles = [] def process(self, instance): repres = instance.data.get("representations") From fa6a80bcef9cb225d2993e979e8e353ef2adffa9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Nov 2022 18:04:29 +0100 Subject: [PATCH 1966/2550] OP-4371 - remove obsolete ExtractThumbnail Functionality will be replaced by ExtractThumbnailFromSource which handles resolution. Reviewable instance must contain thumbnailSource field pointing to source file for Thumbnail. 
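[Editorial aside, not part of the patch] To make the contract above explicit: a collector that wants a thumbnail only has to point 'thumbnailSource' at a file on disk, as the next hunk does for webpublisher. A hypothetical minimal collector is sketched below; the plugin name, order and family filter are illustrative, only the 'thumbnailSource' key is the agreed interface.

    import os

    import pyblish.api


    class CollectThumbnailSource(pyblish.api.InstancePlugin):
        """Hypothetical collector feeding ExtractThumbnailFromSource."""

        label = "Collect Thumbnail Source"
        order = pyblish.api.CollectorOrder + 0.49
        families = ["review"]

        def process(self, instance):
            repre = instance.data["representations"][0]
            files = repre["files"]
            first_file = (
                files[0] if isinstance(files, (list, tuple)) else files
            )
            # ExtractThumbnailFromSource is expected to read this path and
            # build the thumbnail representation from it.
            instance.data["thumbnailSource"] = os.path.join(
                repre["stagingDir"], first_file
            )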
--- .../publish/collect_published_files.py | 5 +- .../plugins/publish/extract_thumbnail.py | 137 ------------------ 2 files changed, 4 insertions(+), 138 deletions(-) delete mode 100644 openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index dd4646f356..3aa941af42 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -149,10 +149,13 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): self.log.warning("Unable to count frames " "duration {}".format(no_of_frames)) - # raise ValueError("STOP") instance.data["handleStart"] = asset_doc["data"]["handleStart"] instance.data["handleEnd"] = asset_doc["data"]["handleEnd"] + if "review" in tags: + instance.data["thumbnailSource"] = \ + instance.data["representations"][0] + instances.append(instance) self.log.info("instance.data:: {}".format(instance.data)) diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py deleted file mode 100644 index a56521891b..0000000000 --- a/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py +++ /dev/null @@ -1,137 +0,0 @@ -import os -import shutil - -import pyblish.api -from openpype.lib import ( - get_ffmpeg_tool_path, - - run_subprocess, - - get_transcode_temp_directory, - convert_input_paths_for_ffmpeg, - should_convert_for_ffmpeg -) - - -class ExtractThumbnail(pyblish.api.InstancePlugin): - """Create jpg thumbnail from input using ffmpeg.""" - - label = "Extract Thumbnail" - order = pyblish.api.ExtractorOrder - families = [ - "render", - "image" - ] - hosts = ["webpublisher"] - targets = ["filespublish"] - - def process(self, instance): - self.log.info("subset {}".format(instance.data['subset'])) - - filtered_repres = self._get_filtered_repres(instance) - for repre in filtered_repres: - repre_files = repre["files"] - if not isinstance(repre_files, (list, tuple)): - input_file = repre_files - else: - file_index = int(float(len(repre_files)) * 0.5) - input_file = repre_files[file_index] - - stagingdir = os.path.normpath(repre["stagingDir"]) - - full_input_path = os.path.join(stagingdir, input_file) - self.log.info("Input filepath: {}".format(full_input_path)) - - do_convert = should_convert_for_ffmpeg(full_input_path) - # If result is None the requirement of conversion can't be - # determined - if do_convert is None: - self.log.info(( - "Can't determine if representation requires conversion." - " Skipped." 
- )) - continue - - # Do conversion if needed - # - change staging dir of source representation - # - must be set back after output definitions processing - convert_dir = None - if do_convert: - convert_dir = get_transcode_temp_directory() - filename = os.path.basename(full_input_path) - convert_input_paths_for_ffmpeg( - [full_input_path], - convert_dir, - self.log - ) - full_input_path = os.path.join(convert_dir, filename) - - filename = os.path.splitext(input_file)[0] - while filename.endswith("."): - filename = filename[:-1] - thumbnail_filename = filename + "_thumbnail.jpg" - full_output_path = os.path.join(stagingdir, thumbnail_filename) - - self.log.info("output {}".format(full_output_path)) - - ffmpeg_args = [ - get_ffmpeg_tool_path("ffmpeg"), - "-y", - "-i", full_input_path, - "-vframes", "1", - full_output_path - ] - - # run subprocess - self.log.debug("{}".format(" ".join(ffmpeg_args))) - try: # temporary until oiiotool is supported cross platform - run_subprocess( - ffmpeg_args, logger=self.log - ) - except RuntimeError as exp: - if "Compression" in str(exp): - self.log.debug( - "Unsupported compression on input files. Skipping!!!" - ) - return - self.log.warning("Conversion crashed", exc_info=True) - raise - - new_repre = { - "name": "thumbnail", - "ext": "jpg", - "files": thumbnail_filename, - "stagingDir": stagingdir, - "thumbnail": True, - "tags": ["thumbnail"] - } - - # adding representation - self.log.debug("Adding: {}".format(new_repre)) - instance.data["representations"].append(new_repre) - - # Cleanup temp folder - if convert_dir is not None and os.path.exists(convert_dir): - shutil.rmtree(convert_dir) - - def _get_filtered_repres(self, instance): - filtered_repres = [] - repres = instance.data.get("representations") or [] - for repre in repres: - self.log.debug(repre) - tags = repre.get("tags") or [] - # Skip instance if already has thumbnail representation - if "thumbnail" in tags: - return [] - - if "review" not in tags: - continue - - if not repre.get("files"): - self.log.info(( - "Representation \"{}\" don't have files. 
Skipping" - ).format(repre["name"])) - continue - - filtered_repres.append(repre) - return filtered_repres From 34f4ee5ec79facfddd4828b1f0316c3c2a206155 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Nov 2022 18:29:51 +0100 Subject: [PATCH 1967/2550] OP-4371 - fix - provide full path to first file as a source for Thumbnail --- .../plugins/publish/collect_published_files.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 3aa941af42..2bf097de41 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -83,8 +83,9 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): self.log.info("task_data:: {}".format(task_data)) is_sequence = len(task_data["files"]) > 1 + first_file = task_data["files"][0] - _, extension = os.path.splitext(task_data["files"][0]) + _, extension = os.path.splitext(first_file) family, families, tags = self._get_family( self.task_type_to_family, task_type, @@ -153,8 +154,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["handleEnd"] = asset_doc["data"]["handleEnd"] if "review" in tags: - instance.data["thumbnailSource"] = \ - instance.data["representations"][0] + first_file_path = os.path.join(task_dir, first_file) + instance.data["thumbnailSource"] = first_file_path instances.append(instance) self.log.info("instance.data:: {}".format(instance.data)) From 82be7ce8d053eefcb430a6b7d26948821ea6ea11 Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 18:33:54 +0100 Subject: [PATCH 1968/2550] Change to REST API using web server --- .../hooks/pre_copy_last_published_workfile.py | 54 ++++++++------- openpype/modules/sync_server/rest_api.py | 68 +++++++++++++++++++ openpype/modules/sync_server/sync_server.py | 12 ++-- .../modules/sync_server/sync_server_module.py | 9 +++ openpype/modules/timers_manager/rest_api.py | 2 +- 5 files changed, 112 insertions(+), 33 deletions(-) create mode 100644 openpype/modules/sync_server/rest_api.py diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 7a835507f7..cefc7e5d40 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -1,14 +1,14 @@ -import gc import os import shutil +from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, get_representations, get_subsets, ) from openpype.lib import PreLaunchHook +from openpype.lib.local_settings import get_local_site_id from openpype.lib.profiles_filtering import filter_profiles -from openpype.modules.base import ModulesManager from openpype.pipeline.load.utils import get_representation_path from openpype.settings.lib import get_project_settings @@ -137,33 +137,37 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # Get sync server from Tray, - # which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if t["sync_server"].sync_server_thread - ), - None, - ) + # POST to webserver sites to add to representations + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + self.log.warning("Couldn't find webserver url") + 
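# --- Editorial aside (not part of this patch): a self-contained sketch of
# the client side of the two endpoints added by this series. The project
# name, site name and representation id below are placeholder values.
import os
import time

import requests

entry_point = "{}/sync_server".format(os.environ["OPENPYPE_WEBSERVER_URL"])

# Queue a representation for download to the chosen site; the endpoint also
# wakes up the sync loop immediately.
requests.post(
    "{}/add_sites_to_representations".format(entry_point),
    json={
        "project_name": "my_project",
        "sites": ["my-local-site-id"],
        "representations": ["633f0a1b2c3d4e5f6a7b8c9d"],
    },
)

# Poll until the sync loop reports it has finished processing files.
while requests.get("{}/files_are_processed".format(entry_point)).content:
    time.sleep(5)
# --- End of editorial aside.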
return - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, + entry_point_url = "{}/sync_server".format(webserver_url) + rest_api_url = "{}/add_sites_to_representations".format( + entry_point_url + ) + try: + import requests + except Exception: + self.log.warning( + "Couldn't add sites to representations ('requests' is not available)" + ) + return + + requests.post( + rest_api_url, + json={ + "project_name": project_name, + "sites": [get_local_site_id()], + "representations": [str(workfile_representation["_id"])], + }, ) - sync_server.reset_timer() # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + rest_api_url = "{}/files_are_processed".format(entry_point_url) + while requests.get(rest_api_url).content: + sleep(5) # Get paths published_workfile_path = get_representation_path( diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py new file mode 100644 index 0000000000..b7c5d26d15 --- /dev/null +++ b/openpype/modules/sync_server/rest_api.py @@ -0,0 +1,68 @@ +from aiohttp.web_response import Response +from openpype.lib import Logger + + +class SyncServerModuleRestApi: + """ + REST API endpoint used for calling from hosts when context change + happens in Workfile app. + """ + + def __init__(self, user_module, server_manager): + self._log = None + self.module = user_module + self.server_manager = server_manager + + self.prefix = "/sync_server" + + self.register() + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + def register(self): + self.server_manager.add_route( + "POST", + self.prefix + "/add_sites_to_representations", + self.add_sites_to_representations, + ) + self.server_manager.add_route( + "GET", + self.prefix + "/files_are_processed", + self.files_are_processed, + ) + + async def add_sites_to_representations(self, request): + # Extract data from request + data = await request.json() + try: + project_name = data["project_name"] + sites = data["sites"] + representations = data["representations"] + except KeyError: + msg = ( + "Payload must contain fields 'project_name," + " 'sites' (list of names) and 'representations' (list of IDs)" + ) + self.log.error(msg) + return Response(status=400, message=msg) + + # Add all sites to each representation + for representation_id in representations: + for site in sites: + self.module.add_site( + project_name, representation_id, site, force=True + ) + + # Force timer to run immediately + self.module.reset_timer() + + return Response(status=200) + + async def files_are_processed(self, _request): + return Response( + body=bytes(self.module.sync_server_thread.files_are_processed) + ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 353b39c4e1..7fd2311c2d 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -237,15 +237,13 @@ class SyncServerThread(threading.Thread): def __init__(self, module): self.log = Logger.get_logger(self.__class__.__name__) - # Event to trigger files have been processed - self.files_processed = threading.Event() - - super(SyncServerThread, self).__init__(args=(self.files_processed,)) + super(SyncServerThread, self).__init__() self.module = module self.loop = None self.is_running = False self.executor = 
concurrent.futures.ThreadPoolExecutor(max_workers=3) self.timer = None + self.files_are_processed = False def run(self): self.is_running = True @@ -400,8 +398,8 @@ class SyncServerThread(threading.Thread): representation, site, error) - # Trigger files are processed - self.files_processed.set() + # Trigger files process finished + self.files_are_processed = False duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -460,7 +458,6 @@ class SyncServerThread(threading.Thread): async def run_timer(self, delay): """Wait for 'delay' seconds to start next loop""" - self.files_processed.clear() await asyncio.sleep(delay) def reset_timer(self): @@ -469,6 +466,7 @@ class SyncServerThread(threading.Thread): if self.timer: self.timer.cancel() self.timer = None + self.files_are_processed = True def _working_sites(self, project_name): if self.module.is_project_paused(project_name): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index a478faa9ef..7aaf42006c 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -2089,6 +2089,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def cli(self, click_group): click_group.add_command(cli_main) + # Webserver module implementation + def webserver_initialization(self, server_manager): + """Add routes for syncs.""" + if self.tray_initialized: + from .rest_api import SyncServerModuleRestApi + self.rest_api_obj = SyncServerModuleRestApi( + self, server_manager + ) + @click.group(SyncServerModule.name, help="SyncServer module related commands.") def cli_main(): diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index 4a2e9e6575..979db9075b 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -21,7 +21,7 @@ class TimersManagerModuleRestApi: @property def log(self): if self._log is None: - self._log = Logger.get_logger(self.__ckass__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log def register(self): From bca965cf9cae5eff205edd0e191288731770fa1a Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 18:37:13 +0100 Subject: [PATCH 1969/2550] lint --- openpype/hooks/pre_copy_last_published_workfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index cefc7e5d40..6bec4f7d2c 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -151,7 +151,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): import requests except Exception: self.log.warning( - "Couldn't add sites to representations ('requests' is not available)" + "Couldn't add sites to representations " + "('requests' is not available)" ) return From 372c6d89c37e1d67aea8caab69667d55e5b6f34d Mon Sep 17 00:00:00 2001 From: 2-REC Date: Fri, 4 Nov 2022 15:26:33 +0700 Subject: [PATCH 1970/2550] Setting from other plugin --- .../publish/validate_texture_workfiles.py | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index 56ea82f6b6..a25b80438d 100644 --- 
a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -1,5 +1,7 @@ +import os import pyblish.api +from openpype.settings import get_project_settings from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -18,23 +20,40 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True + #TODO(2-rec): remove/change comment # from presets main_workfile_extensions = ['mra'] def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] - if ext not in self.main_workfile_extensions: + main_workfile_extensions = self.get_main_workfile_extensions() + if ext not in main_workfile_extensions: self.log.warning("Only secondary workfile present!") return if not instance.data.get("resources"): msg = "No secondary workfile present for workfile '{}'". \ format(instance.data["name"]) - ext = self.main_workfile_extensions[0] + ext = main_workfile_extensions[0] formatting_data = {"file_name": instance.data["name"], "extension": ext} raise PublishXmlValidationError(self, msg, formatting_data=formatting_data ) + + @classmethod + def get_main_workfile_extensions(cls): + project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) + + #TODO: find better way? (depends on other plugin) + try: + extensions = (project_settings["standalonepublisher"] + ["publish"] + ["CollectTextures"] + ["main_workfile_extensions"]) + except KeyError: + extensions = cls.main_workfile_extensions + + return extensions From e359fb3d8451949ccba65204d69adb39d4a711cf Mon Sep 17 00:00:00 2001 From: Felix David Date: Fri, 4 Nov 2022 10:06:59 +0100 Subject: [PATCH 1971/2550] legacy compatibility --- openpype/hooks/pre_copy_last_published_workfile.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 6bec4f7d2c..f3293fa511 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -99,9 +99,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): for subset in get_subsets( project_name, asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family"], + fields=["_id", "data.family", "data.families"], ) - if subset["data"]["family"] == "workfile" + if subset["data"].get("family") == "workfile" + # Legacy compatibility + or "workfile" in subset["data"].get("families", {}) ), None, ) From a57a875b071f28b0bb0ddc234a7b95516dd41a0d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 4 Nov 2022 16:00:39 +0100 Subject: [PATCH 1972/2550] fix cache of asset docs --- openpype/tools/publisher/control.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 10734a69f4..615f3eb8d9 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -90,9 +90,9 @@ class AssetDocsCache: return project_name = self._controller.project_name - asset_docs = get_assets( + asset_docs = list(get_assets( project_name, fields=self.projection.keys() - ) + )) asset_docs_by_name = {} task_names_by_asset_name = {} for asset_doc in asset_docs: From 55b520128dd984ad8f9be31737f03c753c5a008e Mon Sep 17 00:00:00 2001 From: clement hector Date: Fri, 4 Nov 2022 17:45:51 +0100 Subject: [PATCH 
1973/2550] get kitsu login to fill username burnin --- openpype/plugins/publish/extract_burnin.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 4179199317..5f6f0acc97 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -856,12 +856,17 @@ class ExtractBurnin(publish.Extractor): )) continue + kitsu_user = self.get_kitsu_user() + # Burnin values burnin_values = {} for key, value in tuple(burnin_def.items()): key_low = key.lower() if key_low in self.positions and value: - burnin_values[key_low] = value + if key_low == "bottom_left" and kitsu_user: + burnin_values[key_low] = kitsu_user + else: + burnin_values[key_low] = value # Skip processing if burnin values are not set if not burnin_values: @@ -882,6 +887,15 @@ class ExtractBurnin(publish.Extractor): return filtered_burnin_defs + def get_kitsu_user(self): + kitsu_login = os.environ.get("KITSU_LOGIN") + if not kitsu_login: + return None + + kitsu_user = kitsu_login.split("@")[0] + kitsu_user = kitsu_user.replace('.', ' ').title() + return kitsu_user + def families_filter_validation(self, families, output_families_filter): """Determine if entered families intersect with families filters. From befd6889ccf35216e1153eec5742d0b16edcceed Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 4 Nov 2022 21:25:12 +0100 Subject: [PATCH 1974/2550] use much simpler UI for the button --- openpype/tools/publisher/widgets/widgets.py | 112 +++++--------------- openpype/tools/publisher/window.py | 19 ++-- 2 files changed, 35 insertions(+), 96 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 444ad4c7dc..a180107380 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1660,22 +1660,19 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def __init__(self, parent): super(CreateNextPageOverlay, self).__init__(parent) - + self.setCursor(QtCore.Qt.PointingHandCursor) self._arrow_color = ( get_objected_colors("bg-buttons").get_qcolor() ) - self._gradient_start_color = ( + self._bg_color = ( get_objected_colors("publisher", "tab-bg").get_qcolor() ) - self._gradient_end_color = ( - get_objected_colors("bg-inputs").get_qcolor() - ) change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) change_anim.setEndValue(self.max_value) - change_anim.setDuration(200) - change_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) + change_anim.setDuration(400) + change_anim.setEasingCurve(QtCore.QEasingCurve.OutBounce) change_anim.valueChanged.connect(self._on_anim) @@ -1731,19 +1728,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): if not self._is_visible: self.setVisible(False) - def set_handle_show_on_own(self, handle): - if self._handle_show_on_own is handle: - return - self._handle_show_on_own = handle - self._under_mouse = None - self._check_anim_timer() - def set_under_mouse(self, under_mouse): if self._under_mouse is under_mouse: return - if self._handle_show_on_own: - self._handle_show_on_own = False self._under_mouse = under_mouse self.set_increasing(under_mouse) @@ -1756,22 +1744,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): if not self.isVisible(): return - if self._handle_show_on_own: - under_mouse = self._is_under_mouse() - else: - under_mouse = self._under_mouse - - self.set_increasing(under_mouse) - - def enterEvent(self, event): - 
super(CreateNextPageOverlay, self).enterEvent(event) - if self._handle_show_on_own: - self._check_anim_timer() - - def leaveEvent(self, event): - super(CreateNextPageOverlay, self).leaveEvent(event) - if self._handle_show_on_own: - self._check_anim_timer() + self.set_increasing(self._under_mouse) def mousePressEvent(self, event): if event.button() == QtCore.Qt.LeftButton: @@ -1792,74 +1765,41 @@ class CreateNextPageOverlay(QtWidgets.QWidget): if self._anim_value == 0.0: painter.end() return + + painter.setClipRect(event.rect()) painter.setRenderHints( painter.Antialiasing | painter.SmoothPixmapTransform ) - pen = QtGui.QPen() - pen.setWidth(0) - painter.setPen(pen) + painter.setPen(QtCore.Qt.NoPen) + rect = QtCore.QRect(self.rect()) + rect_width = rect.width() + rect_height = rect.height() - offset = rect.width() - int( - float(rect.width()) * 0.01 * self._anim_value - ) + size = rect_width * 0.9 - pos_y = rect.center().y() - left = rect.left() + offset - top = rect.top() - # Right and bootm is pixel index - right = rect.right() + 1 - bottom = rect.bottom() + 1 - width = right - left - height = bottom - top + x_offset = (rect_width - size) * 0.5 + y_offset = (rect_height - size) * 0.5 + if self._anim_value != self.max_value: + x_offset += rect_width - (rect_width * 0.01 * self._anim_value) - q_height = height * 0.15 - - arrow_half_height = width * 0.2 - arrow_x_start = left + (width * 0.4) + arrow_half_height = size * 0.2 + arrow_x_start = x_offset + (size * 0.4) arrow_x_end = arrow_x_start + arrow_half_height - arrow_top_y_boundry = arrow_half_height + q_height - arrow_bottom_y_boundry = height - (arrow_half_height + q_height) - offset = 0 - if pos_y < arrow_top_y_boundry: - pos_y = arrow_top_y_boundry - elif pos_y > arrow_bottom_y_boundry: - pos_y = arrow_bottom_y_boundry + center_y = rect.center().y() - top_cubic_y = pos_y - q_height - bottom_cubic_y = pos_y + q_height - - path = QtGui.QPainterPath() - path.moveTo(right, top) - path.lineTo(right, bottom) - - path.cubicTo( - right, bottom, - left, bottom_cubic_y, - left, pos_y + painter.setBrush(self._bg_color) + painter.drawEllipse( + x_offset, y_offset, + size, size ) - path.cubicTo( - left, top_cubic_y, - right, top, - right, top - ) - path.closeSubpath() - - radius = height * 0.7 - focal = QtCore.QPointF(left, pos_y) - start_p = QtCore.QPointF(right - (width * 0.5), pos_y) - gradient = QtGui.QRadialGradient(start_p, radius, focal) - gradient.setColorAt(0, self._gradient_start_color) - gradient.setColorAt(1, self._gradient_end_color) - - painter.fillPath(path, gradient) src_arrow_path = QtGui.QPainterPath() - src_arrow_path.moveTo(arrow_x_start, pos_y - arrow_half_height) - src_arrow_path.lineTo(arrow_x_end, pos_y) - src_arrow_path.lineTo(arrow_x_start, pos_y + arrow_half_height) + src_arrow_path.moveTo(arrow_x_start, center_y - arrow_half_height) + src_arrow_path.lineTo(arrow_x_end, center_y) + src_arrow_path.lineTo(arrow_x_start, center_y + arrow_half_height) arrow_stroker = QtGui.QPainterPathStroker() arrow_stroker.setWidth(min(4, arrow_half_height * 0.2)) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 2063cdab96..82a2576ff4 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -228,7 +228,6 @@ class PublisherWindow(QtWidgets.QDialog): publish_frame = PublishFrame(controller, self.footer_border, self) create_overlay_button = CreateNextPageOverlay(self) - create_overlay_button.set_handle_show_on_own(False) show_timer = QtCore.QTimer() 
show_timer.setInterval(1)
@@ -716,20 +715,20 @@ class PublisherWindow(QtWidgets.QDialog):
         )
 
     def _update_create_overlay_size(self):
-        height = self._content_widget.height()
         metrics = self._create_overlay_button.fontMetrics()
-        width = int(metrics.height() * 3)
-        pos_x = self.width() - width
+        size = int(metrics.height() * 3)
+        end_pos_x = self.width()
+        start_pos_x = end_pos_x - size
 
-        tab_pos = self._tabs_widget.parent().mapTo(
-            self, self._tabs_widget.pos()
+        center = self._content_widget.parent().mapTo(
+            self,
+            self._content_widget.rect().center()
         )
-        tab_height = self._tabs_widget.height()
-        pos_y = tab_pos.y() + tab_height
+        pos_y = center.y() - (size * 0.5)
 
         self._create_overlay_button.setGeometry(
-            pos_x, pos_y,
-            width, height
+            start_pos_x, pos_y,
+            size, size
         )
 
     def _update_create_overlay_visibility(self, global_pos=None):

From 9ec78651547738a2d2ed3cf266ebb9428b44a6b6 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 4 Nov 2022 21:32:09 +0100
Subject: [PATCH 1975/2550] removed unnecessary restart

---
 openpype/tools/publisher/window.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py
index 7cf3ae0da8..0daa31938d 100644
--- a/openpype/tools/publisher/window.py
+++ b/openpype/tools/publisher/window.py
@@ -362,7 +362,6 @@ class PublisherWindow(QtWidgets.QDialog):
             self._first_show = False
             self._on_first_show()
 
-        self._show_counter = 0
         self._show_timer.start()
 
     def resizeEvent(self, event):

From a852973e1139e5f2bba380f5c1e103ab3a817a54 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 4 Nov 2022 21:32:56 +0100
Subject: [PATCH 1976/2550] fix details dialog close

---
 openpype/tools/publisher/window.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py
index 0daa31938d..281c7ad2a1 100644
--- a/openpype/tools/publisher/window.py
+++ b/openpype/tools/publisher/window.py
@@ -472,7 +472,7 @@ class PublisherWindow(QtWidgets.QDialog):
         )
 
     def _on_tab_change(self, old_tab, new_tab):
-        if old_tab != "details":
+        if old_tab == "details":
             self._publish_details_widget.close_details_popup()
 
         if new_tab in ("create", "publish"):

From a2c0e7228bff885bfca0c8df737408cd526c7d92 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 7 Nov 2022 12:24:52 +0100
Subject: [PATCH 1977/2550] Nuke: resolve hashes in file name only for frame
 token

---
 openpype/hosts/nuke/plugins/load/load_clip.py | 77 +++++++++++--------
 1 file changed, 43 insertions(+), 34 deletions(-)

diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py
index 666312167f..ec4f735522 100644
--- a/openpype/hosts/nuke/plugins/load/load_clip.py
+++ b/openpype/hosts/nuke/plugins/load/load_clip.py
@@ -1,7 +1,7 @@
-import os
 import nuke
 import qargparse
-
+from copy import deepcopy
+from openpype.lib import Logger
 from openpype.client import (
     get_version_by_id,
     get_last_version_by_subset_id,
@@ -22,6 +22,8 @@ from openpype.hosts.nuke.api import (
 )
 from openpype.hosts.nuke.api import plugin
 
+log = Logger.get_logger(__name__)
+
 
 class LoadClip(plugin.NukeLoader):
     """Load clip into Nuke
@@ -85,24 +87,19 @@ class LoadClip(plugin.NukeLoader):
         + plugin.get_review_presets_config()
     )
 
-    def _fix_path_for_knob(self, filepath, repre_cont):
-        basename = os.path.basename(filepath)
-        dirname = os.path.dirname(filepath)
-        frame = repre_cont.get("frame")
-        assert frame, "Representation is not sequence"
-
-        padding = len(str(frame))
-        basename =
basename.replace(frame, "#" * padding) - return os.path.join(dirname, basename).replace("\\", "/") - def load(self, context, name, namespace, options): - repre = context["representation"] + representation = context["representation"] # reste container id so it is always unique for each instance self.reset_container_id() - is_sequence = len(repre["files"]) > 1 + is_sequence = len(representation["files"]) > 1 - filepath = self.fname.replace("\\", "/") + if is_sequence: + representation = self._representation_with_hash_in_frame( + representation + ) + filepath = get_representation_path(representation).replace("\\", "/") + log.debug("_ filepath: {}".format(filepath)) start_at_workfile = options.get( "start_at_workfile", self.options_defaults["start_at_workfile"]) @@ -112,12 +109,10 @@ class LoadClip(plugin.NukeLoader): version = context['version'] version_data = version.get("data", {}) - repre_id = repre["_id"] + repre_id = representation["_id"] - repre_cont = repre["context"] - - self.log.info("version_data: {}\n".format(version_data)) - self.log.debug( + log.info("version_data: {}\n".format(version_data)) + log.debug( "Representation id `{}` ".format(repre_id)) self.handle_start = version_data.get("handleStart", 0) @@ -132,19 +127,17 @@ class LoadClip(plugin.NukeLoader): duration = last - first first = 1 last = first + duration - elif "#" not in filepath: - filepath = self._fix_path_for_knob(filepath, repre_cont) # Fallback to asset name when namespace is None if namespace is None: namespace = context['asset']['name'] if not filepath: - self.log.warning( + log.warning( "Representation id `{}` is failing to load".format(repre_id)) return - read_name = self._get_node_name(repre) + read_name = self._get_node_name(representation) # Create the Loader with the filename path set read_node = nuke.createNode( @@ -157,7 +150,7 @@ class LoadClip(plugin.NukeLoader): read_node["file"].setValue(filepath) used_colorspace = self._set_colorspace( - read_node, version_data, repre["data"]) + read_node, version_data, representation["data"]) self._set_range_to_node(read_node, first, last, start_at_workfile) @@ -179,7 +172,7 @@ class LoadClip(plugin.NukeLoader): data_imprint[k] = version elif k == 'colorspace': - colorspace = repre["data"].get(k) + colorspace = representation["data"].get(k) colorspace = colorspace or version_data.get(k) data_imprint["db_colorspace"] = colorspace if used_colorspace: @@ -213,6 +206,20 @@ class LoadClip(plugin.NukeLoader): def switch(self, container, representation): self.update(container, representation) + def _representation_with_hash_in_frame(self, representation): + """Convert frame key value to padded hash + + Args: + representation (dict): representation data + + Returns: + dict: altered representation data + """ + representation = deepcopy(representation) + frame = representation["context"]["frame"] + representation["context"]["frame"] = "#" * len(str(frame)) + return representation + def update(self, container, representation): """Update the Loader's path @@ -225,7 +232,13 @@ class LoadClip(plugin.NukeLoader): is_sequence = len(representation["files"]) > 1 read_node = nuke.toNode(container['objectName']) + + if is_sequence: + representation = self._representation_with_hash_in_frame( + representation + ) filepath = get_representation_path(representation).replace("\\", "/") + log.debug("_ filepath: {}".format(filepath)) start_at_workfile = "start at" in read_node['frame_mode'].value() @@ -240,8 +253,6 @@ class LoadClip(plugin.NukeLoader): version_data = version_doc.get("data", 
{}) repre_id = representation["_id"] - repre_cont = representation["context"] - # colorspace profile colorspace = representation["data"].get("colorspace") colorspace = colorspace or version_data.get("colorspace") @@ -258,11 +269,9 @@ class LoadClip(plugin.NukeLoader): duration = last - first first = 1 last = first + duration - elif "#" not in filepath: - filepath = self._fix_path_for_knob(filepath, repre_cont) if not filepath: - self.log.warning( + log.warning( "Representation id `{}` is failing to load".format(repre_id)) return @@ -312,7 +321,7 @@ class LoadClip(plugin.NukeLoader): read_node, updated_dict ) - self.log.info( + log.info( "updated to version: {}".format(version_doc.get("name")) ) @@ -348,8 +357,8 @@ class LoadClip(plugin.NukeLoader): time_warp_nodes = version_data.get('timewarps', []) last_node = None source_id = self.get_container_id(parent_node) - self.log.info("__ source_id: {}".format(source_id)) - self.log.info("__ members: {}".format(self.get_members(parent_node))) + log.info("__ source_id: {}".format(source_id)) + log.info("__ members: {}".format(self.get_members(parent_node))) dependent_nodes = self.clear_members(parent_node) with maintained_selection(): From 833829693584f2db9c08c83a35791a20675c1185 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 7 Nov 2022 15:02:30 +0100 Subject: [PATCH 1978/2550] remove the get kitsu username function --- openpype/plugins/publish/extract_burnin.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 5f6f0acc97..4179199317 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -856,17 +856,12 @@ class ExtractBurnin(publish.Extractor): )) continue - kitsu_user = self.get_kitsu_user() - # Burnin values burnin_values = {} for key, value in tuple(burnin_def.items()): key_low = key.lower() if key_low in self.positions and value: - if key_low == "bottom_left" and kitsu_user: - burnin_values[key_low] = kitsu_user - else: - burnin_values[key_low] = value + burnin_values[key_low] = value # Skip processing if burnin values are not set if not burnin_values: @@ -887,15 +882,6 @@ class ExtractBurnin(publish.Extractor): return filtered_burnin_defs - def get_kitsu_user(self): - kitsu_login = os.environ.get("KITSU_LOGIN") - if not kitsu_login: - return None - - kitsu_user = kitsu_login.split("@")[0] - kitsu_user = kitsu_user.replace('.', ' ').title() - return kitsu_user - def families_filter_validation(self, families, output_families_filter): """Determine if entered families intersect with families filters. 
From c0f2de08d9285aa7eb5359d6376b9bb41c065336 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 7 Nov 2022 16:11:38 +0100 Subject: [PATCH 1979/2550] get username from kitsu login --- .../plugins/publish/collect_kitsu_username.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py new file mode 100644 index 0000000000..846adac30a --- /dev/null +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +import os +import re + +import pyblish.api + + +class CollectKitsuUsername(pyblish.api.ContextPlugin): + """Collect Kitsu username from the kitsu login""" + + order = pyblish.api.CollectorOrder + 0.499 + label = "Kitsu username" + + def process(self, context): + for instance in context: + kitsu_login = os.environ['KITSU_LOGIN'] + + if kitsu_login: + kitsu_username = kitsu_login.split("@")[0] + kitsu_username = kitsu_username.split('.') + kitsu_username = ' '.join(kitsu_username) + + new_username = re.sub('[^a-zA-Z]', ' ', kitsu_username) + + instance.data['customData'] = { + "kitsuUsername": new_username.title() + } From 4e03b94e877cf0b0c39e4a4156997c5c9c22f781 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 7 Nov 2022 21:50:02 +0100 Subject: [PATCH 1980/2550] hiero: adding better ranges --- openpype/hosts/hiero/api/plugin.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index ea8a9e836a..5ec1c78aaa 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -170,7 +170,10 @@ class CreatorWidget(QtWidgets.QDialog): for func, val in kwargs.items(): if getattr(item, func): func_attr = getattr(item, func) - func_attr(val) + if isinstance(val, tuple): + func_attr(*val) + else: + func_attr(val) # add to layout layout.addRow(label, item) @@ -273,8 +276,8 @@ class CreatorWidget(QtWidgets.QDialog): elif v["type"] == "QSpinBox": data[k]["value"] = self.create_row( content_layout, "QSpinBox", v["label"], - setValue=v["value"], setMinimum=0, - setMaximum=100000, setToolTip=tool_tip) + setRange=(1, 9999999), setValue=v["value"], + setToolTip=tool_tip) return data From 017ec79552eeb000edc6159960867dc781275655 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 7 Nov 2022 23:20:29 +0100 Subject: [PATCH 1981/2550] change colors --- openpype/tools/publisher/widgets/widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 6c8ee3b332..ece27cd8cc 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1718,10 +1718,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): super(CreateNextPageOverlay, self).__init__(parent) self.setCursor(QtCore.Qt.PointingHandCursor) self._arrow_color = ( - get_objected_colors("bg-buttons").get_qcolor() + get_objected_colors("font").get_qcolor() ) self._bg_color = ( - get_objected_colors("publisher", "tab-bg").get_qcolor() + get_objected_colors("bg-buttons").get_qcolor() ) change_anim = QtCore.QVariantAnimation() From b75356d631f26048330e65ff24e78107dc0bbd0c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 7 Nov 2022 23:20:35 +0100 Subject: [PATCH 1982/2550] change easing curve 
--- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ece27cd8cc..f170992c1a 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1728,7 +1728,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): change_anim.setStartValue(0.0) change_anim.setEndValue(self.max_value) change_anim.setDuration(400) - change_anim.setEasingCurve(QtCore.QEasingCurve.OutBounce) + change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) change_anim.valueChanged.connect(self._on_anim) From 94114d5ed0ecb4c785403c99e55a94f9b2f3cb6b Mon Sep 17 00:00:00 2001 From: clement hector Date: Tue, 8 Nov 2022 11:21:52 +0100 Subject: [PATCH 1983/2550] add instance name and extension checks to filter only reviewMain file --- .../kitsu/plugins/publish/integrate_kitsu_review.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index bf80095225..61d5a13660 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import os import gazu import pyblish.api @@ -31,9 +32,13 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): continue review_path = representation.get("published_path") + file_name, file_extension = os.path.splitext(review_path) + + if instance.data.get('name') != 'reviewMain' \ + or file_extension != '.mp4': + continue self.log.debug("Found review at: {}".format(review_path)) - gazu.task.add_preview( task, comment, review_path, normalize_movie=True ) From 0549bb68fd776488a63b32be7cbdf5af6c79a5d6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 8 Nov 2022 15:28:03 +0100 Subject: [PATCH 1984/2550] nuke: returning logging from self --- openpype/hosts/nuke/plugins/load/load_clip.py | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index ec4f735522..b17356c5c7 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -1,5 +1,6 @@ import nuke import qargparse +from pprint import pformat from copy import deepcopy from openpype.lib import Logger from openpype.client import ( @@ -22,14 +23,13 @@ from openpype.hosts.nuke.api import ( ) from openpype.hosts.nuke.api import plugin -log = Logger.get_logger(__name__) - class LoadClip(plugin.NukeLoader): """Load clip into Nuke Either it is image sequence or video file. 
""" + log = Logger.get_logger(__name__) families = [ "source", @@ -99,7 +99,7 @@ class LoadClip(plugin.NukeLoader): representation ) filepath = get_representation_path(representation).replace("\\", "/") - log.debug("_ filepath: {}".format(filepath)) + self.log.debug("_ filepath: {}".format(filepath)) start_at_workfile = options.get( "start_at_workfile", self.options_defaults["start_at_workfile"]) @@ -111,8 +111,9 @@ class LoadClip(plugin.NukeLoader): version_data = version.get("data", {}) repre_id = representation["_id"] - log.info("version_data: {}\n".format(version_data)) - log.debug( + self.log.debug("_ version_data: {}\n".format( + pformat(version_data))) + self.log.debug( "Representation id `{}` ".format(repre_id)) self.handle_start = version_data.get("handleStart", 0) @@ -133,7 +134,7 @@ class LoadClip(plugin.NukeLoader): namespace = context['asset']['name'] if not filepath: - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repre_id)) return @@ -238,7 +239,7 @@ class LoadClip(plugin.NukeLoader): representation ) filepath = get_representation_path(representation).replace("\\", "/") - log.debug("_ filepath: {}".format(filepath)) + self.log.debug("_ filepath: {}".format(filepath)) start_at_workfile = "start at" in read_node['frame_mode'].value() @@ -271,7 +272,7 @@ class LoadClip(plugin.NukeLoader): last = first + duration if not filepath: - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repre_id)) return @@ -321,7 +322,7 @@ class LoadClip(plugin.NukeLoader): read_node, updated_dict ) - log.info( + self.log.info( "updated to version: {}".format(version_doc.get("name")) ) @@ -357,8 +358,10 @@ class LoadClip(plugin.NukeLoader): time_warp_nodes = version_data.get('timewarps', []) last_node = None source_id = self.get_container_id(parent_node) - log.info("__ source_id: {}".format(source_id)) - log.info("__ members: {}".format(self.get_members(parent_node))) + self.log.debug("__ source_id: {}".format(source_id)) + self.log.debug("__ members: {}".format( + self.get_members(parent_node))) + dependent_nodes = self.clear_members(parent_node) with maintained_selection(): From 3dbfa8ee5143d411adf6bbe2357966078cb819e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 8 Nov 2022 16:30:40 +0100 Subject: [PATCH 1985/2550] removed max value and use 1.0 --- openpype/tools/publisher/widgets/widgets.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index f170992c1a..7ab6294817 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1711,7 +1711,6 @@ class SubsetAttributesWidget(QtWidgets.QWidget): class CreateNextPageOverlay(QtWidgets.QWidget): - max_value = 100.0 clicked = QtCore.Signal() def __init__(self, parent): @@ -1726,7 +1725,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) - change_anim.setEndValue(self.max_value) + change_anim.setEndValue(1.0) change_anim.setDuration(400) change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) @@ -1768,7 +1767,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def _is_anim_finished(self): if self._increasing: - return self._anim_value == self.max_value + return self._anim_value == 1.0 return self._anim_value == 0.0 def _on_anim(self, value): @@ -1838,8 +1837,8 @@ class CreateNextPageOverlay(QtWidgets.QWidget): x_offset = (rect_width - 
size) * 0.5 y_offset = (rect_height - size) * 0.5 - if self._anim_value != self.max_value: - x_offset += rect_width - (rect_width * 0.01 * self._anim_value) + if self._anim_value != 1.0: + x_offset += rect_width - (rect_width * self._anim_value) arrow_half_height = size * 0.2 arrow_x_start = x_offset + (size * 0.4) From a76ad6035110e917e44b36e799222787bf87fd9c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 16:41:19 +0100 Subject: [PATCH 1986/2550] use 'created_dt' of representation --- openpype/client/entities.py | 28 +++++++++++++++++++ .../hooks/pre_copy_last_published_workfile.py | 19 +++++++++++-- 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43afccf2f1..43c2874f57 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,6 +6,7 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. """ +from datetime import datetime import re import collections @@ -1367,6 +1368,33 @@ def get_representation_parents(project_name, representation): return parents_by_repre_id[repre_id] +def get_representation_last_created_time_on_site( + representation: dict, site_name: str +) -> datetime: + """Get `created_dt` value for representation on site. + + Args: + representation (dict): Representation to get creation date of + site_name (str): Site from which to get the creation date + + Returns: + datetime: Created time of representation on site + """ + created_time = next( + ( + site.get("created_dt") + for site in representation["files"][0].get("sites", []) + if site["name"] == site_name + ), + None, + ) + if created_time: + return created_time + else: + # Use epoch as 'zero' time + return datetime.utcfromtimestamp(0) + + def get_thumbnail_id_from_source(project_name, src_type, src_id): """Receive thumbnail id from source entity. 
diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index f3293fa511..4eb66f6f85 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -3,6 +3,8 @@ import shutil from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, + get_representation_by_id, + get_representation_last_created_time_on_site, get_representations, get_subsets, ) @@ -158,18 +160,29 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return + local_site_id = get_local_site_id() requests.post( rest_api_url, json={ "project_name": project_name, - "sites": [get_local_site_id()], + "sites": [local_site_id], "representations": [str(workfile_representation["_id"])], }, ) # Wait for the download loop to end - rest_api_url = "{}/files_are_processed".format(entry_point_url) - while requests.get(rest_api_url).content: + last_created_time = get_representation_last_created_time_on_site( + workfile_representation, local_site_id + ) + while ( + last_created_time + >= get_representation_last_created_time_on_site( + get_representation_by_id( + project_name, workfile_representation["_id"] + ), + local_site_id, + ) + ): sleep(5) # Get paths From 7b1069f708dc9c0d8153fc669a1106bc6d79d030 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 00:32:22 +0800 Subject: [PATCH 1987/2550] Alembic Loader as Arnold Standin --- .../hosts/maya/plugins/load/load_abc_to_standin.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 5d6c52eac9..94bb974917 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -10,7 +10,7 @@ from openpype.settings import get_project_settings class AlembicStandinLoader(load.LoaderPlugin): """Load Alembic as Arnold Standin""" - families = ["model", "pointcache"] + families = ["animation", "model", "pointcache"] representations = ["abc"] label = "Import Alembic as Arnold Standin" @@ -31,6 +31,7 @@ class AlembicStandinLoader(load.LoaderPlugin): self.log.info("version_data: {}\n".format(version_data)) frameStart = version_data.get("frameStart", None) + frameEnd = version_data.get("frameEnd", None) asset = context["asset"]["name"] namespace = namespace or unique_namespace( @@ -64,7 +65,13 @@ class AlembicStandinLoader(load.LoaderPlugin): # Set the standin filepath cmds.setAttr(standinShape + ".dso", self.fname, type="string") - if frameStart is not None: + cmds.setAttr(standinShape + ".abcFPS", 25) + + if frameStart is None: + cmds.setAttr(standinShape + ".useFrameExtension", 0) + elif frameStart == 1 and frameEnd == 1: + cmds.setAttr(standinShape + ".useFrameExtension", 0) + else: cmds.setAttr(standinShape + ".useFrameExtension", 1) nodes = [root, standin] @@ -93,7 +100,8 @@ class AlembicStandinLoader(load.LoaderPlugin): for standin in standins: standin.dso.set(path) - standin.useFrameExtension.set(1) + standin.useFrameExtension.set(0) + standin.abcFPS.set(25) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From 3ad8e95ca436c4229ae8f3eadbee5b41c8326a68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:57:02 +0100 Subject: [PATCH 1988/2550] add priority to add_site --- openpype/modules/sync_server/rest_api.py | 17 ++++++----------- 
openpype/modules/sync_server/sync_server.py | 4 ---- .../modules/sync_server/sync_server_module.py | 13 ++++++++++--- 3 files changed, 16 insertions(+), 18 deletions(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index b7c5d26d15..e92ddc8eee 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -1,4 +1,5 @@ from aiohttp.web_response import Response +from openpype.client.entities import get_representation_by_id from openpype.lib import Logger @@ -29,11 +30,6 @@ class SyncServerModuleRestApi: self.prefix + "/add_sites_to_representations", self.add_sites_to_representations, ) - self.server_manager.add_route( - "GET", - self.prefix + "/files_are_processed", - self.files_are_processed, - ) async def add_sites_to_representations(self, request): # Extract data from request @@ -54,15 +50,14 @@ class SyncServerModuleRestApi: for representation_id in representations: for site in sites: self.module.add_site( - project_name, representation_id, site, force=True + project_name, + representation_id, + site, + force=True, + priority=99, ) # Force timer to run immediately self.module.reset_timer() return Response(status=200) - - async def files_are_processed(self, _request): - return Response( - body=bytes(self.module.sync_server_thread.files_are_processed) - ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 7fd2311c2d..d0a40a60ff 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -243,7 +243,6 @@ class SyncServerThread(threading.Thread): self.is_running = False self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) self.timer = None - self.files_are_processed = False def run(self): self.is_running = True @@ -398,8 +397,6 @@ class SyncServerThread(threading.Thread): representation, site, error) - # Trigger files process finished - self.files_are_processed = False duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -466,7 +463,6 @@ class SyncServerThread(threading.Thread): if self.timer: self.timer.cancel() self.timer = None - self.files_are_processed = True def _working_sites(self, project_name): if self.module.is_project_paused(project_name): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 7aaf42006c..788032180e 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -136,7 +136,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, project_name, representation_id, site_name=None, - force=False): + force=False, priority=None): """ Adds new site to representation to be synced. 
@@ -152,6 +152,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists + priority (int): set priority Throws: SiteAlreadyPresentError - if adding already existing site and @@ -167,7 +168,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.reset_site_on_representation(project_name, representation_id, site_name=site_name, - force=force) + force=force, + priority=priority) def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): @@ -1655,7 +1657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False): + remove=False, pause=None, force=False, priority=None): """ Reset information about synchronization for particular 'file_id' and provider. @@ -1678,6 +1680,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): remove (bool): if True remove site altogether pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site + priority (int): set priority Raises: SiteAlreadyPresentError - if adding already existing site and @@ -1705,6 +1708,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} + # Add priority + if priority: + elem["priority"] = priority + if file_id: # reset site for particular file self._reset_site_for_file(project_name, representation_id, elem, file_id, site_name) From f19c2b3a7936b5ca0c3742ddc98f1d0fc38555ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:58:19 +0100 Subject: [PATCH 1989/2550] clean --- openpype/modules/sync_server/rest_api.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index e92ddc8eee..0c3b914833 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -1,5 +1,4 @@ from aiohttp.web_response import Response -from openpype.client.entities import get_representation_by_id from openpype.lib import Logger From 255d5d8b9b0c34d10b2ac17913e1369d41cc1108 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:59:03 +0100 Subject: [PATCH 1990/2550] clean --- openpype/modules/sync_server/sync_server_module.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 788032180e..94a97e9f37 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1657,7 +1657,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, priority=None): + remove=False, pause=None, force=False, + priority=None): """ Reset information about synchronization for particular 'file_id' and provider. 
From 4408ea9b02a00a0c0ff5adf4b45b2cb1f5168793 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 19:02:29 +0100 Subject: [PATCH 1991/2550] sort fields --- openpype/hooks/pre_copy_last_published_workfile.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 4eb66f6f85..acbc9ec1c7 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -178,7 +178,9 @@ class CopyLastPublishedWorkfile(PreLaunchHook): last_created_time >= get_representation_last_created_time_on_site( get_representation_by_id( - project_name, workfile_representation["_id"] + project_name, + workfile_representation["_id"], + fields=["files"], ), local_site_id, ) From bf6af7f7175ff1536b81e6ac8ae3f4c233271a26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 19:04:03 +0100 Subject: [PATCH 1992/2550] clean --- openpype/modules/sync_server/sync_server_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 94a97e9f37..4d848958e8 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1657,7 +1657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, + remove=False, pause=None, force=False, priority=None): """ Reset information about synchronization for particular 'file_id' From cf50722e1fee7c6ab227dedefc74a479713264fb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 8 Nov 2022 21:42:26 +0100 Subject: [PATCH 1993/2550] flame: load with native colorspace resolved from mapping --- openpype/hosts/flame/api/plugin.py | 13 +++++++++++++ openpype/hosts/flame/plugins/load/load_clip.py | 4 ++-- .../hosts/flame/plugins/load/load_clip_batch.py | 4 ++-- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 092ce9d106..45fa7fd9a4 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -690,6 +690,19 @@ class ClipLoader(LoaderPlugin): ) ] + _mapping = None + + def get_native_colorspace(self, input_colorspace): + if not self._mapping: + settings = get_current_project_settings()["flame"] + mapping = settings["imageio"]["profilesMapping"]["inputs"] + self._mapping = { + input["ocioName"]: input["flameName"] + for input in mapping + } + + return self._mapping.get(input_colorspace) + class OpenClipSolver(flib.MediaInfoFile): create_new_clip = False diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 0843dde76a..23879b923e 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -40,10 +40,10 @@ class LoadClip(opfapi.ClipLoader): clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path 
workfile_dir = os.environ["AVALON_WORKDIR"] diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 3b049b861b..2de75df116 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -43,10 +43,10 @@ class LoadClipBatch(opfapi.ClipLoader): clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"] From 1618c6bbf502cf9a7bdcc546f4e04735b1573e90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Nov 2022 00:29:25 +0000 Subject: [PATCH 1994/2550] Bump loader-utils from 1.4.0 to 1.4.1 in /website Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.0 to 1.4.1. - [Release notes](https://github.com/webpack/loader-utils/releases) - [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.1/CHANGELOG.md) - [Commits](https://github.com/webpack/loader-utils/compare/v1.4.0...v1.4.1) --- updated-dependencies: - dependency-name: loader-utils dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 04b9dd658b..7af15e9145 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4782,9 +4782,9 @@ loader-runner@^4.2.0: integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== loader-utils@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" - integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== + version "1.4.1" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" + integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" @@ -5124,7 +5124,12 @@ minimatch@^3.0.4: dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0, minimist@^1.2.5: +minimist@^1.2.0: + version "1.2.7" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== + +minimist@^1.2.5: version "1.2.6" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== From ff565317d0a1abe63671ab5e4b62ce599a000ff7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 08:39:52 +0800 Subject: [PATCH 1995/2550] Alembic Loader as Arnold Standin --- .../hosts/maya/plugins/load/load_abc_to_standin.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 94bb974917..19e60d33da 100644 --- 
a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py
+++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py
@@ -1,6 +1,7 @@
 import os
 
 from openpype.pipeline import (
+    legacy_io,
     load,
     get_representation_path
 )
@@ -46,6 +47,7 @@ class AlembicStandinLoader(load.LoaderPlugin):
 
         settings = get_project_settings(os.environ['AVALON_PROJECT'])
         colors = settings["maya"]["load"]["colors"]
+        fps = legacy_io.Session["AVALON_FPS"]
 
         c = colors.get('ass')
         if c is not None:
@@ -65,12 +67,14 @@ class AlembicStandinLoader(load.LoaderPlugin):
 
         # Set the standin filepath
         cmds.setAttr(standinShape + ".dso", self.fname, type="string")
-        cmds.setAttr(standinShape + ".abcFPS", 25)
+        cmds.setAttr(standinShape + ".abcFPS", float(fps))
 
         if frameStart is None:
             cmds.setAttr(standinShape + ".useFrameExtension", 0)
+
         elif frameStart == 1 and frameEnd == 1:
             cmds.setAttr(standinShape + ".useFrameExtension", 0)
+
         else:
             cmds.setAttr(standinShape + ".useFrameExtension", 1)
 
@@ -89,7 +93,7 @@ class AlembicStandinLoader(load.LoaderPlugin):
         import pymel.core as pm
 
         path = get_representation_path(representation)
-
+        fps = legacy_io.Session["AVALON_FPS"]
         # Update the standin
         standins = list()
         members = pm.sets(container['objectName'], query=True)
@@ -101,7 +105,7 @@ class AlembicStandinLoader(load.LoaderPlugin):
         for standin in standins:
             standin.dso.set(path)
             standin.useFrameExtension.set(0)
-            standin.abcFPS.set(25)
+            standin.abcFPS.set(float(fps))
 
         container = pm.PyNode(container["objectName"])
         container.representation.set(str(representation["_id"]))

From 4c7733e6a74840b4a88318fa93de39fe82d227f6 Mon Sep 17 00:00:00 2001
From: OpenPype
Date: Wed, 9 Nov 2022 03:50:08 +0000
Subject: [PATCH 1996/2550] [Automated] Bump version

---
 openpype/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/version.py b/openpype/version.py
index 46bb4b1cd0..81b2925fb5 100644
--- a/openpype/version.py
+++ b/openpype/version.py
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring Pype version."""
-__version__ = "3.14.7-nightly.2"
+__version__ = "3.14.7-nightly.3"

From 8936512b67228af868e4c04a639bfb5ec3a68803 Mon Sep 17 00:00:00 2001
From: clement hector
Date: Wed, 9 Nov 2022 11:17:07 +0100
Subject: [PATCH 1997/2550] simplification + check customData to avoid
 overriding it

---
 .../plugins/publish/collect_kitsu_username.py | 23 ++++++++++---------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py
index 846adac30a..9539518ebf 100644
--- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py
+++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py
@@ -12,16 +12,17 @@ class CollectKitsuUsername(pyblish.api.ContextPlugin):
     label = "Kitsu username"
 
     def process(self, context):
+        kitsu_login = os.environ['KITSU_LOGIN']
+
+        if not kitsu_login:
+            return
+
+        kitsu_username = kitsu_login.split("@")[0].replace('.', ' ')
+        new_username = re.sub('[^a-zA-Z]', ' ', kitsu_username).title()
+
         for instance in context:
-            kitsu_login = os.environ['KITSU_LOGIN']
+            # Don't override customData if it already exists
+            if 'customData' not in instance.data:
+                instance.data['customData'] = {}
 
-            if kitsu_login:
-                kitsu_username = kitsu_login.split("@")[0]
-                kitsu_username = kitsu_username.split('.')
-                kitsu_username = ' '.join(kitsu_username)
-
-                new_username = re.sub('[^a-zA-Z]', ' ', kitsu_username)
-
-                instance.data['customData'] = {
-                    "kitsuUsername":
new_username.title() - } + instance.data['customData']["kitsuUsername"] = new_username From 8feedcbc155387958b325e8eccc4c08bfad8a18e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:18:23 +0100 Subject: [PATCH 1998/2550] fix last version check --- .../hooks/pre_copy_last_published_workfile.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index acbc9ec1c7..96b5ccadb2 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -116,19 +116,19 @@ class CopyLastPublishedWorkfile(PreLaunchHook): return # Get workfile representation + last_version_doc = get_last_version_by_subset_id( + project_name, subset_id, fields=["_id"] + ) + if not last_version_doc: + self.log.debug("Subset does not have any versions") + return + workfile_representation = next( ( representation for representation in get_representations( project_name, - version_ids=[ - ( - get_last_version_by_subset_id( - project_name, subset_id, fields=["_id"] - ) - or {} - ).get("_id") - ], + version_ids=[last_version_doc["_id"]] ) if representation["context"]["task"]["name"] == task_name ), From 50afec52223e415c6118999f7f9ac465ed721d3e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:03 +0100 Subject: [PATCH 1999/2550] replaced 'add_sites_to_representations' with 'reset_timer' in rest api --- openpype/modules/sync_server/rest_api.py | 31 +++--------------------- 1 file changed, 3 insertions(+), 28 deletions(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index 0c3b914833..51769cd4fb 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -26,36 +26,11 @@ class SyncServerModuleRestApi: def register(self): self.server_manager.add_route( "POST", - self.prefix + "/add_sites_to_representations", - self.add_sites_to_representations, + self.prefix + "/reset_timer", + self.reset_timer, ) - async def add_sites_to_representations(self, request): - # Extract data from request - data = await request.json() - try: - project_name = data["project_name"] - sites = data["sites"] - representations = data["representations"] - except KeyError: - msg = ( - "Payload must contain fields 'project_name," - " 'sites' (list of names) and 'representations' (list of IDs)" - ) - self.log.error(msg) - return Response(status=400, message=msg) - - # Add all sites to each representation - for representation_id in representations: - for site in sites: - self.module.add_site( - project_name, - representation_id, - site, - force=True, - priority=99, - ) - + async def reset_timer(self, request): # Force timer to run immediately self.module.reset_timer() From 0ca0173e9b48b8cddc6b42dc4360d7bad8649d0f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:15 +0100 Subject: [PATCH 2000/2550] added ability to rese timer from add_site --- openpype/modules/sync_server/sync_server_module.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 4d848958e8..7dc1e15322 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -136,7 +136,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, 
project_name, representation_id, site_name=None, - force=False, priority=None): + force=False, priority=None, reset_timer=False): """ Adds new site to representation to be synced. @@ -171,6 +171,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): force=force, priority=priority) + if reset_timer: + self.reset_timer() + def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): """ From a137258b1b2a24c503b9db5e47735c9ac9f293d3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:32 +0100 Subject: [PATCH 2001/2550] 'reset_timer' can reset timer via rest api endpoint --- .../modules/sync_server/sync_server_module.py | 37 ++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 7dc1e15322..26a6abbbf4 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -916,7 +916,42 @@ class SyncServerModule(OpenPypeModule, ITrayModule): In case of user's involvement (reset site), start that right away. """ - self.sync_server_thread.reset_timer() + + if not self.enabled: + return + + if self.sync_server_thread is None: + self._reset_timer_with_rest_api() + else: + self.sync_server_thread.reset_timer() + + def is_representaion_on_site( + self, project_name, representation_id, site_id + ): + # TODO implement + return False + + def _reset_timer_with_rest_api(self): + # POST to webserver sites to add to representations + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + self.log.warning("Couldn't find webserver url") + return + + rest_api_url = "{}/sync_server/reset_timer".format( + webserver_url + ) + + try: + import requests + except Exception: + self.log.warning( + "Couldn't add sites to representations " + "('requests' is not available)" + ) + return + + requests.post(rest_api_url) def get_enabled_projects(self): """Returns list of projects which have SyncServer enabled.""" From 109c52809c1f599a4d93b8dc15d99f4836c03e26 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:20:21 +0100 Subject: [PATCH 2002/2550] updated prelaunch hook with new abilities of sync server --- .../hooks/pre_copy_last_published_workfile.py | 52 +++++-------------- 1 file changed, 12 insertions(+), 40 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 96b5ccadb2..6fd50a64d6 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -141,49 +141,21 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # POST to webserver sites to add to representations - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - if not webserver_url: - self.log.warning("Couldn't find webserver url") - return - - entry_point_url = "{}/sync_server".format(webserver_url) - rest_api_url = "{}/add_sites_to_representations".format( - entry_point_url - ) - try: - import requests - except Exception: - self.log.warning( - "Couldn't add sites to representations " - "('requests' is not available)" - ) - return - local_site_id = get_local_site_id() - requests.post( - rest_api_url, - json={ - "project_name": project_name, - "sites": [local_site_id], - "representations": [str(workfile_representation["_id"])], - }, + sync_server = self.modules_manager.get("sync_server") + 
sync_server.add_site( + project_name, + workfile_representation["_id"], + local_site_id, + force=True, + priority=99, + reset_timer=True ) - # Wait for the download loop to end - last_created_time = get_representation_last_created_time_on_site( - workfile_representation, local_site_id - ) - while ( - last_created_time - >= get_representation_last_created_time_on_site( - get_representation_by_id( - project_name, - workfile_representation["_id"], - fields=["files"], - ), - local_site_id, - ) + while not sync_server.is_representaion_on_site( + project_name, + workfile_representation["_id"], + local_site_id ): sleep(5) From e89051466efa9d12b0700b95711b049ab3b944ca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:22:56 +0100 Subject: [PATCH 2003/2550] check if is sync server enabled --- openpype/hooks/pre_copy_last_published_workfile.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 6fd50a64d6..69e3d6efe4 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -37,6 +37,12 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ + + sync_server = self.modules_manager.get("sync_server") + if not sync_server or not sync_server.enabled: + self.log.deubg("Sync server module is not enabled or available") + return + # Check there is no workfile available last_workfile = self.data.get("last_workfile_path") if os.path.exists(last_workfile): @@ -142,7 +148,6 @@ class CopyLastPublishedWorkfile(PreLaunchHook): return local_site_id = get_local_site_id() - sync_server = self.modules_manager.get("sync_server") sync_server.add_site( project_name, workfile_representation["_id"], From f90e9f64128667e15e6ae375cc2a1e9dd85495a6 Mon Sep 17 00:00:00 2001 From: Thomas Fricard <51854004+friquette@users.noreply.github.com> Date: Wed, 9 Nov 2022 11:30:49 +0100 Subject: [PATCH 2004/2550] Update openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../modules/kitsu/plugins/publish/collect_kitsu_username.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py index 9539518ebf..896050f7e2 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_username.py @@ -12,7 +12,7 @@ class CollectKitsuUsername(pyblish.api.ContextPlugin): label = "Kitsu username" def process(self, context): - kitsu_login = os.environ['KITSU_LOGIN'] + kitsu_login = os.environ.get('KITSU_LOGIN') if not kitsu_login: return From 4b1a19f3bf868813f5f2a1749eda8277cd2882ba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:36:03 +0100 Subject: [PATCH 2005/2550] removed 'get_representation_last_created_time_on_site' function --- openpype/client/entities.py | 27 ------------------- .../hooks/pre_copy_last_published_workfile.py | 2 -- 2 files changed, 29 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43c2874f57..91d4b499b0 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1368,33 +1368,6 @@ def get_representation_parents(project_name, representation): return parents_by_repre_id[repre_id] -def 
get_representation_last_created_time_on_site( - representation: dict, site_name: str -) -> datetime: - """Get `created_dt` value for representation on site. - - Args: - representation (dict): Representation to get creation date of - site_name (str): Site from which to get the creation date - - Returns: - datetime: Created time of representation on site - """ - created_time = next( - ( - site.get("created_dt") - for site in representation["files"][0].get("sites", []) - if site["name"] == site_name - ), - None, - ) - if created_time: - return created_time - else: - # Use epoch as 'zero' time - return datetime.utcfromtimestamp(0) - - def get_thumbnail_id_from_source(project_name, src_type, src_id): """Receive thumbnail id from source entity. diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 69e3d6efe4..884b0f54b6 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -3,8 +3,6 @@ import shutil from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, - get_representation_by_id, - get_representation_last_created_time_on_site, get_representations, get_subsets, ) From cc7a3e8581293e7fa2c3a678a43ca9c579c40e0e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 18:41:48 +0800 Subject: [PATCH 2006/2550] adding the switching on off for multipart and force muiltilayer options --- openpype/hosts/maya/api/lib_rendersettings.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 2b996702c3..2fc7547c8c 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -154,6 +154,16 @@ class RenderSettings(object): self._set_global_output_settings() cmds.setAttr("redshiftOptions.imageFormat", img_ext) + if redshift_render_presets["multilayer_exr"]: + cmds.setAttr("redshiftOptions.exrMultipart", 1) + else: + cmds.setAttr("redshiftOptions.exrMultipart", 0) + + if redshift_render_presets["force_combine"]: + cmds.setAttr("redshiftOptions.exrForceMultilayer", 1) + else: + cmds.setAttr("redshiftOptions.exrForceMultilayer", 0) + cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) self._additional_attribs_setter(additional_options) From 3ee386543bc6b91ee7a0ab3c95424ac7955d7d98 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 9 Nov 2022 11:43:55 +0100 Subject: [PATCH 2007/2550] hiero: adding animated knobs also making track per subset --- .../hosts/hiero/plugins/load/load_effects.py | 27 +++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index fab426e58d..0819d1d1b7 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -13,6 +13,7 @@ from openpype.pipeline import ( get_representation_path ) from openpype.hosts.hiero import api as phiero +from openpype.lib import Logger class LoadEffects(load.LoaderPlugin): @@ -26,6 +27,8 @@ class LoadEffects(load.LoaderPlugin): icon = "cc" color = "white" + log = Logger.get_logger(__name__) + def load(self, context, name, namespace, data): """ Loading function to get the soft effects to particular read node @@ -41,7 +44,7 @@ class LoadEffects(load.LoaderPlugin): """ active_sequence = 
phiero.get_current_sequence() active_track = phiero.get_current_track( - active_sequence, "LoadedEffects") + active_sequence, "Loaded_{}".format(name)) # get main variables namespace = namespace or context["asset"]["name"] @@ -119,7 +122,27 @@ class LoadEffects(load.LoaderPlugin): continue try: - node[knob_name].setValue(knob_value) + # assume list means animation + # except 4 values could be RGBA or vector + if isinstance(knob_value, list) and len(knob_value) > 4: + node[knob_name].setAnimated() + for i, value in enumerate(knob_value): + if isinstance(value, list): + # list can have vector animation + for ci, cv in enumerate(value): + node[knob_name].setValueAt( + cv, + (clip_in + i), + ci + ) + else: + # list is single values + node[knob_name].setValueAt( + value, + (clip_in + i) + ) + else: + node[knob_name].setValue(knob_value) except NameError: self.log.warning("Knob: {} cannot be set".format( knob_name)) From 189dc17ce3440f9a45d68c631b19be6ddb017907 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:51:00 +0100 Subject: [PATCH 2008/2550] fix typo --- openpype/hooks/pre_copy_last_published_workfile.py | 2 +- openpype/modules/sync_server/sync_server_module.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 884b0f54b6..0e561334e1 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -155,7 +155,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): reset_timer=True ) - while not sync_server.is_representaion_on_site( + while not sync_server.is_representation_on_site( project_name, workfile_representation["_id"], local_site_id diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 26a6abbbf4..7228f43f84 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -925,7 +925,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): else: self.sync_server_thread.reset_timer() - def is_representaion_on_site( + def is_representation_on_site( self, project_name, representation_id, site_id ): # TODO implement From 21833283b833327b3b079604e389efe23af7f3c8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Nov 2022 12:27:07 +0100 Subject: [PATCH 2009/2550] added method to check if representation has all files on site --- .../modules/sync_server/sync_server_module.py | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 7228f43f84..dc20e37a12 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -926,10 +926,27 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.sync_server_thread.reset_timer() def is_representation_on_site( - self, project_name, representation_id, site_id + self, project_name, representation_id, site_name ): - # TODO implement - return False + """Checks if 'representation_id' has all files avail. 
on 'site_name'""" + representation = get_representation_by_id(project_name, + representation_id, + fields=["_id", "files"]) + if not representation: + return False + + on_site = False + for file_info in representation.get("files", []): + for site in file_info.get("sites", []): + if site["name"] != site_name: + continue + + if (site.get("progress") or site.get("error") or + not site.get("created_dt")): + return False + on_site = True + + return on_site def _reset_timer_with_rest_api(self): # POST to webserver sites to add to representations From d8c7ff2d15b912f00588771cd7aff8effef7f6df Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Nov 2022 12:48:52 +0100 Subject: [PATCH 2010/2550] small updates to docstrings --- openpype/modules/sync_server/sync_server_module.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index dc20e37a12..9a2ff97ed6 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -143,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): 'project_name' must have synchronization enabled (globally or project only) - Used as a API endpoint from outside applications (Loader etc). + Used as an API endpoint from outside applications (Loader etc). Use 'force' to reset existing site. @@ -153,6 +153,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): site_name (string): name of configured and active site force (bool): reset site if exists priority (int): set priority + reset_timer (bool): if delay timer should be reset, eg. user mark + some representation to be synced manually Throws: SiteAlreadyPresentError - if adding already existing site and @@ -1601,12 +1603,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Args: project_name (string): name of project - force to db connection as each file might come from different collection - new_file_id (string): + new_file_id (string): only present if file synced successfully file (dictionary): info about processed file (pulled from DB) representation (dictionary): parent repr of file (from DB) site (string): label ('gdrive', 'S3') error (string): exception message - progress (float): 0-1 of progress of upload/download + progress (float): 0-0.99 of progress of upload/download priority (int): 0-100 set priority Returns: From 756bb9d85acf7d8d286eb21ca205185d0a18eed1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 9 Nov 2022 16:26:44 +0100 Subject: [PATCH 2011/2550] hiero: improving management of versions --- openpype/hosts/hiero/api/lib.py | 10 ++++++++-- openpype/hosts/hiero/api/pipeline.py | 21 ++++++++++++--------- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index e340209207..2829fe2bf5 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -11,6 +11,7 @@ import functools import warnings import json import ast +import secrets import shutil import hiero @@ -350,6 +351,8 @@ def set_track_openpype_tag(track, data=None): Returns: hiero.core.Tag """ + hash = secrets.token_hex(nbytes=4) + data = data or {} # basic Tag's attribute @@ -367,7 +370,10 @@ def set_track_openpype_tag(track, data=None): tag = tags.update_tag(_tag, tag_data) else: # if pype tag available then update with input data - tag = tags.create_tag(self.pype_tag_name, tag_data) + tag = tags.create_tag( + 
"{}_{}".format(self.pype_tag_name, hash), + tag_data + ) # add it to the input track item track.addTag(tag) @@ -390,7 +396,7 @@ def get_track_openpype_tag(track): return None for tag in _tags: # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: + if self.pype_tag_name in tag.name(): return tag diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 3475bc62e4..4ab73e7d19 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -201,6 +201,15 @@ def parse_container(item, validate=True): return data_to_container(item, _data) +def _update_container_data(container, data): + for key in container: + try: + container[key] = data[key] + except KeyError: + pass + return container + + def update_container(item, data=None): """Update container data to input track_item or track's openpype tag. @@ -214,15 +223,9 @@ def update_container(item, data=None): bool: True if container was updated correctly """ - def update_container_data(container, data): - for key in container: - try: - container[key] = data[key] - except KeyError: - pass - return container data = data or {} + data = deepcopy(data) if type(item) == hiero.core.VideoTrack: # form object data for test @@ -236,14 +239,14 @@ def update_container(item, data=None): container = deepcopy(container) # update data in container - updated_container = update_container_data(container, data) + updated_container = _update_container_data(container, data) # merge updated container back to containers containers.update({object_name: updated_container}) return bool(lib.set_track_openpype_tag(item, containers)) else: container = lib.get_trackitem_openpype_data(item) - updated_container = update_container_data(container, data) + updated_container = _update_container_data(container, data) log.info("Updating container: `{}`".format(item.name())) return bool(lib.set_trackitem_openpype_tag(item, updated_container)) From f111fc3763a2ae2b43330a08c4bf2064ef646cbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 9 Nov 2022 17:09:38 +0100 Subject: [PATCH 2012/2550] clean --- openpype/client/entities.py | 1 - openpype/modules/sync_server/rest_api.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 91d4b499b0..43afccf2f1 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,7 +6,6 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. 
""" -from datetime import datetime import re import collections diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index 51769cd4fb..a7d9dd80b7 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -30,8 +30,8 @@ class SyncServerModuleRestApi: self.reset_timer, ) - async def reset_timer(self, request): - # Force timer to run immediately + async def reset_timer(self, _request): + """Force timer to run immediately.""" self.module.reset_timer() return Response(status=200) From 9996c3f1afbe2e2b3adb110382586ffefd82a3ae Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 13:44:20 +0800 Subject: [PATCH 2013/2550] AOV Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 3 ++- openpype/hosts/maya/api/lib_rendersettings.py | 10 ---------- .../deadline/plugins/publish/submit_publish_job.py | 10 ++++++---- vendor/configs/OpenColorIO-Configs | 1 + 4 files changed, 9 insertions(+), 15 deletions(-) create mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index cd204445b7..ef75391638 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1016,7 +1016,8 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like Cryptomatte. # AOVs are merged in multi-channel file - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) or \ + bool(self._get_attr("redshiftOptions.exrMultipart")) # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 2fc7547c8c..2b996702c3 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -154,16 +154,6 @@ class RenderSettings(object): self._set_global_output_settings() cmds.setAttr("redshiftOptions.imageFormat", img_ext) - if redshift_render_presets["multilayer_exr"]: - cmds.setAttr("redshiftOptions.exrMultipart", 1) - else: - cmds.setAttr("redshiftOptions.exrMultipart", 0) - - if redshift_render_presets["force_combine"]: - cmds.setAttr("redshiftOptions.exrForceMultilayer", 1) - else: - cmds.setAttr("redshiftOptions.exrForceMultilayer", 0) - cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) self._additional_attribs_setter(additional_options) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 35f2532c16..615be78794 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -494,12 +494,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - preview = match_aov_pattern(app, aov_patterns, render_file_name) - + self.log.info("aov_pattern:{}".format(aov_patterns)) # toggle preview on if multipart is on - if instance_data.get("multipartExr"): + preview = match_aov_pattern(app, aov_patterns, render_file_name) + #if instance_data.get("multipartExr"): + if "Cryptomatte" in render_file_name: # for redshift preview = True + 
self.log.info("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name @@ -542,7 +544,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if new_instance.get("extendFrames", False): self._copy_extend_frames(new_instance, rep) instances.append(new_instance) - + self.log.info("instances:{}".format(instances)) return instances def _get_representations(self, instance, exp_files): diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs new file mode 160000 index 0000000000..0bb079c08b --- /dev/null +++ b/vendor/configs/OpenColorIO-Configs @@ -0,0 +1 @@ +Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 7e2ba84911dec742654ab07f28062c0ccbf0a731 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 13:57:15 +0800 Subject: [PATCH 2014/2550] AOV Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 3 ++- .../modules/deadline/plugins/publish/submit_publish_job.py | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index ef75391638..f89441cfc7 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1016,7 +1016,8 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like Cryptomatte. # AOVs are merged in multi-channel file - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) or \ + multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) \ + or \ bool(self._get_attr("redshiftOptions.exrMultipart")) # Get Redshift Extension from image format diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 615be78794..18fc769d49 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -497,7 +497,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.log.info("aov_pattern:{}".format(aov_patterns)) # toggle preview on if multipart is on preview = match_aov_pattern(app, aov_patterns, render_file_name) - #if instance_data.get("multipartExr"): if "Cryptomatte" in render_file_name: # for redshift preview = True From 252859ce0206a011828a1314e1530dbc12db5ea7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 18:03:46 +0800 Subject: [PATCH 2015/2550] AOV Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 6 ++++-- .../modules/deadline/plugins/publish/submit_publish_job.py | 6 +++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index f89441cfc7..a95c1c4932 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -536,6 +536,7 @@ class RenderProductsArnold(ARenderProducts): products = [] aov_name = self._get_attr(aov, "name") + multipart = bool(self._get_attr("defaultArnoldDriver.multipart")) ai_drivers = cmds.listConnections("{}.outputs".format(aov), source=True, destination=False, @@ -589,6 +590,7 @@ class RenderProductsArnold(ARenderProducts): ext=ext, aov=aov_name, driver=ai_driver, + multipart=multipart, camera=camera) products.append(product) @@ -1016,9 +1018,9 @@ class RenderProductsRedshift(ARenderProducts): # 
due to some AOVs still being written into separate files, # like Cryptomatte. # AOVs are merged in multi-channel file + multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) \ - or \ - bool(self._get_attr("redshiftOptions.exrMultipart")) + or bool(self._get_attr("redshiftOptions.exrMultipart")) # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 18fc769d49..27400bb269 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -494,13 +494,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - self.log.info("aov_pattern:{}".format(aov_patterns)) + # toggle preview on if multipart is on preview = match_aov_pattern(app, aov_patterns, render_file_name) - if "Cryptomatte" in render_file_name: # for redshift + + if instance_data.get("multipartExr"): preview = True - self.log.info("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name From 3cd1918f04ef5c13ab10e003064699b1659f8fb0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 11:23:32 +0100 Subject: [PATCH 2016/2550] shorter animation --- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index a33e6e7565..71f476c4ef 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1726,7 +1726,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) change_anim.setEndValue(1.0) - change_anim.setDuration(400) + change_anim.setDuration(200) change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) change_anim.valueChanged.connect(self._on_anim) From 69f4253084bc53440d26d6eb1f880f571b1a7294 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 10 Nov 2022 12:07:23 +0100 Subject: [PATCH 2017/2550] :bug: fix regex to match semver better this fixes issues determining staging version from file name where multiple hyphens are used in pre-releas/buildmetadata part of the version string --- igniter/bootstrap_repos.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index addcbed24c..077f56d769 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -63,7 +63,8 @@ class OpenPypeVersion(semver.VersionInfo): """ staging = False path = None - _VERSION_REGEX = re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?") # noqa: E501 + # this should match any string complying with https://semver.org/ + _VERSION_REGEX = re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P[a-zA-Z\d\-.]*))?(?:\+(?P[a-zA-Z\d\-.]*))?") # noqa: E501 _installed_version = None def __init__(self, *args, **kwargs): @@ -211,6 +212,8 @@ class OpenPypeVersion(semver.VersionInfo): OpenPypeVersion: of detected or None. 
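# A minimal illustration, using the `semver` package that OpenPypeVersion
# subclasses: pre-release and build-metadata parts may legitimately contain
# several hyphens, which is what the relaxed pattern above needs to match
# (the version strings below are made up for the example).
import semver

for sample in (
    "3.14.7-nightly.1",
    "3.14.7-staging-rc.2+local-build-01",
):
    # both are valid semver, so the detection regex must accept them as well
    print(semver.VersionInfo.parse(sample))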
""" + # strip .zip ext if present + string = re.sub(r"\.zip$", "", string, flags=re.IGNORECASE) m = re.search(OpenPypeVersion._VERSION_REGEX, string) if not m: return None From 8eb704aeb2703c5809f6b236c7ec8f6b24fd2941 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:09:13 +0800 Subject: [PATCH 2018/2550] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 19e60d33da..a192d9c357 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -28,11 +28,10 @@ class AlembicStandinLoader(load.LoaderPlugin): version = context["version"] version_data = version.get("data", {}) - + family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) - + self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) - frameEnd = version_data.get("frameEnd", None) asset = context["asset"]["name"] namespace = namespace or unique_namespace( @@ -48,12 +47,14 @@ class AlembicStandinLoader(load.LoaderPlugin): settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] fps = legacy_io.Session["AVALON_FPS"] - - c = colors.get('ass') + c = colors.get(family[0]) if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - c[0], c[1], c[2]) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" @@ -72,7 +73,7 @@ class AlembicStandinLoader(load.LoaderPlugin): if frameStart is None: cmds.setAttr(standinShape + ".useFrameExtension", 0) - elif frameStart == 1 and frameEnd == 1: + elif "model" in family: cmds.setAttr(standinShape + ".useFrameExtension", 0) else: From fe47deca3ce1832f00f336051c27f5a4627964d1 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:10:41 +0800 Subject: [PATCH 2019/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index a192d9c357..8ce1aee3ac 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -30,7 +30,6 @@ class AlembicStandinLoader(load.LoaderPlugin): version_data = version.get("data", {}) family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) - self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) asset = context["asset"]["name"] @@ -51,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From bb924595c88fa0268eb3c6e3615ced6af5d6c755 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:11:56 +0800 Subject: [PATCH 2020/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git 
a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 8ce1aee3ac..d93c85f8a4 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From 17c3b1f96ae5fefc1bcec3bc43fcbdc8bf8bb4fc Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:13:04 +0800 Subject: [PATCH 2021/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index d93c85f8a4..dafe999d9d 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,9 +50,9 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) ) transform_name = label + "_ABC" From b42160eead77da1d7e63059361c52236e1ea6385 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 12:14:40 +0100 Subject: [PATCH 2022/2550] fix button position on row inser --- openpype/tools/publisher/publish_report_viewer/window.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/tools/publisher/publish_report_viewer/window.py b/openpype/tools/publisher/publish_report_viewer/window.py index 2c249d058c..646ae69e7f 100644 --- a/openpype/tools/publisher/publish_report_viewer/window.py +++ b/openpype/tools/publisher/publish_report_viewer/window.py @@ -367,6 +367,7 @@ class LoadedFilesView(QtWidgets.QTreeView): def _on_rows_inserted(self): header = self.header() header.resizeSections(header.ResizeToContents) + self._update_remove_btn() def resizeEvent(self, event): super(LoadedFilesView, self).resizeEvent(event) From 790cca2569aaad88c74f85a1412432cea64ba6f2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 12:14:50 +0100 Subject: [PATCH 2023/2550] fix issue when last item is removed --- openpype/tools/publisher/publish_report_viewer/widgets.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/tools/publisher/publish_report_viewer/widgets.py b/openpype/tools/publisher/publish_report_viewer/widgets.py index ff388fb277..0d35ac3512 100644 --- a/openpype/tools/publisher/publish_report_viewer/widgets.py +++ b/openpype/tools/publisher/publish_report_viewer/widgets.py @@ -27,6 +27,9 @@ class PluginLoadReportModel(QtGui.QStandardItemModel): parent = self.invisibleRootItem() parent.removeRows(0, parent.rowCount()) + if report is None: + return + new_items = [] new_items_by_filepath = {} for filepath in report.crashed_plugin_paths.keys(): From 98244c77b08989959f04293a1b54aa43d3b2c67f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:15:11 +0800 Subject: [PATCH 2024/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index dafe999d9d..d93c85f8a4 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,9 +50,9 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) ) transform_name = label + "_ABC" From c5547766074b7999ea880b76705c14fba2828cd3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:16:10 +0800 Subject: [PATCH 2025/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index d93c85f8a4..8ce1aee3ac 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From d73ac24f59490554c20eb89af7675ae3bbcb0496 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:19:11 +0800 Subject: [PATCH 2026/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 8ce1aee3ac..9583063c7e 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -30,6 +30,7 @@ class AlembicStandinLoader(load.LoaderPlugin): version_data = version.get("data", {}) family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) + self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) asset = context["asset"]["name"] @@ -48,12 +49,12 @@ class AlembicStandinLoader(load.LoaderPlugin): fps = legacy_io.Session["AVALON_FPS"] c = colors.get(family[0]) if c is not None: + r = (float(c[0]) / 255) + g = (float(c[1]) / 255) + b = (float(c[2]) / 255) cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + r, g, b) transform_name = label + "_ABC" From ddd4e653919adfe58b10caa857948ccf98066868 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 13:39:01 +0100 Subject: [PATCH 2027/2550] hiero: unification of openpype tags --- openpype/hosts/hiero/api/lib.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 2829fe2bf5..7f0cf8149a 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -341,6 +341,11 @@ def get_track_item_tags(track_item): return returning_tag_data +def _get_tag_unique_hash(): + # sourcery skip: avoid-builtin-shadow + return secrets.token_hex(nbytes=4) + + 
def set_track_openpype_tag(track, data=None): """ Set openpype track tag to input track object. @@ -351,8 +356,6 @@ def set_track_openpype_tag(track, data=None): Returns: hiero.core.Tag """ - hash = secrets.token_hex(nbytes=4) - data = data or {} # basic Tag's attribute @@ -371,7 +374,10 @@ def set_track_openpype_tag(track, data=None): else: # if pype tag available then update with input data tag = tags.create_tag( - "{}_{}".format(self.pype_tag_name, hash), + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), tag_data ) # add it to the input track item @@ -468,7 +474,7 @@ def get_trackitem_openpype_tag(track_item): return None for tag in _tags: # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: + if self.pype_tag_name in tag.name(): return tag @@ -493,13 +499,18 @@ def set_trackitem_openpype_tag(track_item, data=None): } # get available pype tag if any _tag = get_trackitem_openpype_tag(track_item) - if _tag: # it not tag then create one tag = tags.update_tag(_tag, tag_data) else: # if pype tag available then update with input data - tag = tags.create_tag(self.pype_tag_name, tag_data) + tag = tags.create_tag( + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), + tag_data + ) # add it to the input track item track_item.addTag(tag) From c5d3e8a45788ce03c996096f5af89df967e735a0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 13:39:25 +0100 Subject: [PATCH 2028/2550] hiero: loading effects not able delete previous nodes --- openpype/hosts/hiero/plugins/load/load_effects.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 0819d1d1b7..a3fcd63b5b 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -188,7 +188,9 @@ class LoadEffects(load.LoaderPlugin): if loaded_stitem not in used_subtracks: continue item_to_remove = used_subtracks.pop(loaded_stitem) - item_to_remove.node()["enable"].setValue(0) + # TODO: find a way to erase nodes + self.log.debug( + "This node needs to be removed: {}".format(item_to_remove)) data_imprint = { "objectName": object_name, From 7938f843f0bdd6c9ae56f6978d3e5f211b5495a3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 20:52:40 +0800 Subject: [PATCH 2029/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 6 +++++- openpype/settings/defaults/project_settings/maya.json | 1 + .../schemas/schema_maya_render_settings.json | 5 +++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 2b996702c3..9acb65b84c 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -108,8 +108,10 @@ class RenderSettings(object): # function to revert render settings does not reset AOVs list in MtoA # Fetch current aovs in case there's any. 
current_aovs = AOVInterface().getAOVs() + remove_aovs = arnold_render_presets["remove_aovs"] + if remove_aovs: # Remove fetched AOVs - AOVInterface().removeAOVs(current_aovs) + AOVInterface().removeAOVs(current_aovs) mel.eval("unifiedRenderGlobalsRevertToDefault") img_ext = arnold_render_presets["image_format"] img_prefix = arnold_render_presets["image_prefix"] @@ -118,6 +120,8 @@ class RenderSettings(object): multi_exr = arnold_render_presets["multilayer_exr"] additional_options = arnold_render_presets["additional_options"] for aov in aovs: + if aov in current_aovs and not remove_aovs: + continue AOVInterface('defaultArnoldRenderOptions').addAOV(aov) cmds.setAttr("defaultResolution.width", width) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a..958025baeb 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -64,6 +64,7 @@ "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, + "remove_aovs": true, "tiled": true, "aov_list": [], "additional_options": [] diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 0cbb684fc6..9beea16b97 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -69,6 +69,11 @@ "label": "Multilayer (exr)", "type": "boolean" }, + { + "key": "remove_aovs", + "label": "Remove AOVs", + "type": "boolean" + }, { "key": "tiled", "label": "Tiled (tif, exr)", From 377e6e88e1182871a1fc0cfe293e8ae697777a33 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 20:53:24 +0800 Subject: [PATCH 2030/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 958025baeb..09f3e61391 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -64,7 +64,7 @@ "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, - "remove_aovs": true, + "remove_aovs": false, "tiled": true, "aov_list": [], "additional_options": [] From 79eb997e4b7d49510615606cb6fa1c05ddec67d7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 14:19:49 +0100 Subject: [PATCH 2031/2550] flame: convert color mapping to classmethod --- openpype/hosts/flame/api/plugin.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 45fa7fd9a4..9efbd5c1bc 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -692,16 +692,17 @@ class ClipLoader(LoaderPlugin): _mapping = None - def get_native_colorspace(self, input_colorspace): - if not self._mapping: + @classmethod + def get_native_colorspace(cls, input_colorspace): + if not cls._mapping: settings = get_current_project_settings()["flame"] mapping = settings["imageio"]["profilesMapping"]["inputs"] - self._mapping = { + cls._mapping = { input["ocioName"]: input["flameName"] for input in mapping } - return 
self._mapping.get(input_colorspace) + return cls._mapping.get(input_colorspace) class OpenClipSolver(flib.MediaInfoFile): From 0f392dd99455eec17b81a73ac7894d3286d7fa17 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 17:11:42 +0100 Subject: [PATCH 2032/2550] falame: better colorspace loading --- openpype/hosts/flame/api/plugin.py | 38 ++++++++++++++++++- .../hosts/flame/plugins/load/load_clip.py | 3 +- .../flame/plugins/load/load_clip_batch.py | 2 +- 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 9efbd5c1bc..26129ebaa6 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -4,13 +4,13 @@ import shutil from copy import deepcopy from xml.etree import ElementTree as ET +import qargparse from Qt import QtCore, QtWidgets -import qargparse from openpype import style -from openpype.settings import get_current_project_settings from openpype.lib import Logger from openpype.pipeline import LegacyCreator, LoaderPlugin +from openpype.settings import get_current_project_settings from . import constants from . import lib as flib @@ -692,8 +692,42 @@ class ClipLoader(LoaderPlugin): _mapping = None + def get_colorspace(self, context): + """Get colorspace name + + Look either to version data or representation data. + + Args: + context (dict): version context data + + Returns: + str: colorspace name or None + """ + version = context['version'] + version_data = version.get("data", {}) + colorspace = version_data.get( + "colorspace", None + ) + + if ( + not colorspace + or colorspace == "Unknown" + ): + colorspace = context["representation"]["data"].get( + "colorspace", None) + + return colorspace + @classmethod def get_native_colorspace(cls, input_colorspace): + """Return native colorspace name. 
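# A minimal sketch of the lookup this mapping provides, assuming project
# settings shaped like `profilesMapping["inputs"]`; the colorspace names
# below are illustrative only.
example_inputs = [
    {"ocioName": "ACES - ACEScg", "flameName": "ACEScg"},
    {"ocioName": "Output - Rec.709", "flameName": "Rec.709 video"},
]
mapping = {item["ocioName"]: item["flameName"] for item in example_inputs}
print(mapping.get("ACES - ACEScg"))  # -> "ACEScg"
print(mapping.get("Unknown"))        # -> None, no native equivalent defined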
+ + Args: + input_colorspace (str | None): colorspace name + + Returns: + str: native colorspace name defined in mapping or None + """ if not cls._mapping: settings = get_current_project_settings()["flame"] mapping = settings["imageio"]["profilesMapping"]["inputs"] diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 23879b923e..f8cb7b3e11 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -36,7 +36,8 @@ class LoadClip(opfapi.ClipLoader): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) + clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 19c0ed1ef0..048ac19431 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -35,7 +35,7 @@ class LoadClipBatch(opfapi.ClipLoader): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) # in case output is not in context replace key to representation if not context["representation"]["context"].get("output"): From 9a722cb8bb8acd5deb744acfd11fab3528ae6289 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 10 Nov 2022 17:30:10 +0100 Subject: [PATCH 2033/2550] :art: creator for online family --- .../plugins/create/create_online.py | 98 +++++++++++++++++++ .../plugins/publish/collect_online_file.py | 24 +++++ 2 files changed, 122 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/create/create_online.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_online_file.py diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py new file mode 100644 index 0000000000..e8092e8eaf --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +"""Creator of online files. + +Online file retain their original name and use it as subset name. To +avoid conflicts, this creator checks if subset with this name already +exists under selected asset. +""" +import copy +import os +import re +from pathlib import Path + +from openpype.client import get_subset_by_name, get_asset_by_name +from openpype.lib.attribute_definitions import FileDef +from openpype.pipeline import ( + CreatedInstance, + CreatorError +) +from openpype.pipeline.create import ( + get_subset_name, + TaskNotSetError, +) + +from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator + + +class OnlineCreator(TrayPublishCreator): + """Creates instance from file and retains its original name.""" + + identifier = "io.openpype.creators.traypublisher.online" + label = "Online" + family = "online" + description = "Publish file retaining its original file name" + extensions = [".mov", ".mp4", ".mfx", ".m4v", ".mpg"] + + def get_detail_description(self): + return """# Publish batch of .mov to multiple assets. + + File names must then contain only asset name, or asset name + version. + (eg. 
'chair.mov', 'chair_v001.mov', not really safe `my_chair_v001.mov` + """ + + def get_icon(self): + return "fa.file" + + def create(self, subset_name, instance_data, pre_create_data): + if not pre_create_data.get("representation_file")["filenames"]: + raise CreatorError("No files specified") + + asset = get_asset_by_name(self.project_name, instance_data["asset"]) + origin_basename = Path(pre_create_data.get( + "representation_file")["filenames"][0]).stem + + if get_subset_by_name( + self.project_name, origin_basename, asset["_id"]): + raise CreatorError(f"subset with {origin_basename} already " + "exists in selected asset") + + instance_data["originalBasename"] = origin_basename + subset_name = origin_basename + path = (Path( + pre_create_data.get( + "representation_file")["directory"] + ) / pre_create_data.get( + "representation_file")["filenames"][0]).as_posix() + + instance_data["creator_attributes"] = {"path": path} + + # Create new instance + new_instance = CreatedInstance(self.family, subset_name, + instance_data, self) + self._store_new_instance(new_instance) + + def get_pre_create_attr_defs(self): + return [ + FileDef( + "representation_file", + folders=False, + extensions=self.extensions, + allow_sequences=False, + single_item=True, + label="Representation", + ) + ] + + def get_subset_name( + self, + variant, + task_name, + asset_doc, + project_name, + host_name=None, + instance=None + ): + if instance is None: + return "{originalBasename}" + + return instance.data["subset"] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py new file mode 100644 index 0000000000..1d173c326b --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from pathlib import Path + + +class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): + """Collect online file and retain its file name.""" + label = "Collect online file" + families = ["online"] + hosts = ["traypublisher"] + + def process(self, instance): + file = Path(instance.data["creator_attributes"]["path"]) + + if not instance.data.get("representations"): + instance.data["representations"] = [ + { + "name": file.suffix.lstrip("."), + "ext": file.suffix.lstrip("."), + "files": file.name, + "stagingDir": file.parent.as_posix() + } + ] + From 2b8846766f8cb65f9a6f7528c15ae840849097e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 10 Nov 2022 17:30:57 +0100 Subject: [PATCH 2034/2550] :art: defaults for online family --- .../defaults/project_anatomy/templates.json | 8 +++++++- .../settings/defaults/project_settings/global.json | 14 +++++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index 3415c4451f..0ac56a4dad 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -48,10 +48,16 @@ "file": "{originalBasename}_{@version}.{ext}", "path": "{@folder}/{@file}" }, + "online": { + "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", + "file": "{originalBasename}<.{@frame}><_{udim}>.{ext}", + "path": "{@folder}/{@file}" + }, "__dynamic_keys_labels__": { "maya2unreal": "Maya to Unreal", "simpleUnrealTextureHero": "Simple Unreal Texture - Hero", - 
"simpleUnrealTexture": "Simple Unreal Texture" + "simpleUnrealTexture": "Simple Unreal Texture", + "online": "online" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 9c3f2f1e1b..0409ce802c 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -483,7 +483,19 @@ ] }, "publish": { - "template_name_profiles": [], + "template_name_profiles": [ + { + "families": [ + "online" + ], + "hosts": [ + "traypublisher" + ], + "task_types": [], + "task_names": [], + "template_name": "online" + } + ], "hero_template_name_profiles": [] } }, From bf76b73f2434668bccb60cc26dc12ba0016b0028 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 17:54:36 +0100 Subject: [PATCH 2035/2550] milestone assign only if PR merged [closed] --- .github/workflows/milestone_assign.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/milestone_assign.yml b/.github/workflows/milestone_assign.yml index c5a231e59e..4b52dfc30d 100644 --- a/.github/workflows/milestone_assign.yml +++ b/.github/workflows/milestone_assign.yml @@ -2,7 +2,7 @@ name: Milestone - assign to PRs on: pull_request_target: - types: [opened, reopened, edited, synchronize] + types: [closed] jobs: run_if_release: From 8d467b1a96426646498ffb93f2db31e2ab7344c0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 18:52:59 +0100 Subject: [PATCH 2036/2550] renamed 'CollectAvalonEntities' to 'CollectContextEntities' --- .../hosts/tvpaint/plugins/publish/collect_instance_frames.py | 2 +- openpype/hosts/tvpaint/plugins/publish/validate_marks.py | 2 +- .../plugins/publish/validate_tvpaint_workfile_data.py | 2 +- ...collect_avalon_entities.py => collect_context_entities.py} | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) rename openpype/plugins/publish/{collect_avalon_entities.py => collect_context_entities.py} (97%) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py index f291c363b8..d5b79758ad 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py @@ -6,7 +6,7 @@ class CollectOutputFrameRange(pyblish.api.ContextPlugin): When instances are collected context does not contain `frameStart` and `frameEnd` keys yet. They are collected in global plugin - `CollectAvalonEntities`. + `CollectContextEntities`. 
""" label = "Collect output frame range" order = pyblish.api.CollectorOrder diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py index 12d50e17ff..0030b0fd1c 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py @@ -39,7 +39,7 @@ class ValidateMarks(pyblish.api.ContextPlugin): def get_expected_data(context): scene_mark_in = context.data["sceneMarkIn"] - # Data collected in `CollectAvalonEntities` + # Data collected in `CollectContextEntities` frame_end = context.data["frameEnd"] frame_start = context.data["frameStart"] handle_start = context.data["handleStart"] diff --git a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py index a5e4868411..d8b7bb9078 100644 --- a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py @@ -13,7 +13,7 @@ class ValidateWorkfileData(pyblish.api.ContextPlugin): targets = ["tvpaint_worker"] def process(self, context): - # Data collected in `CollectAvalonEntities` + # Data collected in `CollectContextEntities` frame_start = context.data["frameStart"] frame_end = context.data["frameEnd"] handle_start = context.data["handleStart"] diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_context_entities.py similarity index 97% rename from openpype/plugins/publish/collect_avalon_entities.py rename to openpype/plugins/publish/collect_context_entities.py index 3b05b6ae98..0a6072a820 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_context_entities.py @@ -16,11 +16,11 @@ from openpype.client import get_project, get_asset_by_name from openpype.pipeline import legacy_io, KnownPublishError -class CollectAvalonEntities(pyblish.api.ContextPlugin): +class CollectContextEntities(pyblish.api.ContextPlugin): """Collect Anatomy into Context.""" order = pyblish.api.CollectorOrder - 0.1 - label = "Collect Avalon Entities" + label = "Collect Context Entities" def process(self, context): legacy_io.install() From 292e8d45c40d4d3af25196dd8e9c979b9aa2a9f9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 18:53:34 +0100 Subject: [PATCH 2037/2550] get "asset" and "task" from context --- openpype/plugins/publish/collect_context_entities.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/collect_context_entities.py b/openpype/plugins/publish/collect_context_entities.py index 0a6072a820..31fbeb5dbd 100644 --- a/openpype/plugins/publish/collect_context_entities.py +++ b/openpype/plugins/publish/collect_context_entities.py @@ -3,6 +3,8 @@ Requires: session -> AVALON_ASSET context -> projectName + context -> asset + context -> task Provides: context -> projectEntity - Project document from database. 
@@ -13,20 +15,19 @@ Provides: import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import legacy_io, KnownPublishError +from openpype.pipeline import KnownPublishError class CollectContextEntities(pyblish.api.ContextPlugin): - """Collect Anatomy into Context.""" + """Collect entities into Context.""" order = pyblish.api.CollectorOrder - 0.1 label = "Collect Context Entities" def process(self, context): - legacy_io.install() project_name = context.data["projectName"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] + asset_name = context.data["asset"] + task_name = context.data["task"] project_entity = get_project(project_name) if not project_entity: From 91937c6c287f76a93bbbf13b2aa02deb72d74212 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 19:00:27 +0100 Subject: [PATCH 2038/2550] get "task" from context in anatomy context data --- openpype/plugins/publish/collect_anatomy_context_data.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 8433816908..55ce8e06f4 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -15,7 +15,6 @@ Provides: import json import pyblish.api -from openpype.pipeline import legacy_io from openpype.pipeline.template_data import get_template_data @@ -53,7 +52,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): asset_entity = context.data.get("assetEntity") task_name = None if asset_entity: - task_name = legacy_io.Session["AVALON_TASK"] + task_name = context.data["task"] anatomy_data = get_template_data( project_entity, asset_entity, task_name, host_name, system_settings From 81451300611b4eb7aab753ad1267848ec1965e72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 11 Nov 2022 10:00:16 +0100 Subject: [PATCH 2039/2550] :label: fix type hint --- openpype/pipeline/create/creator_plugins.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 782534d589..bb5ce00452 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -393,8 +393,9 @@ class BaseCreator: asset_doc(dict): Asset document for which subset is created. project_name(str): Project name. host_name(str): Which host creates subset. - instance(str|None): Object of 'CreatedInstance' for which is - subset name updated. Passed only on subset name update. + instance(CreatedInstance|None): Object of 'CreatedInstance' for + which is subset name updated. Passed only on subset name + update. 
""" dynamic_data = self.get_dynamic_data( From 2edcb15fbb1640dd57286d83828d9bb05e908c42 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 11 Nov 2022 17:55:26 +0800 Subject: [PATCH 2040/2550] fixing te multipart boolean option --- openpype/hosts/maya/api/lib_renderproducts.py | 20 ++++++++++++------- .../plugins/publish/submit_publish_job.py | 2 +- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index a95c1c4932..78a0a89472 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -536,7 +536,11 @@ class RenderProductsArnold(ARenderProducts): products = [] aov_name = self._get_attr(aov, "name") - multipart = bool(self._get_attr("defaultArnoldDriver.multipart")) + multipart = False + multilayer = bool(self._get_attr("defaultArnoldDriver.multipart")) + merge_AOVs = bool(self._get_attr("defaultArnoldDriver.mergeAOVs")) + if multilayer or merge_AOVs: + multipart = True ai_drivers = cmds.listConnections("{}.outputs".format(aov), source=True, destination=False, @@ -1018,9 +1022,11 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like Cryptomatte. # AOVs are merged in multi-channel file - - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) \ - or bool(self._get_attr("redshiftOptions.exrMultipart")) + multipart = False + force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + exMultipart = bool(self._get_attr("redshiftOptions.exrMultipart")) + if exMultipart or force_layer: + multipart = True # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer @@ -1048,7 +1054,7 @@ class RenderProductsRedshift(ARenderProducts): # Any AOVs that still get processed, like Cryptomatte # by themselves are not multipart files. - aov_multipart = not multipart + # aov_multipart = not multipart # Redshift skips rendering of masterlayer without AOV suffix # when a Beauty AOV is rendered. It overrides the main layer. 
@@ -1079,7 +1085,7 @@ class RenderProductsRedshift(ARenderProducts): productName=aov_light_group_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) @@ -1093,7 +1099,7 @@ class RenderProductsRedshift(ARenderProducts): product = RenderProduct(productName=aov_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 27400bb269..e87cc6beeb 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -500,7 +500,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if instance_data.get("multipartExr"): preview = True - + self.log.info("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name From 81d09b98ffa87983d08ee8fb6e5ef83f23f231d2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 11 Nov 2022 17:58:26 +0800 Subject: [PATCH 2041/2550] fixing te multipart boolean option --- openpype/hosts/maya/api/lib_renderproducts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 78a0a89472..58fcd2d281 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1023,7 +1023,7 @@ class RenderProductsRedshift(ARenderProducts): # like Cryptomatte. # AOVs are merged in multi-channel file multipart = False - force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) # noqa exMultipart = bool(self._get_attr("redshiftOptions.exrMultipart")) if exMultipart or force_layer: multipart = True From 9324bf25383a773d8789a7d6debeea200b179b6f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 11 Nov 2022 18:05:42 +0800 Subject: [PATCH 2042/2550] fixing te multipart boolean option --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index e87cc6beeb..c1e9dd4015 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -495,8 +495,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - # toggle preview on if multipart is on preview = match_aov_pattern(app, aov_patterns, render_file_name) + # toggle preview on if multipart is on if instance_data.get("multipartExr"): preview = True From 84f021ffd82f00f30c9026aa7354201ba6a0e290 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 11 Nov 2022 12:18:21 +0100 Subject: [PATCH 2043/2550] don't close publisher window on escape button --- openpype/tools/publisher/window.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index f07995acc6..5875f7aa68 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -361,6 +361,13 @@ class PublisherWindow(QtWidgets.QDialog): 
super(PublisherWindow, self).resizeEvent(event) self._update_publish_frame_rect() + def keyPressEvent(self, event): + # Ignore escape button to close window + if event.key() == QtCore.Qt.Key_Escape: + event.accept() + return + super(PublisherWindow, self).keyPressEvent(event) + def _on_overlay_message(self, event): self._overlay_object.add_message( event["message"], From a09ab62eb7ab9c06dd99fb1b44d6946a30bf3d12 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 11 Nov 2022 15:07:51 +0100 Subject: [PATCH 2044/2550] :recycle: some tweaks --- .../plugins/create/create_online.py | 20 +++++++------------ .../plugins/publish/collect_online_file.py | 6 +++--- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index e8092e8eaf..91016dc794 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -5,9 +5,6 @@ Online file retain their original name and use it as subset name. To avoid conflicts, this creator checks if subset with this name already exists under selected asset. """ -import copy -import os -import re from pathlib import Path from openpype.client import get_subset_by_name, get_asset_by_name @@ -16,11 +13,6 @@ from openpype.pipeline import ( CreatedInstance, CreatorError ) -from openpype.pipeline.create import ( - get_subset_name, - TaskNotSetError, -) - from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator @@ -31,14 +23,16 @@ class OnlineCreator(TrayPublishCreator): label = "Online" family = "online" description = "Publish file retaining its original file name" - extensions = [".mov", ".mp4", ".mfx", ".m4v", ".mpg"] + extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg"] def get_detail_description(self): - return """# Publish batch of .mov to multiple assets. + return """# Create file retaining its original file name. - File names must then contain only asset name, or asset name + version. - (eg. 'chair.mov', 'chair_v001.mov', not really safe `my_chair_v001.mov` - """ + This will publish files using template helping to retain original + file name and that file name is used as subset name. 
+
+        By default it tries to guard against multiple publishes of the same
+        file."""
 
     def get_icon(self):
         return "fa.file"
diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py
index 1d173c326b..459ee463aa 100644
--- a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py
+++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py
@@ -3,9 +3,10 @@ import pyblish.api
 from pathlib import Path
 
 
-class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin):
+class CollectOnlineFile(pyblish.api.InstancePlugin):
     """Collect online file and retain its file name."""
-    label = "Collect online file"
+    label = "Collect Online File"
+    order = pyblish.api.CollectorOrder
 
     families = ["online"]
     hosts = ["traypublisher"]
@@ -21,4 +22,3 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin):
             "stagingDir": file.parent.as_posix()
         }
     ]
-

From b8b184b1b6c90fefcba386886554ebb32f915798 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 11 Nov 2022 15:08:02 +0100
Subject: [PATCH 2045/2550] :art: add validator

---
 .../plugins/publish/validate_online_file.py   | 32 +++++++++++++++++++
 1 file changed, 32 insertions(+)
 create mode 100644 openpype/hosts/traypublisher/plugins/publish/validate_online_file.py

diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
new file mode 100644
index 0000000000..86b9334184
--- /dev/null
+++ b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+
+from openpype.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin,
+)
+from openpype.client import get_subset_by_name, get_asset_by_name
+
+
+class ValidateOnlineFile(OptionalPyblishPluginMixin,
+                         pyblish.api.InstancePlugin):
+    """Validate that subset doesn't exist yet."""
+    label = "Validate Existing Online Files"
+    hosts = ["traypublisher"]
+    families = ["online"]
+    order = ValidateContentsOrder
+
+    optional = True
+
+    def process(self, instance):
+        project_name = instance.context.data["projectName"]
+        asset_id = instance.data["assetEntity"]["_id"]
+        subset = get_subset_by_name(
+            project_name, instance.data["subset"], asset_id)
+
+        if subset:
+            raise PublishValidationError(
+                "Subset to be published already exists.",
+                title=self.label
+            )

From 9d304f07da447f9a5686be702d6a930c0dc774dd Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 11 Nov 2022 15:08:29 +0100
Subject: [PATCH 2046/2550] :art: add family to integrator

---
 openpype/plugins/publish/integrate.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py
index 0998e643e6..401270a788 100644
--- a/openpype/plugins/publish/integrate.py
+++ b/openpype/plugins/publish/integrate.py
@@ -129,7 +129,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
         "mvUsd",
         "mvUsdComposition",
         "mvUsdOverride",
-        "simpleUnrealTexture"
+        "simpleUnrealTexture",
+        "online"
     ]
 
     default_template_name = "publish"

From cae09e0002ba379bbd5b39ce6720e6a2ff07b1ca Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 11 Nov 2022 15:08:55 +0100
Subject: [PATCH 2047/2550] :label: fix docstring hints

---
 openpype/client/entities.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/openpype/client/entities.py
b/openpype/client/entities.py
index 43afccf2f1..bbef8dc65e 100644
--- a/openpype/client/entities.py
+++ b/openpype/client/entities.py
@@ -389,10 +389,11 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
             returned if 'None' is passed.
 
     Returns:
-        None: If subset with specified filters was not found.
-        Dict: Subset document which can be reduced to specified 'fields'.
-    """
+        Union[str, Dict]: None if subset with specified filters was not found.
+            or dict subset document which can be reduced to
+            specified 'fields'.
 
+    """
     if not subset_name:
         return None
 

From deac4a33d41d9914a41437e21db6ac0af81d797c Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 11 Nov 2022 15:19:40 +0100
Subject: [PATCH 2048/2550] :rotating_light: fix hound :dog:

---
 .../traypublisher/plugins/create/create_online.py    | 12 ++++++------
 .../plugins/publish/validate_online_file.py           |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py
index 91016dc794..22d4b73aee 100644
--- a/openpype/hosts/traypublisher/plugins/create/create_online.py
+++ b/openpype/hosts/traypublisher/plugins/create/create_online.py
@@ -30,7 +30,7 @@ class OnlineCreator(TrayPublishCreator):
 
         This will publish files using template helping to retain original
         file name and that file name is used as subset name.
-
+
         By default it tries to guard against multiple publishes of the same
         file."""
 
@@ -52,11 +52,11 @@ class OnlineCreator(TrayPublishCreator):
         instance_data["originalBasename"] = origin_basename
         subset_name = origin_basename
 
-        path = (Path(
-            pre_create_data.get(
-                "representation_file")["directory"]
-        ) / pre_create_data.get(
-            "representation_file")["filenames"][0]).as_posix()
+        path = (
+            Path(
+                pre_create_data.get("representation_file")["directory"]
+            ) / pre_create_data.get("representation_file")["filenames"][0]
+        ).as_posix()
 
         instance_data["creator_attributes"] = {"path": path}
 
diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
index 86b9334184..12b2e72ced 100644
--- a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
+++ b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
@@ -6,7 +6,7 @@ from openpype.pipeline.publish import (
     PublishValidationError,
     OptionalPyblishPluginMixin,
 )
-from openpype.client import get_subset_by_name, get_asset_by_name
+from openpype.client import get_subset_by_name
 
 
 class ValidateOnlineFile(OptionalPyblishPluginMixin,

From dbd00b3751eb6e9ffa378eb0b0c5985afbfdf41e Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 11 Nov 2022 15:22:29 +0100
Subject: [PATCH 2049/2550] :rotating_light: hound fix 2

---
 .../hosts/traypublisher/plugins/create/create_online.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py
index 22d4b73aee..5a6373730d 100644
--- a/openpype/hosts/traypublisher/plugins/create/create_online.py
+++ b/openpype/hosts/traypublisher/plugins/create/create_online.py
@@ -52,11 +52,7 @@ class OnlineCreator(TrayPublishCreator):
         instance_data["originalBasename"] = origin_basename
         subset_name = origin_basename
 
-        path = (
-            Path(
-                pre_create_data.get("representation_file")["directory"]
-            ) / pre_create_data.get("representation_file")["filenames"][0]
-        ).as_posix()
+        path
= (Path(pre_create_data.get("representation_file")["directory"]) / pre_create_data.get("representation_file")["filenames"][0]).as_posix() # noqa instance_data["creator_attributes"] = {"path": path} From cf0cba1dba0d14b60ca1bff0f9d9170aff88bb43 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 11 Nov 2022 15:43:38 +0100 Subject: [PATCH 2050/2550] fix variable check in collect anatomy instance data --- openpype/plugins/publish/collect_anatomy_instance_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py index f67d3373d9..909b49a07d 100644 --- a/openpype/plugins/publish/collect_anatomy_instance_data.py +++ b/openpype/plugins/publish/collect_anatomy_instance_data.py @@ -188,7 +188,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): for subset_doc in subset_docs: subset_id = subset_doc["_id"] last_version_doc = last_version_docs_by_subset_id.get(subset_id) - if last_version_docs_by_subset_id is None: + if last_version_doc is None: continue asset_id = subset_doc["parent"] From fedf91934dc529e4882f31b69641060114517cac Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 12 Nov 2022 03:44:20 +0000 Subject: [PATCH 2051/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 81b2925fb5..1953d0d6a5 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.3" +__version__ = "3.14.7-nightly.4" From 88bf8840bd7757a19a20a460091800c0fc2741bb Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 17:57:32 +0800 Subject: [PATCH 2052/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 9583063c7e..605a492e4d 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -98,6 +98,7 @@ class AlembicStandinLoader(load.LoaderPlugin): # Update the standin standins = list() members = pm.sets(container['objectName'], query=True) + self.log.info("container:{}".format(container)) for member in members: shape = member.getShape() if (shape and shape.type() == "aiStandIn"): @@ -105,8 +106,11 @@ class AlembicStandinLoader(load.LoaderPlugin): for standin in standins: standin.dso.set(path) - standin.useFrameExtension.set(0) standin.abcFPS.set(float(fps)) + if "modelMain" in container['objectName']: + standin.useFrameExtension.set(0) + else: + standin.useFrameExtension.set(1) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From ae8342c57932806f05b7e13a7d82ad7d0b5c4d0b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 18:40:20 +0800 Subject: [PATCH 2053/2550] aov Filtering --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 
365feefb48189989976a95f65ec1d8a7fc6467cf Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 19:13:23 +0800 Subject: [PATCH 2054/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/settings/defaults/project_settings/maya.json | 2 +- .../schemas/schema_maya_render_settings.json | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 09f3e61391..54b70b4a44 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -64,8 +64,8 @@ "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, - "remove_aovs": false, "tiled": true, + "remove_aovs": false, "aov_list": [], "additional_options": [] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 9beea16b97..98d33ade91 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -70,13 +70,13 @@ "type": "boolean" }, { - "key": "remove_aovs", - "label": "Remove AOVs", + "key": "tiled", + "label": "Tiled (tif, exr)", "type": "boolean" }, { - "key": "tiled", - "label": "Tiled (tif, exr)", + "key": "remove_aovs", + "label": "Remove existing AOVs", "type": "boolean" }, { From f6495ca956c709cf33654d12c80cadedb5a272d3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Nov 2022 13:30:41 +0100 Subject: [PATCH 2055/2550] OP-4394 - extension is lowercased in Setting and in uploaded files --- .../webpublisher/plugins/publish/collect_published_files.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 2bf097de41..ac4ade4e48 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -86,6 +86,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): first_file = task_data["files"][0] _, extension = os.path.splitext(first_file) + extension = extension.lower() family, families, tags = self._get_family( self.task_type_to_family, task_type, @@ -244,7 +245,10 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): for config in families_config: if is_sequence != config["is_sequence"]: continue - if (extension in config["extensions"] or + + lower_extensions = [ext.lower() + for ext in config.get("extensions", [])] + if (extension.lower() in lower_extensions or '' in config["extensions"]): # all extensions setting found_family = config["result_family"] break From 93b9dd7224e669c4f453dc0578ebc57ce0812c6f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Nov 2022 13:40:28 +0100 Subject: [PATCH 2056/2550] OP-4394 - extension is lowercased in Setting and in uploaded files --- .../webpublisher/plugins/publish/collect_published_files.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index ac4ade4e48..40f4da9403 100644 --- 
a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -247,9 +247,9 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): continue lower_extensions = [ext.lower() - for ext in config.get("extensions", [])] + for ext in config.get("extensions", [''])] if (extension.lower() in lower_extensions or - '' in config["extensions"]): # all extensions setting + lower_extensions[0] == ''): # all extensions setting found_family = config["result_family"] break From 6a1846a36b06184fa16122c4c75b872baf29a012 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 21:10:29 +0800 Subject: [PATCH 2057/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 5 +++-- openpype/settings/defaults/project_settings/maya.json | 2 +- .../schemas/schema_maya_render_settings.json | 10 +++++----- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 9acb65b84c..24c183b938 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -101,14 +101,15 @@ class RenderSettings(object): from mtoa.core import createOptions # noqa from mtoa.aovs import AOVInterface # noqa createOptions() - arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] # noqa + render_settings = self._project_settings["maya"]["RenderSettings"] + arnold_render_presets = render_settings["arnold_renderer"] # noqa # Force resetting settings and AOV list to avoid having to deal with # AOV checking logic, for now. # This is a work around because the standard # function to revert render settings does not reset AOVs list in MtoA # Fetch current aovs in case there's any. current_aovs = AOVInterface().getAOVs() - remove_aovs = arnold_render_presets["remove_aovs"] + remove_aovs = render_settings["remove_aovs"] if remove_aovs: # Remove fetched AOVs AOVInterface().removeAOVs(current_aovs) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 54b70b4a44..f97ea47b52 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -59,13 +59,13 @@ "default_render_image_folder": "renders/maya", "enable_all_lights": true, "aov_separator": "underscore", + "remove_aovs": false, "reset_current_frame": false, "arnold_renderer": { "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, "tiled": true, - "remove_aovs": false, "aov_list": [], "additional_options": [] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 98d33ade91..c1bafc4108 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -31,6 +31,11 @@ {"dot": ". 
(dot)"} ] }, + { + "key": "remove_aovs", + "label": "Remove existing AOVs", + "type": "boolean" + }, { "key": "reset_current_frame", "label": "Reset Current Frame", @@ -74,11 +79,6 @@ "label": "Tiled (tif, exr)", "type": "boolean" }, - { - "key": "remove_aovs", - "label": "Remove existing AOVs", - "type": "boolean" - }, { "key": "aov_list", "label": "AOVs to create", From b17eb8df7d42f2b5fa5680785e4f5570f8f7fd9e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 21:24:57 +0800 Subject: [PATCH 2058/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 24c183b938..3e7e62a7a8 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -146,12 +146,17 @@ class RenderSettings(object): def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" - redshift_render_presets = ( - self._project_settings - ["maya"] - ["RenderSettings"] - ["redshift_renderer"] - ) + render_settings = self._project_settings["maya"]["RenderSettings"] + redshift_render_presets = render_settings["redshift_renderer"] + + remove_aovs = render_settings["remove_aovs"] + if remove_aovs: + aovs = cmds.ls(type='RedshiftAOV') + for aov in aovs: + enabled = cmds.getAttr("{}.enabled".format(aov)) + if enabled: + cmds.delete(aov) + additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] From 85d8edcd00edac8ab5b198b014f0f86acde6f1b6 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 21:25:51 +0800 Subject: [PATCH 2059/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 3e7e62a7a8..bc817c862e 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -146,7 +146,7 @@ class RenderSettings(object): def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" - render_settings = self._project_settings["maya"]["RenderSettings"] + render_settings = self._project_settings["maya"]["RenderSettings"] redshift_render_presets = render_settings["redshift_renderer"] remove_aovs = render_settings["remove_aovs"] From 1e995ea6d921611f221c3352a958cc1d960e8884 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:06:12 +0100 Subject: [PATCH 2060/2550] remove reviewMain checks --- .../kitsu/plugins/publish/integrate_kitsu_review.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index 61d5a13660..bf77f2c892 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -32,13 +32,8 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): continue review_path = representation.get("published_path") - file_name, file_extension = os.path.splitext(review_path) - - if instance.data.get('name') != 
'reviewMain' \ - or file_extension != '.mp4': - continue - self.log.debug("Found review at: {}".format(review_path)) + gazu.task.add_preview( task, comment, review_path, normalize_movie=True ) From 6934b3e0ef92101871909d4b643c444001a4c478 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:09:18 +0100 Subject: [PATCH 2061/2550] add an option to chose which families will be uploaded to kitsu --- .../settings/defaults/project_settings/tvpaint.json | 6 ++++++ .../projects_schema/schema_project_tvpaint.json | 12 ++++++++++++ 2 files changed, 18 insertions(+) diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 88b5a598cd..2e413f50cd 100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -11,6 +11,12 @@ 255, 255, 255 + ], + "families_to_upload": [ + "review", + "renderpass", + "renderlayer", + "renderscene" ] }, "ValidateProjectSettings": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json index 20fe5b0855..0392c9089b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -56,6 +56,18 @@ "key": "review_bg", "label": "Review BG color", "use_alpha": false + }, + { + "type": "enum", + "key": "families_to_upload", + "label": "Families to upload", + "multiselection": true, + "enum_items": [ + {"review": "review"}, + {"renderpass": "renderPass"}, + {"renderlayer": "renderLayer"}, + {"renderscene": "renderScene"} + ] } ] }, From 4c1d1f961511e6fe9a0a87d84bc16b1b3b710011 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:10:12 +0100 Subject: [PATCH 2062/2550] add review tag to the selected families in the tvpaint project settings --- openpype/hosts/tvpaint/plugins/publish/extract_sequence.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 77712347bd..d8aef1ab6b 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -127,9 +127,9 @@ class ExtractSequence(pyblish.api.Extractor): output_frame_start ) - # Fill tags and new families + # Fill tags and new families from project settings tags = [] - if family_lowered in ("review", "renderlayer", "renderscene"): + if family_lowered in self.families_to_upload: tags.append("review") # Sequence of one frame From fd08bbf17026aa3be3045804503342ce5f9a02c7 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:13:11 +0100 Subject: [PATCH 2063/2550] remove useless import --- openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index bf77f2c892..e5e6439439 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -import os import gazu import pyblish.api From 2e2386a349c82d244ae5c0383a865c1d72237c56 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 
22:47:33 +0800 Subject: [PATCH 2064/2550] adding removeAOVs in setting to allow users to choose whether keeping the aovs --- openpype/hosts/maya/api/lib_rendersettings.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index bc817c862e..fa09e26e9e 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -173,12 +173,16 @@ class RenderSettings(object): """Sets important settings for Vray.""" settings = cmds.ls(type="VRaySettingsNode") node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - vray_render_presets = ( - self._project_settings - ["maya"] - ["RenderSettings"] - ["vray_renderer"] - ) + render_settings = self._project_settings["maya"]["RenderSettings"] + vray_render_presets = render_settings["vray_renderer"] + # vrayRenderElement + remove_aovs = vray_render_presets["remove_aovs"] + if remove_aovs: + aovs = cmds.ls(type='VRayRenderElement') + for aov in aovs: + enabled = cmds.getAttr("{}.enabled".format(aov)) + if enabled: + cmds.delete(aov) # Set aov separator # First we need to explicitly set the UI items in Render Settings # because that is also what V-Ray updates to when that Render Settings From 20ea1c8212a5d9226d162ce5532ad641ad9a2b73 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 14 Nov 2022 18:27:11 +0100 Subject: [PATCH 2065/2550] ignore case sensitivity of extension in files widget --- openpype/lib/attribute_definitions.py | 7 +++++++ openpype/tools/attribute_defs/files_widget.py | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 589a4ef9ab..6baeaec045 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -541,6 +541,13 @@ class FileDefItem(object): return ext return None + @property + def lower_ext(self): + ext = self.ext + if ext is not None: + return ext.lower() + return ext + @property def is_dir(self): if self.is_empty: diff --git a/openpype/tools/attribute_defs/files_widget.py b/openpype/tools/attribute_defs/files_widget.py index 3f1e6a34e1..738e50ba07 100644 --- a/openpype/tools/attribute_defs/files_widget.py +++ b/openpype/tools/attribute_defs/files_widget.py @@ -349,7 +349,7 @@ class FilesModel(QtGui.QStandardItemModel): item.setData(file_item.filenames, FILENAMES_ROLE) item.setData(file_item.directory, DIRPATH_ROLE) item.setData(icon_pixmap, ITEM_ICON_ROLE) - item.setData(file_item.ext, EXT_ROLE) + item.setData(file_item.lower_ext, EXT_ROLE) item.setData(file_item.is_dir, IS_DIR_ROLE) item.setData(file_item.is_sequence, IS_SEQUENCE_ROLE) @@ -463,7 +463,7 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): for filepath in filepaths: if os.path.isfile(filepath): _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: + if ext.lower() in self._allowed_extensions: return True elif self._allow_folders: @@ -475,7 +475,7 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): for filepath in filepaths: if os.path.isfile(filepath): _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: + if ext.lower() in self._allowed_extensions: filtered_paths.append(filepath) elif self._allow_folders: From 6f65ea4f54590e0e3267880aa64454a56e490005 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 02:39:31 +0800 Subject: [PATCH 2066/2550] adding removeAOVs in setting to allow 
users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index fa09e26e9e..de849db21c 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -176,10 +176,11 @@ class RenderSettings(object): render_settings = self._project_settings["maya"]["RenderSettings"] vray_render_presets = render_settings["vray_renderer"] # vrayRenderElement - remove_aovs = vray_render_presets["remove_aovs"] + remove_aovs = render_settings["remove_aovs"] if remove_aovs: aovs = cmds.ls(type='VRayRenderElement') for aov in aovs: + # remove all aovs except LightSelect enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) From 6190e6ba111f66bf3be8beb52b54191aa8fd272f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 03:07:22 +0800 Subject: [PATCH 2067/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index de849db21c..f64a86ee07 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -184,6 +184,12 @@ class RenderSettings(object): enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) + # remove LightSelect + lightSelect_aovs =cmds.ls(type='VRayRenderElementSet') + for light_aovs in lightSelect_aovs: + light_enabled = cmds.getAttr("{}.enabled".format(light_aovs)) + if light_enabled: + cmds.delete(lightSelect_aovs) # Set aov separator # First we need to explicitly set the UI items in Render Settings # because that is also what V-Ray updates to when that Render Settings From 1b00dec8de85264a05dbb2cdfb5528c3864b094f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 03:08:08 +0800 Subject: [PATCH 2068/2550] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index f64a86ee07..1293f1287d 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -185,7 +185,7 @@ class RenderSettings(object): if enabled: cmds.delete(aov) # remove LightSelect - lightSelect_aovs =cmds.ls(type='VRayRenderElementSet') + lightSelect_aovs = cmds.ls(type='VRayRenderElementSet') for light_aovs in lightSelect_aovs: light_enabled = cmds.getAttr("{}.enabled".format(light_aovs)) if light_enabled: From 088e7507b3144a7deb3cafe4b51286649e446079 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 15 Nov 2022 09:06:09 +0100 Subject: [PATCH 2069/2550] logging format --- .../hooks/pre_copy_last_published_workfile.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 0e561334e1..44144e5fff 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -75,16 +75,20 @@ class CopyLastPublishedWorkfile(PreLaunchHook): 
self.log.info( ( "Seems like old version of settings is used." - ' Can\'t access custom templates in host "{}".' - ).format(host_name) + ' Can\'t access custom templates in host "{}".'.format( + host_name + ) + ) ) return elif use_last_published_workfile is False: self.log.info( ( 'Project "{}" has turned off to use last published' - ' workfile as first workfile for host "{}"' - ).format(project_name, host_name) + ' workfile as first workfile for host "{}"'.format( + project_name, host_name + ) + ) ) return @@ -114,8 +118,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) if not subset_id: - self.log.debug('No any workfile for asset "{}".').format( - asset_doc["name"] + self.log.debug( + 'No any workfile for asset "{}".'.format(asset_doc["name"]) ) return @@ -131,8 +135,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ( representation for representation in get_representations( - project_name, - version_ids=[last_version_doc["_id"]] + project_name, version_ids=[last_version_doc["_id"]] ) if representation["context"]["task"]["name"] == task_name ), @@ -141,8 +144,10 @@ class CopyLastPublishedWorkfile(PreLaunchHook): if not workfile_representation: self.log.debug( - 'No published workfile for task "{}" and host "{}".' - ).format(task_name, host_name) + 'No published workfile for task "{}" and host "{}".'.format( + task_name, host_name + ) + ) return local_site_id = get_local_site_id() @@ -152,13 +157,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): local_site_id, force=True, priority=99, - reset_timer=True + reset_timer=True, ) while not sync_server.is_representation_on_site( - project_name, - workfile_representation["_id"], - local_site_id + project_name, workfile_representation["_id"], local_site_id ): sleep(5) From 3b9662f97cfb19be8a44b95e16fa50785e22ea21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 15 Nov 2022 13:26:22 +0100 Subject: [PATCH 2070/2550] added settings for validate frame range in tray publisher --- .../project_settings/traypublisher.json | 7 +++++ .../schema_project_traypublisher.json | 18 +++++++++++++ .../schemas/template_validate_plugin.json | 26 +++++++++++++++++++ 3 files changed, 51 insertions(+) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 5db2a79772..e99b96b8c4 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -303,5 +303,12 @@ "extensions": [ ".mov" ] + }, + "publish": { + "ValidateFrameRange": { + "enabled": true, + "optional": true, + "active": true + } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 7c61aeed50..faa5033d2a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -311,6 +311,24 @@ "object_type": "text" } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "schema_template", + "name": "template_validate_plugin", + "template_data": [ + { + "key": "ValidateFrameRange", + "label": "Validate frame range" + } + ] + } + ] } ] } diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json new file mode 100644 index 0000000000..b57cad6719 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json @@ -0,0 +1,26 @@ +[ + { + "type": "dict", + "collapsible": true, + "key": "{key}", + "label": "{label}", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "type": "boolean", + "key": "active", + "label": "Active" + } + ] + } +] From 09be994ca11cf21c95e2e93e075e21c9ea190bbf Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 20:48:14 +0800 Subject: [PATCH 2071/2550] layout publish more than one container issue --- .../maya/plugins/publish/extract_layout.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index a801d99f42..a5131efca9 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -15,6 +15,7 @@ class ExtractLayout(publish.Extractor): label = "Extract Layout" hosts = ["maya"] families = ["layout"] + project_container = "AVALON_CONTAINERS" optional = True def process(self, instance): @@ -33,13 +34,17 @@ class ExtractLayout(publish.Extractor): for asset in cmds.sets(str(instance), query=True): # Find the container - grp_name = asset.split(':')[0] - containers = cmds.ls("{}*_CON".format(grp_name)) - - assert len(containers) == 1, \ - "More than one container found for {}".format(asset) - - container = containers[0] + project_container = self.project_container + container_list = cmds.ls(project_container) + assert len(container_list) == 1, \ + "No project container found for {} " \ + "Please create instance with loaded asset".format(asset) + containers = cmds.sets(project_container, query=True) + for con in containers: + if "_CON" not in con: + assert containers == [], \ + "No container found for {}".format(asset) + container = con representation_id = cmds.getAttr( "{}.representation".format(container)) From 0dff5e86c5f3f7b0e917c1538feb4ee23a6eb034 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 21:08:48 +0800 Subject: [PATCH 2072/2550] layout publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index a5131efca9..c6eca0b05e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -37,13 +37,12 @@ class ExtractLayout(publish.Extractor): project_container = self.project_container container_list = cmds.ls(project_container) assert len(container_list) == 1, \ - "No project container found for {} " \ - "Please create instance with loaded asset".format(asset) + "Please create instance with loaded asset" containers = cmds.sets(project_container, query=True) for con in containers: if "_CON" not in con: assert containers == [], \ - "No container found for {}".format(asset) + "No container found for {}".format(asset) container = con representation_id = cmds.getAttr( From 
f18efd29b2aebe89f3cc8dbbbf03dc9bdfdff5b2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 15 Nov 2022 17:36:30 +0100 Subject: [PATCH 2073/2550] OP-4394 - fix - lowercase extension everywhere Without it it would be stored in DB uppercased and final name would also be uppercased. --- .../webpublisher/plugins/publish/collect_published_files.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 40f4da9403..265e78a6c7 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -181,6 +181,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): def _get_single_repre(self, task_dir, files, tags): _, ext = os.path.splitext(files[0]) + ext = ext.lower() repre_data = { "name": ext[1:], "ext": ext[1:], @@ -200,6 +201,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): frame_start = list(collections[0].indexes)[0] frame_end = list(collections[0].indexes)[-1] ext = collections[0].tail + ext = ext.lower() repre_data = { "frameStart": frame_start, "frameEnd": frame_end, From fbd7531a311d1a0287c45babb12a7b029cd50a7d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 15 Nov 2022 18:42:46 +0100 Subject: [PATCH 2074/2550] change label of stopped publishing --- openpype/tools/publisher/widgets/validations_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index 8c483e8088..935a12bc73 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -511,7 +511,7 @@ class ValidationsWidget(QtWidgets.QFrame): ) # After success publishing publish_started_widget = ValidationArtistMessage( - "Publishing went smoothly", self + "So far so good", self ) # After success publishing publish_stop_ok_widget = ValidationArtistMessage( From 4dd276fc4682f379bc1eaf2b088c24252920eeef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Nov 2022 23:23:26 +0000 Subject: [PATCH 2075/2550] Bump loader-utils from 1.4.1 to 1.4.2 in /website Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.1 to 1.4.2. - [Release notes](https://github.com/webpack/loader-utils/releases) - [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.2/CHANGELOG.md) - [Commits](https://github.com/webpack/loader-utils/compare/v1.4.1...v1.4.2) --- updated-dependencies: - dependency-name: loader-utils dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 177a4a3802..220a489dfa 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4812,9 +4812,9 @@ loader-runner@^4.2.0: integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== loader-utils@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" - integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== + version "1.4.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" + integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From ae51001ee89812ff0c34a1175318983ad708380b Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 16 Nov 2022 03:40:10 +0000 Subject: [PATCH 2076/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 1953d0d6a5..268f33083a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.4" +__version__ = "3.14.7-nightly.5" From 0645089ad61f0a893ce717a5cf4574ca81cd8ef2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 10:38:08 +0100 Subject: [PATCH 2077/2550] size of button is fully defined by style --- openpype/style/style.css | 4 ++++ openpype/tools/publisher/window.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 887c044dae..a7a48cdb9d 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1126,6 +1126,10 @@ ValidationArtistMessage QLabel { background: transparent; } +CreateNextPageOverlay { + font-size: 32pt; +} + /* Settings - NOT USED YET - we need to define font family for settings UI */ diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 281c7ad2a1..febf55b919 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -716,7 +716,7 @@ class PublisherWindow(QtWidgets.QDialog): def _update_create_overlay_size(self): metrics = self._create_overlay_button.fontMetrics() - size = int(metrics.height() * 3) + size = int(metrics.height()) end_pos_x = self.width() start_pos_x = end_pos_x - size From 20dacc342b5b4f5ff407fd616d0dc7818c551844 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 10:40:22 +0100 Subject: [PATCH 2078/2550] change style of button --- openpype/tools/publisher/widgets/widgets.py | 17 +++++++++-------- openpype/tools/publisher/window.py | 17 ++++++++++++----- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 71f476c4ef..ce3d91ce63 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1832,23 +1832,24 @@ class CreateNextPageOverlay(QtWidgets.QWidget): rect = QtCore.QRect(self.rect()) rect_width = rect.width() rect_height = rect.height() + radius = rect_width * 0.2 - size = rect_width * 0.9 - - x_offset = (rect_width - size) * 0.5 - y_offset = (rect_height 
- size) * 0.5 + x_offset = 0 + y_offset = 0 if self._anim_value != 1.0: x_offset += rect_width - (rect_width * self._anim_value) - arrow_half_height = size * 0.2 - arrow_x_start = x_offset + (size * 0.4) + arrow_height = rect_height * 0.4 + arrow_half_height = arrow_height * 0.5 + arrow_x_start = x_offset + ((rect_width - arrow_half_height) * 0.5) arrow_x_end = arrow_x_start + arrow_half_height center_y = rect.center().y() painter.setBrush(self._bg_color) - painter.drawEllipse( + painter.drawRoundedRect( x_offset, y_offset, - size, size + rect_width + radius, rect_height, + radius, radius ) src_arrow_path = QtGui.QPainterPath() diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index febf55b919..de26630312 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -257,7 +257,9 @@ class PublisherWindow(QtWidgets.QDialog): publish_btn.clicked.connect(self._on_publish_clicked) publish_frame.details_page_requested.connect(self._go_to_details_tab) - create_overlay_button.clicked.connect(self._go_to_publish_tab) + create_overlay_button.clicked.connect( + self._on_create_overlay_button_click + ) controller.event_system.add_callback( "instances.refresh.finished", self._on_instances_refresh @@ -471,6 +473,10 @@ class PublisherWindow(QtWidgets.QDialog): self._help_dialog.width(), self._help_dialog.height() ) + def _on_create_overlay_button_click(self): + self._create_overlay_button.set_under_mouse(False) + self._go_to_publish_tab() + def _on_tab_change(self, old_tab, new_tab): if old_tab == "details": self._publish_details_widget.close_details_popup() @@ -716,19 +722,20 @@ class PublisherWindow(QtWidgets.QDialog): def _update_create_overlay_size(self): metrics = self._create_overlay_button.fontMetrics() - size = int(metrics.height()) + height = int(metrics.height()) + width = int(height * 0.7) end_pos_x = self.width() - start_pos_x = end_pos_x - size + start_pos_x = end_pos_x - width center = self._content_widget.parent().mapTo( self, self._content_widget.rect().center() ) - pos_y = center.y() - (size * 0.5) + pos_y = center.y() - (height * 0.5) self._create_overlay_button.setGeometry( start_pos_x, pos_y, - size, size + width, height ) def _update_create_overlay_visibility(self, global_pos=None): From 91a4a06ab6e9f9e9a8c6378aeebe014fbe6c9a21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 11:32:26 +0100 Subject: [PATCH 2079/2550] change maximum number of frame start/end and clip in/out in anatomy settings --- .../schemas/schema_anatomy_attributes.json | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json index a2a566da0e..3667c9d5d8 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json @@ -16,22 +16,26 @@ { "type": "number", "key": "frameStart", - "label": "Frame Start" + "label": "Frame Start", + "maximum": 999999999 }, { "type": "number", "key": "frameEnd", - "label": "Frame End" + "label": "Frame End", + "maximum": 999999999 }, { "type": "number", "key": "clipIn", - "label": "Clip In" + "label": "Clip In", + "maximum": 999999999 }, { "type": "number", "key": "clipOut", - "label": "Clip Out" + "label": "Clip Out", + "maximum": 999999999 }, { "type": "number", From 
33656d00550c64f50d9e42088c389a43315cd905 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 11:32:43 +0100 Subject: [PATCH 2080/2550] project manager has higher max numbers --- openpype/tools/project_manager/project_manager/view.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/project_manager/project_manager/view.py b/openpype/tools/project_manager/project_manager/view.py index cca892ef72..8d1fe54e83 100644 --- a/openpype/tools/project_manager/project_manager/view.py +++ b/openpype/tools/project_manager/project_manager/view.py @@ -28,7 +28,7 @@ class NameDef: class NumberDef: def __init__(self, minimum=None, maximum=None, decimals=None): self.minimum = 0 if minimum is None else minimum - self.maximum = 999999 if maximum is None else maximum + self.maximum = 999999999 if maximum is None else maximum self.decimals = 0 if decimals is None else decimals From 8c6abf1c8faeea4ff5ecafa6c0e5dbfb22ce06cb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 11:32:59 +0100 Subject: [PATCH 2081/2550] remove duplicated key --- openpype/tools/settings/settings/constants.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/settings/settings/constants.py b/openpype/tools/settings/settings/constants.py index d98d18c8bf..23526e4de9 100644 --- a/openpype/tools/settings/settings/constants.py +++ b/openpype/tools/settings/settings/constants.py @@ -24,7 +24,6 @@ __all__ = ( "SETTINGS_PATH_KEY", "ROOT_KEY", - "SETTINGS_PATH_KEY", "VALUE_KEY", "SAVE_TIME_KEY", "PROJECT_NAME_KEY", From e11815b663d3910032fc6f2ec492df857ce91590 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Nov 2022 14:41:35 +0100 Subject: [PATCH 2082/2550] OP-4394 - safer handling of Settings extensions Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/collect_published_files.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 265e78a6c7..181f8b4ab7 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -247,11 +247,17 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): for config in families_config: if is_sequence != config["is_sequence"]: continue + extensions = config.get("extensions") or [] + lower_extensions = set() + for ext in extensions: + if ext: + ext = ext.lower() + if ext.startswith("."): + ext = ext[1:] + lower_extensions.add(ext) - lower_extensions = [ext.lower() - for ext in config.get("extensions", [''])] - if (extension.lower() in lower_extensions or - lower_extensions[0] == ''): # all extensions setting + # all extensions setting + if not lower_extensions or extension in lower_extensions: found_family = config["result_family"] break From ed7795061f946ca71e7c3b09977c68525e3cd24c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 12 Nov 2022 03:44:20 +0000 Subject: [PATCH 2083/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 81b2925fb5..1953d0d6a5 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.3" +__version__ = "3.14.7-nightly.4" From 
f9732a8385a75384d91a424ac007285a082c9a2a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 18:52:59 +0100 Subject: [PATCH 2084/2550] renamed 'CollectAvalonEntities' to 'CollectContextEntities' --- .../hosts/tvpaint/plugins/publish/collect_instance_frames.py | 2 +- openpype/hosts/tvpaint/plugins/publish/validate_marks.py | 2 +- .../plugins/publish/validate_tvpaint_workfile_data.py | 2 +- ...collect_avalon_entities.py => collect_context_entities.py} | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) rename openpype/plugins/publish/{collect_avalon_entities.py => collect_context_entities.py} (97%) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py index f291c363b8..d5b79758ad 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py @@ -6,7 +6,7 @@ class CollectOutputFrameRange(pyblish.api.ContextPlugin): When instances are collected context does not contain `frameStart` and `frameEnd` keys yet. They are collected in global plugin - `CollectAvalonEntities`. + `CollectContextEntities`. """ label = "Collect output frame range" order = pyblish.api.CollectorOrder diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py index 12d50e17ff..0030b0fd1c 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py @@ -39,7 +39,7 @@ class ValidateMarks(pyblish.api.ContextPlugin): def get_expected_data(context): scene_mark_in = context.data["sceneMarkIn"] - # Data collected in `CollectAvalonEntities` + # Data collected in `CollectContextEntities` frame_end = context.data["frameEnd"] frame_start = context.data["frameStart"] handle_start = context.data["handleStart"] diff --git a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py index a5e4868411..d8b7bb9078 100644 --- a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py @@ -13,7 +13,7 @@ class ValidateWorkfileData(pyblish.api.ContextPlugin): targets = ["tvpaint_worker"] def process(self, context): - # Data collected in `CollectAvalonEntities` + # Data collected in `CollectContextEntities` frame_start = context.data["frameStart"] frame_end = context.data["frameEnd"] handle_start = context.data["handleStart"] diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_context_entities.py similarity index 97% rename from openpype/plugins/publish/collect_avalon_entities.py rename to openpype/plugins/publish/collect_context_entities.py index 3b05b6ae98..0a6072a820 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_context_entities.py @@ -16,11 +16,11 @@ from openpype.client import get_project, get_asset_by_name from openpype.pipeline import legacy_io, KnownPublishError -class CollectAvalonEntities(pyblish.api.ContextPlugin): +class CollectContextEntities(pyblish.api.ContextPlugin): """Collect Anatomy into Context.""" order = pyblish.api.CollectorOrder - 0.1 - label = "Collect Avalon Entities" + label = "Collect Context Entities" def process(self, context): legacy_io.install() From 
910b7d7120be3982548bf8913410cbb71669f0e9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 18:53:34 +0100 Subject: [PATCH 2085/2550] get "asset" and "task" from context --- openpype/plugins/publish/collect_context_entities.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/collect_context_entities.py b/openpype/plugins/publish/collect_context_entities.py index 0a6072a820..31fbeb5dbd 100644 --- a/openpype/plugins/publish/collect_context_entities.py +++ b/openpype/plugins/publish/collect_context_entities.py @@ -3,6 +3,8 @@ Requires: session -> AVALON_ASSET context -> projectName + context -> asset + context -> task Provides: context -> projectEntity - Project document from database. @@ -13,20 +15,19 @@ Provides: import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import legacy_io, KnownPublishError +from openpype.pipeline import KnownPublishError class CollectContextEntities(pyblish.api.ContextPlugin): - """Collect Anatomy into Context.""" + """Collect entities into Context.""" order = pyblish.api.CollectorOrder - 0.1 label = "Collect Context Entities" def process(self, context): - legacy_io.install() project_name = context.data["projectName"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] + asset_name = context.data["asset"] + task_name = context.data["task"] project_entity = get_project(project_name) if not project_entity: From 017720d754d5912d4df5a233168c64f3caccd56f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 19:00:27 +0100 Subject: [PATCH 2086/2550] get "task" from context in anatomy context data --- openpype/plugins/publish/collect_anatomy_context_data.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 8433816908..55ce8e06f4 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -15,7 +15,6 @@ Provides: import json import pyblish.api -from openpype.pipeline import legacy_io from openpype.pipeline.template_data import get_template_data @@ -53,7 +52,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): asset_entity = context.data.get("assetEntity") task_name = None if asset_entity: - task_name = legacy_io.Session["AVALON_TASK"] + task_name = context.data["task"] anatomy_data = get_template_data( project_entity, asset_entity, task_name, host_name, system_settings From db6bfcb1ee978a3aa77ed00b1dbc2462715a187b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Jul 2022 03:59:40 +0000 Subject: [PATCH 2087/2550] Bump terser from 5.10.0 to 5.14.2 in /website Bumps [terser](https://github.com/terser/terser) from 5.10.0 to 5.14.2. - [Release notes](https://github.com/terser/terser/releases) - [Changelog](https://github.com/terser/terser/blob/master/CHANGELOG.md) - [Commits](https://github.com/terser/terser/commits) --- updated-dependencies: - dependency-name: terser dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- website/yarn.lock | 64 +++++++++++++++++++++++++++++++++-------------- 1 file changed, 45 insertions(+), 19 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 7af15e9145..177a4a3802 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -1543,15 +1543,37 @@ dependencies: "@hapi/hoek" "^9.0.0" +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/resolve-uri@^3.0.3": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.5.tgz#68eb521368db76d040a6315cdb24bf2483037b9c" - integrity sha512-VPeQ7+wH0itvQxnG+lIzWgkysKIr3L9sslimFW55rHMdGu/qCQ5z5h9zq4gI8uBtqkpHhsF4Z/OwExufUCThew== + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.11" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz#771a1d8d744eeb71b6adb35808e1a6c7b9b8c8ec" - integrity sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg== + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== "@jridgewell/trace-mapping@^0.3.0": version "0.3.4" @@ -1561,6 +1583,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.14" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" + integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@mdx-js/mdx@1.6.22", "@mdx-js/mdx@^1.6.21": version "1.6.22" resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-1.6.22.tgz#8a723157bf90e78f17dc0f27995398e6c731f1ba" @@ -2140,10 +2170,10 @@ acorn@^6.1.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== -acorn@^8.0.4, 
acorn@^8.4.1: - version "8.7.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" - integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== +acorn@^8.0.4, acorn@^8.4.1, acorn@^8.5.0: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== address@^1.0.1, address@^1.1.2: version "1.1.2" @@ -6843,11 +6873,6 @@ source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== -source-map@~0.7.2: - version "0.7.3" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" - integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== - sourcemap-codec@^1.4.4: version "1.4.8" resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" @@ -7053,12 +7078,13 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.4: terser "^5.7.2" terser@^5.10.0, terser@^5.7.2: - version "5.10.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.10.0.tgz#b86390809c0389105eb0a0b62397563096ddafcc" - integrity sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA== + version "5.14.2" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" + integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA== dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" commander "^2.20.0" - source-map "~0.7.2" source-map-support "~0.5.20" text-table@^0.2.0: From ff760342c7719238e9ae06f9bb23c8747cabb615 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 14 Nov 2022 18:27:11 +0100 Subject: [PATCH 2088/2550] ignore case sensitivity of extension in files widget --- openpype/lib/attribute_definitions.py | 7 +++++++ openpype/tools/attribute_defs/files_widget.py | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 589a4ef9ab..6baeaec045 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -541,6 +541,13 @@ class FileDefItem(object): return ext return None + @property + def lower_ext(self): + ext = self.ext + if ext is not None: + return ext.lower() + return ext + @property def is_dir(self): if self.is_empty: diff --git a/openpype/tools/attribute_defs/files_widget.py b/openpype/tools/attribute_defs/files_widget.py index 3f1e6a34e1..738e50ba07 100644 --- a/openpype/tools/attribute_defs/files_widget.py +++ b/openpype/tools/attribute_defs/files_widget.py @@ -349,7 +349,7 @@ class FilesModel(QtGui.QStandardItemModel): item.setData(file_item.filenames, FILENAMES_ROLE) item.setData(file_item.directory, DIRPATH_ROLE) item.setData(icon_pixmap, ITEM_ICON_ROLE) - item.setData(file_item.ext, EXT_ROLE) + item.setData(file_item.lower_ext, EXT_ROLE) item.setData(file_item.is_dir, IS_DIR_ROLE) item.setData(file_item.is_sequence, IS_SEQUENCE_ROLE) @@ -463,7 +463,7 @@ class 
FilesProxyModel(QtCore.QSortFilterProxyModel): for filepath in filepaths: if os.path.isfile(filepath): _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: + if ext.lower() in self._allowed_extensions: return True elif self._allow_folders: @@ -475,7 +475,7 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): for filepath in filepaths: if os.path.isfile(filepath): _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: + if ext.lower() in self._allowed_extensions: filtered_paths.append(filepath) elif self._allow_folders: From 213c78b9a019ac3a8956718e19564b9d5bdfa067 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 00:16:06 +0200 Subject: [PATCH 2089/2550] Avoid name conflict where `group_name != group_node` due to maya auto renaming new node --- openpype/hosts/maya/plugins/load/load_yeti_cache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py index 090047e22d..5ba381050a 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py @@ -73,8 +73,8 @@ class YetiCacheLoader(load.LoaderPlugin): c = colors.get(family) if c is not None: - cmds.setAttr(group_name + ".useOutlinerColor", 1) - cmds.setAttr(group_name + ".outlinerColor", + cmds.setAttr(group_node + ".useOutlinerColor", 1) + cmds.setAttr(group_node + ".outlinerColor", (float(c[0])/255), (float(c[1])/255), (float(c[2])/255) From d662b34ca7a70ddb797b4aef4d570028c23a5031 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 15 Nov 2022 13:26:22 +0100 Subject: [PATCH 2090/2550] added settings for validate frame range in tray publisher --- .../project_settings/traypublisher.json | 7 +++++ .../schema_project_traypublisher.json | 18 +++++++++++++ .../schemas/template_validate_plugin.json | 26 +++++++++++++++++++ 3 files changed, 51 insertions(+) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 5db2a79772..e99b96b8c4 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -303,5 +303,12 @@ "extensions": [ ".mov" ] + }, + "publish": { + "ValidateFrameRange": { + "enabled": true, + "optional": true, + "active": true + } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 7c61aeed50..faa5033d2a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -311,6 +311,24 @@ "object_type": "text" } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "schema_template", + "name": "template_validate_plugin", + "template_data": [ + { + "key": "ValidateFrameRange", + "label": "Validate frame range" + } + ] + } + ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json new file mode 100644 index 
0000000000..b57cad6719 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json @@ -0,0 +1,26 @@ +[ + { + "type": "dict", + "collapsible": true, + "key": "{key}", + "label": "{label}", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "type": "boolean", + "key": "active", + "label": "Active" + } + ] + } +] From 125d0bbeed7b07640bc34dd877dac2e4c814895f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Mon, 31 Oct 2022 13:28:50 +0100 Subject: [PATCH 2091/2550] Feature: Auto download last published workfile as first workfile --- .../hooks/pre_copy_last_published_workfile.py | 124 ++++++++++++++++++ openpype/modules/sync_server/sync_server.py | 9 +- 2 files changed, 132 insertions(+), 1 deletion(-) create mode 100644 openpype/hooks/pre_copy_last_published_workfile.py diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py new file mode 100644 index 0000000000..004f9d25e7 --- /dev/null +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -0,0 +1,124 @@ +import gc +import os +import shutil +from openpype.client.entities import ( + get_last_version_by_subset_id, + get_representations, + get_subsets, +) +from openpype.lib import PreLaunchHook +from openpype.modules.base import ModulesManager +from openpype.pipeline.load.utils import get_representation_path + + +class CopyLastPublishedWorkfile(PreLaunchHook): + """Copy last published workfile as first workfile. + + Prelaunch hook works only if last workfile leads to not existing file. + - That is possible only if it's first version. + """ + + # Before `AddLastWorkfileToLaunchArgs` + order = -1 + app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"] + + def execute(self): + """Check if local workfile doesn't exist, else copy it. + + 1- Check if setting for this feature is enabled + 2- Check if workfile in work area doesn't exist + 3- Check if published workfile exists and is copied locally in publish + + Returns: + None: This is a void method. + """ + # TODO setting + self.log.info("Trying to fetch last published workfile...") + + last_workfile = self.data.get("last_workfile_path") + if os.path.exists(last_workfile): + self.log.debug( + "Last workfile exists. 
Skipping {} process.".format( + self.__class__.__name__ + ) + ) + return + + project_name = self.data["project_name"] + task_name = self.data["task_name"] + + project_doc = self.data.get("project_doc") + asset_doc = self.data.get("asset_doc") + anatomy = self.data.get("anatomy") + if project_doc and asset_doc: + # Get subset id + subset_id = next( + ( + subset["_id"] + for subset in get_subsets( + project_name, + asset_ids=[asset_doc["_id"]], + fields=["_id", "data.family"], + ) + if subset["data"]["family"] == "workfile" + ), + None, + ) + if not subset_id: + return + + # Get workfile representation + workfile_representation = next( + ( + representation + for representation in get_representations( + project_name, + version_ids=[ + get_last_version_by_subset_id( + project_name, subset_id, fields=["_id"] + )["_id"] + ], + ) + if representation["context"]["task"]["name"] == task_name + ), + None, + ) + + if workfile_representation: # TODO add setting + # Get sync server from Tray, which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) + + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() + + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() + + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) + + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 8b11055e65..def9e6cfd8 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -236,7 +236,11 @@ class SyncServerThread(threading.Thread): """ def __init__(self, module): self.log = Logger.get_logger(self.__class__.__name__) - super(SyncServerThread, self).__init__() + + # Event to trigger files have been processed + self.files_processed = threading.Event() + + super(SyncServerThread, self).__init__(args=(self.files_processed,)) self.module = module self.loop = None self.is_running = False @@ -396,6 +400,8 @@ class SyncServerThread(threading.Thread): representation, site, error) + # Trigger files are processed + self.files_processed.set() duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -454,6 +460,7 @@ class SyncServerThread(threading.Thread): async def run_timer(self, delay): """Wait for 'delay' seconds to start next loop""" + self.files_processed.clear() await asyncio.sleep(delay) def reset_timer(self): From af15b0d9415d1bfd2bff978ad81d370484d36bdb Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:00:26 +0100 Subject: [PATCH 2092/2550] Project setting --- .../hooks/pre_copy_last_published_workfile.py | 119 ++++++++++++------ .../defaults/project_settings/global.json | 3 +- .../schemas/schema_global_tools.json | 5 + 3 files changed, 88 insertions(+), 39 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 004f9d25e7..312548d2db 100644 --- 
a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -7,8 +7,10 @@ from openpype.client.entities import ( get_subsets, ) from openpype.lib import PreLaunchHook +from openpype.lib.profiles_filtering import filter_profiles from openpype.modules.base import ModulesManager from openpype.pipeline.load.utils import get_representation_path +from openpype.settings.lib import get_project_settings class CopyLastPublishedWorkfile(PreLaunchHook): @@ -32,9 +34,45 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ - # TODO setting + project_name = self.data["project_name"] + task_name = self.data["task_name"] + task_type = self.data["task_type"] + host_name = self.application.host_name + + # Check settings has enabled it + project_settings = get_project_settings(project_name) + profiles = project_settings["global"]["tools"]["Workfiles"][ + "last_workfile_on_startup" + ] + filter_data = { + "tasks": task_name, + "task_types": task_type, + "hosts": host_name, + } + last_workfile_settings = filter_profiles(profiles, filter_data) + use_last_published_workfile = last_workfile_settings.get( + "use_last_published_workfile" + ) + if use_last_published_workfile is None: + self.log.info( + ( + "Seems like old version of settings is used." + ' Can\'t access custom templates in host "{}".' + ).format(host_name) + ) + return + elif use_last_published_workfile is False: + self.log.info( + ( + 'Project "{}" has turned off to use last published workfile' + ' as first workfile for host "{}"' + ).format(project_name, host_name) + ) + return + self.log.info("Trying to fetch last published workfile...") + # Check there is no workfile available last_workfile = self.data.get("last_workfile_path") if os.path.exists(last_workfile): self.log.debug( @@ -44,9 +82,6 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return - project_name = self.data["project_name"] - task_name = self.data["task_name"] - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") @@ -65,6 +100,9 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) if not subset_id: + self.log.debug('No any workfile for asset "{}".').format( + asset_doc["name"] + ) return # Get workfile representation @@ -84,41 +122,46 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) - if workfile_representation: # TODO add setting - # Get sync server from Tray, which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if t["sync_server"].sync_server_thread - ), - None, - ) + if not workfile_representation: + self.log.debug( + 'No published workfile for task "{}" and host "{}".' 
+ ).format(task_name, host_name) + return - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, - ) - sync_server.reset_timer() + # Get sync server from Tray, which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) - # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots - ) - local_workfile_dir = os.path.dirname(last_workfile) + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() - # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir - ) + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) + + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 9c3f2f1e1b..7daa4afa79 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -458,7 +458,8 @@ "hosts": [], "task_types": [], "tasks": [], - "enabled": true + "enabled": true, + "use_last_published_workfile": false } ], "open_workfile_tool_on_startup": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index ba446135e2..962008d476 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -149,6 +149,11 @@ "type": "boolean", "key": "enabled", "label": "Enabled" + }, + { + "type": "boolean", + "key": "use_last_published_workfile", + "label": "Use last published workfile" } ] } From 7a7c91c418f1084dacd25e6aa453e0c70caf9fcd Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:10:12 +0100 Subject: [PATCH 2093/2550] docstring --- openpype/hooks/pre_copy_last_published_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 312548d2db..b1b2fe2366 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -30,6 +30,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): 1- Check if setting for this feature is enabled 2- Check if workfile in work area doesn't exist 3- Check if published workfile exists and is copied locally in publish + 4- Substitute copied published workfile as first workfile Returns: None: This is a void method. 
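The prelaunch hook introduced in the patches above reduces to a small gate: a settings profile matched on host, task and task type must have "use_last_published_workfile" enabled, and the hook only proceeds when the work area has no workfile yet. A minimal standalone sketch of that decision, for illustration only (the profile dict and the path below are made up, not taken from the patches):

    import os

    def should_copy_published_workfile(profile, last_workfile_path):
        # No matching profile, or the flag is off -> keep default behaviour.
        if not profile or not profile.get("use_last_published_workfile"):
            return False
        # Only act when no local workfile exists yet (first version).
        return not os.path.exists(last_workfile_path)

    # Feature enabled and no local workfile on disk -> the hook would go on
    # to fetch and copy the last published workfile.
    print(should_copy_published_workfile(
        {"enabled": True, "use_last_published_workfile": True},
        "/projects/demo/sh010/work/sh010_v001.blend",
    ))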
From e24489c463af8ce3a83807df69af984357363bfb Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:14:58 +0100 Subject: [PATCH 2094/2550] comment length --- openpype/hooks/pre_copy_last_published_workfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index b1b2fe2366..d342151823 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -129,7 +129,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # Get sync server from Tray, which handles the asynchronous thread instance + # Get sync server from Tray, + # which handles the asynchronous thread instance sync_server = next( ( t["sync_server"] From 17853d0b3b55658310bef044eb65bed19d533bed Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:50:30 +0100 Subject: [PATCH 2095/2550] lint --- openpype/hooks/pre_copy_last_published_workfile.py | 4 ++-- openpype/modules/sync_server/sync_server.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index d342151823..cf4edeac9b 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -65,8 +65,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): elif use_last_published_workfile is False: self.log.info( ( - 'Project "{}" has turned off to use last published workfile' - ' as first workfile for host "{}"' + 'Project "{}" has turned off to use last published' + ' workfile as first workfile for host "{}"' ).format(project_name, host_name) ) return diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index def9e6cfd8..353b39c4e1 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -239,7 +239,7 @@ class SyncServerThread(threading.Thread): # Event to trigger files have been processed self.files_processed = threading.Event() - + super(SyncServerThread, self).__init__(args=(self.files_processed,)) self.module = module self.loop = None From 881bcebd1dbec626d1b1e48ebf079746ad567b0c Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 11:41:59 +0100 Subject: [PATCH 2096/2550] requested cosmetic changes --- .../hooks/pre_copy_last_published_workfile.py | 172 +++++++++--------- 1 file changed, 90 insertions(+), 82 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index cf4edeac9b..7a835507f7 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -35,6 +35,17 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ + # Check there is no workfile available + last_workfile = self.data.get("last_workfile_path") + if os.path.exists(last_workfile): + self.log.debug( + "Last workfile exists. 
Skipping {} process.".format( + self.__class__.__name__ + ) + ) + return + + # Get data project_name = self.data["project_name"] task_name = self.data["task_name"] task_type = self.data["task_type"] @@ -73,97 +84,94 @@ class CopyLastPublishedWorkfile(PreLaunchHook): self.log.info("Trying to fetch last published workfile...") - # Check there is no workfile available - last_workfile = self.data.get("last_workfile_path") - if os.path.exists(last_workfile): - self.log.debug( - "Last workfile exists. Skipping {} process.".format( - self.__class__.__name__ - ) - ) - return - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") - if project_doc and asset_doc: - # Get subset id - subset_id = next( - ( - subset["_id"] - for subset in get_subsets( - project_name, - asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family"], - ) - if subset["data"]["family"] == "workfile" - ), - None, - ) - if not subset_id: - self.log.debug('No any workfile for asset "{}".').format( - asset_doc["name"] - ) - return - # Get workfile representation - workfile_representation = next( - ( - representation - for representation in get_representations( - project_name, - version_ids=[ + # Check it can proceed + if not project_doc and not asset_doc: + return + + # Get subset id + subset_id = next( + ( + subset["_id"] + for subset in get_subsets( + project_name, + asset_ids=[asset_doc["_id"]], + fields=["_id", "data.family"], + ) + if subset["data"]["family"] == "workfile" + ), + None, + ) + if not subset_id: + self.log.debug('No any workfile for asset "{}".').format( + asset_doc["name"] + ) + return + + # Get workfile representation + workfile_representation = next( + ( + representation + for representation in get_representations( + project_name, + version_ids=[ + ( get_last_version_by_subset_id( project_name, subset_id, fields=["_id"] - )["_id"] - ], - ) - if representation["context"]["task"]["name"] == task_name - ), - None, - ) + ) + or {} + ).get("_id") + ], + ) + if representation["context"]["task"]["name"] == task_name + ), + None, + ) - if not workfile_representation: - self.log.debug( - 'No published workfile for task "{}" and host "{}".' - ).format(task_name, host_name) - return + if not workfile_representation: + self.log.debug( + 'No published workfile for task "{}" and host "{}".' 
+ ).format(task_name, host_name) + return - # Get sync server from Tray, - # which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if t["sync_server"].sync_server_thread - ), - None, - ) + # Get sync server from Tray, + # which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, - ) - sync_server.reset_timer() + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() - # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots - ) - local_workfile_dir = os.path.dirname(last_workfile) + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) - # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir - ) + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) From 9e01c5deaa1615316b82d6123df8ffa1101a15ec Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 18:33:54 +0100 Subject: [PATCH 2097/2550] Change to REST API using web server --- .../hooks/pre_copy_last_published_workfile.py | 54 ++++++++------- openpype/modules/sync_server/rest_api.py | 68 +++++++++++++++++++ openpype/modules/sync_server/sync_server.py | 12 ++-- .../modules/sync_server/sync_server_module.py | 9 +++ openpype/modules/timers_manager/rest_api.py | 2 +- 5 files changed, 112 insertions(+), 33 deletions(-) create mode 100644 openpype/modules/sync_server/rest_api.py diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 7a835507f7..cefc7e5d40 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -1,14 +1,14 @@ -import gc import os import shutil +from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, get_representations, get_subsets, ) from openpype.lib import PreLaunchHook +from openpype.lib.local_settings import get_local_site_id from openpype.lib.profiles_filtering import filter_profiles -from openpype.modules.base import ModulesManager from openpype.pipeline.load.utils import get_representation_path from openpype.settings.lib import get_project_settings @@ -137,33 +137,37 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # Get sync server from Tray, - # which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if 
t["sync_server"].sync_server_thread - ), - None, - ) + # POST to webserver sites to add to representations + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + self.log.warning("Couldn't find webserver url") + return - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, + entry_point_url = "{}/sync_server".format(webserver_url) + rest_api_url = "{}/add_sites_to_representations".format( + entry_point_url + ) + try: + import requests + except Exception: + self.log.warning( + "Couldn't add sites to representations ('requests' is not available)" + ) + return + + requests.post( + rest_api_url, + json={ + "project_name": project_name, + "sites": [get_local_site_id()], + "representations": [str(workfile_representation["_id"])], + }, ) - sync_server.reset_timer() # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + rest_api_url = "{}/files_are_processed".format(entry_point_url) + while requests.get(rest_api_url).content: + sleep(5) # Get paths published_workfile_path = get_representation_path( diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py new file mode 100644 index 0000000000..b7c5d26d15 --- /dev/null +++ b/openpype/modules/sync_server/rest_api.py @@ -0,0 +1,68 @@ +from aiohttp.web_response import Response +from openpype.lib import Logger + + +class SyncServerModuleRestApi: + """ + REST API endpoint used for calling from hosts when context change + happens in Workfile app. + """ + + def __init__(self, user_module, server_manager): + self._log = None + self.module = user_module + self.server_manager = server_manager + + self.prefix = "/sync_server" + + self.register() + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + def register(self): + self.server_manager.add_route( + "POST", + self.prefix + "/add_sites_to_representations", + self.add_sites_to_representations, + ) + self.server_manager.add_route( + "GET", + self.prefix + "/files_are_processed", + self.files_are_processed, + ) + + async def add_sites_to_representations(self, request): + # Extract data from request + data = await request.json() + try: + project_name = data["project_name"] + sites = data["sites"] + representations = data["representations"] + except KeyError: + msg = ( + "Payload must contain fields 'project_name," + " 'sites' (list of names) and 'representations' (list of IDs)" + ) + self.log.error(msg) + return Response(status=400, message=msg) + + # Add all sites to each representation + for representation_id in representations: + for site in sites: + self.module.add_site( + project_name, representation_id, site, force=True + ) + + # Force timer to run immediately + self.module.reset_timer() + + return Response(status=200) + + async def files_are_processed(self, _request): + return Response( + body=bytes(self.module.sync_server_thread.files_are_processed) + ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 353b39c4e1..7fd2311c2d 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -237,15 +237,13 @@ class SyncServerThread(threading.Thread): def __init__(self, module): self.log = Logger.get_logger(self.__class__.__name__) - # Event to trigger files have been processed - 
self.files_processed = threading.Event() - - super(SyncServerThread, self).__init__(args=(self.files_processed,)) + super(SyncServerThread, self).__init__() self.module = module self.loop = None self.is_running = False self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) self.timer = None + self.files_are_processed = False def run(self): self.is_running = True @@ -400,8 +398,8 @@ class SyncServerThread(threading.Thread): representation, site, error) - # Trigger files are processed - self.files_processed.set() + # Trigger files process finished + self.files_are_processed = False duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -460,7 +458,6 @@ class SyncServerThread(threading.Thread): async def run_timer(self, delay): """Wait for 'delay' seconds to start next loop""" - self.files_processed.clear() await asyncio.sleep(delay) def reset_timer(self): @@ -469,6 +466,7 @@ class SyncServerThread(threading.Thread): if self.timer: self.timer.cancel() self.timer = None + self.files_are_processed = True def _working_sites(self, project_name): if self.module.is_project_paused(project_name): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index e84c333a58..bff999723b 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -2089,6 +2089,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def cli(self, click_group): click_group.add_command(cli_main) + # Webserver module implementation + def webserver_initialization(self, server_manager): + """Add routes for syncs.""" + if self.tray_initialized: + from .rest_api import SyncServerModuleRestApi + self.rest_api_obj = SyncServerModuleRestApi( + self, server_manager + ) + @click.group(SyncServerModule.name, help="SyncServer module related commands.") def cli_main(): diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index 4a2e9e6575..979db9075b 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -21,7 +21,7 @@ class TimersManagerModuleRestApi: @property def log(self): if self._log is None: - self._log = Logger.get_logger(self.__ckass__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log def register(self): From 8d9542bb45088fec5c800fe7b7d9b76f5ca3c14c Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 18:37:13 +0100 Subject: [PATCH 2098/2550] lint --- openpype/hooks/pre_copy_last_published_workfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index cefc7e5d40..6bec4f7d2c 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -151,7 +151,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): import requests except Exception: self.log.warning( - "Couldn't add sites to representations ('requests' is not available)" + "Couldn't add sites to representations " + "('requests' is not available)" ) return From 5e02d7d2d71796b6826e320fd8cfbc3e77980d93 Mon Sep 17 00:00:00 2001 From: Felix David Date: Fri, 4 Nov 2022 10:06:59 +0100 Subject: [PATCH 2099/2550] legacy compatibility --- openpype/hooks/pre_copy_last_published_workfile.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git 
a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 6bec4f7d2c..f3293fa511 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -99,9 +99,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): for subset in get_subsets( project_name, asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family"], + fields=["_id", "data.family", "data.families"], ) - if subset["data"]["family"] == "workfile" + if subset["data"].get("family") == "workfile" + # Legacy compatibility + or "workfile" in subset["data"].get("families", {}) ), None, ) From 1c48c0936290cbd7013df50d83392f53a68d51dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 16:41:19 +0100 Subject: [PATCH 2100/2550] use 'created_dt' of representation --- openpype/client/entities.py | 28 +++++++++++++++++++ .../hooks/pre_copy_last_published_workfile.py | 19 +++++++++++-- 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43afccf2f1..43c2874f57 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,6 +6,7 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. """ +from datetime import datetime import re import collections @@ -1367,6 +1368,33 @@ def get_representation_parents(project_name, representation): return parents_by_repre_id[repre_id] +def get_representation_last_created_time_on_site( + representation: dict, site_name: str +) -> datetime: + """Get `created_dt` value for representation on site. + + Args: + representation (dict): Representation to get creation date of + site_name (str): Site from which to get the creation date + + Returns: + datetime: Created time of representation on site + """ + created_time = next( + ( + site.get("created_dt") + for site in representation["files"][0].get("sites", []) + if site["name"] == site_name + ), + None, + ) + if created_time: + return created_time + else: + # Use epoch as 'zero' time + return datetime.utcfromtimestamp(0) + + def get_thumbnail_id_from_source(project_name, src_type, src_id): """Receive thumbnail id from source entity. 
diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index f3293fa511..4eb66f6f85 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -3,6 +3,8 @@ import shutil from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, + get_representation_by_id, + get_representation_last_created_time_on_site, get_representations, get_subsets, ) @@ -158,18 +160,29 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return + local_site_id = get_local_site_id() requests.post( rest_api_url, json={ "project_name": project_name, - "sites": [get_local_site_id()], + "sites": [local_site_id], "representations": [str(workfile_representation["_id"])], }, ) # Wait for the download loop to end - rest_api_url = "{}/files_are_processed".format(entry_point_url) - while requests.get(rest_api_url).content: + last_created_time = get_representation_last_created_time_on_site( + workfile_representation, local_site_id + ) + while ( + last_created_time + >= get_representation_last_created_time_on_site( + get_representation_by_id( + project_name, workfile_representation["_id"] + ), + local_site_id, + ) + ): sleep(5) # Get paths From a614f0f805acd6d73c57dc68bc00a9d7834714cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:57:02 +0100 Subject: [PATCH 2101/2550] add priority to add_site --- openpype/modules/sync_server/rest_api.py | 17 ++++++----------- openpype/modules/sync_server/sync_server.py | 4 ---- .../modules/sync_server/sync_server_module.py | 13 ++++++++++--- 3 files changed, 16 insertions(+), 18 deletions(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index b7c5d26d15..e92ddc8eee 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -1,4 +1,5 @@ from aiohttp.web_response import Response +from openpype.client.entities import get_representation_by_id from openpype.lib import Logger @@ -29,11 +30,6 @@ class SyncServerModuleRestApi: self.prefix + "/add_sites_to_representations", self.add_sites_to_representations, ) - self.server_manager.add_route( - "GET", - self.prefix + "/files_are_processed", - self.files_are_processed, - ) async def add_sites_to_representations(self, request): # Extract data from request @@ -54,15 +50,14 @@ class SyncServerModuleRestApi: for representation_id in representations: for site in sites: self.module.add_site( - project_name, representation_id, site, force=True + project_name, + representation_id, + site, + force=True, + priority=99, ) # Force timer to run immediately self.module.reset_timer() return Response(status=200) - - async def files_are_processed(self, _request): - return Response( - body=bytes(self.module.sync_server_thread.files_are_processed) - ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 7fd2311c2d..d0a40a60ff 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -243,7 +243,6 @@ class SyncServerThread(threading.Thread): self.is_running = False self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) self.timer = None - self.files_are_processed = False def run(self): self.is_running = True @@ -398,8 +397,6 @@ class SyncServerThread(threading.Thread): representation, site, error) - # Trigger files process finished - self.files_are_processed = 
False duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -466,7 +463,6 @@ class SyncServerThread(threading.Thread): if self.timer: self.timer.cancel() self.timer = None - self.files_are_processed = True def _working_sites(self, project_name): if self.module.is_project_paused(project_name): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index bff999723b..6a1fc9a1c5 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -136,7 +136,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, project_name, representation_id, site_name=None, - force=False): + force=False, priority=None): """ Adds new site to representation to be synced. @@ -152,6 +152,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists + priority (int): set priority Throws: SiteAlreadyPresentError - if adding already existing site and @@ -167,7 +168,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.reset_site_on_representation(project_name, representation_id, site_name=site_name, - force=force) + force=force, + priority=priority) def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): @@ -1655,7 +1657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False): + remove=False, pause=None, force=False, priority=None): """ Reset information about synchronization for particular 'file_id' and provider. 
@@ -1678,6 +1680,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): remove (bool): if True remove site altogether pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site + priority (int): set priority Raises: SiteAlreadyPresentError - if adding already existing site and @@ -1705,6 +1708,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} + # Add priority + if priority: + elem["priority"] = priority + if file_id: # reset site for particular file self._reset_site_for_file(project_name, representation_id, elem, file_id, site_name) From b6365d85404b88dddcb218969f0a7e30e4668e08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:58:19 +0100 Subject: [PATCH 2102/2550] clean --- openpype/modules/sync_server/rest_api.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index e92ddc8eee..0c3b914833 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -1,5 +1,4 @@ from aiohttp.web_response import Response -from openpype.client.entities import get_representation_by_id from openpype.lib import Logger From 138051f2f4c9d0c705eeb1cd299166d0ca249850 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:59:03 +0100 Subject: [PATCH 2103/2550] clean --- openpype/modules/sync_server/sync_server_module.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 6a1fc9a1c5..951cb116fc 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1657,7 +1657,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, priority=None): + remove=False, pause=None, force=False, + priority=None): """ Reset information about synchronization for particular 'file_id' and provider. 
From 3a5ebc6ea29fd4ec34b0fc80c27f5cc187ace8e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 19:02:29 +0100 Subject: [PATCH 2104/2550] sort fields --- openpype/hooks/pre_copy_last_published_workfile.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 4eb66f6f85..acbc9ec1c7 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -178,7 +178,9 @@ class CopyLastPublishedWorkfile(PreLaunchHook): last_created_time >= get_representation_last_created_time_on_site( get_representation_by_id( - project_name, workfile_representation["_id"] + project_name, + workfile_representation["_id"], + fields=["files"], ), local_site_id, ) From 29f0dee272c9b0b27c4a6e4098caab2ed11a1d7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 19:04:03 +0100 Subject: [PATCH 2105/2550] clean --- openpype/modules/sync_server/sync_server_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 951cb116fc..1292bed9af 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1657,7 +1657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, + remove=False, pause=None, force=False, priority=None): """ Reset information about synchronization for particular 'file_id' From a600cf4dcad5f8caa08187ca2d449bbf9986623a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:18:23 +0100 Subject: [PATCH 2106/2550] fix last version check --- .../hooks/pre_copy_last_published_workfile.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index acbc9ec1c7..96b5ccadb2 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -116,19 +116,19 @@ class CopyLastPublishedWorkfile(PreLaunchHook): return # Get workfile representation + last_version_doc = get_last_version_by_subset_id( + project_name, subset_id, fields=["_id"] + ) + if not last_version_doc: + self.log.debug("Subset does not have any versions") + return + workfile_representation = next( ( representation for representation in get_representations( project_name, - version_ids=[ - ( - get_last_version_by_subset_id( - project_name, subset_id, fields=["_id"] - ) - or {} - ).get("_id") - ], + version_ids=[last_version_doc["_id"]] ) if representation["context"]["task"]["name"] == task_name ), From 7596610c160cf83b5dccb00c1638756312a54cf1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:03 +0100 Subject: [PATCH 2107/2550] replaced 'add_sites_to_representations' with 'reset_timer' in rest api --- openpype/modules/sync_server/rest_api.py | 31 +++--------------------- 1 file changed, 3 insertions(+), 28 deletions(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index 0c3b914833..51769cd4fb 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -26,36 
+26,11 @@ class SyncServerModuleRestApi: def register(self): self.server_manager.add_route( "POST", - self.prefix + "/add_sites_to_representations", - self.add_sites_to_representations, + self.prefix + "/reset_timer", + self.reset_timer, ) - async def add_sites_to_representations(self, request): - # Extract data from request - data = await request.json() - try: - project_name = data["project_name"] - sites = data["sites"] - representations = data["representations"] - except KeyError: - msg = ( - "Payload must contain fields 'project_name," - " 'sites' (list of names) and 'representations' (list of IDs)" - ) - self.log.error(msg) - return Response(status=400, message=msg) - - # Add all sites to each representation - for representation_id in representations: - for site in sites: - self.module.add_site( - project_name, - representation_id, - site, - force=True, - priority=99, - ) - + async def reset_timer(self, request): # Force timer to run immediately self.module.reset_timer() From 2052afc76a72d5845570e4900e527cabe1d1ecb1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:15 +0100 Subject: [PATCH 2108/2550] added ability to rese timer from add_site --- openpype/modules/sync_server/sync_server_module.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 1292bed9af..5e19a6fce0 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -136,7 +136,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, project_name, representation_id, site_name=None, - force=False, priority=None): + force=False, priority=None, reset_timer=False): """ Adds new site to representation to be synced. @@ -171,6 +171,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): force=force, priority=priority) + if reset_timer: + self.reset_timer() + def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): """ From f7c1fa01ae1b358b8af45ab777ebda4a6ba81bfc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:32 +0100 Subject: [PATCH 2109/2550] 'reset_timer' can reset timer via rest api endpoint --- .../modules/sync_server/sync_server_module.py | 37 ++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 5e19a6fce0..b505e25d2f 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -916,7 +916,42 @@ class SyncServerModule(OpenPypeModule, ITrayModule): In case of user's involvement (reset site), start that right away. 
""" - self.sync_server_thread.reset_timer() + + if not self.enabled: + return + + if self.sync_server_thread is None: + self._reset_timer_with_rest_api() + else: + self.sync_server_thread.reset_timer() + + def is_representaion_on_site( + self, project_name, representation_id, site_id + ): + # TODO implement + return False + + def _reset_timer_with_rest_api(self): + # POST to webserver sites to add to representations + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + self.log.warning("Couldn't find webserver url") + return + + rest_api_url = "{}/sync_server/reset_timer".format( + webserver_url + ) + + try: + import requests + except Exception: + self.log.warning( + "Couldn't add sites to representations " + "('requests' is not available)" + ) + return + + requests.post(rest_api_url) def get_enabled_projects(self): """Returns list of projects which have SyncServer enabled.""" From 1d028d22ba4b796b6e8fe700b70f2f2e87217edb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:20:21 +0100 Subject: [PATCH 2110/2550] updated prelaunch hook with new abilities of sync server --- .../hooks/pre_copy_last_published_workfile.py | 52 +++++-------------- 1 file changed, 12 insertions(+), 40 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 96b5ccadb2..6fd50a64d6 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -141,49 +141,21 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # POST to webserver sites to add to representations - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - if not webserver_url: - self.log.warning("Couldn't find webserver url") - return - - entry_point_url = "{}/sync_server".format(webserver_url) - rest_api_url = "{}/add_sites_to_representations".format( - entry_point_url - ) - try: - import requests - except Exception: - self.log.warning( - "Couldn't add sites to representations " - "('requests' is not available)" - ) - return - local_site_id = get_local_site_id() - requests.post( - rest_api_url, - json={ - "project_name": project_name, - "sites": [local_site_id], - "representations": [str(workfile_representation["_id"])], - }, + sync_server = self.modules_manager.get("sync_server") + sync_server.add_site( + project_name, + workfile_representation["_id"], + local_site_id, + force=True, + priority=99, + reset_timer=True ) - # Wait for the download loop to end - last_created_time = get_representation_last_created_time_on_site( - workfile_representation, local_site_id - ) - while ( - last_created_time - >= get_representation_last_created_time_on_site( - get_representation_by_id( - project_name, - workfile_representation["_id"], - fields=["files"], - ), - local_site_id, - ) + while not sync_server.is_representaion_on_site( + project_name, + workfile_representation["_id"], + local_site_id ): sleep(5) From 5db743080ccb22adc89f76fc86f4ca26020503fd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:22:56 +0100 Subject: [PATCH 2111/2550] check if is sync server enabled --- openpype/hooks/pre_copy_last_published_workfile.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 6fd50a64d6..69e3d6efe4 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ 
b/openpype/hooks/pre_copy_last_published_workfile.py @@ -37,6 +37,12 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ + + sync_server = self.modules_manager.get("sync_server") + if not sync_server or not sync_server.enabled: + self.log.deubg("Sync server module is not enabled or available") + return + # Check there is no workfile available last_workfile = self.data.get("last_workfile_path") if os.path.exists(last_workfile): @@ -142,7 +148,6 @@ class CopyLastPublishedWorkfile(PreLaunchHook): return local_site_id = get_local_site_id() - sync_server = self.modules_manager.get("sync_server") sync_server.add_site( project_name, workfile_representation["_id"], From 75e12954ee51d09916032224f4c72be84c12bacf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:36:03 +0100 Subject: [PATCH 2112/2550] removed 'get_representation_last_created_time_on_site' function --- openpype/client/entities.py | 27 ------------------- .../hooks/pre_copy_last_published_workfile.py | 2 -- 2 files changed, 29 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43c2874f57..91d4b499b0 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1368,33 +1368,6 @@ def get_representation_parents(project_name, representation): return parents_by_repre_id[repre_id] -def get_representation_last_created_time_on_site( - representation: dict, site_name: str -) -> datetime: - """Get `created_dt` value for representation on site. - - Args: - representation (dict): Representation to get creation date of - site_name (str): Site from which to get the creation date - - Returns: - datetime: Created time of representation on site - """ - created_time = next( - ( - site.get("created_dt") - for site in representation["files"][0].get("sites", []) - if site["name"] == site_name - ), - None, - ) - if created_time: - return created_time - else: - # Use epoch as 'zero' time - return datetime.utcfromtimestamp(0) - - def get_thumbnail_id_from_source(project_name, src_type, src_id): """Receive thumbnail id from source entity. 
diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 69e3d6efe4..884b0f54b6 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -3,8 +3,6 @@ import shutil from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, - get_representation_by_id, - get_representation_last_created_time_on_site, get_representations, get_subsets, ) From 99bebd82a7a3d9288799d4771a242a56dd58c40a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:51:00 +0100 Subject: [PATCH 2113/2550] fix typo --- openpype/hooks/pre_copy_last_published_workfile.py | 2 +- openpype/modules/sync_server/sync_server_module.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 884b0f54b6..0e561334e1 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -155,7 +155,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): reset_timer=True ) - while not sync_server.is_representaion_on_site( + while not sync_server.is_representation_on_site( project_name, workfile_representation["_id"], local_site_id diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index b505e25d2f..1f65ea9bda 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -925,7 +925,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): else: self.sync_server_thread.reset_timer() - def is_representaion_on_site( + def is_representation_on_site( self, project_name, representation_id, site_id ): # TODO implement From 44cfbf9f2922c80c21875f6f25658e6041c6b677 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Nov 2022 12:27:07 +0100 Subject: [PATCH 2114/2550] added method to check if representation has all files on site --- .../modules/sync_server/sync_server_module.py | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 1f65ea9bda..6250146523 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -926,10 +926,27 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.sync_server_thread.reset_timer() def is_representation_on_site( - self, project_name, representation_id, site_id + self, project_name, representation_id, site_name ): - # TODO implement - return False + """Checks if 'representation_id' has all files avail. 
on 'site_name'""" + representation = get_representation_by_id(project_name, + representation_id, + fields=["_id", "files"]) + if not representation: + return False + + on_site = False + for file_info in representation.get("files", []): + for site in file_info.get("sites", []): + if site["name"] != site_name: + continue + + if (site.get("progress") or site.get("error") or + not site.get("created_dt")): + return False + on_site = True + + return on_site def _reset_timer_with_rest_api(self): # POST to webserver sites to add to representations From 2e6f850b5d9a99d7063d0693414459834a6ba373 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Nov 2022 12:48:52 +0100 Subject: [PATCH 2115/2550] small updates to docstrings --- openpype/modules/sync_server/sync_server_module.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 6250146523..653ee50541 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -143,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): 'project_name' must have synchronization enabled (globally or project only) - Used as a API endpoint from outside applications (Loader etc). + Used as an API endpoint from outside applications (Loader etc). Use 'force' to reset existing site. @@ -153,6 +153,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): site_name (string): name of configured and active site force (bool): reset site if exists priority (int): set priority + reset_timer (bool): if delay timer should be reset, eg. user mark + some representation to be synced manually Throws: SiteAlreadyPresentError - if adding already existing site and @@ -1601,12 +1603,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Args: project_name (string): name of project - force to db connection as each file might come from different collection - new_file_id (string): + new_file_id (string): only present if file synced successfully file (dictionary): info about processed file (pulled from DB) representation (dictionary): parent repr of file (from DB) site (string): label ('gdrive', 'S3') error (string): exception message - progress (float): 0-1 of progress of upload/download + progress (float): 0-0.99 of progress of upload/download priority (int): 0-100 set priority Returns: From 5838f0f6097d57201402d9d5d360755f5c54b93c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 9 Nov 2022 17:09:38 +0100 Subject: [PATCH 2116/2550] clean --- openpype/client/entities.py | 1 - openpype/modules/sync_server/rest_api.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 91d4b499b0..43afccf2f1 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,7 +6,6 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. 
""" -from datetime import datetime import re import collections diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index 51769cd4fb..a7d9dd80b7 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -30,8 +30,8 @@ class SyncServerModuleRestApi: self.reset_timer, ) - async def reset_timer(self, request): - # Force timer to run immediately + async def reset_timer(self, _request): + """Force timer to run immediately.""" self.module.reset_timer() return Response(status=200) From c096279cfcd9bdf5124a2e444da2830ebf300d56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 15 Nov 2022 09:06:09 +0100 Subject: [PATCH 2117/2550] logging format --- .../hooks/pre_copy_last_published_workfile.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 0e561334e1..44144e5fff 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -75,16 +75,20 @@ class CopyLastPublishedWorkfile(PreLaunchHook): self.log.info( ( "Seems like old version of settings is used." - ' Can\'t access custom templates in host "{}".' - ).format(host_name) + ' Can\'t access custom templates in host "{}".'.format( + host_name + ) + ) ) return elif use_last_published_workfile is False: self.log.info( ( 'Project "{}" has turned off to use last published' - ' workfile as first workfile for host "{}"' - ).format(project_name, host_name) + ' workfile as first workfile for host "{}"'.format( + project_name, host_name + ) + ) ) return @@ -114,8 +118,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) if not subset_id: - self.log.debug('No any workfile for asset "{}".').format( - asset_doc["name"] + self.log.debug( + 'No any workfile for asset "{}".'.format(asset_doc["name"]) ) return @@ -131,8 +135,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ( representation for representation in get_representations( - project_name, - version_ids=[last_version_doc["_id"]] + project_name, version_ids=[last_version_doc["_id"]] ) if representation["context"]["task"]["name"] == task_name ), @@ -141,8 +144,10 @@ class CopyLastPublishedWorkfile(PreLaunchHook): if not workfile_representation: self.log.debug( - 'No published workfile for task "{}" and host "{}".' 
- ).format(task_name, host_name) + 'No published workfile for task "{}" and host "{}".'.format( + task_name, host_name + ) + ) return local_site_id = get_local_site_id() @@ -152,13 +157,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): local_site_id, force=True, priority=99, - reset_timer=True + reset_timer=True, ) while not sync_server.is_representation_on_site( - project_name, - workfile_representation["_id"], - local_site_id + project_name, workfile_representation["_id"], local_site_id ): sleep(5) From 460adc767e6b81e62fdab5d2699f0c10c1023e9a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 31 Oct 2022 19:36:44 +0800 Subject: [PATCH 2118/2550] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 115 ++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 openpype/hosts/maya/plugins/load/load_abc_to_standin.py diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py new file mode 100644 index 0000000000..defed4bd73 --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -0,0 +1,115 @@ +import os +import clique + +from openpype.pipeline import ( + load, + get_representation_path +) +from openpype.settings import get_project_settings + + +class AlembicStandinLoader(load.LoaderPlugin): + """Load Alembic as Arnold Standin""" + + families = ["model", "pointcache"] + representations = ["abc"] + + label = "Import Alembic as Standin" + order = -5 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, options): + + import maya.cmds as cmds + import pymel.core as pm + import mtoa.ui.arnoldmenu + from openpype.hosts.maya.api.pipeline import containerise + from openpype.hosts.maya.api.lib import unique_namespace + + version = context["version"] + version_data = version.get("data", {}) + + self.log.info("version_data: {}\n".format(version_data)) + + frameStart = version_data.get("frameStart", None) + + asset = context["asset"]["name"] + namespace = namespace or unique_namespace( + asset + "_", + prefix="_" if asset[0].isdigit() else "", + suffix="_", + ) + + #Root group + label = "{}:{}".format(namespace, name) + root = pm.group(name=label, empty=True) + + settings = get_project_settings(os.environ['AVALON_PROJECT']) + colors = settings["maya"]["load"]["colors"] + + c = colors.get('ass') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) + + transform_name = label + "_ABC" + + standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn()) + standin = standinShape.getParent() + standin.rename(transform_name) + + pm.parent(standin, root) + + # Set the standin filepath + standinShape.dso.set(self.fname) + if frameStart is not None: + standinShape.useFrameExtension.set(1) + + nodes = [root, standin] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, representation): + + import pymel.core as pm + + path = get_representation_path(representation) + + # Update the standin + standins = list() + members = pm.sets(container['objectName'], query=True) + for member in members: + shape = member.getShape() + if (shape and shape.type() == "aiStandIn"): + standins.append(shape) + + for standin in standins: + standin.dso.set(path) + standin.useFrameExtension.set(1) + + container = pm.PyNode(container["objectName"]) + 
container.representation.set(str(representation["_id"])) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + import maya.cmds as cmds + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass From ecbe06bdc8aeee163072f6173b96ba2886b0ebb1 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 31 Oct 2022 19:56:10 +0800 Subject: [PATCH 2119/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index defed4bd73..f39aa56650 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -1,5 +1,4 @@ import os -import clique from openpype.pipeline import ( load, @@ -41,7 +40,7 @@ class AlembicStandinLoader(load.LoaderPlugin): suffix="_", ) - #Root group + # Root group label = "{}:{}".format(namespace, name) root = pm.group(name=label, empty=True) From 2e6974b0640d0ad43bbacb2163a1cf85a7933522 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 1 Nov 2022 20:36:58 +0800 Subject: [PATCH 2120/2550] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index f39aa56650..68aeb24069 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -13,7 +13,7 @@ class AlembicStandinLoader(load.LoaderPlugin): families = ["model", "pointcache"] representations = ["abc"] - label = "Import Alembic as Standin" + label = "Import Alembic as Arnold Standin" order = -5 icon = "code-fork" color = "orange" @@ -21,7 +21,6 @@ class AlembicStandinLoader(load.LoaderPlugin): def load(self, context, name, namespace, options): import maya.cmds as cmds - import pymel.core as pm import mtoa.ui.arnoldmenu from openpype.hosts.maya.api.pipeline import containerise from openpype.hosts.maya.api.lib import unique_namespace @@ -42,7 +41,7 @@ class AlembicStandinLoader(load.LoaderPlugin): # Root group label = "{}:{}".format(namespace, name) - root = pm.group(name=label, empty=True) + root = cmds.group(name=label, empty=True) settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] @@ -55,16 +54,17 @@ class AlembicStandinLoader(load.LoaderPlugin): transform_name = label + "_ABC" - standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn()) - standin = standinShape.getParent() - standin.rename(transform_name) + standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0] + standin = cmds.listRelatives(standinShape, parent=True, typ="transform") + standin = cmds.rename(standin, transform_name) + standinShape = cmds.listRelatives(standin, children=True)[0] - pm.parent(standin, root) + cmds.parent(standin, root) # Set the standin filepath - standinShape.dso.set(self.fname) + cmds.setAttr(standinShape + ".dso", self.fname, type="string") if frameStart is not None: - 
standinShape.useFrameExtension.set(1) + cmds.setAttr(standinShape + ".useFrameExtension", 1) nodes = [root, standin] self[:] = nodes From ce5d4c02fa7f31c7731a04f5580e52f042933353 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 1 Nov 2022 20:38:51 +0800 Subject: [PATCH 2121/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 68aeb24069..5d6c52eac9 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -55,7 +55,8 @@ class AlembicStandinLoader(load.LoaderPlugin): transform_name = label + "_ABC" standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0] - standin = cmds.listRelatives(standinShape, parent=True, typ="transform") + standin = cmds.listRelatives(standinShape, parent=True, + typ="transform") standin = cmds.rename(standin, transform_name) standinShape = cmds.listRelatives(standin, children=True)[0] From c58ef40f15c405c615cb5d4ec8a566a00de5c2eb Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 00:32:22 +0800 Subject: [PATCH 2122/2550] Alembic Loader as Arnold Standin --- .../hosts/maya/plugins/load/load_abc_to_standin.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 5d6c52eac9..94bb974917 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -10,7 +10,7 @@ from openpype.settings import get_project_settings class AlembicStandinLoader(load.LoaderPlugin): """Load Alembic as Arnold Standin""" - families = ["model", "pointcache"] + families = ["animation", "model", "pointcache"] representations = ["abc"] label = "Import Alembic as Arnold Standin" @@ -31,6 +31,7 @@ class AlembicStandinLoader(load.LoaderPlugin): self.log.info("version_data: {}\n".format(version_data)) frameStart = version_data.get("frameStart", None) + frameEnd = version_data.get("frameEnd", None) asset = context["asset"]["name"] namespace = namespace or unique_namespace( @@ -64,7 +65,13 @@ class AlembicStandinLoader(load.LoaderPlugin): # Set the standin filepath cmds.setAttr(standinShape + ".dso", self.fname, type="string") - if frameStart is not None: + cmds.setAttr(standinShape + ".abcFPS", 25) + + if frameStart is None: + cmds.setAttr(standinShape + ".useFrameExtension", 0) + elif frameStart == 1 and frameEnd == 1: + cmds.setAttr(standinShape + ".useFrameExtension", 0) + else: cmds.setAttr(standinShape + ".useFrameExtension", 1) nodes = [root, standin] @@ -93,7 +100,8 @@ class AlembicStandinLoader(load.LoaderPlugin): for standin in standins: standin.dso.set(path) - standin.useFrameExtension.set(1) + standin.useFrameExtension.set(0) + standin.abcFPS.set(25) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From 031465779bc4096a2848a545b52c37a44a010128 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 08:39:52 +0800 Subject: [PATCH 2123/2550] Alembic Loader as Arnold Standin --- .../hosts/maya/plugins/load/load_abc_to_standin.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py 
b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 94bb974917..19e60d33da 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -1,6 +1,7 @@ import os from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -46,6 +47,7 @@ class AlembicStandinLoader(load.LoaderPlugin): settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] + fps = legacy_io.Session["AVALON_FPS"] c = colors.get('ass') if c is not None: @@ -65,12 +67,14 @@ class AlembicStandinLoader(load.LoaderPlugin): # Set the standin filepath cmds.setAttr(standinShape + ".dso", self.fname, type="string") - cmds.setAttr(standinShape + ".abcFPS", 25) + cmds.setAttr(standinShape + ".abcFPS", float(fps)) if frameStart is None: cmds.setAttr(standinShape + ".useFrameExtension", 0) + elif frameStart == 1 and frameEnd == 1: cmds.setAttr(standinShape + ".useFrameExtension", 0) + else: cmds.setAttr(standinShape + ".useFrameExtension", 1) @@ -89,7 +93,7 @@ class AlembicStandinLoader(load.LoaderPlugin): import pymel.core as pm path = get_representation_path(representation) - + fps = legacy_io.Session["AVALON_FPS"] # Update the standin standins = list() members = pm.sets(container['objectName'], query=True) @@ -101,7 +105,7 @@ class AlembicStandinLoader(load.LoaderPlugin): for standin in standins: standin.dso.set(path) standin.useFrameExtension.set(0) - standin.abcFPS.set(25) + standin.abcFPS.set(float(fps)) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From 6e94a81393884cab2c3b2798e2c765c08617c4d1 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:09:13 +0800 Subject: [PATCH 2124/2550] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 19e60d33da..a192d9c357 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -28,11 +28,10 @@ class AlembicStandinLoader(load.LoaderPlugin): version = context["version"] version_data = version.get("data", {}) - + family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) - + self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) - frameEnd = version_data.get("frameEnd", None) asset = context["asset"]["name"] namespace = namespace or unique_namespace( @@ -48,12 +47,14 @@ class AlembicStandinLoader(load.LoaderPlugin): settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] fps = legacy_io.Session["AVALON_FPS"] - - c = colors.get('ass') + c = colors.get(family[0]) if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - c[0], c[1], c[2]) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" @@ -72,7 +73,7 @@ class AlembicStandinLoader(load.LoaderPlugin): if frameStart is None: cmds.setAttr(standinShape + ".useFrameExtension", 0) - elif frameStart == 1 and frameEnd == 1: + elif "model" in family: cmds.setAttr(standinShape + ".useFrameExtension", 0) else: From 66608300969101ee75cf68a8c7a86dfd7b7710d4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 
19:10:41 +0800 Subject: [PATCH 2125/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index a192d9c357..8ce1aee3ac 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -30,7 +30,6 @@ class AlembicStandinLoader(load.LoaderPlugin): version_data = version.get("data", {}) family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) - self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) asset = context["asset"]["name"] @@ -51,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From 7c3f625fe324328ed7e7a93e4adff4da7c1d6e8e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:11:56 +0800 Subject: [PATCH 2126/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 8ce1aee3ac..d93c85f8a4 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From bee7b17ff2b612fab87c95cdfa4143659453d049 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:13:04 +0800 Subject: [PATCH 2127/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index d93c85f8a4..dafe999d9d 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,9 +50,9 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) ) transform_name = label + "_ABC" From 3649ee7e4e163f45472c13f5f4bb74a65175e979 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:15:11 +0800 Subject: [PATCH 2128/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index dafe999d9d..d93c85f8a4 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ 
b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,9 +50,9 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) ) transform_name = label + "_ABC" From cd27df0e8d35ccb94a4cfde09f399cbb6319a1c4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:16:10 +0800 Subject: [PATCH 2129/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index d93c85f8a4..8ce1aee3ac 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From 3186acc83e967f726ceec6c975fc74d6ea6cd8a2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:19:11 +0800 Subject: [PATCH 2130/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 8ce1aee3ac..9583063c7e 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -30,6 +30,7 @@ class AlembicStandinLoader(load.LoaderPlugin): version_data = version.get("data", {}) family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) + self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) asset = context["asset"]["name"] @@ -48,12 +49,12 @@ class AlembicStandinLoader(load.LoaderPlugin): fps = legacy_io.Session["AVALON_FPS"] c = colors.get(family[0]) if c is not None: + r = (float(c[0]) / 255) + g = (float(c[1]) / 255) + b = (float(c[2]) / 255) cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + r, g, b) transform_name = label + "_ABC" From cf8bd8eb59590df3b2a196d68bbb47e29fcd862f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 17:57:32 +0800 Subject: [PATCH 2131/2550] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 9583063c7e..605a492e4d 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -98,6 +98,7 @@ class AlembicStandinLoader(load.LoaderPlugin): # Update the standin standins = list() members = pm.sets(container['objectName'], query=True) + self.log.info("container:{}".format(container)) for member in members: shape = member.getShape() if (shape and shape.type() == "aiStandIn"): @@ -105,8 +106,11 
@@ class AlembicStandinLoader(load.LoaderPlugin): for standin in standins: standin.dso.set(path) - standin.useFrameExtension.set(0) standin.abcFPS.set(float(fps)) + if "modelMain" in container['objectName']: + standin.useFrameExtension.set(0) + else: + standin.useFrameExtension.set(1) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From 93bff0c038c262290c5d8e0b5e28847c3a210777 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 16 Nov 2022 03:40:10 +0000 Subject: [PATCH 2132/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 1953d0d6a5..268f33083a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.4" +__version__ = "3.14.7-nightly.5" From 35b43f34ebbea13407154445369a4f8cdb15cf78 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Nov 2022 23:23:26 +0000 Subject: [PATCH 2133/2550] Bump loader-utils from 1.4.1 to 1.4.2 in /website Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.1 to 1.4.2. - [Release notes](https://github.com/webpack/loader-utils/releases) - [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.2/CHANGELOG.md) - [Commits](https://github.com/webpack/loader-utils/compare/v1.4.1...v1.4.2) --- updated-dependencies: - dependency-name: loader-utils dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 177a4a3802..220a489dfa 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4812,9 +4812,9 @@ loader-runner@^4.2.0: integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== loader-utils@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" - integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== + version "1.4.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" + integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From ece1e8b9137d5a95d412b42b0e2b2fc5b4a9176a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Nov 2022 14:51:10 +0100 Subject: [PATCH 2134/2550] OP-4394 - Hound --- .../webpublisher/plugins/publish/collect_published_files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 181f8b4ab7..79ed499a20 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -253,7 +253,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): if ext: ext = ext.lower() if ext.startswith("."): - ext = ext[1:] + ext = ext[1:] lower_extensions.add(ext) # all extensions setting From 4b95ad68168b138070171c862e0afaf4c08fb9f0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Nov 2022 15:57:30 +0100 Subject: [PATCH 2135/2550] 
OP-4394 - use lowercased extension in ExtractReview There might be uppercased extension sent in by accident (.PNG), which would make all checks against set of extension not work. --- openpype/plugins/publish/extract_review.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 1f9b30fba3..982bd9dc24 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -152,7 +152,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if input_ext.startswith("."): input_ext = input_ext[1:] - if input_ext not in self.supported_exts: + if input_ext.lower() not in self.supported_exts: self.log.info( "Representation has unsupported extension \"{}\"".format( input_ext @@ -179,7 +179,7 @@ class ExtractReview(pyblish.api.InstancePlugin): single_frame_image = False if len(input_filepaths) == 1: ext = os.path.splitext(input_filepaths[0])[-1] - single_frame_image = ext in IMAGE_EXTENSIONS + single_frame_image = ext.lower() in IMAGE_EXTENSIONS filtered_defs = [] for output_def in output_defs: @@ -501,7 +501,7 @@ class ExtractReview(pyblish.api.InstancePlugin): first_sequence_frame += handle_start ext = os.path.splitext(repre["files"][0])[1].replace(".", "") - if ext in self.alpha_exts: + if ext.lower() in self.alpha_exts: input_allow_bg = True return { @@ -934,6 +934,8 @@ class ExtractReview(pyblish.api.InstancePlugin): if output_ext.startswith("."): output_ext = output_ext[1:] + output_ext = output_ext.lower() + # Store extension to representation new_repre["ext"] = output_ext From 5c37d91138332442fa1d746003f8b16a7e623f2e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 17:20:44 +0100 Subject: [PATCH 2136/2550] uncomment subimages because multipart exr is created which actually can't ffmpeg handle --- openpype/lib/transcoding.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 5a57026496..6f571ea522 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -476,7 +476,7 @@ def convert_for_ffmpeg( if input_frame_start is not None and input_frame_end is not None: is_sequence = int(input_frame_end) != int(input_frame_start) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -524,10 +524,8 @@ def convert_for_ffmpeg( input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) "--ch", channels_arg, - # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed with oiio # Use first subimage - # "--subimage", "0" + "--subimage", "0" ]) # Add frame definitions to arguments @@ -621,7 +619,7 @@ def convert_input_paths_for_ffmpeg( " \".exr\" extension. Got \"{}\"." 
).format(ext)) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -639,6 +637,7 @@ def convert_input_paths_for_ffmpeg( red, green, blue, alpha = review_channels input_channels = [red, green, blue] + # TODO find subimage inder where rgba is available for multipart exrs channels_arg = "R={},G={},B={}".format(red, green, blue) if alpha is not None: channels_arg += ",A={}".format(alpha) @@ -671,11 +670,8 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be put to top stack # (and output) "--ch", channels_arg, - # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed - # with oiiotool # Use first subimage - # "--subimage", "0" + "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From c028bb2a9446f5a7891a7a42427a62aa0f3a0886 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Wed, 16 Nov 2022 18:37:24 +0100 Subject: [PATCH 2137/2550] Update openpype/client/entities.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/client/entities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index bbef8dc65e..38d6369d09 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -389,7 +389,7 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): returned if 'None' is passed. Returns: - Union[str, Dict]: None if subset with specified filters was not found. + Union[None, Dict[str, Any]]: None if subset with specified filters was not found. or dict subset document which can be reduced to specified 'fields'. 
From 24da47332bbd0951acd621fbb54274153a8a1e02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 16 Nov 2022 18:56:18 +0100 Subject: [PATCH 2138/2550] :bug: fix representation creation --- .../traypublisher/plugins/publish/collect_online_file.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py index 459ee463aa..82c4870fe4 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -13,12 +13,11 @@ class CollectOnlineFile(pyblish.api.InstancePlugin): def process(self, instance): file = Path(instance.data["creator_attributes"]["path"]) - if not instance.data.get("representations"): - instance.data["representations"] = [ - { + instance.data["representations"].append( + { "name": file.suffix.lstrip("."), "ext": file.suffix.lstrip("."), "files": file.name, "stagingDir": file.parent.as_posix() - } - ] + } + ) From 45c6a9ab93a8c5ae0b830190eaabd559d8c369b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 16 Nov 2022 18:56:36 +0100 Subject: [PATCH 2139/2550] :recycle: refactor code --- .../plugins/create/create_online.py | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index 5a6373730d..19f956a50e 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -38,23 +38,31 @@ class OnlineCreator(TrayPublishCreator): return "fa.file" def create(self, subset_name, instance_data, pre_create_data): - if not pre_create_data.get("representation_file")["filenames"]: + repr_file = pre_create_data.get("representation_file") + if not repr_file: raise CreatorError("No files specified") - asset = get_asset_by_name(self.project_name, instance_data["asset"]) - origin_basename = Path(pre_create_data.get( - "representation_file")["filenames"][0]).stem + files = repr_file.get("filenames") + if not files: + # this should never happen + raise CreatorError("Missing files from representation") + origin_basename = Path(files[0]).stem + + asset = get_asset_by_name( + self.project_name, instance_data["asset"], fields=["_id"]) if get_subset_by_name( - self.project_name, origin_basename, asset["_id"]): + self.project_name, origin_basename, asset["_id"], + fields=["_id"]): raise CreatorError(f"subset with {origin_basename} already " "exists in selected asset") instance_data["originalBasename"] = origin_basename subset_name = origin_basename - path = (Path(pre_create_data.get("representation_file")["directory"]) / pre_create_data.get("representation_file")["filenames"][0]).as_posix() # noqa - instance_data["creator_attributes"] = {"path": path} + instance_data["creator_attributes"] = { + "path": (Path(repr_file["directory"]) / files[0]).as_posix() + } # Create new instance new_instance = CreatedInstance(self.family, subset_name, From 3357392e71c5a1b53747d56c3430897f68a8995b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 16 Nov 2022 18:59:21 +0100 Subject: [PATCH 2140/2550] :rotating_light: fix :dog: --- openpype/client/entities.py | 4 ++-- .../traypublisher/plugins/publish/collect_online_file.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 
deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 38d6369d09..c415be8816 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -389,8 +389,8 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): returned if 'None' is passed. Returns: - Union[None, Dict[str, Any]]: None if subset with specified filters was not found. - or dict subset document which can be reduced to + Union[None, Dict[str, Any]]: None if subset with specified filters was + not found or dict subset document which can be reduced to specified 'fields'. """ diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py index 82c4870fe4..a3f86afa13 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -15,9 +15,9 @@ class CollectOnlineFile(pyblish.api.InstancePlugin): instance.data["representations"].append( { - "name": file.suffix.lstrip("."), - "ext": file.suffix.lstrip("."), - "files": file.name, - "stagingDir": file.parent.as_posix() + "name": file.suffix.lstrip("."), + "ext": file.suffix.lstrip("."), + "files": file.name, + "stagingDir": file.parent.as_posix() } ) From d5b1f58fd25c4fac3bfdc1bc1311ee2913f70498 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 17 Nov 2022 19:16:13 +0800 Subject: [PATCH 2141/2550] layout publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index c6eca0b05e..f77835d47f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -39,10 +39,15 @@ class ExtractLayout(publish.Extractor): assert len(container_list) == 1, \ "Please create instance with loaded asset" containers = cmds.sets(project_container, query=True) + load_asset = asset.split(':')[0] for con in containers: - if "_CON" not in con: + ass_transform = cmds.listRelatives(con, allParents=True)[0] + if load_asset not in ass_transform: assert containers == [], \ "No container found for {}".format(asset) + if "_CON" not in con: + assert containers == [], \ + "Container missing for {}".format(asset) container = con representation_id = cmds.getAttr( From 64e5af230a3509ef16d8c0ee0fc826284960232b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Nov 2022 12:54:50 +0100 Subject: [PATCH 2142/2550] OP-4394 - removed explicit lower from repre ext to not shadow upper case issue Using lower here would hide possibly broken representation, as we would expect both repre["ext"] and repre["name"] be lowercased. In case the aren't review won't get created >> someone will notice and fix issues on source representation. 
--- openpype/plugins/publish/extract_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 982bd9dc24..f299d1c6e9 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -152,7 +152,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if input_ext.startswith("."): input_ext = input_ext[1:] - if input_ext.lower() not in self.supported_exts: + if input_ext not in self.supported_exts: self.log.info( "Representation has unsupported extension \"{}\"".format( input_ext From 69ddc20e3c4003db2285d2095f45c0e585cae001 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 14:34:42 +0100 Subject: [PATCH 2143/2550] include secrets module to python_2 vendor --- .../vendor/python/python_2/secrets/LICENSE | 21 +++ .../python/python_2/secrets/__init__.py | 16 +++ .../vendor/python/python_2/secrets/secrets.py | 132 ++++++++++++++++++ 3 files changed, 169 insertions(+) create mode 100644 openpype/vendor/python/python_2/secrets/LICENSE create mode 100644 openpype/vendor/python/python_2/secrets/__init__.py create mode 100644 openpype/vendor/python/python_2/secrets/secrets.py diff --git a/openpype/vendor/python/python_2/secrets/LICENSE b/openpype/vendor/python/python_2/secrets/LICENSE new file mode 100644 index 0000000000..d3211e4d9f --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Scaleway + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/openpype/vendor/python/python_2/secrets/__init__.py b/openpype/vendor/python/python_2/secrets/__init__.py new file mode 100644 index 0000000000..c29ee61be1 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + + +__version__ = "1.0.6" + +# Emulates __all__ for Python2 +from .secrets import ( + choice, + randbelow, + randbits, + SystemRandom, + token_bytes, + token_hex, + token_urlsafe, + compare_digest +) diff --git a/openpype/vendor/python/python_2/secrets/secrets.py b/openpype/vendor/python/python_2/secrets/secrets.py new file mode 100644 index 0000000000..967d2862d9 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/secrets.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +"""Generate cryptographically strong pseudo-random numbers suitable for + +managing secrets such as account authentication, tokens, and similar. + + +See PEP 506 for more information. 
+ +https://www.python.org/dev/peps/pep-0506/ + + +""" + + +__all__ = ['choice', 'randbelow', 'randbits', 'SystemRandom', + + 'token_bytes', 'token_hex', 'token_urlsafe', + + 'compare_digest', + + ] + +import os +import sys +from random import SystemRandom + +import base64 + +import binascii + + +# hmac.compare_digest did appear in python 2.7.7 +if sys.version_info >= (2, 7, 7): + from hmac import compare_digest +else: + # If we use an older python version, we will define an equivalent method + def compare_digest(a, b): + """Compatibility compare_digest method for python < 2.7. + This method is NOT cryptographically secure and may be subject to + timing attacks, see https://docs.python.org/2/library/hmac.html + """ + return a == b + + +_sysrand = SystemRandom() + + +randbits = _sysrand.getrandbits + +choice = _sysrand.choice + + +def randbelow(exclusive_upper_bound): + + """Return a random int in the range [0, n).""" + + if exclusive_upper_bound <= 0: + + raise ValueError("Upper bound must be positive.") + + return _sysrand._randbelow(exclusive_upper_bound) + + +DEFAULT_ENTROPY = 32 # number of bytes to return by default + + +def token_bytes(nbytes=None): + + """Return a random byte string containing *nbytes* bytes. + + + If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_bytes(16) #doctest:+SKIP + + b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' + + + """ + + if nbytes is None: + + nbytes = DEFAULT_ENTROPY + + return os.urandom(nbytes) + + +def token_hex(nbytes=None): + + """Return a random text string, in hexadecimal. + + + The string has *nbytes* random bytes, each byte converted to two + + hex digits. If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_hex(16) #doctest:+SKIP + + 'f9bf78b9a18ce6d46a0cd2b0b86df9da' + + + """ + + return binascii.hexlify(token_bytes(nbytes)).decode('ascii') + + +def token_urlsafe(nbytes=None): + + """Return a random URL-safe text string, in Base64 encoding. + + + The string has *nbytes* random bytes. If *nbytes* is ``None`` + + or not supplied, a reasonable default is used. + + + >>> token_urlsafe(16) #doctest:+SKIP + + 'Drmhze6EPcv0fN_81Bj-nA' + + + """ + + tok = token_bytes(nbytes) + + return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii') From 64a1e55170153504fcb4ff892a8030bc14ef034f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Nov 2022 15:59:58 +0100 Subject: [PATCH 2144/2550] enhance speed of collect audio by converting it to context plugin --- openpype/plugins/publish/collect_audio.py | 175 +++++++++++++++------- 1 file changed, 124 insertions(+), 51 deletions(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 7d53b24e54..db567f8b8f 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -1,21 +1,27 @@ +import collections import pyblish.api from openpype.client import ( - get_last_version_by_subset_name, + get_assets, + get_subsets, + get_last_versions, get_representations, ) -from openpype.pipeline import ( - legacy_io, - get_representation_path, -) +from openpype.pipeline import get_representation_path_with_anatomy -class CollectAudio(pyblish.api.InstancePlugin): +class CollectAudio(pyblish.api.ContextPlugin): """Collect asset's last published audio. 
The audio subset name searched for is defined in: project settings > Collect Audio + + Note: + The plugin was instance plugin but because of so much queries the + plugin was slowing down whole collection phase a lot thus was + converted to context plugin which requires only 4 queries top. """ + label = "Collect Asset Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] @@ -39,67 +45,134 @@ class CollectAudio(pyblish.api.InstancePlugin): audio_subset_name = "audioMain" - def process(self, instance): - if instance.data.get("audio"): - self.log.info( - "Skipping Audio collecion. It is already collected" - ) + def process(self, context): + # Fake filtering by family inside context plugin + filtered_instances = [] + for instance in pyblish.api.instances_by_plugin( + context, self.__class__ + ): + # Skip instances that already have audio filled + if instance.data.get("audio"): + self.log.info( + "Skipping Audio collecion. It is already collected" + ) + continue + filtered_instances.append(instance) + + # Skip if none of instances remained + if not filtered_instances: return # Add audio to instance if exists. + instances_by_asset_name = collections.defaultdict(list) + for instance in filtered_instances: + asset_name = instance.data["asset"] + instances_by_asset_name[asset_name].append(instance) + + asset_names = set(instances_by_asset_name.keys()) self.log.info(( - "Searching for audio subset '{subset}'" - " in asset '{asset}'" + "Searching for audio subset '{subset}' in assets {assets}" ).format( subset=self.audio_subset_name, - asset=instance.data["asset"] + assets=", ".join([ + '"{}"'.format(asset_name) + for asset_name in asset_names + ]) )) - repre_doc = self._get_repre_doc(instance) + # Query all required documents + project_name = context.data["projectName"] + anatomy = context.data["anatomy"] + repre_docs_by_asset_names = self.query_representations( + project_name, asset_names) - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.info("Audio Data added to instance ...") + for asset_name, instances in instances_by_asset_name.items(): + repre_docs = repre_docs_by_asset_names[asset_name] + if not repre_docs: + continue - def _get_repre_doc(self, instance): - cache = instance.context.data.get("__cache_asset_audio") - if cache is None: - cache = {} - instance.context.data["__cache_asset_audio"] = cache - asset_name = instance.data["asset"] + repre_doc = repre_docs[0] + repre_path = get_representation_path_with_anatomy( + repre_doc, anatomy + ) + for instance in instances: + instance.data["audio"] = [{ + "offset": 0, + "filename": repre_path + }] + self.log.info("Audio Data added to instance ...") - # first try to get it from cache - if asset_name in cache: - return cache[asset_name] + def query_representations(self, project_name, asset_names): + """Query representations related to audio subsets for passed assets. - project_name = legacy_io.active_project() + Args: + project_name (str): Project in which we're looking for all + entities. + asset_names (Iterable[str]): Asset names where to look for audio + subsets and their representations. - # Find latest versions document - last_version_doc = get_last_version_by_subset_name( + Returns: + collections.defaultdict[str, List[Dict[Str, Any]]]: Representations + related to audio subsets by asset name. 
+ """ + + output = collections.defaultdict(list) + # Query asset documents + asset_docs = get_assets( project_name, - self.audio_subset_name, - asset_name=asset_name, + asset_names=asset_names, fields=["_id"] ) - repre_doc = None - if last_version_doc: - # Try to find it's representation (Expected there is only one) - repre_docs = list(get_representations( - project_name, version_ids=[last_version_doc["_id"]] - )) - if not repre_docs: - self.log.warning( - "Version document does not contain any representations" - ) - else: - repre_doc = repre_docs[0] + asset_id_by_name = {} + for asset_doc in asset_docs: + asset_id_by_name[asset_doc["name"]] = asset_doc["_id"] + asset_ids = set(asset_id_by_name.values()) - # update cache - cache[asset_name] = repre_doc + # Query subsets with name define by 'audio_subset_name' attr + # - one or none subsets with the name should be available on an asset + subset_docs = get_subsets( + project_name, + subset_names=[self.audio_subset_name], + asset_ids=asset_ids, + fields=["_id", "parent"] + ) + subset_id_by_asset_id = {} + for subset_doc in subset_docs: + asset_id = subset_doc["parent"] + subset_id_by_asset_id[asset_id] = subset_doc["_id"] - return repre_doc + subset_ids = set(subset_id_by_asset_id.values()) + if not subset_ids: + return output + + # Find all latest versions for the subsets + version_docs_by_subset_id = get_last_versions( + project_name, subset_ids=subset_ids, fields=["_id", "parent"] + ) + version_id_by_subset_id = { + subset_id: version_doc["_id"] + for subset_id, version_doc in version_docs_by_subset_id.items() + } + version_ids = set(version_id_by_subset_id.values()) + if not version_ids: + return output + + # Find representations under latest versions of audio subsets + repre_docs = get_representations( + project_name, version_ids=version_ids + ) + repre_docs_by_version_id = collections.defaultdict(list) + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + repre_docs_by_version_id[version_id].append(repre_doc) + + if not repre_docs_by_version_id: + return output + + for asset_name in asset_names: + asset_id = asset_id_by_name.get(asset_name) + subset_id = subset_id_by_asset_id.get(asset_id) + version_id = version_id_by_subset_id.get(subset_id) + output[asset_name] = repre_docs_by_version_id[version_id] + return output From 2db4cc43aae80fa8fb203ba775fff6fbe19a23c0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Nov 2022 17:52:57 +0100 Subject: [PATCH 2145/2550] Fix - typo --- openpype/hooks/pre_copy_last_published_workfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 44144e5fff..26b43c39cb 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -38,7 +38,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): sync_server = self.modules_manager.get("sync_server") if not sync_server or not sync_server.enabled: - self.log.deubg("Sync server module is not enabled or available") + self.log.debug("Sync server module is not enabled or available") return # Check there is no workfile available From d076de0d077197bf3afc64edf9ab08837f2db549 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Nov 2022 18:47:37 +0100 Subject: [PATCH 2146/2550] add more information about where ftrack service is storing versions or where is looking for versions --- openpype/modules/ftrack/scripts/sub_event_status.py | 11 +++++++++++ 1 file changed, 
11 insertions(+) diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index 6c7ecb8351..eb3f63c04b 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -7,6 +7,8 @@ import signal import socket import datetime +import appdirs + import ftrack_api from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( @@ -253,6 +255,15 @@ class StatusFactory: ) }) + items.append({ + "type": "label", + "value": ( + "Local versions dir: {}
    Version repository path: {}" + ).format( + appdirs.user_data_dir("openpype", "pypeclub"), + os.environ.get("OPENPYPE_PATH") + ) + }) items.append({"type": "label", "value": "---"}) return items From 996cf3dcf95cd5042b2433780406ec5e74f1ae30 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 20:52:55 +0100 Subject: [PATCH 2147/2550] Nuke: load image first frame --- openpype/hosts/nuke/plugins/load/load_image.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 3e81ef999b..3c5d4a7fc1 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -62,7 +62,9 @@ class LoadImage(load.LoaderPlugin): def load(self, context, name, namespace, options): self.log.info("__ options: `{}`".format(options)) - frame_number = options.get("frame_number", 1) + frame_number = options.get( + "frame_number", int(nuke.root()["first_frame"].getValue()) + ) version = context['version'] version_data = version.get("data", {}) From 8d1e720a889ebabc985505f0165ec11c4d6f7342 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 21:19:23 +0100 Subject: [PATCH 2148/2550] Nuke: reset tab to first native tab --- openpype/hosts/nuke/api/pipeline.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c343c635fa..fb707ca44c 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -364,6 +364,9 @@ def containerise(node, set_avalon_knob_data(node, data) + # set tab to first native + node.setTab(0) + return node From c06f6891e8b021ab6e67f70a080202161059d8e8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 21:19:46 +0100 Subject: [PATCH 2149/2550] nuke: close property panel after node creation --- openpype/hosts/nuke/plugins/load/load_camera_abc.py | 3 +++ openpype/hosts/nuke/plugins/load/load_clip.py | 3 +++ openpype/hosts/nuke/plugins/load/load_effects.py | 3 +++ openpype/hosts/nuke/plugins/load/load_effects_ip.py | 3 +++ openpype/hosts/nuke/plugins/load/load_image.py | 4 ++++ openpype/hosts/nuke/plugins/load/load_model.py | 4 ++++ openpype/hosts/nuke/plugins/load/load_script_precomp.py | 3 +++ 7 files changed, 23 insertions(+) diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index f5dfc8c0ab..9fef7424c8 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -65,6 +65,9 @@ class AlembicCameraLoader(load.LoaderPlugin): object_name, file), inpanel=False ) + # hide property panel + camera_node.hideControlPanel() + camera_node.forceValidate() camera_node["frame_rate"].setValue(float(fps)) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index b17356c5c7..565d777811 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -145,6 +145,9 @@ class LoadClip(plugin.NukeLoader): "Read", "name {}".format(read_name)) + # hide property panel + read_node.hideControlPanel() + # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index d164e0604c..cef4b0a5fc 100644 --- 
a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -89,6 +89,9 @@ class LoadEffects(load.LoaderPlugin): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 44565c139d..9bd40be816 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -90,6 +90,9 @@ class LoadEffectsInputProcess(load.LoaderPlugin): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 3e81ef999b..f7ce20eee9 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -112,6 +112,10 @@ class LoadImage(load.LoaderPlugin): r = nuke.createNode( "Read", "name {}".format(read_name)) + + # hide property panel + r.hideControlPanel() + r["file"].setValue(file) # Set colorspace defined in version data diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 151401bad3..ad985e83c6 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -63,6 +63,10 @@ class AlembicModelLoader(load.LoaderPlugin): object_name, file), inpanel=False ) + + # hide property panel + model_node.hideControlPanel() + model_node.forceValidate() # Ensure all items are imported and selected. 
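Both ways of keeping the properties bin closed appear in this commit; a small sketch against Nuke's Python API (the node name is illustrative):

    import nuke

    # Loaders that build the node directly can ask for no panel up front...
    node = nuke.createNode("Read", "name exampleRead", inpanel=False)
    # ...and this commit additionally hides the control panel after creation.
    node.hideControlPanel()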
diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 21e384b538..f0972f85d2 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -71,6 +71,9 @@ class LinkAsGroup(load.LoaderPlugin): "Precomp", "file {}".format(file)) + # hide property panel + P.hideControlPanel() + # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) self.log.info("colorspace: {}\n".format(colorspace)) From 554b3b256c4bfb368bb808376088e3315df54127 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 19 Nov 2022 03:37:23 +0000 Subject: [PATCH 2150/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 268f33083a..0116b49f4d 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.5" +__version__ = "3.14.7-nightly.6" From bc35e8b3a3c879e74bd4b79c65d4d87ec8394c97 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 19 Nov 2022 15:51:48 +0800 Subject: [PATCH 2151/2550] instances for bb geometry and publisher for bb geometry --- .../maya/plugins/create/create_proxy_abc.py | 42 ++++++++ .../maya/plugins/publish/collect_proxy_abc.py | 14 +++ .../maya/plugins/publish/extract_proxy_abc.py | 96 +++++++++++++++++++ .../defaults/project_settings/maya.json | 6 ++ .../schemas/schema_maya_create.json | 4 + 5 files changed, 162 insertions(+) create mode 100644 openpype/hosts/maya/plugins/create/create_proxy_abc.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_proxy_abc.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_proxy_abc.py diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py new file mode 100644 index 0000000000..f9671dfccf --- /dev/null +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -0,0 +1,42 @@ +from openpype.hosts.maya.api import ( + lib, + plugin +) + + +class CreateProxyAlembic(plugin.Creator): + """Proxy Alembic for animated data""" + + name = "proxyAbcMain" + label = "Proxy Alembic" + family = "proxyAbc" + icon = "gears" + write_color_sets = False + write_face_sets = False + + + def __init__(self, *args, **kwargs): + super(CreateProxyAlembic, self).__init__(*args, **kwargs) + + # Add animation data + self.data.update(lib.collect_animation_data()) + + # Vertex colors with the geometry. + self.data["writeColorSets"] = self.write_color_sets + # Vertex colors with the geometry. 
+ self.data["writeFaceSets"] = self.write_face_sets + # Include parent groups + self.data["includeParentHierarchy"] = False + # only nodes which are visible + self.data["visibleOnly"] = False + # Default to exporting world-space + self.data["worldSpace"] = True + + # Creating a single bounding box per shape selected + self.data["single"] = False + # name suffix for the bounding box + self.data["nameSuffix"] = "_BBox" + + # Add options for custom attributes + self.data["attr"] = "" + self.data["attrPrefix"] = "" diff --git a/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py b/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py new file mode 100644 index 0000000000..2a7890fcac --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py @@ -0,0 +1,14 @@ +import pyblish.api + +class CollectProxyAlembic(pyblish.api.InstancePlugin): + """Collect Proxy Alembic for instance.""" + + order = pyblish.api.CollectorOrder + 0.45 + families = ["proxyAbc"] + label = "Collect Proxy Alembic" + hosts = ["maya"] + + def process(self, instance): + """Collector entry point.""" + if not instance.data.get('families'): + instance.data["families"] = [] diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py new file mode 100644 index 0000000000..b1306edac5 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -0,0 +1,96 @@ +import os + +from maya import cmds + +from openpype.pipeline import publish +from openpype.hosts.maya.api.lib import ( + extract_alembic, + suspended_refresh, + maintained_selection, + iter_visible_nodes_in_range +) + + +class ExtractAlembic(publish.Extractor): + """Produce an alembic for bounding box geometry + """ + + label = "Extract Proxy (Alembic)" + hosts = ["maya"] + families = ["proxyAbc"] + + def process(self, instance): + + nodes, roots = self.get_members_and_roots(instance) + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + attrs = instance.data.get("attr", "").split(";") + attrs = [value for value in attrs if value.strip()] + attrs += ["cbId"] + + attr_prefixes = instance.data.get("attrPrefix", "").split(";") + attr_prefixes = [value for value in attr_prefixes if value.strip()] + + self.log.info("Extracting Proxy Meshes...") + + dirname = self.staging_dir(instance) + filename = "{name}.abc".format(**instance.data) + path = os.path.join(dirname, filename) + + options = { + "step": instance.data.get("step", 1.0), + "attr": attrs, + "attrPrefix": attr_prefixes, + "writeVisibility": True, + "writeCreases": True, + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False), + "uvWrite": True, + "selection": True, + "worldSpace": instance.data.get("worldSpace", True) + } + + if not instance.data.get("includeParentHierarchy", True): + + options["root"] = roots + if instance.data.get("visibleOnly", False): + nodes = list(iter_visible_nodes_in_range(nodes, + start=start, + end=end)) + with suspended_refresh(): + with maintained_selection(): + # TODO: select the bb geometry + self.create_proxy_geometry(instance, + start, + end) + extract_alembic(file=path, + startFrame=start, + endFrame=end, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + 'stagingDir': dirname + } + 
instance.data["representations"].append(representation) + + instance.context.data["cleanupFullPaths"].append(path) + + self.log.info("Extracted {} to {}".format(instance, dirname)) + #TODO: delete the bounding box + + def get_members_and_roots(self, instance): + return instance[:], instance.data.get("setMembers") + + def create_proxy_geometry(self, instance, start, end): + + inst_selection = cmds.ls(instance.name, long=True) + name_suffix = instance.data.get("nameSuffix") + if instance.data.get("single", True): + pass diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a..cb9af2c2b6 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -197,6 +197,12 @@ "Main" ] }, + "CreateProxyAlembic": { + "enabled": true, + "defaults": [ + "Main" + ] + }, "CreateRenderSetup": { "enabled": true, "defaults": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index bc6520474d..8512736211 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -237,6 +237,10 @@ "key": "CreateMayaScene", "label": "Create Maya Scene" }, + { + "key": "CreateProxyAlembic", + "label": "Create Proxy Alembic" + }, { "key": "CreateRenderSetup", "label": "Create Render Setup" From 33974c39d4aac0bb28ebd87e007acc166b8cd003 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 19 Nov 2022 17:13:50 +0800 Subject: [PATCH 2152/2550] aov Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 58fcd2d281..6fde0df162 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1110,7 +1110,7 @@ class RenderProductsRedshift(ARenderProducts): if light_groups_enabled: return products - beauty_name = "Beauty_other" if has_beauty_aov else "" + beauty_name = "BeautyAux" if has_beauty_aov else "" for camera in cameras: products.insert(0, RenderProduct(productName=beauty_name, From 75175b8a747a06c14c52a8d95bb951bc083e406a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 21 Nov 2022 16:02:13 +0800 Subject: [PATCH 2153/2550] create and publish bb geometry --- openpype/hosts/maya/plugins/load/actions.py | 2 + .../maya/plugins/publish/collect_proxy_abc.py | 14 ------ .../maya/plugins/publish/extract_proxy_abc.py | 43 +++++++++++++------ .../plugins/publish/validate_frame_range.py | 1 + .../defaults/project_settings/maya.json | 6 +++ .../schemas/schema_maya_publish.json | 20 +++++++++ 6 files changed, 58 insertions(+), 28 deletions(-) delete mode 100644 openpype/hosts/maya/plugins/publish/collect_proxy_abc.py diff --git a/openpype/hosts/maya/plugins/load/actions.py b/openpype/hosts/maya/plugins/load/actions.py index eca1b27f34..9cc9180d6e 100644 --- a/openpype/hosts/maya/plugins/load/actions.py +++ b/openpype/hosts/maya/plugins/load/actions.py @@ -14,6 +14,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): families = ["animation", "camera", + "proxyAbc", "pointcache"] representations = ["abc"] @@ -48,6 +49,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): families = ["animation", "camera", + "proxyAbc", 
"pointcache"] representations = ["abc"] diff --git a/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py b/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py deleted file mode 100644 index 2a7890fcac..0000000000 --- a/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py +++ /dev/null @@ -1,14 +0,0 @@ -import pyblish.api - -class CollectProxyAlembic(pyblish.api.InstancePlugin): - """Collect Proxy Alembic for instance.""" - - order = pyblish.api.CollectorOrder + 0.45 - families = ["proxyAbc"] - label = "Collect Proxy Alembic" - hosts = ["maya"] - - def process(self, instance): - """Collector entry point.""" - if not instance.data.get('families'): - instance.data["families"] = [] diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index b1306edac5..4607fd8a4b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -11,7 +11,7 @@ from openpype.hosts.maya.api.lib import ( ) -class ExtractAlembic(publish.Extractor): +class ExtractProxyAlembic(publish.Extractor): """Produce an alembic for bounding box geometry """ @@ -22,6 +22,8 @@ class ExtractAlembic(publish.Extractor): def process(self, instance): nodes, roots = self.get_members_and_roots(instance) + + # Collect the start and end including handles start = float(instance.data.get("frameStartHandle", 1)) end = float(instance.data.get("frameEndHandle", 1)) @@ -32,9 +34,9 @@ class ExtractAlembic(publish.Extractor): attr_prefixes = instance.data.get("attrPrefix", "").split(";") attr_prefixes = [value for value in attr_prefixes if value.strip()] - self.log.info("Extracting Proxy Meshes...") - + self.log.info("Extracting pointcache..") dirname = self.staging_dir(instance) + filename = "{name}.abc".format(**instance.data) path = os.path.join(dirname, filename) @@ -52,18 +54,17 @@ class ExtractAlembic(publish.Extractor): } if not instance.data.get("includeParentHierarchy", True): - options["root"] = roots + + if instance.data.get("visibleOnly", False): nodes = list(iter_visible_nodes_in_range(nodes, start=start, end=end)) + with suspended_refresh(): with maintained_selection(): - # TODO: select the bb geometry - self.create_proxy_geometry(instance, - start, - end) + self.create_proxy_geometry(instance, nodes, start, end) extract_alembic(file=path, startFrame=start, endFrame=end, @@ -76,21 +77,35 @@ class ExtractAlembic(publish.Extractor): 'name': 'abc', 'ext': 'abc', 'files': filename, - 'stagingDir': dirname + "stagingDir": dirname } instance.data["representations"].append(representation) instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) - #TODO: delete the bounding box def get_members_and_roots(self, instance): return instance[:], instance.data.get("setMembers") - def create_proxy_geometry(self, instance, start, end): - - inst_selection = cmds.ls(instance.name, long=True) + def create_proxy_geometry(self, instance, node, start, end): + inst_selection = cmds.ls(node, long=True) name_suffix = instance.data.get("nameSuffix") if instance.data.get("single", True): - pass + cmds.geomToBBox(inst_selection, + name=instance.name, + nameSuffix=name_suffix, + single=True, + keepOriginal=True, + bakeAnimation=True, + startTime=start, + endTime=end) + else: + cmds.geomToBBox(inst_selection, + name=instance.name, + nameSuffix=name_suffix, + single=False, + keepOriginal=True, + bakeAnimation=True, + startTime=start, + endTime=end) 
diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index b467a7c232..5e50ae72cd 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -25,6 +25,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): families = ["animation", "pointcache", "camera", + "proxyAbc", "renderlayer", "review", "yeticache"] diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index cb9af2c2b6..bfa3c9f0fb 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -575,6 +575,12 @@ "optional": false, "active": true }, + "ExtractProxyAlembic": { + "enabled": true, + "families": [ + "proxyAbc" + ] + }, "ExtractAlembic": { "enabled": true, "families": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index ab8c6b885e..2c6260db30 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -638,6 +638,26 @@ "type": "label", "label": "Extractors" }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractProxyAlembic", + "label": "Extract Proxy Alembic", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, From 94939e431a18cc45472276f01f96c71e5187dfc8 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 21 Nov 2022 16:18:26 +0100 Subject: [PATCH 2154/2550] rename families_to_upload to families_to_review + define it as class attribute --- openpype/hosts/tvpaint/plugins/publish/extract_sequence.py | 3 ++- openpype/settings/defaults/project_settings/tvpaint.json | 5 ++--- .../schemas/projects_schema/schema_project_tvpaint.json | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index d8aef1ab6b..7d2e9c6f25 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -19,6 +19,7 @@ class ExtractSequence(pyblish.api.Extractor): label = "Extract Sequence" hosts = ["tvpaint"] families = ["review", "renderPass", "renderLayer", "renderScene"] + families_to_review = ["review"] # Modifiable with settings review_bg = [255, 255, 255, 255] @@ -129,7 +130,7 @@ class ExtractSequence(pyblish.api.Extractor): # Fill tags and new families from project settings tags = [] - if family_lowered in self.families_to_upload: + if family_lowered in self.families_to_review: tags.append("review") # Sequence of one frame diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 2e413f50cd..9ccc318d70 100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -12,11 +12,10 @@ 255, 255 ], - "families_to_upload": [ + "families_to_review": [ "review", "renderpass", - "renderlayer", - "renderscene" + 
"renderlayer" ] }, "ValidateProjectSettings": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json index 0392c9089b..61342ef738 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -59,8 +59,8 @@ }, { "type": "enum", - "key": "families_to_upload", - "label": "Families to upload", + "key": "families_to_review", + "label": "Families to review", "multiselection": true, "enum_items": [ {"review": "review"}, From cdb91c03795db7bc9b249e69dd605769562c11bc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 17:56:02 +0100 Subject: [PATCH 2155/2550] Added helper class for version resolving and sorting --- .../custom/plugins/GlobalJobPreLoad.py | 131 ++++++++++++++++++ 1 file changed, 131 insertions(+) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 9b35c9502d..6c3dd092fe 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -14,6 +14,137 @@ from Deadline.Scripting import ( ProcessUtils, ) +VERSION_REGEX = re.compile( + r"(?P0|[1-9]\d*)" + r"\.(?P0|[1-9]\d*)" + r"\.(?P0|[1-9]\d*)" + r"(?:-(?P[a-zA-Z\d\-.]*))?" + r"(?:\+(?P[a-zA-Z\d\-.]*))?" +) + + +class OpenPypeVersion: + """Fake semver version class for OpenPype version purposes. + + The version + """ + def __init__(self, major, minor, patch, prerelease, origin=None): + self.major = major + self.minor = minor + self.patch = patch + self.prerelease = prerelease + + is_valid = True + if not major or not minor or not patch: + is_valid = False + self.is_valid = is_valid + + if origin is None: + base = "{}.{}.{}".format(str(major), str(minor), str(patch)) + if not prerelease: + origin = base + else: + origin = "{}-{}".format(base, str(prerelease)) + + self.origin = origin + + @classmethod + def from_string(cls, version): + """Create an object of version from string. + + Args: + version (str): Version as a string. + + Returns: + Union[OpenPypeVersion, None]: Version object if input is nonempty + string otherwise None. + """ + + if not version: + return None + valid_parts = VERSION_REGEX.findall(version) + if len(valid_parts) != 1: + # Return invalid version with filled 'origin' attribute + return cls(None, None, None, None, origin=str(version)) + + # Unpack found version + major, minor, patch, pre, post = valid_parts[0] + prerelease = pre + # Post release is not important anymore and should be considered as + # part of prerelease + # - comparison is implemented to find suitable build and builds should + # never contain prerelease part so "not proper" parsing is + # acceptable for this use case. + if post: + prerelease = "{}+{}".format(pre, post) + + return cls( + int(major), int(minor), int(patch), prerelease, origin=version + ) + + def has_compatible_release(self, other): + """Version has compatible release as other version. + + Both major and minor versions must be exactly the same. In that case + a build can be considered as release compatible with any version. + + Args: + other (OpenPypeVersion): Other version. + + Returns: + bool: Version is release compatible with other version. 
+ """ + + if self.is_valid and other.is_valid: + return self.major == other.major and self.minor == other.minor + return False + + def __bool__(self): + return self.is_valid + + def __repr__(self): + return "<{} {}>".format(self.__class__.__name__, self.origin) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return self.origin == other + return self.origin == other.origin + + def __lt__(self, other): + if not isinstance(other, self.__class__): + return None + + if not self.is_valid: + return True + + if not other.is_valid: + return False + + if self.origin == other.origin: + return None + + same_major = self.major == other.major + if not same_major: + return self.major < other.major + + same_minor = self.minor == other.minor + if not same_minor: + return self.minor < other.minor + + same_patch = self.patch == other.patch + if not same_patch: + return self.patch < other.patch + + if not self.prerelease: + return False + + if not other.prerelease: + return True + + pres = [self.prerelease, other.prerelease] + pres.sort() + return pres[0] == self.prerelease + def get_openpype_version_from_path(path, build=True): """Get OpenPype version from provided path. From b1e899d8ee2a79cd673bdf14bf4adf2134443dca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 17:57:10 +0100 Subject: [PATCH 2156/2550] Use full version for resolving and use specific build if matches requested version --- .../custom/plugins/GlobalJobPreLoad.py | 197 ++++++++++-------- 1 file changed, 110 insertions(+), 87 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 6c3dd092fe..375cf48b8f 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -152,9 +152,9 @@ def get_openpype_version_from_path(path, build=True): build (bool, optional): Get only builds, not sources Returns: - str or None: version of OpenPype if found. - + Union[OpenPypeVersion, None]: version of OpenPype if found. 
""" + # fix path for application bundle on macos if platform.system().lower() == "darwin": path = os.path.join(path, "Contents", "MacOS", "lib", "Python") @@ -177,8 +177,10 @@ def get_openpype_version_from_path(path, build=True): with open(version_file, "r") as vf: exec(vf.read(), version) - version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) - return version_match[1] + version_str = version.get("__version__") + if version_str: + return OpenPypeVersion.from_string(version_str) + return None def get_openpype_executable(): @@ -190,6 +192,91 @@ def get_openpype_executable(): return exe_list, dir_list +def get_openpype_versions(exe_list, dir_list): + print(">>> Getting OpenPype executable ...") + openpype_versions = [] + + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if install_dir: + print("--- Looking for OpenPype at: {}".format(install_dir)) + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = get_openpype_version_from_path(subdir) + if not version: + continue + print(" - found: {} - {}".format(version, subdir)) + openpype_versions.append((version, subdir)) + return openpype_versions + + +def get_requested_openpype_executable( + exe, dir_list, requested_version +): + requested_version_obj = OpenPypeVersion.from_string(requested_version) + if not requested_version_obj: + print(( + ">>> Requested version does not match version regex \"{}\"" + ).format(VERSION_REGEX)) + return None + + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) + openpype_versions = get_openpype_versions(dir_list) + if not openpype_versions: + return None + + # if looking for requested compatible version, + # add the implicitly specified to the list too. + if exe: + exe_dir = os.path.dirname(exe) + print("Looking for OpenPype at: {}".format(exe_dir)) + version = get_openpype_version_from_path(exe_dir) + if version: + print(" - found: {} - {}".format(version, exe_dir)) + openpype_versions.append((version, exe_dir)) + + matching_item = None + compatible_versions = [] + for version_item in openpype_versions: + version, version_dir = version_item + if requested_version_obj.has_compatible_release(version): + compatible_versions.append(version_item) + if version == requested_version_obj: + # Store version item if version match exactly + # - break if is found matching version + matching_item = version_item + break + + if not compatible_versions: + return None + + compatible_versions.sort(key=lambda item: item[0]) + if matching_item: + version, version_dir = matching_item + print(( + "*** Found exact match build version {} in {}" + ).format(version_dir, version)) + + else: + version, version_dir = compatible_versions[-1] + + print(( + "*** Latest compatible version found is {} in {}" + ).format(version_dir, version)) + + # create list of executables for different platform and let + # Deadline decide. + exe_list = [ + os.path.join(version_dir, "openpype_console.exe"), + os.path.join(version_dir, "openpype_console") + ] + return FileUtils.SearchFileList(";".join(exe_list)) + + def inject_openpype_environment(deadlinePlugin): """ Pull env vars from OpenPype and push them to rendering process. 
@@ -199,93 +286,29 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Injecting OpenPype environments ...") try: - print(">>> Getting OpenPype executable ...") exe_list, dir_list = get_openpype_executable() - openpype_versions = [] - # if the job requires specific OpenPype version, - # lets go over all available and find compatible build. + exe = FileUtils.SearchFileList(exe_list) + requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print(( - ">>> Scanning for compatible requested version {}" - ).format(requested_version)) - install_dir = DirectoryUtils.SearchDirectoryList(dir_list) - if install_dir: - print("--- Looking for OpenPype at: {}".format(install_dir)) - sub_dirs = [ - f.path for f in os.scandir(install_dir) - if f.is_dir() - ] - for subdir in sub_dirs: - version = get_openpype_version_from_path(subdir) - if not version: - continue - print(" - found: {} - {}".format(version, subdir)) - openpype_versions.append((version, subdir)) + exe = get_requested_openpype_executable( + exe, dir_list, requested_version + ) + if exe is None: + raise RuntimeError(( + "Cannot find compatible version available for version {}" + " requested by the job. Please add it through plugin" + " configuration in Deadline or install it to configured" + " directory." + ).format(requested_version)) - exe = FileUtils.SearchFileList(exe_list) - if openpype_versions: - # if looking for requested compatible version, - # add the implicitly specified to the list too. - print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) - version = get_openpype_version_from_path( - os.path.dirname(exe)) - if version: - print(" - found: {} - {}".format( - version, os.path.dirname(exe) - )) - openpype_versions.append((version, os.path.dirname(exe))) - - if requested_version: - # sort detected versions - if openpype_versions: - # use natural sorting - openpype_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest available version found is {}" - ).format(openpype_versions[-1][0])) - requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 - compatible_versions = [] - for version in openpype_versions: - v = version[0].split(".")[:3] - if v[0] == requested_major and v[1] == requested_minor: - compatible_versions.append(version) - if not compatible_versions: - raise RuntimeError( - ("Cannot find compatible version available " - "for version {} requested by the job. " - "Please add it through plugin configuration " - "in Deadline or install it to configured " - "directory.").format(requested_version)) - # sort compatible versions nad pick the last one - compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest compatible version found is {}" - ).format(compatible_versions[-1][0])) - # create list of executables for different platform and let - # Deadline decide. - exe_list = [ - os.path.join( - compatible_versions[-1][1], "openpype_console.exe"), - os.path.join( - compatible_versions[-1][1], "openpype_console") - ] - exe = FileUtils.SearchFileList(";".join(exe_list)) - if exe == "": - raise RuntimeError( - "OpenPype executable was not found " + - "in the semicolon separated list " + - "\"" + ";".join(exe_list) + "\". 
" + - "The path to the render executable can be configured " + - "from the Plugin Configuration in the Deadline Monitor.") + if not exe: + raise RuntimeError(( + "OpenPype executable was not found in the semicolon " + "separated list \"{}\"." + "The path to the render executable can be configured" + " from the Plugin Configuration in the Deadline Monitor." + ).format(";".join(exe_list))) print("--- OpenPype executable: {}".format(exe)) From dbc72502b4cbf9859493d43ce90141f84ecc9420 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 17:57:37 +0100 Subject: [PATCH 2157/2550] few formatting changes --- .../custom/plugins/GlobalJobPreLoad.py | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 375cf48b8f..78e1371eee 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -326,22 +326,22 @@ def inject_openpype_environment(deadlinePlugin): export_url ] - add_args = {} - add_args['project'] = \ - job.GetJobEnvironmentKeyValue('AVALON_PROJECT') - add_args['asset'] = job.GetJobEnvironmentKeyValue('AVALON_ASSET') - add_args['task'] = job.GetJobEnvironmentKeyValue('AVALON_TASK') - add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME') - add_args["envgroup"] = "farm" + add_kwargs = { + "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"), + "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"), + "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"), + "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"), + "envgroup": "farm" + } + if all(add_kwargs.values()): + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) - if all(add_args.values()): - for key, value in add_args.items(): - args.append("--{}".format(key)) - args.append(value) else: - msg = "Required env vars: AVALON_PROJECT, AVALON_ASSET, " + \ - "AVALON_TASK, AVALON_APP_NAME" - raise RuntimeError(msg) + raise RuntimeError(( + "Missing required env vars: AVALON_PROJECT, AVALON_ASSET," + " AVALON_TASK, AVALON_APP_NAME" + )) if not os.environ.get("OPENPYPE_MONGO"): print(">>> Missing OPENPYPE_MONGO env var, process won't work") @@ -362,12 +362,12 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Loading file ...") with open(export_url) as fp: contents = json.load(fp) - for key, value in contents.items(): - deadlinePlugin.SetProcessEnvironmentVariable(key, value) + + for key, value in contents.items(): + deadlinePlugin.SetProcessEnvironmentVariable(key, value) script_url = job.GetJobPluginInfoKeyValue("ScriptFilename") if script_url: - script_url = script_url.format(**contents).replace("\\", "/") print(">>> Setting script path {}".format(script_url)) job.SetJobPluginInfoKeyValue("ScriptFilename", script_url) From 61e5dc3fc9c326a90601e774722ae30b419ef390 Mon Sep 17 00:00:00 2001 From: Thomas Fricard <51854004+friquette@users.noreply.github.com> Date: Mon, 21 Nov 2022 18:21:04 +0100 Subject: [PATCH 2158/2550] change order of default value Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/settings/defaults/project_settings/tvpaint.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 9ccc318d70..e03ce32030 
100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -14,8 +14,8 @@ ], "families_to_review": [ "review", - "renderpass", - "renderlayer" + "renderlayer", + "renderscene" ] }, "ValidateProjectSettings": { From e24c2f853b5b976e1c441470cf6e7f435e2c0815 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:15:34 +0100 Subject: [PATCH 2159/2550] attribute definitions can be hidden and disabled --- openpype/lib/attribute_definitions.py | 32 +++++++++++++++++++++------ 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 6baeaec045..ed151bbe4e 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -105,11 +105,14 @@ class AbtractAttrDef(object): How to force to set `key` attribute? Args: - key(str): Under which key will be attribute value stored. - label(str): Attribute label. - tooltip(str): Attribute tooltip. - is_label_horizontal(bool): UI specific argument. Specify if label is + key (str): Under which key will be attribute value stored. + default (Any): Default value of an attribute. + label (str): Attribute label. + tooltip (str): Attribute tooltip. + is_label_horizontal (bool): UI specific argument. Specify if label is next to value input or ahead. + hidden (bool): Will be item hidden (for UI purposes). + disabled (bool): Item will be visible but disabled (for UI purposes). """ type_attributes = [] @@ -117,16 +120,29 @@ class AbtractAttrDef(object): is_value_def = True def __init__( - self, key, default, label=None, tooltip=None, is_label_horizontal=None + self, + key, + default, + label=None, + tooltip=None, + is_label_horizontal=None, + hidden=False, + disabled=False ): if is_label_horizontal is None: is_label_horizontal = True + + if hidden is None: + hidden = False + self.key = key self.label = label self.tooltip = tooltip self.default = default self.is_label_horizontal = is_label_horizontal - self._id = uuid.uuid4() + self.hidden = hidden + self.disabled = disabled + self._id = uuid.uuid4().hex self.__init__class__ = AbtractAttrDef @@ -173,7 +189,9 @@ class AbtractAttrDef(object): "label": self.label, "tooltip": self.tooltip, "default": self.default, - "is_label_horizontal": self.is_label_horizontal + "is_label_horizontal": self.is_label_horizontal, + "hidden": self.hidden, + "disabled": self.disabled } for attr in self.type_attributes: data[attr] = getattr(self, attr) From 6abfa14e01d67eae20e7bb66c219feab99d70a37 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:16:02 +0100 Subject: [PATCH 2160/2550] added special definition for hidden values --- openpype/lib/attribute_definitions.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index ed151bbe4e..0df7b16e64 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -253,6 +253,26 @@ class UnknownDef(AbtractAttrDef): return value +class HiddenDef(AbtractAttrDef): + """Hidden value of Any type. + + This attribute can be used for UI purposes to pass values related + to other attributes (e.g. in multi-page UIs). + + Keep in mind the value should be possible to parse by json parser. 
+ """ + + type = "hidden" + + def __init__(self, key, default=None, **kwargs): + kwargs["default"] = default + kwargs["hidden"] = True + super(UnknownDef, self).__init__(key, **kwargs) + + def convert_value(self, value): + return value + + class NumberDef(AbtractAttrDef): """Number definition. From fe392aa5db267ef09e0152d867eb02e45fee065e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:16:41 +0100 Subject: [PATCH 2161/2550] implemented hidden widget --- openpype/tools/attribute_defs/widgets.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index dc697b08a6..7f7c20009e 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -6,6 +6,7 @@ from Qt import QtWidgets, QtCore from openpype.lib.attribute_definitions import ( AbtractAttrDef, UnknownDef, + HiddenDef, NumberDef, TextDef, EnumDef, @@ -459,6 +460,29 @@ class UnknownAttrWidget(_BaseAttrDefWidget): self._input_widget.setText(str_value) +class HiddenAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + self.setVisible(False) + self._value = None + self._multivalue = False + + def setVisible(self, visible): + if visible: + visible = False + super(HiddenAttrWidget, self).setVisible(visible) + + def current_value(self): + if self._multivalue: + raise ValueError( + "{} can't output for multivalue.".format(self.__class__.__name__) + ) + return self._value + + def set_value(self, value, multivalue=False): + self._value = copy.deepcopy(value) + self._multivalue = multivalue + + class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): input_widget = FilesWidget( From 068ec3f89809eca1fcff32d81e36158f88dc248a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:17:09 +0100 Subject: [PATCH 2162/2550] enhanced attribute definitons widget --- openpype/tools/attribute_defs/widgets.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index 7f7c20009e..6db6da58e1 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -23,6 +23,16 @@ from .files_widget import FilesWidget def create_widget_for_attr_def(attr_def, parent=None): + widget = _create_widget_for_attr_def(attr_def, parent) + if attr_def.hidden: + widget.setVisible(False) + + if attr_def.disabled: + widget.setEnabled(False) + return widget + + +def _create_widget_for_attr_def(attr_def, parent=None): if not isinstance(attr_def, AbtractAttrDef): raise TypeError("Unexpected type \"{}\" expected \"{}\"".format( str(type(attr_def)), AbtractAttrDef @@ -43,6 +53,9 @@ def create_widget_for_attr_def(attr_def, parent=None): if isinstance(attr_def, UnknownDef): return UnknownAttrWidget(attr_def, parent) + if isinstance(attr_def, HiddenDef): + return HiddenAttrWidget(attr_def, parent) + if isinstance(attr_def, FileDef): return FileAttrWidget(attr_def, parent) @@ -116,6 +129,10 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): self._current_keys.add(attr_def.key) widget = create_widget_for_attr_def(attr_def, self) + self._widgets.append(widget) + + if attr_def.hidden: + continue expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: @@ -134,7 +151,6 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): layout.addWidget( widget, row, col_num, 1, expand_cols ) - self._widgets.append(widget) row += 1 def set_value(self, 
value): From a606de5b76b63a6051731f292daa7b0420bfbbde Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:17:56 +0100 Subject: [PATCH 2163/2550] don't add hidden widgets to publisher widgets --- openpype/tools/publisher/widgets/widgets.py | 26 +++++++++++++++------ 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ce3d91ce63..a0d97245ba 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -9,6 +9,7 @@ import collections from Qt import QtWidgets, QtCore, QtGui import qtawesome +from openpype.lib.attribute_definitions import UnknownDef from openpype.tools.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm @@ -1303,6 +1304,13 @@ class CreatorAttrsWidget(QtWidgets.QWidget): else: widget.set_value(values, True) + widget.value_changed.connect(self._input_value_changed) + self._attr_def_id_to_instances[attr_def.id] = attr_instances + self._attr_def_id_to_attr_def[attr_def.id] = attr_def + + if attr_def.hidden: + continue + expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: expand_cols = 1 @@ -1321,13 +1329,8 @@ class CreatorAttrsWidget(QtWidgets.QWidget): content_layout.addWidget( widget, row, col_num, 1, expand_cols ) - row += 1 - widget.value_changed.connect(self._input_value_changed) - self._attr_def_id_to_instances[attr_def.id] = attr_instances - self._attr_def_id_to_attr_def[attr_def.id] = attr_def - self._scroll_area.setWidget(content_widget) self._content_widget = content_widget @@ -1421,8 +1424,17 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): widget = create_widget_for_attr_def( attr_def, content_widget ) - label = attr_def.label or attr_def.key - content_layout.addRow(label, widget) + hidden_widget = attr_def.hidden + # Hide unknown values of publish plugins + # - The keys in most of cases does not represent what would + # label represent + if isinstance(attr_def, UnknownDef): + widget.setVisible(False) + hidden_widget = True + + if not hidden_widget: + label = attr_def.label or attr_def.key + content_layout.addRow(label, widget) widget.value_changed.connect(self._input_value_changed) From 29cc9bdce61ea3ce1dc01d17bb05c3a2db3afffe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:39:10 +0100 Subject: [PATCH 2164/2550] Fix line length --- openpype/tools/attribute_defs/widgets.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index 6db6da58e1..1ffb3d3799 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -489,9 +489,9 @@ class HiddenAttrWidget(_BaseAttrDefWidget): def current_value(self): if self._multivalue: - raise ValueError( - "{} can't output for multivalue.".format(self.__class__.__name__) - ) + raise ValueError("{} can't output for multivalue.".format( + self.__class__.__name__ + )) return self._value def set_value(self, value, multivalue=False): From 302c5fe21456e318e24451d2a6df190bc2dce449 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 22 Nov 2022 18:52:26 +0800 Subject: [PATCH 2165/2550] create and publish bb-geometry --- .../maya/plugins/create/create_proxy_abc.py | 2 + .../maya/plugins/load/load_abc_to_standin.py | 2 +- .../hosts/maya/plugins/load/load_gpucache.py | 2 +- 
.../hosts/maya/plugins/load/load_reference.py | 1 + .../maya/plugins/publish/extract_proxy_abc.py | 43 +++++++++---------- .../plugins/publish/collect_resources_path.py | 1 + openpype/plugins/publish/integrate.py | 1 + openpype/plugins/publish/integrate_legacy.py | 1 + .../defaults/project_settings/maya.json | 3 +- .../schemas/template_publish_families.json | 1 + 10 files changed, 32 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index f9671dfccf..57978cb4d9 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -34,6 +34,8 @@ class CreateProxyAlembic(plugin.Creator): # Creating a single bounding box per shape selected self.data["single"] = False + # remove the bbBox after publish + #self.data["removeBoundingBoxAfterPublish"] = False # name suffix for the bounding box self.data["nameSuffix"] = "_BBox" diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 605a492e4d..70866a3ba6 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -11,7 +11,7 @@ from openpype.settings import get_project_settings class AlembicStandinLoader(load.LoaderPlugin): """Load Alembic as Arnold Standin""" - families = ["animation", "model", "pointcache"] + families = ["animation", "model", "proxyAbc", "pointcache"] representations = ["abc"] label = "Import Alembic as Arnold Standin" diff --git a/openpype/hosts/maya/plugins/load/load_gpucache.py b/openpype/hosts/maya/plugins/load/load_gpucache.py index a09f924c7b..07e5734f43 100644 --- a/openpype/hosts/maya/plugins/load/load_gpucache.py +++ b/openpype/hosts/maya/plugins/load/load_gpucache.py @@ -10,7 +10,7 @@ from openpype.settings import get_project_settings class GpuCacheLoader(load.LoaderPlugin): """Load Alembic as gpuCache""" - families = ["model", "animation", "pointcache"] + families = ["model", "animation", "proxyAbc", "pointcache"] representations = ["abc"] label = "Import Gpu Cache" diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index c762a29326..c6b07b036d 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -16,6 +16,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): families = ["model", "pointcache", + "proxyAbc", "animation", "mayaAscii", "mayaScene", diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index 4607fd8a4b..ee38979577 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -22,7 +22,8 @@ class ExtractProxyAlembic(publish.Extractor): def process(self, instance): nodes, roots = self.get_members_and_roots(instance) - + self.log.info("nodes:{}".format(nodes)) + self.log.info("roots:{}".format(roots)) # Collect the start and end including handles start = float(instance.data.get("frameStartHandle", 1)) end = float(instance.data.get("frameEndHandle", 1)) @@ -34,7 +35,7 @@ class ExtractProxyAlembic(publish.Extractor): attr_prefixes = instance.data.get("attrPrefix", "").split(";") attr_prefixes = [value for value in attr_prefixes if value.strip()] - self.log.info("Extracting pointcache..") + 
self.log.info("Extracting Proxy Alembic..") dirname = self.staging_dir(instance) filename = "{name}.abc".format(**instance.data) @@ -55,16 +56,22 @@ class ExtractProxyAlembic(publish.Extractor): if not instance.data.get("includeParentHierarchy", True): options["root"] = roots + self.log.info("{}".format(options["root"])) + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. + options["writeUVSets"] = True if instance.data.get("visibleOnly", False): nodes = list(iter_visible_nodes_in_range(nodes, start=start, end=end)) - with suspended_refresh(): with maintained_selection(): - self.create_proxy_geometry(instance, nodes, start, end) + self.create_proxy_geometry(instance, + nodes, + start, + end) extract_alembic(file=path, startFrame=start, endFrame=end, @@ -91,21 +98,13 @@ class ExtractProxyAlembic(publish.Extractor): def create_proxy_geometry(self, instance, node, start, end): inst_selection = cmds.ls(node, long=True) name_suffix = instance.data.get("nameSuffix") - if instance.data.get("single", True): - cmds.geomToBBox(inst_selection, - name=instance.name, - nameSuffix=name_suffix, - single=True, - keepOriginal=True, - bakeAnimation=True, - startTime=start, - endTime=end) - else: - cmds.geomToBBox(inst_selection, - name=instance.name, - nameSuffix=name_suffix, - single=False, - keepOriginal=True, - bakeAnimation=True, - startTime=start, - endTime=end) + bbox = cmds.geomToBBox(inst_selection, + name=instance.name, + nameSuffix=name_suffix, + single=instance.data.get("single", False), + keepOriginal=True, + bakeAnimation=True, + startTime=start, + endTime=end) + return cmds.select(bbox, noExpand=True) + diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 00f65b8b67..90aa0f44bb 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -21,6 +21,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.495 families = ["workfile", "pointcache", + "proxyAbc", "camera", "animation", "model", diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 0998e643e6..66f9a7aa59 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -81,6 +81,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder families = ["workfile", "pointcache", + "proxyAbc", "camera", "animation", "model", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c..d05aea1e2f 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -76,6 +76,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.00001 families = ["workfile", "pointcache", + "proxyAbc", "camera", "animation", "model", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index bfa3c9f0fb..f4a9fdd9ed 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -944,7 +944,8 @@ "subset_name_filters": [], "families": [ "animation", - "pointcache" + "pointcache", + "proxyAbc" ], "repre_names": [ "abc" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json 
b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json index f39ad31fbb..43dd74cdf9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json @@ -28,6 +28,7 @@ {"nukenodes": "nukenodes"}, {"plate": "plate"}, {"pointcache": "pointcache"}, + {"proxyAbc": "proxyAbc"}, {"prerender": "prerender"}, {"redshiftproxy": "redshiftproxy"}, {"reference": "reference"}, From 7bf1d0bc9b2efb05e9904a4784ded5ea2da5b717 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 22 Nov 2022 18:56:20 +0800 Subject: [PATCH 2166/2550] aov filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 1 - .../modules/deadline/plugins/publish/submit_publish_job.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 6fde0df162..c54e3ab3e0 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1054,7 +1054,6 @@ class RenderProductsRedshift(ARenderProducts): # Any AOVs that still get processed, like Cryptomatte # by themselves are not multipart files. - # aov_multipart = not multipart # Redshift skips rendering of masterlayer without AOV suffix # when a Beauty AOV is rendered. It overrides the main layer. diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index c1e9dd4015..6362b4ca65 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -500,7 +500,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if instance_data.get("multipartExr"): preview = True - self.log.info("preview:{}".format(preview)) + self.log.debug("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name @@ -543,7 +543,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if new_instance.get("extendFrames", False): self._copy_extend_frames(new_instance, rep) instances.append(new_instance) - self.log.info("instances:{}".format(instances)) + self.log.debug("instances:{}".format(instances)) return instances def _get_representations(self, instance, exp_files): From 996bd4897b80ae72a06a8bbc81c1b69d471485ac Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 12:04:47 +0100 Subject: [PATCH 2167/2550] tabs widget can set current tab by index --- openpype/tools/publisher/widgets/tabs_widget.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py index 84638a002c..eb3eda8c19 100644 --- a/openpype/tools/publisher/widgets/tabs_widget.py +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -68,7 +68,16 @@ class PublisherTabsWidget(QtWidgets.QFrame): self.set_current_tab(identifier) return button + def get_tab_by_index(self, index): + if index < 0 or index > self._btns_layout.count(): + return None + item = self._btns_layout.itemAt(index) + return item.widget() + def set_current_tab(self, identifier): + if isinstance(identifier, int): + identifier = self.get_tab_by_index(identifier) + if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier From 430f30c05e3dc53184277ed121efd0fdcd003b3a 
Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 12:05:22 +0100 Subject: [PATCH 2168/2550] added helper methods to know on which tab we are --- openpype/tools/publisher/window.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index f107c0e505..3879e37ad7 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -539,6 +539,18 @@ class PublisherWindow(QtWidgets.QDialog): def _go_to_report_tab(self): self._set_current_tab("report") + def _is_on_create_tab(self): + self._is_current_tab("create") + + def _is_on_publish_tab(self): + self._is_current_tab("publish") + + def _is_on_details_tab(self): + self._is_current_tab("details") + + def _is_on_report_tab(self): + self._is_current_tab("report") + def _set_publish_overlay_visibility(self, visible): if visible: widget = self._publish_overlay From ac9b9b208e055c856c32313a87d20bf1dbf403c3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 12:54:28 +0100 Subject: [PATCH 2169/2550] OP-4196 - safer getter for published_path published_path might be missing in case of thumbnail not getting published. This implementation takes from staging if published_path not present --- .../slack/plugins/publish/integrate_slack_api.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 643e55915b..f40a13db9f 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -142,13 +142,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _get_thumbnail_path(self, instance): """Returns abs url for thumbnail if present in instance repres""" - published_path = None + thumbnail_path = None for repre in instance.data.get("representations", []): if repre.get('thumbnail') or "thumbnail" in repre.get('tags', []): - if os.path.exists(repre["published_path"]): - published_path = repre["published_path"] + self.log.info(repre) + repre_thumbnail_path = ( + repre.get("published_path") or + os.path.join(repre["stagingDir"], repre["files"]) + ) + if os.path.exists(repre_thumbnail_path): + self.log.info("exists") + thumbnail_path = repre_thumbnail_path break - return published_path + return thumbnail_path def _get_review_path(self, instance): """Returns abs url for review if present in instance repres""" From c61098b782492728f7dbbe667b2540b2805b35ba Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 13:00:34 +0100 Subject: [PATCH 2170/2550] OP-4196 - fix when task_data is not dict In legacy cases task might be only string with its name, not structure with additional metadata (type etc.). This implementation handles that. 
--- .../modules/slack/plugins/publish/integrate_slack_api.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index f40a13db9f..6138671180 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -121,10 +121,13 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): ): fill_pairs.append(("task", task_data["name"])) - else: + elif isinstance(task_data, dict): for key, value in task_data.items(): fill_key = "task[{}]".format(key) fill_pairs.append((fill_key, value)) + else: + # fallback for legacy - if task_data is only task name + fill_pairs.append(("task", task_data)) self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) From f993842c4ec7a4e91b5a42cbd61ddba0f9387a35 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 13:01:27 +0100 Subject: [PATCH 2171/2550] OP-4196 - remove unnecessary logging --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 6138671180..e43b07b228 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -148,13 +148,11 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): thumbnail_path = None for repre in instance.data.get("representations", []): if repre.get('thumbnail') or "thumbnail" in repre.get('tags', []): - self.log.info(repre) repre_thumbnail_path = ( repre.get("published_path") or os.path.join(repre["stagingDir"], repre["files"]) ) if os.path.exists(repre_thumbnail_path): - self.log.info("exists") thumbnail_path = repre_thumbnail_path break return thumbnail_path From ab17acddc7c192dab58727e87fe87b51e242a3df Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 13:39:24 +0100 Subject: [PATCH 2172/2550] OP-4196 - better handling of data It should take task from instance anatomyData, then from context and handle non dict items. 
--- .../slack/plugins/publish/integrate_slack_api.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index e43b07b228..2c6f3d21bd 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -112,7 +112,13 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if review_path: fill_pairs.append(("review_filepath", review_path)) - task_data = fill_data.get("task") + task_data = ( + copy.deepcopy(instance.data.get("anatomyData", [])).get("task") + or fill_data.get("task") + ) + if not isinstance(task_data, dict): + # fallback for legacy - if task_data is only task name + task_data = {"name": task_data} if task_data: if ( "{task}" in message_templ @@ -121,13 +127,10 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): ): fill_pairs.append(("task", task_data["name"])) - elif isinstance(task_data, dict): + else: for key, value in task_data.items(): fill_key = "task[{}]".format(key) fill_pairs.append((fill_key, value)) - else: - # fallback for legacy - if task_data is only task name - fill_pairs.append(("task", task_data)) self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) From 3cd241d2dbfa56a43ae2199fb1c38bd236497cd1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 14:01:10 +0100 Subject: [PATCH 2173/2550] OP-4196 - fix wrong return type --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 2c6f3d21bd..9539d03306 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -113,7 +113,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): fill_pairs.append(("review_filepath", review_path)) task_data = ( - copy.deepcopy(instance.data.get("anatomyData", [])).get("task") + copy.deepcopy(instance.data.get("anatomyData", {})).get("task") or fill_data.get("task") ) if not isinstance(task_data, dict): From 8a121bc0ff43e86bbe42d660a29e1d1fed13e08c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:11:58 +0100 Subject: [PATCH 2174/2550] move default settings from 'project_settings/global/tools/publish/template_name_profiles' to legacy place --- .../defaults/project_settings/global.json | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index b8995de99e..46b8b1b0c8 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -288,6 +288,17 @@ "task_types": [], "tasks": [], "template_name": "maya2unreal" + }, + { + "families": [ + "online" + ], + "hosts": [ + "traypublisher" + ], + "task_types": [], + "tasks": [], + "template_name": "online" } ] }, @@ -484,19 +495,7 @@ ] }, "publish": { - "template_name_profiles": [ - { - "families": [ - "online" - ], - "hosts": [ - "traypublisher" - ], - "task_types": [], - "task_names": [], - "template_name": "online" - } - ], + "template_name_profiles": [], "hero_template_name_profiles": [] } }, From 
788ed6478006c17644460945c9a60cc8207a036c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:50:46 +0100 Subject: [PATCH 2175/2550] fix typo --- openpype/tools/publisher/widgets/card_view_widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 9fd2bf0824..72644c09db 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -674,9 +674,9 @@ class InstanceCardView(AbstractInstanceView): instances_by_group[group_name] ) - self._update_ordered_group_nameS() + self._update_ordered_group_names() - def _update_ordered_group_nameS(self): + def _update_ordered_group_names(self): ordered_group_names = [CONTEXT_GROUP] for idx in range(self._content_layout.count()): if idx > 0: From 3ba5f8e0e99798c62ec295ea2a3706f3da8aac37 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:51:05 +0100 Subject: [PATCH 2176/2550] fix tas combobox sizes --- openpype/tools/publisher/widgets/widgets.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ce3d91ce63..332e231653 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -578,6 +578,11 @@ class TasksCombobox(QtWidgets.QComboBox): self._text = None + # Make sure combobox is extended horizontally + size_policy = self.sizePolicy() + size_policy.setHorizontalPolicy(size_policy.MinimumExpanding) + self.setSizePolicy(size_policy) + def set_invalid_empty_task(self, invalid=True): self._proxy_model.set_filter_empty(invalid) if invalid: From b2065acd7a43724ecd522a9f14531f3a45df38ce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:51:44 +0100 Subject: [PATCH 2177/2550] added ability to know if views have any items --- .../publisher/widgets/card_view_widgets.py | 7 +++++++ .../publisher/widgets/list_view_widgets.py | 7 +++++++ .../tools/publisher/widgets/overview_widget.py | 4 ++++ openpype/tools/publisher/widgets/widgets.py | 17 ++++++++++++++++- 4 files changed, 34 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 72644c09db..09635d1a15 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -676,6 +676,13 @@ class InstanceCardView(AbstractInstanceView): self._update_ordered_group_names() + def has_items(self): + if self._convertor_items_group is not None: + return True + if self._widgets_by_group: + return True + return False + def _update_ordered_group_names(self): ordered_group_names = [CONTEXT_GROUP] for idx in range(self._content_layout.count()): diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 32d84862f0..1cdb4cdcdb 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -912,6 +912,13 @@ class InstanceListView(AbstractInstanceView): if not self._instance_view.isExpanded(proxy_index): self._instance_view.expand(proxy_index) + def has_items(self): + if self._convertor_group_widget is not None: + return True + if self._group_items: + return True + return False + def get_selected_items(self): """Get selected instance ids and 
context selection. diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 1c924d1631..b1aeda9cd4 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -205,6 +205,10 @@ class OverviewWidget(QtWidgets.QFrame): self._subset_views_widget.height() ) + def has_items(self): + view = self._subset_views_layout.currentWidget() + return view.has_items() + def _on_create_clicked(self): """Pass signal to parent widget which should care about changing state. diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 332e231653..d6c6f8673c 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -305,6 +305,20 @@ class AbstractInstanceView(QtWidgets.QWidget): "{} Method 'refresh' is not implemented." ).format(self.__class__.__name__)) + def has_items(self): + """View has at least one item. + + This is more a question for controller but is called from widget + which should probably should not use controller. + + Returns: + bool: There is at least one instance or conversion item. + """ + + raise NotImplementedError(( + "{} Method 'has_items' is not implemented." + ).format(self.__class__.__name__)) + def get_selected_items(self): """Selected instances required for callbacks. @@ -1185,7 +1199,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): """Set currently selected instances. Args: - instances(list): List of selected instances. + instances(List[CreatedInstance]): List of selected instances. Empty instances tells that nothing or context is selected. """ self._set_btns_visible(False) @@ -1619,6 +1633,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): instances(List[CreatedInstance]): List of currently selected instances. context_selected(bool): Is context selected. + convertor_identifiers(List[str]): Identifiers of convert items. 
""" all_valid = True From d87e8fe99c68c23a5fdf1ce19fc0debe654eee97 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:52:06 +0100 Subject: [PATCH 2178/2550] tabs widget can accept tab indexes --- openpype/tools/publisher/widgets/tabs_widget.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py index eb3eda8c19..d8ad19cfc0 100644 --- a/openpype/tools/publisher/widgets/tabs_widget.py +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -54,6 +54,9 @@ class PublisherTabsWidget(QtWidgets.QFrame): self._buttons_by_identifier = {} def is_current_tab(self, identifier): + if isinstance(identifier, int): + identifier = self.get_tab_by_index(identifier) + if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier return self._current_identifier == identifier @@ -69,10 +72,10 @@ class PublisherTabsWidget(QtWidgets.QFrame): return button def get_tab_by_index(self, index): - if index < 0 or index > self._btns_layout.count(): - return None - item = self._btns_layout.itemAt(index) - return item.widget() + if 0 >= index < self._btns_layout.count(): + item = self._btns_layout.itemAt(index) + return item.widget() + return None def set_current_tab(self, identifier): if isinstance(identifier, int): From dd50c6723e1ec892478205f72de2e0bf57940d35 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:55:39 +0100 Subject: [PATCH 2179/2550] small teaks and fixes --- openpype/tools/publisher/window.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 3879e37ad7..59dd2e6ec9 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -432,7 +432,7 @@ class PublisherWindow(QtWidgets.QDialog): self._update_create_overlay_size() self._update_create_overlay_visibility() - if self._is_current_tab("create"): + if self._is_on_create_tab(): self._install_app_event_listener() # Reset if requested @@ -450,7 +450,7 @@ class PublisherWindow(QtWidgets.QDialog): self._context_label.setText(label) def _update_publish_details_widget(self, force=False): - if not force and not self._is_current_tab("details"): + if not force and not self._is_on_details_tab(): return report_data = self.controller.get_publish_report() @@ -540,16 +540,16 @@ class PublisherWindow(QtWidgets.QDialog): self._set_current_tab("report") def _is_on_create_tab(self): - self._is_current_tab("create") + return self._is_current_tab("create") def _is_on_publish_tab(self): - self._is_current_tab("publish") + return self._is_current_tab("publish") def _is_on_details_tab(self): - self._is_current_tab("details") + return self._is_current_tab("details") def _is_on_report_tab(self): - self._is_current_tab("report") + return self._is_current_tab("report") def _set_publish_overlay_visibility(self, visible): if visible: @@ -601,11 +601,8 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) self._set_footer_enabled(False) self._update_publish_details_widget() - if ( - not self._is_current_tab("create") - and not self._is_current_tab("publish") ): - self._set_current_tab("publish") + self._go_to_publish_tab() def _on_publish_start(self): self._create_tab.setEnabled(False) @@ -621,8 +618,8 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_details_widget.close_details_popup() - if 
self._is_current_tab(self._create_tab): - self._set_current_tab("publish") + if self._is_on_create_tab(): + self._go_to_publish_tab() def _on_publish_validated_change(self, event): if event["value"]: @@ -635,7 +632,7 @@ class PublisherWindow(QtWidgets.QDialog): publish_has_crashed = self._controller.publish_has_crashed validate_enabled = not publish_has_crashed publish_enabled = not publish_has_crashed - if self._is_current_tab("publish"): + if self._is_on_publish_tab(): self._go_to_report_tab() if validate_enabled: From caf94fb68f789d528e69cf6b423b29b20fe16369 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:57:17 +0100 Subject: [PATCH 2180/2550] show publisher can accept tab to switch to --- openpype/tools/publisher/window.py | 62 +++++++++++++++++++++++++++++- openpype/tools/utils/host_tools.py | 16 ++++---- 2 files changed, 67 insertions(+), 11 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 59dd2e6ec9..0f7fd2c7e3 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -156,7 +156,7 @@ class PublisherWindow(QtWidgets.QDialog): footer_layout.addWidget(footer_bottom_widget, 0) # Content - # - wrap stacked widget under one more widget to be able propagate + # - wrap stacked widget under one more widget to be able to propagate # margins (QStackedLayout can't have margins) content_widget = QtWidgets.QWidget(under_publish_widget) @@ -267,6 +267,9 @@ class PublisherWindow(QtWidgets.QDialog): controller.event_system.add_callback( "publish.reset.finished", self._on_publish_reset ) + controller.event_system.add_callback( + "controller.reset.finished", self._on_controller_reset + ) controller.event_system.add_callback( "publish.process.started", self._on_publish_start ) @@ -337,11 +340,13 @@ class PublisherWindow(QtWidgets.QDialog): self._controller = controller self._first_show = True + self._first_reset = True # This is a little bit confusing but 'reset_on_first_show' is too long - # forin init + # for init self._reset_on_first_show = reset_on_show self._reset_on_show = True self._publish_frame_visible = None + self._tab_on_reset = None self._error_messages_to_show = collections.deque() self._errors_dialog_message_timer = errors_dialog_message_timer @@ -353,12 +358,21 @@ class PublisherWindow(QtWidgets.QDialog): self._show_timer = show_timer self._show_counter = 0 + self._window_is_visible = False @property def controller(self): return self._controller + def make_sure_is_visible(self): + if self._window_is_visible: + self.setWindowState(QtCore.Qt.ActiveWindow) + + else: + self.show() + def showEvent(self, event): + self._window_is_visible = True super(PublisherWindow, self).showEvent(event) if self._first_show: self._first_show = False @@ -372,6 +386,7 @@ class PublisherWindow(QtWidgets.QDialog): self._update_create_overlay_size() def closeEvent(self, event): + self._window_is_visible = False self._uninstall_app_event_listener() self.save_changes() self._reset_on_show = True @@ -449,6 +464,19 @@ class PublisherWindow(QtWidgets.QDialog): def set_context_label(self, label): self._context_label.setText(label) + def set_tab_on_reset(self, tab): + """Define tab that will be selected on window show. + + This is single use method, when publisher window is showed the value is + unset and not used on next show. + + Args: + tab (Union[int, Literal[create, publish, details, report]]: Index + or name of tab which will be selected on show (after reset). 
+ """ + + self._tab_on_reset = tab + def _update_publish_details_widget(self, force=False): if not force and not self._is_on_details_tab(): return @@ -524,6 +552,11 @@ class PublisherWindow(QtWidgets.QDialog): def _set_current_tab(self, identifier): self._tabs_widget.set_current_tab(identifier) + def set_current_tab(self, tab): + self._set_current_tab(tab) + if not self._window_is_visible: + self.set_tab_on_reset(tab) + def _is_current_tab(self, identifier): return self._tabs_widget.is_current_tab(identifier) @@ -601,7 +634,32 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) self._set_footer_enabled(False) self._update_publish_details_widget() + + def _on_controller_reset(self): + self._first_reset, first_reset = False, self._first_reset + if self._tab_on_reset is not None: + self._tab_on_reset, new_tab = None, self._tab_on_reset + self._set_current_tab(new_tab) + return + + # On first reset change tab based on available items + # - if there is at least one instance the tab is changed to 'publish' + # otherwise 'create' is used + # - this happens only on first show + if first_reset: + if self._overview_widget.has_items(): + self._go_to_publish_tab() + else: + self._go_to_create_tab() + + elif ( + not self._is_on_create_tab() + and not self._is_on_publish_tab() ): + # If current tab is not 'Create' or 'Publish' go to 'Publish' + # - this can happen when publishing started and was reset + # at that moment it doesn't make sense to stay at publish + # specific tabs. self._go_to_publish_tab() def _on_publish_start(self): diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 046dcbdf6a..e8593a8ae2 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -285,14 +285,12 @@ class HostToolsHelper: return self._publisher_tool - def show_publisher_tool(self, parent=None, controller=None): + def show_publisher_tool(self, parent=None, controller=None, tab=None): with qt_app_context(): - dialog = self.get_publisher_tool(parent, controller) - - dialog.show() - dialog.raise_() - dialog.activateWindow() - dialog.showNormal() + window = self.get_publisher_tool(parent, controller) + if tab: + window.set_current_tab(tab) + window.make_sure_is_visible() def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. @@ -446,8 +444,8 @@ def show_publish(parent=None): _SingletonPoint.show_tool_by_name("publish", parent) -def show_publisher(parent=None): - _SingletonPoint.show_tool_by_name("publisher", parent) +def show_publisher(parent=None, **kwargs): + _SingletonPoint.show_tool_by_name("publisher", parent, **kwargs) def show_experimental_tools_dialog(parent=None): From 6af4412591b45f2001a9f01e998a36e871666ec9 Mon Sep 17 00:00:00 2001 From: clement hector Date: Tue, 22 Nov 2022 16:08:03 +0100 Subject: [PATCH 2181/2550] set creator window as parent of pop up window --- .../hosts/photoshop/plugins/create/create_legacy_image.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 2792a775e0..7672458165 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -29,7 +29,8 @@ class CreateImage(create.LegacyCreator): if len(selection) > 1: # Ask user whether to create one image or image per selected # item. 
- msg_box = QtWidgets.QMessageBox() + active_window = QtWidgets.QApplication.activeWindow() + msg_box = QtWidgets.QMessageBox(parent=active_window) msg_box.setIcon(QtWidgets.QMessageBox.Warning) msg_box.setText( "Multiple layers selected." @@ -102,7 +103,7 @@ class CreateImage(create.LegacyCreator): if group.long_name: for directory in group.long_name[::-1]: name = directory.replace(stub.PUBLISH_ICON, '').\ - replace(stub.LOADED_ICON, '') + replace(stub.LOADED_ICON, '') long_names.append(name) self.data.update({"subset": subset_name}) From 861cdadc9bbcd171da0d8793de6595db8446efce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 16:48:02 +0100 Subject: [PATCH 2182/2550] fix formatting --- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d6c6f8673c..6bc09c55a3 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -316,7 +316,7 @@ class AbstractInstanceView(QtWidgets.QWidget): """ raise NotImplementedError(( - "{} Method 'has_items' is not implemented." + "{} Method 'has_items' is not implemented." ).format(self.__class__.__name__)) def get_selected_items(self): From 3b81c7f5731dfc5c018bff11e9758fc3e5e26450 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 18:15:27 +0100 Subject: [PATCH 2183/2550] OP-4196 - better logging of file upload errors --- .../slack/plugins/publish/integrate_slack_api.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 9539d03306..0cd5ec9de8 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -188,10 +188,17 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): channel=channel, title=os.path.basename(p_file) ) - attachment_str += "\n<{}|{}>".format( - response["file"]["permalink"], - os.path.basename(p_file)) - file_ids.append(response["file"]["id"]) + if response.get("error"): + error_str = self._enrich_error( + str(response.get("error")), + channel) + self.log.warning( + "Error happened: {}".format(error_str)) + else: + attachment_str += "\n<{}|{}>".format( + response["file"]["permalink"], + os.path.basename(p_file)) + file_ids.append(response["file"]["id"]) if publish_files: message += attachment_str From 855e7d1c61c16093706b276435aed02fbb108e91 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 18:28:01 +0100 Subject: [PATCH 2184/2550] OP-4196 - fix filtering profiles Task types didn't work. 
--- .../modules/slack/plugins/publish/collect_slack_family.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 39b05937dc..27e899d59a 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -18,15 +18,15 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): profiles = None def process(self, instance): - task_name = legacy_io.Session.get("AVALON_TASK") + task_data = instance.data["anatomyData"].get("task", {}) family = self.main_family_from_instance(instance) key_values = { "families": family, - "tasks": task_name, + "tasks": task_data.get("name"), + "task_types": task_data.get("type"), "hosts": instance.data["anatomyData"]["app"], "subsets": instance.data["subset"] } - profile = filter_profiles(self.profiles, key_values, logger=self.log) From 110cd58fd168861b91cbdee366324edbe21c8917 Mon Sep 17 00:00:00 2001 From: Joseff Date: Tue, 22 Nov 2022 18:40:42 +0100 Subject: [PATCH 2185/2550] Update the UOpenPypePublishInstance to use UDataAsset --- .../Private/OpenPypePublishInstance.cpp | 184 ++++++++++------- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 54 ++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- .../Private/OpenPypePublishInstance.cpp | 185 +++++++++++------- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 61 +++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- 8 files changed, 330 insertions(+), 174 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..72dc617699 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,147 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split("/" + GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); 
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + UObject* Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + AssetDataInternal.Emplace(Asset); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } +} + +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) +{ + if (Cast(InAssetData.GetAsset()) == nullptr) + { + if (AssetDataInternal.Contains(NULL)) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + AssetDataInternal.Remove(NULL); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(NULL); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; - if 
(assetDir.StartsWith(*selfDir)) + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" + ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + + // Check for duplicated assets + for (const auto& Asset : AssetDataInternal) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } + + // Check if no UOpenPypePublishInstance type assets are included + for (const auto& Asset : AssetDataExternal) + { + if (Cast(Asset) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) -{ - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) - { - // exclude self - if (assetFName != "AssetContainer") - { - - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); - } - } -} +#endif diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h 
b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..54e24e03d7 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -5,17 +5,51 @@ UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); - + + UPROPERTY(VisibleAnywhere,BlueprintReadOnly) + TSet AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + */ UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) + TSet AssetDataExternal; + + /** + * Function for returning all the assets in the container. + * + * @return Returns all the internal and externally added assets into one set (TSet). + */ + UFUNCTION(BlueprintCallable, Category = Python) + TSet GetAllAssets() const + { + return AssetDataInternal.Union(AssetDataExternal); + }; + + private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const UObject* InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..9a89c3868b 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 
+2,148 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "AssetToolsModule.h" +#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split(GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + const TObjectPtr Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! 
Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + AssetDataInternal.Emplace(Asset); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } +} + +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) +{ + if (Cast(InAssetData.GetAsset()) == nullptr) + { + if (AssetDataInternal.Contains(nullptr)) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + AssetDataInternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr& InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; - if (assetDir.StartsWith(*selfDir)) + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" 
+ ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + + // Check for duplicated assets + for (const TObjectPtr& Asset : AssetDataInternal) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } + + // Check if no UOpenPypePublishInstance type assets are included + for (const TObjectPtr& Asset : AssetDataExternal) + { + if (Cast(Asset) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) -{ - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) - { - // exclude self - if (assetFName != "AssetContainer") - { - - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); - } - } -} +#endif diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..97df757acd 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ 
b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,21 +1,62 @@ #pragma once +#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); - + + UPROPERTY(VisibleAnywhere,BlueprintReadOnly) + TSet> AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + */ UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; + + /** + * Function for returning all the assets in the container. + * + * @return Returns all the internal and externally added assets into one set (TSet). + */ + UFUNCTION(BlueprintCallable, Category = Python) + TSet GetAllAssets() const + { + TSet> Unionized = AssetDataInternal.Union(AssetDataExternal); + + TSet ResultSet; + + for (auto& Asset : Unionized) + ResultSet.Add(Asset.Get()); + + return ResultSet; + } + private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const TObjectPtr& InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; From c3e5b7a169c670b35889a5fb0038ba4b50bf7841 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Nov 2022 21:21:42 +0100 Subject: [PATCH 2186/2550] update history.md --- HISTORY.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/HISTORY.md b/HISTORY.md index f6cc74e114..7365696f96 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,40 @@ # Changelog +## 
[3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) + +### 📖 Documentation + +- Documentation: Minor updates to dev\_requirements.md [\#4025](https://github.com/pypeclub/OpenPype/pull/4025) + +**🆕 New features** + +- Nuke: add 13.2 variant [\#4041](https://github.com/pypeclub/OpenPype/pull/4041) + +**🚀 Enhancements** + +- Publish Report Viewer: Store reports locally on machine [\#4040](https://github.com/pypeclub/OpenPype/pull/4040) +- General: More specific error in burnins script [\#4026](https://github.com/pypeclub/OpenPype/pull/4026) +- General: Extract review does not crash with old settings overrides [\#4023](https://github.com/pypeclub/OpenPype/pull/4023) +- Publisher: Convertors for legacy instances [\#4020](https://github.com/pypeclub/OpenPype/pull/4020) +- workflows: adding milestone creator and assigner [\#4018](https://github.com/pypeclub/OpenPype/pull/4018) +- Publisher: Catch creator errors [\#4015](https://github.com/pypeclub/OpenPype/pull/4015) + +**🐛 Bug fixes** + +- Hiero - effect collection fixes [\#4038](https://github.com/pypeclub/OpenPype/pull/4038) +- Nuke - loader clip correct hash conversion in path [\#4037](https://github.com/pypeclub/OpenPype/pull/4037) +- Maya: Soft fail when applying capture preset [\#4034](https://github.com/pypeclub/OpenPype/pull/4034) +- Igniter: handle missing directory [\#4032](https://github.com/pypeclub/OpenPype/pull/4032) +- StandalonePublisher: Fix thumbnail publishing [\#4029](https://github.com/pypeclub/OpenPype/pull/4029) +- Experimental Tools: Fix publisher import [\#4027](https://github.com/pypeclub/OpenPype/pull/4027) +- Houdini: fix wrong path in ASS loader [\#4016](https://github.com/pypeclub/OpenPype/pull/4016) + +**🔀 Refactored code** + +- General: Import lib functions from lib [\#4017](https://github.com/pypeclub/OpenPype/pull/4017) + ## [3.14.5](https://github.com/pypeclub/OpenPype/tree/3.14.5) (2022-10-24) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...3.14.5) From e600cd1b3d2963a2a2e26dce79e07818bb4c5d28 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Nov 2022 21:24:11 +0100 Subject: [PATCH 2187/2550] updating to 3.14.7 --- CHANGELOG.md | 59 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 57 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 707b61676f..c3cccf2d1e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,63 @@ # Changelog -## [3.14.6](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame 
output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + + +## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) ### 📖 Documentation From c63f468484b32628c6d87a35df993bf2303ecb83 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 23 Nov 2022 03:35:08 +0000 Subject: [PATCH 2188/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 0116b49f4d..a4af8b7a99 100644 --- a/openpype/version.py +++ 
b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.6" +__version__ = "3.14.7-nightly.7" From 56d5bf89212ef8f9c65bc782a11600ea62291e8a Mon Sep 17 00:00:00 2001 From: Joseff Date: Wed, 23 Nov 2022 10:58:11 +0100 Subject: [PATCH 2189/2550] Changed the member variables to use TSoftObjectPtr<> TSets now use TSoftObjectPtr<> for referencing the assets, which can reduce memory usage. --- .../Private/OpenPypePublishInstance.cpp | 25 +++--- .../OpenPype/Public/OpenPypePublishInstance.h | 84 +++++++++++++++---- .../Private/OpenPypePublishInstance.cpp | 15 ++-- .../OpenPype/Public/OpenPypePublishInstance.h | 79 ++++++++++++----- 4 files changed, 146 insertions(+), 57 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 72dc617699..ed81104c05 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -15,6 +15,9 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< FAssetRegistryModule>("AssetRegistry"); + const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked( + "PropertyEditor"); + FString Left, Right; GetPathName().Split("/" + GetName(), &Left, &Right); @@ -33,7 +36,6 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); - } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) @@ -53,9 +55,11 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (result) { - AssetDataInternal.Emplace(Asset); - UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + if (AssetDataInternal.Emplace(Asset).IsValidId()) + { + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } } } @@ -63,14 +67,14 @@ void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) { if (Cast(InAssetData.GetAsset()) == nullptr) { - if (AssetDataInternal.Contains(NULL)) + if (AssetDataInternal.Contains(nullptr)) { - AssetDataInternal.Remove(NULL); + AssetDataInternal.Remove(nullptr); REMOVE_INVALID_ENTRIES(AssetDataInternal) } else { - AssetDataExternal.Remove(NULL); + AssetDataExternal.Remove(nullptr); REMOVE_INVALID_ENTRIES(AssetDataExternal) } } @@ -121,22 +125,21 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( UOpenPypePublishInstance, AssetDataExternal)) { - // Check for duplicated assets for (const auto& Asset : AssetDataInternal) { if (AssetDataExternal.Contains(Asset)) { AssetDataExternal.Remove(Asset); - return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in 
AssetDataInternal!"); + return SendNotification( + "You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); } - } // Check if no UOpenPypePublishInstance type assets are included for (const auto& Asset : AssetDataExternal) { - if (Cast(Asset) != nullptr) + if (Cast(Asset.Get()) != nullptr) { AssetDataExternal.Remove(Asset); return SendNotification("You are not allowed to add publish instances!"); diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 54e24e03d7..0e946fb039 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -11,32 +11,80 @@ class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset public: - UPROPERTY(VisibleAnywhere,BlueprintReadOnly) - TSet AssetDataInternal; + /** + /** + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. + * + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetAllAssets() const + { + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; + + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + +private: + + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; /** * This property allows exposing the array to include other assets from any other directory than what it's currently * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! 
*/ - UPROPERTY(EditAnywhere, BlueprintReadOnly) + UPROPERTY(EditAnywhere, Category = "Assets") bool bAddExternalAssets = false; - UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) - TSet AssetDataExternal; + UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets") + TSet> AssetDataExternal; - /** - * Function for returning all the assets in the container. - * - * @return Returns all the internal and externally added assets into one set (TSet). - */ - UFUNCTION(BlueprintCallable, Category = Python) - TSet GetAllAssets() const - { - return AssetDataInternal.Union(AssetDataExternal); - }; - - -private: void OnAssetCreated(const FAssetData& InAssetData); void OnAssetRemoved(const FAssetData& InAssetData); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 9a89c3868b..c2c7e249c3 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -35,6 +35,7 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) @@ -54,9 +55,11 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (result) { - AssetDataInternal.Emplace(Asset); - UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + if (AssetDataInternal.Emplace(Asset).IsValidId()) + { + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } } } @@ -124,7 +127,7 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro { // Check for duplicated assets - for (const TObjectPtr& Asset : AssetDataInternal) + for (const auto& Asset : AssetDataInternal) { if (AssetDataExternal.Contains(Asset)) { @@ -135,9 +138,9 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro } // Check if no UOpenPypePublishInstance type assets are included - for (const TObjectPtr& Asset : AssetDataExternal) + for (const auto& Asset : AssetDataExternal) { - if (Cast(Asset) != nullptr) + if (Cast(Asset.Get()) != nullptr) { AssetDataExternal.Remove(Asset); return SendNotification("You are not allowed to add publish instances!"); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 97df757acd..2f066bd94b 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -9,41 +9,78 @@ UCLASS(Blueprintable) class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { GENERATED_UCLASS_BODY() - public: - - UPROPERTY(VisibleAnywhere,BlueprintReadOnly) - TSet> AssetDataInternal; - /** - * This property allows exposing the array to include other assets from any other directory than 
what it's currently - * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 */ - UPROPERTY(EditAnywhere, BlueprintReadOnly) - bool bAddExternalAssets = false; + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; - UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) - TSet> AssetDataExternal; + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } /** - * Function for returning all the assets in the container. + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. * - * @return Returns all the internal and externally added assets into one set (TSet). + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! */ - UFUNCTION(BlueprintCallable, Category = Python) + UFUNCTION(BlueprintCallable, BlueprintPure) TSet GetAllAssets() const { - TSet> Unionized = AssetDataInternal.Union(AssetDataExternal); - - TSet ResultSet; + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; - for (auto& Asset : Unionized) - ResultSet.Add(Asset.Get()); + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); return ResultSet; } private: + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; + + /** + * This property allows the instance to include other assets from any other directory than what it's currently + * monitoring. + * @attention assets have to be added manually! They are not automatically registered or added! 
+ */ + UPROPERTY(EditAnywhere, Category="Assets") + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; void OnAssetCreated(const FAssetData& InAssetData); void OnAssetRemoved(const FAssetData& InAssetData); @@ -52,11 +89,9 @@ private: bool IsUnderSameDir(const TObjectPtr& InAsset) const; #ifdef WITH_EDITOR - + void SendNotification(const FString& Text) const; virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; #endif - }; - From 0d88af8aec4c6112be2629865da7ffce4a7cce4d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Nov 2022 11:40:16 +0100 Subject: [PATCH 2190/2550] update latest 3.14.7 --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c3cccf2d1e..0c5f2cf8b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,9 @@ **🚀 Enhancements** +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116) - Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) - General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) - Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) @@ -25,6 +28,7 @@ **🐛 Bug fixes** - General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) - Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) - Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) - Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) @@ -54,7 +58,6 @@ - remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) - Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) - ## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) From 6725c1f6d8dc025f13bffbbb1c92a242c49b618f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Nov 2022 11:40:57 +0100 Subject: [PATCH 2191/2550] udpate history --- HISTORY.md | 59 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/HISTORY.md b/HISTORY.md index 7365696f96..04a1073c07 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,64 @@ # Changelog + +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116) +- Ftrack: Event server status 
give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name 
[\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + ## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) From 2594bc2a0efa19331b7dbccb2624be41acf1032a Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 23 Nov 2022 10:45:17 +0000 Subject: [PATCH 2192/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index a4af8b7a99..a00c7de704 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.7" +__version__ = "3.14.7-nightly.8" From 8b1b09b33825dc9ff320b6c6e49597dedcf58f7f Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 23 Nov 2022 10:58:00 +0000 Subject: [PATCH 2193/2550] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index a00c7de704..ffabcf8025 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.8" +__version__ = "3.14.7" From 5779687a2b4467195a20bd9242d2fa782f7b27cd Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 23 Nov 2022 11:59:53 +0100 Subject: [PATCH 2194/2550] Removed unused argument --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 78e1371eee..40193bac71 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -192,7 +192,7 @@ def get_openpype_executable(): return exe_list, dir_list -def get_openpype_versions(exe_list, dir_list): +def get_openpype_versions(dir_list): print(">>> Getting OpenPype executable ...") openpype_versions = [] From ec0f6986158b2eab312ade16430c0eb969aa0e68 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Nov 2022 14:52:31 +0100 Subject: [PATCH 2195/2550] celaction: fixing host --- openpype/hosts/celaction/__init__.py | 10 +++ openpype/hosts/celaction/addon.py | 24 ++++++ .../hooks/pre_celaction_registers.py | 73 ++++++++++++------- 3 files changed, 80 insertions(+), 27 deletions(-) create mode 100644 openpype/hosts/celaction/addon.py diff --git a/openpype/hosts/celaction/__init__.py b/openpype/hosts/celaction/__init__.py index e69de29bb2..8983d48d7d 100644 --- a/openpype/hosts/celaction/__init__.py +++ b/openpype/hosts/celaction/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + CELACTION_ROOT_DIR, + CelactionAddon, +) + + +__all__ = ( + "CELACTION_ROOT_DIR", + "CelactionAddon", +) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py new file mode 100644 index 0000000000..c6d30935a1 --- /dev/null +++ b/openpype/hosts/celaction/addon.py @@ -0,0 +1,24 @@ +import os +from openpype.modules import OpenPypeModule, IHostAddon + +CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class CelactionAddon(OpenPypeModule, IHostAddon): + name = "celaction" + host_name = "celaction" + + def initialize(self, 
module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".scn"] diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_registers.py index e49e66f163..84ac3d130a 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_registers.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_registers.py @@ -27,17 +27,24 @@ class CelactionPrelaunchHook(PreLaunchHook): app = "celaction_publish" # setting output parameters - path = r"Software\CelAction\CelAction2D\User Settings" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) + path_user_settings = "\\".join([ + "Software", "CelAction", "CelAction2D", "User Settings" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings) hKey = winreg.OpenKey( - winreg.HKEY_CURRENT_USER, - "Software\\CelAction\\CelAction2D\\User Settings", 0, - winreg.KEY_ALL_ACCESS) + winreg.HKEY_CURRENT_USER, path_user_settings, 0, + winreg.KEY_ALL_ACCESS + ) - # TODO: this will need to be checked more thoroughly - pype_exe = os.getenv("OPENPYPE_EXECUTABLE") + openpype_executable = os.getenv("OPENPYPE_EXECUTABLE") - winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe) + winreg.SetValueEx( + hKey, + "SubmitAppTitle", + 0, + winreg.REG_SZ, + openpype_executable + ) parameters = [ "launch", @@ -53,33 +60,45 @@ class CelactionPrelaunchHook(PreLaunchHook): "--resolutionHeight *Y*", # "--programDir \"'*PROGPATH*'\"" ] - winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters)) + winreg.SetValueEx( + hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, + " ".join(parameters) + ) # setting resolution parameters - path = r"Software\CelAction\CelAction2D\User Settings\Dialogs" - path += r"\SubmitOutput" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + path_submit = "\\".join([ + path_user_settings, "Dialogs", "SubmitOutput" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_submit, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) # making sure message dialogs don't appear when overwriting - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\OverwriteScene" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + path_overwrite_scene = "\\".join([ + path_user_settings, "Messages", "OverwriteScene" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6) winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\SceneSaved" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + # set scane as not saved + path_scene_saved = "\\".join([ + path_user_settings, 
"Messages", "SceneSaved" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_scene_saved, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1) winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) @@ -90,11 +109,11 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - pype_celaction_dir = os.path.dirname(os.path.dirname( + openpype_celaction_dir = os.path.dirname(os.path.dirname( os.path.abspath(celaction.__file__) )) template_path = os.path.join( - pype_celaction_dir, + openpype_celaction_dir, "resources", "celaction_template_scene.scn" ) From c3b7e3269544d3471c02e193acd4054b5a08eb08 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:30:49 +0100 Subject: [PATCH 2196/2550] skip turning on/off of autosync --- .../publish/integrate_hierarchy_ftrack.py | 43 +------------------ 1 file changed, 2 insertions(+), 41 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index fa7a89050c..6bae922d94 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -8,9 +8,6 @@ import pyblish.api from openpype.client import get_asset_by_id from openpype.lib import filter_profiles - -# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` -CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = "openpype" @@ -97,18 +94,9 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.task_types = self.get_all_task_types(project) self.task_statuses = self.get_task_statuses(project) - # disable termporarily ftrack project's autosyncing - if auto_sync_state: - self.auto_sync_off(project) + # import ftrack hierarchy + self.import_to_ftrack(project_name, hierarchy_context) - try: - # import ftrack hierarchy - self.import_to_ftrack(project_name, hierarchy_context) - except Exception: - raise - finally: - if auto_sync_state: - self.auto_sync_on(project) def import_to_ftrack(self, project_name, input_data, parent=None): # Prequery hiearchical custom attributes @@ -381,33 +369,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return entity - def auto_sync_off(self, project): - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False - - self.log.info("Ftrack autosync swithed off") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - def auto_sync_on(self, project): - - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True - - self.log.info("Ftrack autosync swithed on") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - def _get_active_assets(self, context): """ Returns only asset dictionary. 
Usually the last part of deep dictionary which From 635c662a8c357c5170aadfb9197081a16c27c3b2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:32:04 +0100 Subject: [PATCH 2197/2550] raise known publish error if project in ftrack was not found --- .../publish/integrate_hierarchy_ftrack.py | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 6bae922d94..8b0e4ab62d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -7,6 +7,7 @@ import pyblish.api from openpype.client import get_asset_by_id from openpype.lib import filter_profiles +from openpype.pipeline import KnownPublishError CUST_ATTR_GROUP = "openpype" @@ -16,7 +17,6 @@ CUST_ATTR_GROUP = "openpype" def get_pype_attr(session, split_hierarchical=True): custom_attributes = [] hier_custom_attributes = [] - # TODO remove deprecated "avalon" group from query cust_attrs_query = ( "select id, entity_type, object_type_id, is_hierarchical, default" " from CustomAttributeConfiguration" @@ -76,19 +76,25 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): create_task_status_profiles = [] def process(self, context): - self.context = context - if "hierarchyContext" not in self.context.data: + if "hierarchyContext" not in context.data: return hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - session = self.context.data["ftrackSession"] - project_name = self.context.data["projectEntity"]["name"] - query = 'Project where full_name is "{}"'.format(project_name) - project = session.query(query).one() - auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC] + session = context.data["ftrackSession"] + project_name = context.data["projectName"] + project = session.query( + 'select id, full_name from Project where full_name is "{}"'.format( + project_name + ) + ).first() + if not project: + raise KnownPublishError( + "Project \"{}\" was not found on ftrack.".format(project_name) + ) + self.context = context self.session = session self.ft_project = project self.task_types = self.get_all_task_types(project) From 5a0cc527325642c9871323a6aba8c263be72d194 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:34:02 +0100 Subject: [PATCH 2198/2550] implemented helper methods to query information we need from ftrack --- .../publish/integrate_hierarchy_ftrack.py | 123 ++++++++++++++++++ 1 file changed, 123 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 8b0e4ab62d..02946f813f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -103,6 +103,129 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # import ftrack hierarchy self.import_to_ftrack(project_name, hierarchy_context) + def query_ftrack_entitites(self, session, ft_project): + project_id = ft_project["id"] + entities = session.query(( + "select id, name, parent_id" + " from TypedContext where project_id is \"{}\"" + ).format(project_id)).all() + + entities_by_id = {} + entities_by_parent_id = collections.defaultdict(list) + for entity in entities: + 
entities_by_id[entity["id"]] = entity + parent_id = entity["parent_id"] + entities_by_parent_id[parent_id].append(entity) + + ftrack_hierarchy = [] + ftrack_id_queue = collections.deque() + ftrack_id_queue.append((project_id, ftrack_hierarchy)) + while ftrack_id_queue: + item = ftrack_id_queue.popleft() + ftrack_id, parent_list = item + if ftrack_id == project_id: + entity = ft_project + name = entity["full_name"] + else: + entity = entities_by_id[ftrack_id] + name = entity["name"] + + children = [] + parent_list.append({ + "name": name, + "low_name": name.lower(), + "entity": entity, + "children": children, + }) + for child in entities_by_parent_id[ftrack_id]: + ftrack_id_queue.append((child["id"], children)) + return ftrack_hierarchy + + def find_matching_ftrack_entities( + self, hierarchy_context, ftrack_hierarchy + ): + walk_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + walk_queue.append( + (entity_name, entity_data, ftrack_hierarchy) + ) + + matching_ftrack_entities = [] + while walk_queue: + item = walk_queue.popleft() + entity_name, entity_data, ft_children = item + matching_ft_child = None + for ft_child in ft_children: + if ft_child["low_name"] == entity_name.lower(): + matching_ft_child = ft_child + break + + if matching_ft_child is None: + continue + + entity = matching_ft_child["entity"] + entity_data["ft_entity"] = entity + matching_ftrack_entities.append(entity) + + hierarchy_children = entity_data.get("childs") + if not hierarchy_children: + continue + + for child_name, child_data in hierarchy_children.items(): + walk_queue.append( + (child_name, child_data, matching_ft_child["children"]) + ) + return matching_ftrack_entities + + def query_custom_attribute_values(self, session, entities, hier_attrs): + attr_ids = { + attr["id"] + for attr in hier_attrs + } + entity_ids = { + entity["id"] + for entity in entities + } + output = { + entity_id: {} + for entity_id in entity_ids + } + if not attr_ids or not entity_ids: + return {} + + joined_attr_ids = ",".join( + ['"{}"'.format(attr_id) for attr_id in attr_ids] + ) + + # Query values in chunks + chunk_size = int(5000 / len(attr_ids)) + # Make sure entity_ids is `list` for chunk selection + entity_ids = list(entity_ids) + results = [] + for idx in range(0, len(entity_ids), chunk_size): + joined_entity_ids = ",".join([ + '"{}"'.format(entity_id) + for entity_id in entity_ids[idx:idx + chunk_size] + ]) + results.extend( + session.query( + ( + "select value, entity_id, configuration_id" + " from CustomAttributeValue" + " where entity_id in ({}) and configuration_id in ({})" + ).format( + joined_entity_ids, + joined_attr_ids + ) + ).all() + ) + + for result in results: + attr_id = result["configuration_id"] + entity_id = result["entity_id"] + output[entity_id][attr_id] = result["value"] + + return output def import_to_ftrack(self, project_name, input_data, parent=None): # Prequery hiearchical custom attributes From a78ef54e56e7a0a0300fdc140ec40fb1be4111e9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:35:02 +0100 Subject: [PATCH 2199/2550] query user at the start of import method instead of requerying it again --- .../publish/integrate_hierarchy_ftrack.py | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 02946f813f..5d30b9bf7b 100644 --- 
a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -234,6 +234,16 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): attr["key"]: attr for attr in hier_custom_attributes } + # Query user entity (for comments) + user = self.session.query( + "User where username is \"{}\"".format(self.session.api_user) + ).first() + if not user: + self.log.warning( + "Was not able to query current User {}".format( + self.session.api_user + ) + ) # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] @@ -364,25 +374,18 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): six.reraise(tp, value, tb) # Create notes. - user = self.session.query( - "User where username is \"{}\"".format(self.session.api_user) - ).first() - if user: - for comment in entity_data.get("comments", []): + entity_comments = entity_data.get("comments") + if user and entity_comments: + for comment in entity_comments: entity.create_note(comment, user) - else: - self.log.warning( - "Was not able to query current User {}".format( - self.session.api_user - ) - ) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) + + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + self.session._configure_locations() + six.reraise(tp, value, tb) # Import children. if 'childs' in entity_data: From 36afd8aa7c3a9a88001c20f6c0ae8c616a2bf51a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:36:21 +0100 Subject: [PATCH 2200/2550] import to ftrack is not recursion based but queue based method --- .../publish/integrate_hierarchy_ftrack.py | 26 ++++++++++++++----- 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 5d30b9bf7b..12e89a1884 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -227,7 +227,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return output - def import_to_ftrack(self, project_name, input_data, parent=None): + def import_to_ftrack(self, project_name, hierarchy_context): # Prequery hiearchical custom attributes hier_custom_attributes = get_pype_attr(self.session)[1] hier_attr_by_key = { @@ -247,8 +247,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] - for entity_name in input_data: - entity_data = input_data[entity_name] + # Use queue of hierarchy items to process + import_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + import_queue.append( + (entity_name, entity_data, None) + ) + + while import_queue: + item = import_queue.popleft() + entity_name, entity_data, parent = item + entity_type = entity_data['entity_type'] self.log.debug(entity_data) self.log.debug(entity_type) @@ -388,9 +397,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): six.reraise(tp, value, tb) # Import children. 
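        # Rough shape assumed for each hierarchy_context entry (hypothetical
        # example, inferred only from the keys this plugin reads:
        # "entity_type", "custom_attributes", "childs"):
        #
        #     {"ep101": {"entity_type": "Episode",
        #                "custom_attributes": {"fps": 25},
        #                "childs": {"sh010": {"entity_type": "Shot"}}}}
        #
        # With the queue-based traversal, children are not handled by a
        # recursive call; each child dict goes back onto the same
        # collections.deque together with the ftrack entity resolved above,
        # along the lines of:
        #
        #     for child_name, child_data in (entity_data.get("childs") or {}).items():
        #         import_queue.append((child_name, child_data, entity))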
- if 'childs' in entity_data: - self.import_to_ftrack( - project_name, entity_data['childs'], entity) + children = entity_data.get("childs") + if not children: + continue + + for entity_name, entity_data in children.items(): + import_queue.append( + (entity_name, entity_data, entity) + ) def create_links(self, project_name, entity_data, entity): # Clear existing links. From 5de422dea2c294bcc1ff097c272180b272e89e8a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:38:04 +0100 Subject: [PATCH 2201/2550] change how custom attributes are filled on entities and how entities are created --- .../publish/integrate_hierarchy_ftrack.py | 156 +++++++++--------- 1 file changed, 82 insertions(+), 74 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 12e89a1884..046dfd9ad8 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -229,10 +229,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): def import_to_ftrack(self, project_name, hierarchy_context): # Prequery hiearchical custom attributes - hier_custom_attributes = get_pype_attr(self.session)[1] + hier_attrs = get_pype_attr(self.session)[1] hier_attr_by_key = { attr["key"]: attr - for attr in hier_custom_attributes + for attr in hier_attrs } # Query user entity (for comments) user = self.session.query( @@ -244,6 +244,19 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.session.api_user ) ) + + # Query ftrack hierarchy with parenting + ftrack_hierarchy = self.query_ftrack_entitites( + self.session, self.ft_project) + + # Fill ftrack entities to hierarchy context + # - there is no need to query entities again + matching_entities = self.find_matching_ftrack_entities( + hierarchy_context, ftrack_hierarchy) + # Query custom attribute values of each entity + custom_attr_values_by_id = self.query_custom_attribute_values( + self.session, matching_entities, hier_attrs) + # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] @@ -260,75 +273,87 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): entity_type = entity_data['entity_type'] self.log.debug(entity_data) - self.log.debug(entity_type) - if entity_type.lower() == 'project': - entity = self.ft_project - - elif self.ft_project is None or parent is None: + entity = entity_data.get("ft_entity") + if entity is None and entity_type.lower() == "project": raise AssertionError( "Collected items are not in right order!" 
) - # try to find if entity already exists - else: - query = ( - 'TypedContext where name is "{0}" and ' - 'project_id is "{1}"' - ).format(entity_name, self.ft_project["id"]) - try: - entity = self.session.query(query).one() - except Exception: - entity = None - # Create entity if not exists if entity is None: - entity = self.create_entity( - name=entity_name, - type=entity_type, - parent=parent - ) + entity = self.session.create(entity_type, { + "name": entity_name, + "parent": parent + }) + entity_data["ft_entity"] = entity + # self.log.info('entity: {}'.format(dict(entity))) # CUSTOM ATTRIBUTES - custom_attributes = entity_data.get('custom_attributes', []) - instances = [ - instance - for instance in self.context - if instance.data.get("asset") == entity["name"] - ] + custom_attributes = entity_data.get('custom_attributes', {}) + instances = [] + for instance in self.context: + instance_asset_name = instance.data.get("asset") + if ( + instance_asset_name + and instance_asset_name.lower() == entity["name"].lower() + ): + instances.append(instance) for instance in instances: instance.data["ftrackEntity"] = entity - for key in custom_attributes: + for key, cust_attr_value in custom_attributes.items(): + if cust_attr_value is None: + continue + hier_attr = hier_attr_by_key.get(key) # Use simple method if key is not hierarchical if not hier_attr: - assert (key in entity['custom_attributes']), ( - 'Missing custom attribute key: `{0}` in attrs: ' - '`{1}`'.format(key, entity['custom_attributes'].keys()) + if key not in entity["custom_attributes"]: + raise KnownPublishError(( + "Missing custom attribute in ftrack with name '{}'" + ).format(key)) + + entity['custom_attributes'][key] = cust_attr_value + continue + + attr_id = hier_attr["id"] + entity_values = custom_attr_values_by_id.get(entity["id"], {}) + # New value is defined by having id in values + # - it can be set to 'None' (ftrack allows that using API) + is_new_value = attr_id not in entity_values + attr_value = entity_values.get(attr_id) + + # Use ftrack operations method to set hiearchical + # attribute value. + # - this is because there may be non hiearchical custom + # attributes with different properties + entity_key = collections.OrderedDict(( + ("configuration_id", hier_attr["id"]), + ("entity_id", entity["id"]) + )) + op = None + if is_new_value: + op = ftrack_api.operation.CreateEntityOperation( + "CustomAttributeValue", + entity_key, + {"value": cust_attr_value} ) - entity['custom_attributes'][key] = custom_attributes[key] - - else: - # Use ftrack operations method to set hiearchical - # attribute value. 
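# --- Illustrative sketch (not part of the patch) --------------------
# The create-vs-update decision above, reduced to a standalone helper:
# a missing configuration id in the pre-queried values means no value
# row exists yet (create), a differing stored value means update, and
# an equal value needs no operation at all. Example data is made up.
def decide_operation(entity_values, attr_id, new_value):
    if attr_id not in entity_values:
        return "create"
    if entity_values[attr_id] != new_value:
        return "update"
    return "skip"

print(decide_operation({}, "attr-1", 25))               # create
print(decide_operation({"attr-1": 24}, "attr-1", 25))   # update
print(decide_operation({"attr-1": 25}, "attr-1", 25))   # skip
# ---------------------------------------------------------------------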
- # - this is because there may be non hiearchical custom - # attributes with different properties - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = hier_attr["id"] - entity_key["entity_id"] = entity["id"] - self.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", - entity_key, - "value", - ftrack_api.symbol.NOT_SET, - custom_attributes[key] - ) + elif attr_value != cust_attr_value: + op = ftrack_api.operation.UpdateEntityOperation( + "CustomAttributeValue", + entity_key, + "value", + attr_value, + cust_attr_value ) + if op is not None: + self.session.recorded_operations.push(op) + + if self.session.recorded_operations: try: self.session.commit() except Exception: @@ -342,7 +367,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): for instance in instances: task_name = instance.data.get("task") if task_name: - instances_by_task_name[task_name].append(instance) + instances_by_task_name[task_name.lower()].append(instance) tasks = entity_data.get('tasks', []) existing_tasks = [] @@ -500,21 +525,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return task - def create_entity(self, name, type, parent): - entity = self.session.create(type, { - 'name': name, - 'parent': parent - }) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - return entity - def _get_active_assets(self, context): """ Returns only asset dictionary. Usually the last part of deep dictionary which @@ -536,19 +546,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): hierarchy_context = context.data["hierarchyContext"] - active_assets = [] + active_assets = set() # filter only the active publishing insatnces for instance in context: if instance.data.get("publish") is False: continue - if not instance.data.get("asset"): - continue - - active_assets.append(instance.data["asset"]) + asset_name = instance.data.get("asset") + if asset_name: + active_assets.add(asset_name) # remove duplicity in list - active_assets = list(set(active_assets)) - self.log.debug("__ active_assets: {}".format(active_assets)) + self.log.debug("__ active_assets: {}".format(list(active_assets))) return get_pure_hierarchy_data(hierarchy_context) From 8d8c7d1149baff769ea55ba321998d77bc777cb3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:03:52 +0000 Subject: [PATCH 2202/2550] Load alembic animation in Unreal --- .../plugins/load/load_alembic_animation.py | 162 ++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 openpype/hosts/unreal/plugins/load/load_alembic_animation.py diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_animation.py b/openpype/hosts/unreal/plugins/load/load_alembic_animation.py new file mode 100644 index 0000000000..496b6056ea --- /dev/null +++ b/openpype/hosts/unreal/plugins/load/load_alembic_animation.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +"""Load Alembic Animation.""" +import os + +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) +from openpype.hosts.unreal.api import plugin +from openpype.hosts.unreal.api import pipeline as unreal_pipeline +import unreal # noqa + + +class AnimationAlembicLoader(plugin.Loader): + """Load Unreal SkeletalMesh from Alembic""" + + families = ["animation"] + label = "Import Alembic Animation" + representations = ["abc"] + icon = "cube" + color = "orange" + + 
def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + sm_settings = unreal.AbcStaticMeshSettings() + conversion_settings = unreal.AbcConversionSettings( + preset=unreal.AbcConversionPreset.CUSTOM, + flip_u=False, flip_v=False, + rotation=[0.0, 0.0, 0.0], + scale=[1.0, 1.0, -1.0]) + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + options.set_editor_property( + 'import_type', unreal.AlembicImportType.SKELETAL) + + options.static_mesh_settings = sm_settings + options.conversion_settings = conversion_settings + task.options = options + + return task + + def load(self, context, name, namespace, data): + """Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + # Create directory for asset and openpype container + root = "/Game/OpenPype/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + version = context.get('version').get('name') + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + f"{root}/{asset}/{name}_v{version:03d}", suffix="") + + container_name += suffix + + if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + unreal.EditorAssetLibrary.make_directory(asset_dir) + + task = self.get_task(self.fname, asset_dir, asset_name, False) + + asset_tools = unreal.AssetToolsHelpers.get_asset_tools() + asset_tools.import_asset_tasks([task]) + + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + return asset_content + + def update(self, container, representation): + name = container["asset_name"] + source_path = get_representation_path(representation) + destination_path = container["namespace"] + + task = self.get_task(source_path, destination_path, name, True) + + # do import fbx and 
replace existing data + asset_tools = unreal.AssetToolsHelpers.get_asset_tools() + asset_tools.import_asset_tasks([task]) + + container_path = f"{container['namespace']}/{container['objectName']}" + + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) From 27e4985d488401e944a44c7b705f36f407f985e7 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:04:54 +0000 Subject: [PATCH 2203/2550] Extract Alembic animation from Blender --- .../plugins/publish/extract_abc_animation.py | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 openpype/hosts/blender/plugins/publish/extract_abc_animation.py diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py new file mode 100644 index 0000000000..80f7a4ba58 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -0,0 +1,75 @@ +import os + +import bpy + +from openpype.pipeline import publish +from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY + + +class ExtractAnimationABC(publish.Extractor): + """Extract as ABC.""" + + label = "Extract Animation ABC" + hosts = ["blender"] + families = ["animation"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.abc" + filepath = os.path.join(stagingdir, filename) + + context = bpy.context + scene = context.scene + view_layer = context.view_layer + + # Perform extraction + self.log.info("Performing extraction..") + + plugin.deselect_all() + + selected = [] + asset_group = None + + objects = [] + for obj in instance: + if isinstance(obj, bpy.types.Collection): + for child in obj.all_objects: + objects.append(child) + for obj in objects: + children = [o for o in bpy.data.objects if o.parent == obj] + for child in children: + objects.append(child) + + for obj in objects: + obj.select_set(True) + selected.append(obj) + + context = plugin.create_blender_context( + active=asset_group, selected=selected) + + # We export the abc + bpy.ops.wm.alembic_export( + context, + filepath=filepath, + selected=True, + flatten=False + ) + + plugin.deselect_all() + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, representation) From 36effdce72a3f6d4e82ce9150f4c61425af2fa49 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:12:34 +0000 Subject: [PATCH 2204/2550] Hound fixes --- .../hosts/blender/plugins/publish/extract_abc_animation.py | 3 --- 1 file changed, 3 deletions(-) diff --git 
a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py index 80f7a4ba58..e141ccaa44 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -4,7 +4,6 @@ import bpy from openpype.pipeline import publish from openpype.hosts.blender.api import plugin -from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractAnimationABC(publish.Extractor): @@ -22,8 +21,6 @@ class ExtractAnimationABC(publish.Extractor): filepath = os.path.join(stagingdir, filename) context = bpy.context - scene = context.scene - view_layer = context.view_layer # Perform extraction self.log.info("Performing extraction..") From 484a77a3adcebf046bba1b2eac0c915abd631213 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 18:11:37 +0100 Subject: [PATCH 2205/2550] integrate ftrack api always create new session for each instance processing --- .../plugins/publish/integrate_ftrack_api.py | 56 ++++++++++++++++--- 1 file changed, 47 insertions(+), 9 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 159e60024d..1d65a53a4a 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -36,10 +36,42 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): return context = instance.context - session = context.data["ftrackSession"] + task_entity, parent_entity = self.get_instance_entities( + instance, context) + if parent_entity is None: + self.log.info(( + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." + ).format(str(instance))) + return + context_session = context.data["ftrackSession"] + ftrack_api = context.data["ftrackPythonModule"] + # Create new session for uploading + # - this was added to prevent failed uploads due to connection lost + # it is possible it won't fix the issue and potentially make it worse + # in that case new session should not be created and should not be + # closed at the end. + # - also rename variable 'context_session' -> 'session' + session = ftrack_api.Session( + context_session.server_url, + context_session.api_key, + context_session.api_user, + auto_connect_event_hub=False, + ) + try: + self.integrate_to_ftrack( + session, + instance, + task_entity, + parent_entity, + component_list + ) + finally: + session.close() + + def get_instance_entities(self, instance, context): parent_entity = None - default_asset_name = None # If instance has set "ftrackEntity" or "ftrackTask" then use them from # instance. Even if they are set to None. If they are set to None it # has a reason. (like has different context) @@ -52,15 +84,21 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): parent_entity = context.data.get("ftrackEntity") if task_entity: - default_asset_name = task_entity["name"] parent_entity = task_entity["parent"] - if parent_entity is None: - self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." 
- ).format(str(instance))) - return + return task_entity, parent_entity + + def integrate_to_ftrack( + self, + session, + instance, + task_entity, + parent_entity, + component_list + ): + default_asset_name = None + if task_entity: + default_asset_name = task_entity["name"] if not default_asset_name: default_asset_name = parent_entity["name"] From f821337a8ee552aa91d5c7d7275c3a1d82bd3c24 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 18:56:17 +0100 Subject: [PATCH 2206/2550] don't create and close existing connections --- .../plugins/publish/integrate_ftrack_api.py | 52 +++++-------------- 1 file changed, 13 insertions(+), 39 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 1d65a53a4a..231bd8e81e 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -40,25 +40,15 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): instance, context) if parent_entity is None: self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." ).format(str(instance))) return - context_session = context.data["ftrackSession"] - ftrack_api = context.data["ftrackPythonModule"] - # Create new session for uploading - # - this was added to prevent failed uploads due to connection lost - # it is possible it won't fix the issue and potentially make it worse - # in that case new session should not be created and should not be - # closed at the end. - # - also rename variable 'context_session' -> 'session' - session = ftrack_api.Session( - context_session.server_url, - context_session.api_key, - context_session.api_user, - auto_connect_event_hub=False, - ) + session = context.data["ftrackSession"] + # Reset session and reconfigure locations + session.reset() + try: self.integrate_to_ftrack( session, @@ -67,8 +57,10 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): parent_entity, component_list ) - finally: - session.close() + + except Exception: + session.reset() + raise def get_instance_entities(self, instance, context): parent_entity = None @@ -224,13 +216,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): self.log.info("Setting task status to \"{}\"".format(status_name)) task_entity["status"] = status - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _fill_component_locations(self, session, component_list): components_by_location_name = collections.defaultdict(list) @@ -533,13 +519,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session.delete(member) del(member) - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() # Reset members in memory if "members" in component_entity.keys(): @@ -655,13 +635,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ) else: # Commit changes. 
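# --- Illustrative sketch (not part of the patch) --------------------
# This commit replaces the per-call rollback wrappers with a single
# try/except that resets the session when integration fails. A
# self-contained sketch of that reset-on-failure pattern, using a
# dummy session so it runs without ftrack; the real plugin calls
# ftrack's Session.reset() instead.
import contextlib

class DummySession(object):
    def __init__(self):
        self.resets = 0

    def reset(self):
        self.resets += 1

@contextlib.contextmanager
def reset_on_failure(session):
    """Reset the session and re-raise when the wrapped block fails."""
    try:
        yield session
    except Exception:
        session.reset()
        raise

session = DummySession()
try:
    with reset_on_failure(session):
        raise RuntimeError("simulated integration failure")
except RuntimeError:
    pass
print(session.resets)  # -> 1
# ---------------------------------------------------------------------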
- try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _create_components(self, session, asset_versions_data_by_id): for item in asset_versions_data_by_id.values(): From 20ad9dc7277671ef273a26c04f7e4c909397cffd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 24 Nov 2022 15:50:06 +0800 Subject: [PATCH 2207/2550] create and publish bb geometry --- .../maya/plugins/create/create_proxy_abc.py | 6 +- .../maya/plugins/publish/extract_proxy_abc.py | 59 +++++++++++-------- 2 files changed, 34 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 57978cb4d9..1ef0529a81 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -25,17 +25,13 @@ class CreateProxyAlembic(plugin.Creator): self.data["writeColorSets"] = self.write_color_sets # Vertex colors with the geometry. self.data["writeFaceSets"] = self.write_face_sets - # Include parent groups - self.data["includeParentHierarchy"] = False # only nodes which are visible self.data["visibleOnly"] = False # Default to exporting world-space self.data["worldSpace"] = True - # Creating a single bounding box per shape selected - self.data["single"] = False # remove the bbBox after publish - #self.data["removeBoundingBoxAfterPublish"] = False + self.data["removeBoundingBoxAfterPublish"] = False # name suffix for the bounding box self.data["nameSuffix"] = "_BBox" diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index ee38979577..7a2c91535f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -20,10 +20,7 @@ class ExtractProxyAlembic(publish.Extractor): families = ["proxyAbc"] def process(self, instance): - - nodes, roots = self.get_members_and_roots(instance) - self.log.info("nodes:{}".format(nodes)) - self.log.info("roots:{}".format(roots)) + name_suffix = instance.data.get("nameSuffix") # Collect the start and end including handles start = float(instance.data.get("frameStartHandle", 1)) end = float(instance.data.get("frameEndHandle", 1)) @@ -41,6 +38,11 @@ class ExtractProxyAlembic(publish.Extractor): filename = "{name}.abc".format(**instance.data) path = os.path.join(dirname, filename) + proxy_root = self.create_proxy_geometry(instance, + name_suffix, + start, + end) + options = { "step": instance.data.get("step", 1.0), "attr": attrs, @@ -51,27 +53,17 @@ class ExtractProxyAlembic(publish.Extractor): "writeFaceSets": instance.data.get("writeFaceSets", False), "uvWrite": True, "selection": True, - "worldSpace": instance.data.get("worldSpace", True) + "worldSpace": instance.data.get("worldSpace", True), + "root": proxy_root } - if not instance.data.get("includeParentHierarchy", True): - options["root"] = roots - self.log.info("{}".format(options["root"])) - if int(cmds.about(version=True)) >= 2017: # Since Maya 2017 alembic supports multiple uv sets - write them. 
options["writeUVSets"] = True - if instance.data.get("visibleOnly", False): - nodes = list(iter_visible_nodes_in_range(nodes, - start=start, - end=end)) with suspended_refresh(): with maintained_selection(): - self.create_proxy_geometry(instance, - nodes, - start, - end) + cmds.select(proxy_root, hi=True, noExpand=True) extract_alembic(file=path, startFrame=start, endFrame=end, @@ -91,20 +83,35 @@ class ExtractProxyAlembic(publish.Extractor): instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) +#TODO: clean up the bounding box + remove_bb = instance.data.get("removeBoundingBoxAfterPublish") + if remove_bb: + for bbox in proxy_root: + bounding_box = cmds.listRelatives(bbox, parent=True) + cmds.delete(bounding_box) - def get_members_and_roots(self, instance): - return instance[:], instance.data.get("setMembers") - - def create_proxy_geometry(self, instance, node, start, end): - inst_selection = cmds.ls(node, long=True) - name_suffix = instance.data.get("nameSuffix") + def create_proxy_geometry(self, instance, name_suffix, start, end): + nodes = instance[:] + if instance.data.get("visibleOnly", False): + nodes = list(iter_visible_nodes_in_range(nodes, + start=start, + end=end)) + inst_selection = cmds.ls(nodes, long=True) + proxy_root = [] bbox = cmds.geomToBBox(inst_selection, - name=instance.name, nameSuffix=name_suffix, - single=instance.data.get("single", False), keepOriginal=True, + single=False, bakeAnimation=True, startTime=start, endTime=end) - return cmds.select(bbox, noExpand=True) + for b in bbox: + dep_node = cmds.ls(b, dag=True, shapes=False, + noIntermediate=True, sn=True) + for dep in dep_node: + if "Shape" in dep: + continue + proxy_root.append(dep) + self.log.debug("proxy_root: {}".format(proxy_root)) + return proxy_root From a0721ba2555f890873ceb9454f2ce3d00f544686 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 24 Nov 2022 16:04:09 +0800 Subject: [PATCH 2208/2550] create and publish bb geometry --- .../maya/plugins/create/create_proxy_abc.py | 1 - .../maya/plugins/publish/extract_proxy_abc.py | 2 +- .../defaults/project_settings/maya.json | 14 ++++---- .../schemas/schema_maya_create.json | 34 ++++++++++++++++--- 4 files changed, 39 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 1ef0529a81..4401f3c04f 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -14,7 +14,6 @@ class CreateProxyAlembic(plugin.Creator): write_color_sets = False write_face_sets = False - def __init__(self, *args, **kwargs): super(CreateProxyAlembic, self).__init__(*args, **kwargs) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index 7a2c91535f..f65626e915 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -83,7 +83,7 @@ class ExtractProxyAlembic(publish.Extractor): instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) -#TODO: clean up the bounding box + remove_bb = instance.data.get("removeBoundingBoxAfterPublish") if remove_bb: for bbox in proxy_root: diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index f4a9fdd9ed..78126283d0 100644 --- 
a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -149,6 +149,14 @@ "Main" ] }, + "CreateProxyAlembic": { + "enabled": true, + "write_color_sets": false, + "write_face_sets": false, + "defaults": [ + "Main" + ] + }, "CreateMultiverseUsd": { "enabled": true, "defaults": [ @@ -197,12 +205,6 @@ "Main" ] }, - "CreateProxyAlembic": { - "enabled": true, - "defaults": [ - "Main" - ] - }, "CreateRenderSetup": { "enabled": true, "defaults": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 8512736211..198b399e75 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -200,6 +200,36 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreateProxyAlembic", + "label": "Create Proxy Alembic", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, { "type": "schema_template", @@ -237,10 +267,6 @@ "key": "CreateMayaScene", "label": "Create Maya Scene" }, - { - "key": "CreateProxyAlembic", - "label": "Create Proxy Alembic" - }, { "key": "CreateRenderSetup", "label": "Create Render Setup" From 3325ee03306dcdc9a5de81f26c7c6d6e6dd16a0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 24 Nov 2022 13:18:35 +0100 Subject: [PATCH 2209/2550] :art: change how the instances are deleted and instance look --- openpype/hosts/houdini/api/plugin.py | 31 ++++++++++++++----- .../houdini/plugins/create/create_hda.py | 1 + 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index b5f79838d1..a1c10cd18b 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -43,7 +43,7 @@ class Creator(LegacyCreator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - self.nodes = list() + self.nodes = [] def process(self): """This is the base functionality to create instances in Houdini @@ -181,6 +181,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): instance_node = self.create_instance_node( subset_name, "/out", node_type) + self.customize_node_look(instance_node) + instance_data["instance_node"] = instance_node.path() instance = CreatedInstance( self.family, @@ -245,15 +247,30 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): """ for instance in instances: instance_node = hou.node(instance.data.get("instance_node")) - to_delete = None - for parameter in instance_node.spareParms(): - if parameter.name() == "id" and \ - parameter.eval() == "pyblish.avalon.instance": - to_delete = parameter - instance_node.removeSpareParmTuple(to_delete) + if instance_node: + instance_node.destroy() + self._remove_instance_from_context(instance) def get_pre_create_attr_defs(self): return [ BoolDef("use_selection", label="Use selection") ] + + @staticmethod + def customize_node_look( + node, color=hou.Color((0.616, 0.871, 0.769)), + shape="chevron_down"): + """Set custom 
look for instance nodes. + + Args: + node (hou.Node): Node to set look. + color (hou.Color, Optional): Color of the node. + shape (str, Optional): Shape name of the node. + + Returns: + None + + """ + node.setUserData('nodeshape', shape) + node.setColor(color) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 590c8f97fd..4bed83c2e9 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -70,6 +70,7 @@ class CreateHDA(plugin.HoudiniCreator): hda_node = to_hda hda_node.setName(node_name) + self.customize_node_look(hda_node) return hda_node def create(self, subset_name, instance_data, pre_create_data): From d65eadb9b76f2f9bab0806adfcc83849c09328d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 24 Nov 2022 13:23:12 +0100 Subject: [PATCH 2210/2550] :bug: fix function call in argument --- openpype/hosts/houdini/api/plugin.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index a1c10cd18b..e15e27c83f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -259,7 +259,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): @staticmethod def customize_node_look( - node, color=hou.Color((0.616, 0.871, 0.769)), + node, color=None, shape="chevron_down"): """Set custom look for instance nodes. @@ -272,5 +272,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): None """ + if not color: + color = hou.Color((0.616, 0.871, 0.769)) node.setUserData('nodeshape', shape) node.setColor(color) From b91c3f5630f9e21db6361452a3fa0b732d3a759f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:20:14 +0100 Subject: [PATCH 2211/2550] celaction rename hook --- .../hooks/{pre_celaction_registers.py => pre_celaction_setup.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/hosts/celaction/hooks/{pre_celaction_registers.py => pre_celaction_setup.py} (100%) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py similarity index 100% rename from openpype/hosts/celaction/hooks/pre_celaction_registers.py rename to openpype/hosts/celaction/hooks/pre_celaction_setup.py From 0f4c4ea6cbaac1870aa1b379d86b50ace6053582 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:21:40 +0100 Subject: [PATCH 2212/2550] default launcher --- openpype/settings/defaults/system_settings/applications.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 03499a8567..7f375a0a20 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1273,7 +1273,7 @@ "variant_label": "Local", "use_python_2": false, "executables": { - "windows": [], + "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], "darwin": [], "linux": [] }, From 609beaa75abcdf7c058b7b14deac0f6997d18b12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Thu, 24 Nov 2022 14:38:54 +0100 Subject: [PATCH 2213/2550] :bug: add all connections if file nodes are not connected using their "primary data" node, `listHistory` was ignoring them --- 
openpype/hosts/maya/plugins/publish/collect_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 157be5717b..e1adffaaaf 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -403,13 +403,13 @@ class CollectLook(pyblish.api.InstancePlugin): # history = cmds.listHistory(look_sets) history = [] for material in materials: - history.extend(cmds.listHistory(material)) + history.extend(cmds.listHistory(material, ac=True)) # handle VrayPluginNodeMtl node - see #1397 vray_plugin_nodes = cmds.ls( history, type="VRayPluginNodeMtl", long=True) for vray_node in vray_plugin_nodes: - history.extend(cmds.listHistory(vray_node)) + history.extend(cmds.listHistory(vray_node, ac=True)) # handling render attribute sets render_set_types = [ From 1b882cb7caddf5921b5e12a921f0b7c6182346b4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:47:44 +0100 Subject: [PATCH 2214/2550] celaction: recognize hook --- openpype/hosts/celaction/addon.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py index c6d30935a1..9158010011 100644 --- a/openpype/hosts/celaction/addon.py +++ b/openpype/hosts/celaction/addon.py @@ -11,6 +11,13 @@ class CelactionAddon(OpenPypeModule, IHostAddon): def initialize(self, module_settings): self.enabled = True + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(CELACTION_ROOT_DIR, "hooks") + ] + def add_implementation_envs(self, env, _app): # Set default values if are not already set via settings defaults = { From 1f38d061ce64729becdbd5c79abe1c3dfb30f2ca Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:48:15 +0100 Subject: [PATCH 2215/2550] celaciton: resolving cli to hook --- .../celaction/hooks/pre_celaction_setup.py | 31 +++++++------------ 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 84ac3d130a..cda268d669 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,8 +1,13 @@ import os import shutil +import subprocess import winreg -from openpype.lib import PreLaunchHook -from openpype.hosts.celaction import api as celaction +from openpype.lib import PreLaunchHook, get_openpype_execute_args +from openpype.hosts.celaction import api as caapi + +CELACTION_API_DIR = os.path.dirname( + os.path.abspath(caapi.__file__) +) class CelactionPrelaunchHook(PreLaunchHook): @@ -19,13 +24,6 @@ class CelactionPrelaunchHook(PreLaunchHook): if workfile_path: self.launch_context.launch_args.append(workfile_path) - project_name = self.data["project_name"] - asset_name = self.data["asset_name"] - task_name = self.data["task_name"] - - # get publish version of celaction - app = "celaction_publish" - # setting output parameters path_user_settings = "\\".join([ "Software", "CelAction", "CelAction2D", "User Settings" @@ -36,29 +34,24 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) - openpype_executable = os.getenv("OPENPYPE_EXECUTABLE") + path_to_cli = os.path.join(CELACTION_API_DIR, "cli.py") + subproces_args = get_openpype_execute_args("run", path_to_cli) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, 
winreg.REG_SZ, - openpype_executable + subprocess.list2cmdline(subproces_args) ) parameters = [ - "launch", - f"--app {app}", - f"--project {project_name}", - f"--asset {asset_name}", - f"--task {task_name}", "--currentFile \\\"\"*SCENE*\"\\\"", "--chunk 10", "--frameStart *START*", "--frameEnd *END*", "--resolutionWidth *X*", "--resolutionHeight *Y*", - # "--programDir \"'*PROGPATH*'\"" ] winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, @@ -109,9 +102,7 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - openpype_celaction_dir = os.path.dirname(os.path.dirname( - os.path.abspath(celaction.__file__) - )) + openpype_celaction_dir = os.path.dirname(CELACTION_API_DIR) template_path = os.path.join( openpype_celaction_dir, "resources", From 54bb955d8b914085582ca6061a84a6c003743982 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:57:44 +0100 Subject: [PATCH 2216/2550] celaction: improving setup hook --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index cda268d669..76a2e43452 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -42,10 +42,10 @@ class CelactionPrelaunchHook(PreLaunchHook): "SubmitAppTitle", 0, winreg.REG_SZ, - subprocess.list2cmdline(subproces_args) + subproces_args.pop(0) ) - parameters = [ + parameters = subproces_args + [ "--currentFile \\\"\"*SCENE*\"\\\"", "--chunk 10", "--frameStart *START*", From 00bbd4ebe1afbcdcd15668af7611947aac4ecc5c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:09:34 +0100 Subject: [PATCH 2217/2550] celation executable fix --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 76a2e43452..aa04c8c088 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -36,13 +36,14 @@ class CelactionPrelaunchHook(PreLaunchHook): path_to_cli = os.path.join(CELACTION_API_DIR, "cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) + openpype_executables = subproces_args.pop(0) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - subproces_args.pop(0) + openpype_executables ) parameters = subproces_args + [ From a3c55730c164d5b03444c7dcad46d6086ebf0c16 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:20:44 +0100 Subject: [PATCH 2218/2550] celaction removing unused code --- openpype/hosts/celaction/api/cli.py | 30 +++++-------------- .../celaction/hooks/pre_celaction_setup.py | 1 + 2 files changed, 9 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 88fc11cafb..7b2542ed06 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -1,6 +1,5 @@ import os import sys -import copy import argparse import pyblish.api @@ -13,10 +12,9 @@ from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins -log = Logger.get_logger("Celaction_cli_publisher") - -publish_host = "celaction" +log = 
Logger.get_logger("celaction") +PUBLISH_HOST = "celaction" HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") @@ -46,28 +44,16 @@ def cli(): celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ -def _prepare_publish_environments(): - """Prepares environments based on request data.""" - env = copy.deepcopy(os.environ) +# def _prepare_publish_environments(): +# """Prepares environments based on request data.""" +# env = copy.deepcopy(os.environ) - project_name = os.getenv("AVALON_PROJECT") - asset_name = os.getenv("AVALON_ASSET") - - env["AVALON_PROJECT"] = project_name - env["AVALON_ASSET"] = asset_name - env["AVALON_TASK"] = os.getenv("AVALON_TASK") - env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR") - env["AVALON_APP"] = f"hosts.{publish_host}" - env["AVALON_APP_NAME"] = "celaction/local" - - env["PYBLISH_HOSTS"] = publish_host - - os.environ.update(env) +# os.environ.update(env) def main(): # prepare all environments - _prepare_publish_environments() + # _prepare_publish_environments() # Registers pype's Global pyblish plugins install_openpype_plugins() @@ -76,7 +62,7 @@ def main(): log.info(f"Registering path: {PUBLISH_PATH}") pyblish.api.register_plugin_path(PUBLISH_PATH) - pyblish.api.register_host(publish_host) + pyblish.api.register_host(PUBLISH_HOST) return host_tools.show_publish() diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index aa04c8c088..5662f7f38f 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -54,6 +54,7 @@ class CelactionPrelaunchHook(PreLaunchHook): "--resolutionWidth *X*", "--resolutionHeight *Y*", ] + winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, " ".join(parameters) From 3aa578047997641b304393ec613701e83c8876fe Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:31:59 +0100 Subject: [PATCH 2219/2550] celaction: clearing old code --- openpype/hosts/celaction/api/cli.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 7b2542ed06..e00a50cbec 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -44,17 +44,7 @@ def cli(): celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ -# def _prepare_publish_environments(): -# """Prepares environments based on request data.""" -# env = copy.deepcopy(os.environ) - -# os.environ.update(env) - - def main(): - # prepare all environments - # _prepare_publish_environments() - # Registers pype's Global pyblish plugins install_openpype_plugins() From 2f80dcc950b88581bf0b7e2c4daa37fef7f80802 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:32:36 +0100 Subject: [PATCH 2220/2550] celaction: fixing scene env and adding chunk --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 5662f7f38f..b14fb12797 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -47,8 +47,8 @@ class CelactionPrelaunchHook(PreLaunchHook): ) parameters = subproces_args + [ - "--currentFile \\\"\"*SCENE*\"\\\"", - "--chunk 
10", + "--currentFile *SCENE*", + "--chunk *CHUNK*", "--frameStart *START*", "--frameEnd *END*", "--resolutionWidth *X*", From bdda909f1f319c76562d0f62881ca88b09780912 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:41:26 +0100 Subject: [PATCH 2221/2550] celaction icon --- openpype/resources/app_icons/celaction.png | Bin 0 -> 4012 bytes .../resources/app_icons/celaction_local.png | Bin 40783 -> 0 bytes .../resources/app_icons/celaction_remotel.png | Bin 36400 -> 0 bytes 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 openpype/resources/app_icons/celaction.png delete mode 100644 openpype/resources/app_icons/celaction_local.png delete mode 100644 openpype/resources/app_icons/celaction_remotel.png diff --git a/openpype/resources/app_icons/celaction.png b/openpype/resources/app_icons/celaction.png new file mode 100644 index 0000000000000000000000000000000000000000..86ac092365d009052c4e0351732ad1dd067329de GIT binary patch literal 4012 zcmc(ic|26@`^WELWY5?hQj8@eWr-Gr8Cyu|$r3_CvW#sQV@t-M>_SK(WGgL}Qg$=4 zmc}mq$dW8QWo#4EFwFPVZ~5>0@9+D^dEMuJU!VIr*SXH?T=#V+{(ZqzkWY#a03c{? zW^4rj47!B@o;}dugn!}z4ZHzn4nY9$i~hb~AU9tUg4_giOB3#|+>!_doj6Y2jX%i5 zo?sA6@WSCiMdb%G2ni4%Wb7W~hQ|gFusDAp$}6V;alYT&2p15J$9j4Zz;mGwry;uc z55>E?{+7xTuzv2~I75gRvhe=4SmQhhp{{s$;2gmIE$9E^92?~3>k3KnppH_T24SAx z*v=L2kM;Kil>H0w5aa&u^}%>Icd)^Z{qestBb=X~yFUSJez-@5*q+}q8!W-s9Xwec zQidW5H8(b}3CpKVTKXPfgm zWlL6rgkbP9oY;UH>}a%8?2!l&c!mQ9mk5PBbG1#y=Af9btUxhGyP~7T?6R&-x{ViY zsduSw>BACz=X0}BpYB~IOB=_IzhKKnvb;LZ1J)f;Ic(37G9$<{yRlS8?76x6ZlPo))XeR|A5ESt^l}N=n1Ps}$B(WJm#YpPA zwltjCPDmj#=NTew$?ooRiM6jvMVI_nYUTPS1t%^y&6~TG9qN_CCHf+{t=LaRTdhY$ z{~sH~FR9MZfwl3K;uo@e)eH;^p=?rhdB_N6bu&udFSL z1%4H9=@RtWeSsA3@+OkCeobW%oxDw^RLbvYk@lT<&fKqR(lD`dg_zKVjM;_q*F6rI z9s{lSdOADGYjawG{&K$C+qg5~p8m~|RqWTw?5%D8UuQq^%<;iAV%5gh*=O@3JfCux zGlSwIfL52@rs^@fQ}=JseX1|g7ljzWqlaQ3V{SG1@pajjOTMv4q%vbmJmyP;e< z$gjLB>|m1<1tIfMtu#7S>ZTHHX1aJ~pMx=&8>n8uMTRL5j^KAKqr$xN9GT*?jeBAY{n_kI48e7?y9&*5Cs002%UK!p*#VXr zCoCPNjT(A;o|4s|tjs=Cz(AS{tT?HiV)nCXS}i#X@d!}ScwlY~Gy551xj~UyGCT8R zm^Gv1a<0;H?E#*yMHY%`(<6y#yybT=0O$8n(~ii2@*Y~jM=#=aUYLfWM6B*-^~ck! 
zGszys?b;Njhsuzkj@A1b&Pwnc8WoNyt-hThEZ zg&ALU;4v~TOJ(X@p~y4BM17ee?@Z1yiti08wncu#5^-hNzMS>ZE=wdZ5>j=?stZ80 z<5n%}V>GQ1gw8Tq^-pe%+*4Z$Zlwr>m4X`U2LTlX0pKz6+%m3$Te8_zpXU2^jMDcr zF-Q!G;2wwp>Ma2U}V}|etV~+3+JTqUEq&YIC@4z<*s@jTEJ6%}q0ubM! zi5>u9)143$%Do?!A4pSzd`C0Ujc`Bd#0|;fzHd|B-M=yK9^6!L&k1m>67B4zLFcu} zEm$8>(TSILmJQa)#NFK}H*2b_xRZA8aR*J#x@kQK?&4Ccx5-oZyGdncvjEhi!W{Pq z?>s2H^Zb^`^9_I@Z#mIH90ojYw4peH#xLaLe-_FFuSJUGA&-E@3WOpz`j5>y|#R`%8)hmzv zo_8W|v3f8OrDI>=8+hgj;hjT-Zzko2En~$edw z@om89f1TWouKhsDfkinWsdN#;weK}JK2mGkmM=J2^KOHn!UM9}f4_dn-S$!uK-PJG zLljDmq6&}+Jr=PwltL?Rcum?d>M{(x)>iN+?`DZcwq%c9w|IJw>(6C(;EVK1;jrc( z$@d3DquVIFu}nbfLE)P_UMjeoHi+^e&o|@W94hi|q%N~0-{{E};-4-=!)m_>gsHuG z0g!?_TE;78{IgN46=hQ2hT?%H$Z?L`CFfqbN1t20b-+Em zSxgcHPA)J(jITQE7=>Y_*YkK3GpW}|1Vpm#ph*RH%rxN};rSz`7OmT9qJZ>zW}%gL zCaAb8R%hI=D3qtI?!!dGs!-;)3P3ih1yZdaNVP1ihTT~4&o38{xJ?%RWvhqWjq8To zT2U9VG77SgaGTZx+YCqJ4QVuvfB-bVz6MBtb6sWUU%BAy*>ugDiVXk zJueoI!9ks{q9J zztlFE$&;@IB57P$!#^=RaRkSg%Yu1uW5M0Lu4r-GFBZp&?eT{J51*|g-^ixW?9xnj zvp2+!k&5yH92wEFCW*%NikUsKczV5%tk=i8V`^^38wqO%BX_GKtsmV*1Rl8 znb_Ml47qF94xPyM)seQ@mVNssWd9`YoUz*Ld)P#=C{-W0YJ)STGeu4)zeqp#U>sL@TygV?w6~F z+@k^{YX_a{>&^PWVY!86)E%Vk!XlU{_w!`Y(hr&pcx0N+Spp}pP(9zQ9CA-@$-DI; z*?ao3@xqO{Cd!OFpQgMS!=Q-ox~H$m@2f>9{8p@yWx~5FWSl)a;|`yajng+9r)l#2 zLuI}ei-1`}CYh|0OmC(sSDDe+5x1>ZfTTxm;2`lju`qtSc1U<8@17S>dG&5Ncc54z zlaeho+T_A&mj$xUL(=;7BS52(-RPArTHhlh2JP``5dhb%8FCNG0@)O=`TpFE`8nA@%mWlD>?%SMqgZW~5@M~uE zdbif}o=n|}RruN20#dJ>+2fuSOKa)YD30+*KIp1Jds_+}2o`~bt)fdUMxOV|n?r?+ zTP=LEMHHdUZpf`4$h$Ll&A6i{MgH$aBc$L*J17WKtZOBJ_|SY1p|9$~DsR5+xL9@s zB3W<(9zR<#=&oKduBl?w zW-uw0mzzi08+{N zsAP>)BZR=iVr~3v`JnsxvH`am#Y%)gU)GpyyqN06G+1Fed7LNK$H7x`Zm<5VPg8Bo zqA9wq?s^Hg)sVYZz+=r&!O_+^n?sO}?KN{*eE^B@{(3p->sJrDjPQA1w%;ASOV;hu zpgW7O%dWi{cd&pjX~Q>(d9zgnPLHwKxFPt46!$d)&RxE0&}|lF`u%%FZf8 zG5eY|4ec1^wXI@0e8z7OoF=c?bVp2JS}uwMgwYNzsw7{H;4cF4vcjGerQ?c~Ma$#a zEqRv{j`o4__`oC2F)F3Swrj4!&4Z=vu52-7`RT2B_uF;OO%~zb6ZN{qGjgSEC$%<& z6{14P63NOH5RhLuF(W{fagT16g)f38$`?u&RwLJBygPQs2#YUj=2an6a09YxmWl;? z{lK8xRqp%Q(+h5XZ(+nH!)7S?{Qy8#w;VuLuU?&MBVjf0@O5IcBR{)rd*0o$L9CBu zL+%;Tq_PupF5XM#cb%6U7ENrL;bzwQewxqNG3q-;81`e|#ANx-tX}6`R|g1)^ptpE z$2vf@HWE^@hK+&0T$}gSDCC0A*(E?~Xjkz8Nv)~%}xKvG?yFzpJ~D?8Cwu3TBwg_V4s%vnxuieaw@ zx%!ujdB0pF40-irB@Ea3$v6uMjt?pfIlnu5?$n%HFgfRT>&?2b19Lq)w5gqS$BV)@ zB8;EkS}1Ps=ty^GreubZrL|37J|)0+gPi+~>xX=By`kR|+NCGs%bZ||FqiqFhtOy| zk?^`No++ySReUNhjXUOVVsBtZxf|EMLC7?4nLu9pxUApJQh*TG+#z@@~^$>2IIzA-G{}1dMDE!b zR92`Lpk%F3(RJRWjN7BXSn$zBR$y^lEu448!** z*tI=tV%8ltCFf@J>=-YrB`o!GD+VI>OZEuS1SVTdUk$C)PE7As`@rjqaJd>s zh!L08N0WyP!V@pF6+p@r_o@XVQ6Vas=S;_{fxO?gPwkhrLq)WRDwyg<-AyD1p*&3M z%ec9db8g=W_3g+Gd27A!3Yz;Z>+^1{07-A}@Fv8l)3xu?K9pkZHn|L6AoPKad4FAX zwH}Cf({t@}1V!fdXMH7CUgx98jc)gLqjo7BnJ6k{p=P{03uQZU4{s{Chr|k5Gl+6l zU+eC%0+6Nr_x$tE*R(z=3xeWj=svg1H}lE6=e8C62CJU9w?QSgH$3TLWeVkC$hp}F zx{1Z+OpOwyN`52YK~-AMz`0CUF^3w}@H_GIr^Wi9Y!6M*1`3V+-fq3l&Fati^k6j6 zyx~TdFPK;ghL7=}#|Zwc7X>($ir}QM=9U(DvV!o$(slsIbL9XetqUs*@zA{IG5HSw zp;tpbDr#iKjs4LG>%)Mfi`PI*Sm0S1`-+g;N34&Y%rLK{QJ6xP6qR*=KHuBuzy zN0EtpG<3G*8fAtZP`PZe0OQ~gYoXxo7E63fKAk0%2`qH3f104tY?;?RWb}?`GG(i0 zJS-&dx$%8y3O*J>8J-Ok4HXb!9~5nwhdLt#eUQ;3>32qvZaFTqStqu5(2<3tiV5xsP-nd$Pq}xsAzepJ1Y~U zyl)?p^&rZ}>g~n$7tH|3(xpqaANtL)tZ%sm0zK{jd-~}eK%=%Bt9aR4MaxJF+2tu!GR7kFFPwk;@l~Z9M7zx2FKe9=(l8eYA6ehAb9PDeJoyXJQR4(={wI z!p)W`?q8v&$8%eY@xWorEKN4n@MlO$V1DVlt?S3a=-rAR(GFHf#`mom_H7aiLE)ep z!x|Zz3(*b`R1XEAiKpxNUcIaN2?dZ!Ef&kU9>V7@31~b^e~*D6SJ$tDLdka>YMYhTt)?Ws@HD^F}8?OXVUJb0`btsxa&J%8@G=SIl*b^*v! 
z{=cW5O6|f*zE9Bn1^Sr}6|XWI4OeQ75(b~C3Xq*4%lyIkdZ0gu5Q z+NQ;s1T~P<7DIO}2X1I@#`y+HJ*DoE^>6sYawQ9EAZmbXyMS=@trPHgX{a$hlI<`P zbIx~PEbjidk|y1`VBAQ+C-xd+afH?6JoVFMuljK(ayPkP;@F1`9r&cNi^lzHk^;NED}bs`eBD zN)-1*Xmnv%FM`oKMyAvRq}b?aVy2jF7X_wxtzHIN7;!K{!z#FK!Z-QCaL#*DY3aVX z8cst-o7^y%m+Q<`C<9ceFh|AT$;EE0TwHXo5M(!|l_$o#V|)y?FyhiBQ9kd7(^1IB z-kPY?p)&GaFG)@+_;$EaNx3S-?&BJYiyRA&Qfv^Afwj7xQ*NY*83?ub@y8!`#~pWE z=_IHtU#TBR_~^|p=BTtAs_3Thwex*X&di8HiV7yho+HI2k!isHZM_J_=op#ESD~e! zeNMBB;)X}rTE~btMo}|vdx82f$C9oP@ee$ftZ2WYs=a?_4x8 zi;wstJ@#1LE-W?Gsf|<~9c#F;%zxFDl!+Ow(Y<~J-H02Q`QwchBHv-L@jZYlYXbRU z)Rq3LVTSNBvrs;O9+ecR;$Evl6G8MKrv1oh0LVl9_voXKR@55HJs&YqaXpu0otI<; z-CVfi2thI}`kBo5o~F+JL8JEKM^YY!agah;yosXbm)Mg}J~=|Rj~almbFtRmkE9hs zjIS>tTw}4!IGUwmY4*NGtu$8*q83J!bFkIZ zl}1Qu|3{%>3UBy%->@*-*k;;+fq~Hl5CH+wyza#8dFY{sO2Cjdm6xE)B)Qn!h_tlO z0;%lAGRwR>3Gmofh#Gd!kebQ{xt{DW14QFYaAaMv97%tJu6gEmpiqjxx~#30?qr0h zoM)y`F>?r4Rv&yLrg=6~zl0=~9jEZ#8O!#zTOiPb9z@$nw6I&HsrdnqlTN|jrJQSFf_1s1*4|cFKdK<|B?nF@$?Fb+<<@$E%X^g>{ zTgn6t7PRcQa~WGKU1WMhBi&eq!bl4w>8lB>4^reO_k`P{(g#F>SoVXBoOz=Fa(^WN zsRbevg|m0>jd#qtsPxZe3Oi1c8R1>;mVv{Hx$4yJ2}=8C(k`sx2NA4PoSlf`k>6qI zbs?^fJZqN2Y*8g=)(s$Wm1|O+0g!yO13=pv>JIz}EsrQpf!}dM&v4~Hgg5KKD_1$^ zdIBvwO36k6fZRWt0CKN~tOp+~>%K~YNJvw1G!~+VwOSg%w%KT;E6X-QjRylH@*VOh zIlVDo)~2i_8;gnu)qDj&Ci5acKBQv2J*>D8MT^{fVzF9rBQzKlD;95$=cdzpqZm~% zOTFwX>^RDN)bFnn3BHT)4#hOH4czY_8S36fPPwPSOTEuS*8TVUE>^Q<&8oVQ&@Yf| z)`QBFY~#QVRL59VqyXy1s8jNe_!sTOr1~V{wcCH28cW-ZU6RBR3Hy)>Y&y zJY?JqSq6jzCT53S|7q{}>eZ{=Bab{XLZ*)jfXE51lneRsnl)?A``-7y=l=9h|8$3q zPL(zlQV(gbxG#kNNZ{$f^n*Ep|+tIc|CDsro~7mEcp;(a0pa^#ep+l{*A z^w98%wkE@OVN>*&%?mOw6hT}d*2nbT#;;?$%*D#VLb2GSEt2l@WhSfd7J@)}xsMC= z%9_|+tnxkR1Cy&iWg>`Sf4|XFZw5f_Q~=S!ij`7%!^@_i-c-5R114tNya~nm$`=AK z&Wvo!R&7VRt&M+_QTjHVl@(eym{qi~ zhpRI1-##J)j@u<$%D^fYQ>^ADgeXw@{PrmB6>lq}i;%WcSG>XSO zv06zk$(2eFq+19SPz(U%&QS*tVQ>nlTtg1=-FM%e%KFgBB9OlSl&srlQqC9u@~P{E zC=pC|RriD#b(b;1J$_!mW7+n`?<8{ZP~n?n1VsAtMb}ujY7xGqPZ0&&z9Iw`P0M*L zvhLsU?hM+R7Tih!=&ej|&RDLiERP@qE7~D=9+mZY^^4U#3jmo=%)p9SDK!zlj=KWD-QXlt~(jf}E5Q8w?FRx)L)2W~S?2LGJT2 zBIKUlS}2xek2UBtJRd6K$L*4HXYShLgU&9FI~EIUMjfBh8%4uDED{SyMBJrctntY? 
zx4(dhZX!(M#}2!9t{J3FbmTT{FO;f17*64PzXTt%}0az`ZqNrFfrzHrar@_LpmoCD?J51yQL)3Ug+3V|CiDq187 zSC;Jy=}z)7KwxH2`b z;sT0MR2ZQg`ESUdP(0>}>^wmDX0Gtf#9Suei}V&qD8t7JKsUM-=YofG8r$)iy=2I`)!S15J_4f zrRuvomM_t&yZ!Qb@Dwc9tWR&Rm)Lr7-+lLuy#*rRuuL9)wsPf4cgG!fi~x^ z6LCY7IbwwzKV8hQ$yuM?i&>xdZElO&WE73GIU=jD*j?)*27lZ!+4#P#5H0!MO?khu zu%rT{2pESvwVSBwr;FBcv}hsyu`4TIh)V9Er~@PF>b}DA8W6K#7g3rHnv!*k1VHwk zm<=}mr8xJ@a6TuU=$;io#sb;uMr_hI>U8&&8wnm!p)kUIg+iSP-2VfjButW53q)SN zTzG%KNyDyAzdlmxA0$4p2L#z2%kVL?K%g7E9dySXRb5z7(1lJr&CMV+b861Le3zV0 zW6Z^ZAo7`5g?RIzx+tO%@pWfd((&Yzgu*z7aNAk|j{7zi+{*1cZal62Tf>Wm-M7R- zPMDGNLYQucL;2x!@Jxqn!B(YG!9n>qEC1~$s^jqj9*4;J@7kB%hpq8MXh7D*DzU(x z++1+Ww-v3AQtDgA7DsowuY%{=e9gt-5Lr83=@#qzFaeQWa@o>NP8&~z?z!il5wd%% z0p!+70Fu-klaC}1;J$jr%$z%MM%MMoq~(VSA`GGtyNoT8K7M>zBJ12Un?z&Y8hHU& zXuw*avmtvXj+xfuPTj3np0mKBidC;W1xyEHxgbWjJ4OKTq?x^L&%SJ_SA4z1T$0)L z480dy!Yz)|qN$&5QpERVBx?{RzdH7>dX0J*gj0OV%= zyZ!dt{YYCIOG`7Ld>tmH*sBCYPL?4`r5oH@@Kr^lOcVr0qteDgvp%}W3JIRmvw*Vh zxLh9Ely^^WE0k(MrB2n3TYVB&IYhLuGj`4SI0ALD1_q$OlDiL))?7t*s7VFkZu$!jGSy&XsUgq(OBtR8DE8f(8p@IzYmGv zdsKjApfIeVXI$+9no$4?GsNs~fB=X&WU9tHhr!d~EYY+7svGm2k5x<`xKajkkwxy|tnn zs~m_#!D6Q%*eRk(zF{v}T!OTHB54M;<%^a`9CWul-tM+UeE(({+vNGv0v?Zu@LM<7 z+^>=kf0}6XXV20<6DEJd-hE;<^|-nH znKHnWj#%}ZMY(!Je)Ez=H8(1NBwbZumVmz2B5;u#uF9IrICoziop4H9`fI_TMO65dyED|{ozSnk~2>+E4c zA1Cckk@t?BmUFKc@K~~YuTR+>ar~6RAQN@ppWn$=J1)~zea>`t;khRy35HSm+Y zLi918&lmR#WV52NId{S|SrjhkC+?n2k$Y-u}5{)wJdU~?{bJ7)$PSz}V?XiV`R%8-*BKp4=t~Di+8VnX@5UI%eV^WF{6%*^w1scUXb7 z`~*?dTpKq#MmhmZ7M0w%m*igF@lwGZJf+|coD%uHK>eA_2!N>7GbD;NJNO9y@V*nB zo7qcgkeu6V^`N`y#k}xM2 z%j)IRbM6mj_KF3PEu~#5uV+(O48!YUIL7g~fX8)WUEQ{!7(eRoi#H+qg_CXECO6(~ z-!b9}h*8`)DvDztBv!C(At#E*WSWYJ0yMub=Z>11b;nKX@hQ90dom+6h)lNhvz7Y9 zRHC?eqRQ%y1CZYXAfNiwr`&)4_kY*r$_jU!m!4*ezT=|>f$;+#I7vX{rAS7>J+#HS z&7^E*i)$v^Q*2IUB^3@L5;vY#eycq zI|!3Coj~;2&we(P)g1>QACqy>O*h@-o_gvjw_w479Y7-qBq4}o<60L?ut(LXXo(0I zyGf!|PLcQK$m0>RC?DADd>)yb+oe!FuIZZ5dHHWqxQ*7NfE z#Q_lrL(Z3TATu=Y+{pbRshUE!M%l1qr2D?#3nl-p-&SxptsQcEut7?0Hq~~^xjkzG ziHr9a74zc}-t$D5&yhuW=#-rAx$&S_A$Px&cMk~PJR`?lx2@n)@CTzJ-)OOhfn&ivEd2hhWrObK zb$P#;CflpYB*s^rc#LeM&BKb-b9>{>ukZ~@&Ai^o9n|mK!TkV;bGv7Wn+=!ZANI^- zT2h+l=9_Q!MHGcDZ>Il8J1r3C2QoZ>yzOmotK!#nJAlUsZL_r7cK9=1L6qn85&^0A z?CWuhXJmXYgcr6uU*VP1MvBVOv}m0yGIHg&7vg+{VGys*7?~y+E7O9{>`cd?#D@si z^&+*uDcbeazKm}gh3~gjo*(2a#S5ukN^I?vnLR!|{As@hBV~7J z$Yt_{L^bBtW(oozfsAe$wHiRK_kP;n{mxf>JmQEWDri=jia=7*+v0s9nlFu8OM{5e48BnmUtoy%Dfq$!6XNyzqF!&vO}u*X25-Fa%V(5G2v3DY0a4zF2#P2oPeB$0 zVNp;PVF7nnb{Dwy7hOwjZB=*AyL;yR-*>iqW@n~mx~tFM=bSp{G~wI2d~Tw>K{-E> z75p_vt3K~|l}k1!K})kYZrsQaT2OU#APIyY*ggEfPAx7j9<+4nQY(Sj$U}Wx{lbe> zRq;iPH;H#5?F?t$M5nXw4&}L;1!d8k(9?1kg9|WT6=0qZr zXvNDJv<}~(-BpK)6DP9kufN{ywVqok!iA-6rKqoy@6Cx&jgxR8;(8&b^qWM?c1*hN zQEJ|m(#=xP2}HK#7G$B225zb=Dk_AOmzRreGUqvNp)bCKNk8z16k7T*908#02$&w{($BrFV zSCoFIX>v*Vb!(a2U<3kzkS+^E-LLMtI=M|I3JVJvR>Z10ITQrK57-*MS&W3j_uqd% zssrs66Y+X2fj}Uni#XJMHTH2bkbaUplC86_8WuP3(8Z2U4hMla7=u5OK-`plG!};% zKzAb*vO)+1Lb|LEU1zD}Atl)WfvCDTYy>h-63F7ki(L=2&fsn&VloTK3Ly{(L9jkd zZ_uhL}(r~jt+*sYqDj_R`Kp>==JWMyZCK5?D zKKS4RwRP>GFj^qMtM~%~*|cd>%od23iRcv%Arc`F2+1Z9+s&=+x=OY_Dkvxr1fuHT zP!b3~P{;?F^X<3avPFv)c^-T>5{YFM77+)5KuCr4q3bf6<&la8PzW3B-n|>Ua~vHU zP6Bc8GX8)-ii(P&&P-2)kQG885YlOV$ZfLq(7t{9MEZoPdqYhi{D95po7HR9tYJ?) 
z@r204bhAKAR*2~&+2S5<0d*n~0)dc9>qB*2rJ`)fh!G=1AzSENNB4%CKpgbtQi#=9 z^XJd^WOAKEyz)VGbCGgH2n0eptdDe&hxGC|v5!JW=O!IFDLTkSeDfmqK_GrD1zS9W zR%H~MIaDTO&)G> zk5nfO!tz+SaA7rFn^6S95AH6$`4=isf<(MbLO0TgH=s@=LLd-QK^}BQ;Qh*vCxNQ6KjB$7~U`5vjVKG1DYabJx}rPh-#+G)DNwP;XfEc!z3npvMRHjMr$48TL;VhD3>t&R@MCSL?YDJ3B`fd7)6d55?^wBVrMj~9`BW?O zly?s02lWfSnRej70XApO99MTzEPX*t^=E7@EplZM2!v>?4_!C8xvx^8S3n-c#l_g2 z=IGYUCJ+bD;m_>Zv#VkuCUP*Dj&3C4wIa)Fc`p$Ofk1F`U732Eo8lg+ihaP7qw3VG zAP|Qs^TC|&zWc7H$>_)O@MDGe$qgYAArJ^Q^3X}d)aS!gZ6g`LVx)ry4`Mez9o9!W zEfC;O(h70wT4+=7URH*UJlq;m5s4581UFq{N)}X!1Ck2;fqGYp_3@{49iyByo}2={ znN(6z!rpo39bZdsqltsqtP;Nk7l}j&1R{@Uk_A)cx&jbCE-fv^?j%Q-W*31t_yhjH zjRg|1oyb^-#4VWaw_X;J2!W8j5tmT?=Q;n#B$u zKJ0o@y;fzpSsprhsIRq=6@fs=ZX)qxd8E>H1sw_UP`V8krt=s@(ycHfkw_!HLDQ4g z&ph)C8$5V$jT50K7pl+May+D9Ew!7-*O>A}+{P!wj=et}XXkxdTI2bd9qO_^Epq7q z(eX>W_2HK|kSr)A6?o%~H`sH}i3L|ESJH)lzv6TruiOBfuwt&DpX9q!;l{e=nrm2_ zHf?IWPED*aDR-|VLbqf^DVQv^YSl85lOIqrmQS-c=a}tgqS|NDu`<8k@A0E!72RuO z#R}66tX{1`Cl>wllJPqV3ky}()syKakF>k6loOZFH)~g~Ud@IK8DcaeZO=uM8)A!n zxFs2?V-N_0?65%OzR3lij)Xzrp@$w~Xv_y)>FCU~BS`ix3S*UmWxcY}<=uea6Q|b2 zGS!z+YiQ{ciQHENLiQ1dTYFm7g_WvRSHfqtYnQ4k1A&|Xoxn1_`3U=pndV9e(OMyX zD)stluS*~hGLJmGbVj;g*Jr;i+6Nd#iRRQ#DfeLCB_cdzvXdyz@3+3H?Pdx=B{ zgv=!oKUPPoS6u-`MMZ4Jj2YE*BoT-i#et3ll)?BbL&MM)Ki^6$Uf^s zee4$ZkZ!9~G>MYH4fRJK3FpjqM@JHYSWadM-wek-2n3~y{oXlvnWAp4EE}2F%v=J2 zkU8WLZOkK834_3ApM9oUAHy9T85raS@TAU*L?VlJ?%df2By!0mmsBlhGbwMcfVylX zrQThrL38y^LN4Jmk%ij4N^S}1!}ML{Y**Pa_WN(EufK0sJHq~Wb{*C#FNa;;IQILa z4aG;#l~i*qzODS@+Xxv9s&>p9c%~RItO9$Bmx}{IW?d>&-v2 zce7m9{FEFz$tflelhu)~LWuyzgZ066As#M*ZVcLS3~JWVqg>W==B?Mgdv|vC-FI7+ zwwsc-z1C)as$?TqY^iIp^~NzoBB{r?uaB_b>?pVXABuVKS4SL0pTnMNdkX8^sFv@~ z|N6(H?2iRU*z`m804e?_|Nj&xk|#Ty!s_QnTt4#{`^}eSY+0$h+vBw#Dupoq{hvRp zHha9?DRjb}tRqF2VbjCrIo?jgM}GiT1kZmFXa1BaY|p5SeKGYtFFrcGmCe)*-< z)Qq-LuUjl6mY(Y5(n<~X{&b8D{=C$cJZdWXW9;%ZrNUbAgBANKS*OoRUCBeo3wtZs zUHp6D``HL-!1|v&$wSBQzo}sVwW*9we2UywZY4_Oek$W0n>TM}Z@&4ad^pU}i9{e# zoajk>vugeN^@2paCWTv*C_k}~c!`9NJd{)(XG7MOMWxtPz#{P?5h(iWH^io}K@Pv$ z_>5pP#P= zl4_8H7nwx6D$8#>T(U+|ga6&a@8HX7-u+#bm&4xfcnT}MwtbIRQWE>-c6Y3YCFG3JMB*<%0Mn5?gD^SS=BzP=&cC8Q zyRl{Ms^<`gc<|i1LLr;*1uQbzIZm_s5AT0~E$6CC3;{a)J^PQbW3wZUjUGMoY&CDT&caDWPTk&~s$7*GK{2S?Ru9MPtw=d7%<((c8i+&QVqc?UUWE6EW*G^C{ z10;*ms(3;{DXid&@fUt8^d+N_tl@sC#jzx5J}B3ne!kEnkL3T#s?IxBI}{9tzKnlu z(!a=E*nKpMinihZQ6wB-->Tmolku9FV5{dYU%d|0t}*0ytb`j}API>}Rw#4?Ez~^6 z#Iu4s1P1T+qV>Cz4(|E8uznDx#8v)~04f%_oI>EX;PwSkCktTjJ*e zPL+5FRP5DW>xeA=J^SujY)-Ya+Tu!A8>EVP{BzW`r5iP~n)K)Dwc+TdE=f% z2h5yeZWXlE4VrVO6($EEuTlVAzR`K9i^beFPSMoVR69X=2bnDrCs?03OSB}Wlz)j< zy%zC{)knqHZ;%|0$G)PME%L7RjrLPFRXd#5eA_tf6HqL{FnMo;iKqD3(C_yH@lHOT zmVVsI>}P-2RkvoDf=GOI{eQyACk{6g{bQG@>6E#Cb;P~ZYKNKR zyUYw}QJPKSI6B3ufgIHYRcK6RiTZAFXFH$m5c!4^)xi0epu4|x)sg)27#^eePb~da zB@N0#rU94k^?g-8Mw~y(sP}Mo>-j!XWI5(gN$wOH$u^i~+}Y$!ke3Um_-cr8gfK?X zevz&wmD24B*o`xuI!l1+9ZD0hROZIQqBM({yGm^vIIR644}!mbcwMU7NXq+FA& z4c)0*3vsJFUP}A=oq0A_$^LM;*IDDKRC4$L*+E~B2#&UmaX>?W=S<_w2UQl5B={;o zEzikhX_M4vE%Qw?h{8)B2KJvca!HNGYw=gko~U~K!zVtGy2zH`@!BrPy}vrLvO~VQWO#1unBCZIP=n}uA!ZV z(ht0)I>EQ&AJjN#0@tyDXiXjHO5C|F33PCb?nmNyQI9^W0a|uc5j(^6o zF?bSK8Z_wAy!TomB^3IEElVO_^Ma+1c`3wxriO2~cm3<{Y@@CQb!jyGU?KUHP?9)J zNy#Zq;I_K~P7g_zKvWwoFz{T8yw4QHP6>O54Uzu+DfEv0ini%5|4s>{Eg5>H=J1Vt z5o%IR=u^(8qIBra-V-t2s-BSiIc8DR`jn7GJE#J|PeuI`g@Z&O=pQh@tc3h#kqtB~ zk-8N{xH7MWoeT*OFN<-%^MZg;#UbmLw(h`mo9Bb=*X`^!nLkjr4y57uGkJq*d(Z8h zzH%;$cnGr>Xznl&6BOgpK#p;B)62)D5mFaChDOW|dQ7gZtxdie$-X7$G#BzZAz*B! 
zPEZr0`~bZh=3C4OFWl2hA>Fb>N7*YO5-lbC{FzUmBJW5RF%fQB=Q~zkAcq9B)IkGT zs`&du02HgC_-IWif-rI})`fNWVc3b(quR}*GW&Kust#`kec=)9%Q<83s;-K`Q*^s0 zUo86gBz(o=J)?;F*zr=UQWJAS6c}+2Q5nJW67+dBmQ^aV+WuGk$sWF?t5;R~Ft)@g zr-U$xZ%dL6+WQ49`{Y@a_xpj(k4WxS{wND~8t(N%d3v>$96lrgjYX9K3KMv87?5#T z=;z^UGu9IMC4h9(Zo}%t;+M09fl&Snp3NotT-VHefny0C$ung&XjMsjJT2$*3-RH5 zWDUJCQtQR4!%g@9+fUDmyXBh@u>V`8?Kt9b(#iCbFXBv6(|Z0bC`F2sYE{dj>zG+1PLHJCfo-}y zMFl^e@jw6f)41t8gNbx-PQm<lvsGJF#S7RMgTFmCN}zrH+V!}PaCTjunc@U63^LM6VM z)87>g*h_`;HRJD63MLa#)?I%N@d3M~=EO5&1Kx|nWlzT4^X5ByyEkII6s74u)86WS zww|vlCW2G_cC^xWrqg`?HvPm44ax5eBA9edY7=*bjRQzqPkp_P0F{t1+zhp4z2RQ( zwpfl+&70hoCi-Mo?Rhjv z`|YU;QW1lu_yl{7YWwjE$(>4^6+5 z?y1k>5&ZD-`a*-P_ajryAs7QyDKaKvV>HqA|KpkOU^A{yg2-657?u(dZHNFPK-5mR z@U!nO0Xk>?WoUvd+`p%TPVLj#9B!OC`1Q(1z|dhZyxf(PgWB zboxNLjzL+|DT7hDxy*B9(?A|=6i2q1bc2!x%nI&Y$8qbU82F>H?uM}~2cZe^p{B0M zz~I|U8`GP^967^zUVUx zlcV<*K6gpF!IeD5?W{grFR@;m8?XIdXje6IPtpM`s&=NGhy0<*xa0g10KX44xNjwE zt8@;~=Do_^QVQe7t@Bjo%h0kV^s>)yAG&pm2MXCufDbjr$mbI`)=)-bQ^vh3{YgA} zk<{D8z3Q9tz~SFN#LY)mN?nPQ$mR;Uhd2yW$6~NC*ivUV^)B{!zQ}NVg~(#siLv3* zL59bCFH68bXRT8-{Qg#lT0OiPV7Z;9{~*_LRatJHZ!y{V#$Q4$g=e3~%K#cd1XBkc zFUfsHp1Gads_|h3Zw|$R82`HdtYQK`p2>w6v;q864k9` z90K7y)Koc5t?bJa?<)y5hZyVjJPf#4Nx47C<{2)=m2!aJx308Fr%! z-XoLfaf8&JoM+L>8{d&>3;7k47y6DJ1y8@8B5_$9d#8wSMQ{Np#a!Q8BMXiaO5UNn z42cqy#Wk|6zQh^s3X~&k`ES|GUk0MlnTnNWLpkI)5~}b?dF+ZOb@}yV%azVptx|VrL77jj&<`yoc_fuoo)$3d z0W{23CrOG&p86QRup zqiX8EShv2Zr>T~RoQdT7IcjETfVND9(L3e?CSq~N*rK$F1+4~Gyfzb$fzD)sLz2#l zQa0gt=^q~Y-_<3SEuUifA8>J5#HC@BHJsJF*py{8{}yg}A->S+V|Z$`4?m@m1`R9@ zpIstAA7}O6aGic4x|B#PY$zA=rt*rr1&!h$3NjhYQvl?cDIoK-{f1Kc^{==AA0>6c z6xpL^mN-bbL^cOSt+ZF1d(1Xs{rD*G!kOc`Iq5NVxb&jd^lKZ>CEKqVoQhWK!=DB8 z+SC#wcAp`HMN-V*BYM7C-|^dw9cZS}(7ZL&hglu(pU8UFG0RYV;MIyUvMu!2@K7)q z;f@eYa@_5D3SM3aV=cv1WX6!sp%JFeP!t@e6fNeQAFFimYW1FQ6mc9D!{|JvjLW}Z zm5~6RPge1_fM`c@@nK#m4vdgQxbWZ`qb{#5|B^#(CreFWi4n?(Km;Vss6Se9ER$V$ zl8$Ts$9*{{RvqTeAEpk&bh2@*cfSFW8iFKV4W@Hl`x$YbJwy^INktqlr=8V}bu3XZ ziSyVWtMF#p$oyy%qF?ki3g;sx(<^q=fbPZW~jF!;@0O_2HUI<>dPU z-=DNQ6$9MWE>6|d*2ANtl@~NyFd|Y=#PJH_xtbl@f>Tc%+xBKr8AZBa1upFB-tgb5 zv_%9r7OjHFoqZwlE=z)!^#K&rhBf|kfUgX!()0a< zZU+TwbY7@0E{OXE)y!4f(Fe~0&@k+u%);lIq;dhp(2y!cRcB~ZNhyL-$3lx=$#5+X z>AW6FG42^=1UFB8=yhl%!y@jnOY;t#GIOyvi+&Ws8~4co4i5aW=t4}h_PZ_k7vRbe z`E)a?u|#Z=J-RjB6c0V=wqi!u++nm$?UAb%&SmVQVM-yQ*PV(P@4>PE%2i5O7HC~e z5kL3mvgLut!KizSZWNpxh1geox2MG$<-|8C`5y{rAXxP7>EO5xQhZw|l!rB?&TmTpOQQ@Vl zJMNat;pHEJ%989#s&{-B8UqV)W#mT{hP4>xG%(5{{F5DQa#pIn*YQ1))-$Qd;(H#!Ai+D^nwgY=JZTK~i?}*d(33$A7BJ ze|1Ds_zXt)?qzX-vdzN0ze+VtHf3&;kjomrHSJ`}>Zhrk10K7#vXu@0h)r~I;QblY4?_wm+Q<_#k z&vy3tpsQG4pFKwVT)6TfRR{|n{1-Z-4^++IhIV2-!(POIJ^(eg4^q+7!QhbALgh~O z;3i6zoCoaTxn?c?_cdU`D)R0YUf-4eJ?>IQ8+exqu5(T%8rt+xV)L_-)YfK?KLRXg z@S;;Bj3}y3LOE|TLK^()yVoOp{^48weAPTh~97j0+df zWF3NzQlMcW>xl~|zDM-@CI{z0{cS45O$ZB$9lm$}(QQTIAfj-Ja^70{Ne%1VTjA2+ z18UM@9DQ9?7c&YBI-sf^MRD;mu-jV(K7;_odeH{3e-ZM+|Eb9d&~axx5N^3(T}_Ns zxIVc-gU{{i5PyH^J@`ju6Rrd`cB76Y6zv{ec4`fN1LAu+5Nmg3Qna(JU>1jvD)j+a} z%s98-Y1fM`A}k_r#uWA0fhe|xo~akd&5b^^Y+ckZH^;(a7Z4?KJvM_L$Q!{IYi0D z7pWAyv#l#o4<^dGWzxihd29GHF_%08yM-kU4NW-^88_qYmp3w?0Ck!z3npx(nz#ww zT&se}?-R{Z@sPlb7bcY}!t~7igl;Zd6#&jN?&7;WyK~=GeiqdquQcR^mp+93ty)PbCA71C|lDIIt)H7#R-~i4Ot#X8yI|59>QAUdCZe*+mh+?wGS0 zjdsMmL>3Z6fyED%G~szRLz%L}+mSSmtFhQGYCO8dkRXsWM*uNtz)~IWvbW`36Cn^B z5~PlBpNjaRdsaccvV6<1#=h74c)g7UcF@M zwkzs%nF|h@iDR{cy3tqlr8f`UxO*%`GzySW~^M z#5x!e`zcKhz&o~)mlt|2Snl$MEoH@+WIOMdDvXFNH$nANlebHKx{4#eN8z=?>@pxm z&+n=0l>pZ0k4*aYx7m593{&TE3InRKrK}nj12Wpi3DPl8E z4+GRbKBmgk7JIwhGhOS5$~F1+Hv63205-p;;M{&A-0&IIfMxFE2GT_SWA#6m@tcA3 
z^VAo&C!4K(v(<$Hv$5cy=~UylKCir-odIr*bdp**R9YI^bN&|^vrt0S48xOARU@zi~y`SXfD4fe0$GV;}3EI35EJb?_pv+h9T@?_{& zJMF~J9X}XkIlnas7es4*m*Fww*U8jGnVDH5g%yyCgRBW}TV7p_6f@U6kH05P5EmIq zrfXop^%A0a8aF!noXY3M1q!2Nr8AhBYCi$>%aFP7NyAtOv|3LpYqwVFCcz&^vq ziN5T$L>_8sKD{TYG3b_?$8G~UF6#JR)s1sxmMOb)Ja`^%7G>@BmK4~|vfiX$Zs8xh zg06}=jTu~J;C&l|HH7qy{2I6h<2T|#S1nX)?9Mo*mFEj!Up4EPM6ph%)P5O$l61b& zZxy^`uSj^q;r=Cl3N~TVARs{l98#=6!AwL1xv4uRV4`@L~rCu0KOqa?46E|oP8`X4SC52647 diff --git a/openpype/resources/app_icons/celaction_remotel.png b/openpype/resources/app_icons/celaction_remotel.png deleted file mode 100644 index 320e8173eb90df65e0363f8784e4f034da0c76b8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 36400 zcmaHSWmKHa((T}byOUuU+&wr98Z=0P26sYm_rcxWHAaA7L4(TxAy^VTxRanEINXQ# zob!G6$31t}VvW$%UEN(>wQE;Dv06``;N#HXfIuL86=ek-5C|pZ;SY=n{DSCuJPN!~ zdny`uK6A14^s#Wa0m)jsSlU2UoGk2YbZjiF{oICaBtalF7Ki5so(3A~;#MwBJQfda zczm5)0cj9OQpVTS!phOc6KZK==in>_J8bWQK^?56VEV!ud>XFuHuet6{_Zx<{GUFz z@^`cnvxdn?LnVF1fexH(JT0KUPOqFj#C@e;|MV*ke17|aV5XlOy@ zUEFP;!aR?;t@wq6p(0{D{Er2N#KgFu0(|_UynLd({DR#4kHv-f#03PP|9-)M(cGv{yi4(O$ui3>FFxY%j@Ig!{Z~!*$bD8%}%gM&s^FOlI|0^r} zUu7T6!O0aES;5BL!P~}K$=$^X`p>k*9sav60{^wYf6H3`cU=Vkt1K^|4DZ9{{$HE@ zpPv8=dieN1rVD)eALF-i1}xqkFm0J3B3uwCu0%ya_POu;uR^R@Cf&?)$0!tQ(6E>$ zR1nW$kRjGO3WZuLjEkBWLwlxi?+T?oYCxFxgK`F)LP|lnLMQ`)*ys?{PZ?(3efDAb z_1J#ym+HOBy~=~V1COHZ&PPiJHI*;6i|$p<3oPz0kTNjFhgW%-(Bhl!p@$c!|G&Te zKmYlT)jmQaRJ&`k)}k*fi9SntaP8QD3lcwQRqD%?CdorZC@oSoM4xJtS)Igu%6}1-D%}B^Pu88nv(7?17V2O z8b61hF*78+zO(tej+&wxPpfs~#aeuO-K=H2VmwcqedAFtZhPH?9IhfRPm6iupDDE` z^cUS&WtZE<%Ti+!ho2`GJ;r|z8hd;t$EUw5iax);JG?LJy1zY~T)z7&B64?<-K7w6 zcUoNTv*gF>zhB*4Mrjtv?|=GLq+Ddlm(lq~jl(bxcMyBF1vhS`gi|`rzJh4%HP`Z*arc$MJdw78oK~FPz0h zvP`@Uv2gH+Z%|PkGPVVO2b)Qs_Tuuk90N)!j!R!x(T+==CeVs32R5L+#xwVmgd`8H zTPS^YE*7fc7Xb0oDAaZj2=PGn2dxTytH_h_Z^kX8s@nD{Cv*6%$|lR7wDZ6G{?0yA zSC1MswlX4s6~q(|%E}eAQxdctjVbcqV;3n3JQ?0OM$-FGs3Q)Nkn+blkmT$2WBBJR zGob?kV2r&d69&vEu~O@G`S&4k%S7Snm}y}7?144#2|ov=I^&)@L(ueRt&*j-2FsJB zX{2tB`X(8ZCD*!{;ay2cF@)OZ8atX9YH9@48HYI+t&DmMGU|Ai4YDwDAEQ~-e$Y1A z;C7iI0Kj`YuPAC>6=Ai37n9@$$(@XLo4%z6t zNd572JApG151x>RL!~Slw$RW0I3CG!b>C7rZ59$FLLj;VqTD%nvbt1lMWiWq_WfCT zz(Fe#f6)SR%K{NWt|$2}LOx#K3)TofOq0c)i(2MYngM0>-aU82sKBHWt&@<@<5_dG zY;2q7GOK^vt`FMF0c{=aCGT%e?~qAqRT(d=sE^XV=B1*N7$V3in z@GJ}Do}?q2e-rFz8PDAmlO%m*PdU!Fx)g%+z1n?S4s3=%+wmZ6;g7PI(Hg-uHw`Y{ zyO$sOAC4AQ(%D+{@3p!RNxk(BMG8ZvdBEt)tDELI=H0(=m)BV#^Hv@qtR#>7Nhi-{ z%;-&getJ!QnlYvA_&e`-Qioh~QVngf#xJ23ZP;cK7fcX+Gibh6-<4dH&GVZ+&zUH?(u*$E60T{b2*L{NB}F7j)z5 zE9tFn`b6SaZQn%6^B8PX38fF?TDF^UoHbN5s!_%i$yVz#`tvUZfvu)rDXHX+_YX4t z*s>NknZf?7?2*qRf1adYJ+`8R(yo#S6O+byCsZcMie&;WsXtNTz`UV--|Zya*O=-{j@>I^ z8o>4mTqoq$KOBo{E%nWd?pCB5WB()7mLxl#9!?-WWXK%%GzrOHSLL^;_ZHY%k5H@M z7pwcnv+*-ztczYVCgty+y=~*S?2k#pA(a?3hlC@pBPmUH1~Dj&SK?$?JiR45Nxu|e zIeh{~6UG(oq)z_nr!gai2oK$~kef`JthO?b1;Oydg;_jJCGWl=`a%8g+PizAWcHqE zzwH0yd<#U614Q_9(8L`pO1WNu_w^89+kgGmZ*<>3_km?^Z9hlLpPjMTncgz`BG-s)P@gF*h5Q-T#r6f z8piC#pfG^W1O8%hf+8B5OG4>ea?jXbk8!D)M48)N%@g9sOu^h)DjyeKih%elDJ0om zU3BS99;7?2~hF6TQ;%s z5NRd(5hbDHd}$#x+TpWzLa-Pn)P35}t`$EP`zK{`qSfV}Bo!1iqH@a;{*XVWy2e!m zG@`AhfbFqLA7kq?&pga&ISDv7E(&jpgWJ$Z{Ii%8!Z}BV?*+1SU2;A7K>Q<^gq7EW<$|Q;ETD`gI_(B|jD0Bzy4WDog$nkm$kseb00VCNvz6Zhw%Fb};fKCOwrpd+z6GBuS8DGkeRT0i0rdNl;R_ zZztda6|v|8YdPgaW{f>@`Rc4@`E_;VQtBzy`A?r>v{cfkq6}C2X7_tbamYK{L@LA|L0>xh zj+v0f*PE!2mg&Q3`dSl2GEvn zEtrSn)47`a<@_^|5&FLD7oS-kyDz#S70(ytpZtt@^ebTS(Cn4Y5U4`GC%oxZ&F;nB z^&x%G)wklL$BVA=q%7We5=xPvvf}lTUJ~tGqip1TbC{d(Bsf28zB@dtK3(6);q^{! 
zAAyI}#!#wrC>1sJ4|!{$H5yZ^MB%dH`E{FZN>r~Ma5Mt)JKf z5uKWUP1XaYY)Y;w~!6lm@65g-=d@H^ax*FCDwj{N( zem?I1<*b$dcIU}ta>&hk42f)&#qQo?6gG+c1$3i=aHf77iSWL`bE zK4Sfw<`d&RSBxo+(1c|tS@PGly6Lib9-7ZysXYAuJ+;# zo8#L?p=H2Y7ufw;a?FCxbeH_u9)k7t@ph5Gt}RGM#8*9zz9ummcQBDwWGobmS>;vf z0?@%%Od_#CYP*6}%Zy)^l&@lu)5mr$BJA14qmjCJlFIMf@Q*-z=AGAvUCVoh)YM)s z8WeW~2)XEShi`9MP4*=<$$A&kgq^KHI%4H(yd4qSW&jo}e}IQc)20DV`BaSMn5Zf= zwHMPvMvm3wLFetuUZxQdJax0m!S#hZmpP}E7ZxupADLZ_=r6LBUq0+T8Uz(jafW&M zMd0y3xl73P0`Y!6{2K6JO;2tl;`{F#)VUkr^OmE;$1P|_C7P%FNIKG>vkzv%M|=W% zD4Z2LK3x|Byy8%5;kdv1lTB4Y)`+jK20t4TNoAH&BrV1JRLPQ?Agr2ydN;)K3%rEx znzzv#oZlaIZOe24K)8_E{jlTw=$eV!D`b=puXm+YRv#n%c1_j2H%U)K-@sE_ws6@(654;vd;Ts>Zqee~fla9g6a zyKiXd9r1{n!@i=-@&jf7u%Gz6Mf4-zRyi1H!YHc4Fv78)&}YCZSjTg2+aj+F@C1{z z$Zk|(BJ9=%ISS>h5Boh>^a(6a3x>WmuhnVa+%d8+e??V6BlI6#Ys(8+4m#&XhbSp2 zeXWHzlEcPtk)IVR?;f0U(HMP)zP#h3hZzyASLY*;;Fm^0`!AO3Rrm$=-1yk#9F}tK z0c>yP*^PpAUixPQvA8w?i3F`374;c)N`)cYoz)PD;mh~0>0;>IhZs(# zJ1}8r1khOaz3%Y4M}Rkf-q=r~Haj9Hu!nX8Mp2Q-$DgaaOLPhB!%7YHw!?o#k%f~Z z@dfS3;8oqgt4(U^Gh!7My?Ld+s#{p6Fl?M1P1FA#o3#LXZ<##giI&oXk}OPeL?t_l z)=Zui{aWtfxZL{S(JL_hXa=Y3Y}26;mHrxzWi1+damWG{aAogA+w8TB_LIC5@pmbX z{?Y{N>s{1SQVQxg2oF8eDv{z{9lL4{xo)<|USG#Mg(gWpWvD);sv(qHmW9_MaUv(q{V3nLjoqa>b%+ivMcSMuPgbLZNkZ3)RU^-ub9iV7z7fR8 zn^U2CT*zk;w*N{r`11lpfFV^16Dl>z)%eEqYsrzWk**1l0mHJCCr$QWIJesTiR2Ar zFtW+i^>thbc1f1_oSbW|dQ*$##4(&zYpVrVm>*Xf3_)C$9Q>zza$EYm)jm~`Vs#S| z%OwEXWq8xA>4iPMYgY4e7C6+@nFM8TL!%C-=k1Dv${3SG<(Ar*QdNNQyQ#|8qaFS3 zHhDwlP-0R6_X$>zGuI(Ap0b1)FcjbZvfJGJ6#Gg5c&iHA*7}@1i_N#{#(q`bScr*bvg#9v*0U+*HiUub1s|}_A zWZJSAtFRvkoC$FT42H62_3-*Z8poC(*wCBcFC#QdPt>~~Yh`;W@-#gt^tL)hsBEoy zWMqB`RAGo$YkT9$K8`}TMh~okl45m=w+Qu{$)bRR-L^s&PX}0of5?)5M?pX!BlxK{ zlbg^FuaJvAdL_muNsZ(d*4EEsQX(^6&TKAkN?nG$>x#K64jvD=`@7)Mj#~4!(3^(0 zIBtm(<=&fj+)#$>rVV^#o-HN;r!+7RU5T!G8wU6T3#9!S^)6{yk3tu+q&Ox}1|NP= z(`4%Fa(B6%Od$1VVkg(GDAu`UHEJ^U*ZiC8NEgla+O_=C(+i7zAT?<#a17IZP~B-L zW3e2s8pwQWm=E4*m;1JQ!TRvvZLZ~AG%|D}iPp&Njp6jf>%@cv4iqAK^?GS~|02bQ zp34<#;)Oi(HN!lRQ;-cqJm1>Vydu zUYg)P4qu4rJ9yfLO9Sk=7l~wLTr`~Np3ZkA^yH*KVUQZ}UBPb9N}6Ww!wKkNTA#vW zf4pnm#&)SjboFCeTSLZvdHdk= zgZ9Id>5GewYkYVmN=iVk)}kH~67cH%a&4jx5Fnx+m5=h@nC!k{`hFOVh#23|h^X$C z|0jxgo(!{|{^`89JccC3ly|-7L+5hHijjPqH`sOarAq6+8(5HWBZ#_=2Z&-VRs+p zKdhxmvz#1H6|6gzA|UHr<~UYUVI7&gy&DzFB?Mx3Er0jik8T_#*%}6zNCJ&TWJD1q z1~I+z9}jUVR2cT7>>&lTlmdmwk<^xIE}I!(2^~N>{fWBM)j%#BpxA!cDZ%@MMPL%d zgv6g`2%fba&Ea>B-U5gc8)HXn>+kl^<7G}9ItDfLIV8bZroL%z_5xEq(0u*PD7{Qe zA_R-h72XJJAnjYgHqIi*VfWvX6^D}()eA(mM+)Fb7J(@cQ{>-d3gK1ZLUp%8ZaCG5t8F&!VYH}uZu>2G*jWS zR#tj!OOU5C!c??ay%z2Gjpe(l@GKBifUNJ4ylU=FPk@TlhtWL4lY6b|Dy`+S&1^H% zyAH3LB`@31QI+NG@zf^}hss1?qsEP3S5khEtEZ5uk`utoS`ZjIfKJAT`?);MYTjX3 zSp!p#UrEJ)`Jw%CFfIM*y}(^osQAaUuu7?=K1yT-lZ#F!FnRmcGQE_4quc9;9?@b6 z=@hvL>6Lw-(jEdC@5qZ~HETPckL?TmDZV7LP>-D?i36&_;H%w9Ib3e*MSEa~XJm1a zJ2}dFgv6J@ddoz9fe&=uc+i2zq}NO<(U9Bmb@9QLSMrX4Zxz@VF=9X;&sDFyXUC1O zL)HR#JI@~v{d1GF?^h*w(W&2UE>X;oj}X=FGRYSVjDTzt@7S8IJO8C8-oYZl1wXi{ zmz*f4n=ks5sGEM%KKO2dPaGbwVh7~Z<<1kCy{N2GG%V;bQw@d8uZC_^Tq<86Xq;iV zUO-8%LUvARpn=_BDJ=L_@v@WZGHdEdt1O)c5pbi*ZhQ*p_B*jhh zFF)9iWUzY-O@*SQ_)bk@N>G@1YGXkk%=y+f@%e5qjv)(Z2w*%Fs-#GA#rY_PL@xXO z-Z(YQ4T_EdHtK?4#8s&LRo=P#CjTYpMm`z}6kF$@^MNd-2e{(IeZbbQ2$bcZB&fOG z+{BCbDu4G5e%8vhS%d}_${>&cN$Id?3oLN+v@(r(MX0j2)|~Hpq8Hxf{-dt3r_M|X ztad*xR{3wdw+nrWaM6L)pgtoVUP@|AjP!qIVN|LZf@Wc#UOK_uyh!rMd)8tS81{*2 z4MoV-@lzAWw<09LnKA&Swog|_xX^F?&t_b0Qi1&{cSn?ZCsMvfI?&?2ruo=4sr7=b ze$MrQJ#AVJ4tSfvD$ryV)Ge{q8^`Mzd9m8@p~h$ z@4<}lNm9sSW6A17`V^-Hg+?5O4<#OL@>-IP~zqV0(6T@`*h zk5yi=Q1bfsLj+2xc)|2LjgSxhTl%t;Q0jn`UFffsyI-~PLVhRK>kz=XhGHas4v?!v 
zO(z5bzq7sJq9f=;1%>FJ3WFuL%M0OroB{YB4-9DWFLqEYZ(zrT#Rpw|lOO-o9I8^- zILNS$QCL52mO20@GFi{ihP=o*b7-Te4!)SB?5&gpk(w8Ms7|oOxwn5D35yCY{-uw@ z{W5pa7wD}Z&h#4hpIKA8T4(bP*UO!Np5)QS*F<9fE*{!?`SHVEF!Wd@&E1RL!CK$g zE%LXd2ylerwh|0?mY^BV*$f}O##3OoayQ%mkcbnqSc(7zd+001e79_)Yx?Zb)UDZt zd{=VCXIHh#`lv25!LA?IcN(?9SpSN5HC8)!5X0%HEPV$M3^{Qt*PoWU4MZWI#$lFx zFany-iIJjeVsISGODgkACE4^R-$QU6vR8geAT^yi_@NjLN`F(bfKzV=E6>M<^0&gc zUbu94HTGcAet(Jcrgdp#5mA*tGo00>bT~5=w-mxyZiEXbKWVFikudqP zQ~B;CSKpNfd7$s`zb3t{RrA zO?oMb%eQgcc%fv#wAFXyMai|*szh$9%>LK(XXv&05;}oW(R<64QK>; zU%IaLK1}AjbnX^)0X+3VP^??v3L1S31pwLX0Y;<7e-p^x_uvsZUjY}peG{eh6{?L> z$rI#c{hds$qB2UyVSpS1o(0679e|fA4y#I%5o~av7@EA91DNSD0J7=f5HNf@1ajdQ zf8NU64x%A+u{XTV|t^!=P~@9k0_U$kAWl;~~_tKluAw&2OU5)#}2 zz(72JqdIp0(lCL-)Yrl%d6o_%=if}s)g<*PsrEm;X1sE3K-d)V6y8@#+pTx?9ztMbO28kWV$m89 zh-M)x`DWIOF+;}ER{F-)daFNw`ENGprK}(Aq@G=VUdpO`PXe5GZxu)^ zL&jW5Uqpp&2|C?`!W`HG7(tBa65yX!(50XytOhJ8E206~6MAj|DcYPIHcd0hmEJT8Mhed3)Q>PoBw3UrYkg~&LYjc8G#e#<)4 zh_3%#_D_T0tkT1=lg!uN;7v-uyrEyX`KZu^CS2&<<3Mx~RJ3M{EUY{>MA^AdilX8) znR(q-o}&_gYkgK^DX6AX?ynhS!)sBAYIzO-V7vp6nLc6hGcCye5fn*Z`ma>*o&mW! z<-pUm^zjcER5sseT|TdJJw@F?L=p{?0%`k;#xe2zmPBnfu;F=A_Rz(fHov{hR=m*Y z&&7Iq6>^}tCSt&!(7h~iG+7`(L@erokO2SB$*N^7Sn_zCja>6v#zTxm0qcP`@`DXK zW#M48usQ&fw+fk~&-_zi0ZJ`AV`bqdH^n79)40p8y_f>bUfFp*pX>2Wzo5sq;%!YP_9qNg_#6?MBKM6sGUEcPhO^P4Rofa1 z<#_?ASJ!L>Y1?Y&dvmNifBxduR*yQxdz+1WNpE1OHidkuvLg9T-BiwW?Y#v)bN%(SGun{CUEq#9 z{mbhpXhL!DmBV(^L-J@`Ys`Wzi=xcc{;Qtg-9PvyaxbpSP((me*pvZv!*DZ=lTi=vzW~zHhEJ@_P306QZkHhRYBiTR*B4FH_fs> z%H?lGSo2Ve$Loy0&R|O0_UHMqMtF&jZAv-!R{MJ5LZp|aGUXLuo{uEt5jM1$ET7aH#OLtd%<2vpfDV;bTR zRBPoNdrPF+7L8)~D@ro_i@fuSDuS6f*Ya0M1|1pGrF$rP_sd$!e*IhNNego1?(nQHGDX&qz31^fo{q!;&X4s4<_-AzmXvna+A!A8Ld* zIQ_~3cch$+j@eW}F=;<#0QRt-vgOrHE$%_SHwyYz3*gWSl4&)}G+_yd_BdFgB#8&u z@R5zno8-0noQUPDj-x=q;bhEAYl=XeU&fm7z;jnbt`kxp|cO8 zNzSM%wh10U>+>e|#5B1?#@c@lm}1xu-9Qs!{6G_yL9R6gjPie@pnODhfmu~zRoYeX zt;m4?L1TevfnrpaySlg&-0g_`!hpE^e5(wN5|sw&M)mM$gRUcrnXflE zAs54Pe&ERL!vWPRo}r_Qi*c_nsJY$rX74DyOpa+7o_tc4agEc9>Mn;f_AU48XJf8( z*;>Tp!`&O_s5O4`-o#Z`;ZV`zV%|&Fv~YwS6ky?EQPNNlsjoW5fl1MS|Ij!4y_Gf4 z1Z3V~6Y2O_8m`yM?x=Ol3=}v#Y9rZDt~Z*+2Q^quc&nplM@vz(&O2L;q}{8W93ZUi zB!m@E!oxVGPUA(FH25PPyfuz8gI&ycQ(Hw$utvzNK4(vK+43-!|7*^+Gor{Zc{e7Fr?gc`Op1cQXLy& zdUOkyIiX$|)57K6FgU+B@Sx%6r7ITl1aW=s*l*^Xs7p_IfhQgPv4!Q9`hvOy6@EeQ zXuj4TD}PW}-`|>!f=$FoxME_W-JU6jk;9*(1}_woZ7OMDkarEW5^mhbSUE>=gyRxo zU-!;?y?>keICRFwiu(qI9!fcoX&RLijt+nPiv{uOIVSuDL>=rxflDpw@54+(IM4jv z4(v3UC7);h)95W&h(ZWVunt1cb5QVo`#X-p)w<5H`n@_-ed{R}>9?eKsJsWIh!&)0 zTE$YkM2eA5RD>adOD zi$Zq?ZADGg=EBQ>UE~BrgW!DbE<_pbPT*Ij9zh5uxlGv+(e@+xYqS1M4%urVM@BOG z$w)b2vzRpED}tHfOOJ#4TR`GX@^4Hvyajw2lXvd68i zw?Alx0AbIogG6E4y|3eDu}nP(@9at+1{qSrB>T@beud8B7zHd=k$;TH{_#7 zT~Ktz^~}9dFa=7AI5#0WdFMSY@8^lk8CiR5x1u91H-2>NYS4;R6(Mnxd&q?eJi=p3 zyso|lQpIP?Xkgt^2cic1Oaf%-?ld0if*Ga30C1zNfi&(WiPueGD$`9j1^dy{{=5L-4Z< zHUZ(>5Cj@g&!-q%Tx!OXNCkY5O>xlQSnoNT>?XlWEz}qnRRQ#ElflAFa;=YfAR97T zt_FoHA3FG7G7$TIrzJmB(twVOELV~$H*yHW{aM6Z+b17c++PCu`EtcrLPT^e%=5(d zhDO29R4C$}FDh#Fq9L#&T)XQPsY;yUDB_^97p2vE1MwL0Rp`MzI3GCpyNewWxU1pa zjEM%uX^U#;Tu@9RXunvt9*=MhsRi4QNQN52k>ivx^+naVXv^H%9=*L56dh1C8PVo^ z13W`Gg3m@Xok$O|J$i0*oOxKr-9Ye(nKhs@_;%k+BL3+eq1H+e^q{XBH&iiOag=$6 z&|EE1d`sK^8Z?w4FQ=`j-J_anE)y4fAebJ_`-f&d_-KKR;)orao9@{`NU9%>lX#8V zQcrn-2kguEAr*iT6s!_{ns)-XG)n&TlHD`O!Cl5ei4Evlc=NcZ7F1{zuQ#qHGt94K z8DvgzETXPSLfXJ=5v625LcCzK{?K7J1ux+TQf{li~=56yIz1H zxuUy9a`8i@7H*Y=?_^`iADHZ@z`5na{ua&NvJ{2p;_sxl4Lx>Ap2&6dWmX5N+zpEy zd3H*Z{mEii*}irNr6ZVcuyQKbrvPgjb4~+niD*@idt8ha^NyrPO11>8Q%6^hi#S43 za*zV0`%U2mbsSL_>oE^Py*(qoU;d=6bG{USZa)lz>u!GH=R)WY2c8;2*B6To*3($j 
z{LsYPkXYReJvOQToJjp;3W ztuJl9)};_9*v}fhQ&A#B{VpTYXd^TO+!L_?!!J68DCHTECglMf8Lb3CMPNf?wHA3Q z15~0cv^f#$ST|E}kdxQNX@O60zY}_jE4MMJTdS2 zCc_Xg^>?C?WHwD;{LszV_`A4w#sF5Eks>kCw=NRZs9d@6$!@kLha)9gU?erY5foQ6y+7|^Gap9-m416h$N6Wbz? zNa?@)c7C87#W8MAnKSWj0p=u3t)pCkG?`^4?T&6yP~0jZ%bw$bx~D8}v{kSYV-p^! z7G1B4q^+F>&wI^FaW6z;i}<5L_q|Er#soR47@X=;-&Gz*Vb>91OY5yh*>m9gZvy1z zFY>Tu`>=Ub8-|8QW&0=DG0*u7cLwQU17u*1*I37{ie!73udbqYN1~5U ze|n#}Y$LS-OIJ*GBt%qK4!E;g#Z~!bU@nG|%wj@O`T^dd&k&~tV;>rvea*~X$A(eg z{z#4LoWhh)T3^UH^!3o8<=0YIXofbT4)RIv*`Lnu9VRF2f~$hZC`|pvqMda;7iQBUax!uan57m}~3=BzPyPDN~pX;iD$kG9=(}p%Bew_Z*hZ zW|%bIU9M522SMuHaL+l030pqim<;{Kl)jC?SLtEy0~7VUW_&FksBc56ws3t8Ij64? z6{xZPS$>8FL6p_7O*lKyEKk`TXsO3$T72r2KtHzOrI~XvXgSL6Upg7(EFRdzN&Ts$ zPA~vgL+4C8{BnP4{OnzUipjYi%8=0`l#~m(!G1#85{yg?qRC$dDJ>s`5azaJ7#Gxd zm`ojNd6zc&+y|^6g~e>JijWMuNZ}`6H$oy0^B%^82ocxlBM3zdA>2mLVnQ9;NNo`f zC7KVO&b+0qKi^2-06(%g`;*7~VLPo#0-bBVwSt(@e7F&YB0J*#-0&M$hAg%VXs|_+ z_^(0E5z`GgrKB4Nu_1)x>rFZJ}yz?a)iH_D& zOGj5U>wudVniTFoGex)ZZY)xpo)N&kwbhU^L4xL$&eQe+mE$(7AlPpQX5X=(?1WrB zrcK6)B5>oEH)6s20CION(a9W)QVe&*D+ynYt#ehUFv(SY7yj$2--J6X(<`|bLH?xR z?KejVAr>@@ne%g)FrH=^7a6-ClkBUEm%dablyfne>WN+J$udi!YT7fpKY6&eWPgVn zHtd|Mp@D4!C73qfzNTxOZUmQ{Ng##@j?3iUQA!*+aK~0)2Y#)mHF#0! zFx0mO*lQ#te9T%2obpH>BVo}s2`~tl^8_oL(Wbi+me^d#!E_ zv3Zu8@!t953>8W?#&oR+P(a@`Iv6DgEj~dfE~TCMFEy%&^3qnn@J!iuH^V1#n3V}v z*OlM^)$si=Vnv}ndWLu@&1fNs^YbGTeYRNIBW6{63IS~DHKXx;h7{CX811SZ7x&)? z-26k{h8)2#$) zX%g){G!~SHTGIsHH@sf1o}LeiK4cvEe> zZ%m?}tEP-}1r`qw#O&z3@p~L29O?7IN$G<>V=0TVWZPS?Tt4r$3oGi?_t6i#`(vp$JUoV}b{XBN(##c+ry)9Z%ucgdBJ962no=j4bw-?-GI~D^ z5lySZ)*aDO_z%W&_?_}Kz=Zfvn2>d|Q*P&2$3~~z1ivR&$>?DTHR+N3Hy)CUeId8P zDW)MaE(1%uLN7fvjX72!5k&0rVC8Xm8v3_gZrO?UclqddR>f(*ZV_h00^rY2?aa=^ zOJmhsd|O2jGBncXuk*15hvhdtk3fAf_YG*G>Zr*X5>H}a1N^J3RV5Rh z%}?7dwR?|z*rVs|bo7Z4gchH;LAHh~^@?Up@= zXHF@HZSBalN?CMfOlf3dN}+bJSJiVmm&h;oIN50nL95zMv5+-LL$wNzZ&xP zVV!u@)MeEqlyKY~yWXx5XImOo3&o7#-nG^YyYAw7bWR1qP9`~X6KKB|Qr1js=JjP+ z0#OUPxz}8(UcaPjwv-!wR3$6zCgoHxyRV9j3u=qQ{-;;!3>vMeCxTTz#)b$m^RGIE zp+8p1U}A$;jv#QYWDZsC^PBlL z8b=v!qZ;h$a03!%v5~eYBkW^mlYDOs^GeeAi8_b_{^+|AOOk5*uTNErMsh~6RZlMU%7vl@OeWqnwm)*+0qs+JA z*&`9Dn}~qS`*i-H05;K#`tzQqu_CA(UH*QOUm!`nwDd=x$|m^ZO~T7B)otOMEv9K${X=UArO?lYUso$iA#UC02QmKu! zQz|(WqDqzN(G(08*9K)7p#hi-jG(knn_ZMD?FyCN^j@cVo?mp(TQkUs<)9vBlZ~~V z{W#dI0&!r9V6?l{9CVIdY2>+I@+@u^QH>Bq?bBTi&X>^>@}WH;_#l)E1c^h=Eu{EE zIDN&(j*4FmgSl#uTurA!&omH!nl}>x@)FgXjZoJB*|{jUl{yj1ESHcpJv1bZ2#2NL z6t6$Cke!P8##LImSt5X^SG5?4;r$#1dR1n@v1oC_k{(5iyCwHc-f_SsaUoQw*%>qX z1&Y2Qer1AQs#leB`}H5c)Jo+6lf_i~BRR0Oz|)x9l)|Z+cAky26$x~XRaSOEhK?C* zJ=l=ZEe|yOi>&*fNiAYu`vmZ#C6|}jXKzx9XdC_hz5tE%i=kLSCVPpCeu9Xs@wZF; zGCVex^r$ueaH3k7fQZsK#?I811IX;&dqVRaB~TzKdp+qdhA49x>~Nm%>d4U!Bm!5! 
z-VJ0j7%}2P@b!GSg!b@Zz8WiZzfD*bYT?6)E;IOfN?_RIwt7;k0eP^+z?dfm2aYOa zNg;I7D{oe1)GIO#CBcX^Uuw<;l*1Qo5RdqrB;!ev;--jdZt$i6OBL+|AGAuhFG)#e zOU9m;76jBPXizy--cvCVP89Np#Ki2wO^kT?|9Nr?!<^I@z5?wj)8&uF#l)5CbN67RpfG7KY#qq9n%FV5Po~;fBo=uaOi!SJK|G0qrOa#Hj zXr%B%BrRVF6rI2&HO%#V#4IH$wkb_-y6tJ{I}E&eP%oB7mWCUcY?8?HHcys}KtOX8 z3zr0B!C34}g>hKI#pfy8%R!4ZEN8BLRqa$4#l-TB<1dq}=U>TEp<^0cbfR8?Et(*V zvb})ixUPnmxx%R+&zpNo>b1vZC^?=1Tt;S9RiBCX(Qv7=Q~PB^dH>q1lE41|Yx#(t z9;_*84HVmH*Yq!NAbmI%!LAL&vmYfC&2&GRzIhCK-**ObRsJj=_yw;qFoMYI&)n6= z06QNeD`M`fQ&FH4TYi68d>>l_Jjul1GguN9tqB67=5<4z86UYSEx_VG9t5ASxjb1= zw2$3l$#3ygIo3m;)i%t;81-@Ghx!_<_!(O$-IEoxIH4;X{T2+Vs9&q>q1mVIfNg&@yOJ7k zem?>N3MGr8jY`Ili}$a4nLf5owlot-sy7ydck#bhP3-wt@x^cDX}P+n2$>TI7yUN~ zKTMH7W^?z}j}iqJVIPX{Mk;aKbm&OdCqWwtp1X5-_jrs~aBxtdJCjbaObW&3MsU6G zOHxvu5w9}Oz*zLKudc!n0BZ+&UVp0;z>uHjl~X{kV?q@&ahRM5{V^>5Bo(0va)%c# zaU1ykQQR)N@X*iFobvyk?xpw!G7>@dg-I}_H)>I{Dy(foRas22r^iW2UKoL|NZx6a zG&Cv)u_XRc4M!_TFqr0!98DfSj8p}oyPl{Qs74Br#v&}*WwyACLN|`p{G0>Ba`L$J zKun%i`)zYM^9A34!us#|XE+v?Yh$R!N#c9&=C7>Iaq6guN5L<0OkbdLX{{NH*P5J3 zorc8wzqI!>h!ZD9>k`Q0<>BDwni};FD+FO@ksAr3f|*e*hFpj^-{Yoyy2ihgpLLaH z)1hmhS>tQrZ)IJ>jB@3yAFQSNk+d@(la#Z3HQl@w)8WeNR!&;J|2%}m3qHNQ*Kc9eE7N6vJBk)JJ z+U4u7$SVrVr*;0E^HHM+A=Xa5@cL|G30Yj>yxd=pZk%WgA_zX*(iomCpT|Frt5b;#frL-Pf zMC92Qv%9H6`dlH)uMrm|LtYo>Nrx7~Jt3YAR;JpvId;a5Ya-cG{C?bbRl?77Wr*$@ z3eajetGtZ5>XRlL>uOwm)+m&GHHsuJgYq5Yk3xBr-i@bUNB;>lpMhxs+()to7M%lk1>DKm9Lt!v9@B3$7?Oa^keWgI_QutHyTLNp6$t^ycNrH*L zuWMoVcE`0b6N5SC?(c_xrMzs^X=n3nIJIxn;eWRG*W(ozy^z5oByXZ((x_#Qltn#f zy`^;h;uh{HU;Wtz_rx@MnOEYw8vLp*`p&GvILH42KNyh3jhuYvob^>LLc84xIO^OI>>O8_ zTD?{uwipNYPo=GYwsrHw-Np_3>N{Jn$XoQ zAz>ZAtk+=SleK|-9>+44w%4esr&9;p)##hrWc?vLK^iNh>?)y( zh);dO%^ubEgGZ9vcoW#O!=K%G*ff6TjB|iHK{vonO3t_MRu?FJNF*bed)ws1&{h`M z&-(IgrO;@=R&a&I^+nbL$~vrRk7pc)9%kA}+B%~8(^&3OsV)c(lwBaBPnz`RRnEdrm2M`NfVKl`j(bJV~0Bxl-bsvI*ewQPGFu9VEx6jwpA)!5qF`C9|hedUt_?Y5pia!!~Vv)N|Mo_{?^gt)L5Wg3tW+}a|hvp6f&4E zA_L;8>B;orMtTu5nei^kp^k~hVKsyvpdYrGv_(yx@%s6@T$`9kyf1px9N6- z1jC546yth@FNId^<+DC;Cem&XSuXOWp+*YTh2}>fo7@OLJcWJT>L}i1$W2hOEarq$Q%CQ6@)Yn;S)%!JKcGAQx{yK|afrCB>7iAqpb21=tu zlsdrXHV}m0a$e!HT4to>_nd(R>7avj(uxv_LnTE4(bAERXqdHzzzs%-4+5QAo@P2B z5fLK|p4=&wZKe2g>O^$OEG@;&A#CJ(M%YLC1m%O}ifkbLmrL~fbpe&G`jZzly0zKWcL>#L?Ut1XAFUN&t%)w6y79-?$!N)N3L=v0XAUW|mRiM~(EetWXFGbdZpGD0Zu=y^Ot;^x_Gy!4 zZ~JA6FRHRfRPO@_-4|^$e^$N_`B)Y%_5D6fCI-Ks@-kw9!;xyKc+6?DYLG7cpB{nL z8i(7T4CX5e7YAW5&VaZaVt9t^ULYTM=1sUpMO14DY9{K!n`08%amu=;3{@}7+|XeJ zy1R4)?o{XadsjW(A52xT$5sygTqAE0656tySumXvSrII0r#rqA1-j7_$BhFbMJ_3Us2*JQ=IoOv55Nw=-RA%D#o@K~VupMh6za4$T!;F=VdPMLjbibuFau&@$vF$nxDTP;Z zcuo2Bd-tr#Lk!qO8vH_t7uarBL`63`VMKwTevWWRKP%%YV#J5{`6=8GBN%}gVmCJk zw-!=jRdv0QmR6l5I}_ua=)%r!VnnW?IHoIiIdl)H!$*V_2Pn+4Q#;d-b2{ErIiC^f z@1x15ybnn~C%GCWC)tg23&Q$9orpTYxPb>Uqt2AvL5M&9=FZ~!FH_bNMrZ>!U+UIO z;gY^cng8r8s-s>%y>IL(4SqhKZ{5#525GaB=_WkazSsZ@kUzkGVKqy!=Z_nysPzf`W)K$`w003frU#LFc%RU zcXqicp_F+DzPE-pDYZCMU4$BwY#fm`EFtkNMPEs3yM(9OLlq)5noxr(J2m0AnNGGg z+>mK!tW)}9-lxkvZLoYN&~z(z-Lf;7 zLeLyBx5b3P>=T%28tNQI9$70M8-6*_n#;F^JJ2T(t7sRI!TFpK_6fz`uzxL1_~RAk zt`r>-8i@PY5iZm)!eM3u6=S2!7S-7hyh6R+IVhY8JA)4`&&&;PxaW47d>CUkl=}w~ z)8_blX?>V(rs6`=7tzX~_)Vuf z_rE6>h08#)vsbVkcuw?eJVB^pL24g*&5vw3W&|o)$v{D9LK$h<)8;+WJ$%-$CGj{E z(I(ODBV8-XwRo{68llB_Ay{j5sTL!-#CymN-+w!L8Pg%zL#%@KC*G5HOw>-WT@6bl zXDBs$D7c=Yx$H@dMbKyHrwB)7YegM23T`w!hI1oJWsoerp^K!+BFQAE-BIHN;SzHN z;n*Xp{b`?TEOKz^0tsWJod+?8ws`b%xl>fOgh2n z%9=)ht0IiGeS{Ys+?vwZBdp-I#=56Ig%=&+nuvs9tJwQ`(Abc%J9^xBHEdOKhx(ZM zFHyB3Q3w`es9@>v_J&a>E67XGUk@)VQ2a^0(wyEbQQ1o0`kN!s_@?#o`uMiB*=6g} 
zDkwwcPkcyS{AEx{0F`Z3n0pWX^zE*2c^U2e)WLtY4u(6La;PB8{SA~+i-ZbxQ{3n;Zts0*=B4FZ`-s;JWGGC@{?GOfcQ(I`+)Sc z0Iq0MbjjGGAtN{8Y{u1B)6~UH`@b+L1EslYGa`jen_=t&!yMbZ?~-K7$&A`ljoY(E z;>{iWY3+u8mDvoCW?q`-%UX3Jq7wJU$XSt~-uw1H#2B0|MCprEt}KXpcf|9?JF?t! z--fD(<^TJZsKx?oxyH&VFnEq1TPTM!*?A03QJ$k zD+kY60u@)YOlfmqKtfB@CM3(vZPI+yXZvz)fY2;;;RI6~26k&*1O zX_yxnDc~s|dN_c>c8<6bNyJw9Pt`K7R7{M7<|9|#gk&0rxdSh7e^ns7m!V!q3ng4; zWc>9nZU4$k0RPhBsy5Ul>LQeX;_iLa@iL)yF@_@^%>LFI7&;O(DEIOQ?is_vbZt9+?JbyWx}O zPD{>PS!b$d);?QbK0O^dDuy)^>6Zmpl_rKX`j2J5Juj0~;=_N`Ws!%B&*je&7D3C} zY5O*^$u$-o{~k3QQo!jcB)CLPpB9hMF0h3;I_Mh9pB`-6-jPvM7W=0Ps~%4p6K%N0 z5#kktIlHr)3Ks9m-wmH1yUHVf?*5zWWZT3F)^BDkj0rJXtHnTw__Ate<@aTdd@HD= z`JJR-l5-Ur10K=?C49u}ja>6m>Qv-)%7GN7k!SFpaLRD0cRep#(rIj)S{@q0_guxF zy51^sEO%;4H>~KOMCy&N*}RTCB96pdkL{b%voI&p1}xae5GRRFuc(&#qoi+cpors{ z0q_rHkL%E|^+f~IT>KsVSNUDyXcP}fGnp~6NP!JVKNrG5;p+A1H6IIRGa_HTL$fq) zslpII^aa_>CY>E#eg5mp4d7Kfws!Az+2l(D*DABh9@-v_WN zx>9;(6ZZ+@X#89%6Mv|n4KIseDn*s)8P9x{xg+} z3)aH_fxEh!!kCj6iI^5D-yWgkM^TI;0`1-YCV-A@{FaGRPz!OGSno=n7{TEEWWa#{ zJ@Q)AHKP2nXm3JO-8sgjXK{T_gSg7C6<&d$y*D64P=OcmzKSL-%NDiWx^{zGxnc3e z*?Bv!e^Oc%TJkI2Bc3r28oy8^NI@Ve2a}CCY698@+Vt@{TLEpJEvoR-nMI;~rYRAu zgN0a5M>GM>4tHPEvxx$sSPO3kRwx8TLw&VHT#Kgguwu-6Bnti5z#4*W+Bc7SWH1!t zlMp8cZS`#uE2LTwjRLl>JUw;W-t2RnJ%Z8<&&rm(gWIpm{9C~Va-9^k#5$^1v2EkG z%v|S;h+PPBHXoIAwGL}f5!J{%bk)uMY+lZyULK)BRi{!f8ye zBI}myoRx|jfhi6#4@{@W1S>>);5h`|Ak0r*L@Y`JB2L;5o=MY z?7qR$LE!DHEZ=XOuVqoS2D`B^&(F$RM+=gWqAoeFGWI zUc(90(`Zt}=msQNKI#t3qNv0qL6jm^v~$7!LosBI>S!BxhrP>u+6~fN62|sXloY#R zzswl9Ysw*L1MXjjbk-X;3AzWDLGXy(2(}95w`6DTe@SV(5*wku!G*!A!A^Q4E$)(j z8Ao7`;}}F=>N2ayMLd3E?v1^K*lMd8+T|tZ)7*m<+TaMx$qF>TQcSiJPgTSCR2vmv0+w*^Hq{njxHLsz5*#B zeT{1>_r0wAtuO_;9|uRuID7YdzlSeONZdsXGdt~X#!{jePlMnRUMDGq%@#Y4%DF{; zY`8n8>vhl=KFdNwICOi{-qi4^_PHGjJ98vg9t|H`&6t7ZzvUDfvzP#XK! zSUMKx2SXdg6fY9BI4)|A!`r54Jxf|c-Ho@7m6QF zo0ld!+{{SgzB`|(Lha1%SOaho#r4> z6y`Nh;{WpRh4684FdlKm3h=|@X9h!Ce?Mj5nMTlTGRBt?Xxzn`W7d(bwEu_bd{lYLhX(T_)_3lsy! 
zC&tR*C;mfm>c8J~31Jk3A1k;%zPK+9(5DQ7%u%QPdBrUWvG4fZP=byVx+KaV8BkI?AK!*!>`(uxVXt_H;o{) zB~Du~-6YLd>yi&?fn|F9NMvr*o$3MjS#_pgYMABkA*;GAwG5ioBxrwM`S*I&r;(a_ zr8^hZq=)^cT-BE*S0lSYu_cS{gMCdg&$qgN6`RoS1|F$@5_cVwSP|YV@W`S^`z(Hk z(DRD7n8z;u3rPQ|GO4RVQGZ`KI%T@{GxsmEy9BP5Ltpyrp@`>1@lYZS;C}KY-WNAt_iRS=?~#QL#Oj_mXeK=I*POpiA`onPB(tRg&9@Zln|MBiP+)=*6{_4Ec-|c4 z5^b~5jHrB0;2}^^cf%_@8$IfnLgL^(RQ3-^oR+%Hd3DQd9rUj$Z^^ zSSKdU&<|&t5s3)6YiC6fJ>g8BuX=nm@We64O%QMq z<9}}9Dl?cvB?rl!!qv=6m9zQ7;njp<+sXUvP9O1-yK_TRY}3LW`+^TkSVc?0;rjHQ zoscgeh(L6ek9No=nQxbkF@C~X_;#RC@O1~vabHjb%j;$h!NIbBmH4(a`Ps#(jD#5j zNv-b_Z&D4RGfKA~_a>CCSTUtq)Ht&y*j{&HF+8MO%5p472!X)WAm|*CW_y*l{I5T3 zZ@9TY;dk}X2G-i>9B9hK@g&>41GBSR?n|Y$YOKu86OK?P2(YD z{GJysnho zuxJ&kE|gbmK`0dV8(+4iF}VwK18U?p=D{_cI{gSjb2MhZssjbN?Ke;It)FyS?4TDO zFKp;RVRAyXTttgiKfzwBRJ69s$&Ji&JhdPkTUhc({8(?(V2Kt4YP!4e`BOYiV{_K$ zx2o{yxlwJ#1Sv-}%rr^Hh|OQYBiB`YNvz-e zTv+vD%A}npsg(V(@M-$nWGYRC0JL-pcr(pg;JTMT#{>h(Yac6A=j0|_83Ho4NTq%y?83TPO zYzk~R=P?^v-^W~k?-|+dMjd1E5!vzSf}w5Aa%Cj(7mlv+1wt)H8DZ1Mf7jn(b4uIw zp__h(FAqmgNoGnt$gU+)2Xxe6DpE`Cx#8LOmED%*APX@^5}mZ|$@H*^QzYrW(RF}0*khh<%s}0MKTZS~|cHuokLH^Gb+KolDX4dwLO5I5<`Q?zeGM&vb5|=#Fohjv5 zRtu=L$#GygU-4qEwb-Sr^M2fK2H3Wo?1xv!XV2V6PK!#p4JW*G{(T3F5`l$!wVY$8 zzaZUSBP5RVK4M9WV7x)eZ%!n%=1cMzXKRt4s@K4Q)GnWI&ih*vwWwRw{vcVRBmO9U zz!h5_)%<8yGaQJpagAttw+LNB9(T91bVTn{Of#rHn-Mr4X_EL7`%YtLoS#6m_IJ2~ z7pj8E@ii~V7MV`tnnM{v$Z=*Yh4%BJ-92U@FPW-7NnfM%H*Zj?29BAWo9@ z-vM~S`YZYV)b}`6jb8|H?_@y5rgswdNhxhyhMzRx!U(qYV%|TQ@LhWk^u-Qwdw7OMzn*$a)-JFaGBX>ue>J&?q<34CAY7Mo7zzV~UapB52% zc-j~JCvwH&ph=4vIwW9|45suCrV1#^=uYI;!szE$;JYH7>nB^p0OLUMINDQ3D+OkH z1%PJh7FP?a3%wuvWBYDNVa z|F@s={+~N&#}NU-0?*jhIj=|IUbJ;CM-DVyFIL#BQC7-%FJsWCzjjtQ;#H@&NdkvB zTZGyn-pm=H$GaK>gY8ccSc>y|Ge5c1W1g(W3rCN5JaCfEh1nf)Gbc#nI=7yf2)4TB z49sf7z^uB-lD+^h*VG`w8z~eNjjOCzVVv$|tRpiy+ZJj&&aafTp%1WzMZlQv0t?+q z1I!_4kL77X5F|F1)=qL9=g(q=Fi`fWBZo&8A%Xn)z|$r1QRPYSPy%RY9T##86!W+k zMonNRA(lE1$0WI)z(;9fDsC2+6#Zs~zfitJr) zlQs%xmoX2Lc3(c;01MZg7As3+K7(Y|nx*{C(X@#m_bjJIgwAV2Z>ul!L6!yr2m=L+ zfMEJ-qJZvETucod&JGPa6y*ESQzVKYwLF(}-#gS-H?XY_OyRE;{q8vB3+(NA&;#U; z9DMC2CCuZncUE!^g1|jC{=mH5mGqmJ^<*}3JePe9a z)|f=JH_#50Gfj=4SBk~AAs#G?Dfu{*HCu_C#Vq`e>AT!S-~MXk?nqY}&wv5KTM%R+ z9zizXrNcg=5ITmy`R}KW;pBy~z6QuecE!Fe=)NUK^u;};@UpiU4FM76A0EsWtp!PN zheY)?u9S_H0R?i1^g%Zu2CA4bYhH@;GxrHtS{97f=Rd)`n<;qj$r<)hJ@dJH&-6C_ zuEz_vXNT78Sx@Jl?BPK;RtWmae4yJt246NPy|6bvSoI%c!F8z~ndxxlJ%Lm3SwvtE z@zVM2Y%~Cv#duN+I>|Cmhyn|O$M9be9dlYRowWN$ay|+%%V{tu==@64J9^a+u6t+w z!gcq(wI{G+@_fa1$I>&j{em$YpMSlYUg;N1sSLj973Rs~>9)?fjk_sWd!W_r92kch zJplrGt|B$Mm&YYOy6Wg{oT8=2A_wHa?j!D+v+Sw-6%&Rs7N+G%<05qygF|G7_0N^! 
zM(0x&NA?iYrMXuU=h(|nPbZNG7kv5O0&I0kmSTzfyS3T}^$9{0_jB;pOh z&@fK{yvkYMqz&$sX8&!h^^yacQx%PZ?)kwl!TE$Mw{(Ed8mhj}ZX| z#j>8v;tLrd_Zx>rsJ~g&tMeh>3Wx0ElCUt4Zd37VUr?N54|@V;a5z4X;3$2UVbE3Y zrJj9dqtUr$9rm~$jSIFB$z#=sH9bo~j>(bCTD^pCw4{-a@Nnfcil#X=v%a8aHC5jw zN#+^@rX`L&-89`fO$oeF>v&Lh!u3^Ls6;+QGv`)Hj_@MekSpkvMsmY-N&LO*o)VK5 z87+-JvFLpds2W;~fX^$yPMJdPYA9xZ8~kfg%y=LEtH0Xgckm%z6@z3xe9 z#klN^U{=-rp3UnPmKTo9a-5SrdWdjj^WG6Lb03C?VygNTZPIci2!=~#kZKyHlhxdP z-@rebXw^0n^UJGt7}_p*hD{DQYZzPY=f@~m5fyz7WQQ3^55rQk_Iq4tuuDFP0b5>m z)8~6-e?(|+x%dZ*-aY|6VL?TgPrY>ful!!=HAVox>7e@wa zVL*4E0@Ge8+y<8Qg<5qOPQI4FkQrHq%{BX7vq)K#p-@KG(#9%1ViV+VHx^D$eq1>r4%YqvT^u_j}XDUu^zdeR;v zH3FT&&bgr;<6W|ID8`1|8J1`Q)g0%Ka@q*ce#b5;Rvg=jEy5UUISL|ag9?;0!d|6E zH3>PUI?DqqZ~Dgm4*VB_iwUBOM;dpt+GFJ(=@WW!RMLJ{z9%0RF?t zC@@ZV+uJW|;A}cVN0g4VoU_wip8&ytMYyY5x>eZkHh&rxup4G}$7=0f72>rgCWs0; zSDAH({|!ohrFD)Sl=GqZ9GlGFGDzO)L+8q3b4a5EU0U>Yk#+AcJ26+w4}o$qQyzn< zk;)^twQWmCVbGau7C$p<-=Aw1cVN@>4ZJ}VD|D~0B!upK-wzHELUa4Qtio&n9o(*{ z7kN16{tz&48z{Cv)Q>RI^?156_mQgHV|N2BfHqof6D7Zr&bRn0;xglOi9iriY59y` z%n>bK$SRxrNL>QtkFbtgHv@kl1>rhEenbf#1%lcDof%L{&mSrL=v+hwb^+l#q9B1l zV6LbS30haI=v_nQsKOBx&eT?*(Xx79Y_1-XK7sQ{hL{;t^xV0_b~k)bq2(7VEk2mh za2_C>j%xc-3OE`a?UW)!#<$c(G#W-z^zTHUL3+iYX?-qeAZ%)b`cI*I)`cGXq{90q z1y~?zuW|2&Iea&(kIvx=S5$W>C%EhA7qRY6v`9!$5I+(a8l<+_*>I(_LZF?p={n^a zjLrYK8lhi}IQ>G~BYta~YxE%*)OsGL1%)D0_+#GGPA7{crBee7{fL>MwAomsguBAL zc&5Ft!0^A{G!Tc;{2i2}{o%F4Y$P{?dxkvYTJK{qhn699QZ%fX*L}>u(yadt+P-@g z{Yl$c%13m(EVjeZPg1;HN3j-nr)uh!j8_;lGriY>n1Sm65Im>wKxz3UwcA)0W0>p- zK}?FY_~vG$+%M0gxV0z7J6;Wg!K`mJR$*Mx$VR49yesC1v%c~E^ljX{I3*%zwysqJ zfW?9YOz!T`Y&;s5fkAU(hsK{e9LCuC@q(U*q7UCQ3fRdBq@HUbLh@e58fzjnMeCrr z1mJ0=>vl;L^;8f}J@#XG`!%PCPD$vt%sh8wKrGqcl)J+K=SzSBRj!I$e0=K*g)FAU z1>Y6@EbSA{c3eJTau_cALwfQMSkV=i0B!bh9G-c?v-`S#rEKt{Ot4B)pXQquHnm%8 z4OI8=;$>p(FjN1ODRx!g7;U_z@dajWGm8xQ6Bt2MNOR0k&Ygf)VdbI-JU(En(Jl#-ZD(vQw&;ki&P;tXaWGQp6dNRefbWNrkYLM!KLgb^YzbkW$ucu(pPEf<-NwtfZ?K=X`zLD&vxv% zkXdW{6W?hAlsWftL6^XLZ+erML1skdHZ2MrK7j z+m19B9kJ3f>;yO>0~LSbi2wE^fYT^7py^^)tQQuX1x3{$AWJ58Mq-^R1D{RrFHlA5 zZXKDrW)0J)1rEE`Ao8^S+6^yMU`eFF%1IRw__}n5aBtP39u!R<=G5F)c`+F*Lqi-j z3|8sc@*SZsr!9Xh37klr-}&Rl-H?D0t)ZnOosSQN82+*db^XXt+vOOWAzdpjUFDzP zyS>5_yw?D?7Hw*5rpF7j{;-_qVbQv4K zyAgU}Gk=qM!LipK`_nZwyqa_T`Ms)t0Y&=x%*u_!2JW*4%xHN(ZvVGuFD4q$Emx=H zp^@1IEqHii%gNzt1@MDJwIaStDZ5=qAw~$oCIQcGxw+{h3x81(LjER;rki*ls#$1d;y}yQUQ|CnaZH~_JxIsY-;$yDjQ$X@Yami@L@a2d_ z=*|*_`OE)A3JSGIM9710NV&tjI0Jnr5auBKzL{b{RsdMT%GFBLwGo%1-|6Vckb7G4 z74tw|j-SL=Fd0PtmNX}UX1-lxVK{Lut7yx(<6Zn9?zirn1;g1v64S42yG?SG43n&* zhM#9&u1-(Uh;^?@YZY{R7*MPDRaI3j4?iHtN&K6lS=EEdEyuDWGU)eScihxrNyWsV zkO)h-Fth!cWmzOLp?DEP(V-WU`3LF82A+USC+*aAZ@s7NkeZrm<#o4NAexN=@ZB=O z*bH9m@hWXq35l@qBmTD)*{;0s6|n1nHD;K@xV-Q6x5|p*qJR};-t8J%fNOz2mqUCw zmdp40FyB`VrVPh-z%icf{dSOX_%X*0kaB>;o!NlD-xOO4jYeDUo=nd6d}D2TuzlVW z&zQ#LF|JFjhwAJV&@ z7|8`3zfNi+4<|$s(IPKY>ue!L|MPJ(8U_+n8jg>~*HGfC+H2-xg#`Rt&H!XO^3R&b zMm|~QACtsRTHD%qxN_J7sf>(rJ|MV>2=0G%U6q@A8_U`xw&OK|ydH}kJoA!ZF`I#_ zpE)v7^&2Bu6dOUUy&-IcSACs9mM_KFf*bX=JteR+05@WUt-T6MB{jIYqQC%QHiU2YG zp{FQy;y`DNvv^EoEonpnF#(j*pZ5YLzF>tZ-ob`}@=I$+l^Q8-R3o zcD1Vtq#79jfS0j?9d%(;y{V+b3uCmgPYQ^og{P^THsMt65i;@AR2)FUfNO`U(2OjN zA8!`FLl%xs+ZB+Fky;46O;?_;I#*iC-|x4M&_%a=xxw{651s|C)#{}HX9suZAYXT7 zKmdRgP6#iS?{(Ak(p$QHsZgUExTp~^*B=?TPl#2{9>&A&?9K~YwAi6)1IP5*!duY zB`=9Hr<8L+lUm$2y|0c}-*TZycma>l*cX#JZ6CJFK8%jn`5B4uZtcZD{SGtyMyEFX zbc^9>V^#Yz&Nb5O3(qO7Nk)k3x{9UwFB>UIr*q8g z)w_;ffrslmGxApg(77mmvpmzCQfn zCCHH&8e5Eq-Mx=G{HyDQ=LV878Vg^P=bvsr6*3+rOjkAiu^5!c&&M(DY9Zqb-tiIf z5!t8$=|}q9Qf%-ViR;98UxpW&KTPCy1z2+W^`+k-TRB9vdE7k%Ay^FLLhkT(Cz~HB 
zqoZIFa}WR&IyV5QvSnNExoo&OmY#^9d*O8G29X-F&5OI4AB_po2LiPAT1?l=<4&GnFOtMaGP&gq%VYQFM%Hj+9{0<;?3V~oP7aQnA!wp`Y zn)`;I2T2C8gczRb=B+QQUFgV}J#BnGgH4VM(!z_Sh)NBK2mbLoA35^=o`;KeoV`b? z8G!IQ;|v7p7`y!}xCJeu6TE8i!c^D!UCeAX*^RUQ<{$9fRAju`U~{a152^Xr4yeol zlEEqa(oPE#BhL(|I5ztX4ryqDLqCvb5S|bel9dxhSOzXaAwkT_LekN=z*5cyZ$1kG zw;80>#W3(OQFwd?s(qX{LVyy9o2$w!$!clEP!)EIegr4Qxsq9>zkbwHI?-s}>t5`k zpR{5u`VkVNc~gbs`0#{th9~l$=_}n#_tsv9aXR_vaLd4m#zDX7T>^Y52_R7ptxFV$ zo0~s-fed?Z^MJIq)%{RRW+{MXbvpNhhcrx-Cm?SV^zm8xZH|p+(V#W!9FVqQNa`Og zJPA+<8=UYgrpbO=H&4Bfa^sIw`bMcMK&Pc)8 z{RRynGrJbu8HhZ-d&2)`yUz!NxlLcR1AMo{o09*APi*wLcj6z3{5J<_?Z0WNFF}cR_iMcA`roRWb@#d{94i{0sn_HdLldmzTa!HS z$rj(tYl86K^d?{Q17SP@KqS%%G^u&heD*ioA34qubFWmLTy2J>hJ81(^S`Hb#CYD# z17c5Te+o~>N4j6$VKUdyWFyM#Z|S19tFFnwH>9@Mdrq-&C$pmTO?erfb42SAW@^fI zeeciaH#Y{>_uLBIxTXH<#^OAO?ja+vzcmR&A)0npb}T<9DRM6T@oLQaXY%#9V{@bgjOWzkKR+KkJRNBZ@r%q)+*~ryTo5j_?GKk?qIESOn`4b1oXl!$U8&C zkZzUBc7_24xLmDlsW&}$E|Q>4v)eUOdpxhHsyrD`jVXsEHOEq% zNq=5;C^d?rjK+aimU5LK5sucNljTbQj4)QFtY1_Uk>D@BV8o~a(c~t!gkXq!<@ggw zp}ONzq3Ztj*hIZ06{lNv$te@thgV;zKiG5`X+h>^t$)15D4}UBs)WWC4K{)32GWN8;Q9zVS=M zEL(d;WN8&lI(hNmA1>?P%?S5Q*h z`nj0MQORB`wduGMD}vXGzTl0iD0V@%>|1e z=S9^8*6YS8Y_L;J9VYR2Vcis>QpLE(SN0$Oevd8L??$IUf3C`}`?J%k<2zSZQ`{%T zRE=nlvy?IuHv;!h=zQMn!w+j^^?#dATnP0`Ev+-Xd{?}+^L=lH)uMtX68feuF{)V_ z`KS~qMQAQ6tOoKf(Jf-wQ2O6(wHoXdl7?bX+r4CxvSkMt>$loER`3eA>U0!Q+*CH) z1b&i`VXQ0XvXK%+ueclfO=eGLKfJtP(4ajSQjIV5|L<0S@mf)3J9iq|C#W5(_Gq7KJ@5i`Y!qm3$NRLZ9zr++Iz4ZtejS?EM8Ke1u*GYf$RTCn*uGo4ThqBazQQ(!e)ky_Zxff%rsXcGYMvIobg@ z)m}~q&T9C^-$T+qaLt)&!aHq?Dhzh%<{gp`F%s2gleG43!$2DA{>GwZGk&^AY z>Ra8M&pP8^L-CJ4d?649Uw%B6;s?4!Cz)cl=qYEf6TyJLit@+_g}*x&Z=OhNJYQCy5fMT zSeLw$<$QckVk65;v+&%i&e}u+zQh8En)Uc-&cD-%{^2XjC{Sfvfa|ZgNPhonpleq) zH9uxRnlLp#M}f~cQzgIOkcHKtYjVoT$X3@G%>QFX4_%UnJt%qP%)`Gf1|*eY~CO z3?^40GgLMev~u(KOd$gGRR}DQEfXKW=B#IH#!4V`wkL8>vr3Cl7Y&k4l!#CU<62_W zA(I8epgW$*{iZSAqyun`_j(5}kf9h@i{9bCL7O2>f&sE}ZNW8;T>E`D-BC06TMl6Gi zwW|C$aW5sRLnKRr4I4&&A|pMA$zYLQxme&v^L@3bl>w?39e6fZ0M0%z6L%wMY>@S~ z%H-`|dtoNVqHG{DP;I=iH&d`?R2w!Nq2 z%;{kSg4K?!xr_-7~nQ^U(0V+rRwv4LroD)!X5qw6E(vW@x0> z^?A>qY&~?=LAmbI_tEmVeCNhG52WP6zM$DWu9X^Kg4I37J+9zZc+_?BhwTSCu`R^N zyyf-SR{Z8vaQqDlnJ@~q82+m+Rk0mlCFU{_%~%I80E_;gx}Pgwdg?Cn#``S9O9JIa ztUfG6s1v|(5E2^x%$VCA*tIENBtW=tS9M^+e!>3LvZU>}fg_%)`rK$SPnpxcrJt+A z>Y|l-*+p>*dnKtGbx6Q?4vlx@`3RBf&1>3)Z%bO?2V~X3hssb5x6vjA@E+n>6Awt< z<748U5AKpa_a%^?_hWXDY4Y=z-;ySe4sUOK+C(Ft(rkPrP#9#~5W`WhC&Rtt!$ES( z>o{|+*)n!{s7pFX6W>;hRd*7Udnqvyh5$lMB(co{9$uzR?Ylq?gz64>$CA2%-_G^W z0p`|n7Y>at5FJx4c zd=NHJX1s}JrF{>i;E)URk6JMfEX$%Wd^yT5J+hwVCV&RYt6QlaFg|}*^<&i~+Gyhu zd-`IsycaeHb896urehrt9nX*wn3`H!(%YYcUc?*S zACUV|^23NcM_N`H{KXr%#(^#ByYcNU%;mYJ!*A&p~qDljU`7ydQjJKgG@~ zM2xYL+X63oU<6jg{MizDESuf6IQhfeG~$&-@}2JnpCj;{$lCmv`n(cKY%mfy(u+!C z)P~`x{mPp-p}4j65*(R#VMP>xdPZ81V&rUg-+QSxv+DG~Fr6)^YlQebuFce2fm)Ck zjv%)$w)6GQ3^x}Mic0lm=FuJQVNnncE7ty6Oq4KEDns-3+Mb}}*&}AgS}OptqJt#L zwK$b-_E9wJt?qAbQ6OQL62|bwvD*Wm7j%ddp%h8OxpfbiBUBMFR6pCxZ#fd^#OOdtMD%!UgCRanH2qxh$<6a^D6t9c z<%h0rFU7r=g~2%>k(In9x;7Jgs5LBz9ULqcn!;nnR+rjYfE_(_=HY#I<+-*MfbhHryxZFw)v*n$d7 zlWj9^3P;j0dzV#42|crR9b{x=yI(`Gr+huz>8+P7X$w(cd`ns{(<~y-4#zb?KDVQJ zF$#8{T!vg>e4z;vEa z^9-2soER$!B=yYDsNjV3UeeTmU)mD9FL8XTz7-cE@y~COV|tt~nQ?ej4Xk?%tXHfl z%5_K>|0&g#`q8Jk7p6Qr7VA0b)IzY+1Pi$tX4Ek-ThX^TUZNTM=oRjP{XU__i%zH{ zGj!O?%gb)IP}=Tbrl8=Sh@@h>`mjWb0G~_!^_AR{gz>f5-&_Aj^83odO9HI!9nm-8 z{eH$7)jxqO`uV-dY;AS*L?63!-%=w3dJH)|=^YN+nt1`nH*YjKj9S#VZn3k~V_rf1 zceM&5bb}7li1GO*oUOlDWAWT5_rObGfdX=;2w{t~EJiuh*}BFEnH_VqV;$ zH=xCk`%iku&302YK6ExD0(N+KSil?gN5iyx^p8z$Cbu_|gi}lfrl&7sycq>s+7$8iGV%+N>%hVJw|Ax~kK9l4W8 
zXamun>CYyy0epNPe39kV$nLT!~US2v2{NhJ6cShN5XZ zthN1IeRqv`|8=9Ju>i(U9)Xo!s`j?f?RdfN@6&x2utkD?ao;gOXM)RU@Zb2g7C9R; zN2DimKb!DwD%Gg~nPZ-!R!VyMaRSkSeqd3Iyy7aY#cLH8e= zRVT2o=2_#KfItXokLZY}GiB&gm{s~0FYU!KYE6?+jLwigC-5q#`N3eR<2+&7BgP^) zTSWZr*x1oz8JnOoHQH2$KpEcbJ7sT+VFC{ao!$4Z}fX~ufKkf-&(mAWQLA%GAI`7 zGd-FA`}=&;ffmBR$H%vYG;sWEvM$?@Wn?0;?KZVeI=+s(eyu@{|L4+x_J09w2a@;| zEfi(0kn$4?FM+VnVXv(ZJ6_0?Je=C{3bFbIz+__T=hf@R* zC=sq?>2(SS;cYl)O7os@pnGoBs#R+4+_|b>zkW$y0wh`pD_X^1zlr#UcpzROs(1y& zE|qR(jweERsX6<&e*L;yzI?eZNNV5`hJswT-Lw)E5F)yWw3B9;5PKDT1?RBrcg~zS znjJcHNLm4j^ZNOgoNsIh%WoBxD-M3)D}EpGzrUuYM(-53W5*72SuR`;B;ppz^?X9> zKmj2dW(?ny=3U_k98Z}tMT^Mn*|RAi>?2|NfM|dyfan0rawmv=s-qR4fDla=5_luO z752q1UI+;0N6bpUG+68-Y<)m5FwX(;aBLCcR^OM$TR7?|AVl+uNGEB2t(;Qv*nfT2 ztXXR2%$aJ!gbBPrL}+m2$Pu-B_inXo*Dh6C8`+)%D-nMrHt8X+5Cw#Ic|y`pnqLaB z{8x4C*ij2fPEL**H*Oq%@v@J=_Hq39aaCMgtcByzqet#jSdRM>u|?N7x+x&Us}qvJ z()?WbO&oXX)Jc0I)2B~Y6DLlj!D1hQ=>s0eo;`cCcszUCWFIZn$AJR})ZV>&RY^&Sdi?mYdwQ$z7h-$<$+aho=6w1_}Vz*u#*XcXB;bM%3w%@7O_;_>X+Gxw=K34cZ5pnwp;77}bU z@&n;=_Z&Qtf`S6oty?!5MfUO1`nY@duFB8P*Pe%gKM3CuTT{W&NdY0kBP4yL`G)X& z!hXYt57*0nr%jut1`i%gqsTs*t&a;AE~tI`_UUE0*j8463Gv?v|1379ildPNLWEaH zP?fP*I10zTd-v8J37DOoO{2&@8r?@}X{q)$z#BJixTjAEbHuh(bM#R_h{O?+9BEbw zbDd)~dGh24QEQ7vk$uE^&9!US)WL%XBQ$KtPT_lUT+Go%0U;7sNQOzXQaK5Uz<>b* z)TBw1bQNGM0mg2LG`#G?1t!KJ7KaZXR)-E9Qdh2AF`q^P;QL}r&T@27K#1fK5>#bG z^ zZB+fHR*O}AIeCpT&efrSwCSV_!1Yf&$tu8lm^X5(SX_QmnBrFko z@r;DcDkzvY`Le2C*)`989!rGN($X{s4jib7bDcYPPHF)`IO@`+OX~9F%et+stklF( zTMukk^(&@6;T#4*0U;7qOnOLzZ1~~AEMcZ2F8>FJ2QuU#Bq*~2A{5rQZ{LKgDvT<& zRaI3w^n_9&AVdNg-wz%<@OsN9!gInhA*vUim9c!AcNakcA(By4UYBNw!wiRSc)hl5 z+qP=-=+UZY&z^dvSC1Y&G`n~2uIE-hO{|c*b?cTodGe%s z`t+&apExQ+DfDvTc~Hjfr@U(j3J5{?iwb7iv9}4tWuVg{6gEEE`ye35QGo~J5-Y&k zw{Ner>(RDr*G|6MHCaL?TLOr)@99 z>rxE16}EPW{eM5_{7DRwe+!>GM69XKVXY$!HNr>22TqIhZY+y}+W`-vTE^NfemevO zgdhllNP2vpmw+G$f}ns91VIoK5P~2Gf&xMi1VK Date: Thu, 24 Nov 2022 15:41:43 +0100 Subject: [PATCH 2222/2550] celaction rename variant to current --- openpype/settings/defaults/system_settings/applications.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 7f375a0a20..aa1d7387d0 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1268,7 +1268,7 @@ "CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" }, "variants": { - "local": { + "current": { "enabled": true, "variant_label": "Local", "use_python_2": false, From 0f1995e2972e526bf3f3f2c8e6c53363bd040264 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:54:37 +0100 Subject: [PATCH 2223/2550] celaction current variant settings --- openpype/settings/defaults/system_settings/applications.json | 2 +- .../schemas/system_schema/host_settings/schema_celaction.json | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index aa1d7387d0..77b91037c9 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1270,7 +1270,7 @@ "variants": { "current": { "enabled": true, - "variant_label": "Local", + "variant_label": "Current", "use_python_2": false, "executables": { "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], diff 
From 08691e257d460ce537a62680d0e3eadf212858e5 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 24 Nov 2022 16:13:06 +0100
Subject: [PATCH 2224/2550] celaction: ext added to anatomy data

---
 .../hosts/celaction/plugins/publish/collect_render_path.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py
index 9cbb0e4880..ec89fc2e35 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py
@@ -21,7 +21,8 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
         padding = anatomy.templates.get("frame_padding", 4)
         anatomy_data.update({
             "frame": f"%0{padding}d",
-            "representation": "png"
+            "representation": "png",
+            "ext": "png"
         })
         anatomy_filled = anatomy.format(anatomy_data)
 

From 72840c2805460aeb469388ef02b223b2ca98617f Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 24 Nov 2022 16:13:36 +0100
Subject: [PATCH 2225/2550] do not validate existence of maketx path after calling 'get_oiio_tools_path'

---
 openpype/hosts/maya/plugins/publish/extract_look.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py
index 403b4ee6bc..df07a674dc 100644
--- a/openpype/hosts/maya/plugins/publish/extract_look.py
+++ b/openpype/hosts/maya/plugins/publish/extract_look.py
@@ -90,7 +90,7 @@ def maketx(source, destination, args, logger):
 
     maketx_path = get_oiio_tools_path("maketx")
 
-    if not os.path.exists(maketx_path):
+    if not maketx_path:
         print(
             "OIIO tool not found in {}".format(maketx_path))
         raise AssertionError("OIIO tool not found")
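
The maketx change above leans on the tool-lookup helper returning None when no usable binary is found, so the caller only needs a truthiness check instead of calling os.path.exists on a possibly invalid path. A minimal sketch of that contract, using shutil.which as a stand-in for the real get_oiio_tools_path:

    import shutil


    def find_tool(tool_name):
        # Stand-in lookup: full path to the executable, or None when missing.
        return shutil.which(tool_name)


    maketx_path = find_tool("maketx")
    if not maketx_path:
        # Same failure mode the plugin uses when the OIIO tool is absent.
        raise AssertionError("OIIO tool not found")
    print("Using maketx from: {}".format(maketx_path))
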
b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -50,9 +50,7 @@ class CelactionPrelaunchHook(PreLaunchHook): "--currentFile *SCENE*", "--chunk *CHUNK*", "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*", + "--frameEnd *END*" ] winreg.SetValueEx( diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index 1d2d9da1af..b5f99a1416 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -52,8 +52,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): "subset": subset, "label": scene_file, "family": family, - "families": [family, "ftrack"], - "representations": list() + "families": [], + "representations": [] }) # adding basic script data @@ -72,7 +72,6 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): self.log.info('Publishing Celaction workfile') # render instance - family = "render.farm" subset = f"render{task}Main" instance = context.create_instance(name=subset) # getting instance state @@ -81,8 +80,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): # add assetEntity data into instance instance.data.update({ "label": "{} - farm".format(subset), - "family": family, - "families": [family], + "family": "render.farm", + "families": [], "subset": subset }) From 31babaac5fa7c33126dad277d4e28b4ff5aef184 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:07:39 +0100 Subject: [PATCH 2227/2550] change how extensions are checked when finding executable --- openpype/lib/vendor_bin_utils.py | 58 +++++++++++++++++++------------- 1 file changed, 34 insertions(+), 24 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 099f9a34ba..91ba94c60e 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -70,24 +70,21 @@ def find_executable(executable): low_platform = platform.system().lower() _, ext = os.path.splitext(executable) - # Prepare variants for which it will be looked - variants = [executable] - # Add other extension variants only if passed executable does not have one - if not ext: - if low_platform == "windows": - exts = [".exe", ".ps1", ".bat"] - for ext in os.getenv("PATHEXT", "").split(os.pathsep): - ext = ext.lower() - if ext and ext not in exts: - exts.append(ext) - else: - exts = [".sh"] + # Prepare extensions to check + exts = set() + if ext: + exts.add(ext.lower()) - for ext in exts: - variant = executable + ext - if is_file_executable(variant): - return variant - variants.append(variant) + else: + # Add other possible extension variants only if passed executable + # does not have any + if low_platform == "windows": + exts |= {".exe", ".ps1", ".bat"} + for ext in os.getenv("PATHEXT", "").split(os.pathsep): + exts.add(ext.lower()) + + else: + exts |= {".sh"} # Get paths where to look for executable path_str = os.environ.get("PATH", None) @@ -97,13 +94,26 @@ def find_executable(executable): elif hasattr(os, "defpath"): path_str = os.defpath - if path_str: - paths = path_str.split(os.pathsep) - for path in paths: - for variant in variants: - filepath = os.path.abspath(os.path.join(path, variant)) - if is_file_executable(filepath): - return filepath + if not path_str: + return None + + paths = path_str.split(os.pathsep) + for path in paths: + if not os.path.isdir(path): + continue + for filename 
in os.listdir(path): + filepath = os.path.abspath(os.path.join(path, filename)) + # Filename matches executable exactly + if filename == executable and is_file_executable(filepath): + return filepath + + basename, ext = os.path.splitext(filename) + if ( + basename == executable + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath return None From 3ca4c04a158b99e77d6f18b171ababd91d02eae0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:08:45 +0100 Subject: [PATCH 2228/2550] added ability to fill only extension when is missing --- openpype/lib/vendor_bin_utils.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 91ba94c60e..16e2c197f9 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -60,9 +60,10 @@ def find_executable(executable): path to file. Returns: - str: Full path to executable with extension (is file). - None: When the executable was not found. + Union[str, None]: Full path to executable with extension which was + found otherwise None. """ + # Skip if passed path is file if is_file_executable(executable): return executable @@ -86,6 +87,21 @@ def find_executable(executable): else: exts |= {".sh"} + # Executable is a path but there may be missing extension + # - this can happen primarily on windows where + # e.g. "ffmpeg" should be "ffmpeg.exe" + exe_dir, exe_filename = os.path.split(executable) + if exe_dir and os.path.isdir(exe_dir): + for filename in os.listdir(exe_dir): + filepath = os.path.join(exe_dir, filename) + basename, ext = os.path.splitext(filename) + if ( + basename == exe_filename + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath + # Get paths where to look for executable path_str = os.environ.get("PATH", None) if path_str is None: @@ -114,6 +130,7 @@ def find_executable(executable): and is_file_executable(filepath) ): return filepath + return None From 453cada172b5962921af9d3dc61c64b0b379d277 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:09:16 +0100 Subject: [PATCH 2229/2550] change how oiio tools executables are found --- openpype/lib/vendor_bin_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 16e2c197f9..b6797dbba0 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -299,8 +299,8 @@ def get_oiio_tools_path(tool="oiiotool"): oiio_dir = get_vendor_bin_path("oiio") if platform.system().lower() == "linux": oiio_dir = os.path.join(oiio_dir, "bin") - default_path = os.path.join(oiio_dir, tool) - if _oiio_executable_validation(default_path): + default_path = find_executable(os.path.join(oiio_dir, tool)) + if default_path and _oiio_executable_validation(default_path): tool_executable_path = default_path # Look to PATH for the tool From 6925a96ee64a57ee928b39d951a85a44296469ba Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:09:48 +0100 Subject: [PATCH 2230/2550] celaction: return back resolution override --- openpype/hosts/celaction/api/cli.py | 6 ++++++ openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 1214898e3b..e00a50cbec 100644 --- a/openpype/hosts/celaction/api/cli.py +++ 
b/openpype/hosts/celaction/api/cli.py @@ -35,6 +35,12 @@ def cli(): parser.add_argument("--frameEnd", help=("End of frame range")) + parser.add_argument("--resolutionWidth", + help=("Width of resolution")) + + parser.add_argument("--resolutionHeight", + help=("Height of resolution")) + celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index e4a3bee5ee..81f77c1654 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -50,7 +50,9 @@ class CelactionPrelaunchHook(PreLaunchHook): "--currentFile *SCENE*", "--chunk *CHUNK*", "--frameStart *START*", - "--frameEnd *END*" + "--frameEnd *END*", + "--resolutionWidth *X*", + "--resolutionHeight *Y*" ] winreg.SetValueEx( From a17f516597a39402e8032bc66b5027f47b199086 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:10:07 +0100 Subject: [PATCH 2231/2550] celaction: deadline submitter to modules --- .../plugins/publish/submit_celaction_deadline.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) rename openpype/{hosts/celaction => modules/deadline}/plugins/publish/submit_celaction_deadline.py (95%) diff --git a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py similarity index 95% rename from openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py rename to openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea109e9445..8a3160e83d 100644 --- a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -37,13 +37,12 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): instance.data["toBeRenderedOn"] = "deadline" context = instance.context - deadline_url = ( - context.data["system_settings"] - ["modules"] - ["deadline"] - ["DEADLINE_REST_URL"] - ) - assert deadline_url, "Requires DEADLINE_REST_URL" + # get default deadline webservice url from deadline module + deadline_url = instance.context.data["defaultDeadline"] + # if custom one is set in instance, use that + if instance.data.get("deadlineUrl"): + deadline_url = instance.data.get("deadlineUrl") + assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) self._comment = context.data.get("comment", "") From 66bbaf6fccce75e879f29729443531786693efab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:13:21 +0100 Subject: [PATCH 2232/2550] celaction: project width and height to hook --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 81f77c1654..cde3a0c723 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -19,6 +19,10 @@ class CelactionPrelaunchHook(PreLaunchHook): platforms = ["windows"] def execute(self): + project_doc = self.data["project_doc"] + width = project_doc["data"]["resolutionWidth"] + height = project_doc["data"]["resolutionHeight"] + # Add workfile path to launch arguments workfile_path = self.workfile_path() if workfile_path: @@ -70,8 +74,8 @@ class 
CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) - winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) - winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) + winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width) + winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height) # making sure message dialogs don't appear when overwriting path_overwrite_scene = "\\".join([ From d7e6b030fe0ac0323b332b14b2c9bb39e839312b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 25 Nov 2022 11:55:21 +0100 Subject: [PATCH 2233/2550] refactore extract hierarchy plugin --- .../publish/extract_hierarchy_avalon.py | 369 +++++++++++------- 1 file changed, 223 insertions(+), 146 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 6b4e5f48c5..a9c0593f9f 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,9 +1,8 @@ +import collections from copy import deepcopy import pyblish.api from openpype.client import ( - get_project, - get_asset_by_id, - get_asset_by_name, + get_assets, get_archived_assets ) from openpype.pipeline import legacy_io @@ -17,7 +16,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): families = ["clip", "shot"] def process(self, context): - # processing starts here if "hierarchyContext" not in context.data: self.log.info("skipping IntegrateHierarchyToAvalon") return @@ -25,161 +23,240 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if not legacy_io.Session: legacy_io.install() - project_name = legacy_io.active_project() hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - self.project = None - self.import_to_avalon(context, project_name, hierarchy_context) + project_name = context.data["projectName"] + asset_names = self.extract_asset_names(hierarchy_context) - def import_to_avalon( + asset_docs_by_name = {} + for asset_doc in get_assets(project_name, asset_names=asset_names): + name = asset_doc["name"] + asset_docs_by_name[name] = asset_doc + + archived_asset_docs_by_name = collections.defaultdict(list) + for asset_doc in get_archived_assets( + project_name, asset_names=asset_names + ): + name = asset_doc["name"] + archived_asset_docs_by_name[name].append(asset_doc) + + project_doc = None + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data, None)) + + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, entity_data, parent = item + + entity_type = entity_data["entity_type"] + if entity_type.lower() == "project": + new_parent = project_doc = self.sync_project( + context, + entity_data + ) + + else: + new_parent = self.sync_asset( + context, + name, + entity_data, + parent, + project_doc, + asset_docs_by_name, + archived_asset_docs_by_name + ) + + children = entity_data.get("childs") + if not children: + continue + + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data, new_parent)) + + def extract_asset_names(self, hierarchy_context): + """Extract all possible asset names from hierarchy context. + + Args: + hierarchy_context (Dict[str, Any]): Nested hierarchy structure. + + Returns: + Set[str]: All asset names from the hierarchy structure. 
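# Editorial aside, not part of this patch: a standalone sketch of the
# breadth-first walk the helper above performs, run on a hypothetical
# "hierarchyContext" value. Only the "entity_type"/"childs" keys are taken
# from the plugin code; the sample project/asset names are made up.
import collections

hierarchy_context = {
    "MyProject": {
        "entity_type": "project",
        "childs": {
            "seq010": {
                "entity_type": "episode",
                "childs": {
                    "sh010": {"entity_type": "shot"},
                    "sh020": {"entity_type": "shot"},
                },
            },
        },
    },
}


def extract_asset_names(hierarchy_context):
    queue = collections.deque(hierarchy_context.items())
    asset_names = set()
    while queue:
        name, data = queue.popleft()
        if data["entity_type"].lower() != "project":
            asset_names.add(name)
        queue.extend((data.get("childs") or {}).items())
    return asset_names


print(sorted(extract_asset_names(hierarchy_context)))
# ['seq010', 'sh010', 'sh020']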
+ """ + + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data)) + + asset_names = set() + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, data = item + if data["entity_type"].lower() != "project": + asset_names.add(name) + + children = data.get("childs") + if children: + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data)) + return asset_names + + def sync_project(self, context, entity_data): + project_doc = context.data["projectEntity"] + + if "data" not in project_doc: + project_doc["data"] = {} + current_data = project_doc["data"] + + changes = {} + entity_type = entity_data["entity_type"] + if current_data.get("entityType") != entity_type: + changes["entityType"] = entity_type + + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + if key not in current_data or current_data[key] != value: + update_key = "data.{}".format(key) + changes[update_key] = value + current_data[key] = value + + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": project_doc["_id"]}, + {"$set": changes} + ) + return project_doc + + def sync_asset( self, context, - project_name, - input_data, - parent=None, + asset_name, + entity_data, + parent, + project, + asset_docs_by_name, + archived_asset_docs_by_name ): - for name in input_data: - self.log.info("input_data[name]: {}".format(input_data[name])) - entity_data = input_data[name] - entity_type = entity_data["entity_type"] + project_name = project["name"] + # Prepare data for new asset or for update comparison + data = { + "entityType": entity_data["entity_type"] + } - data = {} - data["entityType"] = entity_type + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + data[key] = value - # Custom attributes. - for k, val in entity_data.get("custom_attributes", {}).items(): - data[k] = val + data["inputs"] = entity_data.get("inputs") or [] - if entity_type.lower() != "project": - data["inputs"] = entity_data.get("inputs", []) + # Parents and visual parent are empty if parent is project + parents = [] + parent_id = None + if project["_id"] != parent["_id"]: + parent_id = parent["_id"] + # Use parent's parents as source value + parents.extend(parent["data"]["parents"]) + # Add parent's name to parents + parents.append(parent["name"]) - # Tasks. - tasks = entity_data.get("tasks", {}) - if tasks is not None or len(tasks) > 0: - data["tasks"] = tasks - parents = [] - visualParent = None - # do not store project"s id as visualParent - if self.project is not None: - if self.project["_id"] != parent["_id"]: - visualParent = parent["_id"] - parents.extend( - parent.get("data", {}).get("parents", []) - ) - parents.append(parent["name"]) - data["visualParent"] = visualParent - data["parents"] = parents + data["visualParent"] = parent_id + data["parents"] = parents - update_data = True - # Process project - if entity_type.lower() == "project": - entity = get_project(project_name) - # TODO: should be in validator? 
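# Editorial aside, not part of this patch: the sync_* methods above avoid
# rewriting the whole "data" sub-document and instead send only the changed
# keys as dotted "data.<key>" paths in a single "$set" payload. A
# self-contained sketch of that diffing step, with made-up values:
def build_set_payload(current_data, new_data):
    changes = {}
    for key, value in new_data.items():
        if key not in current_data or current_data[key] != value:
            changes["data.{}".format(key)] = value
    return changes


current = {"entityType": "shot", "fps": 25}
incoming = {"entityType": "shot", "fps": 24, "resolutionWidth": 1920}
print(build_set_payload(current, incoming))
# {'data.fps': 24, 'data.resolutionWidth': 1920}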
- assert (entity is not None), "Did not find project in DB" - - # get data from already existing project - cur_entity_data = entity.get("data") or {} - cur_entity_data.update(data) - data = cur_entity_data - - self.project = entity - # Raise error if project or parent are not set - elif self.project is None or parent is None: - raise AssertionError( - "Collected items are not in right order!" + asset_doc = asset_docs_by_name.get(asset_name) + # --- Create/Unarchive asset and end --- + if not asset_doc: + # Just use tasks from entity data as they are + # - this is different from the case when tasks are updated + data["tasks"] = entity_data.get("tasks") or {} + archived_asset_doc = None + for archived_entity in archived_asset_docs_by_name[asset_name]: + archived_parents = ( + archived_entity + .get("data", {}) + .get("parents") ) - # Else process assset - else: - entity = get_asset_by_name(project_name, name) - if entity: - # Do not override data, only update - cur_entity_data = entity.get("data") or {} - entity_tasks = cur_entity_data["tasks"] or {} + if data["parents"] == archived_parents: + archived_asset_doc = archived_entity + break - # create tasks as dict by default - if not entity_tasks: - cur_entity_data["tasks"] = entity_tasks - - new_tasks = data.pop("tasks", {}) - if "tasks" not in cur_entity_data and not new_tasks: - continue - for task_name in new_tasks: - if task_name in entity_tasks.keys(): - continue - cur_entity_data["tasks"][task_name] = new_tasks[ - task_name] - cur_entity_data.update(data) - data = cur_entity_data - else: - # Skip updating data - update_data = False - - archived_entities = get_archived_assets( - project_name, - asset_names=[name] - ) - unarchive_entity = None - for archived_entity in archived_entities: - archived_parents = ( - archived_entity - .get("data", {}) - .get("parents") - ) - if data["parents"] == archived_parents: - unarchive_entity = archived_entity - break - - if unarchive_entity is None: - # Create entity if doesn"t exist - entity = self.create_avalon_asset( - name, data - ) - else: - # Unarchive if entity was archived - entity = self.unarchive_entity(unarchive_entity, data) - - # make sure all relative instances have correct avalon data - self._set_avalon_data_to_relative_instances( - context, - project_name, - entity + # Create entity if doesn't exist + if archived_asset_doc is None: + return self.create_avalon_asset( + asset_name, data, project ) - if update_data: - # Update entity data with input data - legacy_io.update_many( - {"_id": entity["_id"]}, - {"$set": {"data": data}} - ) + return self.unarchive_entity( + archived_asset_doc, data, project + ) - if "childs" in entity_data: - self.import_to_avalon( - context, project_name, entity_data["childs"], entity - ) + # --- Update existing asset --- + # Make sure current entity has "data" key + if "data" not in asset_doc: + asset_doc["data"] = {} + cur_entity_data = asset_doc["data"] + cur_entity_tasks = cur_entity_data.get("tasks") or {} - def unarchive_entity(self, entity, data): + # Tasks + data["tasks"] = {} + new_tasks = entity_data.get("tasks") or {} + for task_name, task_info in new_tasks.items(): + task_info = deepcopy(task_info) + if task_name in cur_entity_tasks: + src_task_info = deepcopy(cur_entity_tasks[task_name]) + src_task_info.update(task_info) + task_info = src_task_info + + data["tasks"][task_name] = task_info + + changes = {} + for key, value in data.items(): + if key not in cur_entity_data or value != cur_entity_data[key]: + update_key = "data.{}".format(key) + 
changes[update_key] = value + cur_entity_data[key] = value + + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances( + context, + project_name, + asset_doc + ) + + # Update asset in database if necessary + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": asset_doc["_id"]}, + {"$set": changes} + ) + return asset_doc + + def unarchive_entity(self, archived_doc, data, project): # Unarchived asset should not use same data - new_entity = { - "_id": entity["_id"], + asset_doc = { + "_id": archived_doc["_id"], "schema": "openpype:asset-3.0", - "name": entity["name"], - "parent": self.project["_id"], + "name": archived_doc["name"], + "parent": project["_id"], "type": "asset", "data": data } legacy_io.replace_one( - {"_id": entity["_id"]}, - new_entity + {"_id": archived_doc["_id"]}, + asset_doc ) - return new_entity + return asset_doc - def create_avalon_asset(self, name, data): + def create_avalon_asset(self, name, data, project): asset_doc = { "schema": "openpype:asset-3.0", "name": name, - "parent": self.project["_id"], + "parent": project["_id"], "type": "asset", "data": data } @@ -194,27 +271,27 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): project_name, asset_doc ): + asset_name = asset_doc["name"] + new_parents = asset_doc["data"]["parents"] + hierarchy = "/".join(new_parents) + parent_name = project_name + if new_parents: + parent_name = new_parents[-1] + for instance in context: - # Skip instance if has filled asset entity - if instance.data.get("assetEntity"): + # Skip if instance asset does not match + instance_asset_name = instance.data.get("asset") + if asset_name != instance_asset_name: continue - asset_name = asset_doc["name"] - inst_asset_name = instance.data["asset"] - if asset_name == inst_asset_name: - instance.data["assetEntity"] = asset_doc + instance_asset_doc = instance.data.get("assetEntity") + # Update asset entity with new possible changes of asset document + instance.data["assetEntity"] = asset_doc - # get parenting data - parents = asset_doc["data"].get("parents") or list() - - # equire only relative parent - parent_name = project_name - if parents: - parent_name = parents[-1] - - # update avalon data on instance + # Update anatomy data if asset was not set on instance + if not instance_asset_doc: instance.data["anatomyData"].update({ - "hierarchy": "/".join(parents), + "hierarchy": hierarchy, "task": {}, "parent": parent_name }) @@ -241,7 +318,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): hierarchy_context = context.data["hierarchyContext"] active_assets = [] - # filter only the active publishing insatnces + # filter only the active publishing instances for instance in context: if instance.data.get("publish") is False: continue From d92448f923a8da0e094c5b78e645c40b45d6f363 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:02:08 +0100 Subject: [PATCH 2234/2550] celaction: adding deadline custom plugin --- .../CelAction/CelAction.ico | Bin 0 -> 103192 bytes .../CelAction/CelAction.param | 38 ++++++ .../CelAction/CelAction.py | 121 ++++++++++++++++++ 3 files changed, 159 insertions(+) create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py diff --git 
a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico new file mode 100644 index 0000000000000000000000000000000000000000..39d61592fe1addb07ed3ef93de362370485a23b9 GIT binary patch literal 103192 zcmeHQ2V4_L7oQ+17Eb(ZC>9jYh9aJ*CI!Yvtch-C}%GyqF4^>y<&9SCuIvvd)< zyn}(Z{5E{Be(h$pp=tdg_5&PnE+#$Pwd3M5(5L>QAp?FM?UI3!p_ z9*8`qYq`0$MK^tmffk0xHhFINd+5)x4>?bta1u{GnrxB#e4a(@6_1Wb=1x7gZK{30 z4oB?j%rLKBZ~mJ$-ri`WK@-vR>td5!SF1CNKeFsoyjQHrad{on_@E2l@k07({*I8r zZGeF?9Co)mCRH~+_Q~SvPrD~HFB?@a#(9pO(Ph+?NzK;wM@JHp2Mv!q>)T;Z z#><(HHePt##ChLlCj;b?y!?EJ9ZTk>@(n{fI#2RBzwOtg57Vr5QCRv(;fPt++K;%g zvY&B^9%{~x7q!>>>xBt2pTItBtUGNxvcIv&m(p!*FmXq=j?@FEe@vh{l4@JZ@*+$18?8#QTLrLpi{|1*jAsSr?~a%;lS=?HIvnAP4M_@ zu9=2A8Zj-5;zWBs1UOD?mc7F6gHf{k@oq+eBU+@e%$Na*bEpZ;g`<6Zmn4Z&jE1K8 zHg!%ozio%;Q`DvWK{gE=n(8tw`nc_-a*jQn*r2ajbStY@)%N#XVbuHYAUhz8}v zi9X+{?c`gx+57tKHm;9o%rNgV%gxgR?XO{BJ1C;BM{v@d_p{wRx1!D0vd4~e$cTC6 z>)!K``|ro8VeAn#ZS)$Bt5##>`!pZXn?F1D{>g1cr`*Gr>@%j06LSM%wrv-WPR!#6 zj%anu7`>fk&rUEqxY%{C`~IxqsQ2HsXJu7!ePb zpKmyboztS(L)YQ#_VrCYf(==nso?0t;Vaty8q{&hy?}`Q|BJQ_)$_2_Te$Q7`Heko zkB6RW$uL-EVcTy28qhS_Yv<>lP7RaicYY8us{YAK!OksD3w!+#;L~#XEVs>i41>O{ zALkfZ#!vC%x+`+We>=F3*+M*A;_75%#j?O)Fr zwZ?5#yNL7V$8GmKdvqf^e(=)IW_B+&da@EjT!x0Zoci)<&zpmKy1f{8B1AR=n(sqX zU(UX`sYiI%{GcWlwmP?J8`s79Xwv-b?XkLbijh@GmZg*>T>xXo= zqz!(wu%2;;);*~CyVC}{xEL*TywA=*ckJmOLD5IIvyKVtqp|3nOYiWQMb6^5q_eCv ze(1kxU>4g}qvO-I);on5=CyrWjavH)3hy^h>^Eh_Y`0+zO_A-Vo{M)49dmbNdU8*% zcu_*Dr6(3MMK;44Gg6;@W<9pu;?Q>mH!f3fHte^JE1F$vojo5|G1Nm{FX-pY*je-T z-M$`EFZ_^mLRjByELvwCUT@TnMs~JI7u!U8%^r+`dpCXGEh8-*?=* z+n`g8l}w$n$p32ZklksUs$1*3OtWME`RAUG?*|#B2(#;WzjnLD8a|)_V`0Cui^G4- zYT}Ul_`(CLYvRVkR&N?~DJ6&9Ib1KwZTw)gdGov;x$EW&8a)<Q+u<%9EzvHw^!@3A-{!ht1JCb$ed+iFS_b>s(HV4?Bdz1qwA~_lRHOyb$Gkm zV}1=!!DRbZ;ifzL9zXH2{#m!ve^9@N+s5BDt~bDl5gX%Sz*?U+xbDJQu`!uzMtX*I zi<>*!Ekf7SW2(re`2e=bAV4u>BZ|p2nW=~TH*P@DUWRX5dW@U<_tsfU#(2)=&g-7m z4j6BPMC1MZhu-w-`-6^a|9~*F7^@~TmW4GGTnTKpa&G!R$(<%x1xB-n8=El(EweTc zXglrdtsKYD_CrH|Ocags>AnBsmQZV+$;g?nBd=^1j*SrAVMlq6cDb<1LYLWL-gf=| z?t^k7=d@dJ(=+(Z|Cs((+dD_M`Z@BS&Q=$fCH)bh&qz%ec5cQ^UC!~Lfu@^{beO$% zUz^0p+s`rNcVyaMMjx6uIIPO|dEdlg*N}SQp>ZD#AHH7QY4xU*G=0a(_CvDc{**&O3LfEm-DxIww5PXK&?8ZfZp?t?%lpF z#o$cy<`19c$5{8eb@4;|U&+ardfW_a#_DC;yOAI6)u*LRy%Wz*m>?a8HLPLtKZd&q z`%md@xhtz)2(>e1f=!pZCrADD)WP+YcfBUdu8^qcF?4n*ruyzI7^on->2 zK+5qy2X%UFfTp%a{YI}69567vlX7(L?JuXn){{;SyBLtIb81q?nD9p|p2=TYm0`>B(nMtog^zK%MAQ#|K!NOuEbMeX;^{$K2-Cw_sy;FHS# z`mlkXFE3B3;l|v*Hzf8)$(d)q^_lRPp=*E7*)Pqix0%Md6!LG{`+eK;1Ap0KH2)W` zx#zuuCiOekV(26o*PQp^*(iND8MV@_?cih6kOP;JaTo8{{I=w@_nv?m&WHD&p75rr zS2eG!;U=ct&n_F8>6`5^IWBK{{j=PQV@?{-um7LpIN#oe=!bAQGg0>zT{c=<_BRnbm zDC}}G*=+Oi`AIf+53!xIbWGPiF+j%?XOC;cok&HnA8$CcX;_nNvBdhat?)}UuQJrusf ze_c$H*_~&*=VxE1!jc|rpLBkAC!SlkW23yApPq4ZeQ;|ea@Z9#X|1VqORi(J(+6+A z$nS9}ePWR7jY~dD?k=yjs6P_STpVb+@Z_Vgs3uMBrT8b+zHJ}R%V=Y$%Un9~@+5R@ zJ7WF%%z0dDWbl{36%!i?lKrFfZ3kW+R!4`~b$VvtFAG1M*^@G1ot|lskz1#^`Ad_3 z`J;326kU%gBbHjcuF;^k+orc;=Ca2;NqaIr|LKia>>drC7#$sSPHH7E)9yAid3n=I zJ`EkiCNx_KLpRTQbilI*gL9pKbLP32F>Q668qWQDaa+oAvqQrB;U@X3F0!9_&v};S z9kA?Zde)9>^YblM8^-TCK7Z=RFKrDN=I5RXr?u_xbgv`-EcaBpYjgKNn*{bhYcCG6 z*%iZBdSj)y!-H8ahO->+AHF`tYxMM4OP+DPQ>I>@!?yQ1`(Vz@&_9#1@2x%X>ob#U zBHOdqFGgAGTJ+H|?y#cS@4fd#ZrRGLxxv%ok1nHlj_18yGcs?i2^gP~@B5DRAjzgj zw>Y0mm-H_*qOG>Hq0kqbKv z1wfm@@OwSHbNm9s+=Uh^FudoiD|`!WeH+Qk$TwS-32O_9c}qMSd1;lN{e!09`X}c<2DKCi=s{!nXR@f`OIp&_*UREZ5_!TjGEmjT;r2>F_t|DPfEfEd-X9iR(y(8W&B*^zRR z@TY6M`-=0#P}KbAwav?Ny% zf#TNx1&Q-0(Wekfl*9byDDTZ?igT;3(y>QTJ`z`rfHM6HUe2Pv^&{`YexiTXs#Pmj 
znf~+Bk9_Our^NhJqJOnCthx}Z)^sgG9 zs-8>qU)43NT0WwG)%aBPT%!M~u3^>k5&f&ir>f@?{a1AjtCo-GUo}2eJ(uXes%u!a zd_@1M@u}*$ME_M?!>Z*Y`d5ulRnH~*uj(3BEuXgP{|oOG@&8b*TD5YO>7O6>+c)q3 zBYf>a^sknNRTomGf9yxgkF}?;j~(^}`__p6+)=6SiT|%^T`S8iPXF5K6Ru1~l2$wd zME}KO^F8Dc{eKTyE?!2W|KhRv9&%*$&%I|%3E|xu?xUVX=2x6swIz8a%>{*Twm@A* zk@~!ternAld9j0v)Rpx8%B4vCymvb(UX+Dg`R_qznv{{&KYpJ|EKHZ&_u-HmX)cE= zo)2;#bb$9RUYB@pn4fW)Vu$M%sV_)cQes_7#HM)BRz>P^A`Gc7+=Qa_#rYKXVdui~ zn#AUNXp7cAFUC$D+tTVmzBZMgd0x2aNepsAfF#Us!*@={U{!%M65eQErZdy|cq_IG*uE^S{`xxD1O4?_vE<2~3w;kR3=vh3fx zY%oq5gWlzIPul^E2^Yvd%2n4w;2iG&x=yF{&AHp0;=;W@8}9$;G-3l)Q~!#(=Or$r zgs{#cgnM;?;2p(*_1!`nfH`5#Kd5fiMB@9C3Xkca+rsa0aigszddIpiq`WMO1L-n0 z5J7C9YS;?QoAEWjP`5l-S1HuHtPQ|=8nJ<@p>stkVEixq9tQ`W?+JzfQUZ9MSA;n6 z6z5T|LR$g5Nr3x>c8cwhxOO2>wKbtpRy$X*lF_??93 zkW~|z??);;rvp7ksG)CVV?sKy0qwUHsSP~&&juJ2=Wa8K1J6>G(tQ(ITS$jBg3j8H z!uKtK#0EmpW!eVt{l5@=zm(n=z&!%KXQ0bMh`-}3`HtK7jiPo-kJkn8dLOOVl5Btv zK9_V^VguT(dzyQR4TPdIv|jO=umF55iyH?NZ2;z!ztL@xPmAbdnG5F(+Cu?f!s&Mj{}((6m4KU-3|%07Xh#V7y4Sj20uFiE24NC_!vOp zGIV*UyHI;2C!Es=U_H`0#rowXEuy#yPD+WR@V*e#-AuPbLhVHW^f(V}AivNKWS$?D zltyeod-X2MIiA;*!v=^ynJk!E`dexPqLR*eW%)1;_z-_GEmgb>+h7CZK)-BS$6R=y zm!E#P7&!v2l{P^9$+T35uZ}* zXUa>W@O^`V4G_Q5id9jCw~lQ9t_P0_^%|}ttK*V>XX0x~g)>Pys89rmPho|^s}+rx z*9Kr5vCs-Z36sSlkQ0RTU_6)cD}{Y!RQZ+4?=wey3X4Tndoq;y6yoDnVH;j}meq&|fQ=sBS(e=;8U70~5y2-lH@#kEV)wF?2b7O3_q%+H~)ZP5CM z^1<3AA>prTAL3x@Si`K5xzMI3IlP<6inSJSMlF|eLf&f8)AV3fx z2oMAa0t5kq06~BtKoB4Z5CjMU1Ob8oL4Y7Y5FiK;1PB5I0fGQQfFM8+AP5iy2m%BF zf&f9_>mvZKx{#x^2q0t#n?$(N)x-!H=pkfF$3+O4>&nNCOBt7|uh>3(OdR)5cUSHZ z+Q)}1|0GWED6>5X+;gbc@JAo4!j=kBQtQz<1tGJheB7g&d>k_R3y&;3&}GeK=KTT6c~W>MXo(ckpZRx1E~rOuoM^&C@>&WV4z5cGJ}tV zldmpu=^2n$fg%DT1%$*3aY_ME%EMh=1x)!U^-dv9qRJ7#8(i1{c>pnxG5m+a4xt8h z3P=(s{|EvE0fGQQfFM8+AP5iy%8dZLU)U=*3^}Q!P}CN_kF5)3e*qc+@1^O%UPMMJ z>)tJR)By0#RD=J=tPoTO%6*n8n+NY` z;=MfsWzv-EKMW;S5c}nB7WCQ%`1c2jQAKdRcY(5fp$x_=0)CHxa{|jBrK%p}1p!W` zX5I^1lzI#`# za6HrqvlY+?$ex4c=fR%4nm+i?Cu@!TeAstd(}y5wA(3BtZdHKkYNScp{)zsH{gd-w zvj~v!kBtBH+)5*Zbp=AYt0%zw8norPHV3Jk~!cbOA22HH9@?`wuQ3ewktq& z8flW&KjyI_Nh8cwKqC_QnR*mAvTLFI-1}{d*Z}u_o5Fma5w$-S@SJYd$K@-xrNib#p`vMAo8UQ!I2 zQC~Q5a``yUxznfs11KlXXHog-$G&y>-?GDXDPD9RiU)g)OU5nwz8$Aa(r|hKaie~c z$b&KB+-XplUYIX)o^bCOQ*1c5pe#=##;V~)U|q60tj%o1>oc&Hy-)d(X^9BsViROti}lXWyjbwP0dXG1eKrwezyfS~m8)?rs0=b%fvj9^tUbkpH7ZQp zL}!Y9`v;5x7xJt8o|_<}4c0dhmRXT_kTzTkR9P+-0R#N}GK&p!H{P2q6z`pvgt#we zVLXJ8KdxLIXhCHd^SXen8QdqoP(qlq(cd58z1fU05CH~GS|CQbSOgi{f~@cGclU&% z+(Hca@Am=*3^C--DOU$tP#I)w2^b`CVBX6A7y#cN@Rtzx=@@!!aMJ=Y%EcmJU<4S% z0nsumh8xS(ffiJTXZnpme#y6~IB=hfzeh?F8}19iU&i}e_UF+A#PI#@dVs+_4y*&< z?{U&&qTo{~#lBSsW8eqh->IgFzP{Bq;9Mx2BMZ0*|6u_4`E0n)hy0UZD>c4V{x!iD z$X65W;0nH{$KPQQ!gqKu@pHlf1Ic>N6)00%6McQFZD1XPu1~{tP;o4z<6+oVzE%D; z!52Q)3owA|0;~(MZyWqax9VaX?%**)6McQFZQwB<#=YJ68i3~q^q9z(uI~W`?%yi! 
zn&b=Q+>C92|L8>TI(uyo;MY;j8G)_tL zDX)SKH1Z$s6JZYI1XQ?BL|d{?1i(QKf&f8)AW*3Yz`BSD&}E?eO6VM{nY4oR^SBJG z9km5NsY6Q2;kwtM>{nk~f__f`(d%1^;SR{dM5iBv->x!oI-L%$eQjj`FKblr18ezc z7LZt42AAC<{Y~>M8}9hp{DW^=HQ;9`*oC7K8VB!U<7W~BAa4lkhQx=@K}k7BSg#%q z&o8QfZT_{rKfr!IP?Cn|tNRTA-`9XBLE?N$^!a|7NSNYEp9A4vTc0%lqE9!=JYW98 zi6{J*QE>!+3Vhp%{glys@bfXr_@nLTZ+_P0GLJtZ*4;`UfABjwyl5*R>rxZvPQyy) zpB=7Kz(Y9r9N_mTXrg^Fm^J@P!@nhSQt;D|FIOqoue`TjRNjYul06{s`%+@TdrB-~ z1LabJq(u^5@%w>d@TZZa=YOK`{u5XReC@?)(sySng@61`5auoKy$|rDBYE#fEO=YM zFYqczoF|oGEW8)=u^=7Zk>bS;hIEERhP)4d<1&@OKi<>iDNwwW@b8y0aXS4uq$Max z$7SjIu4Nqu%Tf>e#Xi~TKu5u+rqI6(_S!14hZxN55m1qIWo664OKIx>&vxJ&$L6OW zrMLy}zFq?UYL&K~$|?{1`(U3>0p}?tV8>G2nEu#)gW+CFD}0-PPrOIlKR}XmQzpEn zEizzxf%{f}&V3W`F**hB=$wc5QOjGnwF-m$|?(dRtFh;xv@hi0qY*cXWgN=&nIEKdI5Q@D~qqv^b5A61N_Ra zRng}~Dy08dnzkydEcjkxzr#)JJB`HH*N_kQ#QYEY@f;@&&Ihg7SMGB#$N)ZboXD{|UU}063;I%+FUd!&aM%hs!6Ej>f>40HtsoR( z?<)w!*o_N93j2ZK5Pzu%hfLdo5I2D10%$-ILIXGyOG8SUP8}{tr{KW(>V?=(%NWQG zsL)T#fcR+v9ONhi0x(x;2lpE0GSnK<8Ui&2Y9*6y4)G4~&YH#!nB$EHlD%iZzn22x z{gd&&@C$(y^QUWy0cg87&}$&PHcHbn_vwhUM+m>q3QmY;^&6KS1X_o9ZHzI8lDdxrUWnkfOWJQlo0ms!EwCrJm=OA65k(? zUn9@1z#k2-qk6#@yB5e2$VwXG-^Ir@^xOpc-o=M!soa;d^I+{~tfs~kb=!gdC&Jog zKHeJ!_VoV(agANq)aD@+jp0Q1$rFHI0WW2Z7;tb_x2~qjf!ss5&nNQ)+!qx0#h=)| zS81wkb=v{Ce};ZJ92f^UFb=T8t8dWcJVzZqfCo!>?>z_a7sdbg1n^LE=ht3C0u0PK zk8Lt|$u1Np$^z@+5X||$>YgmnpDEz~p7&}QJfoBNG<@~%Jb~P8I8i?(bK^(iv0;=t z`qOk7z@aJd(L>YilJ>q21d7b5n6Pf)Atjti|5g;GAm=gnjq-0sSE*{2Qm0_$&y@`~b8V2;avJ zlM?=o(=a3|4=7{7e>$F>mv&f=_|F{KkY}l~GGsy->%8~7<&^=kyfi;Np4+Ka|J|;l z^*{KkU7!1~6U-S$O8f))Psd8`oACbKYWDv#|LK@ZaIbej@N%vn?4f*spD^7I$DzJS zN&ItT2HfC8cEi3{CF)99TQJ@?;=Y{wnD_dxL};U>vN9#5K^q2~du{IVUVFTO>v)-b z0-*1@oM`JXZrpHu&t0bdP@tY3=TYx1Sf-MAuB`lmwNHdt;0;wAeTMZ>7LdRlA(1UY JVj#NS{{bY%R3rcZ literal 0 HcmV?d00001 diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param new file mode 100644 index 0000000000..24c59d2005 --- /dev/null +++ b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param @@ -0,0 +1,38 @@ +[About] +Type=label +Label=About +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=Celaction Plugin for Deadline +Description=Not configurable + +[ConcurrentTasks] +Type=label +Label=ConcurrentTasks +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=True +Description=Not configurable + +[Executable] +Type=filename +Label=Executable +Category=Config +CategoryOrder=0 +CategoryIndex=0 +Description=The command executable to run +Required=false +DisableIfBlank=true + +[RenderNameSeparator] +Type=string +Label=RenderNameSeparator +Category=Config +CategoryOrder=0 +CategoryIndex=1 +Description=The separator to use for naming +Required=false +DisableIfBlank=true +Default=. diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py new file mode 100644 index 0000000000..d19adc4fb9 --- /dev/null +++ b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py @@ -0,0 +1,121 @@ +from System.Text.RegularExpressions import * + +from Deadline.Plugins import * +from Deadline.Scripting import * + +import _winreg + +###################################################################### +# This is the function that Deadline calls to get an instance of the +# main DeadlinePlugin class. 
+###################################################################### + + +def GetDeadlinePlugin(): + return CelActionPlugin() + + +def CleanupDeadlinePlugin(deadlinePlugin): + deadlinePlugin.Cleanup() + +###################################################################### +# This is the main DeadlinePlugin class for the CelAction plugin. +###################################################################### + + +class CelActionPlugin(DeadlinePlugin): + + def __init__(self): + self.InitializeProcessCallback += self.InitializeProcess + self.RenderExecutableCallback += self.RenderExecutable + self.RenderArgumentCallback += self.RenderArgument + self.StartupDirectoryCallback += self.StartupDirectory + + def Cleanup(self): + for stdoutHandler in self.StdoutHandlers: + del stdoutHandler.HandleCallback + + del self.InitializeProcessCallback + del self.RenderExecutableCallback + del self.RenderArgumentCallback + del self.StartupDirectoryCallback + + def GetCelActionRegistryKey(self): + # Modify registry for frame separation + path = r'Software\CelAction\CelAction2D\User Settings' + _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, path) + regKey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0, + _winreg.KEY_ALL_ACCESS) + return regKey + + def GetSeparatorValue(self, regKey): + useSeparator, _ = _winreg.QueryValueEx( + regKey, 'RenderNameUseSeparator') + separator, _ = _winreg.QueryValueEx(regKey, 'RenderNameSeparator') + + return useSeparator, separator + + def SetSeparatorValue(self, regKey, useSeparator, separator): + _winreg.SetValueEx(regKey, 'RenderNameUseSeparator', + 0, _winreg.REG_DWORD, useSeparator) + _winreg.SetValueEx(regKey, 'RenderNameSeparator', + 0, _winreg.REG_SZ, separator) + + def InitializeProcess(self): + # Set the plugin specific settings. + self.SingleFramesOnly = False + + # Set the process specific settings. 
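# Illustrative aside (not part of CelAction.py): the registry values handled
# by GetCelActionRegistryKey/SetSeparatorValue above can be inspected from a
# regular Python 3 session with the stdlib "winreg" module (Windows only);
# the plugin itself runs in Deadline's embedded interpreter and uses _winreg.
import winreg

path = r"Software\CelAction\CelAction2D\User Settings"
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
key = winreg.OpenKey(
    winreg.HKEY_CURRENT_USER, path, 0, winreg.KEY_ALL_ACCESS
)
winreg.SetValueEx(key, "RenderNameUseSeparator", 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(key, "RenderNameSeparator", 0, winreg.REG_SZ, ".")
print(winreg.QueryValueEx(key, "RenderNameSeparator"))  # ('.', 1)
winreg.CloseKey(key)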
+ self.StdoutHandling = True + self.PopupHandling = True + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Rendering.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Wait.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Timeline Scrub.*") + + celActionRegKey = self.GetCelActionRegistryKey() + + self.SetSeparatorValue(celActionRegKey, 1, self.GetConfigEntryWithDefault( + "RenderNameSeparator", ".").strip()) + + def RenderExecutable(self): + return RepositoryUtils.CheckPathMapping(self.GetConfigEntry("Executable").strip()) + + def RenderArgument(self): + arguments = RepositoryUtils.CheckPathMapping( + self.GetPluginInfoEntry("Arguments").strip()) + arguments = arguments.replace( + "", str(self.GetStartFrame())) + arguments = arguments.replace("", str(self.GetEndFrame())) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetStartFrame()) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetEndFrame()) + arguments = arguments.replace("", "\"") + return arguments + + def StartupDirectory(self): + return self.GetPluginInfoEntryWithDefault("StartupDirectory", "").strip() + + def ReplacePaddedFrame(self, arguments, pattern, frame): + frameRegex = Regex(pattern) + while True: + frameMatch = frameRegex.Match(arguments) + if frameMatch.Success: + paddingSize = int(frameMatch.Groups[1].Value) + if paddingSize > 0: + padding = StringUtils.ToZeroPaddedString( + frame, paddingSize, False) + else: + padding = str(frame) + arguments = arguments.replace( + frameMatch.Groups[0].Value, padding) + else: + break + + return arguments From 4a9b214d8daf7b93634248518572659dc3094001 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 25 Nov 2022 12:12:58 +0100 Subject: [PATCH 2235/2550] fix sync of asset docs on instances --- .../plugins/publish/extract_hierarchy_avalon.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index a9c0593f9f..b2a6adc210 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -59,7 +59,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): else: new_parent = self.sync_asset( - context, name, entity_data, parent, @@ -67,6 +66,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): asset_docs_by_name, archived_asset_docs_by_name ) + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances( + context, + project_name, + new_parent + ) children = entity_data.get("childs") if not children: @@ -132,7 +137,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): def sync_asset( self, - context, asset_name, entity_data, parent, @@ -140,7 +144,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): asset_docs_by_name, archived_asset_docs_by_name ): - project_name = project["name"] # Prepare data for new asset or for update comparison data = { "entityType": entity_data["entity_type"] @@ -219,13 +222,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): changes[update_key] = value cur_entity_data[key] = value - # make sure all relative instances have correct avalon data - self._set_avalon_data_to_relative_instances( - context, - project_name, - asset_doc - ) - # Update asset in database if necessary if changes: # Update entity data with input data From 04ac17455790ffdb6275ae31d0ec4bb4f595732e Mon Sep 17 00:00:00 2001 
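The ReplacePaddedFrame helper in the CelAction Deadline plugin above swaps frame-number tokens for zero-padded values. The literal token strings were angle-bracketed and are stripped in this copy of the patch, so the token names used below are stand-ins, and this is only a rough standalone sketch with the stdlib re module rather than the plugin's .NET Regex calls.

import re


def replace_padded_frame(arguments, token_pattern, frame):
    # token_pattern captures the padding width, e.g. r"<STARTFRAME%(\d+)>"
    def _sub(match):
        padding = int(match.group(1))
        return str(frame).zfill(padding) if padding > 0 else str(frame)
    return re.sub(token_pattern, _sub, arguments)


args = "-render <QUOTE>shot.scn<QUOTE> <STARTFRAME%4> <ENDFRAME%4>"
args = replace_padded_frame(args, r"<STARTFRAME%(\d+)>", 25)
args = replace_padded_frame(args, r"<ENDFRAME%(\d+)>", 150)
print(args.replace("<QUOTE>", '"'))
# -render "shot.scn" 0025 0150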
From: Jakub Jezek Date: Fri, 25 Nov 2022 12:30:31 +0100 Subject: [PATCH 2236/2550] adding .eslintignore file for deadline plugin --- openpype/hosts/celaction/resources/.eslintignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 openpype/hosts/celaction/resources/.eslintignore diff --git a/openpype/hosts/celaction/resources/.eslintignore b/openpype/hosts/celaction/resources/.eslintignore new file mode 100644 index 0000000000..17a45423da --- /dev/null +++ b/openpype/hosts/celaction/resources/.eslintignore @@ -0,0 +1 @@ +deadline_custom_plugin/CelAction/* \ No newline at end of file From 86608dd106f2421062a8746b85caa89a729cb714 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:40:53 +0100 Subject: [PATCH 2237/2550] trying better flake8 ignore --- openpype/hosts/celaction/resources/.eslintignore | 1 - openpype/hosts/celaction/resources/.flake8 | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) delete mode 100644 openpype/hosts/celaction/resources/.eslintignore create mode 100644 openpype/hosts/celaction/resources/.flake8 diff --git a/openpype/hosts/celaction/resources/.eslintignore b/openpype/hosts/celaction/resources/.eslintignore deleted file mode 100644 index 17a45423da..0000000000 --- a/openpype/hosts/celaction/resources/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -deadline_custom_plugin/CelAction/* \ No newline at end of file diff --git a/openpype/hosts/celaction/resources/.flake8 b/openpype/hosts/celaction/resources/.flake8 new file mode 100644 index 0000000000..6cc095c684 --- /dev/null +++ b/openpype/hosts/celaction/resources/.flake8 @@ -0,0 +1,3 @@ +[flake8] +exclude = + */deadline_custom_plugin/CelAction \ No newline at end of file From f8e3e99aad1af2c4571725f6be7c9617a4768218 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:42:29 +0100 Subject: [PATCH 2238/2550] flake8 ignore almost done --- setup.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 0a9664033d..a721b8e9f6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,7 +8,8 @@ exclude = docs, */vendor, website, - openpype/vendor + openpype/vendor, + */deadline_custom_plugin/CelAction max-complexity = 30 From ceabbe0de37a09915661456dd073ab55a98f8deb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:44:34 +0100 Subject: [PATCH 2239/2550] flake8 file clearing --- openpype/hosts/celaction/resources/.flake8 | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 openpype/hosts/celaction/resources/.flake8 diff --git a/openpype/hosts/celaction/resources/.flake8 b/openpype/hosts/celaction/resources/.flake8 deleted file mode 100644 index 6cc095c684..0000000000 --- a/openpype/hosts/celaction/resources/.flake8 +++ /dev/null @@ -1,3 +0,0 @@ -[flake8] -exclude = - */deadline_custom_plugin/CelAction \ No newline at end of file From 6cefb2ec4b63744b0bed508b3d9671d69e022c57 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 15:01:14 +0100 Subject: [PATCH 2240/2550] celaction: deadline and settings improvements --- .../plugins/publish/collect_render_path.py | 26 +++++------ .../publish/submit_celaction_deadline.py | 5 +-- .../defaults/project_settings/celaction.json | 12 ++--- .../defaults/project_settings/deadline.json | 9 ++++ .../schema_project_celaction.json | 39 ++++------------ .../schema_project_deadline.json | 45 +++++++++++++++++++ 6 files changed, 82 insertions(+), 54 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py 
b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index ec89fc2e35..e5871f8792 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -11,29 +11,29 @@ class CollectRenderPath(pyblish.api.InstancePlugin): families = ["render.farm"] # Presets - anatomy_render_key = None - publish_render_metadata = None + output_extension = "png" + anatomy_template_key_render_files = None + anatomy_template_key_metadata = None def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) - anatomy_data["family"] = "render" - padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ - "frame": f"%0{padding}d", - "representation": "png", - "ext": "png" + "family": "render", + "representation": self.output_extension, + "ext": self.output_extension }) anatomy_filled = anatomy.format(anatomy_data) # get anatomy rendering keys - anatomy_render_key = self.anatomy_render_key or "render" - publish_render_metadata = self.publish_render_metadata or "render" + r_anatomy_key = self.anatomy_template_key_render_files + m_anatomy_key = self.anatomy_template_key_metadata # get folder and path for rendering images from celaction - render_dir = anatomy_filled[anatomy_render_key]["folder"] - render_path = anatomy_filled[anatomy_render_key]["path"] + render_dir = anatomy_filled[r_anatomy_key]["folder"] + render_path = anatomy_filled[r_anatomy_key]["path"] + self.log.debug("__ render_path: `{}`".format(render_path)) # create dir if it doesnt exists try: @@ -47,9 +47,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin): instance.data["path"] = render_path # get anatomy for published renders folder path - if anatomy_filled.get(publish_render_metadata): + if anatomy_filled.get(m_anatomy_key): instance.data["publishRenderMetadataFolder"] = anatomy_filled[ - publish_render_metadata]["folder"] + m_anatomy_key]["folder"] self.log.info("Metadata render path: `{}`".format( instance.data["publishRenderMetadataFolder"] )) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 8a3160e83d..e39c2c0061 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -7,11 +7,10 @@ import requests import pyblish.api -class ExtractCelactionDeadline(pyblish.api.InstancePlugin): +class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". + Renders are submitted to a Deadline Web Service. 
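# Small illustration, not from the repository: with the default frame padding
# of 4 the collector above injects the literal string "%04d" as the frame
# token, which downstream tools expand per rendered frame. Paths are made up.
padding = 4
frame_token = "%0{}d".format(padding)                 # "%04d"
render_path = "/proj/sh010/render/sh010_render.{}.png".format(frame_token)
print(render_path)       # /proj/sh010/render/sh010_render.%04d.png
print(render_path % 25)  # /proj/sh010/render/sh010_render.0025.png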
""" diff --git a/openpype/settings/defaults/project_settings/celaction.json b/openpype/settings/defaults/project_settings/celaction.json index a4a321fb27..dbe5625f06 100644 --- a/openpype/settings/defaults/project_settings/celaction.json +++ b/openpype/settings/defaults/project_settings/celaction.json @@ -1,13 +1,9 @@ { "publish": { - "ExtractCelactionDeadline": { - "enabled": true, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10 + "CollectRenderPath": { + "output_extension": "png", + "anatomy_template_key_render_files": "render", + "anatomy_template_key_metadata": "render" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index a6e7b4a94a..8e892bb67e 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -70,6 +70,15 @@ "department": "", "multiprocess": true }, + "CelactionSubmitDeadline": { + "enabled": true, + "deadline_department": "", + "deadline_priority": 50, + "deadline_pool": "", + "deadline_pool_secondary": "", + "deadline_group": "", + "deadline_chunk_size": 10 + }, "ProcessSubmittedJobOnFarm": { "enabled": true, "deadline_department": "", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json index 500e5b2298..15d9350c84 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json @@ -14,45 +14,24 @@ { "type": "dict", "collapsible": true, - "checkbox_key": "enabled", - "key": "ExtractCelactionDeadline", - "label": "ExtractCelactionDeadline", + "key": "CollectRenderPath", + "label": "CollectRenderPath", "is_group": true, "children": [ { - "type": "boolean", - "key": "enabled", - "label": "Enabled" + "type": "text", + "key": "output_extension", + "label": "Output render file extension" }, { "type": "text", - "key": "deadline_department", - "label": "Deadline apartment" - }, - { - "type": "number", - "key": "deadline_priority", - "label": "Deadline priority" + "key": "anatomy_template_key_render_files", + "label": "Anatomy template key: render files" }, { "type": "text", - "key": "deadline_pool", - "label": "Deadline pool" - }, - { - "type": "text", - "key": "deadline_pool_secondary", - "label": "Deadline pool (secondary)" - }, - { - "type": "text", - "key": "deadline_group", - "label": "Deadline Group" - }, - { - "type": "number", - "key": "deadline_chunk_size", - "label": "Deadline Chunk size" + "key": "anatomy_template_key_metadata", + "label": "Anatomy template key: metadata job file" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index cd1741ba8b..77d520c54a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -387,6 +387,51 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CelactionSubmitDeadline", + "label": "CelactionSubmitDeadline", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": 
"Enabled" + }, + { + "type": "text", + "key": "deadline_department", + "label": "Deadline apartment" + }, + { + "type": "number", + "key": "deadline_priority", + "label": "Deadline priority" + }, + { + "type": "text", + "key": "deadline_pool", + "label": "Deadline pool" + }, + { + "type": "text", + "key": "deadline_pool_secondary", + "label": "Deadline pool (secondary)" + }, + { + "type": "text", + "key": "deadline_group", + "label": "Deadline Group" + }, + { + "type": "number", + "key": "deadline_chunk_size", + "label": "Deadline Chunk size" + } + ] + }, { "type": "dict", "collapsible": true, From ecbdac09e21f0f186d78cbb04c8030bbf1588e82 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:14:45 +0100 Subject: [PATCH 2241/2550] celaction: add local render target --- openpype/hosts/celaction/api/cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index e00a50cbec..4c07374b08 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -53,6 +53,7 @@ def main(): pyblish.api.register_plugin_path(PUBLISH_PATH) pyblish.api.register_host(PUBLISH_HOST) + pyblish.api.register_target("local") return host_tools.show_publish() From 6994e48a716e80a8b7cdb344d609826b0874b447 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:15:09 +0100 Subject: [PATCH 2242/2550] celaction: returning frame to anatomy data --- openpype/hosts/celaction/plugins/publish/collect_render_path.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index e5871f8792..f6db6c000d 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -18,7 +18,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin): def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) + padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ + "frame": f"%0{padding}d", "family": "render", "representation": self.output_extension, "ext": self.output_extension From f8b00d7ad4a66b93886b48c8100f42fdf9b6fb25 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:16:39 +0100 Subject: [PATCH 2243/2550] celaction: submit job from published workfile --- .../publish/submit_celaction_deadline.py | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index e39c2c0061..03d59b30fd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -80,6 +80,26 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): render_dir = os.path.normpath(os.path.dirname(render_path)) render_path = os.path.normpath(render_path) script_name = os.path.basename(script_path) + + for item in instance.context: + if "workfile" in item.data["families"]: + msg = "Workfile (scene) must be published along" + assert item.data["publish"] is True, msg + + template_data = item.data.get("anatomyData") + rep = item.data.get("representations")[0].get("name") + template_data["representation"] = rep + template_data["ext"] = rep + template_data["comment"] = None + 
anatomy_filled = instance.context.data["anatomy"].format( + template_data) + template_filled = anatomy_filled["publish"]["path"] + script_path = os.path.normpath(template_filled) + + self.log.info( + "Using published scene for render {}".format(script_path) + ) + jobname = "%s - %s" % (script_name, instance.name) output_filename_0 = self.preview_fname(render_path) @@ -96,7 +116,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): chunk_size = self.deadline_chunk_size # search for %02d pattern in name, and padding number - search_results = re.search(r"(.%0)(\d)(d)[._]", render_path).groups() + search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups() split_patern = "".join(search_results) padding_number = int(search_results[1]) From 2117df1ad2a314e600178dec6c10d977cdfdc6e6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:38:14 +0100 Subject: [PATCH 2244/2550] move deadline plugin to correct place --- .../custom/plugins}/CelAction/CelAction.ico | Bin .../custom/plugins}/CelAction/CelAction.param | 0 .../custom/plugins}/CelAction/CelAction.py | 1 + setup.cfg | 2 +- 4 files changed, 2 insertions(+), 1 deletion(-) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.ico (100%) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.param (100%) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.py (98%) diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico similarity index 100% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param similarity index 100% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py similarity index 98% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py index d19adc4fb9..2d0edd3dca 100644 --- a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py +++ b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py @@ -71,6 +71,7 @@ class CelActionPlugin(DeadlinePlugin): # Ignore 'celaction' Pop-up dialog self.AddPopupIgnorer(".*Rendering.*") + self.AddPopupIgnorer(".*AutoRender.*") # Ignore 'celaction' Pop-up dialog self.AddPopupIgnorer(".*Wait.*") diff --git a/setup.cfg b/setup.cfg index a721b8e9f6..10cca3eb3f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,7 +9,7 @@ exclude = */vendor, website, openpype/vendor, - */deadline_custom_plugin/CelAction + *deadline/repository/custom/plugins max-complexity = 30 From 891a7e7609b9bd296af0923f3c508f9b8a203689 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:47:22 
+0100 Subject: [PATCH 2245/2550] celaction: restructuring cli.py and kwargs --- openpype/hosts/celaction/api/__init__.py | 1 - openpype/hosts/celaction/hooks/pre_celaction_setup.py | 11 +++++------ .../plugins/publish/collect_celaction_cli_kwargs.py | 10 +++++----- .../plugins/publish/collect_celaction_instances.py | 3 ++- openpype/hosts/celaction/scripts/__init__.py | 1 + .../celaction/{api/cli.py => scripts/publish_cli.py} | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) delete mode 100644 openpype/hosts/celaction/api/__init__.py create mode 100644 openpype/hosts/celaction/scripts/__init__.py rename openpype/hosts/celaction/{api/cli.py => scripts/publish_cli.py} (93%) diff --git a/openpype/hosts/celaction/api/__init__.py b/openpype/hosts/celaction/api/__init__.py deleted file mode 100644 index 8c93d93738..0000000000 --- a/openpype/hosts/celaction/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -kwargs = None diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index cde3a0c723..21ff38b701 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,12 +1,11 @@ import os import shutil -import subprocess import winreg from openpype.lib import PreLaunchHook, get_openpype_execute_args -from openpype.hosts.celaction import api as caapi +from openpype.hosts.celaction import scripts -CELACTION_API_DIR = os.path.dirname( - os.path.abspath(caapi.__file__) +CELACTION_SCRIPTS_DIR = os.path.dirname( + os.path.abspath(scripts.__file__) ) @@ -38,7 +37,7 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) - path_to_cli = os.path.join(CELACTION_API_DIR, "cli.py") + path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) openpype_executables = subproces_args.pop(0) @@ -108,7 +107,7 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - openpype_celaction_dir = os.path.dirname(CELACTION_API_DIR) + openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR) template_path = os.path.join( openpype_celaction_dir, "resources", diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index 15c5ddaf1c..b82c0f5648 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.hosts.celaction import api as celaction +from openpype.hosts.celaction import scripts class CollectCelactionCliKwargs(pyblish.api.Collector): @@ -9,15 +9,15 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): order = pyblish.api.Collector.order - 0.1 def process(self, context): - kwargs = celaction.kwargs.copy() + passing_kwargs = scripts.PASSING_KWARGS.copy() self.log.info("Storing kwargs: %s" % kwargs) - context.set_data("kwargs", kwargs) + context.set_data("passingKwargs", passing_kwargs) # get kwargs onto context data as keys with values - for k, v in kwargs.items(): + for k, v in passing_kwargs.items(): self.log.info(f"Setting `{k}` to instance.data with value: `{v}`") if k in ["frameStart", "frameEnd"]: - context.data[k] = kwargs[k] = int(v) + context.data[k] = passing_kwargs[k] = int(v) else: context.data[k] = v diff --git 
a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index b5f99a1416..35ac7fc264 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -36,7 +36,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): "version": version } - celaction_kwargs = context.data.get("kwargs", {}) + celaction_kwargs = context.data.get( + "passingKwargs", {}) if celaction_kwargs: shared_instance_data.update(celaction_kwargs) diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py new file mode 100644 index 0000000000..dfd9b37ae2 --- /dev/null +++ b/openpype/hosts/celaction/scripts/__init__.py @@ -0,0 +1 @@ +PASSING_KWARGS = None diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/scripts/publish_cli.py similarity index 93% rename from openpype/hosts/celaction/api/cli.py rename to openpype/hosts/celaction/scripts/publish_cli.py index 4c07374b08..586880dc4c 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/scripts/publish_cli.py @@ -7,7 +7,7 @@ import pyblish.util import openpype.hosts.celaction from openpype.lib import Logger -from openpype.hosts.celaction import api as celaction +from openpype.hosts.celaction import scripts from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins @@ -41,7 +41,7 @@ def cli(): parser.add_argument("--resolutionHeight", help=("Height of resolution")) - celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ + scripts.PASSING_KWARGS = parser.parse_args(sys.argv[1:]).__dict__ def main(): From 91580fdb301dd9907660a6a2b145b775f961ac97 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:51:44 +0100 Subject: [PATCH 2246/2550] celaction: hook should define data from asset_doc rather then from project doc --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 21ff38b701..a8fc7f322e 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -18,9 +18,9 @@ class CelactionPrelaunchHook(PreLaunchHook): platforms = ["windows"] def execute(self): - project_doc = self.data["project_doc"] - width = project_doc["data"]["resolutionWidth"] - height = project_doc["data"]["resolutionHeight"] + asset_doc = self.data["asset_doc"] + width = asset_doc["data"]["resolutionWidth"] + height = asset_doc["data"]["resolutionHeight"] # Add workfile path to launch arguments workfile_path = self.workfile_path() From a4e6f67692e46208bb80465987359aeebc610ca3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:56:19 +0100 Subject: [PATCH 2247/2550] celaction: fix kwargs print --- .../plugins/publish/collect_celaction_cli_kwargs.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index b82c0f5648..e552e9ba6a 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,6 @@ import pyblish.api 
from openpype.hosts.celaction import scripts +from pprint import pformat class CollectCelactionCliKwargs(pyblish.api.Collector): @@ -11,7 +12,10 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): def process(self, context): passing_kwargs = scripts.PASSING_KWARGS.copy() - self.log.info("Storing kwargs: %s" % kwargs) + self.log.info("Storing kwargs ...") + self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs))) + + # set kwargs to context data context.set_data("passingKwargs", passing_kwargs) # get kwargs onto context data as keys with values From 30550c26b01bbe502abaf74562408bd0f1308475 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:04:19 +0100 Subject: [PATCH 2248/2550] celaction: render from published workfile fix --- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 03d59b30fd..3be864781f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -82,7 +82,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): script_name = os.path.basename(script_path) for item in instance.context: - if "workfile" in item.data["families"]: + if "workfile" in item.data["family"]: msg = "Workfile (scene) must be published along" assert item.data["publish"] is True, msg From 0c475d5b9d1edaad6ce1c47f16e87f297c3443c4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:40:23 +0100 Subject: [PATCH 2249/2550] celaction: deadline job delay settings --- .../publish/submit_celaction_deadline.py | 78 ++++++++++++++----- .../defaults/project_settings/deadline.json | 3 +- .../schema_project_deadline.json | 5 ++ 3 files changed, 64 insertions(+), 22 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 3be864781f..f716621d59 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -5,7 +5,7 @@ import getpass import requests import pyblish.api - +from openpype.pipeline import legacy_io class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline @@ -25,12 +25,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): deadline_pool_secondary = "" deadline_group = "" deadline_chunk_size = 1 - - enviro_filter = [ - "FTRACK_API_USER", - "FTRACK_API_KEY", - "FTRACK_SERVER" - ] + deadline_job_delay = "00:00:08:00" def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" @@ -163,10 +158,11 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): # frames from Deadline Monitor "OutputFilename0": output_filename_0.replace("\\", "/"), - # # Asset dependency to wait for at least the scene file to sync. + # # Asset dependency to wait for at least + # the scene file to sync. 
# "AssetDependency0": script_path "ScheduledType": "Once", - "JobDelay": "00:00:08:00" + "JobDelay": self.deadline_job_delay }, "PluginInfo": { # Input @@ -191,18 +187,58 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) - i = 0 - for key, values in dict(os.environ).items(): - if key.upper() in self.enviro_filter: - payload["JobInfo"].update( - { - "EnvironmentKeyValue%d" - % i: "{key}={value}".format( - key=key, value=values - ) - } - ) - i += 1 + # Include critical environment variables with submission + keys = [ + "PYTHONPATH", + "PATH", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "AVALON_APP_NAME", + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "PYBLISHPLUGINPATH", + "NUKE_PATH", + "TOOL_ENV", + "FOUNDRY_LICENSE", + "OPENPYPE_VERSION" + ] + # Add mongo url if it's enabled + if instance.context.data.get("deadlinePassMongoUrl"): + keys.append("OPENPYPE_MONGO") + + # add allowed keys from preset if any + if self.env_allowed_keys: + keys += self.env_allowed_keys + + environment = dict({ + key: os.environ[key] for key in keys + if key in os.environ}, **legacy_io.Session + ) + + for _path in os.environ: + if _path.lower().startswith('openpype_'): + environment[_path] = os.environ[_path] + + # to recognize job from OPENPYPE for turning Event On/Off + environment.update({ + "OPENPYPE_LOG_NO_COLORS": "1", + "OPENPYPE_RENDER_JOB": "1" + }) + + # finally search replace in values of any key + if self.env_search_replace_values: + for key, value in environment.items(): + for _k, _v in self.env_search_replace_values.items(): + environment[key] = value.replace(_k, _v) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 8e892bb67e..6e1c0f3540 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -77,7 +77,8 @@ "deadline_pool": "", "deadline_pool_secondary": "", "deadline_group": "", - "deadline_chunk_size": 10 + "deadline_chunk_size": 10, + "deadline_job_delay": "00:00:00:00" }, "ProcessSubmittedJobOnFarm": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 77d520c54a..5295b0e9d6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -429,6 +429,11 @@ "type": "number", "key": "deadline_chunk_size", "label": "Deadline Chunk size" + }, + { + "type": "text", + "key": "deadline_job_delay", + "label": "Delay job (timecode dd:hh:mm:ss)" } ] }, From eb1b6e037b09c20bca12ab335ec675aeae0b311e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:42:22 +0100 Subject: [PATCH 2250/2550] label readability --- .../schemas/projects_schema/schema_project_deadline.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json 
b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 5295b0e9d6..69f81ed682 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -392,7 +392,7 @@ "collapsible": true, "checkbox_key": "enabled", "key": "CelactionSubmitDeadline", - "label": "CelactionSubmitDeadline", + "label": "Celaction Submit Deadline", "is_group": true, "children": [ { From 9f5e892a678307dec9b3d577a0ae1364a680af06 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:49:09 +0100 Subject: [PATCH 2251/2550] removing redundant code --- .../plugins/publish/submit_celaction_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index f716621d59..e5b06b007c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -208,10 +208,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): if instance.context.data.get("deadlinePassMongoUrl"): keys.append("OPENPYPE_MONGO") - # add allowed keys from preset if any - if self.env_allowed_keys: - keys += self.env_allowed_keys - environment = dict({ key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session @@ -227,12 +223,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): "OPENPYPE_RENDER_JOB": "1" }) - # finally search replace in values of any key - if self.env_search_replace_values: - for key, value in environment.items(): - for _k, _v in self.env_search_replace_values.items(): - environment[key] = value.replace(_k, _v) - payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( key=key, From 3491ef73ac54a98cab2e759c81a211a44549ea37 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:25:41 +0100 Subject: [PATCH 2252/2550] remove nuke code --- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index e5b06b007c..0583e146a7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -199,9 +199,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): "FTRACK_API_USER", "FTRACK_SERVER", "PYBLISHPLUGINPATH", - "NUKE_PATH", "TOOL_ENV", - "FOUNDRY_LICENSE", "OPENPYPE_VERSION" ] # Add mongo url if it's enabled From f162ec56d76b1fe4a6b4267ea1fdfe3ca6ee6927 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:58:22 +0100 Subject: [PATCH 2253/2550] celaction: removing environment from job --- .../publish/submit_celaction_deadline.py | 41 ------------------- 1 file changed, 41 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 0583e146a7..9a7d74c1f7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -187,47 +187,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] 
self.log.info("using render plugin : {}".format(plugin)) - # Include critical environment variables with submission - keys = [ - "PYTHONPATH", - "PATH", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "PYBLISHPLUGINPATH", - "TOOL_ENV", - "OPENPYPE_VERSION" - ] - # Add mongo url if it's enabled - if instance.context.data.get("deadlinePassMongoUrl"): - keys.append("OPENPYPE_MONGO") - - environment = dict({ - key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session - ) - - for _path in os.environ: - if _path.lower().startswith('openpype_'): - environment[_path] = os.environ[_path] - - # to recognize job from OPENPYPE for turning Event On/Off - environment.update({ - "OPENPYPE_LOG_NO_COLORS": "1", - "OPENPYPE_RENDER_JOB": "1" - }) - - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) From 193112c18bcb17c829ea4afb6813823bf10d888e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:58:53 +0100 Subject: [PATCH 2254/2550] deadline: adding openpype version to global job --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 6362b4ca65..249211e965 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -241,6 +241,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") + environment["OPENPYPE_VERSION"] = os.environ.get("OPENPYPE_VERSION") environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_USERNAME"] = instance.context.data["user"] environment["OPENPYPE_PUBLISH_JOB"] = "1" From b74675ebb0fc360eaa9af4b8faa57d0bcd5e0541 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 26 Nov 2022 03:31:25 +0000 Subject: [PATCH 2255/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index ffabcf8025..bf9f97d5f4 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7" +__version__ = "3.14.8-nightly.1" From 74ab26863c6063eaaaac17d3f005f2c692debf2c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 15:56:35 +0800 Subject: [PATCH 2256/2550] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 88 +++++++++++++++++++ .../maya/plugins/publish/collect_gltf.py | 18 ++++ .../maya/plugins/publish/extract_gltf.py | 65 ++++++++++++++ .../plugins/publish/collect_resources_path.py | 1 + openpype/plugins/publish/integrate.py | 1 + openpype/plugins/publish/integrate_legacy.py | 1 + .../defaults/project_settings/maya.json | 4 + .../schemas/schema_maya_publish.json | 21 ++++- 8 files changed, 198 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/maya/api/gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_gltf.py create mode 
100644 openpype/hosts/maya/plugins/publish/extract_gltf.py diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py new file mode 100644 index 0000000000..dd2a95a6d9 --- /dev/null +++ b/openpype/hosts/maya/api/gltf.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +"""Tools to work with GLTF.""" +import logging + +from pyblish.api import Instance + +from maya import cmds, mel # noqa + +log = logging.getLogger(__name__) + +_gltf_options = { + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials + "eut":bool, # excludeUnusedTexcoord + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly +} + +def extract_gltf(parent_dir, + filename, + **kwargs): + + """Sets GLTF export options from data in the instance. + + """ + + cmds.loadPlugin('maya2glTF', quiet=True) + # load the UI to run mel command + mel.eval("maya2glTF_UI()") + + parent_dir = parent_dir.replace('\\', '/') + options = { + "dsa": 1, + "glb": True + } + options.update(kwargs) + + for key, value in options.copy().items(): + if key not in _gltf_options: + log.warning("extract_gltf() does not support option '%s'. 
" + "Flag will be ignored..", key) + options.pop(key) + continue + + job_args = list() + default_opt = "maya2glTF -of \"{0}\" -sn \"{1}\"".format(parent_dir, filename) # noqa + job_args.append(default_opt) + + for key, value in options.items(): + if isinstance(value, str): + job_args.append("-{0} \"{1}\"".format(key, value)) + elif isinstance(value, bool): + if value: + job_args.append("-{0}".format(key)) + else: + job_args.append("-{0} {1}".format(key, value)) + + job_str = " ".join(job_args) + log.info("{}".format(job_str)) + mel.eval(job_str) + + # close the gltf export after finish the export + gltf_UI = "maya2glTF_exporter_window" + if cmds.window(gltf_UI, q = True, exists =True): + cmds.deleteUI(gltf_UI) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py new file mode 100644 index 0000000000..dba06dca23 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +import pyblish.api + + +class CollectGLTF(pyblish.api.InstancePlugin): + """Collect Assets for GLTF/GLB export.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Asset for GLTF/GLB export" + families = ["model", "animation", "pointcache"] + + def process(self, instance): + if not instance.data.get("families"): + instance.data["families"] = [] + + if "fbx" not in instance.data["families"]: + instance.data["families"].append("gltf") + diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py new file mode 100644 index 0000000000..1cab40d825 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -0,0 +1,65 @@ +import os + +from maya import cmds, mel +import pyblish.api + +from openpype.pipeline import publish +from openpype.hosts.maya.api import lib +from openpype.hosts.maya.api.gltf import extract_gltf + +class ExtractGLB(publish.Extractor): + + order = pyblish.api.ExtractorOrder + hosts = ["maya"] + label = "Extract GLB" + families = ["gltf"] + + def process(self, instance): + staging_dir = self.staging_dir(instance) + filename = "{0}.glb".format(instance.name) + path = os.path.join(staging_dir, filename) + + + self.log.info("Extracting GLB to: {}".format(path)) + + nodes = instance[:] + + self.log.info("Instance: {0}".format(nodes)) + + start_frame = instance.data('frameStart') or \ + int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = instance.data('frameEnd') or \ + int(cmds.playbackOptions(query=True, + animationEndTime=True)) + fps = mel.eval('currentTimeUnitToFPS()') + + options = { + "sno": True, #selectedNodeOnly + "nbu": True, # .bin instead of .bin0 + "ast": start_frame, + "aet": end_frame, + "afr": fps, + "dsa": 1, + "acn": instance.name, + "glb": True, + "vno": True #visibleNodeOnly + } + with lib.maintained_selection(): + cmds.select(nodes, hi=True, noExpand=True) + extract_gltf(staging_dir, + instance.name, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'glb', + 'ext': 'glb', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract GLB successful to: {0}".format(path)) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 00f65b8b67..70610da909 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ 
b/openpype/plugins/publish/collect_resources_path.py @@ -50,6 +50,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "source", "assembly", "fbx", + "gltf", "textures", "action", "background", diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 401270a788..3c78109934 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -111,6 +111,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c..2a3512471c 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -106,6 +106,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a..3413dee83b 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -250,6 +250,10 @@ "CollectFbxCamera": { "enabled": false }, + "CollectGLTF": { + "enabled": true, + "glb" : true + }, "ValidateInstanceInContext": { "enabled": true, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index ab8c6b885e..3aca9b2010 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -35,6 +35,25 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectGLTF", + "label": "Collect Assets for GLTF/GLB export", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "glb", + "label": "Export GLB" + } + ] + }, { "type": "splitter" }, @@ -62,7 +81,7 @@ } ] }, - { + { "type": "dict", "collapsible": true, "key": "ValidateFrameRange", From 13002a39491410e27e84fd02a9f46cc19510ed52 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 16:14:29 +0800 Subject: [PATCH 2257/2550] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 62 +++++++++---------- .../maya/plugins/publish/collect_gltf.py | 1 - .../maya/plugins/publish/extract_gltf.py | 10 +-- .../defaults/project_settings/maya.json | 2 +- 4 files changed, 37 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py index dd2a95a6d9..2519bd1220 100644 --- a/openpype/hosts/maya/api/gltf.py +++ b/openpype/hosts/maya/api/gltf.py @@ -2,43 +2,42 @@ """Tools to work with GLTF.""" import logging -from pyblish.api import Instance - from maya import cmds, mel # noqa log = logging.getLogger(__name__) _gltf_options = { - "of": str, # outputFolder - "cpr": str, # copyright - "sno": bool, # selectedNodeOnly - "sn": str, # sceneName - "glb": bool, # binary - "nbu": bool, # niceBufferURIs - "hbu": bool, # hashBufferURI - "ext": bool, # externalTextures - "ivt": int, # initialValuesTime - "acn": str, # animationClipName - "ast": int, # animationClipStartTime - "aet": int, # animationClipEndTime - "afr": float, # animationClipFrameRate - "dsa": int, # detectStepAnimations 
- "mpa": str, # meshPrimitiveAttributes - "bpa": str, # blendPrimitiveAttributes - "i32": bool, # force32bitIndices - "ssm": bool, # skipStandardMaterials - "eut":bool, # excludeUnusedTexcoord - "dm": bool, # defaultMaterial - "cm": bool, # colorizeMaterials - "dmy": str, # dumpMaya - "dgl": str, # dumpGLTF - "imd": str, # ignoreMeshDeformers - "ssc": bool, # skipSkinClusters - "sbs": bool, # skipBlendShapes - "rvp": bool, # redrawViewport - "vno": bool # visibleNodesOnly + "of" : str, # outputFolder + "cpr" : str, # copyright + "sno" : bool, # selectedNodeOnly + "sn" : str, # sceneName + "glb" : bool, # binary + "nbu" : bool, # niceBufferURIs + "hbu" : bool, # hashBufferURI + "ext" : bool, # externalTextures + "ivt" : int, # initialValuesTime + "acn" : str, # animationClipName + "ast" : int, # animationClipStartTime + "aet" : int, # animationClipEndTime + "afr" : float, # animationClipFrameRate + "dsa" : int, # detectStepAnimations + "mpa" : str, # meshPrimitiveAttributes + "bpa" : str, # blendPrimitiveAttributes + "i32" : bool, # force32bitIndices + "ssm" : bool, # skipStandardMaterials + "eut": bool, # excludeUnusedTexcoord + "dm" : bool, # defaultMaterial + "cm" : bool, # colorizeMaterials + "dmy" : str, # dumpMaya + "dgl" : str, # dumpGLTF + "imd" : str, # ignoreMeshDeformers + "ssc" : bool, # skipSkinClusters + "sbs" : bool, # skipBlendShapes + "rvp" : bool, # redrawViewport + "vno" : bool # visibleNodesOnly } + def extract_gltf(parent_dir, filename, **kwargs): @@ -63,6 +62,7 @@ def extract_gltf(parent_dir, log.warning("extract_gltf() does not support option '%s'. " "Flag will be ignored..", key) options.pop(key) + options.pop(value) continue job_args = list() @@ -84,5 +84,5 @@ def extract_gltf(parent_dir, # close the gltf export after finish the export gltf_UI = "maya2glTF_exporter_window" - if cmds.window(gltf_UI, q = True, exists =True): + if cmds.window(gltf_UI, q=True, exists=True): cmds.deleteUI(gltf_UI) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py index dba06dca23..bbc4e31f92 100644 --- a/openpype/hosts/maya/plugins/publish/collect_gltf.py +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -15,4 +15,3 @@ class CollectGLTF(pyblish.api.InstancePlugin): if "fbx" not in instance.data["families"]: instance.data["families"].append("gltf") - diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py index 1cab40d825..f5ceed5f33 100644 --- a/openpype/hosts/maya/plugins/publish/extract_gltf.py +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -7,6 +7,7 @@ from openpype.pipeline import publish from openpype.hosts.maya.api import lib from openpype.hosts.maya.api.gltf import extract_gltf + class ExtractGLB(publish.Extractor): order = pyblish.api.ExtractorOrder @@ -19,7 +20,6 @@ class ExtractGLB(publish.Extractor): filename = "{0}.glb".format(instance.name) path = os.path.join(staging_dir, filename) - self.log.info("Extracting GLB to: {}".format(path)) nodes = instance[:] @@ -28,14 +28,14 @@ class ExtractGLB(publish.Extractor): start_frame = instance.data('frameStart') or \ int(cmds.playbackOptions(query=True, - animationStartTime=True)) + animationStartTime=True))# noqa end_frame = instance.data('frameEnd') or \ int(cmds.playbackOptions(query=True, - animationEndTime=True)) + animationEndTime=True)) # noqa fps = mel.eval('currentTimeUnitToFPS()') options = { - "sno": True, #selectedNodeOnly + "sno": True, # selectedNodeOnly "nbu": True, 
# .bin instead of .bin0 "ast": start_frame, "aet": end_frame, @@ -43,7 +43,7 @@ class ExtractGLB(publish.Extractor): "dsa": 1, "acn": instance.name, "glb": True, - "vno": True #visibleNodeOnly + "vno": True # visibleNodeOnly } with lib.maintained_selection(): cmds.select(nodes, hi=True, noExpand=True) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 3413dee83b..e73f73161e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -251,7 +251,7 @@ "enabled": false }, "CollectGLTF": { - "enabled": true, + "enabled": false, "glb" : true }, "ValidateInstanceInContext": { From 6dadff2b58ebbc14c1e3622aa866fca5abe31490 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 16:17:09 +0800 Subject: [PATCH 2258/2550] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 54 ++++++++++++++++----------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py index 2519bd1220..2a983f1573 100644 --- a/openpype/hosts/maya/api/gltf.py +++ b/openpype/hosts/maya/api/gltf.py @@ -7,34 +7,34 @@ from maya import cmds, mel # noqa log = logging.getLogger(__name__) _gltf_options = { - "of" : str, # outputFolder - "cpr" : str, # copyright - "sno" : bool, # selectedNodeOnly - "sn" : str, # sceneName - "glb" : bool, # binary - "nbu" : bool, # niceBufferURIs - "hbu" : bool, # hashBufferURI - "ext" : bool, # externalTextures - "ivt" : int, # initialValuesTime - "acn" : str, # animationClipName - "ast" : int, # animationClipStartTime - "aet" : int, # animationClipEndTime - "afr" : float, # animationClipFrameRate - "dsa" : int, # detectStepAnimations - "mpa" : str, # meshPrimitiveAttributes - "bpa" : str, # blendPrimitiveAttributes - "i32" : bool, # force32bitIndices - "ssm" : bool, # skipStandardMaterials + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials "eut": bool, # excludeUnusedTexcoord - "dm" : bool, # defaultMaterial - "cm" : bool, # colorizeMaterials - "dmy" : str, # dumpMaya - "dgl" : str, # dumpGLTF - "imd" : str, # ignoreMeshDeformers - "ssc" : bool, # skipSkinClusters - "sbs" : bool, # skipBlendShapes - "rvp" : bool, # redrawViewport - "vno" : bool # visibleNodesOnly + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly } From 8eae3d395672f599f1fe8dea554ed1d83e1f50e3 Mon Sep 17 00:00:00 2001 From: Joseff Date: Sat, 26 Nov 2022 14:37:15 +0100 Subject: [PATCH 2259/2550] Update The #include for NotificationManager MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Renamed the include to "Framework/Notifications/NotificationManager.h" Co-authored-by: Ondřej 
Samohel <33513211+antirotor@users.noreply.github.com> --- .../UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index c2c7e249c3..322663eeec 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -3,7 +3,7 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" #include "AssetToolsModule.h" -#include "NotificationManager.h" +#include "Framework/Notifications/NotificationManager.h" #include "SNotificationList.h" //Moves all the invalid pointers to the end to prepare them for the shrinking From a9e2e7392295cf8edfcfd1345c31a14b41a12939 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 26 Nov 2022 20:24:53 +0000 Subject: [PATCH 2260/2550] Maintain time connections on update. --- openpype/hosts/maya/api/plugin.py | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 39d821f620..985cddaa08 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -217,7 +217,7 @@ class ReferenceLoader(Loader): # Need to save alembic settings and reapply, cause referencing resets # them to incoming data. - alembic_attrs = ["speed", "offset", "cycleType"] + alembic_attrs = ["speed", "offset", "cycleType", "time"] alembic_data = {} if representation["name"] == "abc": alembic_nodes = cmds.ls( @@ -226,7 +226,17 @@ class ReferenceLoader(Loader): if alembic_nodes: for attr in alembic_attrs: node_attr = "{}.{}".format(alembic_nodes[0], attr) - alembic_data[attr] = cmds.getAttr(node_attr) + connections = cmds.listConnections(node_attr, plugs=True) + data = { + "connected": False, + "attribute": None, + "value": cmds.getAttr(node_attr) + } + if connections: + data["connected"] = True + data["attribute"] = connections[0] + + alembic_data[attr] = data else: self.log.debug("No alembic nodes found in {}".format(members)) @@ -263,8 +273,14 @@ class ReferenceLoader(Loader): "{}:*".format(namespace), type="AlembicNode" ) if alembic_nodes: - for attr, value in alembic_data.items(): - cmds.setAttr("{}.{}".format(alembic_nodes[0], attr), value) + for attr, data in alembic_data.items(): + node_attr = "{}.{}".format(alembic_nodes[0], attr) + if data["connected"]: + cmds.connectAttr( + data["attribute"], node_attr, force=True + ) + else: + cmds.setAttr(node_attr, data["value"]) # Fix PLN-40 for older containers created with Avalon that had the # `.verticesOnlySet` set to True. 
From c2e9bdf161b7b073a2133efb2ce9d04e2360540d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 26 Nov 2022 21:35:40 +0100 Subject: [PATCH 2261/2550] celaction: DL improving code --- .../publish/submit_celaction_deadline.py | 41 ++++++++++--------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 9a7d74c1f7..7913851d8a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -2,10 +2,9 @@ import os import re import json import getpass - import requests import pyblish.api -from openpype.pipeline import legacy_io + class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline @@ -194,10 +193,15 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): self.expected_files(instance, render_path) self.log.debug("__ expectedFiles: `{}`".format( instance.data["expectedFiles"])) + response = requests.post(self.deadline_url, json=payload) if not response.ok: - raise Exception(response.text) + self.log.error("Submission failed!") + self.log.error(response.status_code) + self.log.error(response.content) + self.log.debug(payload) + raise SystemExit(response.text) return response @@ -235,32 +239,29 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): split_path = path.split(split_patern) hashes = "#" * int(search_results[1]) return "".join([split_path[0], hashes, split_path[-1]]) - if "#" in path: - self.log.debug("_ path: `{}`".format(path)) - return path - else: - return path - def expected_files(self, - instance, - path): + self.log.debug("_ path: `{}`".format(path)) + return path + + def expected_files(self, instance, filepath): """ Create expected files in instance data """ if not instance.data.get("expectedFiles"): - instance.data["expectedFiles"] = list() + instance.data["expectedFiles"] = [] - dir = os.path.dirname(path) - file = os.path.basename(path) + dirpath = os.path.dirname(filepath) + filename = os.path.basename(filepath) - if "#" in file: - pparts = file.split("#") + if "#" in filename: + pparts = filename.split("#") padding = "%0{}d".format(len(pparts) - 1) - file = pparts[0] + padding + pparts[-1] + filename = pparts[0] + padding + pparts[-1] - if "%" not in file: - instance.data["expectedFiles"].append(path) + if "%" not in filename: + instance.data["expectedFiles"].append(filepath) return for i in range(self._frame_start, (self._frame_end + 1)): instance.data["expectedFiles"].append( - os.path.join(dir, (file % i)).replace("\\", "/")) + os.path.join(dirpath, (filename % i)).replace("\\", "/") + ) From 1f6be563eb6b28ef0988d55e6a143fe0e98d6aa1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 26 Nov 2022 21:36:03 +0100 Subject: [PATCH 2262/2550] celaction: improving code PR feedback --- .../celaction/hooks/pre_celaction_setup.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index a8fc7f322e..62cebf99ed 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,6 +1,7 @@ import os import shutil import winreg +import subprocess from openpype.lib import PreLaunchHook, get_openpype_execute_args from openpype.hosts.celaction import scripts @@ 
-13,7 +14,6 @@ class CelactionPrelaunchHook(PreLaunchHook): """ Bootstrap celacion with pype """ - workfile_ext = "scn" app_groups = ["celaction"] platforms = ["windows"] @@ -39,28 +39,28 @@ class CelactionPrelaunchHook(PreLaunchHook): path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) - openpype_executables = subproces_args.pop(0) + openpype_executable = subproces_args.pop(0) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - openpype_executables + openpype_executable ) parameters = subproces_args + [ - "--currentFile *SCENE*", - "--chunk *CHUNK*", - "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*" + "--currentFile", "*SCENE*", + "--chunk", "*CHUNK*", + "--frameStart", "*START*", + "--frameEnd", "*END*", + "--resolutionWidth", "*X*", + "--resolutionHeight", "*Y*" ] winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters) + subprocess.list2cmdline(parameters) ) # setting resolution parameters From b0e8490dd0e6a06b76df0a7b8b5b68b3766c5049 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 28 Nov 2022 14:52:44 +0800 Subject: [PATCH 2263/2550] add proxyAbc as the family of the validator of animation out set related node ids --- .../publish/validate_animation_out_set_related_node_ids.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 649913fff6..5a527031be 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -20,7 +20,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): """ order = ValidateContentsOrder - families = ['animation', "pointcache"] + families = ['animation', "pointcache", "proxyAbc"] hosts = ['maya'] label = 'Animation Out Set Related Node Ids' actions = [ From a2abcd252471c3cce2f56e0441043693df12bc0f Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 09:32:14 +0000 Subject: [PATCH 2264/2550] Optional viewport refresh on pointcache extraction --- .../hosts/maya/plugins/create/create_pointcache.py | 1 + .../hosts/maya/plugins/publish/extract_pointcache.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index ab8fe12079..cdec140ea8 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -28,6 +28,7 @@ class CreatePointCache(plugin.Creator): self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups self.data["worldSpace"] = True # Default to exporting world-space + self.data["refresh"] = False # Default to suspend refresh. 
# Add options for custom attributes self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 7c1c6d5c12..5f5532e60a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,13 +86,21 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - with suspended_refresh(): + if instance.data.get("refresh", False): with maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic(file=path, startFrame=start, endFrame=end, **options) + else: + with suspended_refresh(): + with maintained_selection(): + cmds.select(nodes, noExpand=True) + extract_alembic(file=path, + startFrame=start, + endFrame=end, + **options) if "representations" not in instance.data: instance.data["representations"] = [] From 9e2f3ab8685dea532a4b452da0ec6e0ef5ee56da Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 09:56:22 +0000 Subject: [PATCH 2265/2550] Disable viewport Pan/Zoom on playblast extraction. --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index b19d24fad7..04e3c7bccf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -115,6 +115,10 @@ class ExtractPlayblast(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -135,6 +139,8 @@ class ExtractPlayblast(publish.Extractor): path = capture.capture(log=self.log, **preset) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) From 34d2f1252601a3fed9285a0b72b2ad759e463623 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:04:19 +0000 Subject: [PATCH 2266/2550] Hound --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 04e3c7bccf..1f9f9db99a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -118,7 +118,7 @@ class ExtractPlayblast(publish.Extractor): # Disable Pan/Zoom. 
pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) - + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -140,7 +140,7 @@ class ExtractPlayblast(publish.Extractor): path = capture.capture(log=self.log, **preset) cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) - + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) From 49ad1dbc97a6a1ea79f8ff273c5f275d4cd84282 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:12:02 +0000 Subject: [PATCH 2267/2550] Include thumbnail extraction --- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 712159c2be..06244cf003 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -117,6 +117,10 @@ class ExtractThumbnail(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): # Force viewer to False in call to capture because we have our own # viewer opening call to allow a signal to trigger between @@ -136,6 +140,7 @@ class ExtractThumbnail(publish.Extractor): _, thumbnail = os.path.split(playblast) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) self.log.info("file list {}".format(thumbnail)) From 0c54d8fcad1babdd9c03891e695db8da46eb7a51 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:15:30 +0000 Subject: [PATCH 2268/2550] Enable thumbnail transparency on extraction. --- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 712159c2be..311278145a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -105,6 +105,11 @@ class ExtractThumbnail(publish.Extractor): pm.currentTime(refreshFrameInt - 1, edit=True) pm.currentTime(refreshFrameInt, edit=True) + # Override transparency if requested. + transparency = instance.data.get("transparency", 0) + if transparency != 0: + preset["viewport2_options"]["transparencyAlgorithm"] = transparency + # Isolate view is requested by having objects in the set besides a # camera. 
if preset.pop("isolate_view", False) and instance.data.get("isolate"): From 9aeb7898527d9d2892394402462251a1b89dfe87 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Tue, 29 Nov 2022 13:04:29 +0700 Subject: [PATCH 2269/2550] bugfix: Use unused 'paths' list --- openpype/tools/standalonepublish/widgets/widget_drop_frame.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index f8a8273b26..18c2b27678 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -178,7 +178,7 @@ class DropDataFrame(QtWidgets.QFrame): paths = self._get_all_paths(in_paths) collectionable_paths = [] non_collectionable_paths = [] - for path in in_paths: + for path in paths: ext = os.path.splitext(path)[1] if ext in self.image_extensions or ext in self.sequence_types: collectionable_paths.append(path) From 37535f35bdc5792e5ac3b0c5acef52cc8ad8c5dd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 29 Nov 2022 21:56:34 +0800 Subject: [PATCH 2270/2550] gltf extractor for Maya --- openpype/settings/defaults/project_settings/maya.json | 3 +-- .../schemas/projects_schema/schemas/schema_maya_publish.json | 5 ----- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index e73f73161e..59e71b2e29 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -251,8 +251,7 @@ "enabled": false }, "CollectGLTF": { - "enabled": false, - "glb" : true + "enabled": false }, "ValidateInstanceInContext": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 3aca9b2010..e7a56d0749 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -46,11 +46,6 @@ "type": "boolean", "key": "enabled", "label": "Enabled" - }, - { - "type": "boolean", - "key": "glb", - "label": "Export GLB" } ] }, From decc8df4aef6eb1aef8e55152c3eea0760d1fad2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 29 Nov 2022 17:29:58 +0100 Subject: [PATCH 2271/2550] :construction: 3dsmax addon basics --- openpype/hosts/3dsmax/api/__init__.py | 0 openpype/hosts/3dsmax/plugins/__init__.py | 0 openpype/hosts/3dsmax/startup/startup.py | 2 - openpype/hosts/max/__init__.py | 10 ++ openpype/hosts/max/addon.py | 16 ++ openpype/hosts/max/api/__init__.py | 13 ++ openpype/hosts/max/api/lib.py | 2 + openpype/hosts/max/api/menu.py | 80 +++++++++ openpype/hosts/max/api/pipeline.py | 153 ++++++++++++++++++ openpype/hosts/max/hooks/set_paths.py | 17 ++ .../hosts/{3dsmax => max/plugins}/__init__.py | 0 .../hosts/{3dsmax => max}/startup/startup.ms | 0 openpype/hosts/max/startup/startup.py | 7 + openpype/settings/entities/enum_entity.py | 2 +- 14 files changed, 299 insertions(+), 3 deletions(-) delete mode 100644 openpype/hosts/3dsmax/api/__init__.py delete mode 100644 openpype/hosts/3dsmax/plugins/__init__.py delete mode 100644 openpype/hosts/3dsmax/startup/startup.py create mode 100644 openpype/hosts/max/__init__.py create mode 100644 openpype/hosts/max/addon.py create mode 100644 
openpype/hosts/max/api/__init__.py create mode 100644 openpype/hosts/max/api/lib.py create mode 100644 openpype/hosts/max/api/menu.py create mode 100644 openpype/hosts/max/api/pipeline.py create mode 100644 openpype/hosts/max/hooks/set_paths.py rename openpype/hosts/{3dsmax => max/plugins}/__init__.py (100%) rename openpype/hosts/{3dsmax => max}/startup/startup.ms (100%) create mode 100644 openpype/hosts/max/startup/startup.py diff --git a/openpype/hosts/3dsmax/api/__init__.py b/openpype/hosts/3dsmax/api/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/3dsmax/plugins/__init__.py b/openpype/hosts/3dsmax/plugins/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/3dsmax/startup/startup.py b/openpype/hosts/3dsmax/startup/startup.py deleted file mode 100644 index dd8c08a6b9..0000000000 --- a/openpype/hosts/3dsmax/startup/startup.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -print("inside python startup") \ No newline at end of file diff --git a/openpype/hosts/max/__init__.py b/openpype/hosts/max/__init__.py new file mode 100644 index 0000000000..8da0e0ee42 --- /dev/null +++ b/openpype/hosts/max/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + MaxAddon, + MAX_HOST_DIR, +) + + +__all__ = ( + "MaxAddon", + "MAX_HOST_DIR", +) \ No newline at end of file diff --git a/openpype/hosts/max/addon.py b/openpype/hosts/max/addon.py new file mode 100644 index 0000000000..734b87dd21 --- /dev/null +++ b/openpype/hosts/max/addon.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +import os +from openpype.modules import OpenPypeModule, IHostAddon + +MAX_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class MaxAddon(OpenPypeModule, IHostAddon): + name = "max" + host_name = "max" + + def initialize(self, module_settings): + self.enabled = True + + def get_workfile_extensions(self): + return [".max"] diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py new file mode 100644 index 0000000000..b6998df862 --- /dev/null +++ b/openpype/hosts/max/api/__init__.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +"""Public API for 3dsmax""" + +from .pipeline import ( + MaxHost +) +from .menu import OpenPypeMenu + + +__all__ = [ + "MaxHost", + "OpenPypeMenu" +] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py new file mode 100644 index 0000000000..e50de85f68 --- /dev/null +++ b/openpype/hosts/max/api/lib.py @@ -0,0 +1,2 @@ +def imprint(attr, data): + ... 
diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py new file mode 100644 index 0000000000..13ca503b4d --- /dev/null +++ b/openpype/hosts/max/api/menu.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +"""3dsmax menu definition of OpenPype.""" +from abc import ABCMeta, abstractmethod +import six +from Qt import QtWidgets, QtCore +from pymxs import runtime as rt + +from openpype.tools.utils import host_tools + + +@six.add_metaclass(ABCMeta) +class OpenPypeMenu(object): + + def __init__(self): + self.main_widget = self.get_main_widget() + + @staticmethod + def get_main_widget(): + """Get 3dsmax main window.""" + return QtWidgets.QWidget.find(rt.windows.getMAXHWND()) + + def get_main_menubar(self): + """Get main Menubar by 3dsmax main window.""" + return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0] + + def get_or_create_openpype_menu(self, name="&OpenPype", before="&Help"): + menu_bar = self.get_main_menubar() + menu_items = menu_bar.findChildren( + QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly) + help_action = None + for item in menu_items: + if name in item.title(): + # we already have OpenPype menu + return item + + if before in item.title(): + help_action = item.menuAction() + + op_menu = QtWidgets.QMenu("&OpenPype") + menu_bar.insertMenu(before, op_menu) + return op_menu + + def build_openpype_menu(self): + openpype_menu = self.get_or_create_openpype_menu() + load_action = QtWidgets.QAction("Load...", openpype_menu) + load_action.triggered.connect(self.load_callback) + openpype_menu.addAction(load_action) + + publish_action = QtWidgets.QAction("Publish...", openpype_menu) + publish_action.triggered.connect(self.publish_callback) + openpype_menu.addAction(publish_action) + + manage_action = QtWidgets.QAction("Manage...", openpype_menu) + manage_action.triggered.connect(self.manage_callback) + openpype_menu.addAction(manage_action) + + library_action = QtWidgets.QAction("Library...", openpype_menu) + library_action.triggered.connect(self.library_callback) + openpype_menu.addAction(library_action) + + openpype_menu.addSeparator() + + workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) + workfiles_action.triggered.connect(self.workfiles_callback) + openpype_menu.addAction(workfiles_action) + + def load_callback(self): + host_tools.show_loader(parent=self.main_widget) + + def publish_callback(self): + host_tools.show_publisher(parent=self.main_widget) + + def manage_callback(self): + host_tools.show_subset_manager(parent=self.main_widget) + + def library_callback(self): + host_tools.show_library_loader(parent=self.main_widget) + + def workfiles_callback(self): + host_tools.show_workfiles(parent=self.main_widget) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py new file mode 100644 index 0000000000..2ee5989871 --- /dev/null +++ b/openpype/hosts/max/api/pipeline.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" +import os +import sys +import logging +import contextlib + +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher +import pyblish.api +from openpype.pipeline import ( + register_creator_plugin_path, + register_loader_plugin_path, + AVALON_CONTAINER_ID, +) +from openpype.hosts.max.api import OpenPypeMenu +from openpype.hosts.max.api import lib +from openpype.hosts.max import MAX_HOST_DIR +from openpype.pipeline.load import any_outdated_containers +from openpype.lib import ( + register_event_callback, + emit_event, +) 
+from pymxs import runtime as rt # noqa + +log = logging.getLogger("openpype.hosts.max") + +PLUGINS_DIR = os.path.join(MAX_HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") +INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + + +class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + name = "max" + menu = None + + def __init__(self): + super(MaxHost, self).__init__() + self._op_events = {} + self._has_been_setup = False + + def install(self): + pyblish.api.register_host("max") + + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + log.info("Building menu ...") + + self.menu = OpenPypeMenu() + + log.info("Installing callbacks ... ") + # register_event_callback("init", on_init) + self._register_callbacks() + + # register_event_callback("before.save", before_save) + # register_event_callback("save", on_save) + # register_event_callback("open", on_open) + # register_event_callback("new", on_new) + + # pyblish.api.register_callback( + # "instanceToggled", on_pyblish_instance_toggled + # ) + + self._has_been_setup = True + + def has_unsaved_changes(self): + # TODO: how to get it from 3dsmax? + return True + + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] + + def save_workfile(self, dst_path=None): + rt.saveMaxFile(dst_path) + return dst_path + + def open_workfile(self, filepath): + rt.checkForSave() + rt.loadMaxFile(filepath) + return filepath + + def get_current_workfile(self): + return os.path.join(rt.maxFilePath, rt.maxFileName) + + def get_containers(self): + return ls() + + def _register_callbacks(self): + for event in self._op_events.copy().values(): + if event is None: + continue + + try: + rt.callbacks.removeScript(id=rt.name(event.name)) + except RuntimeError as e: + log.info(e) + + rt.callbacks.addScript( + event.name, event.callback, id=rt.Name('OpenPype')) + + @staticmethod + def create_context_node(): + """Helper for creating context holding node.""" + + root_scene = rt.rootScene + + create_attr_script = (""" +attributes "OpenPypeContext" +( + parameters main rollout:params + ( + context type: #string + ) + + rollout params "OpenPype Parameters" + ( + editText editTextContext "Context" type: #string + ) +) + """) + + attr = rt.execute(create_attr_script) + rt.custAttributes.add(root_scene, attr) + + return root_scene.OpenPypeContext.context + + def update_context_data(self, data, changes): + try: + context = rt.rootScene.OpenPypeContext.context + except AttributeError: + # context node doesn't exists + context = self.create_context_node() + + lib.imprint(context, data) + + def get_context_data(self): + try: + context = rt.rootScene.OpenPypeContext.context + except AttributeError: + # context node doesn't exists + context = self.create_context_node() + return lib.read(context) + + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") + rt.saveMaxFile(dst_path) + + +def ls(): + ... \ No newline at end of file diff --git a/openpype/hosts/max/hooks/set_paths.py b/openpype/hosts/max/hooks/set_paths.py new file mode 100644 index 0000000000..3db5306344 --- /dev/null +++ b/openpype/hosts/max/hooks/set_paths.py @@ -0,0 +1,17 @@ +from openpype.lib import PreLaunchHook + + +class SetPath(PreLaunchHook): + """Set current dir to workdir. 
+ + Hook `GlobalHostDataHook` must be executed before this hook. + """ + app_groups = ["max"] + + def execute(self): + workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + self.launch_context.kwargs["cwd"] = workdir diff --git a/openpype/hosts/3dsmax/__init__.py b/openpype/hosts/max/plugins/__init__.py similarity index 100% rename from openpype/hosts/3dsmax/__init__.py rename to openpype/hosts/max/plugins/__init__.py diff --git a/openpype/hosts/3dsmax/startup/startup.ms b/openpype/hosts/max/startup/startup.ms similarity index 100% rename from openpype/hosts/3dsmax/startup/startup.ms rename to openpype/hosts/max/startup/startup.ms diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py new file mode 100644 index 0000000000..afcbd2d132 --- /dev/null +++ b/openpype/hosts/max/startup/startup.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +from openpype.hosts.max.api import MaxHost +from openpype.pipeline import install_host + +host = MaxHost() +install_host(host) + diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index c07350ba07..c0c103ea10 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -152,7 +152,7 @@ class HostsEnumEntity(BaseEnumEntity): schema_types = ["hosts-enum"] all_host_names = [ - "3dsmax", + "max", "aftereffects", "blender", "celaction", From 2e4db127569edce09336d84fcc4835954fbe6ce6 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 30 Nov 2022 03:32:24 +0000 Subject: [PATCH 2272/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index bf9f97d5f4..9a34c85bf8 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.1" +__version__ = "3.14.8-nightly.2" From 29b9603aab1b14e54026f23965281a1fda7d53a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:26:42 +0100 Subject: [PATCH 2273/2550] change start_number if input is sequence instead of adding -ss --- openpype/plugins/publish/extract_review.py | 57 +++++++++++++--------- 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index f299d1c6e9..af49f7d79b 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -598,9 +598,13 @@ class ExtractReview(pyblish.api.InstancePlugin): if temp_data["input_is_sequence"]: # Set start frame of input sequence (just frame in filename) # - definition of input filepath - ffmpeg_input_args.extend([ - "-start_number", str(temp_data["first_sequence_frame"]) - ]) + # - add handle start if output should be without handles + start_number = temp_data["first_sequence_frame"] + if temp_data["without_handles"] and temp_data["handles_are_set"]: + start_number += temp_data["handle_start"] + ffmpeg_input_args.extend( + ["-start_number", str(start_number)] + ) # TODO add fps mapping `{fps: fraction}` ? 
# - e.g.: { @@ -609,49 +613,54 @@ class ExtractReview(pyblish.api.InstancePlugin): # "23.976": "24000/1001" # } # Add framerate to input when input is sequence - ffmpeg_input_args.append( - "-framerate {}".format(temp_data["fps"]) - ) + ffmpeg_input_args.extend([ + "-framerate", str(temp_data["fps"]) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) # - this is definition of an output - ffmpeg_output_args.append( - "-start_number {}".format(temp_data["output_frame_start"]) - ) + ffmpeg_output_args.extend([ + "-start_number", str(temp_data["output_frame_start"]) + ]) # Change output's duration and start point if should not contain # handles - start_sec = 0 if temp_data["without_handles"] and temp_data["handles_are_set"]: - # Set start time without handles - # - check if handle_start is bigger than 0 to avoid zero division - if temp_data["handle_start"] > 0: + # Add -ss (start offset in seconds) if input is not sequence + if not temp_data["input_is_sequence"]: start_sec = float(temp_data["handle_start"]) / temp_data["fps"] - ffmpeg_input_args.append("-ss {:0.10f}".format(start_sec)) + # Set start time without handles + # - Skip if start sec is 0.0 + if start_sec > 0.0: + ffmpeg_input_args.extend([ + "-ss", "{:0.10f}".format(start_sec) + ]) # Set output duration inn seconds - ffmpeg_output_args.append("-t {:0.10}".format(duration_seconds)) + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: - ffmpeg_output_args.append("-frames:v {}".format(output_frames_len)) + ffmpeg_output_args.extend([ + "-frames:v", str(output_frames_len) + ]) # Add duration of an input sequence if output is video if ( temp_data["input_is_sequence"] and not temp_data["output_is_sequence"] ): - ffmpeg_input_args.append("-to {:0.10f}".format( - duration_seconds + start_sec - )) + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) # Add video/image input path - ffmpeg_input_args.append( - "-i {}".format( - path_to_subprocess_arg(temp_data["full_input_path"]) - ) - ) + ffmpeg_input_args.extend([ + "-i", path_to_subprocess_arg(temp_data["full_input_path"]) + ]) # Add audio arguments if there are any. Skipped when output are images. if not temp_data["output_ext_is_image"] and temp_data["with_audio"]: From f128425155e0144378882cc0b71d5444d82c9f44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 30 Nov 2022 17:27:12 +0100 Subject: [PATCH 2274/2550] Update openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../deadline/plugins/publish/submit_celaction_deadline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 7913851d8a..ea44a24459 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -197,9 +197,9 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): response = requests.post(self.deadline_url, json=payload) if not response.ok: - self.log.error("Submission failed!") - self.log.error(response.status_code) - self.log.error(response.content) + self.log.error( + "Submission failed! 
[{}] {}".format( + response.status_code, response.content)) self.log.debug(payload) raise SystemExit(response.text) From 7a90f8f084b8f0d242564584b4df296106c997f9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 30 Nov 2022 17:33:47 +0100 Subject: [PATCH 2275/2550] celaction: shifting argparse to publish plugin --- .../publish/collect_celaction_cli_kwargs.py | 18 +++++++++++-- openpype/hosts/celaction/scripts/__init__.py | 1 - .../hosts/celaction/scripts/publish_cli.py | 27 ------------------- 3 files changed, 16 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index e552e9ba6a..bf97dd744b 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,6 @@ import pyblish.api -from openpype.hosts.celaction import scripts +import argparse +import sys from pprint import pformat @@ -10,7 +11,20 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): order = pyblish.api.Collector.order - 0.1 def process(self, context): - passing_kwargs = scripts.PASSING_KWARGS.copy() + parser = argparse.ArgumentParser(prog="celaction") + parser.add_argument("--currentFile", + help="Pass file to Context as `currentFile`") + parser.add_argument("--chunk", + help=("Render chanks on farm")) + parser.add_argument("--frameStart", + help=("Start of frame range")) + parser.add_argument("--frameEnd", + help=("End of frame range")) + parser.add_argument("--resolutionWidth", + help=("Width of resolution")) + parser.add_argument("--resolutionHeight", + help=("Height of resolution")) + passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__ self.log.info("Storing kwargs ...") self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs))) diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py index dfd9b37ae2..e69de29bb2 100644 --- a/openpype/hosts/celaction/scripts/__init__.py +++ b/openpype/hosts/celaction/scripts/__init__.py @@ -1 +0,0 @@ -PASSING_KWARGS = None diff --git a/openpype/hosts/celaction/scripts/publish_cli.py b/openpype/hosts/celaction/scripts/publish_cli.py index 586880dc4c..39d3f1a94d 100644 --- a/openpype/hosts/celaction/scripts/publish_cli.py +++ b/openpype/hosts/celaction/scripts/publish_cli.py @@ -1,13 +1,11 @@ import os import sys -import argparse import pyblish.api import pyblish.util import openpype.hosts.celaction from openpype.lib import Logger -from openpype.hosts.celaction import scripts from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins @@ -20,30 +18,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -def cli(): - parser = argparse.ArgumentParser(prog="celaction_publish") - - parser.add_argument("--currentFile", - help="Pass file to Context as `currentFile`") - - parser.add_argument("--chunk", - help=("Render chanks on farm")) - - parser.add_argument("--frameStart", - help=("Start of frame range")) - - parser.add_argument("--frameEnd", - help=("End of frame range")) - - parser.add_argument("--resolutionWidth", - help=("Width of resolution")) - - parser.add_argument("--resolutionHeight", - help=("Height of resolution")) - - scripts.PASSING_KWARGS = parser.parse_args(sys.argv[1:]).__dict__ - - def main(): # Registers pype's Global pyblish plugins install_openpype_plugins() @@ -59,6 
+33,5 @@ def main(): if __name__ == "__main__": - cli() result = main() sys.exit(not bool(result)) From 63eae39de5b26d63b4312c7e57ad8bb5ad5767c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:37:14 +0100 Subject: [PATCH 2276/2550] moved few lines to make it more logical --- openpype/plugins/publish/extract_review.py | 24 +++++++++------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index af49f7d79b..61f162dfcc 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -616,6 +616,11 @@ class ExtractReview(pyblish.api.InstancePlugin): ffmpeg_input_args.extend([ "-framerate", str(temp_data["fps"]) ]) + # Add duration of an input sequence if output is video + if not temp_data["output_is_sequence"]: + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) @@ -627,6 +632,11 @@ class ExtractReview(pyblish.api.InstancePlugin): # Change output's duration and start point if should not contain # handles if temp_data["without_handles"] and temp_data["handles_are_set"]: + # Set output duration in seconds + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) + # Add -ss (start offset in seconds) if input is not sequence if not temp_data["input_is_sequence"]: start_sec = float(temp_data["handle_start"]) / temp_data["fps"] @@ -637,26 +647,12 @@ class ExtractReview(pyblish.api.InstancePlugin): "-ss", "{:0.10f}".format(start_sec) ]) - # Set output duration inn seconds - ffmpeg_output_args.extend([ - "-t", "{:0.10}".format(duration_seconds) - ]) - # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: ffmpeg_output_args.extend([ "-frames:v", str(output_frames_len) ]) - # Add duration of an input sequence if output is video - if ( - temp_data["input_is_sequence"] - and not temp_data["output_is_sequence"] - ): - ffmpeg_input_args.extend([ - "-to", "{:0.10f}".format(duration_seconds) - ]) - # Add video/image input path ffmpeg_input_args.extend([ "-i", path_to_subprocess_arg(temp_data["full_input_path"]) From 42588daab5033e5cdd7a0e9c3bbd9d626550c24d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:38:16 +0100 Subject: [PATCH 2277/2550] unify formatting --- openpype/plugins/publish/extract_review.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 61f162dfcc..9310923a9f 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -602,9 +602,9 @@ class ExtractReview(pyblish.api.InstancePlugin): start_number = temp_data["first_sequence_frame"] if temp_data["without_handles"] and temp_data["handles_are_set"]: start_number += temp_data["handle_start"] - ffmpeg_input_args.extend( - ["-start_number", str(start_number)] - ) + ffmpeg_input_args.extend([ + "-start_number", str(start_number) + ]) # TODO add fps mapping `{fps: fraction}` ? # - e.g.: { From 9a439d408bbdb1ed2b32a61b1ab63f1740a72f65 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Wed, 30 Nov 2022 17:36:51 +0000 Subject: [PATCH 2278/2550] Improvements - account for time attribute reconnection. 
- simpler data collection --- openpype/hosts/maya/api/plugin.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 985cddaa08..66b525bad1 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -226,15 +226,13 @@ class ReferenceLoader(Loader): if alembic_nodes: for attr in alembic_attrs: node_attr = "{}.{}".format(alembic_nodes[0], attr) - connections = cmds.listConnections(node_attr, plugs=True) + inputs = cmds.listConnections( + node_attr, plugs=True, destination=False + ) data = { - "connected": False, - "attribute": None, + "input": None if inputs is None else inputs[0], "value": cmds.getAttr(node_attr) } - if connections: - data["connected"] = True - data["attribute"] = connections[0] alembic_data[attr] = data else: @@ -275,11 +273,16 @@ class ReferenceLoader(Loader): if alembic_nodes: for attr, data in alembic_data.items(): node_attr = "{}.{}".format(alembic_nodes[0], attr) - if data["connected"]: + if data["input"]: cmds.connectAttr( - data["attribute"], node_attr, force=True + data["input"], node_attr, force=True ) else: + inputs = cmds.listConnections( + node_attr, plugs=True, destination=False + ) + if inputs: + cmds.disconnectAttr(inputs[0], node_attr) cmds.setAttr(node_attr, data["value"]) # Fix PLN-40 for older containers created with Avalon that had the From 7544771744427522841e580ac1cee4945b6d07d4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Dec 2022 13:29:18 +0100 Subject: [PATCH 2279/2550] replace reset with configure locations --- .../modules/ftrack/plugins/publish/integrate_ftrack_api.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 231bd8e81e..0e8209866f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -46,8 +46,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): return session = context.data["ftrackSession"] - # Reset session and reconfigure locations - session.reset() + # Reset session operations and reconfigure locations + session.recorded_operations.clear() + session._configure_locations() try: self.integrate_to_ftrack( From 4acbb9fa1823aec8adcec1e25e69c48a49a51979 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 1 Dec 2022 14:19:30 +0100 Subject: [PATCH 2280/2550] general: integrate skips transfere files in src == dst --- openpype/plugins/publish/integrate.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 401270a788..7e4fc84658 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -291,6 +291,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: + if src == dst: + continue + # todo: add support for hardlink transfers file_transactions.add(src, dst) From 15fa6f6f18b00659351c133b5db0bf342c5a0035 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Dec 2022 14:53:00 +0100 Subject: [PATCH 2281/2550] fix occational double parents issue --- openpype/modules/ftrack/lib/avalon_sync.py | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 
935d1e85c9..0341c25717 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -1556,7 +1556,7 @@ class SyncEntitiesFactory: deleted_entities.append(mongo_id) av_ent = self.avalon_ents_by_id[mongo_id] - av_ent_path_items = [p for p in av_ent["data"]["parents"]] + av_ent_path_items = list(av_ent["data"]["parents"]) av_ent_path_items.append(av_ent["name"]) self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items))) @@ -1855,7 +1855,7 @@ class SyncEntitiesFactory: _vis_par = _avalon_ent["data"]["visualParent"] _name = _avalon_ent["name"] if _name in self.all_ftrack_names: - av_ent_path_items = _avalon_ent["data"]["parents"] + av_ent_path_items = list(_avalon_ent["data"]["parents"]) av_ent_path_items.append(_name) av_ent_path = "/".join(av_ent_path_items) # TODO report @@ -1997,7 +1997,7 @@ class SyncEntitiesFactory: {"_id": mongo_id}, item )) - av_ent_path_items = item["data"]["parents"] + av_ent_path_items = list(item["data"]["parents"]) av_ent_path_items.append(item["name"]) av_ent_path = "/".join(av_ent_path_items) self.log.debug( @@ -2110,6 +2110,7 @@ class SyncEntitiesFactory: entity_dict = self.entities_dict[ftrack_id] + final_parents = entity_dict["final_entity"]["data"]["parents"] if archived_by_id: # if is changeable then unarchive (nothing to check here) if self.changeability_by_mongo_id[mongo_id]: @@ -2123,10 +2124,8 @@ class SyncEntitiesFactory: archived_name = archived_by_id["name"] if ( - archived_name != entity_dict["name"] or - archived_parents != entity_dict["final_entity"]["data"][ - "parents" - ] + archived_name != entity_dict["name"] + or archived_parents != final_parents ): return None @@ -2136,11 +2135,7 @@ class SyncEntitiesFactory: for archived in archived_by_name: mongo_id = str(archived["_id"]) archived_parents = archived.get("data", {}).get("parents") - if ( - archived_parents == entity_dict["final_entity"]["data"][ - "parents" - ] - ): + if archived_parents == final_parents: return mongo_id # Secondly try to find more close to current ftrack entity @@ -2350,8 +2345,7 @@ class SyncEntitiesFactory: continue changed = True - parents = [par for par in _parents] - hierarchy = "/".join(parents) + parents = list(_parents) self.entities_dict[ftrack_id][ "final_entity"]["data"]["parents"] = parents From aa6425cbf1cabc6b988b9008ac7a36151a7fed77 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Dec 2022 17:56:13 +0100 Subject: [PATCH 2282/2550] don't create qapplication if already exists --- openpype/tools/settings/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/settings/__init__.py b/openpype/tools/settings/__init__.py index 3e77a8348a..67e509f116 100644 --- a/openpype/tools/settings/__init__.py +++ b/openpype/tools/settings/__init__.py @@ -24,7 +24,9 @@ def main(user_role=None): user_role, ", ".join(allowed_roles) )) - app = QtWidgets.QApplication(sys.argv) + app = QtWidgets.QApplication.instance() + if not app: + app = QtWidgets.QApplication(sys.argv) app.setWindowIcon(QtGui.QIcon(style.app_icon_path())) widget = MainWidget(user_role) From b5a5c72d896eae245adcf43e9a09c2e1f031ac44 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 10:03:56 +0000 Subject: [PATCH 2283/2550] Comments to resolve. 
--- openpype/hosts/maya/api/mtoa.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py index 6b9b1d6d44..d19fecf6b5 100644 --- a/openpype/hosts/maya/api/mtoa.py +++ b/openpype/hosts/maya/api/mtoa.py @@ -56,7 +56,8 @@ class _AssExtractor(publish.Extractor): return mask def process(self, instance): - + #What is a dry run? + #ass.rr seems like an abstract variable. Needs clarification. dry_run = instance.data.get("ass.rr") staging_dir = self.staging_dir(instance) @@ -92,6 +93,7 @@ class _AssExtractor(publish.Extractor): else: mask = 44 + #get/set should be plugin options. # Generic options if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), False): @@ -108,6 +110,7 @@ class _AssExtractor(publish.Extractor): keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) + #Targets should already be collected targets = self.get_targets(instance) _sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) @@ -116,6 +119,8 @@ class _AssExtractor(publish.Extractor): if not dry_run: self.log.debug("Running command: cmds.arnoldExportAss({})" .format(", ".join(_sorted_kwargs))) + #There should be a context for not updating the viewport from + #pointcache extraction. with vp2_paused_context(): with selection(targets): with self.motion_blur_ctx(mb, keys, length): @@ -131,11 +136,14 @@ class _AssExtractor(publish.Extractor): range_ = range(int(start), int(end) + 1) for i in range_: + #padding amount should be configurable. 3 does not seems + #enough as default. fp = "{}.{:03d}.ass".format(export_path.name, i) with open(fp, "w"): pass result.append(fp) + #Whether its a sequence or not, should already have been determined. if len(result) == 1: filepath = result[0] else: From 11a2ce396b9afec2036668cd69cd1658efee004c Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Fri, 2 Dec 2022 10:28:14 +0000 Subject: [PATCH 2284/2550] Consistent Python version --- website/docs/dev_requirements.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index 1c8958d1c0..fa2d996e20 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -55,7 +55,7 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li ## Python -**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). +**Python 3.7.9** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. From 73393a75b7e33c5dca88dacc4d8f05634da034f3 Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Fri, 2 Dec 2022 10:47:06 +0000 Subject: [PATCH 2285/2550] Note about unrestricted execution on Windows. --- website/docs/dev_build.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index 4e80f6e19d..9c99b26f1e 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -51,7 +51,9 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v #### Run from source -For development purposes it is possible to run OpenPype directly from the source. 
We provide a simple launcher script for this. To run the PowerShell scripts you may have to enable unrestricted execution as administrator: + +`Set-ExecutionPolicy -ExecutionPolicy unrestricted` To start OpenPype from source you need to From 103fd66282217ede3a69c04d9e251e23a6a4dbbd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 2 Dec 2022 19:53:54 +0800 Subject: [PATCH 2286/2550] layout publish more than one container issue --- .../maya/plugins/publish/extract_layout.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index f77835d47f..a11652feb3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -39,15 +39,17 @@ class ExtractLayout(publish.Extractor): assert len(container_list) == 1, \ "Please create instance with loaded asset" containers = cmds.sets(project_container, query=True) - load_asset = asset.split(':')[0] + # list the children of the containers + ass_transform = cmds.listRelatives(containers, allParents=True) + ass = cmds.listRelatives(asset, children=True, type="transform") + # compare the group of the asset with + # the children list of the container + # to find the content which is not loaded from the loader + for a in ass: + if a not in ass_transform: + assert containers == [], \ + "no container found in {}".format(a) for con in containers: - ass_transform = cmds.listRelatives(con, allParents=True)[0] - if load_asset not in ass_transform: - assert containers == [], \ - "No container found for {}".format(asset) - if "_CON" not in con: - assert containers == [], \ - "Container missing for {}".format(asset) container = con representation_id = cmds.getAttr( From a465315f034d9e297a27bdda9a9f37a52b21bc97 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 12:08:59 +0000 Subject: [PATCH 2287/2550] Add optional keyword to suspend_refresh.
--- openpype/hosts/maya/api/lib.py | 8 ++++---- .../plugins/publish/extract_pointcache.py | 20 +++++++------------ 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2530021eba..b2bbb823aa 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -127,14 +127,14 @@ def get_main_window(): @contextlib.contextmanager -def suspended_refresh(): +def suspended_refresh(suspend=True): """Suspend viewport refreshes""" - + original_state = cmds.refresh(query=True, suspend=True) try: - cmds.refresh(suspend=True) + cmds.refresh(suspend=suspend) yield finally: - cmds.refresh(suspend=False) + cmds.refresh(suspend=original_state) @contextlib.contextmanager diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 5f5532e60a..23b76a48c2 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,21 +86,15 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - if instance.data.get("refresh", False): + with suspended_refresh(suspend=instance.data.get("refresh", False)): with maintained_selection(): cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) - else: - with suspended_refresh(): - with maintained_selection(): - cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) + extract_alembic( + file=path, + startFrame=start, + endFrame=end, + **options + ) if "representations" not in instance.data: instance.data["representations"] = [] From b14a0718d274522fe269a10237e628f6110437af Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 14:15:59 +0100 Subject: [PATCH 2288/2550] update change log and history for release --- CHANGELOG.md | 22 ++++++++++++++++++++++ HISTORY.md | 20 ++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c5f2cf8b5..3cca692b68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) + + ## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) diff --git a/HISTORY.md b/HISTORY.md index 04a1073c07..f4e132488b 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,25 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) ## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) From 6078a5746feb875c53d6d37274a52097b447bcd0 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:26:03 +0000 Subject: [PATCH 2289/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 9a34c85bf8..6903ab4d10 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.2" +__version__ = "3.14.8-nightly.3" From 2195cefe8a76e42b93eab2cf5195e9cb30ca79bd Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:32:51 +0000 Subject: [PATCH 2290/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 6903ab4d10..b27b98e2fa 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.3" +__version__ = "3.14.8-nightly.4" From 42984c54667ea1f27229368cbc5fc00e425a5575 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:34:53 +0000 Subject: [PATCH 2291/2550] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index b27b98e2fa..fc687a1263 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.4" +__version__ = "3.14.8" From 412d03d382aef31769f3d6f61e6ec70fa53fda6d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 15:52:04 +0100 Subject: [PATCH 2292/2550] Merge fixes for tests from branch with Deadline tests. Branch with newly implemented DL tests is targetted to release 3.15, but changes from it affects tests in develop. 
This branch should solve issues with automatic tests in develop without need of full release 3.15 changes. --- tests/README.md | 10 +++ tests/conftest.py | 12 +++ tests/integration/hosts/aftereffects/lib.py | 22 ++++-- .../test_publish_in_aftereffects.py | 34 ++++++--- ...test_publish_in_aftereffects_multiframe.py | 36 ++++++--- tests/integration/hosts/maya/lib.py | 19 ++++- .../hosts/maya/test_publish_in_maya.py | 57 ++++++++------ tests/integration/hosts/nuke/lib.py | 31 ++++++-- .../hosts/nuke/test_publish_in_nuke.py | 25 +++++-- tests/integration/hosts/photoshop/lib.py | 11 ++- .../photoshop/test_publish_in_photoshop.py | 6 +- tests/lib/db_handler.py | 23 +++--- tests/lib/testing_classes.py | 70 ++++++++++++++---- tests/resources/test_data.zip | Bin 7350 -> 5098 bytes tests/unit/igniter/test_bootstrap_repos.py | 30 ++++---- 15 files changed, 275 insertions(+), 111 deletions(-) diff --git a/tests/README.md b/tests/README.md index 69828cdbc2..d36b6534f8 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,5 +1,15 @@ Automatic tests for OpenPype ============================ + +Requirements: +============ +Tests are recreating fresh DB for each run, so `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and on Path. + +You can find intallers here: https://www.mongodb.com/docs/database-tools/installation/installation/ + +You can test that `mongorestore` is available by running this in console, or cmd: +```mongorestore --version``` + Structure: - integration - end to end tests, slow (see README.md in the integration folder for more info) - openpype/modules/MODULE_NAME - structure follow directory structure in code base diff --git a/tests/conftest.py b/tests/conftest.py index aa850be1a6..7b58b0314d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,3 +43,15 @@ def app_variant(request): @pytest.fixture(scope="module") def timeout(request): return request.config.getoption("--timeout") + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_makereport(item, call): + # execute all other hooks to obtain the report object + outcome = yield + rep = outcome.get_result() + + # set a report attribute for each phase of a call, which can + # be "setup", "call", "teardown" + + setattr(item, "rep_" + rep.when, rep) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 9fffc6073d..ffad33d13c 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class AfterEffectsTestClass(HostFixtures): +class AEHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -15,15 +18,15 @@ class AfterEffectsTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.aep") - dest_folder = os.path.join(download_test_data, + "test_project_test_asset_test_task_v001.aep") + dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, "work", self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.aep") + "test_project_test_asset_test_task_v001.aep") shutil.copy(src_path, dest_path) yield dest_path @@ -32,3 +35,12 @@ class AfterEffectsTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + # skip folder that contain "Logs", these come only from Deadline + return ["Logs", "Auto-Save"] + + +class AELocalPublishTestClass(AEHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 4925cbd2d7..57d5a3e3f1 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects Uses generic TestCase to prepare fixtures for test data, testing DBs, @@ -32,10 +32,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -49,27 +49,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + 
assert not any(failures) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index c882e0f9b2..2d95eada99 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -1,15 +1,15 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects - Should publish 5 frames + Should publish 10 frames """ PERSIST = True @@ -19,10 +19,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -36,27 +36,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "h264_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + assert not any(failures) diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index f3a438c065..ab402f36e0 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class MayaTestClass(HostFixtures): +class MayaHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -15,7 +18,7 @@ class MayaTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -23,7 +26,7 @@ class MayaTestClass(HostFixtures): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") shutil.copy(src_path, dest_path) yield dest_path @@ -39,3 +42,11 @@ class MayaTestClass(HostFixtures): "{}{}{}".format(startup_path, os.pathsep, original_pythonpath)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + + +class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 68b0564428..b7ee228aae 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,7 +1,8 @@ -from tests.integration.hosts.maya.lib import MayaTestClass +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass -class TestPublishInMaya(MayaTestClass): +class TestPublishInMaya(MayaLocalPublishTestClass): """Basic test case for publishing in Maya Shouldnt be running standalone only via 'runtests' pype command! (??) @@ -28,7 +29,7 @@ class TestPublishInMaya(MayaTestClass): ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") ] - APP = "maya" + APP_GROUP = "maya" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" @@ -37,33 +38,41 @@ class TestPublishInMaya(MayaTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) - assert 11 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" + failures.append(DBAssert.count_of_types(dbcon, "representation", 5)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + 
"context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index d3c3d7ba81..baff675da7 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -1,17 +1,20 @@ import os import pytest -import shutil +import re -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class NukeTestClass(HostFixtures): +class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk" + source_file_name = "test_project_test_asset_test_task_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", @@ -27,7 +30,16 @@ class NukeTestClass(HostFixtures): dest_path = os.path.join(dest_folder, source_file_name) - shutil.copy(src_path, dest_path) + # rewrite old root with temporary file + # TODO - using only C:/projects seems wrong - but where to get root ? + replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE) + with open(src_path, "r") as fp: + updated = fp.read() + updated = replace_pattern.sub(output_folder_url.replace("\\", '/'), + updated) + + with open(dest_path, "w") as fp: + fp.write(updated) yield dest_path @@ -41,4 +53,11 @@ class NukeTestClass(HostFixtures): monkeypatch_session.setenv("NUKE_PATH", "{}{}{}".format(startup_path, os.pathsep, - original_nuke_path)) \ No newline at end of file + original_nuke_path)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + +class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 884160e0b5..f84f13fa20 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -1,17 +1,25 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.nuke.lib import NukeTestClass +from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass log = logging.getLogger("test_publish_in_nuke") -class TestPublishInNuke(NukeTestClass): +class TestPublishInNuke(NukeLocalPublishTestClass): """Basic test case for publishing in Nuke Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects/test_project/test_asset/test_task`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. 
Then checks content of DB (if subset, version, representations were @@ -20,7 +28,8 @@ class TestPublishInNuke(NukeTestClass): How to run: (in cmd with activated {OPENPYPE_ROOT}/.venv) - {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501 + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 To check log/errors from launched app's publish process keep PERSIST to True and check `test_openpype.logs` collection. @@ -30,14 +39,14 @@ class TestPublishInNuke(NukeTestClass): ("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "") ] - APP = "nuke" + APP_GROUP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 50 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - PERSIST = True # True - keep test_db, test_openpype, outputted test files + PERSIST = False # True - keep test_db, test_openpype, outputted test files TEST_DATA_FOLDER = None def test_db_asserts(self, dbcon, publish_finished): @@ -52,7 +61,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -61,7 +70,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 16ef2d3ae6..9d51a11c06 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest +) -class PhotoshopTestClass(HostFixtures): +class PhotoshopTestClass(HostFixtures, PublishTest): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -32,3 +35,7 @@ class PhotoshopTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 5387bbe51e..4aaf43234d 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass): ("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "") ] - APP = "photoshop" + APP_GROUP = "photoshop" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -72,7 +72,7 @@ class TestPublishInPhotoshop(PhotoshopTestClass): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 8)) + DBAssert.count_of_types(dbcon, "representation", 6)) additional_args = {"context.subset": "imageMainForeground", "context.ext": "png"} diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b181055012..82e741cc3b 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,9 +118,8 @@ class DBHandler: "Run with overwrite=True") else: if collection: - coll = self.client[db_name_out].get(collection) - if coll: - coll.drop() + if collection in self.client[db_name_out].list_collection_names(): # noqa + self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) @@ -133,7 +132,11 @@ class DBHandler: db_name=db_name, db_name_out=db_name_out, collection=collection) print("mongorestore query:: {}".format(query)) - subprocess.run(query) + try: + subprocess.run(query) + except FileNotFoundError: + raise RuntimeError("'mongorestore' utility must be on path." 
+ "Please install it.") def teardown(self, db_name): """Drops 'db_name' if exists.""" @@ -231,13 +234,15 @@ class DBHandler: # Examples # handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") -# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa + +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa +# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") -# handler.setup_from_sql("test_db", "c:\\projects\\sql", +# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql", # collection="test_project", # drop=False, mode="upsert") diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 78a9f81095..5e3b11cfc9 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,9 +8,12 @@ import tempfile import shutil import glob import platform +import requests +import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler +from openpype.modules import ModulesManager class BaseTest: @@ -36,9 +39,9 @@ class ModuleUnitTest(BaseTest): PERSIST = False # True to not purge temporary folder nor test DB TEST_OPENPYPE_MONGO = "mongodb://localhost:27017" - TEST_DB_NAME = "test_db" + TEST_DB_NAME = "avalon_tests" TEST_PROJECT_NAME = "test_project" - TEST_OPENPYPE_NAME = "test_openpype" + TEST_OPENPYPE_NAME = "openpype_tests" TEST_FILES = [] @@ -57,7 +60,7 @@ class ModuleUnitTest(BaseTest): m.undo() @pytest.fixture(scope="module") - def download_test_data(self, test_data_folder, persist=False): + def download_test_data(self, test_data_folder, persist, request): test_data_folder = test_data_folder or self.TEST_DATA_FOLDER if test_data_folder: print("Using existing folder {}".format(test_data_folder)) @@ -78,7 +81,8 @@ class ModuleUnitTest(BaseTest): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST + persist = (persist or self.PERSIST or + self.is_test_failed(request)) if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -125,7 +129,8 @@ class ModuleUnitTest(BaseTest): monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data) @pytest.fixture(scope="module") - def db_setup(self, download_test_data, env_var, monkeypatch_session): + def db_setup(self, download_test_data, env_var, monkeypatch_session, + request): """Restore prepared MongoDB dumps into selected DB.""" backup_dir = os.path.join(download_test_data, "input", "dumps") @@ -135,13 +140,14 @@ class ModuleUnitTest(BaseTest): overwrite=True, db_name_out=self.TEST_DB_NAME) - db_handler.setup_from_dump("openpype", backup_dir, + db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir, overwrite=True, db_name_out=self.TEST_OPENPYPE_NAME) yield db_handler - if not self.PERSIST: + persist = self.PERSIST or self.is_test_failed(request) + if not persist: 
db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -166,6 +172,13 @@ class ModuleUnitTest(BaseTest): mongo_client = OpenPypeMongoConnection.get_mongo_client() yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"] + def is_test_failed(self, request): + # if request.node doesn't have rep_call, something failed + try: + return request.node.rep_call.failed + except AttributeError: + return True + class PublishTest(ModuleUnitTest): """Test class for publishing in hosts. @@ -188,7 +201,7 @@ class PublishTest(ModuleUnitTest): TODO: implement test on file size, file content """ - APP = "" + APP_GROUP = "" TIMEOUT = 120 # publish timeout @@ -210,10 +223,10 @@ class PublishTest(ModuleUnitTest): if not app_variant: variant = ( application_manager.find_latest_available_variant_for_group( - self.APP)) + self.APP_GROUP)) app_variant = variant.name - yield "{}/{}".format(self.APP, app_variant) + yield "{}/{}".format(self.APP_GROUP, app_variant) @pytest.fixture(scope="module") def output_folder_url(self, download_test_data): @@ -310,7 +323,8 @@ class PublishTest(ModuleUnitTest): yield True def test_folder_structure_same(self, dbcon, publish_finished, - download_test_data, output_folder_url): + download_test_data, output_folder_url, + skip_compare_folders): """Check if expected and published subfolders contain same files. Compares only presence, not size nor content! @@ -328,12 +342,33 @@ class PublishTest(ModuleUnitTest): glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format( - "\n".join(sorted(not_matched))) + filtered_published = self._filter_files(published, + skip_compare_folders) + + # filter out temp files also in expected + # could be polluted by accident by copying 'output' to zip file + filtered_expected = self._filter_files(expected, skip_compare_folders) + + not_mtched = filtered_expected.symmetric_difference(filtered_published) + if not_mtched: + raise AssertionError("Missing {} files".format( + "\n".join(sorted(not_mtched)))) + + def _filter_files(self, source_files, skip_compare_folders): + """Filter list of files according to regex pattern.""" + filtered = set() + for file_path in source_files: + if skip_compare_folders: + if not any([re.search(val, file_path) + for val in skip_compare_folders]): + filtered.add(file_path) + else: + filtered.add(file_path) + + return filtered -class HostFixtures(PublishTest): +class HostFixtures(): """Host specific fixtures. 
Should be implemented once per host."""

    @pytest.fixture(scope="module")
    def last_workfile_path(self, download_test_data, output_folder_url):
@@ -344,3 +379,8 @@ class HostFixtures(PublishTest):
     def startup_scripts(self, monkeypatch_session, download_test_data):
         """"Adds init scripts (like userSetup) to expected location"""
         raise NotImplementedError
+
+    @pytest.fixture(scope="module")
+    def skip_compare_folders(self):
+        """Use list of regexs to filter out published folders from comparing"""
+        raise NotImplementedError
diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip
index 0faab86b37d5c7d1224e8a92cca766ed80536718..e22b9acdbdbcd7312776e33918ac2a6a9211dab3 100644
GIT binary patch
delta 3519
[base85-encoded binary patch data omitted]
 v2
-    v3 = OpenPypeVersion(1, 2, 3, staging=True)
-    assert str(v3) == "1.2.3+staging"
+    v3 = OpenPypeVersion(1, 2, 3)
+    assert str(v3) == "1.2.3"

-    v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1")
-    assert str(v4) == "1.2.3-rc.1+staging"
+    v4 = OpenPypeVersion(1, 2, 3, prerelease="rc.1")
+    assert str(v4) == "1.2.3-rc.1"
     assert v3 > v4
     assert v1 > v4
     assert v4 <
OpenPypeVersion(1, 2, 3, prerelease="rc.1") @@ -73,7 +73,7 @@ def test_openpype_version(printer): OpenPypeVersion(4, 8, 10), OpenPypeVersion(4, 8, 20), OpenPypeVersion(4, 8, 9), - OpenPypeVersion(1, 2, 3, staging=True), + OpenPypeVersion(1, 2, 3), OpenPypeVersion(1, 2, 3, build="foo") ] res = sorted(sort_versions) @@ -104,27 +104,26 @@ def test_openpype_version(printer): with pytest.raises(ValueError): _ = OpenPypeVersion(version="booobaa") - v11 = OpenPypeVersion(version="4.6.7-foo+staging") + v11 = OpenPypeVersion(version="4.6.7-foo") assert v11.major == 4 assert v11.minor == 6 assert v11.patch == 7 - assert v11.staging is True assert v11.prerelease == "foo" def test_get_main_version(): - ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo") + ver = OpenPypeVersion(1, 2, 3, prerelease="foo") assert ver.get_main_version() == "1.2.3" def test_get_version_path_from_list(): versions = [ OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')), - OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")), + OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")), OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo")) ] path = BootstrapRepos.get_version_path_from_list( - "3.4.5+staging", versions) + "3.4.5", versions) assert path == Path("/bar/baz") @@ -362,12 +361,15 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): result = fix_bootstrap.find_openpype(include_zips=True) # we should have results as file were created assert result is not None, "no OpenPype version found" - # latest item in `result` should be latest version found. + # latest item in `result` should be the latest version found. + # this will be `7.2.10-foo+staging` even with *staging* in since we've + # dropped the logic to handle staging separately and in alphabetical + # sorting it is after `strange`. 
expected_path = Path( d_path / "{}{}{}".format( - test_versions_2[3].prefix, - test_versions_2[3].version, - test_versions_2[3].suffix + test_versions_2[4].prefix, + test_versions_2[4].version, + test_versions_2[4].suffix ) ) assert result, "nothing found" From bf58eb8322e3c7092c7dc5b49f636311493dfb63 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:04:10 +0100 Subject: [PATCH 2293/2550] Hound --- tests/integration/hosts/nuke/lib.py | 1 + tests/lib/testing_classes.py | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index baff675da7..70860b92b3 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -59,5 +59,6 @@ class NukeHostFixtures(HostFixtures): def skip_compare_folders(self): yield [] + class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 5e3b11cfc9..82cc321ae8 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,12 +8,10 @@ import tempfile import shutil import glob import platform -import requests import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler -from openpype.modules import ModulesManager class BaseTest: From 52073873526505251d0087286bdb8775f3c050d9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:07:04 +0100 Subject: [PATCH 2294/2550] Added AE test with old stored instances Release 3.15 will move to New Publisher --- .../test_publish_in_aftereffects_legacy.py | 93 +++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py new file mode 100644 index 0000000000..8c7a74c60e --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -0,0 +1,93 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestPublishInAfterEffects(AELocalPublishTestClass): + """Basic test case for publishing in AfterEffects + + Uses old Pyblish schema of created instances. + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens AfterEffects, run publish on prepared workile. + + Test zip file sets 3 required env vars: + - HEADLESS_PUBLISH - this triggers publish immediately app is open + - IS_TEST - this differentiate between regular webpublish + - PYBLISH_TARGETS + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. 
+ + """ + PERSIST = False + + TEST_FILES = [ + ("1jqI_uG2NusKFvZZF7C0ScHjxFJrlc9F-", + "test_aftereffects_publish_legacy.zip", + "") + ] + + APP_GROUP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTest_taskMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInAfterEffects() From e68ad503e74907bc19a1e7ea71a6a07f675a7e4d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:07:47 +0100 Subject: [PATCH 2295/2550] Remove temporarily AE tests configured for Tray Publisher --- .../test_publish_in_aftereffects.py | 91 ------------------- ...test_publish_in_aftereffects_multiframe.py | 78 ---------------- 2 files changed, 169 deletions(-) delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py deleted file mode 100644 index 57d5a3e3f1..0000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ /dev/null @@ -1,91 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AELocalPublishTestClass): - """Basic test case for publishing in AfterEffects - - Uses generic TestCase to prepare fixtures for test data, testing DBs, - env vars. - - Opens AfterEffects, run publish on prepared workile. - - Test zip file sets 3 required env vars: - - HEADLESS_PUBLISH - this triggers publish immediately app is open - - IS_TEST - this differentiate between regular webpublish - - PYBLISH_TARGETS - - Then checks content of DB (if subset, version, representations were - created. - Checks tmp folder if all expected files were published. 
- - """ - PERSIST = False - - TEST_FILES = [ - ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", - "test_aftereffects_publish.zip", - "") - ] - - APP_GROUP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTest_taskMain")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "aep"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "thumbnail"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "png_png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py deleted file mode 100644 index 2d95eada99..0000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AELocalPublishTestClass): - """Basic test case for publishing in AfterEffects - - Should publish 10 frames - """ - PERSIST = True - - TEST_FILES = [ - ("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT", - "test_aftereffects_publish_multiframe.zip", - "") - ] - - APP_GROUP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTest_taskMain")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "aep"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", 
- "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "thumbnail"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "h264_png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() From 6bba712b98d3a966c89d2589a8dfe6884a4d7391 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 16:10:02 +0100 Subject: [PATCH 2296/2550] nuke: viewer with Rec.709 is correctly returning full name --- openpype/hosts/nuke/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 2691b7447a..bde06e4fd7 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2961,7 +2961,7 @@ def get_viewer_config_from_string(input_string): viewer = split[1] display = split[0] elif "(" in viewer: - pattern = r"([\w\d\s]+).*[(](.*)[)]" + pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]" result = re.findall(pattern, viewer) try: result = result.pop() From 9e40ef96a2b320e413a39281beb12f7b2d6d219a Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 15:29:45 +0000 Subject: [PATCH 2297/2550] Ensure Mongo database directory exists. --- tools/run_mongo.ps1 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index c64ff75969..85b94b0971 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -112,4 +112,6 @@ $mongoPath = Find-Mongo $preferred_version Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan +New-Item -ItemType Directory -Force -Path $($dbpath) + Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null From 1fcc15583c1721636244682b3e06c21b4fda729a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:31:56 +0100 Subject: [PATCH 2298/2550] Fix AE legacy test --- .../hosts/aftereffects/test_publish_in_aftereffects_legacy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py index 8c7a74c60e..5d0c15d63a 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -62,7 +62,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTest_taskMain", + additional_args = {"context.subset": "workfileTest_task", "context.ext": "aep"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, @@ -71,7 +71,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 2, additional_args=additional_args)) 
additional_args = {"context.subset": "renderTest_taskMain", From debcf19e68ac44f139ebf22038f487fbb790975a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 2 Dec 2022 23:32:49 +0800 Subject: [PATCH 2299/2550] fix the ancestor issues for bbox selection --- .../maya/plugins/create/create_proxy_abc.py | 2 -- .../maya/plugins/publish/extract_proxy_abc.py | 22 ++++++------------- 2 files changed, 7 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 4401f3c04f..2d81cb663b 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -24,8 +24,6 @@ class CreateProxyAlembic(plugin.Creator): self.data["writeColorSets"] = self.write_color_sets # Vertex colors with the geometry. self.data["writeFaceSets"] = self.write_face_sets - # only nodes which are visible - self.data["visibleOnly"] = False # Default to exporting world-space self.data["worldSpace"] = True diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index f65626e915..fd70c8506b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -92,12 +92,10 @@ class ExtractProxyAlembic(publish.Extractor): def create_proxy_geometry(self, instance, name_suffix, start, end): nodes = instance[:] - if instance.data.get("visibleOnly", False): - nodes = list(iter_visible_nodes_in_range(nodes, - start=start, - end=end)) + nodes = list(iter_visible_nodes_in_range(nodes, + start=start, + end=end)) inst_selection = cmds.ls(nodes, long=True) - proxy_root = [] bbox = cmds.geomToBBox(inst_selection, nameSuffix=name_suffix, keepOriginal=True, @@ -105,13 +103,7 @@ class ExtractProxyAlembic(publish.Extractor): bakeAnimation=True, startTime=start, endTime=end) - for b in bbox: - dep_node = cmds.ls(b, dag=True, shapes=False, - noIntermediate=True, sn=True) - - for dep in dep_node: - if "Shape" in dep: - continue - proxy_root.append(dep) - self.log.debug("proxy_root: {}".format(proxy_root)) - return proxy_root + bbox_sel = cmds.ls(sl=True, long=True) + # bbox_sel = cmds.listRelatives(allDescendents=True, fullPath=True, type="mesh") + self.log.debug("proxy_root: {}".format(bbox_sel)) + return bbox_sel From e2ec1457c9014c164ed6ec807416c66ae19a5950 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:35:04 +0100 Subject: [PATCH 2300/2550] Fix Nuke legacy test Legacy Nuke tests are not cleaning up `renders` folders. Branch with DL version disables Cleanup for now. 
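The `"renders"` entry added below feeds the `skip_compare_folders` fixture; `PublishTest._filter_files` (introduced earlier in this series) treats each entry as a regular expression and drops matching paths from both the expected and the published file sets before comparing them. A minimal standalone sketch of that filtering, using made-up paths:

    import re


    def filter_files(source_files, skip_compare_folders):
        """Keep only paths matching none of the skip patterns.

        Mirrors the logic of PublishTest._filter_files from this series.
        """
        if not skip_compare_folders:
            return set(source_files)
        return {
            path for path in source_files
            if not any(re.search(pattern, path)
                       for pattern in skip_compare_folders)
        }


    published = {
        "C:/output/renders/nuke/beauty.0001.exr",  # made-up path
        "C:/output/review/review_h264.mp4",        # made-up path
    }
    print(filter_files(published, ["renders"]))
    # -> {'C:/output/review/review_h264.mp4'}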
--- tests/integration/hosts/nuke/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index 70860b92b3..96daec7427 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -57,7 +57,7 @@ class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def skip_compare_folders(self): - yield [] + yield ["renders"] class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): From 8b71066d9c33d782ca2520bce251fe733e4d8ad5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Dec 2022 16:53:09 +0100 Subject: [PATCH 2301/2550] :art: add menu and basic publishing support --- openpype/hosts/max/api/__init__.py | 2 - openpype/hosts/max/api/lib.py | 66 ++++++++++- openpype/hosts/max/api/menu.py | 64 +++++++++-- openpype/hosts/max/api/pipeline.py | 63 +++++----- openpype/hosts/max/api/plugin.py | 108 ++++++++++++++++++ .../max/plugins/create/create_pointcache.py | 21 ++++ openpype/hosts/max/startup/startup.ms | 3 +- 7 files changed, 284 insertions(+), 43 deletions(-) create mode 100644 openpype/hosts/max/api/plugin.py create mode 100644 openpype/hosts/max/plugins/create/create_pointcache.py diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index b6998df862..503afade73 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -4,10 +4,8 @@ from .pipeline import ( MaxHost ) -from .menu import OpenPypeMenu __all__ = [ "MaxHost", - "OpenPypeMenu" ] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py index e50de85f68..8a57bb1bf6 100644 --- a/openpype/hosts/max/api/lib.py +++ b/openpype/hosts/max/api/lib.py @@ -1,2 +1,64 @@ -def imprint(attr, data): - ... +# -*- coding: utf-8 -*- +"""Library of functions useful for 3dsmax pipeline.""" +from pymxs import runtime as rt +from typing import Union + + +def imprint(node_name: str, data: dict) -> bool: + node = rt.getNodeByName(node_name) + if not node: + return False + + for k, v in data.items(): + rt.setUserProp(node, k, v) + + return True + + +def lsattr( + attr: str, + value: Union[str, None] = None, + root: Union[str, None] = None) -> list: + """List nodes having attribute with specified value. + + Args: + attr (str): Attribute name to match. + value (str, Optional): Value to match, of omitted, all nodes + with specified attribute are returned no matter of value. + root (str, Optional): Root node name. If omitted, scene root is used. + + Returns: + list of nodes. 
+ """ + root = rt.rootnode if root is None else rt.getNodeByName(root) + + def output_node(node, nodes): + nodes.append(node) + for child in node.Children: + output_node(child, nodes) + + nodes = [] + output_node(root, nodes) + if not value: + return [n for n in nodes if rt.getUserProp(n, attr)] + + return [n for n in nodes if rt.getUserProp(n, attr) == value] + + +def read(container) -> dict: + data = {} + props = rt.getUserPropBuffer(container) + # this shouldn't happen but let's guard against it anyway + if not props: + return data + + for line in props.split("\r\n"): + key, value = line.split("=") + # if the line cannot be split we can't really parse it + if not key: + continue + data[key.strip()] = value.strip() + + data["instance_node"] = container + + return data diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py index 13ca503b4d..d1913c51e0 100644 --- a/openpype/hosts/max/api/menu.py +++ b/openpype/hosts/max/api/menu.py @@ -1,29 +1,70 @@ # -*- coding: utf-8 -*- """3dsmax menu definition of OpenPype.""" -from abc import ABCMeta, abstractmethod -import six from Qt import QtWidgets, QtCore from pymxs import runtime as rt from openpype.tools.utils import host_tools -@six.add_metaclass(ABCMeta) class OpenPypeMenu(object): + """Object representing OpenPype menu. + + This is using "hack" to inject itself before "Help" menu of 3dsmax. + For some reason `postLoadingMenus` event doesn't fire, and main menu + if probably re-initialized by menu templates, se we wait for at least + 1 event Qt event loop before trying to insert. + + """ def __init__(self): + super().__init__() self.main_widget = self.get_main_widget() + self.menu = None + + timer = QtCore.QTimer() + # set number of event loops to wait. + timer.setInterval(1) + timer.timeout.connect(self._on_timer) + timer.start() + + self._timer = timer + self._counter = 0 + + def _on_timer(self): + if self._counter < 1: + self._counter += 1 + return + + self._counter = 0 + self._timer.stop() + self.build_openpype_menu() @staticmethod def get_main_widget(): """Get 3dsmax main window.""" return QtWidgets.QWidget.find(rt.windows.getMAXHWND()) - def get_main_menubar(self): + def get_main_menubar(self) -> QtWidgets.QMenuBar: """Get main Menubar by 3dsmax main window.""" return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0] - def get_or_create_openpype_menu(self, name="&OpenPype", before="&Help"): + def get_or_create_openpype_menu( + self, name: str = "&OpenPype", + before: str = "&Help") -> QtWidgets.QAction: + """Create OpenPype menu. + + Args: + name (str, Optional): OpenPypep menu name. + before (str, Optional): Name of the 3dsmax main menu item to + add OpenPype menu before. + + Returns: + QtWidgets.QAction: OpenPype menu action. 
+ + """ + if self.menu is not None: + return self.menu + menu_bar = self.get_main_menubar() menu_items = menu_bar.findChildren( QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly) @@ -37,10 +78,13 @@ class OpenPypeMenu(object): help_action = item.menuAction() op_menu = QtWidgets.QMenu("&OpenPype") - menu_bar.insertMenu(before, op_menu) + menu_bar.insertMenu(help_action, op_menu) + + self.menu = op_menu return op_menu - def build_openpype_menu(self): + def build_openpype_menu(self) -> QtWidgets.QAction: + """Build items in OpenPype menu.""" openpype_menu = self.get_or_create_openpype_menu() load_action = QtWidgets.QAction("Load...", openpype_menu) load_action.triggered.connect(self.load_callback) @@ -63,18 +107,24 @@ class OpenPypeMenu(object): workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) workfiles_action.triggered.connect(self.workfiles_callback) openpype_menu.addAction(workfiles_action) + return openpype_menu def load_callback(self): + """Callback to show Loader tool.""" host_tools.show_loader(parent=self.main_widget) def publish_callback(self): + """Callback to show Publisher tool.""" host_tools.show_publisher(parent=self.main_widget) def manage_callback(self): + """Callback to show Scene Manager/Inventory tool.""" host_tools.show_subset_manager(parent=self.main_widget) def library_callback(self): + """Callback to show Library Loader tool.""" host_tools.show_library_loader(parent=self.main_widget) def workfiles_callback(self): + """Callback to show Workfiles tool.""" host_tools.show_workfiles(parent=self.main_widget) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index 2ee5989871..cef45193c4 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -5,6 +5,8 @@ import sys import logging import contextlib +import json + from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api from openpype.pipeline import ( @@ -12,7 +14,7 @@ from openpype.pipeline import ( register_loader_plugin_path, AVALON_CONTAINER_ID, ) -from openpype.hosts.max.api import OpenPypeMenu +from openpype.hosts.max.api.menu import OpenPypeMenu from openpype.hosts.max.api import lib from openpype.hosts.max import MAX_HOST_DIR from openpype.pipeline.load import any_outdated_containers @@ -32,6 +34,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + name = "max" menu = None @@ -46,23 +49,10 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): pyblish.api.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) - log.info("Building menu ...") + # self._register_callbacks() self.menu = OpenPypeMenu() - log.info("Installing callbacks ... 
") - # register_event_callback("init", on_init) - self._register_callbacks() - - # register_event_callback("before.save", before_save) - # register_event_callback("save", on_save) - # register_event_callback("open", on_open) - # register_event_callback("new", on_new) - - # pyblish.api.register_callback( - # "instanceToggled", on_pyblish_instance_toggled - # ) - self._has_been_setup = True def has_unsaved_changes(self): @@ -70,7 +60,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): return True def get_workfile_extensions(self): - return [".hip", ".hiplc", ".hipnc"] + return [".max"] def save_workfile(self, dst_path=None): rt.saveMaxFile(dst_path) @@ -88,17 +78,15 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): return ls() def _register_callbacks(self): - for event in self._op_events.copy().values(): - if event is None: - continue + rt.callbacks.removeScripts(id=rt.name("OpenPypeCallbacks")) - try: - rt.callbacks.removeScript(id=rt.name(event.name)) - except RuntimeError as e: - log.info(e) + rt.callbacks.addScript( + rt.Name("postLoadingMenus"), + self._deferred_menu_creation, id=rt.Name('OpenPypeCallbacks')) - rt.callbacks.addScript( - event.name, event.callback, id=rt.Name('OpenPype')) + def _deferred_menu_creation(self): + self.log.info("Building menu ...") + self.menu = OpenPypeMenu() @staticmethod def create_context_node(): @@ -128,12 +116,12 @@ attributes "OpenPypeContext" def update_context_data(self, data, changes): try: - context = rt.rootScene.OpenPypeContext.context + _ = rt.rootScene.OpenPypeContext.context except AttributeError: # context node doesn't exists - context = self.create_context_node() + self.create_context_node() - lib.imprint(context, data) + rt.rootScene.OpenPypeContext.context = json.dumps(data) def get_context_data(self): try: @@ -141,7 +129,9 @@ attributes "OpenPypeContext" except AttributeError: # context node doesn't exists context = self.create_context_node() - return lib.read(context) + if not context: + context = "{}" + return json.loads(context) def save_file(self, dst_path=None): # Force forwards slashes to avoid segfault @@ -149,5 +139,16 @@ attributes "OpenPypeContext" rt.saveMaxFile(dst_path) -def ls(): - ... 
\ No newline at end of file +def ls() -> list: + """Get all OpenPype instances.""" + objs = rt.objects + containers = [ + obj for obj in objs + if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID + ] + + for container in sorted(containers, key=lambda name: container.name): + yield lib.read(container) + + + diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py new file mode 100644 index 0000000000..0f01c94ce1 --- /dev/null +++ b/openpype/hosts/max/api/plugin.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +"""3dsmax specific Avalon/Pyblish plugin definitions.""" +import sys +from pymxs import runtime as rt +import six +from abc import ABCMeta +from openpype.pipeline import ( + CreatorError, + Creator, + CreatedInstance +) +from openpype.lib import BoolDef +from .lib import imprint, read, lsattr + + +class OpenPypeCreatorError(CreatorError): + pass + + +class MaxCreatorBase(object): + + @staticmethod + def cache_subsets(shared_data): + if shared_data.get("max_cached_subsets") is None: + shared_data["max_cached_subsets"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.get("creator_identifier") + if creator_id not in shared_data["max_cached_subsets"]: + shared_data["houdini_cached_subsets"][creator_id] = [i] + else: + shared_data[ + "houdini_cached_subsets"][creator_id].append(i) # noqa + return shared_data + + @staticmethod + def create_instance_node(node_name: str, parent: str = ""): + parent_node = rt.getNodeByName(parent) if parent else rt.rootScene + if not parent_node: + raise OpenPypeCreatorError(f"Specified parent {parent} not found") + + container = rt.container(name=node_name) + container.Parent = parent_node + + return container + + +@six.add_metaclass(ABCMeta) +class MaxCreator(Creator, MaxCreatorBase): + selected_nodes = [] + + def create(self, subset_name, instance_data, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = rt.getCurrentSelection() + + instance_node = self.create_instance_node(subset_name) + instance_data["instance_node"] = instance_node.name + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + self._add_instance_to_context(instance) + imprint(instance_node.name, instance.data_to_store()) + return instance + + def collect_instances(self): + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data[ + "max_cached_subsets"].get(self.identifier, []): + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = created_inst.get("instance_node") + + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } + imprint( + instance_node, + new_values, + ) + + def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. 
+ + """ + for instance in instances: + instance_node = rt.getNodeByName( + instance.data.get("instance_node")) + if instance_node: + rt.delete(instance_node) + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py new file mode 100644 index 0000000000..4c9ec7fb97 --- /dev/null +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" +from openpype.hosts.max.api import plugin +from openpype.pipeline import CreatedInstance + + +class CreatePointCache(plugin.MaxCreator): + identifier = "io.openpype.creators.max.pointcache" + label = "Point Cache" + family = "pointcache" + icon = "gear" + + def create(self, subset_name, instance_data, pre_create_data): + from pymxs import runtime as rt + + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/startup/startup.ms b/openpype/hosts/max/startup/startup.ms index 94318afb01..aee40eb6bc 100644 --- a/openpype/hosts/max/startup/startup.ms +++ b/openpype/hosts/max/startup/startup.ms @@ -2,7 +2,8 @@ ( local sysPath = dotNetClass "System.IO.Path" local sysDir = dotNetClass "System.IO.Directory" - local startup = sysPath.Combine (sysPath.GetDirectoryName getSourceFile) "startup.py" + local localScript = getThisScriptFilename() + local startup = sysPath.Combine (sysPath.GetDirectoryName localScript) "startup.py" python.ExecuteFile startup ) \ No newline at end of file From f96e4f162633425c00b077043624c09982a44d3d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:20:18 +0800 Subject: [PATCH 2302/2550] layout publish more than one container issue --- .../maya/plugins/publish/extract_layout.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index a11652feb3..e0bf158851 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -37,19 +37,14 @@ class ExtractLayout(publish.Extractor): project_container = self.project_container container_list = cmds.ls(project_container) assert len(container_list) == 1, \ - "Please create instance with loaded asset" - containers = cmds.sets(project_container, query=True) + "Please create instance with loaded asset!" # list the children of the containers - ass_transform = cmds.listRelatives(containers, allParents=True) - ass = cmds.listRelatives(asset, children=True, type="transform") - # compare the group of the asset with - # the children list of the container - # to find the content which is not loaded from the loader - for a in ass: - if a not in ass_transform: + grp_name = asset.split(':')[0] + con_sel = cmds.ls("{}*_CON".format(grp_name)) + if not con_sel: assert containers == [], \ - "no container found in {}".format(a) - for con in containers: + "Use all loaded contents without renaming and grouping!" 
# noqa + for con in con_sel: container = con representation_id = cmds.getAttr( From 1719e33b00807c336fdf6367460b9fb386a91930 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 17:20:32 +0100 Subject: [PATCH 2303/2550] flame: create vertically aligned subsets fix --- openpype/hosts/flame/api/plugin.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 26129ebaa6..7e012330cf 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -596,18 +596,19 @@ class PublishableClip: if not hero_track and self.vertical_sync: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): - hero_data.update({"heroTrack": False}) - if _in == self.clip_in and _out == self.clip_out: + _hero_data = deepcopy(hero_data) + _hero_data.update({"heroTrack": False}) + if _in <= self.clip_in and _out >= self.clip_out: data_subset = hero_data["subset"] # add track index in case duplicity of names in hero data if self.subset in data_subset: - hero_data["subset"] = self.subset + str( + _hero_data["subset"] = self.subset + str( self.track_index) # in case track name and subset name is the same then add if self.subset_name == self.track_name: - hero_data["subset"] = self.subset + _hero_data["subset"] = self.subset # assing data to return hierarchy data to tag - tag_hierarchy_data = hero_data + tag_hierarchy_data = _hero_data # add data to return data dict self.marker_data.update(tag_hierarchy_data) From f50fef2be8829e273ac08d85a431c1c1352e2b39 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:21:31 +0800 Subject: [PATCH 2304/2550] layout publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index e0bf158851..67a4bc564e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -40,11 +40,11 @@ class ExtractLayout(publish.Extractor): "Please create instance with loaded asset!" # list the children of the containers grp_name = asset.split(':')[0] - con_sel = cmds.ls("{}*_CON".format(grp_name)) - if not con_sel: + containers = cmds.ls("{}*_CON".format(grp_name)) + if not containers: assert containers == [], \ "Use all loaded contents without renaming and grouping!" # noqa - for con in con_sel: + for con in containers: container = con representation_id = cmds.getAttr( From 99c7faf78ff9e1ab2d7692a16f3769074ed895f4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:24:29 +0800 Subject: [PATCH 2305/2550] layer publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 67a4bc564e..bf41ca65ba 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -40,11 +40,11 @@ class ExtractLayout(publish.Extractor): "Please create instance with loaded asset!" 
# list the children of the containers grp_name = asset.split(':')[0] - containers = cmds.ls("{}*_CON".format(grp_name)) - if not containers: - assert containers == [], \ + container_sel = cmds.ls("{}*_CON".format(grp_name)) + if not container_sel: + assert container_sel == [], \ "Use all loaded contents without renaming and grouping!" # noqa - for con in containers: + for con in container_sel: container = con representation_id = cmds.getAttr( From 632ee268e172256b63eb88d081abcfe1bbbdee00 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:26:25 +0800 Subject: [PATCH 2306/2550] layer publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index bf41ca65ba..4ad2248d62 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -42,8 +42,8 @@ class ExtractLayout(publish.Extractor): grp_name = asset.split(':')[0] container_sel = cmds.ls("{}*_CON".format(grp_name)) if not container_sel: - assert container_sel == [], \ - "Use all loaded contents without renaming and grouping!" # noqa + assert container_sel == [], \ + "Use all loaded contents without renaming and grouping!" # noqa for con in container_sel: container = con From b15b5832241fda937dfda287220a70e66f8bcb7e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 3 Dec 2022 03:28:38 +0000 Subject: [PATCH 2307/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index fc687a1263..5e61ee3a6b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8" +__version__ = "3.14.9-nightly.1" From ee921e0bd4f384a3a94707d706f251e4aa997927 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Sat, 3 Dec 2022 17:04:02 +0700 Subject: [PATCH 2308/2550] Removed class variable and TODOs --- .../plugins/publish/validate_texture_workfiles.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index a25b80438d..a7ae02a2eb 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -20,10 +20,6 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True - #TODO(2-rec): remove/change comment - # from presets - main_workfile_extensions = ['mra'] - def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] @@ -43,17 +39,19 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): formatting_data=formatting_data ) - @classmethod - def get_main_workfile_extensions(cls): + @staticmethod + def get_main_workfile_extensions(): project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) - #TODO: find better way? 
(depends on other plugin) try: extensions = (project_settings["standalonepublisher"] ["publish"] ["CollectTextures"] ["main_workfile_extensions"]) except KeyError: - extensions = cls.main_workfile_extensions + raise Exception("Setting 'Main workfile extensions' not found." + " The setting must be set for the" + " 'Collect Texture' publish plugin of the" + " 'Standalone Publish' tool.") return extensions From d57fdcf7971bd99422b676a2a9074b2f7eb4e767 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sun, 4 Dec 2022 19:16:01 +0800 Subject: [PATCH 2309/2550] layout publish more than one container issue --- .../maya/plugins/create/create_layout.py | 7 +++++++ .../maya/plugins/publish/extract_layout.py | 21 ++++++++++++------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_layout.py b/openpype/hosts/maya/plugins/create/create_layout.py index 6dc87430aa..9fc0c5c4bc 100644 --- a/openpype/hosts/maya/plugins/create/create_layout.py +++ b/openpype/hosts/maya/plugins/create/create_layout.py @@ -8,3 +8,10 @@ class CreateLayout(plugin.Creator): label = "Layout" family = "layout" icon = "cubes" + def __init__(self, *args, **kwargs): + super(CreateLayout, self).__init__(*args, **kwargs) + + + # enable this when you want to + # publish group of loaded asset + self.data["groupLoadedAssets"] = False diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 4ad2248d62..6b9af68997 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -38,14 +38,19 @@ class ExtractLayout(publish.Extractor): container_list = cmds.ls(project_container) assert len(container_list) == 1, \ "Please create instance with loaded asset!" - # list the children of the containers - grp_name = asset.split(':')[0] - container_sel = cmds.ls("{}*_CON".format(grp_name)) - if not container_sel: - assert container_sel == [], \ - "Use all loaded contents without renaming and grouping!" 
# noqa - for con in container_sel: - container = con + + grp_loaded_ass = instance.data.get("groupLoadedAssets", False) + if grp_loaded_ass: + asset_list = cmds.listRelatives(asset, children=True) + for asset in asset_list: + grp_name = asset.split(':')[0] + else: + grp_name = asset.split(':')[0] + containers = cmds.ls("{}*_CON".format(grp_name)) + assert len(containers) > 0, \ + "Use all loaded contents without renaming" \ + "(and/or grouping if groupLoadedAssets disabled)" # noqa + container = containers[0] representation_id = cmds.getAttr( "{}.representation".format(container)) From 9f7377c304b20dc954f5d0d91c1d1831cdfd1c2e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sun, 4 Dec 2022 19:16:51 +0800 Subject: [PATCH 2310/2550] layout publish more than one container issue --- openpype/hosts/maya/plugins/create/create_layout.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/create/create_layout.py b/openpype/hosts/maya/plugins/create/create_layout.py index 9fc0c5c4bc..6f5b363693 100644 --- a/openpype/hosts/maya/plugins/create/create_layout.py +++ b/openpype/hosts/maya/plugins/create/create_layout.py @@ -8,6 +8,7 @@ class CreateLayout(plugin.Creator): label = "Layout" family = "layout" icon = "cubes" + def __init__(self, *args, **kwargs): super(CreateLayout, self).__init__(*args, **kwargs) From 5ed958056a079e5090b75ba1050f6c6bb6c3bcac Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sun, 4 Dec 2022 19:18:04 +0800 Subject: [PATCH 2311/2550] layout publish more than one container issue --- openpype/hosts/maya/plugins/create/create_layout.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_layout.py b/openpype/hosts/maya/plugins/create/create_layout.py index 6f5b363693..1768a3d49e 100644 --- a/openpype/hosts/maya/plugins/create/create_layout.py +++ b/openpype/hosts/maya/plugins/create/create_layout.py @@ -11,8 +11,6 @@ class CreateLayout(plugin.Creator): def __init__(self, *args, **kwargs): super(CreateLayout, self).__init__(*args, **kwargs) - - # enable this when you want to # publish group of loaded asset self.data["groupLoadedAssets"] = False From 7772ac7ea9125e2d06d5e31cf5771bb7e2211ca1 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 15:58:37 +0700 Subject: [PATCH 2312/2550] Added missing parenthesis --- .../event_handlers_server/event_first_version_status.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py b/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py index ecc6c95d90..8ef333effd 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py +++ b/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py @@ -135,9 +135,9 @@ class FirstVersionStatus(BaseEvent): new_status = asset_version_statuses.get(found_item["status"]) if not new_status: - self.log.warning( + self.log.warning(( "AssetVersion doesn't have status `{}`." 
- ).format(found_item["status"]) + ).format(found_item["status"])) continue try: From c071724bcf16e25e2139bad404227de3ef2c83f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 10:19:28 +0100 Subject: [PATCH 2313/2550] remove tk_library and tcl_librabry envs for nuke processes --- openpype/hosts/hiero/addon.py | 5 +++++ openpype/hosts/nuke/addon.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py index f5bb94dbaa..1cc7a8637e 100644 --- a/openpype/hosts/hiero/addon.py +++ b/openpype/hosts/hiero/addon.py @@ -27,7 +27,12 @@ class HieroAddon(OpenPypeModule, IHostAddon): new_hiero_paths.append(norm_path) env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + # Remove auto screen scale factor for Qt + # - let Hiero decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py index 1c5d5c4005..9d25afe2b6 100644 --- a/openpype/hosts/nuke/addon.py +++ b/openpype/hosts/nuke/addon.py @@ -27,7 +27,12 @@ class NukeAddon(OpenPypeModule, IHostAddon): new_nuke_paths.append(norm_path) env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + # Remove auto screen scale factor for Qt + # - let Nuke decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] From f9680ccb78ebf2a1addd4f6b76a3cc776f09aff4 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 16:26:01 +0700 Subject: [PATCH 2314/2550] Removed extra quotes --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a..f0a38eee29 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -915,7 +915,7 @@ "current_context": [ { "subset_name_filters": [ - "\".+[Mm]ain\"" + ".+[Mm]ain" ], "families": [ "model" From 605432cc5db24201a124a4bf220200f672f502bf Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 16:30:01 +0700 Subject: [PATCH 2315/2550] Removed duplicate command --- openpype/tools/standalonepublish/widgets/widget_family.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index eab66d75b3..bd984942b8 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -194,9 +194,6 @@ class FamilyWidget(QtWidgets.QWidget): project_name, asset_name, fields=["_id"] ) - # Get plugin and family - plugin = item.data(PluginRole) - # Early exit if no asset name if not asset_name.strip(): self._build_menu([]) From c0b05e5846eecf7788d7ec3866023c83e4dded70 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 5 Dec 2022 10:50:58 +0100 Subject: [PATCH 2316/2550] add break and better explanation of procedure --- openpype/hosts/flame/api/plugin.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 7e012330cf..0d45792a38 100644 --- 
a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -596,6 +596,14 @@ class PublishableClip: if not hero_track and self.vertical_sync: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): + """ + Since only one instance of hero clip is expected in + `self.vertical_clip_match`, this will loop only once + until none hero clip will be matched with hero clip. + + `tag_hierarchy_data` will be used only once for every + clip which is not hero clip. + """ _hero_data = deepcopy(hero_data) _hero_data.update({"heroTrack": False}) if _in <= self.clip_in and _out >= self.clip_out: @@ -609,6 +617,7 @@ class PublishableClip: _hero_data["subset"] = self.subset # assing data to return hierarchy data to tag tag_hierarchy_data = _hero_data + break # add data to return data dict self.marker_data.update(tag_hierarchy_data) From ca1d518dd0dd026124b8879fdb446b34a170cc05 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 5 Dec 2022 10:53:03 +0100 Subject: [PATCH 2317/2550] comment improvement --- openpype/hosts/flame/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 0d45792a38..ca113fd98a 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -601,7 +601,7 @@ class PublishableClip: `self.vertical_clip_match`, this will loop only once until none hero clip will be matched with hero clip. - `tag_hierarchy_data` will be used only once for every + `tag_hierarchy_data` will be set only once for every clip which is not hero clip. """ _hero_data = deepcopy(hero_data) From ba149de263a5edef4468e9822ee6e44b2f557789 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 16:55:07 +0700 Subject: [PATCH 2318/2550] Check asset is 'not selected' --- .../standalonepublish/widgets/widget_family.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index bd984942b8..e1cbb8d397 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -186,16 +186,11 @@ class FamilyWidget(QtWidgets.QWidget): if item is None: return - asset_doc = None - if asset_name != self.NOT_SELECTED: - # Get the assets from the database which match with the name - project_name = self.dbcon.active_project() - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["_id"] - ) - # Early exit if no asset name - if not asset_name.strip(): + if ( + asset_name == self.NOT_SELECTED + or not asset_name.strip() + ): self._build_menu([]) item.setData(ExistsRole, False) print("Asset name is required ..") @@ -207,8 +202,10 @@ class FamilyWidget(QtWidgets.QWidget): asset_doc = get_asset_by_name( project_name, asset_name, fields=["_id"] ) + # Get plugin plugin = item.data(PluginRole) + if asset_doc and plugin: asset_id = asset_doc["_id"] task_name = self.dbcon.Session["AVALON_TASK"] From a4fc2913b49e7430638701ed89cff1eae77248c1 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Dec 2022 18:43:13 +0800 Subject: [PATCH 2319/2550] layout publish more than one container issue --- .../hosts/maya/plugins/publish/extract_layout.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py 
index 6b9af68997..7921fca069 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -36,8 +36,10 @@ class ExtractLayout(publish.Extractor): # Find the container project_container = self.project_container container_list = cmds.ls(project_container) - assert len(container_list) == 1, \ - "Please create instance with loaded asset!" + if len(container_list) == 0: + self.log.warning("Project container is not found!") + self.log.warning("The asset(s) may not be properly loaded after published") # noqa + continue grp_loaded_ass = instance.data.get("groupLoadedAssets", False) if grp_loaded_ass: @@ -47,9 +49,10 @@ class ExtractLayout(publish.Extractor): else: grp_name = asset.split(':')[0] containers = cmds.ls("{}*_CON".format(grp_name)) - assert len(containers) > 0, \ - "Use all loaded contents without renaming" \ - "(and/or grouping if groupLoadedAssets disabled)" # noqa + if len(containers) == 0: + self.log.warning("{} isn't from the loader".format(asset)) + self.log.warning("It may not be properly loaded after published") # noqa + continue container = containers[0] representation_id = cmds.getAttr( From ef02b58ea9903a77b33466706dc0eea00e98f80c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:36:46 +0000 Subject: [PATCH 2320/2550] Renaming some unreal loaders --- ...{load_alembic_geometrycache.py => load_geometrycache_abc.py} | 0 .../{load_alembic_skeletalmesh.py => load_skeletalmesh_abc.py} | 2 +- .../plugins/load/{load_rig.py => load_skeletalmesh_fbx.py} | 0 .../load/{load_alembic_staticmesh.py => load_staticmesh_abc.py} | 2 +- .../load/{load_staticmeshfbx.py => load_staticmesh_fbx.py} | 0 5 files changed, 2 insertions(+), 2 deletions(-) rename openpype/hosts/unreal/plugins/load/{load_alembic_geometrycache.py => load_geometrycache_abc.py} (100%) rename openpype/hosts/unreal/plugins/load/{load_alembic_skeletalmesh.py => load_skeletalmesh_abc.py} (99%) rename openpype/hosts/unreal/plugins/load/{load_rig.py => load_skeletalmesh_fbx.py} (100%) rename openpype/hosts/unreal/plugins/load/{load_alembic_staticmesh.py => load_staticmesh_abc.py} (99%) rename openpype/hosts/unreal/plugins/load/{load_staticmeshfbx.py => load_staticmesh_fbx.py} (100%) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py b/openpype/hosts/unreal/plugins/load/load_geometrycache_abc.py similarity index 100% rename from openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py rename to openpype/hosts/unreal/plugins/load/load_geometrycache_abc.py diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py b/openpype/hosts/unreal/plugins/load/load_skeletalmesh_abc.py similarity index 99% rename from openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py rename to openpype/hosts/unreal/plugins/load/load_skeletalmesh_abc.py index 9fe5f3ab4b..e316d255e9 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_skeletalmesh_abc.py @@ -14,7 +14,7 @@ import unreal # noqa class SkeletalMeshAlembicLoader(plugin.Loader): """Load Unreal SkeletalMesh from Alembic""" - families = ["pointcache"] + families = ["pointcache", "skeletalMesh"] label = "Import Alembic Skeletal Mesh" representations = ["abc"] icon = "cube" diff --git a/openpype/hosts/unreal/plugins/load/load_rig.py b/openpype/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py similarity index 100% rename from openpype/hosts/unreal/plugins/load/load_rig.py rename to 
openpype/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_staticmesh_abc.py similarity index 99% rename from openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py rename to openpype/hosts/unreal/plugins/load/load_staticmesh_abc.py index a5b9cbd1fc..c7841cef53 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_staticmesh_abc.py @@ -14,7 +14,7 @@ import unreal # noqa class StaticMeshAlembicLoader(plugin.Loader): """Load Unreal StaticMesh from Alembic""" - families = ["model"] + families = ["model", "staticMesh"] label = "Import Alembic Static Mesh" representations = ["abc"] icon = "cube" diff --git a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py b/openpype/hosts/unreal/plugins/load/load_staticmesh_fbx.py similarity index 100% rename from openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py rename to openpype/hosts/unreal/plugins/load/load_staticmesh_fbx.py From 012f1097862da05150b26e49bec86f66c7f142a1 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:38:31 +0000 Subject: [PATCH 2321/2550] Renaming current SkeletalMesh extractor to be FBX specific --- ...al_skeletalmesh.py => extract_unreal_skeletalmesh_fbx.py} | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) rename openpype/hosts/maya/plugins/publish/{extract_unreal_skeletalmesh.py => extract_unreal_skeletalmesh_fbx.py} (95%) diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py similarity index 95% rename from openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py rename to openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py index 258120db2f..b162ce47f7 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py @@ -21,12 +21,13 @@ def renamed(original_name, renamed_name): cmds.rename(renamed_name, original_name) -class ExtractUnrealSkeletalMesh(publish.Extractor): +class ExtractUnrealSkeletalMeshFbx(publish.Extractor): """Extract Unreal Skeletal Mesh as FBX from Maya. 
""" order = pyblish.api.ExtractorOrder - 0.1 - label = "Extract Unreal Skeletal Mesh" + label = "Extract Unreal Skeletal Mesh - FBX" families = ["skeletalMesh"] + optional = True def process(self, instance): fbx_exporter = fbx.FBXExtractor(log=self.log) From 84689e086604058d93a88564ad2b669c50b5de68 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:39:40 +0000 Subject: [PATCH 2322/2550] Implemented the extractor for Alembic SkeletalMesh --- .../create/create_unreal_skeletalmesh.py | 16 +++ .../extract_unreal_skeletalmesh_abc.py | 111 ++++++++++++++++++ 2 files changed, 127 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py diff --git a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index 1a8e84c80d..424f456310 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -48,3 +48,19 @@ class CreateUnrealSkeletalMesh(plugin.Creator): cmds.sets(node, forceElement=joints_set) else: cmds.sets(node, forceElement=geometry_set) + + # Add animation data + self.data.update(lib.collect_animation_data()) + + # Only renderable visible shapes + self.data["renderableOnly"] = False + # only nodes that are visible + self.data["visibleOnly"] = False + # Include parent groups + self.data["includeParentHierarchy"] = False + # Default to exporting world-space + self.data["worldSpace"] = True + + # Add options for custom attributes + self.data["attr"] = "" + self.data["attrPrefix"] = "" diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py new file mode 100644 index 0000000000..9ce904b86d --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +"""Create Unreal Skeletal Mesh data to be extracted as FBX.""" +import os +from contextlib import contextmanager + +from maya import cmds # noqa + +import pyblish.api + +from openpype.pipeline import publish +from openpype.hosts.maya.api.lib import ( + extract_alembic, + suspended_refresh, + maintained_selection, + iter_visible_nodes_in_range +) + + +@contextmanager +def renamed(original_name, renamed_name): + # type: (str, str) -> None + try: + cmds.rename(original_name, renamed_name) + yield + finally: + cmds.rename(renamed_name, original_name) + + +class ExtractUnrealSkeletalMeshAbc(publish.Extractor): + """Extract Unreal Skeletal Mesh as FBX from Maya. 
""" + + label = "Extract Unreal Skeletal Mesh - Alembic" + hosts = ["maya"] + families = ["skeletalMesh"] + optional = True + + def process(self, instance): + self.log.info("Extracting pointcache..") + + geo = cmds.listRelatives( + instance.data.get("geometry"), allDescendents=True, fullPath=True) + joints = cmds.listRelatives( + instance.data.get("joints"), allDescendents=True, fullPath=True) + + nodes = geo + joints + + attrs = instance.data.get("attr", "").split(";") + attrs = [value for value in attrs if value.strip()] + attrs += ["cbId"] + + attr_prefixes = instance.data.get("attrPrefix", "").split(";") + attr_prefixes = [value for value in attr_prefixes if value.strip()] + + # Define output path + staging_dir = self.staging_dir(instance) + filename = "{0}.abc".format(instance.name) + path = os.path.join(staging_dir, filename) + + # The export requires forward slashes because we need + # to format it into a string in a mel expression + path = path.replace('\\', '/') + + self.log.info("Extracting ABC to: {0}".format(path)) + self.log.info("Members: {0}".format(nodes)) + self.log.info("Instance: {0}".format(instance[:])) + + options = { + "step": instance.data.get("step", 1.0), + "attr": attrs, + "attrPrefix": attr_prefixes, + "writeVisibility": True, + "writeCreases": True, + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False), + "uvWrite": True, + "selection": True, + "worldSpace": instance.data.get("worldSpace", True) + } + + self.log.info("Options: {}".format(options)) + + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. + options["writeUVSets"] = True + + if not instance.data.get("includeParentHierarchy", True): + # Set the root nodes if we don't want to include parents + # The roots are to be considered the ones that are the actual + # direct members of the set + options["root"] = instance.data.get("setMembers") + + with suspended_refresh(suspend=instance.data.get("refresh", False)): + with maintained_selection(): + cmds.select(nodes, noExpand=True) + extract_alembic(file=path, + # startFrame=start, + # endFrame=end, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract ABC successful to: {0}".format(path)) From 7061b44d236baad6bc6a4a9f3b814e273dbb00bd Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:40:07 +0000 Subject: [PATCH 2323/2550] Fixed validator for the hierarchy to consider both geometry and joints --- .../maya/plugins/publish/validate_skeletalmesh_hierarchy.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py index 8221c18b17..398b6fb7bf 100644 --- a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py +++ b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py @@ -28,7 +28,9 @@ class ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin): parent.split("|")[1] for parent in (joints_parents + geo_parents) } - if len(set(parents_set)) != 1: + self.log.info(parents_set) + + if len(set(parents_set)) > 2: raise PublishXmlValidationError( self, "Multiple roots on geometry or joints." 
From 1eba935287fea5a876b56af583eec47d99ad7252 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:40:35 +0000 Subject: [PATCH 2324/2550] Implemented validator to check if the mesh is triangulated --- .../validate_skeletalmesh_triangulated.py | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py diff --git a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py new file mode 100644 index 0000000000..c0a9ddcf69 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +import pyblish.api + +from openpype.hosts.maya.api.action import ( + SelectInvalidAction, +) +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) + +from maya import cmds + + +class ValidateSkeletalMeshTriangulated(pyblish.api.InstancePlugin): + """Validates that the geometry has been triangulated.""" + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["skeletalMesh"] + label = "Skeletal Mesh Triangulated" + optional = True + actions = [ + SelectInvalidAction, + RepairAction + ] + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "The following objects needs to be triangulated: " + "{}".format(invalid)) + + @classmethod + def get_invalid(cls, instance): + geo = instance.data.get("geometry") + + invalid = [] + + for obj in cmds.listRelatives( + cmds.ls(geo), allDescendents=True, fullPath=True): + n_triangles = cmds.polyEvaluate(obj, triangle=True) + n_faces = cmds.polyEvaluate(obj, face=True) + + if not (isinstance(n_triangles, int) and isinstance(n_faces, int)): + continue + + # We check if the number of triangles is equal to the number of + # faces for each transform node. + # If it is, the object is triangulated. 
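+            # For example, a default polyCube has 6 quad faces: polyEvaluate
+            # reports 6 faces but 12 triangles, so the counts differ; after
+            # polyTriangulate both counts are 12 and the mesh passes.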
+ if cmds.objectType(obj, i="transform") and n_triangles != n_faces: + invalid.append(obj) + + return invalid + + @classmethod + def repair(cls, instance): + for node in cls.get_invalid(instance): + cmds.polyTriangulate(node) From 9a14dff1fa1274ccffcaa921e9818fa1b5ff0ada Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:51:10 +0000 Subject: [PATCH 2325/2550] Hound fixes --- .../maya/plugins/publish/extract_unreal_skeletalmesh_abc.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py index 9ce904b86d..e1f847f31a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -5,14 +5,11 @@ from contextlib import contextmanager from maya import cmds # noqa -import pyblish.api - from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( extract_alembic, suspended_refresh, - maintained_selection, - iter_visible_nodes_in_range + maintained_selection ) From 63508cf495fdf2a1f66234a855197fd56a0a072c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:58:27 +0000 Subject: [PATCH 2326/2550] Fixed missing attribute --- .../hosts/maya/plugins/create/create_unreal_skeletalmesh.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index 424f456310..6e72bf5324 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -60,6 +60,8 @@ class CreateUnrealSkeletalMesh(plugin.Creator): self.data["includeParentHierarchy"] = False # Default to exporting world-space self.data["worldSpace"] = True + # Default to suspend refresh. 
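+        # ("refresh" is read as the "suspend" argument of suspended_refresh()
+        # by the Alembic skeletal mesh extractor)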
+ self.data["refresh"] = False # Add options for custom attributes self.data["attr"] = "" From 8c1a63d15c96c10949b3a8146f77b26bd017819c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Dec 2022 21:59:00 +0800 Subject: [PATCH 2327/2550] fix the ancestor issues for bbox selection --- .../maya/plugins/publish/extract_proxy_abc.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index fd70c8506b..decaf410ac 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -96,13 +96,13 @@ class ExtractProxyAlembic(publish.Extractor): start=start, end=end)) inst_selection = cmds.ls(nodes, long=True) - bbox = cmds.geomToBBox(inst_selection, - nameSuffix=name_suffix, - keepOriginal=True, - single=False, - bakeAnimation=True, - startTime=start, - endTime=end) + cmds.geomToBBox(inst_selection, + nameSuffix=name_suffix, + keepOriginal=True, + single=False, + bakeAnimation=True, + startTime=start, + endTime=end) bbox_sel = cmds.ls(sl=True, long=True) # bbox_sel = cmds.listRelatives(allDescendents=True, fullPath=True, type="mesh") self.log.debug("proxy_root: {}".format(bbox_sel)) From b868e4be1ab4e4d72b83c3840f7b86c223383bfd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Dec 2022 22:00:36 +0800 Subject: [PATCH 2328/2550] fix the ancestor issues for bbox selection --- openpype/hosts/maya/plugins/publish/extract_proxy_abc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index decaf410ac..f348712d7c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -95,6 +95,7 @@ class ExtractProxyAlembic(publish.Extractor): nodes = list(iter_visible_nodes_in_range(nodes, start=start, end=end)) + inst_selection = cmds.ls(nodes, long=True) cmds.geomToBBox(inst_selection, nameSuffix=name_suffix, @@ -104,6 +105,6 @@ class ExtractProxyAlembic(publish.Extractor): startTime=start, endTime=end) bbox_sel = cmds.ls(sl=True, long=True) - # bbox_sel = cmds.listRelatives(allDescendents=True, fullPath=True, type="mesh") + self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel From 22e664c96e20bbf428b90b46d88383ac84e5d7e0 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 5 Dec 2022 14:45:18 +0000 Subject: [PATCH 2329/2550] Indicate sequence or single frame. --- openpype/hosts/maya/plugins/publish/collect_ass.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 7c9a1b76fb..3ce1f2ccf1 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,3 +1,5 @@ +import re + from maya import cmds import pyblish.api @@ -27,4 +29,10 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) + # Indicate to user that it'll be a single frame. 
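+        # e.g. an instance label such as "assMain [1001-1100]" (illustrative)
+        # is trimmed to just "assMain"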
+ sequence = instance.data.get("exportSequence", False) + if not sequence: + group = re.compile(r" \[.*\]") + instance.data["label"] = group.sub("", instance.data["label"]) + self.log.debug("data: {}".format(instance.data)) From 0151540fdd91f92c51b78a21a529c2ae913b69a9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:24:56 +0100 Subject: [PATCH 2330/2550] added collector which can add comment per instance --- openpype/plugins/publish/collect_comment.py | 22 +++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 062142ace9..a2aef7fc1c 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -6,6 +6,28 @@ Provides: """ import pyblish.api +from openpype.lib.attribute_definitions import TextDef +from openpype.pipeline.publish import OpenPypePyblishPluginMixin + + +class CollectInstanceCommentDef( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): + label = "Comment per instance" + targets = ["local"] + # Disable plugin by default + families = ["*"] + enabled = True + + def process(self, instance): + pass + + @classmethod + def get_attribute_defs(cls): + return [ + TextDef("comment", label="Comment") + ] class CollectComment(pyblish.api.ContextPlugin): From f7f5019401adf912cfba48d9d939af7492c9e3a6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:25:14 +0100 Subject: [PATCH 2331/2550] CollectComment plugin also store comment on each instance --- openpype/plugins/publish/collect_comment.py | 86 ++++++++++++++++++--- 1 file changed, 76 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index a2aef7fc1c..a1b4e1364a 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -1,8 +1,26 @@ -""" -Requires: - None -Provides: - context -> comment (str) +"""Collect comment and add option to enter comment per instance. + +Combination of plugins. One define optional input for instances in Publisher +UI (CollectInstanceCommentDef) and second cares that each instance during +collection has available "comment" key in data (CollectComment). + +Plugin 'CollectInstanceCommentDef' define "comment" attribute which won't be +filled with any value if instance does not match families filter or when +plugin is disabled. + +Plugin 'CollectComment' makes sure that each instance in context has +available "comment" key in data which can be set to 'str' or 'None' if is not +set. +- In case instance already has filled comment the plugin's logic is skipped +- The comment is always set and value should be always 'str' even if is empty + +Why are separated: +- 'CollectInstanceCommentDef' can have specific settings to show comment + attribute only to defined families in publisher UI +- 'CollectComment' will run all the time + +Todos: + The comment per instance is not sent via farm. """ import pyblish.api @@ -31,11 +49,59 @@ class CollectInstanceCommentDef( class CollectComment(pyblish.api.ContextPlugin): - """This plug-ins displays the comment dialog box per default""" + """Collect comment per each instance. - label = "Collect Comment" - order = pyblish.api.CollectorOrder + Plugin makes sure each instance to publish has set "comment" in data so any + further plugin can use it directly. 
+ """ + + label = "Collect Instance Comment" + order = pyblish.api.CollectorOrder + 0.49 def process(self, context): - comment = (context.data.get("comment") or "").strip() - context.data["comment"] = comment + context_comment = self.cleanup_comment(context.data.get("comment")) + # Set it back + context.data["comment"] = context_comment + for instance in context: + instance_label = str(instance) + # Check if comment is already set + instance_comment = self.cleanup_comment( + instance.data.get("comment")) + + # If comment on instance is not set then look for attributes + if not instance_comment: + attr_values = self.get_attr_values_from_data_for_plugin( + CollectInstanceCommentDef, instance.data + ) + instance_comment = self.cleanup_comment( + attr_values.get("comment") + ) + + # Use context comment if instance has all options of comment + # empty + if not instance_comment: + instance_comment = context_comment + + instance.data["comment"] = instance_comment + if instance_comment: + msg_end = " has comment set to: \"{}\"".format( + instance_comment) + else: + msg_end = " does not have set comment" + self.log.debug("Instance {} {}".format(instance_label, msg_end)) + + def cleanup_comment(self, comment): + """Cleanup comment value. + + Args: + comment (Union[str, None]): Comment value from data. + + Returns: + str: Cleaned comment which is stripped or empty string if input + was 'None'. + """ + + if comment: + return comment.strip() + return "" + From fd5ac3be1bc975d9e3329c835d583d269ec7c575 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:29:01 +0100 Subject: [PATCH 2332/2550] added settings for the attribute collector --- openpype/plugins/publish/collect_comment.py | 18 ++++++++++++++-- .../defaults/project_settings/global.json | 4 ++++ .../schemas/schema_global_publish.json | 21 +++++++++++++++++++ 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index a1b4e1364a..db5a04681b 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -35,12 +35,26 @@ class CollectInstanceCommentDef( label = "Comment per instance" targets = ["local"] # Disable plugin by default - families = ["*"] - enabled = True + families = [] + enabled = False def process(self, instance): pass + @classmethod + def apply_settings(cls, project_setting, _): + plugin_settings = project_setting["global"]["publish"].get( + "collect_comment_per_instance" + ) + if not plugin_settings: + return + + if plugin_settings.get("enabled") is not None: + cls.enabled = plugin_settings["enabled"] + + if plugin_settings.get("families") is not None: + cls.families = plugin_settings["families"] + @classmethod def get_attribute_defs(cls): return [ diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 46b8b1b0c8..89d7cf08b7 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -24,6 +24,10 @@ ], "skip_hosts_headless_publish": [] }, + "collect_comment_per_instance": { + "enabled": false, + "families": [] + }, "ValidateEditorialAssetName": { "enabled": true, "optional": false diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 742437fbde..f2ada5fd8d 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -60,6 +60,27 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "collect_comment_per_instance", + "label": "Collect comment per instance", + "checkbox_key": "enabled", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, From 1f05a3952262a342a72e8308643c6d1a7a0ffdba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:31:09 +0100 Subject: [PATCH 2333/2550] use comment from instance where possible --- openpype/hosts/nuke/plugins/publish/extract_slate_frame.py | 2 +- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 +- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + .../ftrack/plugins/publish/integrate_ftrack_description.py | 2 +- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 2 +- openpype/plugins/publish/extract_burnin.py | 2 +- openpype/plugins/publish/integrate.py | 2 +- openpype/plugins/publish/integrate_legacy.py | 2 +- 8 files changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index e7197b4fa8..06c086b10d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -298,7 +298,7 @@ class ExtractSlateFrame(publish.Extractor): def add_comment_slate_node(self, instance, node): - comment = instance.context.data.get("comment") + comment = instance.data["comment"] intent = instance.context.data.get("intent") if not isinstance(intent, dict): intent = { diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea44a24459..038ee4fc03 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -38,7 +38,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) - self._comment = context.data.get("comment", "") + self._comment = instance.data["comment"] self._deadline_user = context.data.get( "deadlineUser", getpass.getuser()) self._frame_start = int(instance.data["frameStart"]) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 249211e965..45688e8584 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -777,6 +777,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "handleEnd": handle_end, "frameStartHandle": start - handle_start, "frameEndHandle": end + handle_end, + "comment": instance.data["comment"], "fps": fps, "source": source, "extendFrames": data.get("extendFrames"), diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index e7c265988e..6ed02bc8b6 100644 --- 
a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -38,7 +38,7 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): self.log.info("There are any integrated AssetVersions") return - comment = (instance.context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index ac3fa874e0..6776509dda 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -45,7 +45,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): host_name = context.data["hostName"] app_name = context.data["appName"] app_label = context.data["appLabel"] - comment = (context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 4179199317..fd8dfdece9 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -468,7 +468,7 @@ class ExtractBurnin(publish.Extractor): burnin_data.update({ "version": int(version), - "comment": context.data.get("comment") or "" + "comment": instance.data["comment"] }) intent_label = context.data.get("intent") or "" diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 7e4fc84658..57a642c635 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -772,7 +772,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": instance.data.get("fps", context.data.get("fps")) } diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c..670b637faa 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -968,7 +968,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": context.data.get( "fps", instance.data.get("fps") From 5d24bfcf6318fa4fec1267612c933989fa2beb22 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:31:31 +0100 Subject: [PATCH 2334/2550] commit forgotten change of getting attribute values from plugin --- openpype/pipeline/publish/publish_plugins.py | 26 ++++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 6e2be1ce2c..47dfaf6b98 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,3 +1,4 @@ +import inspect from abc import ABCMeta import pyblish.api @@ -132,6 +133,25 @@ class OpenPypePyblishPluginMixin: ) return attribute_values + @staticmethod + def get_attr_values_from_data_for_plugin(plugin, data): + """Get attribute values for 
attribute definitions from data. + + Args: + plugin (Union[publish.api.Plugin, Type[publish.api.Plugin]]): The + plugin for which attributes are extracted. + data(dict): Data from instance or context. + """ + + if not inspect.isclass(plugin): + plugin = plugin.__class__ + + return ( + data + .get("publish_attributes", {}) + .get(plugin.__name__, {}) + ) + def get_attr_values_from_data(self, data): """Get attribute values for attribute definitions from data. @@ -139,11 +159,7 @@ class OpenPypePyblishPluginMixin: data(dict): Data from instance or context. """ - return ( - data - .get("publish_attributes", {}) - .get(self.__class__.__name__, {}) - ) + return self.get_attr_values_from_data_for_plugin(self.__class__, data) class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): From e6585be6772006ca748c8fbf5697f981bcd0de12 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:35:03 +0100 Subject: [PATCH 2335/2550] fix missing method --- openpype/plugins/publish/collect_comment.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index db5a04681b..83609a04bd 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -62,7 +62,10 @@ class CollectInstanceCommentDef( ] -class CollectComment(pyblish.api.ContextPlugin): +class CollectComment( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): """Collect comment per each instance. Plugin makes sure each instance to publish has set "comment" in data so any From 6e520f564bfe58aa23c1430d175d30dccd95eb40 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 09:52:36 +0100 Subject: [PATCH 2336/2550] removed redundant line --- openpype/plugins/publish/collect_comment.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 83609a04bd..12579cd957 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -121,4 +121,3 @@ class CollectComment( if comment: return comment.strip() return "" - From fffe1162b0ab68dbd3ea5e0e234c4b2b6cf51ac8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 10:00:16 +0100 Subject: [PATCH 2337/2550] fix import --- openpype/plugins/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index db567f8b8f..37a5e90f86 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -7,7 +7,7 @@ from openpype.client import ( get_last_versions, get_representations, ) -from openpype.pipeline import get_representation_path_with_anatomy +from openpype.pipeline.load import get_representation_path_with_anatomy class CollectAudio(pyblish.api.ContextPlugin): From ad1380541ff48ebc495baacfcfd65c83952f81a7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 10:17:03 +0100 Subject: [PATCH 2338/2550] fix fields query --- openpype/plugins/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 37a5e90f86..3a0ddb3281 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -121,7 +121,7 @@ class CollectAudio(pyblish.api.ContextPlugin): asset_docs = 
get_assets( project_name, asset_names=asset_names, - fields=["_id"] + fields=["_id", "name"] ) asset_id_by_name = {} From 89c5fdfb27c40e1a9797730830ef5ec8e38c4af7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 6 Dec 2022 14:31:00 +0100 Subject: [PATCH 2339/2550] Fix: Template path wrong normpath for cross platform --- openpype/pipeline/load/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 784d4628f3..bfa9fe07c7 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -555,7 +555,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): """ try: - template = repre_doc["data"]["template"] + template = repre_doc["data"]["template"].replace("\\", "/") except KeyError: raise InvalidRepresentationContext(( From 63b47efc51f92c8082ad76a3154a45703e80423c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:02:03 +0100 Subject: [PATCH 2340/2550] integrate thumbnail looks for thumbnail to multiple places --- .../plugins/publish/integrate_thumbnail.py | 58 +++++++++++++++++-- 1 file changed, 54 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index f74c3d9609..cd472a7e6b 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -102,8 +102,56 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): thumbnail_root ) + def _get_thumbnail_from_instance(self, instance): + # 1. Look for thumbnail path on instance in 'thumbnailPath' + thumbnail_path = instance.data.get("thumbnailPath") + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path + + # 2. Look for thumbnail in published representations + published_repres = instance.data.get("published_representations") + path = self._get_thumbnail_path_from_published(published_repres) + if path and os.path.exists(path): + return path + + if path: + self.log.warning( + "Could not find published thumbnail path {}".format(path) + ) + + # 3. Look for thumbnail in "not published" representations + repres = instance.data.get("representations") + if not repres: + return None + + thumbnail_repre = next( + ( + repre + for repre in repres + if repre["name"] == "thumbnail" + ), + None + ) + if not thumbnail_repre: + return None + + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") + + filename = thumbnail_repre.get("files") + if not staging_dir or not filename: + return None + + if isinstance(filename, (list, tuple, set)): + filename = filename[0] + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _prepare_instances(self, context): - context_thumbnail_path = context.get("thumbnailPath") + context_thumbnail_path = context.data.get("thumbnailPath") valid_context_thumbnail = False if context_thumbnail_path and os.path.exists(context_thumbnail_path): valid_context_thumbnail = True @@ -122,8 +170,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): continue # Find thumbnail path on instance - thumbnail_path = self._get_instance_thumbnail_path( - published_repres) + thumbnail_path = self._get_thumbnail_from_instance(instance) if thumbnail_path: self.log.debug(( "Found thumbnail path for instance \"{}\"." 
@@ -157,7 +204,10 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): for repre_info in published_representations.values(): return repre_info["representation"]["parent"] - def _get_instance_thumbnail_path(self, published_representations): + def _get_thumbnail_path_from_published(self, published_representations): + if not published_representations: + return None + thumb_repre_doc = None for repre_info in published_representations.values(): repre_doc = repre_info["representation"] From 21411d50624385122d200b0d0317a54b26d83e50 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:14:32 +0100 Subject: [PATCH 2341/2550] store thumbnail path to instance data --- openpype/plugins/publish/extract_thumbnail_from_source.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 8da1213807..03df1455e2 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -73,6 +73,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): "Adding thumbnail representation: {}".format(new_repre) ) instance.data["representations"].append(new_repre) + instance.data["thumbnailPath"] = dst_filepath def _create_thumbnail(self, context, thumbnail_source): if not thumbnail_source: From aa704b40eaa42bd3e4184dd6b754cfcf8f3069f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:14:52 +0100 Subject: [PATCH 2342/2550] change order of thumbnail path resolving --- .../plugins/publish/integrate_thumbnail.py | 40 ++++++++----------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index cd472a7e6b..f1455dc66b 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -103,12 +103,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): ) def _get_thumbnail_from_instance(self, instance): - # 1. Look for thumbnail path on instance in 'thumbnailPath' - thumbnail_path = instance.data.get("thumbnailPath") - if thumbnail_path and os.path.exists(thumbnail_path): - return thumbnail_path - - # 2. Look for thumbnail in published representations + # 1. Look for thumbnail in published representations published_repres = instance.data.get("published_representations") path = self._get_thumbnail_path_from_published(published_repres) if path and os.path.exists(path): @@ -119,34 +114,33 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): "Could not find published thumbnail path {}".format(path) ) - # 3. Look for thumbnail in "not published" representations + # 2. 
Look for thumbnail in "not published" representations repres = instance.data.get("representations") - if not repres: - return None - thumbnail_repre = next( ( repre - for repre in repres + for repre in repres or [] if repre["name"] == "thumbnail" ), None ) - if not thumbnail_repre: - return None + if thumbnail_repre: + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") - staging_dir = thumbnail_repre.get("stagingDir") - if not staging_dir: - staging_dir = instance.data.get("stagingDir") + filename = thumbnail_repre.get("files") + if isinstance(filename, (list, tuple, set)): + filename = filename[0] - filename = thumbnail_repre.get("files") - if not staging_dir or not filename: - return None + if staging_dir and filename: + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path - if isinstance(filename, (list, tuple, set)): - filename = filename[0] - thumbnail_path = os.path.join(staging_dir, filename) - if os.path.exists(thumbnail_path): + # 3. Look for thumbnail path on instance in 'thumbnailPath' + thumbnail_path = instance.data.get("thumbnailPath") + if thumbnail_path and os.path.exists(thumbnail_path): return thumbnail_path return None From 505cf706f2041bdeecf6ff04c572276e3d446391 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Dec 2022 15:16:38 +0100 Subject: [PATCH 2343/2550] DL: refactory env var processing --- .../plugins/publish/submit_publish_job.py | 89 ++++++++++--------- 1 file changed, 47 insertions(+), 42 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 45688e8584..3e3ef03e66 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -126,22 +126,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "harmony": [r".*"], # for everything from AE "celaction": [r".*"]} - enviro_filter = [ + enviro_job_filter = [ + "OPENPYPE_METADATA_FILE", + "OPENPYPE_PUBLISH_JOB", + "OPENPYPE_RENDER_JOB", + "OPENPYPE_LOG_NO_COLORS" + ] + + enviro_keys = [ "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", - "OPENPYPE_METADATA_FILE", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", "AVALON_APP_NAME", - "OPENPYPE_PUBLISH_JOB" - - "OPENPYPE_LOG_NO_COLORS", "OPENPYPE_USERNAME", - "OPENPYPE_RENDER_JOB", - "OPENPYPE_PUBLISH_JOB", - "OPENPYPE_MONGO", "OPENPYPE_VERSION" ] @@ -223,29 +220,41 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): instance_version = instance.data.get("version") # take this if exists if instance_version != 1: override_version = instance_version - output_dir = self._get_publish_folder(instance.context.data['anatomy'], - deepcopy( - instance.data["anatomyData"]), - instance.data.get("asset"), - instances[0]["subset"], - 'render', - override_version) + output_dir = self._get_publish_folder( + instance.context.data['anatomy'], + deepcopy(instance.data["anatomyData"]), + instance.data.get("asset"), + instances[0]["subset"], + 'render', + override_version + ) # Transfer the environment from the original job to this dependent # job so they use the same environment metadata_path, roothless_metadata_path = \ - self._create_metadata_path(instance) + self._create_metadata_path(instance) + + environment = { + "AVALON_PROJECT": legacy_io.Session["AVALON_PROJECT"], + "AVALON_ASSET": legacy_io.Session["AVALON_ASSET"], + 
"AVALON_TASK": legacy_io.Session["AVALON_TASK"], + "OPENPYPE_LOG_NO_COLORS": "1", + "OPENPYPE_USERNAME": instance.context.data["user"], + "OPENPYPE_PUBLISH_JOB": "1", + "OPENPYPE_RENDER_JOB": "0" + } + + # add environments from self.enviro_keys + for env_key in self.enviro_keys: + if os.getenv(env_key): + environment[env_key] = os.environ[env_key] + + # pass environment keys from self.enviro_job_filter + job_environ = job["Props"].get("Env", {}) + for env_j_key in self.enviro_job_filter: + if job_environ.get(env_j_key): + environment[env_j_key] = job_environ[env_j_key] - environment = job["Props"].get("Env", {}) - environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"] - environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] - environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] - environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") - environment["OPENPYPE_VERSION"] = os.environ.get("OPENPYPE_VERSION") - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_USERNAME"] = instance.context.data["user"] - environment["OPENPYPE_PUBLISH_JOB"] = "1" - environment["OPENPYPE_RENDER_JOB"] = "0" # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): mongo_url = os.environ.get("OPENPYPE_MONGO") @@ -309,19 +318,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if instance.data.get("suspend_publish"): payload["JobInfo"]["InitialStatus"] = "Suspended" - index = 0 - for key in environment: - if key.upper() in self.enviro_filter: - payload["JobInfo"].update( - { - "EnvironmentKeyValue%d" - % index: "{key}={value}".format( - key=key, value=environment[key] - ) - } - ) - index += 1 - + for index, (key_, value_) in enumerate(environment.items()): + payload["JobInfo"].update( + { + "EnvironmentKeyValue%d" + % index: "{key}={value}".format( + key=key_, value=value_ + ) + } + ) # remove secondary pool payload["JobInfo"].pop("SecondaryPool", None) From 608afc35465bf17d541cc58e8922e36580949787 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:19:04 +0100 Subject: [PATCH 2344/2550] move unpublished representations logic to separated method --- .../plugins/publish/integrate_thumbnail.py | 57 ++++++++++++------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index f1455dc66b..809a1782e0 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -115,28 +115,9 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): ) # 2. Look for thumbnail in "not published" representations - repres = instance.data.get("representations") - thumbnail_repre = next( - ( - repre - for repre in repres or [] - if repre["name"] == "thumbnail" - ), - None - ) - if thumbnail_repre: - staging_dir = thumbnail_repre.get("stagingDir") - if not staging_dir: - staging_dir = instance.data.get("stagingDir") - - filename = thumbnail_repre.get("files") - if isinstance(filename, (list, tuple, set)): - filename = filename[0] - - if staging_dir and filename: - thumbnail_path = os.path.join(staging_dir, filename) - if os.path.exists(thumbnail_path): - return thumbnail_path + thumbnail_path = self._get_thumbnail_path_from_unpublished(instance) + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path # 3. 
Look for thumbnail path on instance in 'thumbnailPath' thumbnail_path = instance.data.get("thumbnailPath") @@ -223,6 +204,38 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): return None return os.path.normpath(path) + def _get_thumbnail_path_from_unpublished(self, instance): + repres = instance.data.get("representations") + if not repres: + return None + + thumbnail_repre = next( + ( + repre + for repre in repres + if repre["name"] == "thumbnail" + ), + None + ) + if not thumbnail_repre: + return None + + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") + + filename = thumbnail_repre.get("files") + if not staging_dir or not filename: + return None + + if isinstance(filename, (list, tuple, set)): + filename = filename[0] + + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _integrate_thumbnails( self, filtered_instance_items, From e989db4e004a6fbc9487d74659787e02c7e2bad7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Dec 2022 15:27:39 +0100 Subject: [PATCH 2345/2550] pep8 --- .../deadline/plugins/publish/submit_publish_job.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 3e3ef03e66..5ed8c83412 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -126,14 +126,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "harmony": [r".*"], # for everything from AE "celaction": [r".*"]} - enviro_job_filter = [ + environ_job_filter = [ "OPENPYPE_METADATA_FILE", "OPENPYPE_PUBLISH_JOB", "OPENPYPE_RENDER_JOB", "OPENPYPE_LOG_NO_COLORS" ] - enviro_keys = [ + environ_keys = [ "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", @@ -232,7 +232,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # Transfer the environment from the original job to this dependent # job so they use the same environment metadata_path, roothless_metadata_path = \ - self._create_metadata_path(instance) + self._create_metadata_path(instance) environment = { "AVALON_PROJECT": legacy_io.Session["AVALON_PROJECT"], @@ -244,14 +244,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "OPENPYPE_RENDER_JOB": "0" } - # add environments from self.enviro_keys - for env_key in self.enviro_keys: + # add environments from self.environ_keys + for env_key in self.environ_keys: if os.getenv(env_key): environment[env_key] = os.environ[env_key] - # pass environment keys from self.enviro_job_filter + # pass environment keys from self.environ_job_filter job_environ = job["Props"].get("Env", {}) - for env_j_key in self.enviro_job_filter: + for env_j_key in self.environ_job_filter: if job_environ.get(env_j_key): environment[env_j_key] = job_environ[env_j_key] From 4c102b51d6a50ed362ed164b76a72a67349e88f5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 6 Dec 2022 15:41:03 +0100 Subject: [PATCH 2346/2550] :memo: add documentation about testing on deadline --- website/docs/assets/deadline_job_version.png | Bin 0 -> 32810 bytes website/docs/dev_deadline.md | 38 +++++++++++++++++++ website/sidebars.js | 3 +- 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 website/docs/assets/deadline_job_version.png create mode 100644 website/docs/dev_deadline.md diff --git 
a/website/docs/assets/deadline_job_version.png b/website/docs/assets/deadline_job_version.png
new file mode 100644
index 0000000000000000000000000000000000000000..0b78d6a35c9d1a9a26885511a8c171695285293e
GIT binary patch
literal 32810
[32810 bytes of base85-encoded PNG data for website/docs/assets/deadline_job_version.png omitted]
z$m|Zs#Fk8Z`Dt`Vr}>+XOOoM>%Vwcy-ZL{YZYJax!i=du2Ib=x1S|G}nBI)%ltV#Q z0tT+#6vqPWr+?HNlv3feU)st%=QX>9)_l!CC^&zI$^#XGBiojqqN;kvVxtG z)WgczEgTSMMJDt;`OOcrE(xHoi+K;>1E38Fx$zeD_ahw<3==k2dcB*4*Ls{j60P;fG3V!l52ITg6w)fZ2}d<{oYdy!YT_ME z2LvZxk+wHs`oor(Y0VF9%2~;pi%CZfpl>S0V~eE|7KOq`HKnQk^|m*8M?Y+(wx1(R zmr^9B3K4aXYS`|4(_)uIuK!KO;_PwmW5Rm?;rtCfi^y#~{ZUE2NAiZ1eL0Dc;ZCAw z+)NiM!jcpp6v%t{F+c@iHWk) z@c8hFxMWwI+(F~!`I%z}L3-snpr3P?jlEuYpoW@-jE-#vih$Gq0K_*pbsx<973y+) z7nTy)`-H)pWHD5q5kk78&W0GMjC)$!;$qX=rZb!x@jX&x$~e}Z0w9LFoi4!jXNA-N ztF1E0S`}dm^!g{7uByc9FAJ`Pb%f>Yr46R5x2_mou}^x3aMJV2?LW^*T?NuMHGDw@ zpCeL(qAAUsxhzdPF~0=|!?SD`{6Y6pgudB7I&t^>5gg9F}L?BVX*vl=ji+(r8gtu853EvY>o*ey1;nM@ zFr()+S24b`FPzWeSG>rVN;=9^eW|Q~qZoD(3`uiP?jt;;Ju(Dt#(Fd?MSnwOv?QuJ zSO1hebP)4dNei69{awwyPATxlsx$b_#<|N_g{#WmOjo{$%S`CdJl@bauql(fjh)-B z7_dFpl*lU1PE%(&)9{@~wX3S$M0fG<6Y;Uvg)7F-dInJj1D8tRB5PJ;Jgh;=quztN zCb(-U8NYY)bBNSB{HOZh@10D(PzmFa9S=*P^K~uc30@o_PESrPu=XyvCDp5hStzqy znBd4Z4-n;I+0R+iP3vbmEOp-TP~<{)*F2HrS|o^oHoz=SFfr$ODw7M^R+Zq)?vq6P ztc)WQNGyzUf*ig(OT73^TOxu3RSGSPG&nzv>XZVXBn}*X`A{28)WJ9ad;I;tP-TgiIAkhNK6N<(vMY^>g%GGlt?s3p_DZQ8=$(p&uF5yX$zy(IYL^p$f&iuHMDhi_N+>2n=}!ST@&-L_0C8oEHs^rsB-A84jEu*p`Gl=p zThqb%+0Ut0`M;^e(epAPSO%GDzOIzk1`<-yKrV4gEKXzOTv7B*PQ=8xqjtiyIi_Y( zPrH^oyYq-Q)n-x=+fz77*n*EkwHll?>R>VzzQ0)Y1(|FOF($mkTUyw0np*8lcVHD`?uPF{O0F5Y(+xIhOx{_n5a>NTErEdcZA-3mHNZ5S5_xN zdAL#9ZLlLS?DM}P#`mWnrj=qZH*g5Opo%+aA$jks_@{W} zOnX-vB&5DhMJE9&T#ERN0zBA~u2t}O*_Yc(9rxt6SB1%W9xF-`}X*+VKt zCnYzSjXWIM1gnDNo@|bo9Vz*P76c#2V(2H5YVBpNlPCHlAIdd(b370=r7^>;fyR8~ z#a9U&1kkcQjK{@nQ>pJ}v>`0(eF|85JkmPkvutaC9il-ojQXe(his6|6?1`|Spx|k z3HNtCm8J)9)Zy*F!c1>M;rwjMzsX6|Y$1T|(0pbD6+DLV!1oi$uN+i}L*zzSa718K z>llIoru7p9Dz(vd0r)Bp_x5dHkoDeDao$;4b`vz}n zZ&Bie`m*+Zy%cY1H-`BjWo&|p$eB-bl!C>)N}i-co>^)gN|%mY=t5X(0?iC0nH#Qj z1g_tEz~?(c20S!CFj&xOKM%h0rW*bvPYIj?!&bYTZZP27^?PjUcgXu+cEn@n6q z3_A{0#$a5G1rTz-5Oxnfn0%y0C|cLPEaBU`XM}YsqmG9AfiaUlg7(p(lB#(m98c!! zkBg~TA5YO7H;6G5|7F;u?hX+y)+VrLTy(Ep#-+>7!o0uD7sdXIv`I#Qbb0iec|*4( z#-tr!NWf{abKf-~o6%PFw1$w-xi#LoPR}JiK;Hmc*{*_89fj1|#ad06%X@S3bEQ_Xl5l^f*eu6E8_w>sr5jiGt)7fv&R;{+B?>w(A_?4hfpQqR)Uwn?Fu`O3m{5|cw7MdJ)6|f zvITx?BqXUZ@T`-M3?1M1Cq*t}IE>HMs&^K8FxDEcK>LdCc`qa@K}BJ0?%AD9X-{^8 zeWuFn!a5#T_HgX+J{cb4WEhrkk_}=?I+DVqhTgO8My!P7s`7It@Th%;i>fO5$=Xul zwvZkA>-;i?E4IqHahb>xNKRsOh)E{95%os-!GFIT|AS?g7E082AB#p=V*;q;(s=K0 z>?BW0O4~z_cZjkz8NGc^h!2(LrZ2v~-!w&+phc7gk(ss12;fK=}yi4atamT%(hL|A!H z{CPb1IqeVaMK>Gq!7;^f_V6=*?AaXmiGWAJir<^v9L2e_IIo^A;x?_K5uSyb>9FA8 zDyWp5MhQuJtmO?6XF` zQ^4J0%o7Am*@z7b{gcQ}%4?3q+rPA2!-t7doh7jKPL8?cEFrss|7j;hs zcV_=RLSqDZd*~4jNskJQ{G8aQ#`#k;uCxr=zwutf;4Jcq$o{|@Cx|srx_Sv#=o`>`413 z)ck~XsSw$bxzMsPCp4wVF%ig_1yegspobCwE#6+&Ac~niw_NQcWXia=a%f^1L_a=} n86M@9Y2e>M`*&0bY|lHYF~TK}rd*2y1o$N-CNEkgY#8((aTkC3 literal 0 HcmV?d00001 diff --git a/website/docs/dev_deadline.md b/website/docs/dev_deadline.md new file mode 100644 index 0000000000..310b2e0983 --- /dev/null +++ b/website/docs/dev_deadline.md @@ -0,0 +1,38 @@ +--- +id: dev_deadline +title: Deadline integration +sidebar_label: Deadline integration +toc_max_heading_level: 4 +--- + +Deadline is not host as usual, it is missing most of the host features, but it does have +its own set of publishing plugins. + +## How to test OpenPype on Deadline + +### Versions + +Since 3.14 job submitted from OpenPype is bound to OpenPype version used to submit it. So +if you submit job with 3.14.8, Deadline will try to find that particular version and use it +for rendering. 
+
+### Testing
+
+To test various changes you don't need to build OpenPype again and again and copy it to
+the directory where Deadline looks for versions; that is only needed on a minor version
+change. The deployed build will then be used to bootstrap whatever version is set on the
+job or in the studio Settings.
+
+You can either use a zip version if that suits you, or, better, expose your sources
+directory so it is found as a version, for example with a symlink.
+
+That way you only need to modify the `OPENPYPE_VERSION` variable on the job to point it
+to the version you would like to test.
\ No newline at end of file
diff --git a/website/sidebars.js b/website/sidebars.js
index af64282d61..f2d9ffee06 100644
--- a/website/sidebars.js
+++ b/website/sidebars.js
@@ -157,6 +157,7 @@ module.exports = {
                 "dev_host_implementation",
                 "dev_publishing"
             ]
-        }
+        },
+        "dev_deadline"
     ]
 };

From c764dc20c641bb6ef58df1c4b29f7490b6417276 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Tue, 6 Dec 2022 16:00:49 +0100
Subject: [PATCH 2347/2550] normalize paths when added to queue

---
 openpype/lib/file_transaction.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py
index 1626bec6b6..ce7ef100c1 100644
--- a/openpype/lib/file_transaction.py
+++ b/openpype/lib/file_transaction.py
@@ -66,8 +66,8 @@ class FileTransaction(object):
         """Add a new file to transfer queue"""
         opts = {"mode": mode}
 
-        src = os.path.abspath(src)
-        dst = os.path.abspath(dst)
+        src = os.path.normpath(os.path.abspath(src))
+        dst = os.path.normpath(os.path.abspath(dst))
 
         if dst in self._transfers:
             queued_src = self._transfers[dst][0]

From 18a9c5568426f6b67dc23d90742c6ac140e38800 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Tue, 6 Dec 2022 16:02:21 +0100
Subject: [PATCH 2348/2550] skip if source and destination are the same paths

---
 openpype/lib/file_transaction.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py
index ce7ef100c1..4aedc62fb6 100644
--- a/openpype/lib/file_transaction.py
+++ b/openpype/lib/file_transaction.py
@@ -84,9 +84,11 @@ class FileTransaction(object):
         self._transfers[dst] = (src, opts)
 
     def process(self):
-
         # Backup any existing files
-        for dst in self._transfers.keys():
+        for dst, (src, opts) in self._transfers.items():
+            if not os.path.isdir(src) and dst == src:
+                continue
+
             if os.path.exists(dst):
                 # Backup original file
                 # todo: add timestamp or uuid to ensure unique
@@ -98,6 +100,12 @@ class FileTransaction(object):
 
         # Copy the files to transfer
         for dst, (src, opts) in self._transfers.items():
+            if not os.path.isdir(src) and dst == src:
+                self.log.debug(
+                    "Source and destionation are same files {} -> {}".format(
+                        src, dst))
+                continue
+
             self._create_folder_for_file(dst)
 
             if opts["mode"] == self.MODE_COPY:

From 36dcab11c1c54cec6040456de8ec74ee20635111 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Tue, 6 Dec 2022 16:02:52 +0100
Subject: [PATCH 2349/2550] 
formatting changes --- openpype/lib/file_transaction.py | 60 ++++++++++++++++++-------------- 1 file changed, 33 insertions(+), 27 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 4aedc62fb6..2d706adaef 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -14,9 +14,9 @@ else: class FileTransaction(object): - """ + """File transaction with rollback options. - The file transaction is a three step process. + The file transaction is a three-step process. 1) Rename any existing files to a "temporary backup" during `process()` 2) Copy the files to final destination during `process()` @@ -39,14 +39,12 @@ class FileTransaction(object): Warning: Any folders created during the transfer will not be removed. - """ MODE_COPY = 0 MODE_HARDLINK = 1 def __init__(self, log=None): - if log is None: log = logging.getLogger("FileTransaction") @@ -63,7 +61,14 @@ class FileTransaction(object): self._backup_to_original = {} def add(self, src, dst, mode=MODE_COPY): - """Add a new file to transfer queue""" + """Add a new file to transfer queue. + + Args: + src (str): Source path. + dst (str): Destination path. + mode (MODE_COPY, MODE_HARDLINK): Transfer mode. + """ + opts = {"mode": mode} src = os.path.normpath(os.path.abspath(src)) @@ -72,14 +77,15 @@ class FileTransaction(object): if dst in self._transfers: queued_src = self._transfers[dst][0] if src == queued_src: - self.log.debug("File transfer was already " - "in queue: {} -> {}".format(src, dst)) + self.log.debug( + "File transfer was already in queue: {} -> {}".format( + src, dst)) return else: self.log.warning("File transfer in queue replaced..") - self.log.debug("Removed from queue: " - "{} -> {}".format(queued_src, dst)) - self.log.debug("Added to queue: {} -> {}".format(src, dst)) + self.log.debug( + "Removed from queue: {} -> {} replaced by {} -> {}".format( + queued_src, dst, src, dst)) self._transfers[dst] = (src, opts) @@ -94,8 +100,8 @@ class FileTransaction(object): # todo: add timestamp or uuid to ensure unique backup = dst + ".bak" self._backup_to_original[backup] = dst - self.log.debug("Backup existing file: " - "{} -> {}".format(dst, backup)) + self.log.debug( + "Backup existing file: {} -> {}".format(dst, backup)) os.rename(dst, backup) # Copy the files to transfer @@ -112,8 +118,8 @@ class FileTransaction(object): self.log.debug("Copying file ... {} -> {}".format(src, dst)) copyfile(src, dst) elif opts["mode"] == self.MODE_HARDLINK: - self.log.debug("Hardlinking file ... {} -> {}".format(src, - dst)) + self.log.debug("Hardlinking file ... 
{} -> {}".format( + src, dst)) create_hard_link(src, dst) self._transferred.append(dst) @@ -124,23 +130,21 @@ class FileTransaction(object): try: os.remove(backup) except OSError: - self.log.error("Failed to remove backup file: " - "{}".format(backup), - exc_info=True) + self.log.error( + "Failed to remove backup file: {}".format(backup), + exc_info=True) def rollback(self): - errors = 0 - # Rollback any transferred files for path in self._transferred: try: os.remove(path) except OSError: errors += 1 - self.log.error("Failed to rollback created file: " - "{}".format(path), - exc_info=True) + self.log.error( + "Failed to rollback created file: {}".format(path), + exc_info=True) # Rollback the backups for backup, original in self._backup_to_original.items(): @@ -148,13 +152,15 @@ class FileTransaction(object): os.rename(backup, original) except OSError: errors += 1 - self.log.error("Failed to restore original file: " - "{} -> {}".format(backup, original), - exc_info=True) + self.log.error( + "Failed to restore original file: {} -> {}".format( + backup, original), + exc_info=True) if errors: - self.log.error("{} errors occurred during " - "rollback.".format(errors), exc_info=True) + self.log.error( + "{} errors occurred during rollback.".format(errors), + exc_info=True) six.reraise(*sys.exc_info()) @property From ee71a051b6066011fc4cfe8cd261de8fe9081fad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:05:14 +0100 Subject: [PATCH 2350/2550] removed redundant check of directory --- openpype/lib/file_transaction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 2d706adaef..6f285d73a8 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -92,7 +92,7 @@ class FileTransaction(object): def process(self): # Backup any existing files for dst, (src, opts) in self._transfers.items(): - if not os.path.isdir(src) and dst == src: + if dst == src: continue if os.path.exists(dst): @@ -106,7 +106,7 @@ class FileTransaction(object): # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): - if not os.path.isdir(src) and dst == src: + if dst == src: self.log.debug( "Source and destionation are same files {} -> {}".format( src, dst)) From 9f2cd89e1521bca7af39927d09655867a082456f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:06:52 +0100 Subject: [PATCH 2351/2550] remove unused variable --- openpype/lib/file_transaction.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 6f285d73a8..f265b8815c 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -91,18 +91,17 @@ class FileTransaction(object): def process(self): # Backup any existing files - for dst, (src, opts) in self._transfers.items(): - if dst == src: + for dst, (src, _) in self._transfers.items(): + if dst == src or not os.path.exists(dst): continue - if os.path.exists(dst): - # Backup original file - # todo: add timestamp or uuid to ensure unique - backup = dst + ".bak" - self._backup_to_original[backup] = dst - self.log.debug( - "Backup existing file: {} -> {}".format(dst, backup)) - os.rename(dst, backup) + # Backup original file + # todo: add timestamp or uuid to ensure unique + backup = dst + ".bak" + self._backup_to_original[backup] = dst + self.log.debug( + "Backup existing file: {} -> {}".format(dst, backup)) + 
os.rename(dst, backup) # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): From 2c55ee55c266dbfe90394918e604ed87c51d619e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:09:54 +0100 Subject: [PATCH 2352/2550] remove source and destination check from integrate --- openpype/plugins/publish/integrate.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 57a642c635..6a85a87129 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -291,9 +291,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: - if src == dst: - continue - # todo: add support for hardlink transfers file_transactions.add(src, dst) From 1fc8528795d971d54aa4cebd4e74032828ffd712 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:11:45 +0000 Subject: [PATCH 2353/2550] Remove redundant infected code --- openpype/hosts/maya/api/lib.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index a415f625c0..b2bbb823aa 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3436,28 +3436,3 @@ def iter_visible_nodes_in_range(nodes, start, end): # If no more nodes to process break the frame iterations.. if not node_dependencies: break - - -@contextlib.contextmanager -def selection(*nodes): - """Execute something with a specific Maya selection. - - Example: - .. code-block:: python - - cmds.select('side') - print(cmds.ls(sl=True)) - # ['side'] - - with selection('top', 'lambert1'): - print(cmds.ls) - # ['top', 'lambert1'] - - print(cmds.ls(sl=True)) - # ['side'] - - """ - current = cmds.ls(sl=True) - cmds.select(*nodes, noExpand=True) - yield - cmds.select(current, noExpand=True) \ No newline at end of file From fd6345743c330e7d6f2cd80a7b333851e7c1f75e Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:12:46 +0000 Subject: [PATCH 2354/2550] Creator settings --- .../hosts/maya/plugins/create/create_ass.py | 48 ++++++--- .../defaults/project_settings/maya.json | 19 +++- .../schemas/schema_maya_create.json | 102 +++++++++++++++++- 3 files changed, 148 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 39f226900a..903a8ef0cf 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from openpype.hosts.maya.api import ( lib, plugin @@ -9,12 +7,27 @@ from maya import cmds class CreateAss(plugin.Creator): - """Arnold Archive""" + """Arnold Scene Source""" name = "ass" - label = "Ass StandIn" + label = "Arnold Scene Source" family = "ass" icon = "cube" + exportSequence = False + expandProcedurals = False + motionBlur = True + motionBlurKeys = 2 + motionBlurLength = 0.5 + maskOptions = False + maskCamera = False + maskLight = False + maskShape = False + maskShader = False + maskOverride = False + maskDriver = False + maskFilter = False + maskColor_manager = False + maskOperator = False def __init__(self, *args, **kwargs): super(CreateAss, self).__init__(*args, **kwargs) @@ -22,16 +35,27 @@ class CreateAss(plugin.Creator): # Add animation data self.data.update(lib.collect_animation_data()) - # Vertex colors with the geometry - self.data["exportSequence"] = 
False + self.data["exportSequence"] = self.exportSequence + self.data["expandProcedurals"] = self.expandProcedurals + self.data["motionBlur"] = self.motionBlur + self.data["motionBlurKeys"] = self.motionBlurKeys + self.data["motionBlurLength"] = self.motionBlurLength + + # Masks + self.data["maskOptions"] = self.maskOptions + self.data["maskCamera"] = self.maskCamera + self.data["maskLight"] = self.maskLight + self.data["maskShape"] = self.maskShape + self.data["maskShader"] = self.maskShader + self.data["maskOverride"] = self.maskOverride + self.data["maskDriver"] = self.maskDriver + self.data["maskFilter"] = self.maskFilter + self.data["maskColor_manager"] = self.maskColor_manager + self.data["maskOperator"] = self.maskOperator def process(self): instance = super(CreateAss, self).process() - # data = OrderedDict(**self.data) - - - nodes = list() if (self.options or {}).get("useSelection"): @@ -42,7 +66,3 @@ class CreateAss(plugin.Creator): assContent = cmds.sets(name="content_SET") assProxy = cmds.sets(name="proxy_SET", empty=True) cmds.sets([assContent, assProxy], forceElement=instance) - - # self.log.info(data) - # - # self.data = data diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a..a74f8e5827 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -171,7 +171,22 @@ "enabled": true, "defaults": [ "Main" - ] + ], + "exportSequence": false, + "expandProcedurals": false, + "motionBlur": true, + "motionBlurKeys": 2, + "motionBlurLength": 0.5, + "maskOptions": false, + "maskCamera": false, + "maskLight": false, + "maskShape": false, + "maskShader": false, + "maskOverride": false, + "maskDriver": false, + "maskFilter": false, + "maskColor_manager": false, + "maskOperator": false }, "CreateAssembly": { "enabled": true, @@ -1007,4 +1022,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index bc6520474d..6cf11e4cea 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -200,7 +200,103 @@ } ] }, - + { + "type": "dict", + "collapsible": true, + "key": "CreateAss", + "label": "Create Ass", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "boolean", + "key": "exportSequence", + "label": "Export Sequence" + }, + { + "type": "boolean", + "key": "expandProcedurals", + "label": "Expand Procedurals" + }, + { + "type": "boolean", + "key": "motionBlur", + "label": "Motion Blur" + }, + { + "type": "number", + "key": "motionBlurKeys", + "label": "Motion Blur Keys", + "minimum": 0 + }, + { + "type": "number", + "key": "motionBlurLength", + "label": "Motion Blur Length", + "decimal": 3 + }, + { + "type": "boolean", + "key": "maskOptions", + "label": "Mask Options" + }, + { + "type": "boolean", + "key": "maskCamera", + "label": "Mask Camera" + }, + { + "type": "boolean", + "key": "maskLight", + "label": "Mask Light" + }, + { + "type": "boolean", + "key": "maskShape", + "label": "Mask Shape" + }, + { + "type": "boolean", + "key": 
"maskShader", + "label": "Mask Shader" + }, + { + "type": "boolean", + "key": "maskOverride", + "label": "Mask Override" + }, + { + "type": "boolean", + "key": "maskDriver", + "label": "Mask Driver" + }, + { + "type": "boolean", + "key": "maskFilter", + "label": "Mask Filter" + }, + { + "type": "boolean", + "key": "maskColor_manager", + "label": "Mask Color Manager" + }, + { + "type": "boolean", + "key": "maskOperator", + "label": "Mask Operator" + } + ] + }, { "type": "schema_template", "name": "template_create_plugin", @@ -217,10 +313,6 @@ "key": "CreateMultiverseUsdOver", "label": "Create Multiverse USD Override" }, - { - "key": "CreateAss", - "label": "Create Ass" - }, { "key": "CreateAssembly", "label": "Create Assembly" From 3cf47e25626675ec00d1202fbaf99d1ad4b95030 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:13:12 +0000 Subject: [PATCH 2355/2550] Collect camera from objectset if present. --- .../hosts/maya/plugins/publish/collect_ass.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 3ce1f2ccf1..69af4c777d 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -8,6 +8,7 @@ import pyblish.api class CollectAssData(pyblish.api.InstancePlugin): """Collect Ass data.""" + # Offset to be after renderable camera collection. order = pyblish.api.CollectorOrder + 0.2 label = 'Collect Ass' families = ["ass"] @@ -25,7 +26,8 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - assert len(members) == 1, "You have multiple proxy meshes, please only use one" + msg = "You have multiple proxy meshes, please only use one" + assert len(members) == 1, msg instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) @@ -35,4 +37,17 @@ class CollectAssData(pyblish.api.InstancePlugin): group = re.compile(r" \[.*\]") instance.data["label"] = group.sub("", instance.data["label"]) + # Use camera in object set if present else default to render globals + # camera. 
+ cameras = cmds.ls(type="camera", long=True) + renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)] + camera = renderable[0] + for node in instance.data["setMembers"]: + camera_shapes = cmds.listRelatives( + node, shapes=True, type="camera" + ) + if camera_shapes: + camera = node + instance.data["camera"] = camera + self.log.debug("data: {}".format(instance.data)) From 76bf9bf4de319182704b57687b9d97480e8b003b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:13:42 +0000 Subject: [PATCH 2356/2550] Working extractor --- .../hosts/maya/plugins/publish/extract_ass.py | 149 ++++++++++++------ 1 file changed, 102 insertions(+), 47 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 5c21a4ff08..b6bd4a2e22 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,20 +1,17 @@ import os +import contextlib from maya import cmds +import arnold from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection class ExtractAssStandin(publish.Extractor): - """Extract the content of the instance to a ass file + """Extract the content of the instance to a ass file""" - Things to pay attention to: - - If animation is toggled, are the frames correct - - - """ - - label = "Ass Standin (.ass)" + label = "Arnold Scene Source (.ass)" hosts = ["maya"] families = ["ass"] asciiAss = False @@ -28,50 +25,59 @@ class ExtractAssStandin(publish.Extractor): filenames = list() file_path = os.path.join(staging_dir, filename) + kwargs = { + "filename": file_path, + "selected": True, + "asciiAss": self.asciiAss, + "shadowLinks": True, + "lightLinks": True, + "boundingBox": True, + "expandProcedurals": instance.data.get("expandProcedurals", False), + "camera": instance.data["camera"], + "mask": self.get_ass_export_mask(instance) + } + + # Motion blur + motion_blur = instance.data.get("motionBlur", True) + motion_blur_keys = instance.data.get("motionBlurKeys", 2) + motion_blur_length = instance.data.get("motionBlurLength", 0.5) + # Write out .ass file self.log.info("Writing: '%s'" % file_path) - with maintained_selection(): - self.log.info("Writing: {}".format(instance.data["setMembers"])) - cmds.select(instance.data["setMembers"], noExpand=True) + with self.motion_blur_ctx(motion_blur, motion_blur_keys, motion_blur_length): + with maintained_selection(): + self.log.info( + "Writing: {}".format(instance.data["setMembers"]) + ) + cmds.select(instance.data["setMembers"], noExpand=True) - if sequence: - self.log.info("Extracting ass sequence") + if sequence: + self.log.info("Extracting ass sequence") - # Collect the start and end including handles - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 0) + # Collect the start and end including handles + kwargs.update({ + "start": instance.data.get("frameStartHandle", 1), + "end": instance.data.get("frameEndHandle", 1), + "step": instance.data.get("step", 0) + }) - exported_files = cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=self.asciiAss, - shadowLinks=True, - lightLinks=True, - boundingBox=True, - startFrame=start, - endFrame=end, - frameStep=step - ) - for file in exported_files: - filenames.append(os.path.split(file)[1]) - self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - 
cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=False, - shadowLinks=True, - lightLinks=True, - boundingBox=True - ) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) + exported_files = cmds.arnoldExportAss(**kwargs) + + for file in exported_files: + filenames.append(os.path.split(file)[1]) + + self.log.info("Exported: {}".format(filenames)) + else: + self.log.info("Extracting ass") + cmds.arnoldExportAss(**kwargs) + self.log.info("Extracted {}".format(filename)) + filenames = filename + optionals = [ + "frameStart", "frameEnd", "step", "handles", + "handleEnd", "handleStart" + ] + for key in optionals: + instance.data.pop(key, None) if "representations" not in instance.data: instance.data["representations"] = [] @@ -84,9 +90,58 @@ class ExtractAssStandin(publish.Extractor): } if sequence: - representation['frameStart'] = start + representation['frameStart'] = kwargs["start"] instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" % (instance.name, staging_dir)) + + #This should be separated out as library function that takes some + #attributes to modify with values. The function then resets to original + #values. + @contextlib.contextmanager + def motion_blur_ctx(self, force, keys, length): + if not force: + yield + return + + cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") + ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") + clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") + + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) + if keys > 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) + if length >= 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) + + try: + yield + finally: + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) + + #This should be refactored to lib. 
probably just need the node_types directionary + def get_ass_export_mask(self, instance): + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + return mask From 31d14cb70fb19e80b96c01b649e8a2c8ff34953c Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:16:11 +0000 Subject: [PATCH 2357/2550] Remove redundant lib --- openpype/hosts/maya/api/mtoa.py | 187 -------------------------------- 1 file changed, 187 deletions(-) delete mode 100644 openpype/hosts/maya/api/mtoa.py diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py deleted file mode 100644 index d19fecf6b5..0000000000 --- a/openpype/hosts/maya/api/mtoa.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -"""Library of classes and functions deadling with MtoA functionality.""" -import tempfile -import contextlib - -import clique -import pyblish.api - -from maya import cmds - -from openpype.pipeline import publish -from .viewport import vp2_paused_context -from .lib import selection - - -class _AssExtractor(publish.Extractor): - """Base class for ASS type extractors.""" - - order = pyblish.api.ExtractorOrder + 0.01 - hosts = ["maya"] - - def get_ass_export_mask(self, maya_set): - import arnold # noqa - mask = arnold.AI_NODE_ALL - - ai_masks = {"options": {"value": arnold.AI_NODE_OPTIONS, - "default": False}, - "camera": {"value": arnold.AI_NODE_CAMERA, - "default": False}, - "light": {"value": arnold.AI_NODE_LIGHT, - "default": False}, - "shape": {"value": arnold.AI_NODE_SHAPE, - "default": True}, - "shader": {"value": arnold.AI_NODE_SHADER, - "default": True}, - "override": {"value": arnold.AI_NODE_OVERRIDE, - "default": False}, - "driver": {"value": arnold.AI_NODE_DRIVER, - "default": False}, - "filter": {"value": arnold.AI_NODE_FILTER, - "default": False}, - "color_manager": {"value": arnold.AI_NODE_COLOR_MANAGER, - "default": True}, - "operator": {"value": arnold.AI_NODE_OPERATOR, - "default": True}} - - for mask_name, mask_data in ai_masks.items(): - attr = "inf_ass_export_{}".format(mask_name) - - submask = self.get_set_attr("{}.{}".format(maya_set, attr), - default=mask_data["default"]) - - if not submask: - mask = mask ^ mask_data["value"] - - return mask - - def process(self, instance): - #What is a dry run? - #ass.rr seems like an abstract variable. Needs clarification. 
- dry_run = instance.data.get("ass.rr") - - staging_dir = self.staging_dir(instance) - sequence = instance.data.get("exportSequence", False) - - if not cmds.pluginInfo("mtoa", query=True, loaded=True): - cmds.loadPlugin("mtoa") - - # Export to a temporal path - export_dir = instance.context.data["stagingDir"] - export_path = tempfile.NamedTemporaryFile(suffix=".ass", - dir=export_dir, - delete=False) - - set_ = instance.data["set"] - kwargs = {"shadowLinks": 1, - "lightLinks": 1, - "boundingBox": True, - "selected": True, - "f": export_path.name} - - # Animation - - if sequence: - mask = self.get_ass_export_mask(set_) - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 1.0) - if start is not None: - kwargs["startFrame"] = float(start) - kwargs["endFrame"] = float(end) - kwargs["frameStep"] = float(step) - else: - mask = 44 - - #get/set should be plugin options. - # Generic options - if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), - False): - kwargs["expandProcedurals"] = True - - if self.get_set_attr("{}.inf_ass_fullpath".format(set_), - True): - kwargs["fullPath"] = True - - kwargs["mask"] = mask - - # Motion blur - mb = self.get_set_attr("{}.inf_ass_motion_blur".format(set_), False) - keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) - length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) - - #Targets should already be collected - targets = self.get_targets(instance) - - _sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) - _sorted_kwargs = ["{}={!r}".format(x, y) for x, y in _sorted_kwargs] - - if not dry_run: - self.log.debug("Running command: cmds.arnoldExportAss({})" - .format(", ".join(_sorted_kwargs))) - #There should be a context for not updating the viewport from - #pointcache extraction. - with vp2_paused_context(): - with selection(targets): - with self.motion_blur_ctx(mb, keys, length): - result = cmds.arnoldExportAss(**kwargs) - else: - instance.data["assExportKwargs"] = kwargs - start = kwargs.get("startFrame") - end = kwargs.get("endFrame") - result = [] - - range_ = [0] - if start is not None: - range_ = range(int(start), int(end) + 1) - - for i in range_: - #padding amount should be configurable. 3 does not seems - #enough as default. - fp = "{}.{:03d}.ass".format(export_path.name, i) - with open(fp, "w"): - pass - result.append(fp) - - #Whether its a sequence or not, should already have been determined. 
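# The sequence handling just below collapses the per-frame .ass files with the
# clique library. A rough sketch of that call, assuming clique's documented
# assemble()/format() behaviour; the file names here are made up:
import clique

files = ["render.0001.ass", "render.0002.ass", "render.0003.ass"]
collections, remainder = clique.assemble(files)
sequence = collections[0]
# sequence.format("{head}{padding}{tail}") is expected to yield something
# like "render.%04d.ass" for the files above.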
- if len(result) == 1: - filepath = result[0] - else: - collection = clique.assemble(result)[0][0] - filepath = collection.format() - - # Register the file - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'ass', - 'ext': 'ass', - 'files': filepath, - "stagingDir": staging_dir - } - - instance.data["representations"].append(representation) - - @contextlib.contextmanager - def motion_blur_ctx(self, force, keys, length): - if not force: - yield - return - - cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") - ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") - clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") - - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) - if keys > 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) - if length >= 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) - - try: - yield - finally: - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) From b974c675de93610b5dadda038c1e0c59526b3726 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 21:26:33 +0000 Subject: [PATCH 2358/2550] Refactor - use lib for attribute context - remove mask class method --- .../hosts/maya/plugins/publish/extract_ass.py | 96 +++++++------------ 1 file changed, 37 insertions(+), 59 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index b6bd4a2e22..7fc0cc1b2f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,11 +1,10 @@ import os -import contextlib from maya import cmds import arnold from openpype.pipeline import publish -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.hosts.maya.api.lib import maintained_selection, attribute_values class ExtractAssStandin(publish.Extractor): @@ -25,6 +24,40 @@ class ExtractAssStandin(publish.Extractor): filenames = list() file_path = os.path.join(staging_dir, filename) + # Mask + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + # Motion blur + values = { + "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get( + "motionBlur", True + ), + "defaultArnoldRenderOptions.motion_steps": instance.data.get( + "motionBlurKeys", 2 + ), + "defaultArnoldRenderOptions.motion_frames": instance.data.get( + "motionBlurLength", 0.5 + ) + } + + # Write out .ass file kwargs = { "filename": file_path, "selected": True, @@ -34,17 +67,11 @@ class ExtractAssStandin(publish.Extractor): "boundingBox": True, "expandProcedurals": instance.data.get("expandProcedurals", False), "camera": instance.data["camera"], - "mask": self.get_ass_export_mask(instance) + "mask": mask } - # Motion blur - motion_blur = instance.data.get("motionBlur", True) - motion_blur_keys = 
instance.data.get("motionBlurKeys", 2) - motion_blur_length = instance.data.get("motionBlurLength", 0.5) - - # Write out .ass file self.log.info("Writing: '%s'" % file_path) - with self.motion_blur_ctx(motion_blur, motion_blur_keys, motion_blur_length): + with attribute_values(values): with maintained_selection(): self.log.info( "Writing: {}".format(instance.data["setMembers"]) @@ -96,52 +123,3 @@ class ExtractAssStandin(publish.Extractor): self.log.info("Extracted instance '%s' to: %s" % (instance.name, staging_dir)) - - #This should be separated out as library function that takes some - #attributes to modify with values. The function then resets to original - #values. - @contextlib.contextmanager - def motion_blur_ctx(self, force, keys, length): - if not force: - yield - return - - cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") - ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") - clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") - - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) - if keys > 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) - if length >= 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) - - try: - yield - finally: - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) - - #This should be refactored to lib. probably just need the node_types directionary - def get_ass_export_mask(self, instance): - mask = arnold.AI_NODE_ALL - - node_types = { - "options": arnold.AI_NODE_OPTIONS, - "camera": arnold.AI_NODE_CAMERA, - "light": arnold.AI_NODE_LIGHT, - "shape": arnold.AI_NODE_SHAPE, - "shader": arnold.AI_NODE_SHADER, - "override": arnold.AI_NODE_OVERRIDE, - "driver": arnold.AI_NODE_DRIVER, - "filter": arnold.AI_NODE_FILTER, - "color_manager": arnold.AI_NODE_COLOR_MANAGER, - "operator": arnold.AI_NODE_OPERATOR - } - - for key in node_types.keys(): - if instance.data.get("mask" + key.title()): - mask = mask ^ node_types[key] - - return mask From 1c985ca0015ce4e3161e18a91205a4590401e243 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 6 Dec 2022 23:45:51 +0100 Subject: [PATCH 2359/2550] :bug: fix publishing of alembics --- openpype/hosts/max/__init__.py | 2 +- openpype/hosts/max/api/__init__.py | 11 +- openpype/hosts/max/api/lib.py | 78 ++++++++++++-- openpype/hosts/max/api/pipeline.py | 9 +- openpype/hosts/max/api/plugin.py | 15 +-- .../max/plugins/publish/collect_workfile.py | 63 +++++++++++ .../max/plugins/publish/extract_pointcache.py | 100 ++++++++++++++++++ .../plugins/publish/validate_scene_saved.py | 19 ++++ 8 files changed, 272 insertions(+), 25 deletions(-) create mode 100644 openpype/hosts/max/plugins/publish/collect_workfile.py create mode 100644 openpype/hosts/max/plugins/publish/extract_pointcache.py create mode 100644 openpype/hosts/max/plugins/publish/validate_scene_saved.py diff --git a/openpype/hosts/max/__init__.py b/openpype/hosts/max/__init__.py index 8da0e0ee42..9a5af8258c 100644 --- a/openpype/hosts/max/__init__.py +++ b/openpype/hosts/max/__init__.py @@ -7,4 +7,4 @@ from .addon import ( __all__ = ( "MaxAddon", "MAX_HOST_DIR", -) \ No newline at end of file +) diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index 503afade73..26190dcfb8 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -2,10 +2,19 @@ """Public API for 3dsmax""" 
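# For reference, the attribute_values() helper that replaces motion_blur_ctx in
# the refactor above is, broadly, a generic "apply attribute overrides, then
# restore them" context manager. This is only an illustrative sketch of that
# pattern based on the removed motion_blur_ctx code, not the actual
# implementation from openpype.hosts.maya.api.lib:
import contextlib
from maya import cmds

@contextlib.contextmanager
def attribute_values_sketch(attr_values):
    """Temporarily apply {"node.attr": value} overrides and restore originals."""
    original = {attr: cmds.getAttr(attr) for attr in attr_values}
    try:
        for attr, value in attr_values.items():
            cmds.setAttr(attr, value)
        yield
    finally:
        for attr, value in original.items():
            cmds.setAttr(attr, value)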
from .pipeline import ( - MaxHost + MaxHost, ) +from .lib import( + maintained_selection, + lsattr, + get_all_children +) + __all__ = [ "MaxHost", + "maintained_selection", + "lsattr", + "get_all_children" ] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py index 8a57bb1bf6..9256ca9ac1 100644 --- a/openpype/hosts/max/api/lib.py +++ b/openpype/hosts/max/api/lib.py @@ -1,7 +1,13 @@ # -*- coding: utf-8 -*- """Library of functions useful for 3dsmax pipeline.""" +import json +import six from pymxs import runtime as rt from typing import Union +import contextlib + + +JSON_PREFIX = "JSON::" def imprint(node_name: str, data: dict) -> bool: @@ -10,7 +16,10 @@ def imprint(node_name: str, data: dict) -> bool: return False for k, v in data.items(): - rt.setUserProp(node, k, v) + if isinstance(v, (dict, list)): + rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}') + else: + rt.setUserProp(node, k, v) return True @@ -39,10 +48,13 @@ def lsattr( nodes = [] output_node(root, nodes) - if not value: - return [n for n in nodes if rt.getUserProp(n, attr)] - - return [n for n in nodes if rt.getUserProp(n, attr) == value] + return [ + n for n in nodes + if rt.getUserProp(n, attr) == value + ] if value else [ + n for n in nodes + if rt.getUserProp(n, attr) + ] def read(container) -> dict: @@ -53,12 +65,58 @@ def read(container) -> dict: return data for line in props.split("\r\n"): - key, value = line.split("=") - # if the line cannot be split we can't really parse it - if not key: + try: + key, value = line.split("=") + except ValueError: + # if the line cannot be split we can't really parse it continue - data[key.strip()] = value.strip() - data["instance_node"] = container + value = value.strip() + if isinstance(value.strip(), six.string_types) and \ + value.startswith(JSON_PREFIX): + try: + value = json.loads(value[len(JSON_PREFIX):]) + except json.JSONDecodeError: + # not a json + pass + + data[key.strip()] = value + + data["instance_node"] = container.name return data + + +@contextlib.contextmanager +def maintained_selection(): + previous_selection = rt.getCurrentSelection() + try: + yield + finally: + if previous_selection: + rt.select(previous_selection) + else: + rt.select() + + +def get_all_children(parent, node_type=None): + """Handy function to get all the children of a given node + + Args: + parent (3dsmax Node1): Node to get all children of. + node_type (None, runtime.class): give class to check for + e.g. rt.FFDBox/rt.GeometryClass etc. 
+ + Returns: + list: list of all children of the parent node + """ + def list_children(node): + children = [] + for c in node.Children: + children.append(c) + children = children + list_children(c) + return children + child_list = list_children(parent) + + return ([x for x in child_list if rt.superClassOf(x) == node_type] + if node_type else child_list) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index cef45193c4..4f8271fb7e 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -1,9 +1,7 @@ # -*- coding: utf-8 -*- """Pipeline tools for OpenPype Houdini integration.""" import os -import sys import logging -import contextlib import json @@ -101,12 +99,12 @@ attributes "OpenPypeContext" ( context type: #string ) - + rollout params "OpenPype Parameters" ( editText editTextContext "Context" type: #string ) -) +) """) attr = rt.execute(create_attr_script) @@ -149,6 +147,3 @@ def ls() -> list: for container in sorted(containers, key=lambda name: container.name): yield lib.read(container) - - - diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py index 0f01c94ce1..4788bfd383 100644 --- a/openpype/hosts/max/api/plugin.py +++ b/openpype/hosts/max/api/plugin.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- """3dsmax specific Avalon/Pyblish plugin definitions.""" -import sys from pymxs import runtime as rt import six from abc import ABCMeta @@ -25,12 +24,12 @@ class MaxCreatorBase(object): shared_data["max_cached_subsets"] = {} cached_instances = lsattr("id", "pyblish.avalon.instance") for i in cached_instances: - creator_id = i.get("creator_identifier") + creator_id = rt.getUserProp(i, "creator_identifier") if creator_id not in shared_data["max_cached_subsets"]: - shared_data["houdini_cached_subsets"][creator_id] = [i] + shared_data["max_cached_subsets"][creator_id] = [i.name] else: shared_data[ - "houdini_cached_subsets"][creator_id].append(i) # noqa + "max_cached_subsets"][creator_id].append(i.name) # noqa return shared_data @staticmethod @@ -61,8 +60,12 @@ class MaxCreator(Creator, MaxCreatorBase): instance_data, self ) + for node in self.selected_nodes: + node.Parent = instance_node + self._add_instance_to_context(instance) imprint(instance_node.name, instance.data_to_store()) + return instance def collect_instances(self): @@ -70,7 +73,7 @@ class MaxCreator(Creator, MaxCreatorBase): for instance in self.collection_shared_data[ "max_cached_subsets"].get(self.identifier, []): created_instance = CreatedInstance.from_existing( - read(instance), self + read(rt.getNodeByName(instance)), self ) self._add_instance_to_context(created_instance) @@ -98,7 +101,7 @@ class MaxCreator(Creator, MaxCreatorBase): instance_node = rt.getNodeByName( instance.data.get("instance_node")) if instance_node: - rt.delete(instance_node) + rt.delete(rt.getNodeByName(instance_node)) self._remove_instance_from_context(instance) diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py new file mode 100644 index 0000000000..7112337575 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +"""Collect current work file.""" +import os +import pyblish.api + +from pymxs import runtime as rt +from openpype.pipeline import legacy_io, KnownPublishError + + +class CollectWorkfile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.01 + 
label = "Collect 3dsmax Workfile" + hosts = ['max'] + + def process(self, context): + """Inject the current working file.""" + folder = rt.maxFilePath + file = rt.maxFileName + if not folder or not file: + self.log.error("Scene is not saved.") + current_file = os.path.join(folder, file) + + context.data['currentFile'] = current_file + + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = 'workfile' + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": 'workfile', + "families": ['workfile'], + "setMembers": [current_file], + "frameStart": context.data['frameStart'], + "frameEnd": context.data['frameEnd'], + "handleStart": context.data['handleStart'], + "handleEnd": context.data['handleEnd'] + }) + + data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info('Collected instance: {}'.format(file)) + self.log.info('Scene path: {}'.format(current_file)) + self.log.info('staging Dir: {}'.format(folder)) + self.log.info('subset: {}'.format(subset)) diff --git a/openpype/hosts/max/plugins/publish/extract_pointcache.py b/openpype/hosts/max/plugins/publish/extract_pointcache.py new file mode 100644 index 0000000000..904c1656da --- /dev/null +++ b/openpype/hosts/max/plugins/publish/extract_pointcache.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +""" +Export alembic file. + +Note: + Parameters on AlembicExport (AlembicExport.Parameter): + + ParticleAsMesh (bool): Sets whether particle shapes are exported + as meshes. + AnimTimeRange (enum): How animation is saved: + #CurrentFrame: saves current frame + #TimeSlider: saves the active time segments on time slider (default) + #StartEnd: saves a range specified by the Step + StartFrame (int) + EnFrame (int) + ShapeSuffix (bool): When set to true, appends the string "Shape" to the + name of each exported mesh. This property is set to false by default. + SamplesPerFrame (int): Sets the number of animation samples per frame. + Hidden (bool): When true, export hidden geometry. + UVs (bool): When true, export the mesh UV map channel. + Normals (bool): When true, export the mesh normals. + VertexColors (bool): When true, export the mesh vertex color map 0 and the + current vertex color display data when it differs + ExtraChannels (bool): When true, export the mesh extra map channels + (map channels greater than channel 1) + Velocity (bool): When true, export the meh vertex and particle velocity + data. + MaterialIDs (bool): When true, export the mesh material ID as + Alembic face sets. + Visibility (bool): When true, export the node visibility data. + LayerName (bool): When true, export the node layer name as an Alembic + object property. + MaterialName (bool): When true, export the geometry node material name as + an Alembic object property + ObjectID (bool): When true, export the geometry node g-buffer object ID as + an Alembic object property. + CustomAttributes (bool): When true, export the node and its modifiers + custom attributes into an Alembic object compound property. 
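# Hedged usage sketch for the AlembicExport parameters documented above: they
# are plain properties on the AlembicExport interface and can be set from
# Python via pymxs before calling the exporter. The exact attribute access
# path and the output path below are assumptions for illustration; the
# extractor in this patch drives the same settings through a MAXScript string
# passed to rt.execute() instead.
from pymxs import runtime as rt

rt.AlembicExport.ArchiveType = rt.Name("ogawa")
rt.AlembicExport.AnimTimeRange = rt.Name("StartEnd")
rt.AlembicExport.StartFrame = 1001
rt.AlembicExport.EndFrame = 1050
rt.AlembicExport.ParticleAsMesh = False
rt.exportFile("C:/temp/example.abc", rt.Name("noPrompt"),
              selectedOnly=True, using=rt.AlembicExport)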
+""" +import os +import pyblish.api +from openpype.pipeline import publish +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractAlembic(publish.Extractor): + order = pyblish.api.ExtractorOrder + label = "Extract Pointcache" + hosts = ["max"] + families = ["pointcache", "camera"] + + def process(self, instance): + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + container = instance.data["instance_node"] + + self.log.info("Extracting pointcache ...") + + parent_dir = self.staging_dir(instance) + file_name = "{name}.abc".format(**instance.data) + path = os.path.join(parent_dir, file_name) + + # We run the render + self.log.info("Writing alembic '%s' to '%s'" % (file_name, + parent_dir)) + + abc_export_cmd = ( + f""" +AlembicExport.ArchiveType = #ogawa +AlembicExport.CoordinateSystem = #maya +AlembicExport.StartFrame = {start} +AlembicExport.EndFrame = {end} + +exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport + + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + + with maintained_selection(): + # select and export + + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(abc_export_cmd) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': file_name, + "stagingDir": parent_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py new file mode 100644 index 0000000000..6392b12d11 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from openpype.pipeline import PublishValidationError +from openpype.pipeline.publish import RepairAction +from pymxs import runtime as rt + + +class ValidateSceneSaved(pyblish.api.InstancePlugin): + """Validate that workfile was saved.""" + + order = pyblish.api.ValidatorOrder + families = ["workfile"] + hosts = ["max"] + label = "Validate Workfile is saved" + + def process(self, instance): + if not rt.maxFilePath or not rt.maxFileName: + raise PublishValidationError( + "Workfile is not saved", title=self.label) From d29a3ca4379a88202bc4279fe8966d87a3509820 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:17:21 +0100 Subject: [PATCH 2360/2550] :art: simple loader for alembics --- .../hosts/max/plugins/load/load_pointcache.py | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 openpype/hosts/max/plugins/load/load_pointcache.py diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py new file mode 100644 index 0000000000..150206b8b8 --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +"""Simple alembic loader for 3dsmax. + +Because of limited api, alembics can be only loaded, but not easily updated. 
+ +""" +import os +from openpype.pipeline import ( + load, + get_representation_path, +) + + +class AbcLoader(load.LoaderPlugin): + """Alembic loader.""" + + families = ["model", "animation", "pointcache"] + label = "Load Alembic" + representations = ["abc"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + + file_path = os.path.normpath(self.fname) + + abc_before = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + abc_export_cmd = (f""" +AlembicImport.ImportToRoot = false + +importFile @"{file_path}" #noPrompt + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + rt.execute(abc_export_cmd) + + abc_after = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + # This should yield new AlembicContainer node + abc_containers = abc_after.difference(abc_before) + + if len(abc_containers) != 1: + self.log.error("Something failed when loading.") + + abc_container = abc_containers.pop() + + container_name = f"{name}_CON" + container = rt.container(name=container_name) + abc_container.Parent = container + + return container + + def remove(self, container): + from pymxs import runtime as rt + + node = container["node"] + rt.delete(node) From 7327334226c45fc0291c3b08e041cb8fc7fa328b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:20:56 +0100 Subject: [PATCH 2361/2550] :rotating_light: fix :dog: --- openpype/hosts/max/api/__init__.py | 2 +- openpype/hosts/max/api/pipeline.py | 6 +----- openpype/hosts/max/plugins/publish/collect_workfile.py | 2 +- openpype/hosts/max/plugins/publish/validate_scene_saved.py | 3 +-- openpype/hosts/max/startup/startup.py | 1 - 5 files changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index 26190dcfb8..92097cc98b 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -6,7 +6,7 @@ from .pipeline import ( ) -from .lib import( +from .lib import ( maintained_selection, lsattr, get_all_children diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index 4f8271fb7e..f3cdf245fb 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -15,11 +15,7 @@ from openpype.pipeline import ( from openpype.hosts.max.api.menu import OpenPypeMenu from openpype.hosts.max.api import lib from openpype.hosts.max import MAX_HOST_DIR -from openpype.pipeline.load import any_outdated_containers -from openpype.lib import ( - register_event_callback, - emit_event, -) + from pymxs import runtime as rt # noqa log = logging.getLogger("openpype.hosts.max") diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py index 7112337575..3500b2735c 100644 --- a/openpype/hosts/max/plugins/publish/collect_workfile.py +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -4,7 +4,7 @@ import os import pyblish.api from pymxs import runtime as rt -from openpype.pipeline import legacy_io, KnownPublishError +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py index 6392b12d11..8506b17315 100644 --- a/openpype/hosts/max/plugins/publish/validate_scene_saved.py +++ 
b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline import PublishValidationError -from openpype.pipeline.publish import RepairAction +from openpype.pipeline import PublishValidationError from pymxs import runtime as rt diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py index afcbd2d132..37bcef5db1 100644 --- a/openpype/hosts/max/startup/startup.py +++ b/openpype/hosts/max/startup/startup.py @@ -4,4 +4,3 @@ from openpype.pipeline import install_host host = MaxHost() install_host(host) - From 75606777695064693dca411bd47455988a669c14 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:23:22 +0100 Subject: [PATCH 2362/2550] :rotating_light: fix hound round 2 --- openpype/hosts/max/plugins/create/create_pointcache.py | 3 ++- openpype/hosts/max/plugins/load/load_pointcache.py | 3 +-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py index 4c9ec7fb97..c08b0dedfe 100644 --- a/openpype/hosts/max/plugins/create/create_pointcache.py +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -18,4 +18,5 @@ class CreatePointCache(plugin.MaxCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = rt.getNodeByName(instance.get("instance_node")) + # for additional work on the node: + # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py index 150206b8b8..285d84b7b6 100644 --- a/openpype/hosts/max/plugins/load/load_pointcache.py +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -6,8 +6,7 @@ Because of limited api, alembics can be only loaded, but not easily updated. 
""" import os from openpype.pipeline import ( - load, - get_representation_path, + load ) From ad95165765bc0841305888af177888bfaf7d1357 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:25:15 +0100 Subject: [PATCH 2363/2550] :rotating_light: fix hound round 3 --- openpype/hosts/max/plugins/create/create_pointcache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py index c08b0dedfe..32f0838471 100644 --- a/openpype/hosts/max/plugins/create/create_pointcache.py +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -11,9 +11,9 @@ class CreatePointCache(plugin.MaxCreator): icon = "gear" def create(self, subset_name, instance_data, pre_create_data): - from pymxs import runtime as rt + # from pymxs import runtime as rt - instance = super(CreatePointCache, self).create( + _ = super(CreatePointCache, self).create( subset_name, instance_data, pre_create_data) # type: CreatedInstance From f4391cbeb2245e132f561cbdc89b8aefc88b06cb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:39:28 +0100 Subject: [PATCH 2364/2550] :recycle: add 3dsmax 2023 variant --- .../system_settings/applications.json | 58 +++++++++---------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index a4db0dd327..b8aa8cec74 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -114,6 +114,35 @@ } } }, + "3dsmax": { + "enabled": true, + "label": "3ds max", + "icon": "{}/app_icons/3dsmax.png", + "host_name": "max", + "environment": { + "ADSK_3DSMAX_STARTUPSCRIPTS_ADDON_DIR": "{OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup" + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "flame": { "enabled": true, "label": "Flame", @@ -1309,35 +1338,6 @@ } } }, - "3dsmax": { - "enabled": true, - "label": "3ds max", - "icon": "{}/app_icons/3dsmax.png", - "host_name": "3dsmax", - "environment": { - - }, - "variants": { - "2023": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "3DSMAX_VERSION": "2023" - } - } - } - }, "djvview": { "enabled": true, "label": "DJV View", From 5c3d44a0abc430e1670b70fc934e2d3484fd79fe Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Dec 2022 09:21:07 +0800 Subject: [PATCH 2365/2550] fix parenting issue when extracting proxy abc --- .../maya/plugins/publish/extract_proxy_abc.py | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index f348712d7c..aa2a4b783c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -97,14 +97,17 @@ class ExtractProxyAlembic(publish.Extractor): end=end)) inst_selection = cmds.ls(nodes, 
long=True) - cmds.geomToBBox(inst_selection, - nameSuffix=name_suffix, - keepOriginal=True, - single=False, - bakeAnimation=True, - startTime=start, - endTime=end) - bbox_sel = cmds.ls(sl=True, long=True) - + bbox = cmds.geomToBBox(inst_selection, + nameSuffix=name_suffix, + keepOriginal=True, + single=False, + bakeAnimation=True, + startTime=start, + endTime=end) + #TODO: fix the scale or disparenting for the group + # bbox_sel = cmds.listRelatives(bbox, parent=True) + # cmds.ls(bbox_sel, long=True) + master_group = cmds.group(name="bbox_grp") + bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel From 99930c2856ac882f8029000a6825911d48ac68c6 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 7 Dec 2022 03:30:40 +0000 Subject: [PATCH 2366/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 5e61ee3a6b..443c76544b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.1" +__version__ = "3.14.9-nightly.2" From b6b7ac29226f9c6084cad6ad8dba434d50bc96af Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Dec 2022 16:55:47 +0800 Subject: [PATCH 2367/2550] adding options for creating renderpasses in redshift and vray --- openpype/hosts/maya/api/lib_rendersettings.py | 27 ++- .../schemas/schema_maya_render_settings.json | 192 +++++++++--------- 2 files changed, 119 insertions(+), 100 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 1293f1287d..f9d24c3780 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -150,13 +150,23 @@ class RenderSettings(object): redshift_render_presets = render_settings["redshift_renderer"] remove_aovs = render_settings["remove_aovs"] + all_rs_aovs = cmds.ls(type='RedshiftAOV') if remove_aovs: - aovs = cmds.ls(type='RedshiftAOV') - for aov in aovs: + for aov in all_rs_aovs: enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) + redshift_aovs = redshift_render_presets["aov_list"] + for rs_aov in redshift_aovs: + rs_renderlayer = rs_aov.replace(" ", "") + rs_layername = "rsAov_{}".format(rs_renderlayer) + if rs_layername in all_rs_aovs: + continue + cmds.rsCreateAov(type=rs_aov) + # update the AOV list + mel.eval("redshiftUpdateActiveAovList;") + additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] @@ -177,19 +187,26 @@ class RenderSettings(object): vray_render_presets = render_settings["vray_renderer"] # vrayRenderElement remove_aovs = render_settings["remove_aovs"] + all_vray_aovs = cmds.ls(type='VRayRenderElement') + lightSelect_aovs = cmds.ls(type='VRayRenderElementSet') if remove_aovs: - aovs = cmds.ls(type='VRayRenderElement') - for aov in aovs: + for aov in all_vray_aovs: # remove all aovs except LightSelect enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) # remove LightSelect - lightSelect_aovs = cmds.ls(type='VRayRenderElementSet') for light_aovs in lightSelect_aovs: light_enabled = cmds.getAttr("{}.enabled".format(light_aovs)) if light_enabled: cmds.delete(lightSelect_aovs) + + vray_aovs = vray_render_presets["aov_list"] + for renderlayer in vray_aovs: + renderElement = 
"vrayAddRenderElement {}".format(renderlayer) + RE_name = mel.eval(renderElement) + if RE_name.endswith("1"): # if there is more than one same render element + cmds.delete(RE_name) # Set aov separator # First we need to explicitly set the UI items in Render Settings # because that is also what V-Ray updates to when that Render Settings diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index c1bafc4108..512e45f674 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -209,74 +209,76 @@ "defaults": "empty", "enum_items": [ {"empty": "< empty >"}, - {"atmosphereChannel": "atmosphere"}, - {"backgroundChannel": "background"}, - {"bumpNormalsChannel": "bumpnormals"}, - {"causticsChannel": "caustics"}, - {"coatFilterChannel": "coat_filter"}, - {"coatGlossinessChannel": "coatGloss"}, - {"coatReflectionChannel": "coat_reflection"}, - {"vrayCoatChannel": "coat_specular"}, - {"CoverageChannel": "coverage"}, - {"cryptomatteChannel": "cryptomatte"}, - {"customColor": "custom_color"}, - {"drBucketChannel": "DR"}, - {"denoiserChannel": "denoiser"}, - {"diffuseChannel": "diffuse"}, - {"ExtraTexElement": "extraTex"}, - {"giChannel": "GI"}, - {"LightMixElement": "None"}, - {"lightingChannel": "lighting"}, - {"LightingAnalysisChannel": "LightingAnalysis"}, - {"materialIDChannel": "materialID"}, - {"MaterialSelectElement": "materialSelect"}, - {"matteShadowChannel": "matteShadow"}, - {"MultiMatteElement": "multimatte"}, - {"multimatteIDChannel": "multimatteID"}, - {"normalsChannel": "normals"}, - {"nodeIDChannel": "objectId"}, - {"objectSelectChannel": "objectSelect"}, - {"rawCoatFilterChannel": "raw_coat_filter"}, - {"rawCoatReflectionChannel": "raw_coat_reflection"}, - {"rawDiffuseFilterChannel": "rawDiffuseFilter"}, - {"rawGiChannel": "rawGI"}, - {"rawLightChannel": "rawLight"}, - {"rawReflectionChannel": "rawReflection"}, - {"rawReflectionFilterChannel": "rawReflectionFilter"}, - {"rawRefractionChannel": "rawRefraction"}, - {"rawRefractionFilterChannel": "rawRefractionFilter"}, - {"rawShadowChannel": "rawShadow"}, - {"rawSheenFilterChannel": "raw_sheen_filter"}, - {"rawSheenReflectionChannel": "raw_sheen_reflection"}, - {"rawTotalLightChannel": "rawTotalLight"}, - {"reflectIORChannel": "reflIOR"}, - {"reflectChannel": "reflect"}, - {"reflectionFilterChannel": "reflectionFilter"}, - {"reflectGlossinessChannel": "reflGloss"}, - {"refractChannel": "refract"}, - {"refractionFilterChannel": "refractionFilter"}, - {"refractGlossinessChannel": "refrGloss"}, - {"renderIDChannel": "renderId"}, - {"FastSSS2Channel": "SSS"}, - {"sampleRateChannel": "sampleRate"}, + {"atmosphereChannel": "atmosphereChannel"}, + {"backgroundChannel": "backgroundChannel"}, + {"bumpNormalsChannel": "bumpNormalsChannel"}, + {"causticsChannel": "causticsChannel"}, + {"coatFilterChannel": "coatFilterChannel"}, + {"coatGlossinessChannel": "coatGlossinessChannel"}, + {"coatReflectionChannel": "coatReflectionChannel"}, + {"vrayCoatChannel": "vrayCoatChannel"}, + {"CoverageChannel": "CoverageChannel"}, + {"cryptomatteChannel": "cryptomatteChannel"}, + {"customColor": "customColor"}, + {"drBucketChannel": "drBucketChannel"}, + {"denoiserChannel": "denoiserChannel"}, + {"diffuseChannel": "diffuseChannel"}, + {"ExtraTexElement": "ExtraTexElement"}, + 
{"giChannel": "giChannel"}, + {"LightMixElement": "LightMixElement"}, + {"LightSelectElement": "LightSelectElement"}, + {"lightingChannel": "lightingChannel"}, + {"LightingAnalysisChannel": "LightingAnalysisChannel"}, + {"materialIDChannel": "materialIDChannel"}, + {"MaterialSelectElement": "MaterialSelectElement"}, + {"matteShadowChannel": "matteShadowChannel"}, + {"metalnessChannel": "metalnessChannel"}, + {"MultiMatteElement": "MultiMatteElement"}, + {"multimatteIDChannel": "multimatteIDChannel"}, + {"noiseLevelChannel": "noiseLevelChannel"}, + {"normalsChannel": "normalsChannel"}, + {"nodeIDChannel": "nodeIDChannel"}, + {"objectSelectChannel": "objectSelectChannel"}, + {"rawCoatFilterChannel": "rawCoatFilterChannel"}, + {"rawCoatReflectionChannel": "rawCoatReflectionChannel"}, + {"rawDiffuseFilterChannel": "rawDiffuseFilterChannel"}, + {"rawGiChannel": "rawGiChannel"}, + {"rawLightChannel": "rawLightChannel"}, + {"rawReflectionChannel": "rawReflectionChannel"}, + {"rawReflectionFilterChannel": "rawReflectionFilterChannel"}, + {"rawRefractionChannel": "rawRefractionChannel"}, + {"rawRefractionFilterChannel": "rawRefractionFilterChannel"}, + {"rawShadowChannel": "rawShadowChannel"}, + {"rawSheenFilterChannel": "rawSheenFilterChannel"}, + {"rawSheenReflectionChannel": "rawSheenReflectionChannel"}, + {"rawTotalLightChannel": "rawTotalLightChannel"}, + {"reflectIORChannel": "reflectIORChannel"}, + {"reflectChannel": "reflectChannel"}, + {"reflectionFilterChannel": "reflectionFilterChannel"}, + {"reflectGlossinessChannel": "reflectGlossinessChannel"}, + {"refractChannel": "refractChannel"}, + {"refractionFilterChannel": "refractionFilterChannel"}, + {"refractGlossinessChannel": "refractGlossinessChannel"}, + {"renderIDChannel": "renderIDChannel"}, + {"FastSSS2Channel": "FastSSS2Channel"}, + {"sampleRateChannel": "sampleRateChannel"}, {"samplerInfo": "samplerInfo"}, - {"selfIllumChannel": "selfIllum"}, - {"shadowChannel": "shadow"}, - {"sheenFilterChannel": "sheen_filter"}, - {"sheenGlossinessChannel": "sheenGloss"}, - {"sheenReflectionChannel": "sheen_reflection"}, - {"vraySheenChannel": "sheen_specular"}, - {"specularChannel": "specular"}, + {"selfIllumChannel": "selfIllumChannel"}, + {"shadowChannel": "shadowChannel"}, + {"sheenFilterChannel": "sheenFilterChannel"}, + {"sheenGlossinessChannel": "sheenGlossinessChannel"}, + {"sheenReflectionChannel": "sheenReflectionChannel"}, + {"vraySheenChannel": "vraySheenChannel"}, + {"specularChannel": "specularChannel"}, {"Toon": "Toon"}, - {"toonLightingChannel": "toonLighting"}, - {"toonSpecularChannel": "toonSpecular"}, - {"totalLightChannel": "totalLight"}, - {"unclampedColorChannel": "unclampedColor"}, - {"VRScansPaintMaskChannel": "VRScansPaintMask"}, - {"VRScansZoneMaskChannel": "VRScansZoneMask"}, - {"velocityChannel": "velocity"}, - {"zdepthChannel": "zDepth"}, - {"LightSelectElement": "lightselect"} + {"toonLightingChannel": "toonLightingChannel"}, + {"toonSpecularChannel": "toonSpecularChannel"}, + {"totalLightChannel": "totalLightChannel"}, + {"unclampedColorChannel": "unclampedColorChannel"}, + {"VRScansPaintMaskChannel": "VRScansPaintMaskChannel"}, + {"VRScansZoneMaskChannel": "VRScansZoneMaskChannel"}, + {"velocityChannel": "velocityChannel"}, + {"zdepthChannel": "zdepthChannel"} ] }, { @@ -366,46 +368,46 @@ "defaults": "empty", "enum_items": [ {"empty": "< none >"}, - {"AO": "Ambient Occlusion"}, + {"Ambient Occlusion": "Ambient Occlusion"}, {"Background": "Background"}, {"Beauty": "Beauty"}, - {"BumpNormals": "Bump Normals"}, + 
{"Bump Normals": "Bump Normals"}, {"Caustics": "Caustics"}, - {"CausticsRaw": "Caustics Raw"}, + {"Caustics Raw": "Caustics Raw"}, {"Cryptomatte": "Cryptomatte"}, {"Custom": "Custom"}, - {"Z": "Depth"}, - {"DiffuseFilter": "Diffuse Filter"}, - {"DiffuseLighting": "Diffuse Lighting"}, - {"DiffuseLightingRaw": "Diffuse Lighting Raw"}, + {"Depth": "Depth"}, + {"Diffuse Filter": "Diffuse Filter"}, + {"Diffuse Lighting": "Diffuse Lighting"}, + {"Diffuse Lighting Raw": "Diffuse Lighting Raw"}, {"Emission": "Emission"}, - {"GI": "Global Illumination"}, - {"GIRaw": "Global Illumination Raw"}, + {"Global Illumination": "Global Illumination"}, + {"Global Illumination Raw": "Global Illumination Raw"}, {"Matte": "Matte"}, - {"MotionVectors": "Ambient Occlusion"}, - {"N": "Normals"}, - {"ID": "ObjectID"}, - {"ObjectBumpNormal": "Object-Space Bump Normals"}, - {"ObjectPosition": "Object-Space Positions"}, - {"PuzzleMatte": "Puzzle Matte"}, + {"Motion Vectors": "Motion Vectors"}, + {"Normals": "Normals"}, + {"ObjectID": "ObjectID"}, + {"Object-Space Bump Normals": "Object-Space Bump Normals"}, + {"Object-Space Positions": "Object-Space Positions"}, + {"Puzzle Matte": "Puzzle Matte"}, {"Reflections": "Reflections"}, - {"ReflectionsFilter": "Reflections Filter"}, - {"ReflectionsRaw": "Reflections Raw"}, + {"Reflections Filter": "Reflections Filter"}, + {"Reflections Raw": "Reflections Raw"}, {"Refractions": "Refractions"}, - {"RefractionsFilter": "Refractions Filter"}, - {"RefractionsRaw": "Refractions Filter"}, + {"Refractions Filter": "Refractions Filter"}, + {"Refractions Raw": "Refractions Filter"}, {"Shadows": "Shadows"}, {"SpecularLighting": "Specular Lighting"}, - {"SSS": "Sub Surface Scatter"}, - {"SSSRaw": "Sub Surface Scatter Raw"}, - {"TotalDiffuseLightingRaw": "Total Diffuse Lighting Raw"}, - {"TotalTransLightingRaw": "Total Translucency Filter"}, - {"TransTint": "Translucency Filter"}, - {"TransGIRaw": "Translucency Lighting Raw"}, - {"VolumeFogEmission": "Volume Fog Emission"}, - {"VolumeFogTint": "Volume Fog Tint"}, - {"VolumeLighting": "Volume Lighting"}, - {"P": "World Position"} + {"Sub Surface Scatter": "Sub Surface Scatter"}, + {"Sub Surface Scatter Raw": "Sub Surface Scatter Raw"}, + {"Total Diffuse Lighting Raw": "Total Diffuse Lighting Raw"}, + {"Total Translucency Filter": "Total Translucency Filter"}, + {"Translucency Filter": "Translucency Filter"}, + {"Translucency Lighting Raw": "Translucency Lighting Raw"}, + {"Volume Fog Emission": "Volume Fog Emission"}, + {"Volume Fog Tint": "Volume Fog Tint"}, + {"Volume Lighting": "Volume Lighting"}, + {"World Position": "World Position"} ] }, { From 5b7a4c1704bb4114380cc0b99ef00784d22089df Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Dec 2022 15:36:43 +0100 Subject: [PATCH 2368/2550] settings UI is using 'qtpy' instead of 'Qt' --- openpype/tools/settings/local_settings/apps_widget.py | 2 +- openpype/tools/settings/local_settings/environments_widget.py | 2 +- openpype/tools/settings/local_settings/experimental_widget.py | 2 +- openpype/tools/settings/local_settings/general_widget.py | 2 +- openpype/tools/settings/local_settings/mongo_widget.py | 2 +- openpype/tools/settings/local_settings/projects_widget.py | 2 +- openpype/tools/settings/local_settings/widgets.py | 2 +- openpype/tools/settings/local_settings/window.py | 2 +- openpype/tools/settings/settings/base.py | 2 +- openpype/tools/settings/settings/breadcrumbs_widget.py | 2 +- openpype/tools/settings/settings/categories.py | 2 +- 
openpype/tools/settings/settings/color_widget.py | 2 +- openpype/tools/settings/settings/constants.py | 2 +- openpype/tools/settings/settings/dialogs.py | 2 +- openpype/tools/settings/settings/dict_conditional.py | 2 +- openpype/tools/settings/settings/dict_mutable_widget.py | 2 +- openpype/tools/settings/settings/images/__init__.py | 2 +- openpype/tools/settings/settings/item_widgets.py | 2 +- openpype/tools/settings/settings/lib.py | 2 +- openpype/tools/settings/settings/list_item_widget.py | 2 +- openpype/tools/settings/settings/list_strict_widget.py | 2 +- openpype/tools/settings/settings/multiselection_combobox.py | 2 +- openpype/tools/settings/settings/search_dialog.py | 2 +- openpype/tools/settings/settings/tests.py | 2 +- openpype/tools/settings/settings/widgets.py | 2 +- openpype/tools/settings/settings/window.py | 2 +- openpype/tools/settings/settings/wrapper_widgets.py | 2 +- 27 files changed, 27 insertions(+), 27 deletions(-) diff --git a/openpype/tools/settings/local_settings/apps_widget.py b/openpype/tools/settings/local_settings/apps_widget.py index c1f350fcbc..ce1fc86c32 100644 --- a/openpype/tools/settings/local_settings/apps_widget.py +++ b/openpype/tools/settings/local_settings/apps_widget.py @@ -1,5 +1,5 @@ import platform -from Qt import QtWidgets +from qtpy import QtWidgets from .widgets import ( Separator, ExpandingWidget diff --git a/openpype/tools/settings/local_settings/environments_widget.py b/openpype/tools/settings/local_settings/environments_widget.py index 14ca517851..5008f086d2 100644 --- a/openpype/tools/settings/local_settings/environments_widget.py +++ b/openpype/tools/settings/local_settings/environments_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.tools.utils import PlaceholderLineEdit diff --git a/openpype/tools/settings/local_settings/experimental_widget.py b/openpype/tools/settings/local_settings/experimental_widget.py index 22ef952356..b0d9381663 100644 --- a/openpype/tools/settings/local_settings/experimental_widget.py +++ b/openpype/tools/settings/local_settings/experimental_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.tools.experimental_tools import ( ExperimentalTools, LOCAL_EXPERIMENTAL_KEY diff --git a/openpype/tools/settings/local_settings/general_widget.py b/openpype/tools/settings/local_settings/general_widget.py index 35add7573e..5a75c219dc 100644 --- a/openpype/tools/settings/local_settings/general_widget.py +++ b/openpype/tools/settings/local_settings/general_widget.py @@ -1,6 +1,6 @@ import getpass -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.lib import is_admin_password_required from openpype.widgets import PasswordDialog from openpype.tools.utils import PlaceholderLineEdit diff --git a/openpype/tools/settings/local_settings/mongo_widget.py b/openpype/tools/settings/local_settings/mongo_widget.py index 600ab79242..9549d6eb17 100644 --- a/openpype/tools/settings/local_settings/mongo_widget.py +++ b/openpype/tools/settings/local_settings/mongo_widget.py @@ -2,7 +2,7 @@ import os import sys import traceback -from Qt import QtWidgets +from qtpy import QtWidgets from pymongo.errors import ServerSelectionTimeoutError from openpype.lib import change_openpype_mongo_url diff --git a/openpype/tools/settings/local_settings/projects_widget.py b/openpype/tools/settings/local_settings/projects_widget.py index 30a0d212f0..b330d54dec 100644 --- a/openpype/tools/settings/local_settings/projects_widget.py +++ 
b/openpype/tools/settings/local_settings/projects_widget.py @@ -1,6 +1,6 @@ import platform import copy -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.tools.settings.settings import ProjectListWidget from openpype.tools.utils import PlaceholderLineEdit from openpype.settings.constants import ( diff --git a/openpype/tools/settings/local_settings/widgets.py b/openpype/tools/settings/local_settings/widgets.py index 2733aef187..f40978a66f 100644 --- a/openpype/tools/settings/local_settings/widgets.py +++ b/openpype/tools/settings/local_settings/widgets.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.tools.settings.settings.widgets import ( ExpandingWidget ) diff --git a/openpype/tools/settings/local_settings/window.py b/openpype/tools/settings/local_settings/window.py index 76c2d851e9..fdb05e219f 100644 --- a/openpype/tools/settings/local_settings/window.py +++ b/openpype/tools/settings/local_settings/window.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtGui +from qtpy import QtWidgets, QtGui from openpype import style diff --git a/openpype/tools/settings/settings/base.py b/openpype/tools/settings/settings/base.py index 6def284a83..074ecdae90 100644 --- a/openpype/tools/settings/settings/base.py +++ b/openpype/tools/settings/settings/base.py @@ -5,7 +5,7 @@ import traceback import functools import datetime -from Qt import QtWidgets, QtGui, QtCore +from qtpy import QtWidgets, QtGui, QtCore from openpype.settings.entities import ProjectSettings from openpype.tools.settings import CHILD_OFFSET diff --git a/openpype/tools/settings/settings/breadcrumbs_widget.py b/openpype/tools/settings/settings/breadcrumbs_widget.py index 7524bc61f0..2676d2f52d 100644 --- a/openpype/tools/settings/settings/breadcrumbs_widget.py +++ b/openpype/tools/settings/settings/breadcrumbs_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtGui, QtCore +from qtpy import QtWidgets, QtGui, QtCore PREFIX_ROLE = QtCore.Qt.UserRole + 1 LAST_SEGMENT_ROLE = QtCore.Qt.UserRole + 2 diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index e1b3943317..2e5ce496ed 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -2,7 +2,7 @@ import sys import traceback import contextlib from enum import Enum -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome from openpype.lib import get_openpype_version diff --git a/openpype/tools/settings/settings/color_widget.py b/openpype/tools/settings/settings/color_widget.py index b38b46f3cb..819bfb3581 100644 --- a/openpype/tools/settings/settings/color_widget.py +++ b/openpype/tools/settings/settings/color_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from .item_widgets import InputWidget diff --git a/openpype/tools/settings/settings/constants.py b/openpype/tools/settings/settings/constants.py index 23526e4de9..b2792d885b 100644 --- a/openpype/tools/settings/settings/constants.py +++ b/openpype/tools/settings/settings/constants.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore DEFAULT_PROJECT_LABEL = "< Default >" diff --git a/openpype/tools/settings/settings/dialogs.py b/openpype/tools/settings/settings/dialogs.py index b1b4daa1a0..38da3cf881 100644 --- a/openpype/tools/settings/settings/dialogs.py +++ b/openpype/tools/settings/settings/dialogs.py @@ -1,4 +1,4 @@ 
-from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.tools.utils.delegates import pretty_date diff --git a/openpype/tools/settings/settings/dict_conditional.py b/openpype/tools/settings/settings/dict_conditional.py index b2a7bb52a2..564603b258 100644 --- a/openpype/tools/settings/settings/dict_conditional.py +++ b/openpype/tools/settings/settings/dict_conditional.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from .widgets import ( ExpandingWidget, diff --git a/openpype/tools/settings/settings/dict_mutable_widget.py b/openpype/tools/settings/settings/dict_mutable_widget.py index 1c704b3cd5..b9932da789 100644 --- a/openpype/tools/settings/settings/dict_mutable_widget.py +++ b/openpype/tools/settings/settings/dict_mutable_widget.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from .base import BaseWidget from .lib import ( diff --git a/openpype/tools/settings/settings/images/__init__.py b/openpype/tools/settings/settings/images/__init__.py index 3ad65e114a..0b246349a8 100644 --- a/openpype/tools/settings/settings/images/__init__.py +++ b/openpype/tools/settings/settings/images/__init__.py @@ -1,5 +1,5 @@ import os -from Qt import QtGui +from qtpy import QtGui def get_image_path(image_filename): diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index 1ddee7efbe..d51f9b9684 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -1,6 +1,6 @@ import json -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.widgets.sliders import NiceSlider from openpype.tools.settings import CHILD_OFFSET diff --git a/openpype/tools/settings/settings/lib.py b/openpype/tools/settings/settings/lib.py index eef157812f..0bcf8a4e94 100644 --- a/openpype/tools/settings/settings/lib.py +++ b/openpype/tools/settings/settings/lib.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore from .widgets import SettingsToolBtn diff --git a/openpype/tools/settings/settings/list_item_widget.py b/openpype/tools/settings/settings/list_item_widget.py index cd1fd912ae..67ce4b9dc9 100644 --- a/openpype/tools/settings/settings/list_item_widget.py +++ b/openpype/tools/settings/settings/list_item_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.tools.settings import ( CHILD_OFFSET diff --git a/openpype/tools/settings/settings/list_strict_widget.py b/openpype/tools/settings/settings/list_strict_widget.py index f0a3022a50..b0b78e5732 100644 --- a/openpype/tools/settings/settings/list_strict_widget.py +++ b/openpype/tools/settings/settings/list_strict_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from .widgets import ( GridLabelWidget, diff --git a/openpype/tools/settings/settings/multiselection_combobox.py b/openpype/tools/settings/settings/multiselection_combobox.py index c2cc2a8fee..4cc81ff56e 100644 --- a/openpype/tools/settings/settings/multiselection_combobox.py +++ b/openpype/tools/settings/settings/multiselection_combobox.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets class ComboItemDelegate(QtWidgets.QStyledItemDelegate): diff --git a/openpype/tools/settings/settings/search_dialog.py b/openpype/tools/settings/settings/search_dialog.py index 
e6538cfe67..2860e7c943 100644 --- a/openpype/tools/settings/settings/search_dialog.py +++ b/openpype/tools/settings/settings/search_dialog.py @@ -1,7 +1,7 @@ import re import collections -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui ENTITY_LABEL_ROLE = QtCore.Qt.UserRole + 1 ENTITY_PATH_ROLE = QtCore.Qt.UserRole + 2 diff --git a/openpype/tools/settings/settings/tests.py b/openpype/tools/settings/settings/tests.py index fc53e38ad5..772d4618f7 100644 --- a/openpype/tools/settings/settings/tests.py +++ b/openpype/tools/settings/settings/tests.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore def indented_print(data, indent=0): diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index b8ad21e7e4..fd04cb0a23 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -1,6 +1,6 @@ import copy import uuid -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from openpype.client import get_projects diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index 77a2f64dac..f479908f7b 100644 --- a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtGui, QtCore +from qtpy import QtWidgets, QtGui, QtCore from openpype import style diff --git a/openpype/tools/settings/settings/wrapper_widgets.py b/openpype/tools/settings/settings/wrapper_widgets.py index b14a226912..0b45a9a01b 100644 --- a/openpype/tools/settings/settings/wrapper_widgets.py +++ b/openpype/tools/settings/settings/wrapper_widgets.py @@ -1,5 +1,5 @@ from uuid import uuid4 -from Qt import QtWidgets +from qtpy import QtWidgets from .widgets import ( ExpandingWidget, From dbe5872960fc96ee6ad5a17d8a6c289d6ee6f9ef Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Dec 2022 23:43:23 +0800 Subject: [PATCH 2369/2550] options for creating renderpasses in redshift and vray --- openpype/hosts/maya/api/lib_rendersettings.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index f9d24c3780..d9b79e3c2f 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -159,13 +159,16 @@ class RenderSettings(object): redshift_aovs = redshift_render_presets["aov_list"] for rs_aov in redshift_aovs: - rs_renderlayer = rs_aov.replace(" ", "") - rs_layername = "rsAov_{}".format(rs_renderlayer) + if " " in rs_aov: + rs_renderlayer = rs_aov.replace(" ", "") + rs_layername = "rsAov_{}".format(rs_renderlayer) + else: + rs_layername = "rsAov_{}".format(rs_aov) if rs_layername in all_rs_aovs: continue cmds.rsCreateAov(type=rs_aov) # update the AOV list - mel.eval("redshiftUpdateActiveAovList;") + mel.eval("redshiftUpdateActiveAovList") additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] @@ -205,7 +208,8 @@ class RenderSettings(object): for renderlayer in vray_aovs: renderElement = "vrayAddRenderElement {}".format(renderlayer) RE_name = mel.eval(renderElement) - if RE_name.endswith("1"): # if there is more than one same render element + # if there is more than one same render element + if RE_name.endswith("1"): cmds.delete(RE_name) # Set aov separator # First we need to explicitly set 
the UI items in Render Settings From d7cc795d1fdc2fd635488bcc5282217e4b36b9c5 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 16:59:43 +0800 Subject: [PATCH 2370/2550] gltf extractor for Maya --- openpype/hosts/maya/plugins/publish/collect_gltf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py index bbc4e31f92..bb37fe3a7e 100644 --- a/openpype/hosts/maya/plugins/publish/collect_gltf.py +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -13,5 +13,5 @@ class CollectGLTF(pyblish.api.InstancePlugin): if not instance.data.get("families"): instance.data["families"] = [] - if "fbx" not in instance.data["families"]: + if "gltf" not in instance.data["families"]: instance.data["families"].append("gltf") From 444d5cd7bcb1d483d9f373b981e866a74a82dd85 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 18:24:21 +0800 Subject: [PATCH 2371/2550] create master group for boudning box as root for publishing abc --- .../maya/plugins/publish/extract_proxy_abc.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index aa2a4b783c..feb174559f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -97,16 +97,15 @@ class ExtractProxyAlembic(publish.Extractor): end=end)) inst_selection = cmds.ls(nodes, long=True) - bbox = cmds.geomToBBox(inst_selection, - nameSuffix=name_suffix, - keepOriginal=True, - single=False, - bakeAnimation=True, - startTime=start, - endTime=end) - #TODO: fix the scale or disparenting for the group - # bbox_sel = cmds.listRelatives(bbox, parent=True) - # cmds.ls(bbox_sel, long=True) + cmds.geomToBBox(inst_selection, + nameSuffix=name_suffix, + keepOriginal=True, + single=False, + bakeAnimation=True, + startTime=start, + endTime=end) + # create master group for bounding + # boxes as the main root master_group = cmds.group(name="bbox_grp") bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) From c0319efd2ace058f682578805bd688b06bcf389d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Dec 2022 12:05:01 +0100 Subject: [PATCH 2372/2550] change import in settings init --- openpype/tools/settings/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/__init__.py b/openpype/tools/settings/__init__.py index 3e77a8348a..0bc166b437 100644 --- a/openpype/tools/settings/__init__.py +++ b/openpype/tools/settings/__init__.py @@ -1,5 +1,5 @@ import sys -from Qt import QtWidgets, QtGui +from qtpy import QtWidgets, QtGui from openpype import style from .lib import ( From 27cb6512cf708c8187465e52c66f84e837b3f521 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Dec 2022 12:19:30 +0100 Subject: [PATCH 2373/2550] added more collectors of plugin types and use them on openpype plugin installation --- openpype/modules/base.py | 61 ++++++++++++++++++++----- openpype/modules/interfaces.py | 73 +++++++++++++++++++++--------- openpype/pipeline/context_tools.py | 23 ++++++---- 3 files changed, 117 insertions(+), 40 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 4761462df0..0fd21492e8 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -786,23 +786,15 @@ class ModulesManager: 
).format(expected_keys, " | ".join(msg_items))) return output - def collect_creator_plugin_paths(self, host_name): - """Helper to collect creator plugin paths from modules. - - Args: - host_name (str): For which host are creators meants. - - Returns: - list: List of creator plugin paths. - """ - # Output structure + def _collect_plugin_paths(self, method_name, *args, **kwargs): output = [] for module in self.get_enabled_modules(): # Skip module that do not inherit from `IPluginPaths` if not isinstance(module, IPluginPaths): continue - paths = module.get_creator_plugin_paths(host_name) + method = getattr(module, method_name) + paths = method(*args, **kwargs) if paths: # Convert to list if value is not list if not isinstance(paths, (list, tuple, set)): @@ -810,6 +802,53 @@ class ModulesManager: output.extend(paths) return output + def collect_create_plugin_paths(self, host_name): + """Helper to collect creator plugin paths from modules. + + Args: + host_name (str): For which host are creators meant. + + Returns: + list: List of creator plugin paths. + """ + + return self._collect_plugin_paths( + "get_create_plugin_paths", + host_name + ) + + collect_creator_plugin_paths = collect_create_plugin_paths + + def collect_load_plugin_paths(self, host_name): + """Helper to collect load plugin paths from modules. + + Args: + host_name (str): For which host are load plugins meant. + + Returns: + list: List of load plugin paths. + """ + + return self._collect_plugin_paths( + "get_load_plugin_paths", + host_name + ) + + def collect_publish_plugin_paths(self, host_name): + """Helper to collect load plugin paths from modules. + + Args: + host_name (str): For which host are load plugins meant. + + Returns: + list: List of pyblish plugin paths. + """ + + return self._collect_plugin_paths( + "get_publish_plugin_paths", + host_name + ) + def get_host_module(self, host_name): """Find host module by host name. diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index f92ec6bf2d..d2c0dd5582 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -24,7 +24,7 @@ class OpenPypeInterface: Child classes of OpenPypeInterface may be used as mixin in different OpenPype modules which means they have to have implemented methods defined - in the interface. By default interface does not have any abstract parts. + in the interface. By default, interface does not have any abstract parts. """ pass @@ -44,40 +44,71 @@ class IPluginPaths(OpenPypeInterface): def get_plugin_paths(self): pass - def get_creator_plugin_paths(self, host_name): - """Retreive creator plugin paths. + def _get_plugin_paths_by_type(self, plugin_type): + paths = self.get_plugin_paths() + if not paths or plugin_type not in paths: + return [] - Give addons ability to add creator plugin paths based on host name. + paths = paths[plugin_type] + if not paths: + return [] - NOTES: - - Default implementation uses 'get_plugin_paths' and always return - all creator plugins. - - Host name may help to organize plugins by host, but each creator - alsomay have host filtering. + if not isinstance(paths, (list, tuple, set)): + paths = [paths] + return paths + + def get_create_plugin_paths(self, host_name): + """Receive create plugin paths. + + Give addons ability to add create plugin paths based on host name. + + Notes: + Default implementation uses 'get_plugin_paths' and always return + all create plugin paths. Args: host_name (str): For which host are the plugins meant. 
""" - paths = self.get_plugin_paths() - if not paths or "create" not in paths: - return [] + return self._get_plugin_paths_by_type("create") - create_paths = paths["create"] - if not create_paths: - return [] + def get_load_plugin_paths(self, host_name): + """Receive load plugin paths. - if not isinstance(create_paths, (list, tuple, set)): - create_paths = [create_paths] - return create_paths + Give addons ability to add load plugin paths based on host name. + + Notes: + Default implementation uses 'get_plugin_paths' and always return + all load plugin paths. + + Args: + host_name (str): For which host are the plugins meant. + """ + + return self._get_plugin_paths_by_type("load") + + def get_publish_plugin_paths(self, host_name): + """Receive publish plugin paths. + + Give addons ability to add publish plugin paths based on host name. + + Notes: + Default implementation uses 'get_plugin_paths' and always return + all publish plugin paths. + + Args: + host_name (str): For which host are the plugins meant. + """ + + return self._get_plugin_paths_by_type("publish") class ILaunchHookPaths(OpenPypeInterface): """Module has launch hook paths to return. - Modules does not have to inherit from this interface (changed 8.11.2022). - Module just have to have implemented 'get_launch_hook_paths' to be able use - the advantage. + Modules don't have to inherit from this interface (changed 8.11.2022). + Module just have to have implemented 'get_launch_hook_paths' to be able to + use the advantage. Expected result is list of paths. ["path/to/launch_hooks_dir"] diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 0ec19d50fe..da0ce8ecf4 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -158,17 +158,24 @@ def install_openpype_plugins(project_name=None, host_name=None): pyblish.api.register_discovery_filter(filter_pyblish_plugins) register_loader_plugin_path(LOAD_PATH) - modules_manager = _get_modules_manager() - publish_plugin_dirs = modules_manager.collect_plugin_paths()["publish"] - for path in publish_plugin_dirs: - pyblish.api.register_plugin_path(path) - if host_name is None: host_name = os.environ.get("AVALON_APP") - creator_paths = modules_manager.collect_creator_plugin_paths(host_name) - for creator_path in creator_paths: - register_creator_plugin_path(creator_path) + modules_manager = _get_modules_manager() + publish_plugin_dirs = modules_manager.collect_publish_plugin_paths( + host_name) + for path in publish_plugin_dirs: + pyblish.api.register_plugin_path(path) + + create_plugin_paths = modules_manager.collect_create_plugin_paths( + host_name) + for path in create_plugin_paths: + register_creator_plugin_path(path) + + load_plugin_paths = modules_manager.collect_load_plugin_paths( + host_name) + for path in load_plugin_paths: + register_loader_plugin_path(path) if project_name is None: project_name = os.environ.get("AVALON_PROJECT") From 367b7b262ea9131d7e9c184c704b852590dbb887 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Thu, 8 Dec 2022 13:06:10 +0100 Subject: [PATCH 2374/2550] changes for better support --- openpype/lib/path_templates.py | 2 +- openpype/pipeline/load/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index b160054e38..0f99efb430 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -422,7 +422,7 @@ class TemplateResult(str): cls = self.__class__ return cls( - 
os.path.normpath(self), + os.path.normpath(self.replace("\\", "/")), self.template, self.solved, self.used_values, diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index bfa9fe07c7..784d4628f3 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -555,7 +555,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): """ try: - template = repre_doc["data"]["template"].replace("\\", "/") + template = repre_doc["data"]["template"] except KeyError: raise InvalidRepresentationContext(( From 7c4c579fbe11ed248e6213925a04930e0467593f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 22:49:15 +0800 Subject: [PATCH 2375/2550] create master group for boudning box as root for publishing abc --- .../maya/plugins/publish/extract_proxy_abc.py | 34 +++++++++++++++++-- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index feb174559f..07c28a231a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -86,9 +86,8 @@ class ExtractProxyAlembic(publish.Extractor): remove_bb = instance.data.get("removeBoundingBoxAfterPublish") if remove_bb: - for bbox in proxy_root: - bounding_box = cmds.listRelatives(bbox, parent=True) - cmds.delete(bounding_box) + bbox_master = cmds.ls("bbox_grp") + cmds.delete(bbox_master) def create_proxy_geometry(self, instance, name_suffix, start, end): nodes = instance[:] @@ -104,9 +103,38 @@ class ExtractProxyAlembic(publish.Extractor): bakeAnimation=True, startTime=start, endTime=end) + # select the top group + self.top_hierarchy_selection() # create master group for bounding # boxes as the main root master_group = cmds.group(name="bbox_grp") bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel + + # find the top group of the bounding box transform + def top_hierarchy_selection(self): + targets = cmds.ls(sl=True, long=True) + top_grp_list = [] + for target in targets: + top_parent = None + stop = False + + while not stop: + top_grp = cmds.listRelatives(top_parent or target, + parent=True, + path=True) + if top_grp is None: + # the loop would be stopped + # after top group found + stop = True + else: + top_parent = top_grp[0] + + if top_grp: + self.log.debug('{} is the top group'.format(top_parent)) + if top_parent in top_grp_list: + continue + top_grp_list.append(top_parent) + + return cmds.select(top_grp_list) From 20400b51c995db486480636783336d00e0a8c162 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 16:11:26 +0100 Subject: [PATCH 2376/2550] :recycle: remove `exportSequence` flag --- .../hosts/maya/plugins/create/create_ass.py | 4 +-- .../hosts/maya/plugins/publish/collect_ass.py | 13 ++++--- .../hosts/maya/plugins/publish/extract_ass.py | 36 +++++++------------ .../defaults/project_settings/maya.json | 1 - 4 files changed, 19 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 903a8ef0cf..935a068ca5 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -13,7 +13,6 @@ class CreateAss(plugin.Creator): label = "Arnold Scene Source" family = "ass" icon = "cube" - exportSequence = False expandProcedurals = False motionBlur = True motionBlurKeys = 2 @@ -35,7 +34,6 
@@ class CreateAss(plugin.Creator): # Add animation data self.data.update(lib.collect_animation_data()) - self.data["exportSequence"] = self.exportSequence self.data["expandProcedurals"] = self.expandProcedurals self.data["motionBlur"] = self.motionBlur self.data["motionBlurKeys"] = self.motionBlurKeys @@ -56,7 +54,7 @@ class CreateAss(plugin.Creator): def process(self): instance = super(CreateAss, self).process() - nodes = list() + nodes = [] if (self.options or {}).get("useSelection"): nodes = cmds.ls(selection=True) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 69af4c777d..45ec5b124e 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,6 +1,7 @@ import re from maya import cmds +from openpype.pipeline.publish import KnownPublishError import pyblish.api @@ -26,16 +27,14 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - msg = "You have multiple proxy meshes, please only use one" - assert len(members) == 1, msg + if len(members) != 1: + msg = "You have multiple proxy meshes, please only use one" + raise KnownPublishError(msg) instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) - # Indicate to user that it'll be a single frame. - sequence = instance.data.get("exportSequence", False) - if not sequence: - group = re.compile(r" \[.*\]") - instance.data["label"] = group.sub("", instance.data["label"]) + group = re.compile(r" \[.*\]") + instance.data["label"] = group.sub("", instance.data["label"]) # Use camera in object set if present else default to render globals # camera. 
diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 7fc0cc1b2f..3442d47ae9 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -21,7 +21,7 @@ class ExtractAssStandin(publish.Extractor): staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) - filenames = list() + filenames = [] file_path = os.path.join(staging_dir, filename) # Mask @@ -78,33 +78,21 @@ class ExtractAssStandin(publish.Extractor): ) cmds.select(instance.data["setMembers"], noExpand=True) - if sequence: - self.log.info("Extracting ass sequence") + self.log.info("Extracting ass sequence") - # Collect the start and end including handles - kwargs.update({ - "start": instance.data.get("frameStartHandle", 1), - "end": instance.data.get("frameEndHandle", 1), - "step": instance.data.get("step", 0) - }) + # Collect the start and end including handles + kwargs.update({ + "start": instance.data.get("frameStartHandle", 1), + "end": instance.data.get("frameEndHandle", 1), + "step": instance.data.get("step", 0) + }) - exported_files = cmds.arnoldExportAss(**kwargs) + exported_files = cmds.arnoldExportAss(**kwargs) - for file in exported_files: - filenames.append(os.path.split(file)[1]) + for file in exported_files: + filenames.append(os.path.split(file)[1]) - self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - cmds.arnoldExportAss(**kwargs) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) + self.log.info("Exported: {}".format(filenames)) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index a74f8e5827..0b4ee704de 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -172,7 +172,6 @@ "defaults": [ "Main" ], - "exportSequence": false, "expandProcedurals": false, "motionBlur": true, "motionBlurKeys": 2, From bdc66ae574b606ee3b323cf429f1133a28ecd48a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 23:31:40 +0800 Subject: [PATCH 2377/2550] update removing bbox codes within the extractor --- .../maya/plugins/create/create_proxy_abc.py | 2 - .../maya/plugins/publish/extract_proxy_abc.py | 37 ++----------------- 2 files changed, 3 insertions(+), 36 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 2d81cb663b..2946f7b530 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -27,8 +27,6 @@ class CreateProxyAlembic(plugin.Creator): # Default to exporting world-space self.data["worldSpace"] = True - # remove the bbBox after publish - self.data["removeBoundingBoxAfterPublish"] = False # name suffix for the bounding box self.data["nameSuffix"] = "_BBox" diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index 07c28a231a..cf6351fdca 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -83,11 +83,9 @@ class 
ExtractProxyAlembic(publish.Extractor): instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) - - remove_bb = instance.data.get("removeBoundingBoxAfterPublish") - if remove_bb: - bbox_master = cmds.ls("bbox_grp") - cmds.delete(bbox_master) + # remove the bounding box + bbox_master = cmds.ls("bbox_grp") + cmds.delete(bbox_master) def create_proxy_geometry(self, instance, name_suffix, start, end): nodes = instance[:] @@ -103,38 +101,9 @@ class ExtractProxyAlembic(publish.Extractor): bakeAnimation=True, startTime=start, endTime=end) - # select the top group - self.top_hierarchy_selection() # create master group for bounding # boxes as the main root master_group = cmds.group(name="bbox_grp") bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel - - # find the top group of the bounding box transform - def top_hierarchy_selection(self): - targets = cmds.ls(sl=True, long=True) - top_grp_list = [] - for target in targets: - top_parent = None - stop = False - - while not stop: - top_grp = cmds.listRelatives(top_parent or target, - parent=True, - path=True) - if top_grp is None: - # the loop would be stopped - # after top group found - stop = True - else: - top_parent = top_grp[0] - - if top_grp: - self.log.debug('{} is the top group'.format(top_parent)) - if top_parent in top_grp_list: - continue - top_grp_list.append(top_parent) - - return cmds.select(top_grp_list) From a209140cd6f44ce62beb097f220a7fa3d21d1fa1 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 16:39:51 +0100 Subject: [PATCH 2378/2550] :bug: handle single frames --- openpype/hosts/maya/plugins/publish/extract_ass.py | 8 +++----- .../projects_schema/schemas/schema_maya_create.json | 5 ----- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 3442d47ae9..0678da6549 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -100,13 +100,11 @@ class ExtractAssStandin(publish.Extractor): representation = { 'name': 'ass', 'ext': 'ass', - 'files': filenames, - "stagingDir": staging_dir + 'files': filenames if len(filenames) > 1 else filenames[0], + "stagingDir": staging_dir, + 'frameStart': kwargs["start"] } - if sequence: - representation['frameStart'] = kwargs["start"] - instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 6cf11e4cea..f66b0181de 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -218,11 +218,6 @@ "label": "Default Subsets", "object_type": "text" }, - { - "type": "boolean", - "key": "exportSequence", - "label": "Export Sequence" - }, { "type": "boolean", "key": "expandProcedurals", From db4139fc3774b4f70999e1d06a8fe2491291dd40 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:05 +0000 Subject: [PATCH 2379/2550] Remove redundant viewport lib --- openpype/hosts/maya/api/viewport.py | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 openpype/hosts/maya/api/viewport.py diff --git 
a/openpype/hosts/maya/api/viewport.py b/openpype/hosts/maya/api/viewport.py deleted file mode 100644 index cbf78ab815..0000000000 --- a/openpype/hosts/maya/api/viewport.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -"""Tools for working with viewport in Maya.""" -import contextlib -from maya import cmds # noqa - - -@contextlib.contextmanager -def vp2_paused_context(): - """Context manager to stop updating of vp2 viewport.""" - state = cmds.ogs(pause=True, query=True) - - if not state: - cmds.ogs(pause=True) - - try: - yield - finally: - if cmds.ogs(pause=True, query=True) != state: - cmds.ogs(pause=True) From 8d8753b7293969374c26e733efea0452cc1b0048 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:17 +0000 Subject: [PATCH 2380/2550] Clean up collector --- openpype/hosts/maya/plugins/publish/collect_ass.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 45ec5b124e..b5e05d6665 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,5 +1,3 @@ -import re - from maya import cmds from openpype.pipeline.publish import KnownPublishError @@ -33,9 +31,6 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) - group = re.compile(r" \[.*\]") - instance.data["label"] = group.sub("", instance.data["label"]) - # Use camera in object set if present else default to render globals # camera. cameras = cmds.ls(type="camera", long=True) From 096cda17623121aaaad582068b200001b61e471b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:35 +0000 Subject: [PATCH 2381/2550] Fix frame flags. 
--- .../hosts/maya/plugins/publish/extract_ass.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 0678da6549..049f256a7a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -16,9 +16,6 @@ class ExtractAssStandin(publish.Extractor): asciiAss = False def process(self, instance): - - sequence = instance.data.get("exportSequence", False) - staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) filenames = [] @@ -60,6 +57,9 @@ class ExtractAssStandin(publish.Extractor): # Write out .ass file kwargs = { "filename": file_path, + "startFrame": instance.data.get("frameStartHandle", 1), + "endFrame": instance.data.get("frameEndHandle", 1), + "frameStep": instance.data.get("step", 1), "selected": True, "asciiAss": self.asciiAss, "shadowLinks": True, @@ -78,14 +78,9 @@ class ExtractAssStandin(publish.Extractor): ) cmds.select(instance.data["setMembers"], noExpand=True) - self.log.info("Extracting ass sequence") - - # Collect the start and end including handles - kwargs.update({ - "start": instance.data.get("frameStartHandle", 1), - "end": instance.data.get("frameEndHandle", 1), - "step": instance.data.get("step", 0) - }) + self.log.info( + "Extracting ass sequence with: {}".format(kwargs) + ) exported_files = cmds.arnoldExportAss(**kwargs) @@ -102,7 +97,7 @@ class ExtractAssStandin(publish.Extractor): 'ext': 'ass', 'files': filenames if len(filenames) > 1 else filenames[0], "stagingDir": staging_dir, - 'frameStart': kwargs["start"] + 'frameStart': kwargs["startFrame"] } instance.data["representations"].append(representation) From 834edafa472d97f0944b40a87ac44015a9d27007 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 22:20:14 +0100 Subject: [PATCH 2382/2550] :art: support for unreal engine 5.1 --- openpype/hosts/unreal/api/pipeline.py | 8 +++++++- .../hosts/unreal/hooks/pre_workfile_preparation.py | 1 + .../unreal/plugins/publish/collect_instances.py | 13 ++++++++----- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index d396b64072..db5b121d14 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -2,6 +2,7 @@ import os import logging from typing import List +import semver import pyblish.api @@ -21,6 +22,8 @@ import unreal # noqa logger = logging.getLogger("openpype.hosts.unreal") OPENPYPE_CONTAINERS = "OpenPypeContainers" +UNREAL_VERSION = semver.VersionInfo( + *os.getenv("OPENPYPE_UNREAL_VERSION").split(".")) HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.unreal.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") @@ -40,6 +43,7 @@ class UnrealHost(HostBase, ILoadHost): name = "unreal" def install(self): + version = UNREAL_VERSION install() def get_containers(self): @@ -111,7 +115,9 @@ def ls(): """ ar = unreal.AssetRegistryHelpers.get_asset_registry() - openpype_containers = ar.get_assets_by_class("AssetContainer", True) + # UE 5.1 changed how class name is specified + class_name = ["/Script", "AssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AssetContainer" # noqa + openpype_containers = ar.get_assets_by_class(class_name, True) # get_asset_by_class returns AssetData. To get all metadata we need to # load asset. 
get_tag_values() work only on metadata registered in diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py index 4ae72593e9..2dc6fb9f42 100644 --- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py @@ -150,6 +150,7 @@ class UnrealPrelaunchHook(PreLaunchHook): engine_path=Path(engine_path) ) + self.launch_context.env["OPENPYPE_UNREAL_VERSION"] = engine_version # Append project file to launch arguments self.launch_context.launch_args.append( f"\"{project_file.as_posix()}\"") diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py index 2f604cb322..db968330c6 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_instances.py @@ -3,6 +3,8 @@ import ast import unreal # noqa import pyblish.api +from openpype.hosts.unreal.api.pipeline import UNREAL_VERSION +from openpype.pipeline.publish import KnownPublishError class CollectInstances(pyblish.api.ContextPlugin): @@ -23,8 +25,10 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() - instance_containers = ar.get_assets_by_class( - "OpenPypePublishInstance", True) + class_name = ["/Script", + "AssetContainer"] if UNREAL_VERSION.major == 5 and \ + UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa + instance_containers = ar.get_assets_by_class(class_name, True) for container_data in instance_containers: asset = container_data.get_asset() @@ -32,9 +36,8 @@ class CollectInstances(pyblish.api.ContextPlugin): data["objectName"] = container_data.asset_name # convert to strings data = {str(key): str(value) for (key, value) in data.items()} - assert data.get("family"), ( - "instance has no family" - ) + if not data.get("family"): + raise KnownPublishError("instance has no family") # content of container members = ast.literal_eval(data.get("members")) From 151bb60679da425784d74b4e1fd9f5f6c47dee18 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 22:25:47 +0100 Subject: [PATCH 2383/2550] :rotating_light: fix Hound --- openpype/hosts/unreal/api/pipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index db5b121d14..839465881d 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -23,7 +23,8 @@ import unreal # noqa logger = logging.getLogger("openpype.hosts.unreal") OPENPYPE_CONTAINERS = "OpenPypeContainers" UNREAL_VERSION = semver.VersionInfo( - *os.getenv("OPENPYPE_UNREAL_VERSION").split(".")) + *os.getenv("OPENPYPE_UNREAL_VERSION").split(".") +) HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.unreal.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") @@ -43,7 +44,6 @@ class UnrealHost(HostBase, ILoadHost): name = "unreal" def install(self): - version = UNREAL_VERSION install() def get_containers(self): From a8969fb5c2e1560e58a269a75165289d25f3c02e Mon Sep 17 00:00:00 2001 From: Felix David Date: Fri, 9 Dec 2022 09:57:10 +0100 Subject: [PATCH 2384/2550] Feature: API token refreshed every week --- openpype/modules/kitsu/utils/sync_service.py | 14 +++++++++----- pyproject.toml | 2 +- website/docs/module_kitsu.md | 2 ++ 3 files changed, 12 insertions(+), 6 deletions(-) diff --git 
a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 441b95a7ec..237746bea0 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -1,12 +1,9 @@ import os +import threading import gazu -from openpype.client import ( - get_project, - get_assets, - get_asset_by_name -) +from openpype.client import get_project, get_assets, get_asset_by_name from openpype.pipeline import AvalonMongoDB from .credentials import validate_credentials from .update_op_with_zou import ( @@ -397,6 +394,13 @@ def start_listeners(login: str, password: str): login (str): Kitsu user login password (str): Kitsu user password """ + # Refresh token every week + def refresh_token_every_week(): + print("Refreshing token...") + gazu.refresh_token() + threading.Timer(7 * 3600 * 24, refresh_token_every_week).start() + + refresh_token_every_week() # Connect to server listener = Listener(login, password) diff --git a/pyproject.toml b/pyproject.toml index f74f40c561..20e676dcde 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ Click = "^7" dnspython = "^2.1.0" ftrack-python-api = "^2.3.3" shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} -gazu = "^0.8.28" +gazu = "^0.8.32" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) jsonschema = "^2.6.0" keyring = "^22.0.1" diff --git a/website/docs/module_kitsu.md b/website/docs/module_kitsu.md index ec38cce5e1..73e31a280b 100644 --- a/website/docs/module_kitsu.md +++ b/website/docs/module_kitsu.md @@ -26,6 +26,8 @@ openpype_console module kitsu sync-service -l me@domain.ext -p my_password ### Events listening Listening to Kitsu events is the key to automation of many tasks like _project/episode/sequence/shot/asset/task create/update/delete_ and some more. Events listening should run at all times to perform the required processing as it is not possible to catch some of them retrospectively with strong reliability. If such timeout has been encountered, you must relaunch the `sync-service` command to run the synchronization step again. +Connection token is refreshed every week. + ### Push to Kitsu An utility function is provided to help update Kitsu data (a.k.a Zou database) with OpenPype data if the publishing to the production tracker hasn't been possible for some time. Running `push-to-zou` will create the data on behalf of the user. :::caution From fb3236675b90b023f98a83a7ada7e9aef278ca91 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 9 Dec 2022 10:07:14 +0000 Subject: [PATCH 2385/2550] Fix cmds.refresh not queryable --- openpype/hosts/maya/api/lib.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index b2bbb823aa..787a8fd8ad 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -128,13 +128,18 @@ def get_main_window(): @contextlib.contextmanager def suspended_refresh(suspend=True): - """Suspend viewport refreshes""" - original_state = cmds.refresh(query=True, suspend=True) + """Suspend viewport refreshes + + cmds.ogs(pause=True) is a toggle so we cant pass False. 
+ """ + original_state = cmds.ogs(query=True, pause=True) try: - cmds.refresh(suspend=suspend) + if suspend and not original_state: + cmds.ogs(pause=True) yield finally: - cmds.refresh(suspend=original_state) + if suspend and not original_state: + cmds.ogs(pause=True) @contextlib.contextmanager From 67a45524a2c1bc331fce1650f633bc6a86274573 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 9 Dec 2022 10:07:45 +0000 Subject: [PATCH 2386/2550] Fix inverted refresh boolean --- openpype/hosts/maya/plugins/publish/extract_pointcache.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 23b76a48c2..7ed73fd5b0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,7 +86,8 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - with suspended_refresh(suspend=instance.data.get("refresh", False)): + suspend = not instance.data.get("refresh", False) + with suspended_refresh(suspend=suspend): with maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic( From 853bc962179713a195c7630423a59e050306917c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Dec 2022 11:25:43 +0100 Subject: [PATCH 2387/2550] nuke: fix subset name search in imageio override nodes --- openpype/hosts/nuke/api/lib.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index bde06e4fd7..cc5e0a94a1 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -611,7 +611,7 @@ def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): if ( onode["subsets"] - and not any(re.search(s, subset) for s in onode["subsets"]) + and not any(re.search(s, subset.lower()) for s in onode["subsets"]) ): continue @@ -694,7 +694,8 @@ def get_imageio_node_override_setting( # find matching override node override_imageio_node = None for onode in override_nodes: - log.info(onode) + log.debug("__ onode: {}".format(onode)) + log.debug("__ subset: {}".format(subset)) if node_class not in onode["nukeNodeClass"]: continue @@ -703,7 +704,7 @@ def get_imageio_node_override_setting( if ( onode["subsets"] - and not any(re.search(s, subset) for s in onode["subsets"]) + and not any(re.search(s, subset.lower()) for s in onode["subsets"]) ): continue From f51325543a1965f63e5dd70980227311b735c962 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Dec 2022 11:32:54 +0100 Subject: [PATCH 2388/2550] change default command for headless mode --- openpype/cli.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index d24cd4a872..7611915d84 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -29,8 +29,14 @@ def main(ctx): It wraps different commands together. """ + if ctx.invoked_subcommand is None: - ctx.invoke(tray) + # Default command for headless openpype is 'interactive' command + # otherwise 'tray' is used. 
+ if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1": + ctx.invoke(interactive) + else: + ctx.invoke(tray) @main.command() From b713a2e0c6546fb1c017d6d3a6000d433eaf3dde Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Dec 2022 18:54:27 +0800 Subject: [PATCH 2389/2550] resolve the conflict for the project scheme --- .../schemas/projects_schema/schemas/schema_maya_create.json | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 198b399e75..231554d96e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -247,10 +247,6 @@ "key": "CreateMultiverseUsdOver", "label": "Create Multiverse USD Override" }, - { - "key": "CreateAss", - "label": "Create Ass" - }, { "key": "CreateAssembly", "label": "Create Assembly" From 8a533e59b28c306615eb3e5a415f62217780e734 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Dec 2022 19:18:28 +0800 Subject: [PATCH 2390/2550] resolve conflict --- .../schemas/schema_maya_create.json | 93 +------------------ 1 file changed, 1 insertion(+), 92 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index e1a3082616..231554d96e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -230,98 +230,7 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CreateAss", - "label": "Create Ass", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "list", - "key": "defaults", - "label": "Default Subsets", - "object_type": "text" - }, - { - "type": "boolean", - "key": "expandProcedurals", - "label": "Expand Procedurals" - }, - { - "type": "boolean", - "key": "motionBlur", - "label": "Motion Blur" - }, - { - "type": "number", - "key": "motionBlurKeys", - "label": "Motion Blur Keys", - "minimum": 0 - }, - { - "type": "number", - "key": "motionBlurLength", - "label": "Motion Blur Length", - "decimal": 3 - }, - { - "type": "boolean", - "key": "maskOptions", - "label": "Mask Options" - }, - { - "type": "boolean", - "key": "maskCamera", - "label": "Mask Camera" - }, - { - "type": "boolean", - "key": "maskLight", - "label": "Mask Light" - }, - { - "type": "boolean", - "key": "maskShape", - "label": "Mask Shape" - }, - { - "type": "boolean", - "key": "maskShader", - "label": "Mask Shader" - }, - { - "type": "boolean", - "key": "maskOverride", - "label": "Mask Override" - }, - { - "type": "boolean", - "key": "maskDriver", - "label": "Mask Driver" - }, - { - "type": "boolean", - "key": "maskFilter", - "label": "Mask Filter" - }, - { - "type": "boolean", - "key": "maskColor_manager", - "label": "Mask Color Manager" - }, - { - "type": "boolean", - "key": "maskOperator", - "label": "Mask Operator" - } - ] - }, + { "type": "schema_template", "name": "template_create_plugin", From b52018fc6231655d1a923e5c2cdd8c3903708be0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Dec 2022 19:21:17 +0800 Subject: [PATCH 2391/2550] resolve conflicts --- CHANGELOG.md | 84 +++- HISTORY.md | 114 +++++ openpype/action.py | 20 +- 
openpype/client/entities.py | 7 +- .../hooks/pre_copy_last_published_workfile.py | 2 +- openpype/host/interfaces.py | 2 +- .../plugins/publish/extract_abc_animation.py | 72 +++ openpype/hosts/celaction/__init__.py | 10 + openpype/hosts/celaction/addon.py | 31 ++ openpype/hosts/celaction/api/__init__.py | 1 - openpype/hosts/celaction/api/cli.py | 87 ---- .../hooks/pre_celaction_registers.py | 122 ------ .../celaction/hooks/pre_celaction_setup.py | 137 ++++++ .../publish/collect_celaction_cli_kwargs.py | 30 +- .../publish/collect_celaction_instances.py | 12 +- .../plugins/publish/collect_render_path.py | 23 +- openpype/hosts/celaction/scripts/__init__.py | 0 .../hosts/celaction/scripts/publish_cli.py | 37 ++ openpype/hosts/flame/api/plugin.py | 72 ++- .../hosts/flame/plugins/load/load_clip.py | 7 +- .../flame/plugins/load/load_clip_batch.py | 6 +- openpype/hosts/hiero/addon.py | 5 + openpype/hosts/hiero/api/__init__.py | 19 +- openpype/hosts/hiero/api/lib.py | 226 ++++++++-- openpype/hosts/hiero/api/pipeline.py | 146 +++++-- openpype/hosts/hiero/api/tags.py | 18 +- .../hosts/hiero/plugins/load/load_effects.py | 308 +++++++++++++ .../plugins/publish/collect_clip_effects.py | 3 + .../plugins/publish/precollect_instances.py | 2 +- openpype/hosts/houdini/api/__init__.py | 30 +- openpype/hosts/houdini/api/lib.py | 207 +++++++-- openpype/hosts/houdini/api/pipeline.py | 189 +++++--- openpype/hosts/houdini/api/plugin.py | 200 ++++++++- openpype/hosts/houdini/api/workio.py | 57 --- .../houdini/plugins/create/convert_legacy.py | 74 ++++ .../plugins/create/create_alembic_camera.py | 49 ++- .../plugins/create/create_arnold_ass.py | 46 +- .../plugins/create/create_composite.py | 52 ++- .../houdini/plugins/create/create_hda.py | 72 ++- .../plugins/create/create_pointcache.py | 61 +-- .../plugins/create/create_redshift_proxy.py | 42 +- .../plugins/create/create_redshift_rop.py | 56 ++- .../houdini/plugins/create/create_usd.py | 40 +- .../plugins/create/create_usdrender.py | 39 +- .../plugins/create/create_vbd_cache.py | 38 +- .../houdini/plugins/create/create_workfile.py | 93 ++++ .../plugins/publish/collect_active_state.py | 3 +- .../plugins/publish/collect_current_file.py | 38 +- .../houdini/plugins/publish/collect_frames.py | 25 +- .../plugins/publish/collect_instances.py | 9 +- .../plugins/publish/collect_output_node.py | 2 +- .../plugins/publish/collect_redshift_rop.py | 2 +- .../publish/collect_render_products.py | 2 +- .../plugins/publish/collect_usd_bootstrap.py | 2 +- .../plugins/publish/collect_usd_layers.py | 10 +- .../plugins/publish/extract_alembic.py | 4 +- .../houdini/plugins/publish/extract_ass.py | 12 +- .../plugins/publish/extract_composite.py | 31 +- .../houdini/plugins/publish/extract_hda.py | 6 +- .../plugins/publish/extract_redshift_proxy.py | 4 +- .../houdini/plugins/publish/extract_usd.py | 3 +- .../plugins/publish/extract_usd_layered.py | 2 +- .../plugins/publish/extract_vdb_cache.py | 4 +- .../publish/help/validate_vdb_input_node.xml | 21 + .../plugins/publish/increment_current_file.py | 6 +- .../houdini/plugins/publish/save_scene.py | 6 +- .../plugins/publish/valiate_vdb_input_node.py | 47 -- .../validate_abc_primitive_to_detail.py | 40 +- .../publish/validate_alembic_face_sets.py | 9 +- .../publish/validate_alembic_input_node.py | 29 +- .../publish/validate_animation_settings.py | 3 +- .../plugins/publish/validate_bypass.py | 15 +- .../plugins/publish/validate_camera_rop.py | 47 +- .../publish/validate_cop_output_node.py | 34 +- .../publish/validate_file_extension.py | 15 +- 
.../plugins/publish/validate_frame_token.py | 3 +- .../validate_houdini_license_category.py | 10 +- .../publish/validate_mkpaths_toggled.py | 13 +- .../plugins/publish/validate_no_errors.py | 11 +- .../validate_primitive_hierarchy_paths.py | 34 +- .../publish/validate_remote_publish.py | 27 +- .../validate_remote_publish_enabled.py | 11 +- .../publish/validate_sop_output_node.py | 21 +- .../validate_usd_layer_path_backslashes.py | 12 +- .../publish/validate_usd_model_and_shade.py | 10 +- .../publish/validate_usd_output_node.py | 11 +- .../validate_usd_render_product_names.py | 7 +- .../plugins/publish/validate_usd_setdress.py | 10 +- .../validate_usd_shade_model_exists.py | 9 +- .../publish/validate_usd_shade_workspace.py | 25 +- .../publish/validate_vdb_input_node.py | 13 +- .../publish/validate_vdb_output_node.py | 12 +- .../publish/validate_workfile_paths.py | 17 +- .../hosts/houdini/startup/MainMenuCommon.xml | 10 +- .../houdini/startup/python2.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.9libs/pythonrc.py | 6 +- openpype/hosts/max/__init__.py | 10 + openpype/hosts/max/addon.py | 16 + openpype/hosts/max/api/__init__.py | 20 + openpype/hosts/max/api/lib.py | 122 ++++++ openpype/hosts/max/api/menu.py | 130 ++++++ openpype/hosts/max/api/pipeline.py | 145 +++++++ openpype/hosts/max/api/plugin.py | 111 +++++ openpype/hosts/max/hooks/set_paths.py | 17 + openpype/hosts/max/plugins/__init__.py | 0 .../max/plugins/create/create_pointcache.py | 22 + .../hosts/max/plugins/load/load_pointcache.py | 65 +++ .../max/plugins/publish/collect_workfile.py | 63 +++ .../max/plugins/publish/extract_pointcache.py | 100 +++++ .../plugins/publish/validate_scene_saved.py | 18 + openpype/hosts/max/startup/startup.ms | 9 + openpype/hosts/max/startup/startup.py | 6 + openpype/hosts/maya/api/gltf.py | 88 ++++ openpype/hosts/maya/api/lib.py | 8 +- openpype/hosts/maya/api/lib_renderproducts.py | 19 +- .../hosts/maya/plugins/create/create_ass.py | 48 +- .../maya/plugins/create/create_pointcache.py | 1 + .../hosts/maya/plugins/publish/collect_ass.py | 19 +- .../maya/plugins/publish/collect_gltf.py | 17 + .../maya/plugins/publish/collect_look.py | 4 +- .../hosts/maya/plugins/publish/extract_ass.py | 120 ++--- .../maya/plugins/publish/extract_gltf.py | 65 +++ .../maya/plugins/publish/extract_look.py | 2 +- .../maya/plugins/publish/extract_playblast.py | 6 + .../plugins/publish/extract_pointcache.py | 12 +- .../maya/plugins/publish/extract_thumbnail.py | 5 + openpype/hosts/nuke/addon.py | 5 + openpype/hosts/nuke/api/lib.py | 2 +- openpype/hosts/nuke/api/pipeline.py | 3 + .../nuke/plugins/load/load_camera_abc.py | 3 + openpype/hosts/nuke/plugins/load/load_clip.py | 3 + .../hosts/nuke/plugins/load/load_effects.py | 3 + .../nuke/plugins/load/load_effects_ip.py | 3 + .../hosts/nuke/plugins/load/load_image.py | 8 +- .../hosts/nuke/plugins/load/load_model.py | 4 + .../nuke/plugins/load/load_script_precomp.py | 3 + .../plugins/publish/extract_slate_frame.py | 2 +- .../plugins/create/create_legacy_image.py | 5 +- .../publish/validate_texture_workfiles.py | 27 +- .../plugins/create/create_online.py | 96 ++++ .../plugins/publish/collect_online_file.py | 23 + .../plugins/publish/validate_online_file.py | 32 ++ .../plugins/publish/extract_sequence.py | 5 +- .../Private/OpenPypePublishInstance.cpp | 177 +++++--- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 102 ++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- 
.../Private/OpenPypePublishInstance.cpp | 180 +++++--- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 96 +++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- .../publish/collect_published_files.py | 16 +- openpype/lib/attribute_definitions.py | 52 ++- openpype/lib/file_transaction.py | 87 ++-- openpype/lib/path_templates.py | 2 +- openpype/lib/transcoding.py | 76 +++- openpype/lib/vendor_bin_utils.py | 83 ++-- .../publish/submit_celaction_deadline.py | 106 ++--- .../plugins/publish/submit_publish_job.py | 9 +- .../custom/plugins/CelAction/CelAction.ico | Bin 0 -> 103192 bytes .../custom/plugins/CelAction/CelAction.param | 38 ++ .../custom/plugins/CelAction/CelAction.py | 122 ++++++ .../custom/plugins/GlobalJobPreLoad.py | 362 +++++++++++----- openpype/modules/ftrack/lib/avalon_sync.py | 22 +- .../plugins/publish/integrate_ftrack_api.py | 73 ++-- .../publish/integrate_ftrack_description.py | 2 +- .../plugins/publish/integrate_ftrack_note.py | 2 +- .../publish/integrate_hierarchy_ftrack.py | 409 +++++++++++------- .../ftrack/scripts/sub_event_status.py | 11 + .../plugins/publish/integrate_kitsu_review.py | 1 - .../plugins/publish/collect_slack_family.py | 6 +- .../plugins/publish/integrate_slack_api.py | 35 +- openpype/pipeline/create/context.py | 3 +- openpype/pipeline/create/creator_plugins.py | 5 +- openpype/pipeline/publish/publish_plugins.py | 26 +- .../publish/collect_anatomy_instance_data.py | 2 +- openpype/plugins/publish/collect_audio.py | 177 +++++--- openpype/plugins/publish/collect_comment.py | 126 +++++- .../plugins/publish/collect_resources_path.py | 1 + openpype/plugins/publish/extract_burnin.py | 2 +- .../publish/extract_hierarchy_avalon.py | 367 +++++++++------- openpype/plugins/publish/extract_review.py | 71 +-- .../publish/extract_thumbnail_from_source.py | 1 + openpype/plugins/publish/integrate.py | 6 +- openpype/plugins/publish/integrate_legacy.py | 3 +- .../plugins/publish/integrate_thumbnail.py | 65 ++- openpype/resources/app_icons/3dsmax.png | Bin 0 -> 12804 bytes openpype/resources/app_icons/celaction.png | Bin 0 -> 4012 bytes .../resources/app_icons/celaction_local.png | Bin 40783 -> 0 bytes .../resources/app_icons/celaction_remotel.png | Bin 36400 -> 0 bytes .../defaults/project_anatomy/templates.json | 8 +- .../defaults/project_settings/celaction.json | 12 +- .../defaults/project_settings/deadline.json | 10 + .../defaults/project_settings/global.json | 15 + .../defaults/project_settings/maya.json | 21 +- .../defaults/project_settings/tvpaint.json | 5 + .../system_settings/applications.json | 35 +- openpype/settings/entities/enum_entity.py | 1 + .../schema_project_celaction.json | 39 +- .../schema_project_deadline.json | 50 +++ .../schema_project_tvpaint.json | 12 + .../schemas/schema_anatomy_attributes.json | 12 +- .../schemas/schema_global_publish.json | 21 + .../schemas/schema_maya_create.json | 93 +++- .../schemas/schema_maya_publish.json | 16 +- .../host_settings/schema_3dsmax.json | 39 ++ .../host_settings/schema_celaction.json | 4 +- .../system_schema/schema_applications.json | 4 + openpype/style/style.css | 4 + openpype/tools/attribute_defs/widgets.py | 42 +- openpype/tools/creator/model.py | 2 +- .../project_manager/project_manager/view.py | 2 +- openpype/tools/publisher/widgets/__init__.py | 2 + .../publisher/widgets/card_view_widgets.py | 11 +- .../publisher/widgets/list_view_widgets.py | 7 + .../publisher/widgets/overview_widget.py | 14 + .../tools/publisher/widgets/tabs_widget.py | 12 + 
.../publisher/widgets/validations_widget.py | 2 +- openpype/tools/publisher/widgets/widgets.py | 204 ++++++++- openpype/tools/publisher/window.py | 198 ++++++++- openpype/tools/settings/settings/constants.py | 1 - .../widgets/widget_drop_frame.py | 2 +- openpype/tools/utils/host_tools.py | 16 +- .../vendor/python/python_2/secrets/LICENSE | 21 + .../python/python_2/secrets/__init__.py | 16 + .../vendor/python/python_2/secrets/secrets.py | 132 ++++++ openpype/version.py | 2 +- setup.cfg | 3 +- tests/README.md | 10 + tests/conftest.py | 12 + tests/integration/hosts/aftereffects/lib.py | 22 +- ...=> test_publish_in_aftereffects_legacy.py} | 40 +- ...test_publish_in_aftereffects_multiframe.py | 64 --- tests/integration/hosts/maya/lib.py | 19 +- .../hosts/maya/test_publish_in_maya.py | 57 ++- tests/integration/hosts/nuke/lib.py | 32 +- .../hosts/nuke/test_publish_in_nuke.py | 25 +- tests/integration/hosts/photoshop/lib.py | 11 +- .../photoshop/test_publish_in_photoshop.py | 6 +- tests/lib/db_handler.py | 23 +- tests/lib/testing_classes.py | 68 ++- tests/resources/test_data.zip | Bin 7350 -> 5098 bytes tests/unit/igniter/test_bootstrap_repos.py | 30 +- tools/run_mongo.ps1 | 2 + website/docs/dev_build.md | 4 +- website/docs/dev_requirements.md | 2 +- website/yarn.lock | 6 +- 248 files changed, 7288 insertions(+), 2337 deletions(-) create mode 100644 openpype/hosts/blender/plugins/publish/extract_abc_animation.py create mode 100644 openpype/hosts/celaction/addon.py delete mode 100644 openpype/hosts/celaction/api/__init__.py delete mode 100644 openpype/hosts/celaction/api/cli.py delete mode 100644 openpype/hosts/celaction/hooks/pre_celaction_registers.py create mode 100644 openpype/hosts/celaction/hooks/pre_celaction_setup.py create mode 100644 openpype/hosts/celaction/scripts/__init__.py create mode 100644 openpype/hosts/celaction/scripts/publish_cli.py create mode 100644 openpype/hosts/hiero/plugins/load/load_effects.py delete mode 100644 openpype/hosts/houdini/api/workio.py create mode 100644 openpype/hosts/houdini/plugins/create/convert_legacy.py create mode 100644 openpype/hosts/houdini/plugins/create/create_workfile.py create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml delete mode 100644 openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py create mode 100644 openpype/hosts/max/__init__.py create mode 100644 openpype/hosts/max/addon.py create mode 100644 openpype/hosts/max/api/__init__.py create mode 100644 openpype/hosts/max/api/lib.py create mode 100644 openpype/hosts/max/api/menu.py create mode 100644 openpype/hosts/max/api/pipeline.py create mode 100644 openpype/hosts/max/api/plugin.py create mode 100644 openpype/hosts/max/hooks/set_paths.py create mode 100644 openpype/hosts/max/plugins/__init__.py create mode 100644 openpype/hosts/max/plugins/create/create_pointcache.py create mode 100644 openpype/hosts/max/plugins/load/load_pointcache.py create mode 100644 openpype/hosts/max/plugins/publish/collect_workfile.py create mode 100644 openpype/hosts/max/plugins/publish/extract_pointcache.py create mode 100644 openpype/hosts/max/plugins/publish/validate_scene_saved.py create mode 100644 openpype/hosts/max/startup/startup.ms create mode 100644 openpype/hosts/max/startup/startup.py create mode 100644 openpype/hosts/maya/api/gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_gltf.py create mode 100644 
openpype/hosts/traypublisher/plugins/create/create_online.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_online_file.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/validate_online_file.py rename openpype/{hosts/celaction => modules/deadline}/plugins/publish/submit_celaction_deadline.py (73%) create mode 100644 openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico create mode 100644 openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param create mode 100644 openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py create mode 100644 openpype/resources/app_icons/3dsmax.png create mode 100644 openpype/resources/app_icons/celaction.png delete mode 100644 openpype/resources/app_icons/celaction_local.png delete mode 100644 openpype/resources/app_icons/celaction_remotel.png create mode 100644 openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json create mode 100644 openpype/vendor/python/python_2/secrets/LICENSE create mode 100644 openpype/vendor/python/python_2/secrets/__init__.py create mode 100644 openpype/vendor/python/python_2/secrets/secrets.py rename tests/integration/hosts/aftereffects/{test_publish_in_aftereffects.py => test_publish_in_aftereffects_legacy.py} (58%) delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 707b61676f..3cca692b68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,88 @@ # Changelog -## [3.14.6](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) + + +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values 
[\#4116](https://github.com/pypeclub/OpenPype/pull/4116) +- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website 
[\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + +## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) ### 📖 Documentation diff --git a/HISTORY.md b/HISTORY.md index f6cc74e114..f4e132488b 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,119 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) + +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116) +- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add thumbnail 
sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + +## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) + +### 📖 Documentation + +- Documentation: Minor updates to dev\_requirements.md [\#4025](https://github.com/pypeclub/OpenPype/pull/4025) + +**🆕 New features** + +- Nuke: add 13.2 variant [\#4041](https://github.com/pypeclub/OpenPype/pull/4041) + +**🚀 Enhancements** + +- Publish Report Viewer: Store reports locally on machine [\#4040](https://github.com/pypeclub/OpenPype/pull/4040) +- General: More specific error in burnins script [\#4026](https://github.com/pypeclub/OpenPype/pull/4026) +- General: Extract review does not crash with old settings overrides [\#4023](https://github.com/pypeclub/OpenPype/pull/4023) +- Publisher: Convertors for legacy instances [\#4020](https://github.com/pypeclub/OpenPype/pull/4020) +- workflows: adding milestone creator and assigner 
[\#4018](https://github.com/pypeclub/OpenPype/pull/4018) +- Publisher: Catch creator errors [\#4015](https://github.com/pypeclub/OpenPype/pull/4015) + +**🐛 Bug fixes** + +- Hiero - effect collection fixes [\#4038](https://github.com/pypeclub/OpenPype/pull/4038) +- Nuke - loader clip correct hash conversion in path [\#4037](https://github.com/pypeclub/OpenPype/pull/4037) +- Maya: Soft fail when applying capture preset [\#4034](https://github.com/pypeclub/OpenPype/pull/4034) +- Igniter: handle missing directory [\#4032](https://github.com/pypeclub/OpenPype/pull/4032) +- StandalonePublisher: Fix thumbnail publishing [\#4029](https://github.com/pypeclub/OpenPype/pull/4029) +- Experimental Tools: Fix publisher import [\#4027](https://github.com/pypeclub/OpenPype/pull/4027) +- Houdini: fix wrong path in ASS loader [\#4016](https://github.com/pypeclub/OpenPype/pull/4016) + +**🔀 Refactored code** + +- General: Import lib functions from lib [\#4017](https://github.com/pypeclub/OpenPype/pull/4017) + ## [3.14.5](https://github.com/pypeclub/OpenPype/tree/3.14.5) (2022-10-24) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...3.14.5) diff --git a/openpype/action.py b/openpype/action.py index de9cdee010..15c96404b6 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -72,17 +72,19 @@ def get_errored_plugins_from_data(context): return get_errored_plugins_from_context(context) -# 'RepairAction' and 'RepairContextAction' were moved to -# 'openpype.pipeline.publish' please change you imports. -# There is no "reasonable" way hot mark these classes as deprecated to show -# warning of wrong import. -# Deprecated since 3.14.* will be removed in 3.16.* class RepairAction(pyblish.api.Action): """Repairs the action To process the repairing this requires a static `repair(instance)` method is available on the plugin. + Deprecated: + 'RepairAction' and 'RepairContextAction' were moved to + 'openpype.pipeline.publish' please change you imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. Deprecated since 3.14.* will be + removed in 3.16.* + """ label = "Repair" on = "failed" # This action is only available on a failed plug-in @@ -103,13 +105,19 @@ class RepairAction(pyblish.api.Action): plugin.repair(instance) -# Deprecated since 3.14.* will be removed in 3.16.* class RepairContextAction(pyblish.api.Action): """Repairs the action To process the repairing this requires a static `repair(instance)` method is available on the plugin. + Deprecated: + 'RepairAction' and 'RepairContextAction' were moved to + 'openpype.pipeline.publish' please change you imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. Deprecated since 3.14.* will be + removed in 3.16.* + """ label = "Repair" on = "failed" # This action is only available on a failed plug-in diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43afccf2f1..c415be8816 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -389,10 +389,11 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): returned if 'None' is passed. Returns: - None: If subset with specified filters was not found. - Dict: Subset document which can be reduced to specified 'fields'. - """ + Union[None, Dict[str, Any]]: None if subset with specified filters was + not found or dict subset document which can be reduced to + specified 'fields'. 
+ """ if not subset_name: return None diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 44144e5fff..26b43c39cb 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -38,7 +38,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): sync_server = self.modules_manager.get("sync_server") if not sync_server or not sync_server.enabled: - self.log.deubg("Sync server module is not enabled or available") + self.log.debug("Sync server module is not enabled or available") return # Check there is no workfile available diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py index 3b2df745d1..999aefd254 100644 --- a/openpype/host/interfaces.py +++ b/openpype/host/interfaces.py @@ -252,7 +252,7 @@ class IWorkfileHost: Remove when all usages are replaced. """ - self.save_workfile() + self.save_workfile(dst_path) def open_file(self, filepath): """Deprecated variant of 'open_workfile'. diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py new file mode 100644 index 0000000000..e141ccaa44 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -0,0 +1,72 @@ +import os + +import bpy + +from openpype.pipeline import publish +from openpype.hosts.blender.api import plugin + + +class ExtractAnimationABC(publish.Extractor): + """Extract as ABC.""" + + label = "Extract Animation ABC" + hosts = ["blender"] + families = ["animation"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.abc" + filepath = os.path.join(stagingdir, filename) + + context = bpy.context + + # Perform extraction + self.log.info("Performing extraction..") + + plugin.deselect_all() + + selected = [] + asset_group = None + + objects = [] + for obj in instance: + if isinstance(obj, bpy.types.Collection): + for child in obj.all_objects: + objects.append(child) + for obj in objects: + children = [o for o in bpy.data.objects if o.parent == obj] + for child in children: + objects.append(child) + + for obj in objects: + obj.select_set(True) + selected.append(obj) + + context = plugin.create_blender_context( + active=asset_group, selected=selected) + + # We export the abc + bpy.ops.wm.alembic_export( + context, + filepath=filepath, + selected=True, + flatten=False + ) + + plugin.deselect_all() + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, representation) diff --git a/openpype/hosts/celaction/__init__.py b/openpype/hosts/celaction/__init__.py index e69de29bb2..8983d48d7d 100644 --- a/openpype/hosts/celaction/__init__.py +++ b/openpype/hosts/celaction/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + CELACTION_ROOT_DIR, + CelactionAddon, +) + + +__all__ = ( + "CELACTION_ROOT_DIR", + "CelactionAddon", +) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py new file mode 100644 index 0000000000..9158010011 --- /dev/null +++ b/openpype/hosts/celaction/addon.py @@ -0,0 +1,31 @@ +import os +from openpype.modules import OpenPypeModule, IHostAddon + +CELACTION_ROOT_DIR = 
os.path.dirname(os.path.abspath(__file__)) + + +class CelactionAddon(OpenPypeModule, IHostAddon): + name = "celaction" + host_name = "celaction" + + def initialize(self, module_settings): + self.enabled = True + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(CELACTION_ROOT_DIR, "hooks") + ] + + def add_implementation_envs(self, env, _app): + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".scn"] diff --git a/openpype/hosts/celaction/api/__init__.py b/openpype/hosts/celaction/api/__init__.py deleted file mode 100644 index 8c93d93738..0000000000 --- a/openpype/hosts/celaction/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -kwargs = None diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py deleted file mode 100644 index 88fc11cafb..0000000000 --- a/openpype/hosts/celaction/api/cli.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -import sys -import copy -import argparse - -import pyblish.api -import pyblish.util - -import openpype.hosts.celaction -from openpype.lib import Logger -from openpype.hosts.celaction import api as celaction -from openpype.tools.utils import host_tools -from openpype.pipeline import install_openpype_plugins - - -log = Logger.get_logger("Celaction_cli_publisher") - -publish_host = "celaction" - -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") - - -def cli(): - parser = argparse.ArgumentParser(prog="celaction_publish") - - parser.add_argument("--currentFile", - help="Pass file to Context as `currentFile`") - - parser.add_argument("--chunk", - help=("Render chanks on farm")) - - parser.add_argument("--frameStart", - help=("Start of frame range")) - - parser.add_argument("--frameEnd", - help=("End of frame range")) - - parser.add_argument("--resolutionWidth", - help=("Width of resolution")) - - parser.add_argument("--resolutionHeight", - help=("Height of resolution")) - - celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ - - -def _prepare_publish_environments(): - """Prepares environments based on request data.""" - env = copy.deepcopy(os.environ) - - project_name = os.getenv("AVALON_PROJECT") - asset_name = os.getenv("AVALON_ASSET") - - env["AVALON_PROJECT"] = project_name - env["AVALON_ASSET"] = asset_name - env["AVALON_TASK"] = os.getenv("AVALON_TASK") - env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR") - env["AVALON_APP"] = f"hosts.{publish_host}" - env["AVALON_APP_NAME"] = "celaction/local" - - env["PYBLISH_HOSTS"] = publish_host - - os.environ.update(env) - - -def main(): - # prepare all environments - _prepare_publish_environments() - - # Registers pype's Global pyblish plugins - install_openpype_plugins() - - if os.path.exists(PUBLISH_PATH): - log.info(f"Registering path: {PUBLISH_PATH}") - pyblish.api.register_plugin_path(PUBLISH_PATH) - - pyblish.api.register_host(publish_host) - - return host_tools.show_publish() - - -if __name__ == "__main__": - cli() - result = main() - sys.exit(not bool(result)) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_registers.py deleted file mode 100644 index e49e66f163..0000000000 --- a/openpype/hosts/celaction/hooks/pre_celaction_registers.py +++ 
/dev/null @@ -1,122 +0,0 @@ -import os -import shutil -import winreg -from openpype.lib import PreLaunchHook -from openpype.hosts.celaction import api as celaction - - -class CelactionPrelaunchHook(PreLaunchHook): - """ - Bootstrap celacion with pype - """ - workfile_ext = "scn" - app_groups = ["celaction"] - platforms = ["windows"] - - def execute(self): - # Add workfile path to launch arguments - workfile_path = self.workfile_path() - if workfile_path: - self.launch_context.launch_args.append(workfile_path) - - project_name = self.data["project_name"] - asset_name = self.data["asset_name"] - task_name = self.data["task_name"] - - # get publish version of celaction - app = "celaction_publish" - - # setting output parameters - path = r"Software\CelAction\CelAction2D\User Settings" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey( - winreg.HKEY_CURRENT_USER, - "Software\\CelAction\\CelAction2D\\User Settings", 0, - winreg.KEY_ALL_ACCESS) - - # TODO: this will need to be checked more thoroughly - pype_exe = os.getenv("OPENPYPE_EXECUTABLE") - - winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe) - - parameters = [ - "launch", - f"--app {app}", - f"--project {project_name}", - f"--asset {asset_name}", - f"--task {task_name}", - "--currentFile \\\"\"*SCENE*\"\\\"", - "--chunk 10", - "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*", - # "--programDir \"'*PROGPATH*'\"" - ] - winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters)) - - # setting resolution parameters - path = r"Software\CelAction\CelAction2D\User Settings\Dialogs" - path += r"\SubmitOutput" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) - winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) - winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) - winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) - - # making sure message dialogs don't appear when overwriting - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\OverwriteScene" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) - winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6) - winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) - - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\SceneSaved" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) - winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1) - winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) - - def workfile_path(self): - workfile_path = self.data["last_workfile_path"] - - # copy workfile from template if doesnt exist any on path - if not os.path.exists(workfile_path): - # TODO add ability to set different template workfile path via - # settings - pype_celaction_dir = os.path.dirname(os.path.dirname( - os.path.abspath(celaction.__file__) - )) - template_path = os.path.join( - pype_celaction_dir, - "resources", - "celaction_template_scene.scn" - ) - - if not os.path.exists(template_path): - self.log.warning( - "Couldn't find workfile template file in {}".format( - template_path - ) - ) - return - - self.log.info( - f"Creating workfile from template: \"{template_path}\"" - ) - - # Copy template workfile to new destinantion - shutil.copy2( - 
os.path.normpath(template_path), - os.path.normpath(workfile_path) - ) - - self.log.info(f"Workfile to open: \"{workfile_path}\"") - - return workfile_path diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py new file mode 100644 index 0000000000..62cebf99ed --- /dev/null +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -0,0 +1,137 @@ +import os +import shutil +import winreg +import subprocess +from openpype.lib import PreLaunchHook, get_openpype_execute_args +from openpype.hosts.celaction import scripts + +CELACTION_SCRIPTS_DIR = os.path.dirname( + os.path.abspath(scripts.__file__) +) + + +class CelactionPrelaunchHook(PreLaunchHook): + """ + Bootstrap celacion with pype + """ + app_groups = ["celaction"] + platforms = ["windows"] + + def execute(self): + asset_doc = self.data["asset_doc"] + width = asset_doc["data"]["resolutionWidth"] + height = asset_doc["data"]["resolutionHeight"] + + # Add workfile path to launch arguments + workfile_path = self.workfile_path() + if workfile_path: + self.launch_context.launch_args.append(workfile_path) + + # setting output parameters + path_user_settings = "\\".join([ + "Software", "CelAction", "CelAction2D", "User Settings" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_user_settings, 0, + winreg.KEY_ALL_ACCESS + ) + + path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py") + subproces_args = get_openpype_execute_args("run", path_to_cli) + openpype_executable = subproces_args.pop(0) + + winreg.SetValueEx( + hKey, + "SubmitAppTitle", + 0, + winreg.REG_SZ, + openpype_executable + ) + + parameters = subproces_args + [ + "--currentFile", "*SCENE*", + "--chunk", "*CHUNK*", + "--frameStart", "*START*", + "--frameEnd", "*END*", + "--resolutionWidth", "*X*", + "--resolutionHeight", "*Y*" + ] + + winreg.SetValueEx( + hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, + subprocess.list2cmdline(parameters) + ) + + # setting resolution parameters + path_submit = "\\".join([ + path_user_settings, "Dialogs", "SubmitOutput" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_submit, 0, + winreg.KEY_ALL_ACCESS + ) + winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) + winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width) + winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height) + + # making sure message dialogs don't appear when overwriting + path_overwrite_scene = "\\".join([ + path_user_settings, "Messages", "OverwriteScene" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0, + winreg.KEY_ALL_ACCESS + ) + winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6) + winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) + + # set scane as not saved + path_scene_saved = "\\".join([ + path_user_settings, "Messages", "SceneSaved" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_scene_saved, 0, + winreg.KEY_ALL_ACCESS + ) + winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1) + winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) + + def workfile_path(self): + workfile_path = self.data["last_workfile_path"] + + # copy workfile from template if doesnt exist any on path + if not os.path.exists(workfile_path): + # TODO add 
ability to set different template workfile path via + # settings + openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR) + template_path = os.path.join( + openpype_celaction_dir, + "resources", + "celaction_template_scene.scn" + ) + + if not os.path.exists(template_path): + self.log.warning( + "Couldn't find workfile template file in {}".format( + template_path + ) + ) + return + + self.log.info( + f"Creating workfile from template: \"{template_path}\"" + ) + + # Copy template workfile to new destinantion + shutil.copy2( + os.path.normpath(template_path), + os.path.normpath(workfile_path) + ) + + self.log.info(f"Workfile to open: \"{workfile_path}\"") + + return workfile_path diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index 15c5ddaf1c..bf97dd744b 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,7 @@ import pyblish.api -from openpype.hosts.celaction import api as celaction +import argparse +import sys +from pprint import pformat class CollectCelactionCliKwargs(pyblish.api.Collector): @@ -9,15 +11,31 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): order = pyblish.api.Collector.order - 0.1 def process(self, context): - kwargs = celaction.kwargs.copy() + parser = argparse.ArgumentParser(prog="celaction") + parser.add_argument("--currentFile", + help="Pass file to Context as `currentFile`") + parser.add_argument("--chunk", + help=("Render chanks on farm")) + parser.add_argument("--frameStart", + help=("Start of frame range")) + parser.add_argument("--frameEnd", + help=("End of frame range")) + parser.add_argument("--resolutionWidth", + help=("Width of resolution")) + parser.add_argument("--resolutionHeight", + help=("Height of resolution")) + passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__ - self.log.info("Storing kwargs: %s" % kwargs) - context.set_data("kwargs", kwargs) + self.log.info("Storing kwargs ...") + self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs))) + + # set kwargs to context data + context.set_data("passingKwargs", passing_kwargs) # get kwargs onto context data as keys with values - for k, v in kwargs.items(): + for k, v in passing_kwargs.items(): self.log.info(f"Setting `{k}` to instance.data with value: `{v}`") if k in ["frameStart", "frameEnd"]: - context.data[k] = kwargs[k] = int(v) + context.data[k] = passing_kwargs[k] = int(v) else: context.data[k] = v diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index 1d2d9da1af..35ac7fc264 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -36,7 +36,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): "version": version } - celaction_kwargs = context.data.get("kwargs", {}) + celaction_kwargs = context.data.get( + "passingKwargs", {}) if celaction_kwargs: shared_instance_data.update(celaction_kwargs) @@ -52,8 +53,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): "subset": subset, "label": scene_file, "family": family, - "families": [family, "ftrack"], - "representations": list() + "families": [], + "representations": [] }) # adding basic script data @@ -72,7 +73,6 @@ class 
CollectCelactionInstances(pyblish.api.ContextPlugin): self.log.info('Publishing Celaction workfile') # render instance - family = "render.farm" subset = f"render{task}Main" instance = context.create_instance(name=subset) # getting instance state @@ -81,8 +81,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): # add assetEntity data into instance instance.data.update({ "label": "{} - farm".format(subset), - "family": family, - "families": [family], + "family": "render.farm", + "families": [], "subset": subset }) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index 9cbb0e4880..f6db6c000d 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -11,28 +11,31 @@ class CollectRenderPath(pyblish.api.InstancePlugin): families = ["render.farm"] # Presets - anatomy_render_key = None - publish_render_metadata = None + output_extension = "png" + anatomy_template_key_render_files = None + anatomy_template_key_metadata = None def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) - anatomy_data["family"] = "render" padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ "frame": f"%0{padding}d", - "representation": "png" + "family": "render", + "representation": self.output_extension, + "ext": self.output_extension }) anatomy_filled = anatomy.format(anatomy_data) # get anatomy rendering keys - anatomy_render_key = self.anatomy_render_key or "render" - publish_render_metadata = self.publish_render_metadata or "render" + r_anatomy_key = self.anatomy_template_key_render_files + m_anatomy_key = self.anatomy_template_key_metadata # get folder and path for rendering images from celaction - render_dir = anatomy_filled[anatomy_render_key]["folder"] - render_path = anatomy_filled[anatomy_render_key]["path"] + render_dir = anatomy_filled[r_anatomy_key]["folder"] + render_path = anatomy_filled[r_anatomy_key]["path"] + self.log.debug("__ render_path: `{}`".format(render_path)) # create dir if it doesnt exists try: @@ -46,9 +49,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin): instance.data["path"] = render_path # get anatomy for published renders folder path - if anatomy_filled.get(publish_render_metadata): + if anatomy_filled.get(m_anatomy_key): instance.data["publishRenderMetadataFolder"] = anatomy_filled[ - publish_render_metadata]["folder"] + m_anatomy_key]["folder"] self.log.info("Metadata render path: `{}`".format( instance.data["publishRenderMetadataFolder"] )) diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/celaction/scripts/publish_cli.py b/openpype/hosts/celaction/scripts/publish_cli.py new file mode 100644 index 0000000000..39d3f1a94d --- /dev/null +++ b/openpype/hosts/celaction/scripts/publish_cli.py @@ -0,0 +1,37 @@ +import os +import sys + +import pyblish.api +import pyblish.util + +import openpype.hosts.celaction +from openpype.lib import Logger +from openpype.tools.utils import host_tools +from openpype.pipeline import install_openpype_plugins + + +log = Logger.get_logger("celaction") + +PUBLISH_HOST = "celaction" +HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__)) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PUBLISH_PATH = 
os.path.join(PLUGINS_DIR, "publish") + + +def main(): + # Registers pype's Global pyblish plugins + install_openpype_plugins() + + if os.path.exists(PUBLISH_PATH): + log.info(f"Registering path: {PUBLISH_PATH}") + pyblish.api.register_plugin_path(PUBLISH_PATH) + + pyblish.api.register_host(PUBLISH_HOST) + pyblish.api.register_target("local") + + return host_tools.show_publish() + + +if __name__ == "__main__": + result = main() + sys.exit(not bool(result)) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 092ce9d106..ca113fd98a 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -4,13 +4,13 @@ import shutil from copy import deepcopy from xml.etree import ElementTree as ET +import qargparse from Qt import QtCore, QtWidgets -import qargparse from openpype import style -from openpype.settings import get_current_project_settings from openpype.lib import Logger from openpype.pipeline import LegacyCreator, LoaderPlugin +from openpype.settings import get_current_project_settings from . import constants from . import lib as flib @@ -596,18 +596,28 @@ class PublishableClip: if not hero_track and self.vertical_sync: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): - hero_data.update({"heroTrack": False}) - if _in == self.clip_in and _out == self.clip_out: + """ + Since only one instance of hero clip is expected in + `self.vertical_clip_match`, this will loop only once + until none hero clip will be matched with hero clip. + + `tag_hierarchy_data` will be set only once for every + clip which is not hero clip. + """ + _hero_data = deepcopy(hero_data) + _hero_data.update({"heroTrack": False}) + if _in <= self.clip_in and _out >= self.clip_out: data_subset = hero_data["subset"] # add track index in case duplicity of names in hero data if self.subset in data_subset: - hero_data["subset"] = self.subset + str( + _hero_data["subset"] = self.subset + str( self.track_index) # in case track name and subset name is the same then add if self.subset_name == self.track_name: - hero_data["subset"] = self.subset + _hero_data["subset"] = self.subset # assing data to return hierarchy data to tag - tag_hierarchy_data = hero_data + tag_hierarchy_data = _hero_data + break # add data to return data dict self.marker_data.update(tag_hierarchy_data) @@ -690,6 +700,54 @@ class ClipLoader(LoaderPlugin): ) ] + _mapping = None + + def get_colorspace(self, context): + """Get colorspace name + + Look either to version data or representation data. + + Args: + context (dict): version context data + + Returns: + str: colorspace name or None + """ + version = context['version'] + version_data = version.get("data", {}) + colorspace = version_data.get( + "colorspace", None + ) + + if ( + not colorspace + or colorspace == "Unknown" + ): + colorspace = context["representation"]["data"].get( + "colorspace", None) + + return colorspace + + @classmethod + def get_native_colorspace(cls, input_colorspace): + """Return native colorspace name. 
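+ The OCIO-to-Flame name mapping is read lazily from the project's "flame" imageio settings ("profilesMapping" -> "inputs") and cached on the class in '_mapping'; colorspaces without a mapping resolve to None.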
+ + Args: + input_colorspace (str | None): colorspace name + + Returns: + str: native colorspace name defined in mapping or None + """ + if not cls._mapping: + settings = get_current_project_settings()["flame"] + mapping = settings["imageio"]["profilesMapping"]["inputs"] + cls._mapping = { + input["ocioName"]: input["flameName"] + for input in mapping + } + + return cls._mapping.get(input_colorspace) + class OpenClipSolver(flib.MediaInfoFile): create_new_clip = False diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 0843dde76a..f8cb7b3e11 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -36,14 +36,15 @@ class LoadClip(opfapi.ClipLoader): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) + clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path workfile_dir = os.environ["AVALON_WORKDIR"] diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 17ad8075e4..048ac19431 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -35,7 +35,7 @@ class LoadClipBatch(opfapi.ClipLoader): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) # in case output is not in context replace key to representation if not context["representation"]["context"].get("output"): @@ -47,10 +47,10 @@ class LoadClipBatch(opfapi.ClipLoader): clip_name = StringTemplate(self.clip_name_template).format( formating_data) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"] diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py index f5bb94dbaa..1cc7a8637e 100644 --- a/openpype/hosts/hiero/addon.py +++ b/openpype/hosts/hiero/addon.py @@ -27,7 +27,12 @@ class HieroAddon(OpenPypeModule, IHostAddon): new_hiero_paths.append(norm_path) env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + # Remove auto screen scale factor for Qt + # - let Hiero decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index 781f846bbe..1fa40c9f74 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,9 +30,15 @@ from .lib import ( get_timeline_selection, get_current_track, get_track_item_tags, + get_track_openpype_tag, + set_track_openpype_tag, + 
get_track_openpype_data, get_track_item_pype_tag, set_track_item_pype_tag, get_track_item_pype_data, + get_trackitem_openpype_tag, + set_trackitem_openpype_tag, + get_trackitem_openpype_data, set_publish_attribute, get_publish_attribute, imprint, @@ -85,9 +91,12 @@ __all__ = [ "get_timeline_selection", "get_current_track", "get_track_item_tags", - "get_track_item_pype_tag", - "set_track_item_pype_tag", - "get_track_item_pype_data", + "get_track_openpype_tag", + "set_track_openpype_tag", + "get_track_openpype_data", + "get_trackitem_openpype_tag", + "set_trackitem_openpype_tag", + "get_trackitem_openpype_data", "set_publish_attribute", "get_publish_attribute", "imprint", @@ -99,6 +108,10 @@ __all__ = [ "apply_colorspace_project", "apply_colorspace_clips", "get_sequence_pattern_and_padding", + # depricated + "get_track_item_pype_tag", + "set_track_item_pype_tag", + "get_track_item_pype_data", # plugins "CreatorWidget", diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index e5d35945af..7f0cf8149a 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -7,11 +7,15 @@ import os import re import sys import platform +import functools +import warnings +import json import ast +import secrets import shutil import hiero -from Qt import QtWidgets +from Qt import QtWidgets, QtCore, QtXml from openpype.client import get_project from openpype.settings import get_project_settings @@ -20,15 +24,51 @@ from openpype.pipeline.load import filter_containers from openpype.lib import Logger from . import tags -try: - from PySide.QtCore import QFile, QTextStream - from PySide.QtXml import QDomDocument -except ImportError: - from PySide2.QtCore import QFile, QTextStream - from PySide2.QtXml import QDomDocument -# from opentimelineio import opentime -# from pprint import pformat +class DeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + log = Logger.get_logger(__name__) @@ -301,7 +341,124 @@ def get_track_item_tags(track_item): return returning_tag_data +def _get_tag_unique_hash(): + # sourcery skip: avoid-builtin-shadow + return secrets.token_hex(nbytes=4) + + +def set_track_openpype_tag(track, data=None): + """ + Set openpype track tag to input track object. 
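+ The data dict is written into the tag's metadata and the tag name is suffixed with a short random hex token; when the track already carries an OpenPype tag it is updated in place instead of creating a duplicate.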
+ + Attributes: + track (hiero.core.VideoTrack): hiero object + + Returns: + hiero.core.Tag + """ + data = data or {} + + # basic Tag's attribute + tag_data = { + "editable": "0", + "note": "OpenPype data container", + "icon": "openpype_icon.png", + "metadata": dict(data.items()) + } + # get available pype tag if any + _tag = get_track_openpype_tag(track) + + if _tag: + # it not tag then create one + tag = tags.update_tag(_tag, tag_data) + else: + # if pype tag available then update with input data + tag = tags.create_tag( + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), + tag_data + ) + # add it to the input track item + track.addTag(tag) + + return tag + + +def get_track_openpype_tag(track): + """ + Get pype track item tag created by creator or loader plugin. + + Attributes: + trackItem (hiero.core.TrackItem): hiero object + + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if self.pype_tag_name in tag.name(): + return tag + + +def get_track_openpype_data(track, container_name=None): + """ + Get track's openpype tag data. + + Attributes: + trackItem (hiero.core.VideoTrack): hiero object + + Returns: + dict: data found on pype tag + """ + return_data = {} + # get pype data tag from track item + tag = get_track_openpype_tag(track) + + if not tag: + return None + + # get tag metadata attribute + tag_data = deepcopy(dict(tag.metadata())) + + for obj_name, obj_data in tag_data.items(): + obj_name = obj_name.replace("tag.", "") + + if obj_name in ["applieswhole", "note", "label"]: + continue + return_data[obj_name] = json.loads(obj_data) + + return ( + return_data[container_name] + if container_name + else return_data + ) + + +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") def get_track_item_pype_tag(track_item): + # backward compatibility alias + return get_trackitem_openpype_tag(track_item) + + +@deprecated("openpype.hosts.hiero.api.lib.set_trackitem_openpype_tag") +def set_track_item_pype_tag(track_item, data=None): + # backward compatibility alias + return set_trackitem_openpype_tag(track_item, data) + + +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_data") +def get_track_item_pype_data(track_item): + # backward compatibility alias + return get_trackitem_openpype_data(track_item) + + +def get_trackitem_openpype_tag(track_item): """ Get pype track item tag created by creator or loader plugin. @@ -317,16 +474,16 @@ def get_track_item_pype_tag(track_item): return None for tag in _tags: # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: + if self.pype_tag_name in tag.name(): return tag -def set_track_item_pype_tag(track_item, data=None): +def set_trackitem_openpype_tag(track_item, data=None): """ - Set pype track item tag to input track_item. + Set openpype track tag to input track object. 
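+ Operates on a single track item: an existing OpenPype tag is updated when found, otherwise a new tag (with a random hex suffix in its name) is created and attached to the track item, with the data dict stored in its metadata.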
Attributes: - trackItem (hiero.core.TrackItem): hiero object + track (hiero.core.VideoTrack): hiero object Returns: hiero.core.Tag @@ -341,21 +498,26 @@ def set_track_item_pype_tag(track_item, data=None): "metadata": dict(data.items()) } # get available pype tag if any - _tag = get_track_item_pype_tag(track_item) - + _tag = get_trackitem_openpype_tag(track_item) if _tag: # it not tag then create one tag = tags.update_tag(_tag, tag_data) else: # if pype tag available then update with input data - tag = tags.create_tag(self.pype_tag_name, tag_data) + tag = tags.create_tag( + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), + tag_data + ) # add it to the input track item track_item.addTag(tag) return tag -def get_track_item_pype_data(track_item): +def get_trackitem_openpype_data(track_item): """ Get track item's pype tag data. @@ -367,7 +529,7 @@ def get_track_item_pype_data(track_item): """ data = {} # get pype data tag from track item - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) if not tag: return None @@ -420,7 +582,7 @@ def imprint(track_item, data=None): """ data = data or {} - tag = set_track_item_pype_tag(track_item, data) + tag = set_trackitem_openpype_tag(track_item, data) # add publish attribute set_publish_attribute(tag, True) @@ -832,22 +994,22 @@ def set_selected_track_items(track_items_list, sequence=None): def _read_doc_from_path(path): - # reading QDomDocument from HROX path - hrox_file = QFile(path) - if not hrox_file.open(QFile.ReadOnly): + # reading QtXml.QDomDocument from HROX path + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.ReadOnly): raise RuntimeError("Failed to open file for reading") - doc = QDomDocument() + doc = QtXml.QDomDocument() doc.setContent(hrox_file) hrox_file.close() return doc def _write_doc_to_path(doc, path): - # write QDomDocument to path as HROX - hrox_file = QFile(path) - if not hrox_file.open(QFile.WriteOnly): + # write QtXml.QDomDocument to path as HROX + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.WriteOnly): raise RuntimeError("Failed to open file for writing") - stream = QTextStream(hrox_file) + stream = QtCore.QTextStream(hrox_file) doc.save(stream, 1) hrox_file.close() @@ -1030,7 +1192,7 @@ def sync_clip_name_to_data_asset(track_items_list): # get name and data ti_name = track_item.name() - data = get_track_item_pype_data(track_item) + data = get_trackitem_openpype_data(track_item) # ignore if no data on the clip or not publish instance if not data: @@ -1042,10 +1204,10 @@ def sync_clip_name_to_data_asset(track_items_list): if data["asset"] != ti_name: data["asset"] = ti_name # remove the original tag - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) track_item.removeTag(tag) # create new tag with updated data - set_track_item_pype_tag(track_item, data) + set_trackitem_openpype_tag(track_item, data) print("asset was changed in clip: {}".format(ti_name)) @@ -1083,10 +1245,10 @@ def check_inventory_versions(track_items=None): project_name = legacy_io.active_project() filter_result = filter_containers(containers, project_name) for container in filter_result.latest: - set_track_color(container["_track_item"], clip_color) + set_track_color(container["_item"], clip_color) for container in filter_result.outdated: - set_track_color(container["_track_item"], clip_color_last) + set_track_color(container["_item"], clip_color_last) def selection_changed_timeline(event): diff --git 
a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index ea61dc4785..4ab73e7d19 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -1,6 +1,7 @@ """ Basic avalon integration """ +from copy import deepcopy import os import contextlib from collections import OrderedDict @@ -17,6 +18,7 @@ from openpype.pipeline import ( ) from openpype.tools.utils import host_tools from . import lib, menu, events +import hiero log = Logger.get_logger(__name__) @@ -106,7 +108,7 @@ def containerise(track_item, data_imprint.update({k: v}) log.debug("_ data_imprint: {}".format(data_imprint)) - lib.set_track_item_pype_tag(track_item, data_imprint) + lib.set_trackitem_openpype_tag(track_item, data_imprint) return track_item @@ -123,79 +125,131 @@ def ls(): """ # get all track items from current timeline - all_track_items = lib.get_track_items() + all_items = lib.get_track_items() - for track_item in all_track_items: - container = parse_container(track_item) - if container: - yield container + # append all video tracks + for track in lib.get_current_sequence(): + if type(track) != hiero.core.VideoTrack: + continue + all_items.append(track) + + for item in all_items: + container_data = parse_container(item) + + if isinstance(container_data, list): + for _c in container_data: + yield _c + elif container_data: + yield container_data -def parse_container(track_item, validate=True): +def parse_container(item, validate=True): """Return container data from track_item's pype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. validate (bool)[optional]: validating with avalon scheme Returns: dict: The container schema data for input containerized track item. 
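+ For a hiero.core.VideoTrack a list of such container dicts is returned instead, one per object stored in the track's OpenPype tag.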
""" + def data_to_container(item, data): + if ( + not data + or data.get("id") != "pyblish.avalon.container" + ): + return + + if validate and data and data.get("schema"): + schema.validate(data) + + if not isinstance(data, dict): + return + + # If not all required data return the empty container + required = ['schema', 'id', 'name', + 'namespace', 'loader', 'representation'] + + if any(key not in data for key in required): + return + + container = {key: data[key] for key in required} + + container["objectName"] = item.name() + + # Store reference to the node object + container["_item"] = item + + return container + # convert tag metadata to normal keys names - data = lib.get_track_item_pype_data(track_item) - if ( - not data - or data.get("id") != "pyblish.avalon.container" - ): - return + if type(item) == hiero.core.VideoTrack: + return_list = [] + _data = lib.get_track_openpype_data(item) - if validate and data and data.get("schema"): - schema.validate(data) + if not _data: + return + # convert the data to list and validate them + for _, obj_data in _data.items(): + cotnainer = data_to_container(item, obj_data) + return_list.append(cotnainer) + return return_list + else: + _data = lib.get_trackitem_openpype_data(item) + return data_to_container(item, _data) - if not isinstance(data, dict): - return - - # If not all required data return the empty container - required = ['schema', 'id', 'name', - 'namespace', 'loader', 'representation'] - - if not all(key in data for key in required): - return - - container = {key: data[key] for key in required} - - container["objectName"] = track_item.name() - - # Store reference to the node object - container["_track_item"] = track_item +def _update_container_data(container, data): + for key in container: + try: + container[key] = data[key] + except KeyError: + pass return container -def update_container(track_item, data=None): - """Update container data to input track_item's pype tag. +def update_container(item, data=None): + """Update container data to input track_item or track's + openpype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. 
data (dict)[optional]: dictionery with data to be updated Returns: bool: True if container was updated correctly """ - data = data or dict() - container = lib.get_track_item_pype_data(track_item) + data = data or {} + data = deepcopy(data) - for _key, _value in container.items(): - try: - container[_key] = data[_key] - except KeyError: - pass + if type(item) == hiero.core.VideoTrack: + # form object data for test + object_name = data["objectName"] - log.info("Updating container: `{}`".format(track_item.name())) - return bool(lib.set_track_item_pype_tag(track_item, container)) + # get all available containers + containers = lib.get_track_openpype_data(item) + container = lib.get_track_openpype_data(item, object_name) + + containers = deepcopy(containers) + container = deepcopy(container) + + # update data in container + updated_container = _update_container_data(container, data) + # merge updated container back to containers + containers.update({object_name: updated_container}) + + return bool(lib.set_track_openpype_tag(item, containers)) + else: + container = lib.get_trackitem_openpype_data(item) + updated_container = _update_container_data(container, data) + + log.info("Updating container: `{}`".format(item.name())) + return bool(lib.set_trackitem_openpype_tag(item, updated_container)) def launch_workfiles_app(*args): @@ -272,11 +326,11 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): instance, old_value, new_value)) from openpype.hosts.hiero.api import ( - get_track_item_pype_tag, + get_trackitem_openpype_tag, set_publish_attribute ) # Whether instances should be passthrough based on new value track_item = instance.data["item"] - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) set_publish_attribute(tag, new_value) diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index fac26da03a..cb7bc14edb 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -1,3 +1,4 @@ +import json import re import os import hiero @@ -85,17 +86,16 @@ def update_tag(tag, data): # get metadata key from data data_mtd = data.get("metadata", {}) - # due to hiero bug we have to make sure keys which are not existent in - # data are cleared of value by `None` - for _mk in mtd.dict().keys(): - if _mk.replace("tag.", "") not in data_mtd.keys(): - mtd.setValue(_mk, str(None)) - # set all data metadata to tag metadata - for k, v in data_mtd.items(): + for _k, _v in data_mtd.items(): + value = str(_v) + if type(_v) == dict: + value = json.dumps(_v) + + # set the value mtd.setValue( - "tag.{}".format(str(k)), - str(v) + "tag.{}".format(str(_k)), + value ) # set note description of tag diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py new file mode 100644 index 0000000000..a3fcd63b5b --- /dev/null +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -0,0 +1,308 @@ +import json +from collections import OrderedDict +import six + +from openpype.client import ( + get_version_by_id +) + +from openpype.pipeline import ( + AVALON_CONTAINER_ID, + load, + legacy_io, + get_representation_path +) +from openpype.hosts.hiero import api as phiero +from openpype.lib import Logger + + +class LoadEffects(load.LoaderPlugin): + """Loading colorspace soft effect exported from nukestudio""" + + representations = ["effectJson"] + families = ["effect"] + + label = "Load Effects" + order = 0 + icon = "cc" + color = "white" + + log = Logger.get_logger(__name__) + 
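+ # How the loader below works: the "effectJson" representation is read
+ # from disk, the effect definitions are ordered by their
+ # "trackIndex"/"subTrackIndex" values, and each one is (re)created as a
+ # soft effect on a "Loaded_<name>" video track spanning the
+ # clipIn..clipOut range taken from the asset/version data. Knob values
+ # given as lists are applied as animation (setValueAt per frame); the
+ # names of the created sub-track items are recorded in the track-level
+ # OpenPype tag so that update() can locate them again.
+ #
+ # Roughly, the JSON payload is expected to look like the sketch below.
+ # Only the key layout is taken from how reorder_nodes() and
+ # _shared_loading() read the file; the effect class and knob names are
+ # invented examples:
+ #
+ # {
+ #     "Effect_1": {
+ #         "class": "<soft effect type>",
+ #         "trackIndex": 0,
+ #         "subTrackIndex": 0,
+ #         "node": {"<knob name>": "<value or list of animated values>"}
+ #     }
+ # }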
+ def load(self, context, name, namespace, data): + """ + Loading function to get the soft effects to particular read node + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + active_sequence = phiero.get_current_sequence() + active_track = phiero.get_current_track( + active_sequence, "Loaded_{}".format(name)) + + # get main variables + namespace = namespace or context["asset"]["name"] + object_name = "{}_{}".format(name, namespace) + clip_in = context["asset"]["data"]["clipIn"] + clip_out = context["asset"]["data"]["clipOut"] + + data_imprint = { + "objectName": object_name, + "children_names": [] + } + + # getting file path + file = self.fname.replace("\\", "/") + + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint + ): + self.containerise( + active_track, + name=name, + namespace=namespace, + object_name=object_name, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def _shared_loading( + self, + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=False + ): + # getting data from json file with unicode conversion + with open(file, "r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).items()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f) + + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + + loaded = False + for index_order, (ef_name, ef_val) in enumerate(nodes_order.items()): + new_name = "{}_loaded".format(ef_name) + if new_name not in used_subtracks: + effect_track_item = active_track.createEffect( + effectType=ef_val["class"], + timelineIn=clip_in, + timelineOut=clip_out, + subTrackIndex=index_order + + ) + effect_track_item.setName(new_name) + else: + effect_track_item = used_subtracks[new_name] + + node = effect_track_item.node() + for knob_name, knob_value in ef_val["node"].items(): + if ( + not knob_value + or knob_name == "name" + ): + continue + + try: + # assume list means animation + # except 4 values could be RGBA or vector + if isinstance(knob_value, list) and len(knob_value) > 4: + node[knob_name].setAnimated() + for i, value in enumerate(knob_value): + if isinstance(value, list): + # list can have vector animation + for ci, cv in enumerate(value): + node[knob_name].setValueAt( + cv, + (clip_in + i), + ci + ) + else: + # list is single values + node[knob_name].setValueAt( + value, + (clip_in + i) + ) + else: + node[knob_name].setValue(knob_value) + except NameError: + self.log.warning("Knob: {} cannot be set".format( + knob_name)) + + # register all loaded children + data_imprint["children_names"].append(new_name) + + # make sure containerisation will happen + loaded = True + + return loaded + + def update(self, container, representation): + """ Updating previously loaded effects + """ + active_track = container["_item"] + file = get_representation_path(representation).replace("\\", "/") + + # get main variables + name = container['name'] + namespace = container['namespace'] + + # get timeline in out data + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + version_data = version_doc["data"] + clip_in = version_data["clipIn"] + clip_out = version_data["clipOut"] + + object_name = 
"{}_{}".format(name, namespace) + + # Disable previously created nodes + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + container = phiero.get_track_openpype_data( + active_track, object_name + ) + + loaded_subtrack_items = container["children_names"] + for loaded_stitem in loaded_subtrack_items: + if loaded_stitem not in used_subtracks: + continue + item_to_remove = used_subtracks.pop(loaded_stitem) + # TODO: find a way to erase nodes + self.log.debug( + "This node needs to be removed: {}".format(item_to_remove)) + + data_imprint = { + "objectName": object_name, + "name": name, + "representation": str(representation["_id"]), + "children_names": [] + } + + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=True + ): + return phiero.update_container(active_track, data_imprint) + + def reorder_nodes(self, data): + new_order = OrderedDict() + trackNums = [v["trackIndex"] for k, v in data.items() + if isinstance(v, dict)] + subTrackNums = [v["subTrackIndex"] for k, v in data.items() + if isinstance(v, dict)] + + for trackIndex in range( + min(trackNums), max(trackNums) + 1): + for subTrackIndex in range( + min(subTrackNums), max(subTrackNums) + 1): + item = self.get_item(data, trackIndex, subTrackIndex) + if item is not {}: + new_order.update(item) + return new_order + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if isinstance(val, dict) + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings + It goes through all dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.items()} + elif isinstance(input, list): + return [self.byteify(element) for element in input] + elif isinstance(input, six.text_type): + return str(input) + else: + return input + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + pass + + def containerise( + self, + track, + name, + namespace, + object_name, + context, + loader=None, + data=None + ): + """Bundle Hiero's object into an assembly and imprint it with metadata + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + track (hiero.core.VideoTrack): object to imprint as container + name (str): Name of resulting assembly + namespace (str): Namespace under which to host container + object_name (str): name of container + context (dict): Asset information + loader (str, optional): Name of node used to produce this + container. 
+ + Returns: + track_item (hiero.core.TrackItem): containerised object + + """ + + data_imprint = { + object_name: { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": str(name), + "namespace": str(namespace), + "loader": str(loader), + "representation": str(context["representation"]["_id"]), + } + } + + if data: + for k, v in data.items(): + data_imprint[object_name].update({k: v}) + + self.log.debug("_ data_imprint: {}".format(data_imprint)) + phiero.set_track_openpype_tag(track, data_imprint) diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py index 8d2ed9a9c2..9489b1c4fb 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -16,6 +16,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin): review_track_index = instance.context.data.get("reviewTrackIndex") item = instance.data["item"] + if "audio" in instance.data["family"]: + return + # frame range self.handle_start = instance.data["handleStart"] self.handle_end = instance.data["handleEnd"] diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 1fc4b1f696..bb02919b35 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -48,7 +48,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): self.log.debug("clip_name: {}".format(clip_name)) # get openpype tag data - tag_data = phiero.get_track_item_pype_data(track_item) + tag_data = phiero.get_trackitem_openpype_data(track_item) self.log.debug("__ tag_data: {}".format(pformat(tag_data))) if not tag_data: diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index fddf7ab98d..2663a55f6f 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,24 +1,13 @@ from .pipeline import ( - install, - uninstall, - + HoudiniHost, ls, - containerise, + containerise ) from .plugin import ( Creator, ) -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root -) - from .lib import ( lsattr, lsattrs, @@ -29,22 +18,13 @@ from .lib import ( __all__ = [ - "install", - "uninstall", + "HoudiniHost", "ls", "containerise", "Creator", - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - # Utility functions "lsattr", "lsattrs", @@ -52,7 +32,3 @@ __all__ = [ "maintained_selection" ] - -# Backwards API compatibility -open = open_file -save = save_file diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index c8a7f92bb9..13f5a62ec3 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,6 +1,10 @@ +# -*- coding: utf-8 -*- +import sys +import os import uuid import logging from contextlib import contextmanager +import json import six @@ -8,10 +12,13 @@ from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.context_tools import get_current_project_asset - import hou + +self = sys.modules[__name__] +self._parent = None log = logging.getLogger(__name__) +JSON_PREFIX = "JSON:::" def get_asset_fps(): @@ -29,23 +36,18 @@ def set_id(node, unique_id, overwrite=False): def get_id(node): - """ - Get the `cbId` attribute 
of the given node + """Get the `cbId` attribute of the given node. + Args: node (hou.Node): the name of the node to retrieve the attribute from Returns: - str + str: cbId attribute of the node. """ - if node is None: - return - - id = node.parm("id") - if node is None: - return - return id + if node is not None: + return node.parm("id") def generate_ids(nodes, asset_id=None): @@ -281,7 +283,7 @@ def render_rop(ropnode): raise RuntimeError("Render failed: {0}".format(exc)) -def imprint(node, data): +def imprint(node, data, update=False): """Store attributes with value on a node Depending on the type of attribute it creates the correct parameter @@ -290,49 +292,76 @@ def imprint(node, data): http://www.sidefx.com/docs/houdini/hom/hou/ParmTemplate.html + Because of some update glitch where you cannot overwrite existing + ParmTemplates on node using: + `setParmTemplates()` and `parmTuplesInFolder()` + update is done in another pass. + Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value + update (bool, optional): flag if imprint should update + already existing data or leave them untouched and only + add new. Returns: None """ + if not data: + return + if not node: + self.log.error("Node is not set, calling imprint on invalid data.") + return - parm_group = node.parmTemplateGroup() + current_parms = {p.name(): p for p in node.spareParms()} + update_parms = [] + templates = [] - parm_folder = hou.FolderParmTemplate("folder", "Extra") for key, value in data.items(): if value is None: continue - if isinstance(value, float): - parm = hou.FloatParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, bool): - parm = hou.ToggleParmTemplate(name=key, - label=key, - default_value=value) - elif isinstance(value, int): - parm = hou.IntParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, six.string_types): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - else: - raise TypeError("Unsupported type: %r" % type(value)) + parm = get_template_from_value(key, value) - parm_folder.addParmTemplate(parm) + if key in current_parms: + if node.evalParm(key) == data[key]: + continue + if not update: + log.debug(f"{key} already exists on {node}") + else: + log.debug(f"replacing {key}") + update_parms.append(parm) + continue + + templates.append(parm) + + parm_group = node.parmTemplateGroup() + parm_folder = parm_group.findFolder("Extra") + + # if folder doesn't exist yet, create one and append to it, + # else append to existing one + if not parm_folder: + parm_folder = hou.FolderParmTemplate("folder", "Extra") + parm_folder.setParmTemplates(templates) + parm_group.append(parm_folder) + else: + for template in templates: + parm_group.appendToFolder(parm_folder, template) + # this is needed because the pointer to folder + # is for some reason lost every call to `appendToFolder()` + parm_folder = parm_group.findFolder("Extra") - parm_group.append(parm_folder) node.setParmTemplateGroup(parm_group) + # TODO: Updating is done here, by calling probably deprecated functions. + # This needs to be addressed in the future. 
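    # --- Editorial sketch, not part of the patch --------------------------------
    # Typical calls into imprint() as the new-style creators later in this patch
    # make them; the node and keys here are only an illustration:
    #
    #   imprint(rop_node, {"family": "pointcache", "creator_identifier": cid})
    #   imprint(rop_node, {"chunkSize": 20}, update=True)  # overwrite existing parm
    #
    # dict/list/tuple values are stored as "JSON:::"-prefixed strings (see
    # get_template_from_value further down) and decoded back by read().
    # -----------------------------------------------------------------------------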
+ if not update_parms: + return + + for parm in update_parms: + node.replaceSpareParmTuple(parm.name(), parm) + def lsattr(attr, value=None, root="/"): """Return nodes that have `attr` @@ -397,8 +426,22 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects - return {parameter.name(): parameter.eval() for - parameter in node.spareParms()} + data = {} + if not node: + return data + for parameter in node.spareParms(): + value = parameter.eval() + # test if value is json encoded dict + if isinstance(value, six.string_types) and \ + value.startswith(JSON_PREFIX): + try: + value = json.loads(value[len(JSON_PREFIX):]) + except json.JSONDecodeError: + # not a json + pass + data[parameter.name()] = value + + return data @contextmanager @@ -460,3 +503,89 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) + + +def get_main_window(): + """Acquire Houdini's main window""" + if self._parent is None: + self._parent = hou.ui.mainQtWindow() + return self._parent + + +def get_template_from_value(key, value): + if isinstance(value, float): + parm = hou.FloatParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, bool): + parm = hou.ToggleParmTemplate(name=key, + label=key, + default_value=value) + elif isinstance(value, int): + parm = hou.IntParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, six.string_types): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, (dict, list, tuple)): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=( + JSON_PREFIX + json.dumps(value),)) + else: + raise TypeError("Unsupported type: %r" % type(value)) + + return parm + + +def get_frame_data(node): + """Get the frame data: start frame, end frame and steps. + + Args: + node(hou.Node) + + Returns: + dict: frame data for star, end and steps. + + """ + data = {} + + if node.parm("trange") is None: + + return data + + if node.evalParm("trange") == 0: + self.log.debug("trange is 0") + return data + + data["frameStart"] = node.evalParm("f1") + data["frameEnd"] = node.evalParm("f2") + data["steps"] = node.evalParm("f3") + + return data + + +def splitext(name, allowed_multidot_extensions): + # type: (str, list) -> tuple + """Split file name to name and extension. + + Args: + name (str): File name to split. + allowed_multidot_extensions (list of str): List of allowed multidot + extensions. + + Returns: + tuple: Name and extension. 
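(Editorial sketch, not part of the patch.) Behaviour of the splitext() helper defined here, with an assumed extension list:

    from openpype.hosts.houdini.api.lib import splitext  # module path taken from this diff

    print(splitext("sim.bgeo.sc", [".ass.gz", ".bgeo.sc"]))   # ('sim', '.bgeo.sc')
    print(splitext("render.exr", [".ass.gz", ".bgeo.sc"]))    # falls back to os.path.splitext -> ('render', '.exr')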
+ """ + + for ext in allowed_multidot_extensions: + if name.endswith(ext): + return name[:-len(ext)], ext + + return os.path.splitext(name) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index e4af1913ef..b0791fcb6c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -1,9 +1,13 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" import os import sys import logging import contextlib -import hou +import hou # noqa + +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api @@ -26,6 +30,7 @@ from .lib import get_asset_fps log = logging.getLogger("openpype.hosts.houdini") AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS" +CONTEXT_CONTAINER = "/obj/OpenPypeContext" IS_HEADLESS = not hasattr(hou, "ui") PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins") @@ -35,71 +40,139 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -self = sys.modules[__name__] -self._has_been_setup = False -self._parent = None -self._events = dict() +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + name = "houdini" + def __init__(self): + super(HoudiniHost, self).__init__() + self._op_events = {} + self._has_been_setup = False -def install(): - _register_callbacks() + def install(self): + pyblish.api.register_host("houdini") + pyblish.api.register_host("hython") + pyblish.api.register_host("hpython") - pyblish.api.register_host("houdini") - pyblish.api.register_host("hython") - pyblish.api.register_host("hpython") + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) + log.info("Installing callbacks ... ") + # register_event_callback("init", on_init) + self._register_callbacks() + register_event_callback("before.save", before_save) + register_event_callback("save", on_save) + register_event_callback("open", on_open) + register_event_callback("new", on_new) - log.info("Installing callbacks ... ") - # register_event_callback("init", on_init) - register_event_callback("before.save", before_save) - register_event_callback("save", on_save) - register_event_callback("open", on_open) - register_event_callback("new", on_new) + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled + ) - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled - ) + self._has_been_setup = True + # add houdini vendor packages + hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") - self._has_been_setup = True - # add houdini vendor packages - hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") + sys.path.append(hou_pythonpath) - sys.path.append(hou_pythonpath) + # Set asset settings for the empty scene directly after launch of + # Houdini so it initializes into the correct scene FPS, + # Frame Range, etc. + # TODO: make sure this doesn't trigger when + # opening with last workfile. + _set_context_settings() + shelves.generate_shelves() - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. 
- # todo: make sure this doesn't trigger when opening with last workfile - _set_context_settings() - shelves.generate_shelves() + def has_unsaved_changes(self): + return hou.hipFile.hasUnsavedChanges() + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] -def uninstall(): - """Uninstall Houdini-specific functionality of avalon-core. + def save_workfile(self, dst_path=None): + # Force forwards slashes to avoid segfault + if dst_path: + dst_path = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=dst_path, + save_to_recent_files=True) + return dst_path - This function is called automatically on calling `api.uninstall()`. - """ + def open_workfile(self, filepath): + # Force forwards slashes to avoid segfault + filepath = filepath.replace("\\", "/") - pyblish.api.deregister_host("hython") - pyblish.api.deregister_host("hpython") - pyblish.api.deregister_host("houdini") + hou.hipFile.load(filepath, + suppress_save_prompt=True, + ignore_load_warnings=False) + return filepath -def _register_callbacks(): - for event in self._events.copy().values(): - if event is None: - continue + def get_current_workfile(self): + current_filepath = hou.hipFile.path() + if (os.path.basename(current_filepath) == "untitled.hip" and + not os.path.exists(current_filepath)): + # By default a new scene in houdini is saved in the current + # working directory as "untitled.hip" so we need to capture + # that and consider it 'not saved' when it's in that state. + return None - try: - hou.hipFile.removeEventCallback(event) - except RuntimeError as e: - log.info(e) + return current_filepath - self._events[on_file_event_callback] = hou.hipFile.addEventCallback( - on_file_event_callback - ) + def get_containers(self): + return ls() + + def _register_callbacks(self): + for event in self._op_events.copy().values(): + if event is None: + continue + + try: + hou.hipFile.removeEventCallback(event) + except RuntimeError as e: + log.info(e) + + self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback( + on_file_event_callback + ) + + @staticmethod + def create_context_node(): + """Helper for creating context holding node. 
+ + Returns: + hou.Node: context node + + """ + obj_network = hou.node("/obj") + op_ctx = obj_network.createNode( + "null", node_name="OpenPypeContext") + op_ctx.moveToGoodPosition() + op_ctx.setBuiltExplicitly(False) + op_ctx.setCreatorState("OpenPype") + op_ctx.setComment("OpenPype node to hold context metadata") + op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) + op_ctx.hide(True) + return op_ctx + + def update_context_data(self, data, changes): + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + + lib.imprint(op_ctx, data) + + def get_context_data(self): + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + return lib.read(op_ctx) + + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") + + hou.hipFile.save(file_name=dst_path, + save_to_recent_files=True) def on_file_event_callback(event): @@ -113,22 +186,6 @@ def on_file_event_callback(event): emit_event("new") -def get_main_window(): - """Acquire Houdini's main window""" - if self._parent is None: - self._parent = hou.ui.mainQtWindow() - return self._parent - - -def teardown(): - """Remove integration""" - if not self._has_been_setup: - return - - self._has_been_setup = False - print("pyblish: Integration torn down successfully") - - def containerise(name, namespace, nodes, @@ -251,7 +308,7 @@ def on_open(): log.warning("Scene has outdated content.") # Get main window - parent = get_main_window() + parent = lib.get_main_window() if parent is None: log.info("Skipping outdated content pop-up " "because Houdini window can't be found.") diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2bbb65aa05..e15e27c83f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,14 +1,19 @@ # -*- coding: utf-8 -*- """Houdini specific Avalon/Pyblish plugin definitions.""" import sys +from abc import ( + ABCMeta +) import six - import hou from openpype.pipeline import ( CreatorError, - LegacyCreator + LegacyCreator, + Creator as NewCreator, + CreatedInstance ) -from .lib import imprint +from openpype.lib import BoolDef +from .lib import imprint, read, lsattr class OpenPypeCreatorError(CreatorError): @@ -30,12 +35,15 @@ class Creator(LegacyCreator): when hovering over a node. The information is visible under the name of the node. + Deprecated: + This creator is deprecated and will be removed in future version. + """ defaults = ['Main'] def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - self.nodes = list() + self.nodes = [] def process(self): """This is the base functionality to create instances in Houdini @@ -84,3 +92,187 @@ class Creator(LegacyCreator): OpenPypeCreatorError, OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + + +class HoudiniCreatorBase(object): + @staticmethod + def cache_subsets(shared_data): + """Cache instances for Creators to shared data. + + Create `houdini_cached_subsets` key when needed in shared data and + fill it with all collected instances from the scene under its + respective creator identifiers. + + If legacy instances are detected in the scene, create + `houdini_cached_legacy_subsets` there and fill it with + all legacy subsets under family as a key. + + Args: + Dict[str, Any]: Shared data. + + Return: + Dict[str, Any]: Shared data dictionary. 
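(Editorial sketch, not part of the patch.) The shape of the cache that cache_subsets() builds into the shared data, with hypothetical node paths standing in for hou.Node objects:

    shared_data = {
        # new-style instances, keyed by creator identifier
        "houdini_cached_subsets": {
            "io.openpype.creators.houdini.pointcache": ["/out/pointcacheMain"],
        },
        # legacy instances (no creator_identifier parm), keyed by family
        "houdini_cached_legacy_subsets": {
            "camera": ["/out/cameraMain"],
        },
    }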
+ + """ + if shared_data.get("houdini_cached_subsets") is None: + shared_data["houdini_cached_subsets"] = {} + if shared_data.get("houdini_cached_legacy_subsets") is None: + shared_data["houdini_cached_legacy_subsets"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + if not i.parm("creator_identifier"): + # we have legacy instance + family = i.parm("family").eval() + if family not in shared_data[ + "houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][ + family] = [i] + else: + shared_data[ + "houdini_cached_legacy_subsets"][family].append(i) + continue + + creator_id = i.parm("creator_identifier").eval() + if creator_id not in shared_data["houdini_cached_subsets"]: + shared_data["houdini_cached_subsets"][creator_id] = [i] + else: + shared_data[ + "houdini_cached_subsets"][creator_id].append(i) # noqa + return shared_data + + @staticmethod + def create_instance_node( + node_name, parent, + node_type="geometry"): + # type: (str, str, str) -> hou.Node + """Create node representing instance. + + Arguments: + node_name (str): Name of the new node. + parent (str): Name of the parent node. + node_type (str, optional): Type of the node. + + Returns: + hou.Node: Newly created instance node. + + """ + parent_node = hou.node(parent) + instance_node = parent_node.createNode( + node_type, node_name=node_name) + instance_node.moveToGoodPosition() + return instance_node + + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator, HoudiniCreatorBase): + """Base class for most of the Houdini creator plugins.""" + selected_nodes = [] + + def create(self, subset_name, instance_data, pre_create_data): + try: + if pre_create_data.get("use_selection"): + self.selected_nodes = hou.selectedNodes() + + # Get the node type and remove it from the data, not needed + node_type = instance_data.pop("node_type", None) + if node_type is None: + node_type = "geometry" + + instance_node = self.create_instance_node( + subset_name, "/out", node_type) + + self.customize_node_look(instance_node) + + instance_data["instance_node"] = instance_node.path() + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) + self._add_instance_to_context(instance) + imprint(instance_node, instance.data_to_store()) + return instance + + except hou.Error as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + def lock_parameters(self, node, parameters): + """Lock list of specified parameters on the node. + + Args: + node (hou.Node): Houdini node to lock parameters on. + parameters (list of str): List of parameter names. 
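(Editorial sketch, not part of the patch.) A typical call, matching how the creator plugins later in this patch use it; parameters that do not exist on the node are only logged, thanks to the AttributeError handling below:

    to_lock = ["family", "id"]
    self.lock_parameters(instance_node, to_lock)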
+ + """ + for name in parameters: + try: + parm = node.parm(name) + parm.lock(True) + except AttributeError: + self.log.debug("missing lock pattern {}".format(name)) + + def collect_instances(self): + # cache instances if missing + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data[ + "houdini_cached_subsets"].get(self.identifier, []): + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = hou.node(created_inst.get("instance_node")) + + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } + imprint( + instance_node, + new_values, + update=True + ) + + def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. + + """ + for instance in instances: + instance_node = hou.node(instance.data.get("instance_node")) + if instance_node: + instance_node.destroy() + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] + + @staticmethod + def customize_node_look( + node, color=None, + shape="chevron_down"): + """Set custom look for instance nodes. + + Args: + node (hou.Node): Node to set look. + color (hou.Color, Optional): Color of the node. + shape (str, Optional): Shape name of the node. + + Returns: + None + + """ + if not color: + color = hou.Color((0.616, 0.871, 0.769)) + node.setUserData('nodeshape', shape) + node.setColor(color) diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py deleted file mode 100644 index 5f7efff333..0000000000 --- a/openpype/hosts/houdini/api/workio.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Host API required Work Files tool""" -import os - -import hou - - -def file_extensions(): - return [".hip", ".hiplc", ".hipnc"] - - -def has_unsaved_changes(): - return hou.hipFile.hasUnsavedChanges() - - -def save_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.save(file_name=filepath, - save_to_recent_files=True) - - return filepath - - -def open_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.load(filepath, - suppress_save_prompt=True, - ignore_load_warnings=False) - - return filepath - - -def current_file(): - - current_filepath = hou.hipFile.path() - if (os.path.basename(current_filepath) == "untitled.hip" and - not os.path.exists(current_filepath)): - # By default a new scene in houdini is saved in the current - # working directory as "untitled.hip" so we need to capture - # that and consider it 'not saved' when it's in that state. 
- return None - - return current_filepath - - -def work_root(session): - work_dir = session["AVALON_WORKDIR"] - scene_dir = session.get("AVALON_SCENEDIR") - if scene_dir: - return os.path.join(work_dir, scene_dir) - else: - return work_dir diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py new file mode 100644 index 0000000000..4b8041b4f5 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +"""Convertor for legacy Houdini subsets.""" +from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin +from openpype.hosts.houdini.api.lib import imprint + + +class HoudiniLegacyConvertor(SubsetConvertorPlugin): + """Find and convert any legacy subsets in the scene. + + This Convertor will find all legacy subsets in the scene and will + transform them to the current system. Since the old subsets doesn't + retain any information about their original creators, the only mapping + we can do is based on their families. + + Its limitation is that you can have multiple creators creating subset + of the same family and there is no way to handle it. This code should + nevertheless cover all creators that came with OpenPype. + + """ + identifier = "io.openpype.creators.houdini.legacy" + family_to_id = { + "camera": "io.openpype.creators.houdini.camera", + "ass": "io.openpype.creators.houdini.ass", + "imagesequence": "io.openpype.creators.houdini.imagesequence", + "hda": "io.openpype.creators.houdini.hda", + "pointcache": "io.openpype.creators.houdini.pointcache", + "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy", + "redshift_rop": "io.openpype.creators.houdini.redshift_rop", + "usd": "io.openpype.creators.houdini.usd", + "usdrender": "io.openpype.creators.houdini.usdrender", + "vdbcache": "io.openpype.creators.houdini.vdbcache" + } + + def __init__(self, *args, **kwargs): + super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs) + self.legacy_subsets = {} + + def find_instances(self): + """Find legacy subsets in the scene. + + Legacy subsets are the ones that doesn't have `creator_identifier` + parameter on them. + + This is using cached entries done in + :py:meth:`~HoudiniCreatorBase.cache_subsets()` + + """ + self.legacy_subsets = self.collection_shared_data.get( + "houdini_cached_legacy_subsets") + if not self.legacy_subsets: + return + self.add_convertor_item("Found {} incompatible subset{}.".format( + len(self.legacy_subsets), "s" if len(self.legacy_subsets) > 1 else "") + ) + + def convert(self): + """Convert all legacy subsets to current. + + It is enough to add `creator_identifier` and `instance_node`. 
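(Editorial sketch, not part of the patch.) For a hypothetical legacy pointcache ROP, the conversion below boils down to imprinting exactly two keys:

    imprint(subset, {
        "creator_identifier": "io.openpype.creators.houdini.pointcache",
        "instance_node": "/out/pointcacheMain",   # subset.path() of the legacy node
    })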
+ + """ + if not self.legacy_subsets: + return + + for family, subsets in self.legacy_subsets.items(): + if family in self.family_to_id: + for subset in subsets: + data = { + "creator_identifier": self.family_to_id[family], + "instance_node": subset.path() + } + self.log.info("Converting {} to {}".format( + subset.path(), self.family_to_id[family])) + imprint(subset, data) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index eef86005f5..fec64eb4a1 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,46 +1,49 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance, CreatorError -class CreateAlembicCamera(plugin.Creator): - """Single baked camera from Alembic ROP""" +class CreateAlembicCamera(plugin.HoudiniCreator): + """Single baked camera from Alembic ROP.""" - name = "camera" + identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" family = "camera" icon = "camera" - def __init__(self, *args, **kwargs): - super(CreateAlembicCamera, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - # Set node type to create for output - self.data.update({"node_type": "alembic"}) + instance = super(CreateAlembicCamera, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/%s.abc" % self.name, + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)), "use_sop_path": False, } - if self.nodes: - node = self.nodes[0] - path = node.path() + if self.selected_nodes: + if len(self.selected_nodes) > 1: + raise CreatorError("More than one item selected.") + path = self.selected_nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) parms.update({"root": "/" + root, "objects": remainder}) - instance.setParms(parms) + instance_node.setParms(parms) # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
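(Editorial sketch, not part of the patch.) The root/objects split used by the camera creator above, with a hypothetical selection path:

    path = "/obj/camera_rig/render_cam"
    _, root, remainder = path.split("/", 2)
    parms = {"root": "/" + root, "objects": remainder}
    # parms == {"root": "/obj", "objects": "camera_rig/render_cam"}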
- instance.parm("use_sop_path").lock(True) - instance.parm("trange").set(1) + to_lock = ["use_sop_path"] + self.lock_parameters(instance_node, to_lock) + + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index 72088e43b0..8b310753d0 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -1,9 +1,12 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Arnold ASS files.""" from openpype.hosts.houdini.api import plugin -class CreateArnoldAss(plugin.Creator): +class CreateArnoldAss(plugin.HoudiniCreator): """Arnold .ass Archive""" + identifier = "io.openpype.creators.houdini.ass" label = "Arnold ASS" family = "ass" icon = "magic" @@ -12,42 +15,39 @@ class CreateArnoldAss(plugin.Creator): # Default extension: `.ass` or `.ass.gz` ext = ".ass" - def __init__(self, *args, **kwargs): - super(CreateArnoldAss, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "arnold"}) - self.data.update({"node_type": "arnold"}) + instance = super(CreateArnoldAss, self).create( + subset_name, + instance_data, + pre_create_data) # type: plugin.CreatedInstance - def process(self): - node = super(CreateArnoldAss, self).process() - - basename = node.name() - node.setName(basename + "_ASS", unique_name=True) + instance_node = hou.node(instance.get("instance_node")) # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export - parm_template_group = node.parmTemplateGroup() + parm_template_group = instance_node.parmTemplateGroup() parm_template_group.hideFolder("Properties", True) - node.setParmTemplateGroup(parm_template_group) + instance_node.setParmTemplateGroup(parm_template_group) - filepath = '$HIP/pyblish/`chs("subset")`.$F4{}'.format(self.ext) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { # Render frame range "trange": 1, - # Arnold ROP settings "ar_ass_file": filepath, "ar_ass_export_enable": 1 } - node.setParms(parms) - # Lock the ASS export attribute - node.parm("ar_ass_export_enable").lock(True) + instance_node.setParms(parms) - # Lock some Avalon attributes - to_lock = ["family", "id"] - for name in to_lock: - parm = node.parm(name) - parm.lock(True) + # Lock any parameters in this list + to_lock = ["ar_ass_export_enable", "family", "id"] + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index e278708076..45af2b0630 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -1,44 +1,42 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating composite sequences.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateCompositeSequence(plugin.Creator): +class CreateCompositeSequence(plugin.HoudiniCreator): """Composite ROP to Image Sequence""" + identifier = "io.openpype.creators.houdini.imagesequence" label = "Composite (Image Sequence)" family = "imagesequence" icon = "gears" - def 
__init__(self, *args, **kwargs): - super(CreateCompositeSequence, self).__init__(*args, **kwargs) + ext = ".exr" - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - # Type of ROP node to create - self.data.update({"node_type": "comp"}) + instance_data.pop("active", None) + instance_data.update({"node_type": "comp"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateCompositeSequence, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. + instance_node = hou.node(instance.get("instance_node")) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) + parms = { + "trange": 1, + "copoutput": filepath + } - """ - parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} - - if self.nodes: - node = self.nodes[0] - parms.update({"coppath": node.path()}) - - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - try: - parm = instance.parm(name) - parm.lock(True) - except AttributeError: - # missing lock pattern - self.log.debug( - "missing lock pattern {}".format(name)) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index b98da8b8bb..4bed83c2e9 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,28 +1,22 @@ # -*- coding: utf-8 -*- -import hou - +"""Creator plugin for creating publishable Houdini Digital Assets.""" from openpype.client import ( get_asset_by_name, get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import lib from openpype.hosts.houdini.api import plugin -class CreateHDA(plugin.Creator): +class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - name = "hda" + identifier = "io.openpype.creators.houdini.hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" maintain_selection = False - def __init__(self, *args, **kwargs): - super(CreateHDA, self).__init__(*args, **kwargs) - self.data.pop("active", None) - def _check_existing(self, subset_name): # type: (str) -> bool """Check if existing subset name versions already exists.""" @@ -40,55 +34,51 @@ class CreateHDA(plugin.Creator): } return subset_name.lower() in existing_subset_names_low - def _process(self, instance): - subset_name = self.data["subset"] - # get selected nodes - out = hou.node("/obj") - self.nodes = hou.selectedNodes() + def _create_instance_node( + self, node_name, parent, node_type="geometry"): + import hou - if (self.options or {}).get("useSelection") and self.nodes: - # if we have `use selection` enabled and we have some + parent_node = hou.node("/obj") + if self.selected_nodes: + # if we have `use selection` enabled, and we have some # selected nodes ... 
- subnet = out.collapseIntoSubnet( - self.nodes, - subnet_name="{}_subnet".format(self.name)) + subnet = parent_node.collapseIntoSubnet( + self.selected_nodes, + subnet_name="{}_subnet".format(node_name)) subnet.moveToGoodPosition() to_hda = subnet else: - to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(self.name)) + to_hda = parent_node.createNode( + "subnet", node_name="{}_subnet".format(node_name)) if not to_hda.type().definition(): # if node type has not its definition, it is not user # created hda. We test if hda can be created from the node. if not to_hda.canCreateDigitalAsset(): - raise Exception( + raise plugin.OpenPypeCreatorError( "cannot create hda from node {}".format(to_hda)) hda_node = to_hda.createDigitalAsset( - name=subset_name, - hda_file_name="$HIP/{}.hda".format(subset_name) + name=node_name, + hda_file_name="$HIP/{}.hda".format(node_name) ) hda_node.layoutChildren() - elif self._check_existing(subset_name): + elif self._check_existing(node_name): raise plugin.OpenPypeCreatorError( ("subset {} is already published with different HDA" - "definition.").format(subset_name)) + "definition.").format(node_name)) else: hda_node = to_hda - hda_node.setName(subset_name) - - # delete node created by Avalon in /out - # this needs to be addressed in future Houdini workflow refactor. - - hou.node("/out/{}".format(subset_name)).destroy() - - try: - lib.imprint(hda_node, self.data) - except hou.OperationFailed: - raise plugin.OpenPypeCreatorError( - ("Cannot set metadata on asset. Might be that it already is " - "OpenPype asset.") - ) - + hda_node.setName(node_name) + self.customize_node_look(hda_node) return hda_node + + def create(self, subset_name, instance_data, pre_create_data): + instance_data.pop("active", None) + + instance = super(CreateHDA, self).create( + subset_name, + instance_data, + pre_create_data) # type: plugin.CreatedInstance + + return instance diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index feb683edf6..6b6b277422 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,48 +1,51 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreatePointCache(plugin.Creator): +class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - - name = "pointcache" + identifier = "io.openpype.creators.houdini.pointcache" label = "Point Cache" family = "pointcache" icon = "gears" - def __init__(self, *args, **kwargs): - super(CreatePointCache, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - self.data.update({"node_type": "alembic"}) + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "use_sop_path": True, # Export single node from SOP Path - "build_from_path": True, # Direct path of primitive in output - "path_attrib": "path", # Pass path attribute for output + "use_sop_path": True, + "build_from_path": True, + "path_attrib": "path", "prim_to_detail_pattern": "cbId", - "format": 2, # Set format to Ogawa - "facesets": 0, # No face sets (by default exclude them) - "filename": "$HIP/pyblish/%s.abc" % self.name, + "format": 2, + "facesets": 0, + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)) } - if self.nodes: - node = self.nodes[0] - parms.update({"sop_path": node.path()}) + if self.selected_nodes: + parms["sop_path"] = self.selected_nodes[0].path() - instance.setParms(parms) - instance.parm("trange").set(1) + # try to find output node + for child in self.selected_nodes[0].children(): + if child.type().name() == "output": + parms["sop_path"] = child.path() + break + + instance_node.setParms(parms) + instance_node.parm("trange").set(1) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index da4d80bf2b..8b6a68437b 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -1,18 +1,20 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Redshift proxies.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftProxy(plugin.Creator): +class CreateRedshiftProxy(plugin.HoudiniCreator): """Redshift Proxy""" - + identifier = "io.openpype.creators.houdini.redshiftproxy" label = "Redshift Proxy" family = "redshiftproxy" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateRedshiftProxy, self).__init__(*args, **kwargs) - + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) # Redshift provides a `Redshift_Proxy_Output` node type which shows # a limited set of parameters by default and is set to extract a @@ -21,28 +23,24 @@ class CreateRedshiftProxy(plugin.Creator): # why this happens. # TODO: Somehow enforce so that it only shows the original limited # attributes of the Redshift_Proxy_Output node type - self.data.update({"node_type": "Redshift_Proxy_Output"}) + instance_data.update({"node_type": "Redshift_Proxy_Output"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateRedshiftProxy, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. 
+ instance_node = hou.node(instance.get("instance_node")) - """ parms = { - "RS_archive_file": '$HIP/pyblish/`chs("subset")`.$F4.rs', + "RS_archive_file": '$HIP/pyblish/`{}.$F4.rs'.format(subset_name), } - if self.nodes: - node = self.nodes[0] - path = node.path() - parms["RS_archive_sopPath"] = path + if self.selected_nodes: + parms["RS_archive_sopPath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + to_lock = ["family", "id", "prim_to_detail_pattern"] + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 6949ca169b..2cbe9bfda1 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -1,41 +1,40 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin to create Redshift ROP.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftROP(plugin.Creator): +class CreateRedshiftROP(plugin.HoudiniCreator): """Redshift ROP""" - + identifier = "io.openpype.creators.houdini.redshift_rop" label = "Redshift ROP" family = "redshift_rop" icon = "magic" defaults = ["master"] - def __init__(self, *args, **kwargs): - super(CreateRedshiftROP, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa + + instance_data.pop("active", None) + instance_data.update({"node_type": "Redshift_ROP"}) + # Add chunk size attribute + instance_data["chunkSize"] = 10 # Clear the family prefix from the subset - subset = self.data["subset"] + subset = subset_name subset_no_prefix = subset[len(self.family):] subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:] - self.data["subset"] = subset_no_prefix + subset_name = subset_no_prefix - # Add chunk size attribute - self.data["chunkSize"] = 10 + instance = super(CreateRedshiftROP, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_node = hou.node(instance.get("instance_node")) - self.data.update({"node_type": "Redshift_ROP"}) - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ - basename = instance.name() - instance.setName(basename + "_ROP", unique_name=True) + basename = instance_node.name() + instance_node.setName(basename + "_ROP", unique_name=True) # Also create the linked Redshift IPR Rop try: @@ -43,11 +42,12 @@ class CreateRedshiftROP(plugin.Creator): "Redshift_IPR", node_name=basename + "_IPR" ) except hou.OperationFailed: - raise Exception(("Cannot create Redshift node. Is Redshift " - "installed and enabled?")) + raise plugin.OpenPypeCreatorError( + ("Cannot create Redshift node. 
Is Redshift " + "installed and enabled?")) # Move it to directly under the Redshift ROP - ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) + ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1)) # Set the linked rop to the Redshift ROP ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance)) @@ -61,10 +61,8 @@ class CreateRedshiftROP(plugin.Creator): "RS_outputMultilayerMode": 0, # no multi-layered exr "RS_outputBeautyAOVSuffix": "beauty", } - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 5bcb7840c0..51ed8237c5 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -1,39 +1,39 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating USDs.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSD(plugin.Creator): +class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" - + identifier = "io.openpype.creators.houdini.usd" label = "USD (experimental)" family = "usd" icon = "gears" enabled = False - def __init__(self, *args, **kwargs): - super(CreateUSD, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usd"}) - self.data.update({"node_type": "usd"}) + instance = super(CreateUSD, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- - """ parms = { - "lopoutput": "$HIP/pyblish/%s.usd" % self.name, + "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name), "enableoutputprocessor_simplerelativepaths": False, } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = [ @@ -42,6 +42,4 @@ class CreateUSD(plugin.Creator): "family", "id", ] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index cb3fe3f02b..f78f0bed50 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -1,42 +1,41 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin for creating USD renders.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSDRender(plugin.Creator): +class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" - + identifier = "io.openpype.creators.houdini.usdrender" label = "USD Render (experimental)" family = "usdrender" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateUSDRender, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - self.parent = hou.node("/stage") + instance_data["parent"] = hou.node("/stage") # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usdrender"}) - self.data.update({"node_type": "usdrender"}) + instance = super(CreateUSDRender, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- """ parms = { # Render frame range "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) - instance.setParms(parms) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 242c21fc72..1a5011745f 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,38 +1,36 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating VDB Caches.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateVDBCache(plugin.Creator): +class CreateVDBCache(plugin.HoudiniCreator): """OpenVDB from Geometry ROP""" - + identifier = "io.openpype.creators.houdini.vdbcache" name = "vbdcache" label = "VDB Cache" family = "vdbcache" icon = "cloud" - def __init__(self, *args, **kwargs): - super(CreateVDBCache, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "geometry"}) - # Set node type to create for output - self.data["node_type"] = "geometry" + instance = super(CreateVDBCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, + "sopoutput": "$HIP/pyblish/{}.$F4.vdb".format(subset_name), "initsim": True, "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"soppath": node.path()}) + if self.selected_nodes: + parms["soppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py new file mode 100644 index 0000000000..0c6d840810 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" +from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api.lib import read, imprint +from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER +from openpype.pipeline import CreatedInstance, AutoCreator +from openpype.pipeline import legacy_io +from openpype.client import get_asset_by_name +import hou + + +class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): + """Workfile auto-creator.""" + identifier = "io.openpype.creators.houdini.workfile" + label = "Workfile" + family = "workfile" + icon = "document" + + default_variant = "Main" + + def create(self): + variant = self.default_variant + current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + + project_name = self.project_name + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] + + if current_instance is None: + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update( + self.get_dynamic_data( + variant, task_name, asset_doc, + project_name, host_name, current_instance) + ) + self.log.info("Auto-creating workfile instance...") + current_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(current_instance) + elif ( + current_instance["asset"] != asset_name + or current_instance["task"] != task_name + ): + # Update instance context if is not the same + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + current_instance["asset"] = asset_name + current_instance["task"] = task_name + current_instance["subset"] = subset_name + + # write workfile information to context container. 
+ op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + + workfile_data = {"workfile": current_instance.data_to_store()} + imprint(op_ctx, workfile_data) + + def collect_instances(self): + op_ctx = hou.node(CONTEXT_CONTAINER) + instance = read(op_ctx) + if not instance: + return + workfile = instance.get("workfile") + if not workfile: + return + created_instance = CreatedInstance.from_existing( + workfile, self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + op_ctx = hou.node(CONTEXT_CONTAINER) + for created_inst, _changes in update_list: + if created_inst["creator_identifier"] == self.identifier: + workfile_data = {"workfile": created_inst.data_to_store()} + imprint(op_ctx, workfile_data, update=True) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index 862d5720e1..cc3f2e7fae 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -1,4 +1,5 @@ import pyblish.api +import hou class CollectInstanceActiveState(pyblish.api.InstancePlugin): @@ -24,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = instance[0] + node = hou.node(instance.get("instance_node")) if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index 1383c274a2..9cca07fdc7 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -5,19 +5,20 @@ from openpype.pipeline import legacy_io import pyblish.api -class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): +class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin): """Inject the current working file into context""" order = pyblish.api.CollectorOrder - 0.01 label = "Houdini Current File" hosts = ["houdini"] + family = ["workfile"] - def process(self, context): + def process(self, instance): """Inject the current working file""" current_file = hou.hipFile.path() if not os.path.exists(current_file): - # By default Houdini will even point a new scene to a path. + # By default, Houdini will even point a new scene to a path. # However if the file is not saved at all and does not exist, # we assume the user never set it. filepath = "" @@ -34,43 +35,26 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): "saved correctly." 
) - context.data["currentFile"] = current_file + instance.context.data["currentFile"] = current_file folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - task = legacy_io.Session["AVALON_TASK"] - - data = {} - - # create instance - instance = context.create_instance(name=filename) - subset = 'workfile' + task.capitalize() - - data.update({ - "subset": subset, - "asset": os.getenv("AVALON_ASSET", None), - "label": subset, - "publish": True, - "family": 'workfile', - "families": ['workfile'], + instance.data.update({ "setMembers": [current_file], - "frameStart": context.data['frameStart'], - "frameEnd": context.data['frameEnd'], - "handleStart": context.data['handleStart'], - "handleEnd": context.data['handleEnd'] + "frameStart": instance.context.data['frameStart'], + "frameEnd": instance.context.data['frameEnd'], + "handleStart": instance.context.data['handleStart'], + "handleEnd": instance.context.data['handleEnd'] }) - data['representations'] = [{ + instance.data['representations'] = [{ 'name': ext.lstrip("."), 'ext': ext.lstrip("."), 'files': file, "stagingDir": folder, }] - instance.data.update(data) - self.log.info('Collected instance: {}'.format(file)) self.log.info('Scene path: {}'.format(current_file)) self.log.info('staging Dir: {}'.format(folder)) - self.log.info('subset: {}'.format(subset)) diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9bd43d8a09..531cdf1249 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -1,19 +1,13 @@ +# -*- coding: utf-8 -*- +"""Collector plugin for frames data on ROP instances.""" import os import re -import hou +import hou # noqa import pyblish.api from openpype.hosts.houdini.api import lib -def splitext(name, allowed_multidot_extensions): - - for ext in allowed_multidot_extensions: - if name.endswith(ext): - return name[:-len(ext)], ext - - return os.path.splitext(name) - class CollectFrames(pyblish.api.InstancePlugin): """Collect all frames which would be saved from the ROP nodes""" @@ -24,7 +18,9 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) + frame_data = lib.get_frame_data(ropnode) + instance.data.update(frame_data) start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) @@ -38,13 +34,13 @@ class CollectFrames(pyblish.api.InstancePlugin): self.log.warning("Using current frame: {}".format(hou.frame())) output = output_parm.eval() - _, ext = splitext(output, + _, ext = lib.splitext(output, allowed_multidot_extensions=[".ass.gz"]) file_name = os.path.basename(output) result = file_name # Get the filename pattern match from the output - # path so we can compute all frames that would + # path, so we can compute all frames that would # come out from rendering the ROP node if there # is a frame pattern in the name pattern = r"\w+\.(\d+)" + re.escape(ext) @@ -63,8 +59,9 @@ class CollectFrames(pyblish.api.InstancePlugin): # for a custom frame list. So this should be refactored. 
instance.data.update({"frames": result}) - def create_file_list(self, match, start_frame, end_frame): - """Collect files based on frame range and regex.match + @staticmethod + def create_file_list(match, start_frame, end_frame): + """Collect files based on frame range and `regex.match` Args: match(re.match): match object diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index d38927984a..bb85630552 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -47,6 +47,11 @@ class CollectInstances(pyblish.api.ContextPlugin): if node.evalParm("id") != "pyblish.avalon.instance": continue + # instance was created by new creator code, skip it as + # it is already collected. + if node.parm("creator_identifier"): + continue + has_family = node.evalParm("family") assert has_family, "'%s' is missing 'family'" % node.name() @@ -58,7 +63,8 @@ class CollectInstances(pyblish.api.ContextPlugin): data.update({"active": not node.isBypassed()}) # temporarily translation of `active` to `publish` till issue has - # been resolved, https://github.com/pyblish/pyblish-base/issues/307 + # been resolved. + # https://github.com/pyblish/pyblish-base/issues/307 if "active" in data: data["publish"] = data["active"] @@ -78,6 +84,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["families"] = [instance.data["family"]] instance[:] = [node] + instance.data["instance_node"] = node.path() instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index 0130c0a8da..601ed17b39 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = hou.node(instance.data["instance_node"]) # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 72b554b567..346bdf3421 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index d7163b43c0..fcd80e0082 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: - rop_path = instance[0].path() + rop_path = instance.data["instance_node"].path() raise RuntimeError( "No output node found. 
Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index cf8d61cda3..81274c670e 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,6 +1,6 @@ import pyblish.api -from openyppe.client import get_subset_by_name, get_asset_by_name +from openpype.client import get_subset_by_name, get_asset_by_name from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index e3985e3c97..833add854b 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -3,6 +3,8 @@ import os import pyblish.api import openpype.hosts.houdini.api.usd as usdlib +import hou + class CollectUsdLayers(pyblish.api.InstancePlugin): """Collect the USD Layers that have configured save paths.""" @@ -19,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = instance[0] + rop_node = hou.node(instance.get("instance_node")) save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -54,8 +56,10 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance[0]) # include same USD ROP - layer_inst.append((layer, save_path)) # include layer data + # include same USD ROP + layer_inst.append(rop_node) + # include layer data + layer_inst.append((layer, save_path)) # Allow this subset to be grouped into a USD Layer on creation layer_inst.data["subsetGroup"] = "USD Layer" diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 758d4c560b..cb2d4ef424 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAlembic(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAlembic(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index a302b451cb..0d246625ba 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAss(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAss(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved @@ -33,8 +35,12 @@ class ExtractAss(publish.Extractor): # error and thus still continues to the integrator. 
To capture that # we make sure all files exist files = instance.data["frames"] - missing = [fname for fname in files - if not os.path.exists(os.path.join(staging_dir, fname))] + missing = [] + for file_name in files: + full_path = os.path.normpath(os.path.join(staging_dir, file_name)) + if not os.path.exists(full_path): + missing.append(full_path) + if missing: raise RuntimeError("Failed to complete Arnold ass extraction. " "Missing output files: {}".format(missing)) diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 23e875f107..7a1ab36b93 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -1,9 +1,10 @@ import os - import pyblish.api from openpype.pipeline import publish -from openpype.hosts.houdini.api.lib import render_rop +from openpype.hosts.houdini.api.lib import render_rop, splitext + +import hou class ExtractComposite(publish.Extractor): @@ -15,7 +16,7 @@ class ExtractComposite(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved @@ -28,8 +29,24 @@ class ExtractComposite(publish.Extractor): render_rop(ropnode) - if "files" not in instance.data: - instance.data["files"] = [] + output = instance.data["frames"] + _, ext = splitext(output[0], []) + ext = ext.lstrip(".") - frames = instance.data["frames"] - instance.data["files"].append(frames) + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": ext, + "ext": ext, + "files": output, + "stagingDir": staging_dir, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + } + + from pprint import pformat + + self.log.info(pformat(representation)) + + instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 7dd03a92b7..8b97bf364f 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -1,11 +1,9 @@ # -*- coding: utf-8 -*- import os - from pprint import pformat - import pyblish.api - from openpype.pipeline import publish +import hou class ExtractHDA(publish.Extractor): @@ -17,7 +15,7 @@ class ExtractHDA(publish.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance[0] + hda_node = hou.node(instance.data.get("instance_node")) hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index ca9be64a47..29ede98a52 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractRedshiftProxy(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename 
parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 78c32affb4..cbeb5add71 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -5,6 +5,7 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou class ExtractUSD(publish.Extractor): @@ -17,7 +18,7 @@ class ExtractUSD(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index f686f712bb..0288b7363a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance[0] + node = hou.node(instance.get("instance_node")) # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 26ec423048..434d6a2160 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractVDBCache(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractVDBCache(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml new file mode 100644 index 0000000000..0f92560bf7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. + + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index c990f481d3..16d9ef9aec 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -2,7 +2,7 @@ import pyblish.api from openpype.lib import version_up from openpype.pipeline import registered_host - +from openpype.hosts.houdini.api import HoudiniHost class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. 
@@ -20,11 +20,11 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): def process(self, context): # Filename must not have changed since collecting - host = registered_host() + host = registered_host() # type: HoudiniHost current_file = host.current_file() assert ( context.data["currentFile"] == current_file ), "Collected filename from current scene name." new_filepath = version_up(current_file) - host.save(new_filepath) + host.save_workfile(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py index 6128c7af77..d6e07ccab0 100644 --- a/openpype/hosts/houdini/plugins/publish/save_scene.py +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -14,13 +14,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): # Filename must not have changed since collecting host = registered_host() - current_file = host.current_file() + current_file = host.get_current_workfile() assert context.data['currentFile'] == current_file, ( "Collected filename from current scene name." ) if host.has_unsaved_changes(): - self.log.info("Saving current file..") - host.save_file(current_file) + self.log.info("Saving current file {}...".format(current_file)) + host.save_workfile(current_file) else: self.log.debug("No unsaved changes, skipping file save..") diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py deleted file mode 100644 index ac408bc842..0000000000 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ /dev/null @@ -1,47 +0,0 @@ -import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - - -class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB. - - Regardless of the amount of VDBs create the output will need to have an - equal amount of VDBs, points, primitives and vertices - - A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - """ - - order = ValidateContentsOrder + 0.1 - families = ["vdbcache"] - hosts = ["houdini"] - label = "Validate Input Node (VDB)" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" 
- ) - - @classmethod - def get_invalid(cls, instance): - - node = instance.data["output_node"] - - prims = node.geometry().prims() - nr_of_prims = len(prims) - - nr_of_points = len(node.geometry().points()) - if nr_of_points != nr_of_prims: - cls.log.error("The number of primitives and points do not match") - return [instance] - - for prim in prims: - if prim.numVertices() != 1: - cls.log.error("Found primitive with more than 1 vertex!") - return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index ea800707fb..86e92a052f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -1,8 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api from collections import defaultdict - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" @@ -24,18 +24,26 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitives found with inconsistent primitive " - "to detail attributes. See log." + raise PublishValidationError( + ("Primitives found with inconsistent primitive " + "to detail attributes. See log."), + title=self.label ) @classmethod def get_invalid(cls, instance): + import hou # noqa + output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) + if output_node is None: + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % rop_node.path() + ) - output = instance.data["output_node"] + return [rop_node.path()] - rop = instance[0] - pattern = rop.parm("prim_to_detail_pattern").eval().strip() + pattern = rop_node.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( "Alembic ROP has no 'Primitive to Detail' pattern. " @@ -43,7 +51,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -51,14 +59,14 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." ) - return [rop.path()] + return [rop_node.path()] # Let's assume each attribute is explicitly named for now and has no # wildcards for Primitive to Detail. This simplifies the check. @@ -67,7 +75,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the start frame then it might be # something that is emitted over time. 
As such we can't actually @@ -86,7 +94,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -94,7 +102,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = None for attr in pattern.split(" "): @@ -130,4 +138,4 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Path has multiple values: %s (path: %s)" % (list(values), path) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index cbed3ea235..44d58cfa36 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,7 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api - -from openpype.pipeline.publish import ValidateContentsOrder - +import hou class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. @@ -18,14 +17,14 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" def process(self, instance): - rop = instance[0] + rop = hou.node(instance.data["instance_node"]) facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 2625ae5f83..bafb206bd3 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError +import hou class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -12,7 +13,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" @@ -20,18 +21,28 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitive types found that are not supported" - "for Alembic output." + raise PublishValidationError( + ("Primitive types found that are not supported" + "for Alembic output."), + title=self.label ) @classmethod def get_invalid(cls, instance): invalid_prim_types = ["VDB", "Volume"] - node = instance.data["output_node"] + output_node = instance.data.get("output_node") - if not hasattr(node, "geometry"): + if output_node is None: + node = hou.node(instance.data["instance_node"]) + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." 
% node.path() + ) + + return [node.path()] + + if not hasattr(output_node, "geometry"): # In the case someone has explicitly set an Object # node instead of a SOP node in Geometry context # then for now we ignore - this allows us to also @@ -40,7 +51,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): return frame = instance.data.get("frameStart", 0) - geo = node.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) invalid = False for prim_type in invalid_prim_types: diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 5eb8f93d03..f11f9c0c62 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateAnimationSettings(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 7cf8da69d6..1bf51a986c 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError +import hou class ValidateBypassed(pyblish.api.InstancePlugin): """Validate all primitives build hierarchy from attribute when enabled. @@ -11,7 +13,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder - 0.1 + order = pyblish.api.ValidatorOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" @@ -26,14 +28,15 @@ class ValidateBypassed(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: rop = invalid[0] - raise RuntimeError( - "ROP node %s is set to bypass, publishing cannot continue.." 
- % rop.path() + raise PublishValidationError( + ("ROP node {} is set to bypass, publishing cannot " + "continue.".format(rop.path())), + title=self.label ) @classmethod def get_invalid(cls, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index d414920f8b..41b5273e6a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -1,11 +1,13 @@ +# -*- coding: utf-8 -*- +"""Validator plugin for Houdini Camera ROP settings.""" import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["camera"] hosts = ["houdini"] label = "Camera ROP" @@ -14,30 +16,45 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = hou.node(instance.data.get("instance_node")) if node.parm("use_sop_path").eval(): - raise RuntimeError( - "Alembic ROP for Camera export should not be " - "set to 'Use Sop Path'. Please disable." + raise PublishValidationError( + ("Alembic ROP for Camera export should not be " + "set to 'Use Sop Path'. Please disable."), + title=self.label ) # Get the root and objects parameter of the Alembic ROP node root = node.parm("root").eval() objects = node.parm("objects").eval() - assert root, "Root parameter must be set on Alembic ROP" - assert root.startswith("/"), "Root parameter must start with slash /" - assert objects, "Objects parameter must be set on Alembic ROP" - assert len(objects.split(" ")) == 1, "Must have only a single object." 
+ errors = [] + if not root: + errors.append("Root parameter must be set on Alembic ROP") + if not root.startswith("/"): + errors.append("Root parameter must start with slash /") + if not objects: + errors.append("Objects parameter must be set on Alembic ROP") + if len(objects.split(" ")) != 1: + errors.append("Must have only a single object.") + + if errors: + for error in errors: + self.log.error(error) + raise PublishValidationError( + "Some checks failed, see validator log.", + title=self.label) # Check if the object exists and is a camera path = root + "/" + objects camera = hou.node(path) if not camera: - raise ValueError("Camera path does not exist: %s" % path) + raise PublishValidationError( + "Camera path does not exist: %s" % path, + title=self.label) if camera.type().name() != "cam": - raise ValueError( - "Object set in Alembic ROP is not a camera: " - "%s (type: %s)" % (camera, camera.type().name()) - ) + raise PublishValidationError( + ("Object set in Alembic ROP is not a camera: " + "{} (type: {})").format(camera, camera.type().name()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 543539ffe3..1d0377c818 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,4 +1,9 @@ +# -*- coding: utf-8 -*- +import sys import pyblish.api +import six + +from openpype.pipeline import PublishValidationError class ValidateCopOutputNode(pyblish.api.InstancePlugin): @@ -20,9 +25,10 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. " + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod @@ -30,10 +36,19 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + try: + output_node = instance.data["output_node"] + except KeyError: + six.reraise( + PublishValidationError, + PublishValidationError( + "Can't determine COP output node.", + title=cls.__name__), + sys.exc_info()[2] + ) if output_node is None: - node = instance[0] + node = hou.node(instance.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." % node.path() @@ -54,7 +69,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Cop2", ( - "Output node %s is not of category Cop2. This is a bug.." - % output_node.path() - ) + if output_node.type().category().name() != "Cop2": + raise PublishValidationError( + ("Output node %s is not of category Cop2. 
" + "This is a bug...").format(output_node.path()), + title=cls.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index b26d28a1e7..4584e78f4f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -1,7 +1,11 @@ +# -*- coding: utf-8 -*- import os import pyblish.api from openpype.hosts.houdini.api import lib +from openpype.pipeline import PublishValidationError + +import hou class ValidateFileExtension(pyblish.api.InstancePlugin): @@ -29,15 +33,16 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "ROP node has incorrect " "file extension: %s" % invalid + raise PublishValidationError( + "ROP node has incorrect file extension: {}".format(invalid), + title=self.label ) @classmethod def get_invalid(cls, instance): # Get ROP node from instance - node = instance[0] + node = hou.node(instance.data["instance_node"]) # Create lookup for current family in instance families = [] @@ -53,7 +58,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): for family in families: extension = cls.family_extensions.get(family, None) if extension is None: - raise RuntimeError("Unsupported family: %s" % family) + raise PublishValidationError( + "Unsupported family: {}".format(family), + title=cls.label) if output_extension != extension: return [node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index 76b5910576..b5f6ba71e1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateFrameToken(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index f5f03aa844..f1c52f22c1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): @@ -24,7 +26,7 @@ class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): license = hou.licenseCategory() if license != hou.licenseCategoryType.Commercial: - raise RuntimeError( - "USD Publishing requires a full Commercial " - "license. You are on: %s" % license - ) + raise PublishValidationError( + ("USD Publishing requires a full Commercial " + "license. 
You are on: {}").format(license), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index be6a798a95..9d1f92a101 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Create Intermediate Directories Checked" @@ -14,10 +15,10 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Found ROP node with Create Intermediate " - "Directories turned off: %s" % invalid - ) + raise PublishValidationError( + ("Found ROP node with Create Intermediate " + "Directories turned off: {}".format(invalid)), + title=self.label) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 76635d4ed5..f7c95aaf4e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError def cook_in_range(node, start, end): @@ -28,7 +29,7 @@ def get_errors(node): class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] label = "Validate no errors" @@ -37,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance[0]) + validate_nodes.append(hou.node(instance.get("instance_node"))) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) @@ -62,4 +63,6 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): errors = get_errors(node) if errors: self.log.error(errors) - raise RuntimeError("Node has errors: %s" % node.path()) + raise PublishValidationError( + "Node has errors: {}".format(node.path()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 7a8cd04f15..d3a4c0cfbf 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError +import hou class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -19,19 +22,26 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "See 
log for details. " "Invalid nodes: {0}".format(invalid) + raise PublishValidationError( + "See log for details. " "Invalid nodes: {0}".format(invalid), + title=self.label ) @classmethod def get_invalid(cls, instance): - import hou + output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) - output = instance.data["output_node"] + if output_node is None: + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % rop_node.path() + ) - rop = instance[0] - build_from_path = rop.parm("build_from_path").eval() + return [rop_node.path()] + + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -39,20 +49,20 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." ) - return [rop.path()] + return [rop_node.path()] cls.log.debug("Checking for attribute: %s" % path_attr) # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the current frame then we can't # check whether the path names are correct. So we'll just issue a @@ -73,7 +83,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -81,7 +91,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = geo.primStringAttribValues(path_attr) # Ensure all primitives are set to a valid path @@ -93,4 +103,4 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Prims have no value for attribute `%s` " "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 0ab182c584..4e8e5fc0e8 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -1,7 +1,9 @@ +# -*-coding: utf-8 -*- import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError import hou @@ -27,17 +29,24 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): # We ensure it's a shell node and that it has the pre-render script # set correctly. Plus the shell script it will trigger should be # completely empty (doing nothing) - assert node.type().name() == "shell", "Must be shell ROP node" - assert node.parm("command").eval() == "", "Must have no command" - assert not node.parm("shellexec").eval(), "Must not execute in shell" - assert ( - node.parm("prerender").eval() == cmd - ), "REMOTE_PUBLISH node does not have correct prerender script." 
- assert ( - node.parm("lprerender").eval() == "python" - ), "REMOTE_PUBLISH node prerender script type not set to 'python'" + if node.type().name() != "shell": + self.raise_error("Must be shell ROP node") + if node.parm("command").eval() != "": + self.raise_error("Must have no command") + if node.parm("shellexec").eval(): + self.raise_error("Must not execute in shell") + if node.parm("prerender").eval() != cmd: + self.raise_error(("REMOTE_PUBLISH node does not have " + "correct prerender script.")) + if node.parm("lprerender").eval() != "python": + self.raise_error(("REMOTE_PUBLISH node prerender script " + "type not set to 'python'")) @classmethod def repair(cls, context): """(Re)create the node if it fails to pass validation.""" lib.create_remote_publish_node(force=True) + + def raise_error(self, message): + self.log.error(message) + raise PublishValidationError(message, title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index afc8df7528..8ec62f4e85 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @@ -18,10 +20,12 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=self.label) if node.isBypassed(): - raise RuntimeError("REMOTE_PUBLISH must not be bypassed.") + raise PublishValidationError( + "REMOTE_PUBLISH must not be bypassed.", title=self.label) @classmethod def repair(cls, context): @@ -29,7 +33,8 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=cls.label) cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH") node.bypass(False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a5a07b1b1a..ed7f438729 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -22,9 +24,9 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + "Output node(s) are incorrect", + title="Invalid output node(s)" ) @classmethod @@ -32,10 +34,10 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + output_node = instance.data.get("output_node") if output_node is None: - node = instance[0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. 
" "Ensure a valid SOP output path is set." % node.path() @@ -56,10 +58,11 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." - % output_node.path() - ) + if output_node.type().category().name() != "Sop": + raise PublishValidationError( + ("Output node {} is not of category Sop. " + "This is a bug.").format(output_node.path()), + title=cls.label) # Ensure the node is cooked and succeeds to cook so we can correctly # check for its geometry data. diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index ac0181aed2..a0e2302495 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -1,6 +1,10 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError + +import hou class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): @@ -24,7 +28,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) @@ -44,7 +48,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): invalid.append(layer) if invalid: - raise RuntimeError( + raise PublishValidationError(( "Loaded layers have backslashes. " - "This is invalid for HUSK USD rendering." 
- ) + "This is invalid for HUSK USD rendering."), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 2fd2f5eb9f..a55eb70cb2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -1,10 +1,13 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib - +from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux +import hou + def fullname(o): """Get fully qualified class name""" @@ -37,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) @@ -55,7 +58,8 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): if invalid: prim_paths = sorted([str(prim.GetPath()) for prim in invalid]) - raise RuntimeError("Found invalid primitives: %s" % prim_paths) + raise PublishValidationError( + "Found invalid primitives: {}".format(prim_paths)) class ValidateUsdShade(ValidateUsdModel): diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 1f10fafdf4..af21efcafc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateUSDOutputNode(pyblish.api.InstancePlugin): @@ -20,9 +22,10 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. " + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod @@ -33,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = hou.node(instance.get("instance_node")) cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py index 36336a03ae..02c44ab94e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- +import os import pyblish.api -import os +from openpype.pipeline import PublishValidationError class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): @@ -28,4 +30,5 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): if invalid: for message in invalid: self.log.error(message) - raise RuntimeError("USD Render Paths are invalid.") + raise PublishValidationError( + "USD Render Paths are invalid.", title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index fb1094e6b5..01ebc0e828 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUsdSetDress(pyblish.api.InstancePlugin): @@ -20,8 +22,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): def process(self, instance): from pxr import UsdGeom + import hou - rop = instance[0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) @@ -47,8 +50,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): invalid.append(node) if invalid: - raise RuntimeError( + raise PublishValidationError(( "SetDress contains local geometry. " "This is not allowed, it must be an assembly " - "of referenced assets." 
+ "of referenced assets."), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index f08c7c72c5..c4f118ac3b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import re import pyblish.api @@ -5,6 +6,7 @@ import pyblish.api from openpype.client import get_subset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -32,7 +34,8 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): project_name, model_subset, asset_doc["_id"], fields=["_id"] ) if not subset_doc: - raise RuntimeError( - "USD Model subset not found: " - "%s (%s)" % (model_subset, asset_name) + raise PublishValidationError( + ("USD Model subset not found: " + "{} ({})").format(model_subset, asset_name), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index a4902b48a9..bd3366a424 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError import hou @@ -12,14 +13,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] families = ["usdShade"] label = "USD Shade Workspace" def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) workspace = rop.parent() definition = workspace.type().definition() @@ -39,13 +40,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): if node_type != other_node_type: continue - # Get highest version + # Get the highest version highest = max(highest, other_version) if version != highest: - raise RuntimeError( - "Shading Workspace is not the latest version." - " Found %s. Latest is %s." % (version, highest) + raise PublishValidationError( + ("Shading Workspace is not the latest version." + " Found {}. Latest is {}.").format(version, highest), + title=self.label ) # There were some issues with the editable node not having the right @@ -56,8 +58,9 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): ) rop_value = rop.parm("lopoutput").rawValue() if rop_value != value: - raise RuntimeError( - "Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values." + raise PublishValidationError( + ("Shading Workspace has invalid 'lopoutput'" + " parameter value. 
The Shading Workspace" + " needs to be reset to its default values."), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index ac408bc842..1f9ccc9c42 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import ( + PublishValidationError +) class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +19,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" @@ -24,8 +27,10 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" + raise PublishValidationError( + self, + "Node connected to the output node is not of type VDB", + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 55ed581d4c..61c1209fc9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" @@ -25,8 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" " of type VDB!" + raise PublishValidationError( + "Node connected to the output node is not" " of type VDB!", + title=self.label ) @classmethod @@ -36,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." % instance[0].path() + "ROP node '%s'." 
% instance.get("instance_node") ) return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 560b355e21..7707cc2dba 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -1,11 +1,17 @@ # -*- coding: utf-8 -*- import pyblish.api import hou +from openpype.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from openpype.pipeline.publish import RepairAction from openpype.pipeline.publish import RepairAction -class ValidateWorkfilePaths(pyblish.api.InstancePlugin): +class ValidateWorkfilePaths( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate workfile paths so they are absolute.""" order = pyblish.api.ValidatorOrder @@ -19,6 +25,8 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): prohibited_vars = ["$HIP", "$JOB"] def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid() self.log.info( "node types to check: {}".format(", ".join(self.node_types))) @@ -30,15 +38,16 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): self.log.error( "{}: {}".format(param.path(), param.unexpandedString())) - raise RuntimeError("Invalid paths found") + raise PublishValidationError( + "Invalid paths found", title=self.label) @classmethod def get_invalid(cls): invalid = [] for param, _ in hou.fileReferences(): - if param is None: + # it might return None for some reason + if not param: continue - # skip nodes we are not interested in if param.node().type().name() not in cls.node_types: continue diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml index abfa3f136e..c08114b71b 100644 --- a/openpype/hosts/houdini/startup/MainMenuCommon.xml +++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml @@ -1,10 +1,10 @@ - + - + - + - + bool: + node = rt.getNodeByName(node_name) + if not node: + return False + + for k, v in data.items(): + if isinstance(v, (dict, list)): + rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}') + else: + rt.setUserProp(node, k, v) + + return True + + +def lsattr( + attr: str, + value: Union[str, None] = None, + root: Union[str, None] = None) -> list: + """List nodes having attribute with specified value. + + Args: + attr (str): Attribute name to match. + value (str, Optional): Value to match, of omitted, all nodes + with specified attribute are returned no matter of value. + root (str, Optional): Root node name. If omitted, scene root is used. + + Returns: + list of nodes. 
+ """ + root = rt.rootnode if root is None else rt.getNodeByName(root) + + def output_node(node, nodes): + nodes.append(node) + for child in node.Children: + output_node(child, nodes) + + nodes = [] + output_node(root, nodes) + return [ + n for n in nodes + if rt.getUserProp(n, attr) == value + ] if value else [ + n for n in nodes + if rt.getUserProp(n, attr) + ] + + +def read(container) -> dict: + data = {} + props = rt.getUserPropBuffer(container) + # this shouldn't happen but let's guard against it anyway + if not props: + return data + + for line in props.split("\r\n"): + try: + key, value = line.split("=") + except ValueError: + # if the line cannot be split we can't really parse it + continue + + value = value.strip() + if isinstance(value.strip(), six.string_types) and \ + value.startswith(JSON_PREFIX): + try: + value = json.loads(value[len(JSON_PREFIX):]) + except json.JSONDecodeError: + # not a json + pass + + data[key.strip()] = value + + data["instance_node"] = container.name + + return data + + +@contextlib.contextmanager +def maintained_selection(): + previous_selection = rt.getCurrentSelection() + try: + yield + finally: + if previous_selection: + rt.select(previous_selection) + else: + rt.select() + + +def get_all_children(parent, node_type=None): + """Handy function to get all the children of a given node + + Args: + parent (3dsmax Node1): Node to get all children of. + node_type (None, runtime.class): give class to check for + e.g. rt.FFDBox/rt.GeometryClass etc. + + Returns: + list: list of all children of the parent node + """ + def list_children(node): + children = [] + for c in node.Children: + children.append(c) + children = children + list_children(c) + return children + child_list = list_children(parent) + + return ([x for x in child_list if rt.superClassOf(x) == node_type] + if node_type else child_list) diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py new file mode 100644 index 0000000000..d1913c51e0 --- /dev/null +++ b/openpype/hosts/max/api/menu.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +"""3dsmax menu definition of OpenPype.""" +from Qt import QtWidgets, QtCore +from pymxs import runtime as rt + +from openpype.tools.utils import host_tools + + +class OpenPypeMenu(object): + """Object representing OpenPype menu. + + This is using "hack" to inject itself before "Help" menu of 3dsmax. + For some reason `postLoadingMenus` event doesn't fire, and main menu + if probably re-initialized by menu templates, se we wait for at least + 1 event Qt event loop before trying to insert. + + """ + + def __init__(self): + super().__init__() + self.main_widget = self.get_main_widget() + self.menu = None + + timer = QtCore.QTimer() + # set number of event loops to wait. + timer.setInterval(1) + timer.timeout.connect(self._on_timer) + timer.start() + + self._timer = timer + self._counter = 0 + + def _on_timer(self): + if self._counter < 1: + self._counter += 1 + return + + self._counter = 0 + self._timer.stop() + self.build_openpype_menu() + + @staticmethod + def get_main_widget(): + """Get 3dsmax main window.""" + return QtWidgets.QWidget.find(rt.windows.getMAXHWND()) + + def get_main_menubar(self) -> QtWidgets.QMenuBar: + """Get main Menubar by 3dsmax main window.""" + return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0] + + def get_or_create_openpype_menu( + self, name: str = "&OpenPype", + before: str = "&Help") -> QtWidgets.QAction: + """Create OpenPype menu. + + Args: + name (str, Optional): OpenPypep menu name. 
+ before (str, Optional): Name of the 3dsmax main menu item to + add OpenPype menu before. + + Returns: + QtWidgets.QAction: OpenPype menu action. + + """ + if self.menu is not None: + return self.menu + + menu_bar = self.get_main_menubar() + menu_items = menu_bar.findChildren( + QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly) + help_action = None + for item in menu_items: + if name in item.title(): + # we already have OpenPype menu + return item + + if before in item.title(): + help_action = item.menuAction() + + op_menu = QtWidgets.QMenu("&OpenPype") + menu_bar.insertMenu(help_action, op_menu) + + self.menu = op_menu + return op_menu + + def build_openpype_menu(self) -> QtWidgets.QAction: + """Build items in OpenPype menu.""" + openpype_menu = self.get_or_create_openpype_menu() + load_action = QtWidgets.QAction("Load...", openpype_menu) + load_action.triggered.connect(self.load_callback) + openpype_menu.addAction(load_action) + + publish_action = QtWidgets.QAction("Publish...", openpype_menu) + publish_action.triggered.connect(self.publish_callback) + openpype_menu.addAction(publish_action) + + manage_action = QtWidgets.QAction("Manage...", openpype_menu) + manage_action.triggered.connect(self.manage_callback) + openpype_menu.addAction(manage_action) + + library_action = QtWidgets.QAction("Library...", openpype_menu) + library_action.triggered.connect(self.library_callback) + openpype_menu.addAction(library_action) + + openpype_menu.addSeparator() + + workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) + workfiles_action.triggered.connect(self.workfiles_callback) + openpype_menu.addAction(workfiles_action) + return openpype_menu + + def load_callback(self): + """Callback to show Loader tool.""" + host_tools.show_loader(parent=self.main_widget) + + def publish_callback(self): + """Callback to show Publisher tool.""" + host_tools.show_publisher(parent=self.main_widget) + + def manage_callback(self): + """Callback to show Scene Manager/Inventory tool.""" + host_tools.show_subset_manager(parent=self.main_widget) + + def library_callback(self): + """Callback to show Library Loader tool.""" + host_tools.show_library_loader(parent=self.main_widget) + + def workfiles_callback(self): + """Callback to show Workfiles tool.""" + host_tools.show_workfiles(parent=self.main_widget) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py new file mode 100644 index 0000000000..f3cdf245fb --- /dev/null +++ b/openpype/hosts/max/api/pipeline.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" +import os +import logging + +import json + +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher +import pyblish.api +from openpype.pipeline import ( + register_creator_plugin_path, + register_loader_plugin_path, + AVALON_CONTAINER_ID, +) +from openpype.hosts.max.api.menu import OpenPypeMenu +from openpype.hosts.max.api import lib +from openpype.hosts.max import MAX_HOST_DIR + +from pymxs import runtime as rt # noqa + +log = logging.getLogger("openpype.hosts.max") + +PLUGINS_DIR = os.path.join(MAX_HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") +INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + + +class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + + name = "max" + menu = None + + def __init__(self): + super(MaxHost, self).__init__() + 
self._op_events = {} + self._has_been_setup = False + + def install(self): + pyblish.api.register_host("max") + + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + + # self._register_callbacks() + self.menu = OpenPypeMenu() + + self._has_been_setup = True + + def has_unsaved_changes(self): + # TODO: how to get it from 3dsmax? + return True + + def get_workfile_extensions(self): + return [".max"] + + def save_workfile(self, dst_path=None): + rt.saveMaxFile(dst_path) + return dst_path + + def open_workfile(self, filepath): + rt.checkForSave() + rt.loadMaxFile(filepath) + return filepath + + def get_current_workfile(self): + return os.path.join(rt.maxFilePath, rt.maxFileName) + + def get_containers(self): + return ls() + + def _register_callbacks(self): + rt.callbacks.removeScripts(id=rt.name("OpenPypeCallbacks")) + + rt.callbacks.addScript( + rt.Name("postLoadingMenus"), + self._deferred_menu_creation, id=rt.Name('OpenPypeCallbacks')) + + def _deferred_menu_creation(self): + self.log.info("Building menu ...") + self.menu = OpenPypeMenu() + + @staticmethod + def create_context_node(): + """Helper for creating context holding node.""" + + root_scene = rt.rootScene + + create_attr_script = (""" +attributes "OpenPypeContext" +( + parameters main rollout:params + ( + context type: #string + ) + + rollout params "OpenPype Parameters" + ( + editText editTextContext "Context" type: #string + ) +) + """) + + attr = rt.execute(create_attr_script) + rt.custAttributes.add(root_scene, attr) + + return root_scene.OpenPypeContext.context + + def update_context_data(self, data, changes): + try: + _ = rt.rootScene.OpenPypeContext.context + except AttributeError: + # context node doesn't exists + self.create_context_node() + + rt.rootScene.OpenPypeContext.context = json.dumps(data) + + def get_context_data(self): + try: + context = rt.rootScene.OpenPypeContext.context + except AttributeError: + # context node doesn't exists + context = self.create_context_node() + if not context: + context = "{}" + return json.loads(context) + + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") + rt.saveMaxFile(dst_path) + + +def ls() -> list: + """Get all OpenPype instances.""" + objs = rt.objects + containers = [ + obj for obj in objs + if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID + ] + + for container in sorted(containers, key=lambda name: container.name): + yield lib.read(container) diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py new file mode 100644 index 0000000000..4788bfd383 --- /dev/null +++ b/openpype/hosts/max/api/plugin.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +"""3dsmax specific Avalon/Pyblish plugin definitions.""" +from pymxs import runtime as rt +import six +from abc import ABCMeta +from openpype.pipeline import ( + CreatorError, + Creator, + CreatedInstance +) +from openpype.lib import BoolDef +from .lib import imprint, read, lsattr + + +class OpenPypeCreatorError(CreatorError): + pass + + +class MaxCreatorBase(object): + + @staticmethod + def cache_subsets(shared_data): + if shared_data.get("max_cached_subsets") is None: + shared_data["max_cached_subsets"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = rt.getUserProp(i, "creator_identifier") + if creator_id not in shared_data["max_cached_subsets"]: + shared_data["max_cached_subsets"][creator_id] = 
[i.name] + else: + shared_data[ + "max_cached_subsets"][creator_id].append(i.name) # noqa + return shared_data + + @staticmethod + def create_instance_node(node_name: str, parent: str = ""): + parent_node = rt.getNodeByName(parent) if parent else rt.rootScene + if not parent_node: + raise OpenPypeCreatorError(f"Specified parent {parent} not found") + + container = rt.container(name=node_name) + container.Parent = parent_node + + return container + + +@six.add_metaclass(ABCMeta) +class MaxCreator(Creator, MaxCreatorBase): + selected_nodes = [] + + def create(self, subset_name, instance_data, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = rt.getCurrentSelection() + + instance_node = self.create_instance_node(subset_name) + instance_data["instance_node"] = instance_node.name + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + for node in self.selected_nodes: + node.Parent = instance_node + + self._add_instance_to_context(instance) + imprint(instance_node.name, instance.data_to_store()) + + return instance + + def collect_instances(self): + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data[ + "max_cached_subsets"].get(self.identifier, []): + created_instance = CreatedInstance.from_existing( + read(rt.getNodeByName(instance)), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = created_inst.get("instance_node") + + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } + imprint( + instance_node, + new_values, + ) + + def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. + + """ + for instance in instances: + instance_node = rt.getNodeByName( + instance.data.get("instance_node")) + if instance_node: + rt.delete(rt.getNodeByName(instance_node)) + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/max/hooks/set_paths.py b/openpype/hosts/max/hooks/set_paths.py new file mode 100644 index 0000000000..3db5306344 --- /dev/null +++ b/openpype/hosts/max/hooks/set_paths.py @@ -0,0 +1,17 @@ +from openpype.lib import PreLaunchHook + + +class SetPath(PreLaunchHook): + """Set current dir to workdir. + + Hook `GlobalHostDataHook` must be executed before this hook. 
+ """ + app_groups = ["max"] + + def execute(self): + workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + self.launch_context.kwargs["cwd"] = workdir diff --git a/openpype/hosts/max/plugins/__init__.py b/openpype/hosts/max/plugins/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py new file mode 100644 index 0000000000..32f0838471 --- /dev/null +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" +from openpype.hosts.max.api import plugin +from openpype.pipeline import CreatedInstance + + +class CreatePointCache(plugin.MaxCreator): + identifier = "io.openpype.creators.max.pointcache" + label = "Point Cache" + family = "pointcache" + icon = "gear" + + def create(self, subset_name, instance_data, pre_create_data): + # from pymxs import runtime as rt + + _ = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + # for additional work on the node: + # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py new file mode 100644 index 0000000000..285d84b7b6 --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +"""Simple alembic loader for 3dsmax. + +Because of limited api, alembics can be only loaded, but not easily updated. + +""" +import os +from openpype.pipeline import ( + load +) + + +class AbcLoader(load.LoaderPlugin): + """Alembic loader.""" + + families = ["model", "animation", "pointcache"] + label = "Load Alembic" + representations = ["abc"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + + file_path = os.path.normpath(self.fname) + + abc_before = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + abc_export_cmd = (f""" +AlembicImport.ImportToRoot = false + +importFile @"{file_path}" #noPrompt + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + rt.execute(abc_export_cmd) + + abc_after = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + # This should yield new AlembicContainer node + abc_containers = abc_after.difference(abc_before) + + if len(abc_containers) != 1: + self.log.error("Something failed when loading.") + + abc_container = abc_containers.pop() + + container_name = f"{name}_CON" + container = rt.container(name=container_name) + abc_container.Parent = container + + return container + + def remove(self, container): + from pymxs import runtime as rt + + node = container["node"] + rt.delete(node) diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py new file mode 100644 index 0000000000..3500b2735c --- /dev/null +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +"""Collect current work file.""" +import os +import pyblish.api + +from pymxs import runtime as rt +from openpype.pipeline import legacy_io + + +class CollectWorkfile(pyblish.api.ContextPlugin): + """Inject the current working file into 
context""" + + order = pyblish.api.CollectorOrder - 0.01 + label = "Collect 3dsmax Workfile" + hosts = ['max'] + + def process(self, context): + """Inject the current working file.""" + folder = rt.maxFilePath + file = rt.maxFileName + if not folder or not file: + self.log.error("Scene is not saved.") + current_file = os.path.join(folder, file) + + context.data['currentFile'] = current_file + + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = 'workfile' + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": 'workfile', + "families": ['workfile'], + "setMembers": [current_file], + "frameStart": context.data['frameStart'], + "frameEnd": context.data['frameEnd'], + "handleStart": context.data['handleStart'], + "handleEnd": context.data['handleEnd'] + }) + + data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info('Collected instance: {}'.format(file)) + self.log.info('Scene path: {}'.format(current_file)) + self.log.info('staging Dir: {}'.format(folder)) + self.log.info('subset: {}'.format(subset)) diff --git a/openpype/hosts/max/plugins/publish/extract_pointcache.py b/openpype/hosts/max/plugins/publish/extract_pointcache.py new file mode 100644 index 0000000000..904c1656da --- /dev/null +++ b/openpype/hosts/max/plugins/publish/extract_pointcache.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +""" +Export alembic file. + +Note: + Parameters on AlembicExport (AlembicExport.Parameter): + + ParticleAsMesh (bool): Sets whether particle shapes are exported + as meshes. + AnimTimeRange (enum): How animation is saved: + #CurrentFrame: saves current frame + #TimeSlider: saves the active time segments on time slider (default) + #StartEnd: saves a range specified by the Step + StartFrame (int) + EnFrame (int) + ShapeSuffix (bool): When set to true, appends the string "Shape" to the + name of each exported mesh. This property is set to false by default. + SamplesPerFrame (int): Sets the number of animation samples per frame. + Hidden (bool): When true, export hidden geometry. + UVs (bool): When true, export the mesh UV map channel. + Normals (bool): When true, export the mesh normals. + VertexColors (bool): When true, export the mesh vertex color map 0 and the + current vertex color display data when it differs + ExtraChannels (bool): When true, export the mesh extra map channels + (map channels greater than channel 1) + Velocity (bool): When true, export the meh vertex and particle velocity + data. + MaterialIDs (bool): When true, export the mesh material ID as + Alembic face sets. + Visibility (bool): When true, export the node visibility data. + LayerName (bool): When true, export the node layer name as an Alembic + object property. + MaterialName (bool): When true, export the geometry node material name as + an Alembic object property + ObjectID (bool): When true, export the geometry node g-buffer object ID as + an Alembic object property. + CustomAttributes (bool): When true, export the node and its modifiers + custom attributes into an Alembic object compound property. 
+""" +import os +import pyblish.api +from openpype.pipeline import publish +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractAlembic(publish.Extractor): + order = pyblish.api.ExtractorOrder + label = "Extract Pointcache" + hosts = ["max"] + families = ["pointcache", "camera"] + + def process(self, instance): + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + container = instance.data["instance_node"] + + self.log.info("Extracting pointcache ...") + + parent_dir = self.staging_dir(instance) + file_name = "{name}.abc".format(**instance.data) + path = os.path.join(parent_dir, file_name) + + # We run the render + self.log.info("Writing alembic '%s' to '%s'" % (file_name, + parent_dir)) + + abc_export_cmd = ( + f""" +AlembicExport.ArchiveType = #ogawa +AlembicExport.CoordinateSystem = #maya +AlembicExport.StartFrame = {start} +AlembicExport.EndFrame = {end} + +exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport + + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + + with maintained_selection(): + # select and export + + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(abc_export_cmd) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': file_name, + "stagingDir": parent_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py new file mode 100644 index 0000000000..8506b17315 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from openpype.pipeline import PublishValidationError +from pymxs import runtime as rt + + +class ValidateSceneSaved(pyblish.api.InstancePlugin): + """Validate that workfile was saved.""" + + order = pyblish.api.ValidatorOrder + families = ["workfile"] + hosts = ["max"] + label = "Validate Workfile is saved" + + def process(self, instance): + if not rt.maxFilePath or not rt.maxFileName: + raise PublishValidationError( + "Workfile is not saved", title=self.label) diff --git a/openpype/hosts/max/startup/startup.ms b/openpype/hosts/max/startup/startup.ms new file mode 100644 index 0000000000..aee40eb6bc --- /dev/null +++ b/openpype/hosts/max/startup/startup.ms @@ -0,0 +1,9 @@ +-- OpenPype Init Script +( + local sysPath = dotNetClass "System.IO.Path" + local sysDir = dotNetClass "System.IO.Directory" + local localScript = getThisScriptFilename() + local startup = sysPath.Combine (sysPath.GetDirectoryName localScript) "startup.py" + + python.ExecuteFile startup +) \ No newline at end of file diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py new file mode 100644 index 0000000000..37bcef5db1 --- /dev/null +++ b/openpype/hosts/max/startup/startup.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +from openpype.hosts.max.api import MaxHost +from openpype.pipeline import install_host + +host = MaxHost() +install_host(host) diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py new file mode 100644 index 0000000000..2a983f1573 --- /dev/null +++ b/openpype/hosts/maya/api/gltf.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +"""Tools to work with GLTF.""" +import logging + +from maya import cmds, mel # noqa 
+ +log = logging.getLogger(__name__) + +_gltf_options = { + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials + "eut": bool, # excludeUnusedTexcoord + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly +} + + +def extract_gltf(parent_dir, + filename, + **kwargs): + + """Sets GLTF export options from data in the instance. + + """ + + cmds.loadPlugin('maya2glTF', quiet=True) + # load the UI to run mel command + mel.eval("maya2glTF_UI()") + + parent_dir = parent_dir.replace('\\', '/') + options = { + "dsa": 1, + "glb": True + } + options.update(kwargs) + + for key, value in options.copy().items(): + if key not in _gltf_options: + log.warning("extract_gltf() does not support option '%s'. " + "Flag will be ignored..", key) + options.pop(key) + options.pop(value) + continue + + job_args = list() + default_opt = "maya2glTF -of \"{0}\" -sn \"{1}\"".format(parent_dir, filename) # noqa + job_args.append(default_opt) + + for key, value in options.items(): + if isinstance(value, str): + job_args.append("-{0} \"{1}\"".format(key, value)) + elif isinstance(value, bool): + if value: + job_args.append("-{0}".format(key)) + else: + job_args.append("-{0} {1}".format(key, value)) + + job_str = " ".join(job_args) + log.info("{}".format(job_str)) + mel.eval(job_str) + + # close the gltf export after finish the export + gltf_UI = "maya2glTF_exporter_window" + if cmds.window(gltf_UI, q=True, exists=True): + cmds.deleteUI(gltf_UI) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2530021eba..b2bbb823aa 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -127,14 +127,14 @@ def get_main_window(): @contextlib.contextmanager -def suspended_refresh(): +def suspended_refresh(suspend=True): """Suspend viewport refreshes""" - + original_state = cmds.refresh(query=True, suspend=True) try: - cmds.refresh(suspend=True) + cmds.refresh(suspend=suspend) yield finally: - cmds.refresh(suspend=False) + cmds.refresh(suspend=original_state) @contextlib.contextmanager diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index cd204445b7..c54e3ab3e0 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -536,6 +536,11 @@ class RenderProductsArnold(ARenderProducts): products = [] aov_name = self._get_attr(aov, "name") + multipart = False + multilayer = bool(self._get_attr("defaultArnoldDriver.multipart")) + merge_AOVs = bool(self._get_attr("defaultArnoldDriver.mergeAOVs")) + if multilayer or merge_AOVs: + multipart = True ai_drivers = cmds.listConnections("{}.outputs".format(aov), source=True, destination=False, @@ -589,6 +594,7 @@ class RenderProductsArnold(ARenderProducts): ext=ext, 
aov=aov_name, driver=ai_driver, + multipart=multipart, camera=camera) products.append(product) @@ -1016,7 +1022,11 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like Cryptomatte. # AOVs are merged in multi-channel file - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + multipart = False + force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) # noqa + exMultipart = bool(self._get_attr("redshiftOptions.exrMultipart")) + if exMultipart or force_layer: + multipart = True # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer @@ -1044,7 +1054,6 @@ class RenderProductsRedshift(ARenderProducts): # Any AOVs that still get processed, like Cryptomatte # by themselves are not multipart files. - aov_multipart = not multipart # Redshift skips rendering of masterlayer without AOV suffix # when a Beauty AOV is rendered. It overrides the main layer. @@ -1075,7 +1084,7 @@ class RenderProductsRedshift(ARenderProducts): productName=aov_light_group_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) @@ -1089,7 +1098,7 @@ class RenderProductsRedshift(ARenderProducts): product = RenderProduct(productName=aov_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) @@ -1100,7 +1109,7 @@ class RenderProductsRedshift(ARenderProducts): if light_groups_enabled: return products - beauty_name = "Beauty_other" if has_beauty_aov else "" + beauty_name = "BeautyAux" if has_beauty_aov else "" for camera in cameras: products.insert(0, RenderProduct(productName=beauty_name, diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 39f226900a..935a068ca5 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from openpype.hosts.maya.api import ( lib, plugin @@ -9,12 +7,26 @@ from maya import cmds class CreateAss(plugin.Creator): - """Arnold Archive""" + """Arnold Scene Source""" name = "ass" - label = "Ass StandIn" + label = "Arnold Scene Source" family = "ass" icon = "cube" + expandProcedurals = False + motionBlur = True + motionBlurKeys = 2 + motionBlurLength = 0.5 + maskOptions = False + maskCamera = False + maskLight = False + maskShape = False + maskShader = False + maskOverride = False + maskDriver = False + maskFilter = False + maskColor_manager = False + maskOperator = False def __init__(self, *args, **kwargs): super(CreateAss, self).__init__(*args, **kwargs) @@ -22,17 +34,27 @@ class CreateAss(plugin.Creator): # Add animation data self.data.update(lib.collect_animation_data()) - # Vertex colors with the geometry - self.data["exportSequence"] = False + self.data["expandProcedurals"] = self.expandProcedurals + self.data["motionBlur"] = self.motionBlur + self.data["motionBlurKeys"] = self.motionBlurKeys + self.data["motionBlurLength"] = self.motionBlurLength + + # Masks + self.data["maskOptions"] = self.maskOptions + self.data["maskCamera"] = self.maskCamera + self.data["maskLight"] = self.maskLight + self.data["maskShape"] = self.maskShape + self.data["maskShader"] = self.maskShader + self.data["maskOverride"] = self.maskOverride + self.data["maskDriver"] = self.maskDriver + self.data["maskFilter"] = self.maskFilter + self.data["maskColor_manager"] = 
self.maskColor_manager + self.data["maskOperator"] = self.maskOperator def process(self): instance = super(CreateAss, self).process() - # data = OrderedDict(**self.data) - - - - nodes = list() + nodes = [] if (self.options or {}).get("useSelection"): nodes = cmds.ls(selection=True) @@ -42,7 +64,3 @@ class CreateAss(plugin.Creator): assContent = cmds.sets(name="content_SET") assProxy = cmds.sets(name="proxy_SET", empty=True) cmds.sets([assContent, assProxy], forceElement=instance) - - # self.log.info(data) - # - # self.data = data diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index ab8fe12079..cdec140ea8 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -28,6 +28,7 @@ class CreatePointCache(plugin.Creator): self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups self.data["worldSpace"] = True # Default to exporting world-space + self.data["refresh"] = False # Default to suspend refresh. # Add options for custom attributes self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 7c9a1b76fb..b5e05d6665 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,4 +1,5 @@ from maya import cmds +from openpype.pipeline.publish import KnownPublishError import pyblish.api @@ -6,6 +7,7 @@ import pyblish.api class CollectAssData(pyblish.api.InstancePlugin): """Collect Ass data.""" + # Offset to be after renderable camera collection. order = pyblish.api.CollectorOrder + 0.2 label = 'Collect Ass' families = ["ass"] @@ -23,8 +25,23 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - assert len(members) == 1, "You have multiple proxy meshes, please only use one" + if len(members) != 1: + msg = "You have multiple proxy meshes, please only use one" + raise KnownPublishError(msg) instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) + # Use camera in object set if present else default to render globals + # camera. 
+ cameras = cmds.ls(type="camera", long=True) + renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)] + camera = renderable[0] + for node in instance.data["setMembers"]: + camera_shapes = cmds.listRelatives( + node, shapes=True, type="camera" + ) + if camera_shapes: + camera = node + instance.data["camera"] = camera + self.log.debug("data: {}".format(instance.data)) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py new file mode 100644 index 0000000000..bb37fe3a7e --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +import pyblish.api + + +class CollectGLTF(pyblish.api.InstancePlugin): + """Collect Assets for GLTF/GLB export.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Asset for GLTF/GLB export" + families = ["model", "animation", "pointcache"] + + def process(self, instance): + if not instance.data.get("families"): + instance.data["families"] = [] + + if "gltf" not in instance.data["families"]: + instance.data["families"].append("gltf") diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 157be5717b..e1adffaaaf 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -403,13 +403,13 @@ class CollectLook(pyblish.api.InstancePlugin): # history = cmds.listHistory(look_sets) history = [] for material in materials: - history.extend(cmds.listHistory(material)) + history.extend(cmds.listHistory(material, ac=True)) # handle VrayPluginNodeMtl node - see #1397 vray_plugin_nodes = cmds.ls( history, type="VRayPluginNodeMtl", long=True) for vray_node in vray_plugin_nodes: - history.extend(cmds.listHistory(vray_node)) + history.extend(cmds.listHistory(vray_node, ac=True)) # handling render attribute sets render_set_types = [ diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 5c21a4ff08..049f256a7a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,77 +1,93 @@ import os from maya import cmds +import arnold from openpype.pipeline import publish -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.hosts.maya.api.lib import maintained_selection, attribute_values class ExtractAssStandin(publish.Extractor): - """Extract the content of the instance to a ass file + """Extract the content of the instance to a ass file""" - Things to pay attention to: - - If animation is toggled, are the frames correct - - - """ - - label = "Ass Standin (.ass)" + label = "Arnold Scene Source (.ass)" hosts = ["maya"] families = ["ass"] asciiAss = False def process(self, instance): - - sequence = instance.data.get("exportSequence", False) - staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) - filenames = list() + filenames = [] file_path = os.path.join(staging_dir, filename) + # Mask + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in 
node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + # Motion blur + values = { + "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get( + "motionBlur", True + ), + "defaultArnoldRenderOptions.motion_steps": instance.data.get( + "motionBlurKeys", 2 + ), + "defaultArnoldRenderOptions.motion_frames": instance.data.get( + "motionBlurLength", 0.5 + ) + } + # Write out .ass file + kwargs = { + "filename": file_path, + "startFrame": instance.data.get("frameStartHandle", 1), + "endFrame": instance.data.get("frameEndHandle", 1), + "frameStep": instance.data.get("step", 1), + "selected": True, + "asciiAss": self.asciiAss, + "shadowLinks": True, + "lightLinks": True, + "boundingBox": True, + "expandProcedurals": instance.data.get("expandProcedurals", False), + "camera": instance.data["camera"], + "mask": mask + } + self.log.info("Writing: '%s'" % file_path) - with maintained_selection(): - self.log.info("Writing: {}".format(instance.data["setMembers"])) - cmds.select(instance.data["setMembers"], noExpand=True) + with attribute_values(values): + with maintained_selection(): + self.log.info( + "Writing: {}".format(instance.data["setMembers"]) + ) + cmds.select(instance.data["setMembers"], noExpand=True) - if sequence: - self.log.info("Extracting ass sequence") + self.log.info( + "Extracting ass sequence with: {}".format(kwargs) + ) - # Collect the start and end including handles - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 0) + exported_files = cmds.arnoldExportAss(**kwargs) - exported_files = cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=self.asciiAss, - shadowLinks=True, - lightLinks=True, - boundingBox=True, - startFrame=start, - endFrame=end, - frameStep=step - ) for file in exported_files: filenames.append(os.path.split(file)[1]) + self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=False, - shadowLinks=True, - lightLinks=True, - boundingBox=True - ) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) if "representations" not in instance.data: instance.data["representations"] = [] @@ -79,13 +95,11 @@ class ExtractAssStandin(publish.Extractor): representation = { 'name': 'ass', 'ext': 'ass', - 'files': filenames, - "stagingDir": staging_dir + 'files': filenames if len(filenames) > 1 else filenames[0], + "stagingDir": staging_dir, + 'frameStart': kwargs["startFrame"] } - if sequence: - representation['frameStart'] = start - instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py new file mode 100644 index 0000000000..f5ceed5f33 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -0,0 +1,65 @@ +import os + +from maya import cmds, mel +import pyblish.api + +from openpype.pipeline import publish +from openpype.hosts.maya.api import lib +from openpype.hosts.maya.api.gltf import extract_gltf + + +class ExtractGLB(publish.Extractor): + + order = pyblish.api.ExtractorOrder + hosts = ["maya"] + label = "Extract GLB" + families = ["gltf"] + + def process(self, instance): + 
staging_dir = self.staging_dir(instance) + filename = "{0}.glb".format(instance.name) + path = os.path.join(staging_dir, filename) + + self.log.info("Extracting GLB to: {}".format(path)) + + nodes = instance[:] + + self.log.info("Instance: {0}".format(nodes)) + + start_frame = instance.data('frameStart') or \ + int(cmds.playbackOptions(query=True, + animationStartTime=True))# noqa + end_frame = instance.data('frameEnd') or \ + int(cmds.playbackOptions(query=True, + animationEndTime=True)) # noqa + fps = mel.eval('currentTimeUnitToFPS()') + + options = { + "sno": True, # selectedNodeOnly + "nbu": True, # .bin instead of .bin0 + "ast": start_frame, + "aet": end_frame, + "afr": fps, + "dsa": 1, + "acn": instance.name, + "glb": True, + "vno": True # visibleNodeOnly + } + with lib.maintained_selection(): + cmds.select(nodes, hi=True, noExpand=True) + extract_gltf(staging_dir, + instance.name, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'glb', + 'ext': 'glb', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract GLB successful to: {0}".format(path)) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 403b4ee6bc..df07a674dc 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -90,7 +90,7 @@ def maketx(source, destination, args, logger): maketx_path = get_oiio_tools_path("maketx") - if not os.path.exists(maketx_path): + if not maketx_path: print( "OIIO tool not found in {}".format(maketx_path)) raise AssertionError("OIIO tool not found") diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index b19d24fad7..1f9f9db99a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -115,6 +115,10 @@ class ExtractPlayblast(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. 
+ pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -135,6 +139,8 @@ class ExtractPlayblast(publish.Extractor): path = capture.capture(log=self.log, **preset) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 7c1c6d5c12..23b76a48c2 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,13 +86,15 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - with suspended_refresh(): + with suspended_refresh(suspend=instance.data.get("refresh", False)): with maintained_selection(): cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) + extract_alembic( + file=path, + startFrame=start, + endFrame=end, + **options + ) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 712159c2be..06244cf003 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -117,6 +117,10 @@ class ExtractThumbnail(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): # Force viewer to False in call to capture because we have our own # viewer opening call to allow a signal to trigger between @@ -136,6 +140,7 @@ class ExtractThumbnail(publish.Extractor): _, thumbnail = os.path.split(playblast) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) self.log.info("file list {}".format(thumbnail)) diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py index 1c5d5c4005..9d25afe2b6 100644 --- a/openpype/hosts/nuke/addon.py +++ b/openpype/hosts/nuke/addon.py @@ -27,7 +27,12 @@ class NukeAddon(OpenPypeModule, IHostAddon): new_nuke_paths.append(norm_path) env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + # Remove auto screen scale factor for Qt + # - let Nuke decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 2691b7447a..bde06e4fd7 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2961,7 +2961,7 @@ def get_viewer_config_from_string(input_string): viewer = split[1] display = split[0] elif "(" in viewer: - pattern = r"([\w\d\s]+).*[(](.*)[)]" + pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]" result = re.findall(pattern, viewer) try: result = result.pop() diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c343c635fa..fb707ca44c 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -364,6 +364,9 @@ def containerise(node, 
set_avalon_knob_data(node, data) + # set tab to first native + node.setTab(0) + return node diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index f5dfc8c0ab..9fef7424c8 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -65,6 +65,9 @@ class AlembicCameraLoader(load.LoaderPlugin): object_name, file), inpanel=False ) + # hide property panel + camera_node.hideControlPanel() + camera_node.forceValidate() camera_node["frame_rate"].setValue(float(fps)) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index b17356c5c7..565d777811 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -145,6 +145,9 @@ class LoadClip(plugin.NukeLoader): "Read", "name {}".format(read_name)) + # hide property panel + read_node.hideControlPanel() + # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index d164e0604c..cef4b0a5fc 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -89,6 +89,9 @@ class LoadEffects(load.LoaderPlugin): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 44565c139d..9bd40be816 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -90,6 +90,9 @@ class LoadEffectsInputProcess(load.LoaderPlugin): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 3e81ef999b..49dc12f588 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -62,7 +62,9 @@ class LoadImage(load.LoaderPlugin): def load(self, context, name, namespace, options): self.log.info("__ options: `{}`".format(options)) - frame_number = options.get("frame_number", 1) + frame_number = options.get( + "frame_number", int(nuke.root()["first_frame"].getValue()) + ) version = context['version'] version_data = version.get("data", {}) @@ -112,6 +114,10 @@ class LoadImage(load.LoaderPlugin): r = nuke.createNode( "Read", "name {}".format(read_name)) + + # hide property panel + r.hideControlPanel() + r["file"].setValue(file) # Set colorspace defined in version data diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 151401bad3..ad985e83c6 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -63,6 +63,10 @@ class AlembicModelLoader(load.LoaderPlugin): object_name, file), inpanel=False ) + + # hide property panel + model_node.hideControlPanel() + model_node.forceValidate() # Ensure all items are imported and selected. 
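
The Nuke loader diffs in this patch all add the same two steps after node creation: pass inpanel=False to nuke.createNode() and then call hideControlPanel(), so that loading many items does not open a property panel for every created node. The following is an illustrative sketch of that pattern only, not part of the patch itself; it assumes a running Nuke session where the "nuke" module is importable, and the node name and file path are hypothetical.

import nuke

# Create a Read node without raising its property panel.
read_node = nuke.createNode("Read", "name loaded_plate1", inpanel=False)

# Some invocation contexts still open the panel, so the loaders above also
# hide it explicitly right after creation.
read_node.hideControlPanel()

# Point the node at the loaded file afterwards, as the loaders do.
read_node["file"].setValue("/path/to/plate.%04d.exr")  # hypothetical path
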
diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 21e384b538..f0972f85d2 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -71,6 +71,9 @@ class LinkAsGroup(load.LoaderPlugin): "Precomp", "file {}".format(file)) + # hide property panel + P.hideControlPanel() + # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) self.log.info("colorspace: {}\n".format(colorspace)) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index e7197b4fa8..06c086b10d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -298,7 +298,7 @@ class ExtractSlateFrame(publish.Extractor): def add_comment_slate_node(self, instance, node): - comment = instance.context.data.get("comment") + comment = instance.data["comment"] intent = instance.context.data.get("intent") if not isinstance(intent, dict): intent = { diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 2792a775e0..7672458165 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -29,7 +29,8 @@ class CreateImage(create.LegacyCreator): if len(selection) > 1: # Ask user whether to create one image or image per selected # item. - msg_box = QtWidgets.QMessageBox() + active_window = QtWidgets.QApplication.activeWindow() + msg_box = QtWidgets.QMessageBox(parent=active_window) msg_box.setIcon(QtWidgets.QMessageBox.Warning) msg_box.setText( "Multiple layers selected." @@ -102,7 +103,7 @@ class CreateImage(create.LegacyCreator): if group.long_name: for directory in group.long_name[::-1]: name = directory.replace(stub.PUBLISH_ICON, '').\ - replace(stub.LOADED_ICON, '') + replace(stub.LOADED_ICON, '') long_names.append(name) self.data.update({"subset": subset_name}) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index 56ea82f6b6..a7ae02a2eb 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -1,5 +1,7 @@ +import os import pyblish.api +from openpype.settings import get_project_settings from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -18,23 +20,38 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True - # from presets - main_workfile_extensions = ['mra'] - def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] - if ext not in self.main_workfile_extensions: + main_workfile_extensions = self.get_main_workfile_extensions() + if ext not in main_workfile_extensions: self.log.warning("Only secondary workfile present!") return if not instance.data.get("resources"): msg = "No secondary workfile present for workfile '{}'". 
\ format(instance.data["name"]) - ext = self.main_workfile_extensions[0] + ext = main_workfile_extensions[0] formatting_data = {"file_name": instance.data["name"], "extension": ext} raise PublishXmlValidationError(self, msg, formatting_data=formatting_data ) + + @staticmethod + def get_main_workfile_extensions(): + project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) + + try: + extensions = (project_settings["standalonepublisher"] + ["publish"] + ["CollectTextures"] + ["main_workfile_extensions"]) + except KeyError: + raise Exception("Setting 'Main workfile extensions' not found." + " The setting must be set for the" + " 'Collect Texture' publish plugin of the" + " 'Standalone Publish' tool.") + + return extensions diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py new file mode 100644 index 0000000000..19f956a50e --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +"""Creator of online files. + +Online file retain their original name and use it as subset name. To +avoid conflicts, this creator checks if subset with this name already +exists under selected asset. +""" +from pathlib import Path + +from openpype.client import get_subset_by_name, get_asset_by_name +from openpype.lib.attribute_definitions import FileDef +from openpype.pipeline import ( + CreatedInstance, + CreatorError +) +from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator + + +class OnlineCreator(TrayPublishCreator): + """Creates instance from file and retains its original name.""" + + identifier = "io.openpype.creators.traypublisher.online" + label = "Online" + family = "online" + description = "Publish file retaining its original file name" + extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg"] + + def get_detail_description(self): + return """# Create file retaining its original file name. + + This will publish files using template helping to retain original + file name and that file name is used as subset name. 
+
+        By default it tries to guard against multiple publishes of the same
+        file."""
+
+    def get_icon(self):
+        return "fa.file"
+
+    def create(self, subset_name, instance_data, pre_create_data):
+        repr_file = pre_create_data.get("representation_file")
+        if not repr_file:
+            raise CreatorError("No files specified")
+
+        files = repr_file.get("filenames")
+        if not files:
+            # this should never happen
+            raise CreatorError("Missing files from representation")
+
+        origin_basename = Path(files[0]).stem
+
+        asset = get_asset_by_name(
+            self.project_name, instance_data["asset"], fields=["_id"])
+        if get_subset_by_name(
+                self.project_name, origin_basename, asset["_id"],
+                fields=["_id"]):
+            raise CreatorError(f"subset with {origin_basename} already "
+                               "exists in selected asset")
+
+        instance_data["originalBasename"] = origin_basename
+        subset_name = origin_basename
+
+        instance_data["creator_attributes"] = {
+            "path": (Path(repr_file["directory"]) / files[0]).as_posix()
+        }
+
+        # Create new instance
+        new_instance = CreatedInstance(self.family, subset_name,
+                                       instance_data, self)
+        self._store_new_instance(new_instance)
+
+    def get_pre_create_attr_defs(self):
+        return [
+            FileDef(
+                "representation_file",
+                folders=False,
+                extensions=self.extensions,
+                allow_sequences=False,
+                single_item=True,
+                label="Representation",
+            )
+        ]
+
+    def get_subset_name(
+            self,
+            variant,
+            task_name,
+            asset_doc,
+            project_name,
+            host_name=None,
+            instance=None
+    ):
+        if instance is None:
+            return "{originalBasename}"
+
+        return instance.data["subset"]
diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py
new file mode 100644
index 0000000000..a3f86afa13
--- /dev/null
+++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from pathlib import Path
+
+
+class CollectOnlineFile(pyblish.api.InstancePlugin):
+    """Collect online file and retain its file name."""
+    label = "Collect Online File"
+    order = pyblish.api.CollectorOrder
+    families = ["online"]
+    hosts = ["traypublisher"]
+
+    def process(self, instance):
+        file = Path(instance.data["creator_attributes"]["path"])
+
+        instance.data["representations"].append(
+            {
+                "name": file.suffix.lstrip("."),
+                "ext": file.suffix.lstrip("."),
+                "files": file.name,
+                "stagingDir": file.parent.as_posix()
+            }
+        )
diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
new file mode 100644
index 0000000000..12b2e72ced
--- /dev/null
+++ b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+
+from openpype.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin,
+)
+from openpype.client import get_subset_by_name
+
+
+class ValidateOnlineFile(OptionalPyblishPluginMixin,
+                         pyblish.api.InstancePlugin):
+    """Validate that subset doesn't exist yet."""
+    label = "Validate Existing Online Files"
+    hosts = ["traypublisher"]
+    families = ["online"]
+    order = ValidateContentsOrder
+
+    optional = True
+
+    def process(self, instance):
+        project_name = instance.context.data["projectName"]
+        asset_id = instance.data["assetEntity"]["_id"]
+        subset = get_subset_by_name(
+            project_name, instance.data["subset"], asset_id)
+
+        if subset:
+            raise PublishValidationError(
+                "Subset to be published already
exists.", + title=self.label + ) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 1ebaf1da64..78074f720c 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -25,6 +25,7 @@ class ExtractSequence(pyblish.api.Extractor): label = "Extract Sequence" hosts = ["tvpaint"] families = ["review", "renderPass", "renderLayer", "renderScene"] + families_to_review = ["review"] # Modifiable with settings review_bg = [255, 255, 255, 255] @@ -133,9 +134,9 @@ class ExtractSequence(pyblish.api.Extractor): output_frame_start ) - # Fill tags and new families + # Fill tags and new families from project settings tags = [] - if family_lowered in ("review", "renderlayer", "renderscene"): + if family_lowered in self.families_to_review: tags.append("review") # Sequence of one frame diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..ed81104c05 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,150 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked( + "PropertyEditor"); + + FString Left, Right; + GetPathName().Split("/" + GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = 
FPackageName::GetLongPackagePath(*selfFullPath); + UObject* Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + if (AssetDataInternal.Emplace(Asset).IsValidId()) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) { - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); - - if (assetDir.StartsWith(*selfDir)) + if (Cast(InAssetData.GetAsset()) == nullptr) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + if (AssetDataInternal.Contains(nullptr)) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + AssetDataInternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; + + 
Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; + + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" + ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "AssetContainer") + // Check for duplicated assets + for (const auto& Asset : AssetDataInternal) { + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification( + "You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + } - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); + // Check if no UOpenPypePublishInstance type assets are included + for (const auto& Asset : AssetDataExternal) + { + if (Cast(Asset.Get()) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } + +#endif diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..0e946fb039 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -5,17 +5,99 @@ UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); + + 
/** + /** + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. + * + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetAllAssets() const + { + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; + + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + - UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! 
+ */ + UPROPERTY(EditAnywhere, Category = "Assets") + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets") + TSet> AssetDataExternal; + + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const UObject* InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..322663eeec 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,151 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "AssetToolsModule.h" +#include "Framework/Notifications/NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split(GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); 
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + const TObjectPtr Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + if (AssetDataInternal.Emplace(Asset).IsValidId()) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) { - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); - - if (assetDir.StartsWith(*selfDir)) + if (Cast(InAssetData.GetAsset()) == nullptr) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + if (AssetDataInternal.Contains(nullptr)) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + AssetDataInternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr& InAsset) const 
+{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; + + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; + + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" + ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "AssetContainer") - { - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); + // Check for duplicated assets + for (const auto& Asset : AssetDataInternal) + { + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } + + // Check if no UOpenPypePublishInstance type assets are included + for (const auto& Asset : AssetDataExternal) + { + if (Cast(Asset.Get()) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } + +#endif diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h 
b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..2f066bd94b 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,21 +1,97 @@ #pragma once +#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); + /** + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. + * + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetAllAssets() const + { + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; + + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } - UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; + + /** + * This property allows the instance to include other assets from any other directory than what it's currently + * monitoring. + * @attention assets have to be added manually! They are not automatically registered or added! 
+ */ + UPROPERTY(EditAnywhere, Category="Assets") + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const TObjectPtr& InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif +}; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 2bf097de41..79ed499a20 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -86,6 +86,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): first_file = task_data["files"][0] _, extension = os.path.splitext(first_file) + extension = extension.lower() family, families, tags = self._get_family( self.task_type_to_family, task_type, @@ -180,6 +181,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): def _get_single_repre(self, task_dir, files, tags): _, ext = os.path.splitext(files[0]) + ext = ext.lower() repre_data = { "name": ext[1:], "ext": ext[1:], @@ -199,6 +201,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): frame_start = list(collections[0].indexes)[0] frame_end = list(collections[0].indexes)[-1] ext = collections[0].tail + ext = ext.lower() repre_data = { "frameStart": frame_start, "frameEnd": frame_end, @@ -244,8 +247,17 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): for config in families_config: if is_sequence != config["is_sequence"]: continue - if (extension in config["extensions"] or - '' in config["extensions"]): # all extensions setting + extensions = config.get("extensions") or [] + lower_extensions = set() + for ext in extensions: + if ext: + ext = ext.lower() + if ext.startswith("."): + ext = ext[1:] + lower_extensions.add(ext) + + # all extensions setting + if not lower_extensions or extension in lower_extensions: found_family = config["result_family"] break diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 6baeaec045..0df7b16e64 100644 --- a/openpype/lib/attribute_definitions.py +++ 
b/openpype/lib/attribute_definitions.py
@@ -105,11 +105,14 @@ class AbtractAttrDef(object):
         How to force to set `key` attribute?
 
     Args:
-        key(str): Under which key will be attribute value stored.
-        label(str): Attribute label.
-        tooltip(str): Attribute tooltip.
-        is_label_horizontal(bool): UI specific argument. Specify if label is
+        key (str): Under which key will be attribute value stored.
+        default (Any): Default value of an attribute.
+        label (str): Attribute label.
+        tooltip (str): Attribute tooltip.
+        is_label_horizontal (bool): UI specific argument. Specify if label is
            next to value input or ahead.
+        hidden (bool): Should the item be hidden (for UI purposes).
+        disabled (bool): Item will be visible but disabled (for UI purposes).
    """
 
    type_attributes = []
@@ -117,16 +120,29 @@ class AbtractAttrDef(object):
    is_value_def = True
 
    def __init__(
-        self, key, default, label=None, tooltip=None, is_label_horizontal=None
+        self,
+        key,
+        default,
+        label=None,
+        tooltip=None,
+        is_label_horizontal=None,
+        hidden=False,
+        disabled=False
    ):
        if is_label_horizontal is None:
            is_label_horizontal = True
+
+        if hidden is None:
+            hidden = False
+
        self.key = key
        self.label = label
        self.tooltip = tooltip
        self.default = default
        self.is_label_horizontal = is_label_horizontal
-        self._id = uuid.uuid4()
+        self.hidden = hidden
+        self.disabled = disabled
+        self._id = uuid.uuid4().hex
 
        self.__init__class__ = AbtractAttrDef
 
@@ -173,7 +189,9 @@ class AbtractAttrDef(object):
            "label": self.label,
            "tooltip": self.tooltip,
            "default": self.default,
-            "is_label_horizontal": self.is_label_horizontal
+            "is_label_horizontal": self.is_label_horizontal,
+            "hidden": self.hidden,
+            "disabled": self.disabled
        }
        for attr in self.type_attributes:
            data[attr] = getattr(self, attr)
@@ -235,6 +253,26 @@ class UnknownDef(AbtractAttrDef):
        return value
 
 
+class HiddenDef(AbtractAttrDef):
+    """Hidden value of Any type.
+
+    This attribute can be used for UI purposes to pass values related
+    to other attributes (e.g. in multi-page UIs).
+
+    Keep in mind the value should be possible to parse by json parser.
+    """
+
+    type = "hidden"
+
+    def __init__(self, key, default=None, **kwargs):
+        kwargs["default"] = default
+        kwargs["hidden"] = True
+        super(HiddenDef, self).__init__(key, **kwargs)
+
+    def convert_value(self, value):
+        return value
+
+
 class NumberDef(AbtractAttrDef):
    """Number definition.
 
diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py
index 1626bec6b6..f265b8815c 100644
--- a/openpype/lib/file_transaction.py
+++ b/openpype/lib/file_transaction.py
@@ -14,9 +14,9 @@ else:
 
 
 class FileTransaction(object):
-    """
+    """File transaction with rollback options.
 
-    The file transaction is a three step process.
+    The file transaction is a three-step process.
 
    1) Rename any existing files to a "temporary backup" during `process()`
    2) Copy the files to final destination during `process()`
@@ -39,14 +39,12 @@ class FileTransaction(object):
 
    Warning:
        Any folders created during the transfer will not be removed.
-
    """
 
    MODE_COPY = 0
    MODE_HARDLINK = 1
 
    def __init__(self, log=None):
-
        if log is None:
            log = logging.getLogger("FileTransaction")
 
@@ -63,49 +61,64 @@ class FileTransaction(object):
        self._backup_to_original = {}
 
    def add(self, src, dst, mode=MODE_COPY):
-        """Add a new file to transfer queue"""
+        """Add a new file to transfer queue.
+
+        Args:
+            src (str): Source path.
+            dst (str): Destination path.
+            mode (MODE_COPY, MODE_HARDLINK): Transfer mode.
+        """
+
        opts = {"mode": mode}
 
-        src = os.path.abspath(src)
-        dst = os.path.abspath(dst)
+        src = os.path.normpath(os.path.abspath(src))
+        dst = os.path.normpath(os.path.abspath(dst))
 
        if dst in self._transfers:
            queued_src = self._transfers[dst][0]
            if src == queued_src:
-                self.log.debug("File transfer was already "
-                               "in queue: {} -> {}".format(src, dst))
+                self.log.debug(
+                    "File transfer was already in queue: {} -> {}".format(
+                        src, dst))
                return
            else:
                self.log.warning("File transfer in queue replaced..")
-                self.log.debug("Removed from queue: "
-                               "{} -> {}".format(queued_src, dst))
 
-        self.log.debug("Added to queue: {} -> {}".format(src, dst))
+                self.log.debug(
+                    "Removed from queue: {} -> {} replaced by {} -> {}".format(
+                        queued_src, dst, src, dst))
 
        self._transfers[dst] = (src, opts)
 
    def process(self):
-        # Backup any existing files
-        for dst in self._transfers.keys():
-            if os.path.exists(dst):
-                # Backup original file
-                # todo: add timestamp or uuid to ensure unique
-                backup = dst + ".bak"
-                self._backup_to_original[backup] = dst
-                self.log.debug("Backup existing file: "
-                               "{} -> {}".format(dst, backup))
-                os.rename(dst, backup)
+        for dst, (src, _) in self._transfers.items():
+            if dst == src or not os.path.exists(dst):
+                continue
+
+            # Backup original file
+            # todo: add timestamp or uuid to ensure unique
+            backup = dst + ".bak"
+            self._backup_to_original[backup] = dst
+            self.log.debug(
+                "Backup existing file: {} -> {}".format(dst, backup))
+            os.rename(dst, backup)
 
        # Copy the files to transfer
        for dst, (src, opts) in self._transfers.items():
+            if dst == src:
+                self.log.debug(
+                    "Source and destination are the same file {} -> {}".format(
+                        src, dst))
+                continue
+
            self._create_folder_for_file(dst)
 
            if opts["mode"] == self.MODE_COPY:
                self.log.debug("Copying file ... {} -> {}".format(src, dst))
                copyfile(src, dst)
            elif opts["mode"] == self.MODE_HARDLINK:
-                self.log.debug("Hardlinking file ... {} -> {}".format(src,
-                                                                      dst))
+                self.log.debug("Hardlinking file ...
{} -> {}".format( + src, dst)) create_hard_link(src, dst) self._transferred.append(dst) @@ -116,23 +129,21 @@ class FileTransaction(object): try: os.remove(backup) except OSError: - self.log.error("Failed to remove backup file: " - "{}".format(backup), - exc_info=True) + self.log.error( + "Failed to remove backup file: {}".format(backup), + exc_info=True) def rollback(self): - errors = 0 - # Rollback any transferred files for path in self._transferred: try: os.remove(path) except OSError: errors += 1 - self.log.error("Failed to rollback created file: " - "{}".format(path), - exc_info=True) + self.log.error( + "Failed to rollback created file: {}".format(path), + exc_info=True) # Rollback the backups for backup, original in self._backup_to_original.items(): @@ -140,13 +151,15 @@ class FileTransaction(object): os.rename(backup, original) except OSError: errors += 1 - self.log.error("Failed to restore original file: " - "{} -> {}".format(backup, original), - exc_info=True) + self.log.error( + "Failed to restore original file: {} -> {}".format( + backup, original), + exc_info=True) if errors: - self.log.error("{} errors occurred during " - "rollback.".format(errors), exc_info=True) + self.log.error( + "{} errors occurred during rollback.".format(errors), + exc_info=True) six.reraise(*sys.exc_info()) @property diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index b160054e38..0f99efb430 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -422,7 +422,7 @@ class TemplateResult(str): cls = self.__class__ return cls( - os.path.normpath(self), + os.path.normpath(self.replace("\\", "/")), self.template, self.solved, self.used_values, diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 0bfccd3443..57279d0380 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -77,26 +77,38 @@ def get_transcode_temp_directory(): ) -def get_oiio_info_for_input(filepath, logger=None): +def get_oiio_info_for_input(filepath, logger=None, subimages=False): """Call oiiotool to get information about input and return stdout. Stdout should contain xml format string. 
""" args = [ - get_oiio_tools_path(), "--info", "-v", "-i:infoformat=xml", filepath + get_oiio_tools_path(), + "--info", + "-v" ] + if subimages: + args.append("-a") + + args.extend(["-i:infoformat=xml", filepath]) + output = run_subprocess(args, logger=logger) output = output.replace("\r\n", "\n") xml_started = False + subimages_lines = [] lines = [] for line in output.split("\n"): if not xml_started: if not line.startswith("<"): continue xml_started = True + if xml_started: lines.append(line) + if line == "": + subimages_lines.append(lines) + lines = [] if not xml_started: raise ValueError( @@ -105,12 +117,19 @@ def get_oiio_info_for_input(filepath, logger=None): ) ) - xml_text = "\n".join(lines) - return parse_oiio_xml_output(xml_text, logger=logger) + output = [] + for subimage_lines in subimages_lines: + xml_text = "\n".join(subimage_lines) + output.append(parse_oiio_xml_output(xml_text, logger=logger)) + + if subimages: + return output + return output[0] class RationalToInt: """Rational value stored as division of 2 integers using string.""" + def __init__(self, string_value): parts = string_value.split("/") top = float(parts[0]) @@ -157,16 +176,16 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "int": return int(value) - if value_type == "float": + if value_type in ("float", "double"): return float(value) # Vectors will probably have more types - if value_type in ("vec2f", "float2"): + if value_type in ("vec2f", "float2", "float2d"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 # - are returned as list of lists - if value_type == "matrix": + if value_type in ("matrix", "matrixd"): output = [] current_index = -1 parts = value.split(",") @@ -198,7 +217,7 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "rational2i": return RationalToInt(value) - if value_type == "vector": + if value_type in ("vector", "vectord"): parts = [part.strip() for part in value.split(",")] output = [] for part in parts: @@ -380,6 +399,10 @@ def should_convert_for_ffmpeg(src_filepath): if not input_info: return None + subimages = input_info.get("subimages") + if subimages is not None and subimages > 1: + return True + # Check compression compression = input_info["attribs"].get("compression") if compression in ("dwaa", "dwab"): @@ -453,7 +476,7 @@ def convert_for_ffmpeg( if input_frame_start is not None and input_frame_end is not None: is_sequence = int(input_frame_end) != int(input_frame_start) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -488,13 +511,21 @@ def convert_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) - oiio_cmd.extend([ + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - "-i:ch={}".format(input_channels_str), first_input_path, + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + + oiio_cmd.extend([ + input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + "--subimage", "0" 
]) # Add frame definitions to arguments @@ -588,7 +619,7 @@ def convert_input_paths_for_ffmpeg( " \".exr\" extension. Got \"{}\"." ).format(ext)) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -606,12 +637,22 @@ def convert_input_paths_for_ffmpeg( red, green, blue, alpha = review_channels input_channels = [red, green, blue] + # TODO find subimage inder where rgba is available for multipart exrs channels_arg = "R={},G={},B={}".format(red, green, blue) if alpha is not None: channels_arg += ",A={}".format(alpha) input_channels.append(alpha) input_channels_str = ",".join(input_channels) + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: + # Tell oiiotool which channels should be loaded + # - other channels are not loaded to memory so helps to avoid memory + # leak issues + # - this option is crashing if used on multipart exrs + input_arg += ":ch={}".format(input_channels_str) + for input_path in input_paths: # Prepare subprocess arguments oiio_cmd = [ @@ -625,13 +666,12 @@ def convert_input_paths_for_ffmpeg( oiio_cmd.extend(["--compression", compression]) oiio_cmd.extend([ - # Tell oiiotool which channels should be loaded - # - other channels are not loaded to memory so helps to - # avoid memory leak issues - "-i:ch={}".format(input_channels_str), input_path, + input_arg, input_path, # Tell oiiotool which channels should be put to top stack # (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 099f9a34ba..b6797dbba0 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -60,9 +60,10 @@ def find_executable(executable): path to file. Returns: - str: Full path to executable with extension (is file). - None: When the executable was not found. + Union[str, None]: Full path to executable with extension which was + found otherwise None. """ + # Skip if passed path is file if is_file_executable(executable): return executable @@ -70,24 +71,36 @@ def find_executable(executable): low_platform = platform.system().lower() _, ext = os.path.splitext(executable) - # Prepare variants for which it will be looked - variants = [executable] - # Add other extension variants only if passed executable does not have one - if not ext: - if low_platform == "windows": - exts = [".exe", ".ps1", ".bat"] - for ext in os.getenv("PATHEXT", "").split(os.pathsep): - ext = ext.lower() - if ext and ext not in exts: - exts.append(ext) - else: - exts = [".sh"] + # Prepare extensions to check + exts = set() + if ext: + exts.add(ext.lower()) - for ext in exts: - variant = executable + ext - if is_file_executable(variant): - return variant - variants.append(variant) + else: + # Add other possible extension variants only if passed executable + # does not have any + if low_platform == "windows": + exts |= {".exe", ".ps1", ".bat"} + for ext in os.getenv("PATHEXT", "").split(os.pathsep): + exts.add(ext.lower()) + + else: + exts |= {".sh"} + + # Executable is a path but there may be missing extension + # - this can happen primarily on windows where + # e.g. 
"ffmpeg" should be "ffmpeg.exe" + exe_dir, exe_filename = os.path.split(executable) + if exe_dir and os.path.isdir(exe_dir): + for filename in os.listdir(exe_dir): + filepath = os.path.join(exe_dir, filename) + basename, ext = os.path.splitext(filename) + if ( + basename == exe_filename + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath # Get paths where to look for executable path_str = os.environ.get("PATH", None) @@ -97,13 +110,27 @@ def find_executable(executable): elif hasattr(os, "defpath"): path_str = os.defpath - if path_str: - paths = path_str.split(os.pathsep) - for path in paths: - for variant in variants: - filepath = os.path.abspath(os.path.join(path, variant)) - if is_file_executable(filepath): - return filepath + if not path_str: + return None + + paths = path_str.split(os.pathsep) + for path in paths: + if not os.path.isdir(path): + continue + for filename in os.listdir(path): + filepath = os.path.abspath(os.path.join(path, filename)) + # Filename matches executable exactly + if filename == executable and is_file_executable(filepath): + return filepath + + basename, ext = os.path.splitext(filename) + if ( + basename == executable + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath + return None @@ -272,8 +299,8 @@ def get_oiio_tools_path(tool="oiiotool"): oiio_dir = get_vendor_bin_path("oiio") if platform.system().lower() == "linux": oiio_dir = os.path.join(oiio_dir, "bin") - default_path = os.path.join(oiio_dir, tool) - if _oiio_executable_validation(default_path): + default_path = find_executable(os.path.join(oiio_dir, tool)) + if default_path and _oiio_executable_validation(default_path): tool_executable_path = default_path # Look to PATH for the tool diff --git a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py similarity index 73% rename from openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py rename to openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea109e9445..038ee4fc03 100644 --- a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -2,16 +2,14 @@ import os import re import json import getpass - import requests import pyblish.api -class ExtractCelactionDeadline(pyblish.api.InstancePlugin): +class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". + Renders are submitted to a Deadline Web Service. 
""" @@ -26,27 +24,21 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): deadline_pool_secondary = "" deadline_group = "" deadline_chunk_size = 1 - - enviro_filter = [ - "FTRACK_API_USER", - "FTRACK_API_KEY", - "FTRACK_SERVER" - ] + deadline_job_delay = "00:00:08:00" def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" context = instance.context - deadline_url = ( - context.data["system_settings"] - ["modules"] - ["deadline"] - ["DEADLINE_REST_URL"] - ) - assert deadline_url, "Requires DEADLINE_REST_URL" + # get default deadline webservice url from deadline module + deadline_url = instance.context.data["defaultDeadline"] + # if custom one is set in instance, use that + if instance.data.get("deadlineUrl"): + deadline_url = instance.data.get("deadlineUrl") + assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) - self._comment = context.data.get("comment", "") + self._comment = instance.data["comment"] self._deadline_user = context.data.get( "deadlineUser", getpass.getuser()) self._frame_start = int(instance.data["frameStart"]) @@ -82,6 +74,26 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): render_dir = os.path.normpath(os.path.dirname(render_path)) render_path = os.path.normpath(render_path) script_name = os.path.basename(script_path) + + for item in instance.context: + if "workfile" in item.data["family"]: + msg = "Workfile (scene) must be published along" + assert item.data["publish"] is True, msg + + template_data = item.data.get("anatomyData") + rep = item.data.get("representations")[0].get("name") + template_data["representation"] = rep + template_data["ext"] = rep + template_data["comment"] = None + anatomy_filled = instance.context.data["anatomy"].format( + template_data) + template_filled = anatomy_filled["publish"]["path"] + script_path = os.path.normpath(template_filled) + + self.log.info( + "Using published scene for render {}".format(script_path) + ) + jobname = "%s - %s" % (script_name, instance.name) output_filename_0 = self.preview_fname(render_path) @@ -98,7 +110,7 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): chunk_size = self.deadline_chunk_size # search for %02d pattern in name, and padding number - search_results = re.search(r"(.%0)(\d)(d)[._]", render_path).groups() + search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups() split_patern = "".join(search_results) padding_number = int(search_results[1]) @@ -145,10 +157,11 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): # frames from Deadline Monitor "OutputFilename0": output_filename_0.replace("\\", "/"), - # # Asset dependency to wait for at least the scene file to sync. + # # Asset dependency to wait for at least + # the scene file to sync. 
# "AssetDependency0": script_path "ScheduledType": "Once", - "JobDelay": "00:00:08:00" + "JobDelay": self.deadline_job_delay }, "PluginInfo": { # Input @@ -173,19 +186,6 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) - i = 0 - for key, values in dict(os.environ).items(): - if key.upper() in self.enviro_filter: - payload["JobInfo"].update( - { - "EnvironmentKeyValue%d" - % i: "{key}={value}".format( - key=key, value=values - ) - } - ) - i += 1 - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) @@ -193,10 +193,15 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): self.expected_files(instance, render_path) self.log.debug("__ expectedFiles: `{}`".format( instance.data["expectedFiles"])) + response = requests.post(self.deadline_url, json=payload) if not response.ok: - raise Exception(response.text) + self.log.error( + "Submission failed! [{}] {}".format( + response.status_code, response.content)) + self.log.debug(payload) + raise SystemExit(response.text) return response @@ -234,32 +239,29 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): split_path = path.split(split_patern) hashes = "#" * int(search_results[1]) return "".join([split_path[0], hashes, split_path[-1]]) - if "#" in path: - self.log.debug("_ path: `{}`".format(path)) - return path - else: - return path - def expected_files(self, - instance, - path): + self.log.debug("_ path: `{}`".format(path)) + return path + + def expected_files(self, instance, filepath): """ Create expected files in instance data """ if not instance.data.get("expectedFiles"): - instance.data["expectedFiles"] = list() + instance.data["expectedFiles"] = [] - dir = os.path.dirname(path) - file = os.path.basename(path) + dirpath = os.path.dirname(filepath) + filename = os.path.basename(filepath) - if "#" in file: - pparts = file.split("#") + if "#" in filename: + pparts = filename.split("#") padding = "%0{}d".format(len(pparts) - 1) - file = pparts[0] + padding + pparts[-1] + filename = pparts[0] + padding + pparts[-1] - if "%" not in file: - instance.data["expectedFiles"].append(path) + if "%" not in filename: + instance.data["expectedFiles"].append(filepath) return for i in range(self._frame_start, (self._frame_end + 1)): instance.data["expectedFiles"].append( - os.path.join(dir, (file % i)).replace("\\", "/")) + os.path.join(dirpath, (filename % i)).replace("\\", "/") + ) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 35f2532c16..45688e8584 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -241,6 +241,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") + environment["OPENPYPE_VERSION"] = os.environ.get("OPENPYPE_VERSION") environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_USERNAME"] = instance.context.data["user"] environment["OPENPYPE_PUBLISH_JOB"] = "1" @@ -494,12 +495,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - preview = match_aov_pattern(app, aov_patterns, 
render_file_name) + preview = match_aov_pattern(app, aov_patterns, render_file_name) # toggle preview on if multipart is on + if instance_data.get("multipartExr"): preview = True - + self.log.debug("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name @@ -542,7 +544,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if new_instance.get("extendFrames", False): self._copy_extend_frames(new_instance, rep) instances.append(new_instance) - + self.log.debug("instances:{}".format(instances)) return instances def _get_representations(self, instance, exp_files): @@ -775,6 +777,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "handleEnd": handle_end, "frameStartHandle": start - handle_start, "frameEndHandle": end + handle_end, + "comment": instance.data["comment"], "fps": fps, "source": source, "extendFrames": data.get("extendFrames"), diff --git a/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico new file mode 100644 index 0000000000000000000000000000000000000000..39d61592fe1addb07ed3ef93de362370485a23b9 GIT binary patch literal 103192 zcmeHQ2V4_L7oQ+17Eb(ZC>9jYh9aJ*CI!Yvtch-C}%GyqF4^>y<&9SCuIvvd)< zyn}(Z{5E{Be(h$pp=tdg_5&PnE+#$Pwd3M5(5L>QAp?FM?UI3!p_ z9*8`qYq`0$MK^tmffk0xHhFINd+5)x4>?bta1u{GnrxB#e4a(@6_1Wb=1x7gZK{30 z4oB?j%rLKBZ~mJ$-ri`WK@-vR>td5!SF1CNKeFsoyjQHrad{on_@E2l@k07({*I8r zZGeF?9Co)mCRH~+_Q~SvPrD~HFB?@a#(9pO(Ph+?NzK;wM@JHp2Mv!q>)T;Z z#><(HHePt##ChLlCj;b?y!?EJ9ZTk>@(n{fI#2RBzwOtg57Vr5QCRv(;fPt++K;%g zvY&B^9%{~x7q!>>>xBt2pTItBtUGNxvcIv&m(p!*FmXq=j?@FEe@vh{l4@JZ@*+$18?8#QTLrLpi{|1*jAsSr?~a%;lS=?HIvnAP4M_@ zu9=2A8Zj-5;zWBs1UOD?mc7F6gHf{k@oq+eBU+@e%$Na*bEpZ;g`<6Zmn4Z&jE1K8 zHg!%ozio%;Q`DvWK{gE=n(8tw`nc_-a*jQn*r2ajbStY@)%N#XVbuHYAUhz8}v zi9X+{?c`gx+57tKHm;9o%rNgV%gxgR?XO{BJ1C;BM{v@d_p{wRx1!D0vd4~e$cTC6 z>)!K``|ro8VeAn#ZS)$Bt5##>`!pZXn?F1D{>g1cr`*Gr>@%j06LSM%wrv-WPR!#6 zj%anu7`>fk&rUEqxY%{C`~IxqsQ2HsXJu7!ePb zpKmyboztS(L)YQ#_VrCYf(==nso?0t;Vaty8q{&hy?}`Q|BJQ_)$_2_Te$Q7`Heko zkB6RW$uL-EVcTy28qhS_Yv<>lP7RaicYY8us{YAK!OksD3w!+#;L~#XEVs>i41>O{ zALkfZ#!vC%x+`+We>=F3*+M*A;_75%#j?O)Fr zwZ?5#yNL7V$8GmKdvqf^e(=)IW_B+&da@EjT!x0Zoci)<&zpmKy1f{8B1AR=n(sqX zU(UX`sYiI%{GcWlwmP?J8`s79Xwv-b?XkLbijh@GmZg*>T>xXo= zqz!(wu%2;;);*~CyVC}{xEL*TywA=*ckJmOLD5IIvyKVtqp|3nOYiWQMb6^5q_eCv ze(1kxU>4g}qvO-I);on5=CyrWjavH)3hy^h>^Eh_Y`0+zO_A-Vo{M)49dmbNdU8*% zcu_*Dr6(3MMK;44Gg6;@W<9pu;?Q>mH!f3fHte^JE1F$vojo5|G1Nm{FX-pY*je-T z-M$`EFZ_^mLRjByELvwCUT@TnMs~JI7u!U8%^r+`dpCXGEh8-*?=* z+n`g8l}w$n$p32ZklksUs$1*3OtWME`RAUG?*|#B2(#;WzjnLD8a|)_V`0Cui^G4- zYT}Ul_`(CLYvRVkR&N?~DJ6&9Ib1KwZTw)gdGov;x$EW&8a)<Q+u<%9EzvHw^!@3A-{!ht1JCb$ed+iFS_b>s(HV4?Bdz1qwA~_lRHOyb$Gkm zV}1=!!DRbZ;ifzL9zXH2{#m!ve^9@N+s5BDt~bDl5gX%Sz*?U+xbDJQu`!uzMtX*I zi<>*!Ekf7SW2(re`2e=bAV4u>BZ|p2nW=~TH*P@DUWRX5dW@U<_tsfU#(2)=&g-7m z4j6BPMC1MZhu-w-`-6^a|9~*F7^@~TmW4GGTnTKpa&G!R$(<%x1xB-n8=El(EweTc zXglrdtsKYD_CrH|Ocags>AnBsmQZV+$;g?nBd=^1j*SrAVMlq6cDb<1LYLWL-gf=| z?t^k7=d@dJ(=+(Z|Cs((+dD_M`Z@BS&Q=$fCH)bh&qz%ec5cQ^UC!~Lfu@^{beO$% zUz^0p+s`rNcVyaMMjx6uIIPO|dEdlg*N}SQp>ZD#AHH7QY4xU*G=0a(_CvDc{**&O3LfEm-DxIww5PXK&?8ZfZp?t?%lpF z#o$cy<`19c$5{8eb@4;|U&+ardfW_a#_DC;yOAI6)u*LRy%Wz*m>?a8HLPLtKZd&q z`%md@xhtz)2(>e1f=!pZCrADD)WP+YcfBUdu8^qcF?4n*ruyzI7^on->2 zK+5qy2X%UFfTp%a{YI}69567vlX7(L?JuXn){{;SyBLtIb81q?nD9p|p2=TYm0`>B(nMtog^zK%MAQ#|K!NOuEbMeX;^{$K2-Cw_sy;FHS# z`mlkXFE3B3;l|v*Hzf8)$(d)q^_lRPp=*E7*)Pqix0%Md6!LG{`+eK;1Ap0KH2)W` 
zx#zuuCiOekV(26o*PQp^*(iND8MV@_?cih6kOP;JaTo8{{I=w@_nv?m&WHD&p75rr zS2eG!;U=ct&n_F8>6`5^IWBK{{j=PQV@?{-um7LpIN#oe=!bAQGg0>zT{c=<_BRnbm zDC}}G*=+Oi`AIf+53!xIbWGPiF+j%?XOC;cok&HnA8$CcX;_nNvBdhat?)}UuQJrusf ze_c$H*_~&*=VxE1!jc|rpLBkAC!SlkW23yApPq4ZeQ;|ea@Z9#X|1VqORi(J(+6+A z$nS9}ePWR7jY~dD?k=yjs6P_STpVb+@Z_Vgs3uMBrT8b+zHJ}R%V=Y$%Un9~@+5R@ zJ7WF%%z0dDWbl{36%!i?lKrFfZ3kW+R!4`~b$VvtFAG1M*^@G1ot|lskz1#^`Ad_3 z`J;326kU%gBbHjcuF;^k+orc;=Ca2;NqaIr|LKia>>drC7#$sSPHH7E)9yAid3n=I zJ`EkiCNx_KLpRTQbilI*gL9pKbLP32F>Q668qWQDaa+oAvqQrB;U@X3F0!9_&v};S z9kA?Zde)9>^YblM8^-TCK7Z=RFKrDN=I5RXr?u_xbgv`-EcaBpYjgKNn*{bhYcCG6 z*%iZBdSj)y!-H8ahO->+AHF`tYxMM4OP+DPQ>I>@!?yQ1`(Vz@&_9#1@2x%X>ob#U zBHOdqFGgAGTJ+H|?y#cS@4fd#ZrRGLxxv%ok1nHlj_18yGcs?i2^gP~@B5DRAjzgj zw>Y0mm-H_*qOG>Hq0kqbKv z1wfm@@OwSHbNm9s+=Uh^FudoiD|`!WeH+Qk$TwS-32O_9c}qMSd1;lN{e!09`X}c<2DKCi=s{!nXR@f`OIp&_*UREZ5_!TjGEmjT;r2>F_t|DPfEfEd-X9iR(y(8W&B*^zRR z@TY6M`-=0#P}KbAwav?Ny% zf#TNx1&Q-0(Wekfl*9byDDTZ?igT;3(y>QTJ`z`rfHM6HUe2Pv^&{`YexiTXs#Pmj znf~+Bk9_Our^NhJqJOnCthx}Z)^sgG9 zs-8>qU)43NT0WwG)%aBPT%!M~u3^>k5&f&ir>f@?{a1AjtCo-GUo}2eJ(uXes%u!a zd_@1M@u}*$ME_M?!>Z*Y`d5ulRnH~*uj(3BEuXgP{|oOG@&8b*TD5YO>7O6>+c)q3 zBYf>a^sknNRTomGf9yxgkF}?;j~(^}`__p6+)=6SiT|%^T`S8iPXF5K6Ru1~l2$wd zME}KO^F8Dc{eKTyE?!2W|KhRv9&%*$&%I|%3E|xu?xUVX=2x6swIz8a%>{*Twm@A* zk@~!ternAld9j0v)Rpx8%B4vCymvb(UX+Dg`R_qznv{{&KYpJ|EKHZ&_u-HmX)cE= zo)2;#bb$9RUYB@pn4fW)Vu$M%sV_)cQes_7#HM)BRz>P^A`Gc7+=Qa_#rYKXVdui~ zn#AUNXp7cAFUC$D+tTVmzBZMgd0x2aNepsAfF#Us!*@={U{!%M65eQErZdy|cq_IG*uE^S{`xxD1O4?_vE<2~3w;kR3=vh3fx zY%oq5gWlzIPul^E2^Yvd%2n4w;2iG&x=yF{&AHp0;=;W@8}9$;G-3l)Q~!#(=Or$r zgs{#cgnM;?;2p(*_1!`nfH`5#Kd5fiMB@9C3Xkca+rsa0aigszddIpiq`WMO1L-n0 z5J7C9YS;?QoAEWjP`5l-S1HuHtPQ|=8nJ<@p>stkVEixq9tQ`W?+JzfQUZ9MSA;n6 z6z5T|LR$g5Nr3x>c8cwhxOO2>wKbtpRy$X*lF_??93 zkW~|z??);;rvp7ksG)CVV?sKy0qwUHsSP~&&juJ2=Wa8K1J6>G(tQ(ITS$jBg3j8H z!uKtK#0EmpW!eVt{l5@=zm(n=z&!%KXQ0bMh`-}3`HtK7jiPo-kJkn8dLOOVl5Btv zK9_V^VguT(dzyQR4TPdIv|jO=umF55iyH?NZ2;z!ztL@xPmAbdnG5F(+Cu?f!s&Mj{}((6m4KU-3|%07Xh#V7y4Sj20uFiE24NC_!vOp zGIV*UyHI;2C!Es=U_H`0#rowXEuy#yPD+WR@V*e#-AuPbLhVHW^f(V}AivNKWS$?D zltyeod-X2MIiA;*!v=^ynJk!E`dexPqLR*eW%)1;_z-_GEmgb>+h7CZK)-BS$6R=y zm!E#P7&!v2l{P^9$+T35uZ}* zXUa>W@O^`V4G_Q5id9jCw~lQ9t_P0_^%|}ttK*V>XX0x~g)>Pys89rmPho|^s}+rx z*9Kr5vCs-Z36sSlkQ0RTU_6)cD}{Y!RQZ+4?=wey3X4Tndoq;y6yoDnVH;j}meq&|fQ=sBS(e=;8U70~5y2-lH@#kEV)wF?2b7O3_q%+H~)ZP5CM z^1<3AA>prTAL3x@Si`K5xzMI3IlP<6inSJSMlF|eLf&f8)AV3fx z2oMAa0t5kq06~BtKoB4Z5CjMU1Ob8oL4Y7Y5FiK;1PB5I0fGQQfFM8+AP5iy2m%BF zf&f9_>mvZKx{#x^2q0t#n?$(N)x-!H=pkfF$3+O4>&nNCOBt7|uh>3(OdR)5cUSHZ z+Q)}1|0GWED6>5X+;gbc@JAo4!j=kBQtQz<1tGJheB7g&d>k_R3y&;3&}GeK=KTT6c~W>MXo(ckpZRx1E~rOuoM^&C@>&WV4z5cGJ}tV zldmpu=^2n$fg%DT1%$*3aY_ME%EMh=1x)!U^-dv9qRJ7#8(i1{c>pnxG5m+a4xt8h z3P=(s{|EvE0fGQQfFM8+AP5iy%8dZLU)U=*3^}Q!P}CN_kF5)3e*qc+@1^O%UPMMJ z>)tJR)By0#RD=J=tPoTO%6*n8n+NY` z;=MfsWzv-EKMW;S5c}nB7WCQ%`1c2jQAKdRcY(5fp$x_=0)CHxa{|jBrK%p}1p!W` zX5I^1lzI#`# za6HrqvlY+?$ex4c=fR%4nm+i?Cu@!TeAstd(}y5wA(3BtZdHKkYNScp{)zsH{gd-w zvj~v!kBtBH+)5*Zbp=AYt0%zw8norPHV3Jk~!cbOA22HH9@?`wuQ3ewktq& z8flW&KjyI_Nh8cwKqC_QnR*mAvTLFI-1}{d*Z}u_o5Fma5w$-S@SJYd$K@-xrNib#p`vMAo8UQ!I2 zQC~Q5a``yUxznfs11KlXXHog-$G&y>-?GDXDPD9RiU)g)OU5nwz8$Aa(r|hKaie~c z$b&KB+-XplUYIX)o^bCOQ*1c5pe#=##;V~)U|q60tj%o1>oc&Hy-)d(X^9BsViROti}lXWyjbwP0dXG1eKrwezyfS~m8)?rs0=b%fvj9^tUbkpH7ZQp zL}!Y9`v;5x7xJt8o|_<}4c0dhmRXT_kTzTkR9P+-0R#N}GK&p!H{P2q6z`pvgt#we zVLXJ8KdxLIXhCHd^SXen8QdqoP(qlq(cd58z1fU05CH~GS|CQbSOgi{f~@cGclU&% z+(Hca@Am=*3^C--DOU$tP#I)w2^b`CVBX6A7y#cN@Rtzx=@@!!aMJ=Y%EcmJU<4S% z0nsumh8xS(ffiJTXZnpme#y6~IB=hfzeh?F8}19iU&i}e_UF+A#PI#@dVs+_4y*&< 
z?{U&&qTo{~#lBSsW8eqh->IgFzP{Bq;9Mx2BMZ0*|6u_4`E0n)hy0UZD>c4V{x!iD z$X65W;0nH{$KPQQ!gqKu@pHlf1Ic>N6)00%6McQFZD1XPu1~{tP;o4z<6+oVzE%D; z!52Q)3owA|0;~(MZyWqax9VaX?%**)6McQFZQwB<#=YJ68i3~q^q9z(uI~W`?%yi! zn&b=Q+>C92|L8>TI(uyo;MY;j8G)_tL zDX)SKH1Z$s6JZYI1XQ?BL|d{?1i(QKf&f8)AW*3Yz`BSD&}E?eO6VM{nY4oR^SBJG z9km5NsY6Q2;kwtM>{nk~f__f`(d%1^;SR{dM5iBv->x!oI-L%$eQjj`FKblr18ezc z7LZt42AAC<{Y~>M8}9hp{DW^=HQ;9`*oC7K8VB!U<7W~BAa4lkhQx=@K}k7BSg#%q z&o8QfZT_{rKfr!IP?Cn|tNRTA-`9XBLE?N$^!a|7NSNYEp9A4vTc0%lqE9!=JYW98 zi6{J*QE>!+3Vhp%{glys@bfXr_@nLTZ+_P0GLJtZ*4;`UfABjwyl5*R>rxZvPQyy) zpB=7Kz(Y9r9N_mTXrg^Fm^J@P!@nhSQt;D|FIOqoue`TjRNjYul06{s`%+@TdrB-~ z1LabJq(u^5@%w>d@TZZa=YOK`{u5XReC@?)(sySng@61`5auoKy$|rDBYE#fEO=YM zFYqczoF|oGEW8)=u^=7Zk>bS;hIEERhP)4d<1&@OKi<>iDNwwW@b8y0aXS4uq$Max z$7SjIu4Nqu%Tf>e#Xi~TKu5u+rqI6(_S!14hZxN55m1qIWo664OKIx>&vxJ&$L6OW zrMLy}zFq?UYL&K~$|?{1`(U3>0p}?tV8>G2nEu#)gW+CFD}0-PPrOIlKR}XmQzpEn zEizzxf%{f}&V3W`F**hB=$wc5QOjGnwF-m$|?(dRtFh;xv@hi0qY*cXWgN=&nIEKdI5Q@D~qqv^b5A61N_Ra zRng}~Dy08dnzkydEcjkxzr#)JJB`HH*N_kQ#QYEY@f;@&&Ihg7SMGB#$N)ZboXD{|UU}063;I%+FUd!&aM%hs!6Ej>f>40HtsoR( z?<)w!*o_N93j2ZK5Pzu%hfLdo5I2D10%$-ILIXGyOG8SUP8}{tr{KW(>V?=(%NWQG zsL)T#fcR+v9ONhi0x(x;2lpE0GSnK<8Ui&2Y9*6y4)G4~&YH#!nB$EHlD%iZzn22x z{gd&&@C$(y^QUWy0cg87&}$&PHcHbn_vwhUM+m>q3QmY;^&6KS1X_o9ZHzI8lDdxrUWnkfOWJQlo0ms!EwCrJm=OA65k(? zUn9@1z#k2-qk6#@yB5e2$VwXG-^Ir@^xOpc-o=M!soa;d^I+{~tfs~kb=!gdC&Jog zKHeJ!_VoV(agANq)aD@+jp0Q1$rFHI0WW2Z7;tb_x2~qjf!ss5&nNQ)+!qx0#h=)| zS81wkb=v{Ce};ZJ92f^UFb=T8t8dWcJVzZqfCo!>?>z_a7sdbg1n^LE=ht3C0u0PK zk8Lt|$u1Np$^z@+5X||$>YgmnpDEz~p7&}QJfoBNG<@~%Jb~P8I8i?(bK^(iv0;=t z`qOk7z@aJd(L>YilJ>q21d7b5n6Pf)Atjti|5g;GAm=gnjq-0sSE*{2Qm0_$&y@`~b8V2;avJ zlM?=o(=a3|4=7{7e>$F>mv&f=_|F{KkY}l~GGsy->%8~7<&^=kyfi;Np4+Ka|J|;l z^*{KkU7!1~6U-S$O8f))Psd8`oACbKYWDv#|LK@ZaIbej@N%vn?4f*spD^7I$DzJS zN&ItT2HfC8cEi3{CF)99TQJ@?;=Y{wnD_dxL};U>vN9#5K^q2~du{IVUVFTO>v)-b z0-*1@oM`JXZrpHu&t0bdP@tY3=TYx1Sf-MAuB`lmwNHdt;0;wAeTMZ>7LdRlA(1UY JVj#NS{{bY%R3rcZ literal 0 HcmV?d00001 diff --git a/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param new file mode 100644 index 0000000000..24c59d2005 --- /dev/null +++ b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param @@ -0,0 +1,38 @@ +[About] +Type=label +Label=About +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=Celaction Plugin for Deadline +Description=Not configurable + +[ConcurrentTasks] +Type=label +Label=ConcurrentTasks +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=True +Description=Not configurable + +[Executable] +Type=filename +Label=Executable +Category=Config +CategoryOrder=0 +CategoryIndex=0 +Description=The command executable to run +Required=false +DisableIfBlank=true + +[RenderNameSeparator] +Type=string +Label=RenderNameSeparator +Category=Config +CategoryOrder=0 +CategoryIndex=1 +Description=The separator to use for naming +Required=false +DisableIfBlank=true +Default=. 
diff --git a/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py new file mode 100644 index 0000000000..2d0edd3dca --- /dev/null +++ b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py @@ -0,0 +1,122 @@ +from System.Text.RegularExpressions import * + +from Deadline.Plugins import * +from Deadline.Scripting import * + +import _winreg + +###################################################################### +# This is the function that Deadline calls to get an instance of the +# main DeadlinePlugin class. +###################################################################### + + +def GetDeadlinePlugin(): + return CelActionPlugin() + + +def CleanupDeadlinePlugin(deadlinePlugin): + deadlinePlugin.Cleanup() + +###################################################################### +# This is the main DeadlinePlugin class for the CelAction plugin. +###################################################################### + + +class CelActionPlugin(DeadlinePlugin): + + def __init__(self): + self.InitializeProcessCallback += self.InitializeProcess + self.RenderExecutableCallback += self.RenderExecutable + self.RenderArgumentCallback += self.RenderArgument + self.StartupDirectoryCallback += self.StartupDirectory + + def Cleanup(self): + for stdoutHandler in self.StdoutHandlers: + del stdoutHandler.HandleCallback + + del self.InitializeProcessCallback + del self.RenderExecutableCallback + del self.RenderArgumentCallback + del self.StartupDirectoryCallback + + def GetCelActionRegistryKey(self): + # Modify registry for frame separation + path = r'Software\CelAction\CelAction2D\User Settings' + _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, path) + regKey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0, + _winreg.KEY_ALL_ACCESS) + return regKey + + def GetSeparatorValue(self, regKey): + useSeparator, _ = _winreg.QueryValueEx( + regKey, 'RenderNameUseSeparator') + separator, _ = _winreg.QueryValueEx(regKey, 'RenderNameSeparator') + + return useSeparator, separator + + def SetSeparatorValue(self, regKey, useSeparator, separator): + _winreg.SetValueEx(regKey, 'RenderNameUseSeparator', + 0, _winreg.REG_DWORD, useSeparator) + _winreg.SetValueEx(regKey, 'RenderNameSeparator', + 0, _winreg.REG_SZ, separator) + + def InitializeProcess(self): + # Set the plugin specific settings. + self.SingleFramesOnly = False + + # Set the process specific settings. 
+        self.StdoutHandling = True
+        self.PopupHandling = True
+
+        # Ignore 'celaction' Pop-up dialog
+        self.AddPopupIgnorer(".*Rendering.*")
+        self.AddPopupIgnorer(".*AutoRender.*")
+
+        # Ignore 'celaction' Pop-up dialog
+        self.AddPopupIgnorer(".*Wait.*")
+
+        # Ignore 'celaction' Pop-up dialog
+        self.AddPopupIgnorer(".*Timeline Scrub.*")
+
+        celActionRegKey = self.GetCelActionRegistryKey()
+
+        self.SetSeparatorValue(celActionRegKey, 1, self.GetConfigEntryWithDefault(
+            "RenderNameSeparator", ".").strip())
+
+    def RenderExecutable(self):
+        return RepositoryUtils.CheckPathMapping(self.GetConfigEntry("Executable").strip())
+
+    def RenderArgument(self):
+        arguments = RepositoryUtils.CheckPathMapping(
+            self.GetPluginInfoEntry("Arguments").strip())
+        arguments = arguments.replace(
+            "<STARTFRAME>", str(self.GetStartFrame()))
+        arguments = arguments.replace("<ENDFRAME>", str(self.GetEndFrame()))
+        arguments = self.ReplacePaddedFrame(
+            arguments, "<STARTFRAME%([0-9]+)>", self.GetStartFrame())
+        arguments = self.ReplacePaddedFrame(
+            arguments, "<ENDFRAME%([0-9]+)>", self.GetEndFrame())
+        arguments = arguments.replace("<QUOTE>", "\"")
+        return arguments
+
+    def StartupDirectory(self):
+        return self.GetPluginInfoEntryWithDefault("StartupDirectory", "").strip()
+
+    def ReplacePaddedFrame(self, arguments, pattern, frame):
+        frameRegex = Regex(pattern)
+        while True:
+            frameMatch = frameRegex.Match(arguments)
+            if frameMatch.Success:
+                paddingSize = int(frameMatch.Groups[1].Value)
+                if paddingSize > 0:
+                    padding = StringUtils.ToZeroPaddedString(
+                        frame, paddingSize, False)
+                else:
+                    padding = str(frame)
+                arguments = arguments.replace(
+                    frameMatch.Groups[0].Value, padding)
+            else:
+                break
+
+        return arguments
diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
index 9b35c9502d..40193bac71 100644
--- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
+++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
@@ -14,6 +14,137 @@ from Deadline.Scripting import (
     ProcessUtils,
 )
 
+VERSION_REGEX = re.compile(
+    r"(?P<major>0|[1-9]\d*)"
+    r"\.(?P<minor>0|[1-9]\d*)"
+    r"\.(?P<patch>0|[1-9]\d*)"
+    r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
+    r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
+)
+
+
+class OpenPypeVersion:
+    """Fake semver version class for OpenPype version purposes.
+
+    The version holds major, minor, patch and prerelease parts and implements
+    only the comparisons needed to pick a compatible OpenPype build.
+    """
+    def __init__(self, major, minor, patch, prerelease, origin=None):
+        self.major = major
+        self.minor = minor
+        self.patch = patch
+        self.prerelease = prerelease
+
+        is_valid = True
+        if not major or not minor or not patch:
+            is_valid = False
+        self.is_valid = is_valid
+
+        if origin is None:
+            base = "{}.{}.{}".format(str(major), str(minor), str(patch))
+            if not prerelease:
+                origin = base
+            else:
+                origin = "{}-{}".format(base, str(prerelease))
+
+        self.origin = origin
+
+    @classmethod
+    def from_string(cls, version):
+        """Create an object of version from string.
+
+        Args:
+            version (str): Version as a string.
+
+        Returns:
+            Union[OpenPypeVersion, None]: Version object if input is nonempty
+                string otherwise None.
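+
+        Example:
+            Illustrative values only, not taken from a real install::
+
+                version = OpenPypeVersion.from_string("3.9.2-nightly.3")
+                # version.major == 3, version.minor == 9, version.patch == 2
+                # version.prerelease == "nightly.3"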
+ """ + + if not version: + return None + valid_parts = VERSION_REGEX.findall(version) + if len(valid_parts) != 1: + # Return invalid version with filled 'origin' attribute + return cls(None, None, None, None, origin=str(version)) + + # Unpack found version + major, minor, patch, pre, post = valid_parts[0] + prerelease = pre + # Post release is not important anymore and should be considered as + # part of prerelease + # - comparison is implemented to find suitable build and builds should + # never contain prerelease part so "not proper" parsing is + # acceptable for this use case. + if post: + prerelease = "{}+{}".format(pre, post) + + return cls( + int(major), int(minor), int(patch), prerelease, origin=version + ) + + def has_compatible_release(self, other): + """Version has compatible release as other version. + + Both major and minor versions must be exactly the same. In that case + a build can be considered as release compatible with any version. + + Args: + other (OpenPypeVersion): Other version. + + Returns: + bool: Version is release compatible with other version. + """ + + if self.is_valid and other.is_valid: + return self.major == other.major and self.minor == other.minor + return False + + def __bool__(self): + return self.is_valid + + def __repr__(self): + return "<{} {}>".format(self.__class__.__name__, self.origin) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return self.origin == other + return self.origin == other.origin + + def __lt__(self, other): + if not isinstance(other, self.__class__): + return None + + if not self.is_valid: + return True + + if not other.is_valid: + return False + + if self.origin == other.origin: + return None + + same_major = self.major == other.major + if not same_major: + return self.major < other.major + + same_minor = self.minor == other.minor + if not same_minor: + return self.minor < other.minor + + same_patch = self.patch == other.patch + if not same_patch: + return self.patch < other.patch + + if not self.prerelease: + return False + + if not other.prerelease: + return True + + pres = [self.prerelease, other.prerelease] + pres.sort() + return pres[0] == self.prerelease + def get_openpype_version_from_path(path, build=True): """Get OpenPype version from provided path. @@ -21,9 +152,9 @@ def get_openpype_version_from_path(path, build=True): build (bool, optional): Get only builds, not sources Returns: - str or None: version of OpenPype if found. - + Union[OpenPypeVersion, None]: version of OpenPype if found. 
""" + # fix path for application bundle on macos if platform.system().lower() == "darwin": path = os.path.join(path, "Contents", "MacOS", "lib", "Python") @@ -46,8 +177,10 @@ def get_openpype_version_from_path(path, build=True): with open(version_file, "r") as vf: exec(vf.read(), version) - version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) - return version_match[1] + version_str = version.get("__version__") + if version_str: + return OpenPypeVersion.from_string(version_str) + return None def get_openpype_executable(): @@ -59,6 +192,91 @@ def get_openpype_executable(): return exe_list, dir_list +def get_openpype_versions(dir_list): + print(">>> Getting OpenPype executable ...") + openpype_versions = [] + + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if install_dir: + print("--- Looking for OpenPype at: {}".format(install_dir)) + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = get_openpype_version_from_path(subdir) + if not version: + continue + print(" - found: {} - {}".format(version, subdir)) + openpype_versions.append((version, subdir)) + return openpype_versions + + +def get_requested_openpype_executable( + exe, dir_list, requested_version +): + requested_version_obj = OpenPypeVersion.from_string(requested_version) + if not requested_version_obj: + print(( + ">>> Requested version does not match version regex \"{}\"" + ).format(VERSION_REGEX)) + return None + + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) + openpype_versions = get_openpype_versions(dir_list) + if not openpype_versions: + return None + + # if looking for requested compatible version, + # add the implicitly specified to the list too. + if exe: + exe_dir = os.path.dirname(exe) + print("Looking for OpenPype at: {}".format(exe_dir)) + version = get_openpype_version_from_path(exe_dir) + if version: + print(" - found: {} - {}".format(version, exe_dir)) + openpype_versions.append((version, exe_dir)) + + matching_item = None + compatible_versions = [] + for version_item in openpype_versions: + version, version_dir = version_item + if requested_version_obj.has_compatible_release(version): + compatible_versions.append(version_item) + if version == requested_version_obj: + # Store version item if version match exactly + # - break if is found matching version + matching_item = version_item + break + + if not compatible_versions: + return None + + compatible_versions.sort(key=lambda item: item[0]) + if matching_item: + version, version_dir = matching_item + print(( + "*** Found exact match build version {} in {}" + ).format(version_dir, version)) + + else: + version, version_dir = compatible_versions[-1] + + print(( + "*** Latest compatible version found is {} in {}" + ).format(version_dir, version)) + + # create list of executables for different platform and let + # Deadline decide. + exe_list = [ + os.path.join(version_dir, "openpype_console.exe"), + os.path.join(version_dir, "openpype_console") + ] + return FileUtils.SearchFileList(";".join(exe_list)) + + def inject_openpype_environment(deadlinePlugin): """ Pull env vars from OpenPype and push them to rendering process. 
@@ -68,93 +286,29 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Injecting OpenPype environments ...") try: - print(">>> Getting OpenPype executable ...") exe_list, dir_list = get_openpype_executable() - openpype_versions = [] - # if the job requires specific OpenPype version, - # lets go over all available and find compatible build. + exe = FileUtils.SearchFileList(exe_list) + requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print(( - ">>> Scanning for compatible requested version {}" - ).format(requested_version)) - install_dir = DirectoryUtils.SearchDirectoryList(dir_list) - if install_dir: - print("--- Looking for OpenPype at: {}".format(install_dir)) - sub_dirs = [ - f.path for f in os.scandir(install_dir) - if f.is_dir() - ] - for subdir in sub_dirs: - version = get_openpype_version_from_path(subdir) - if not version: - continue - print(" - found: {} - {}".format(version, subdir)) - openpype_versions.append((version, subdir)) + exe = get_requested_openpype_executable( + exe, dir_list, requested_version + ) + if exe is None: + raise RuntimeError(( + "Cannot find compatible version available for version {}" + " requested by the job. Please add it through plugin" + " configuration in Deadline or install it to configured" + " directory." + ).format(requested_version)) - exe = FileUtils.SearchFileList(exe_list) - if openpype_versions: - # if looking for requested compatible version, - # add the implicitly specified to the list too. - print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) - version = get_openpype_version_from_path( - os.path.dirname(exe)) - if version: - print(" - found: {} - {}".format( - version, os.path.dirname(exe) - )) - openpype_versions.append((version, os.path.dirname(exe))) - - if requested_version: - # sort detected versions - if openpype_versions: - # use natural sorting - openpype_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest available version found is {}" - ).format(openpype_versions[-1][0])) - requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 - compatible_versions = [] - for version in openpype_versions: - v = version[0].split(".")[:3] - if v[0] == requested_major and v[1] == requested_minor: - compatible_versions.append(version) - if not compatible_versions: - raise RuntimeError( - ("Cannot find compatible version available " - "for version {} requested by the job. " - "Please add it through plugin configuration " - "in Deadline or install it to configured " - "directory.").format(requested_version)) - # sort compatible versions nad pick the last one - compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest compatible version found is {}" - ).format(compatible_versions[-1][0])) - # create list of executables for different platform and let - # Deadline decide. - exe_list = [ - os.path.join( - compatible_versions[-1][1], "openpype_console.exe"), - os.path.join( - compatible_versions[-1][1], "openpype_console") - ] - exe = FileUtils.SearchFileList(";".join(exe_list)) - if exe == "": - raise RuntimeError( - "OpenPype executable was not found " + - "in the semicolon separated list " + - "\"" + ";".join(exe_list) + "\". 
" + - "The path to the render executable can be configured " + - "from the Plugin Configuration in the Deadline Monitor.") + if not exe: + raise RuntimeError(( + "OpenPype executable was not found in the semicolon " + "separated list \"{}\"." + "The path to the render executable can be configured" + " from the Plugin Configuration in the Deadline Monitor." + ).format(";".join(exe_list))) print("--- OpenPype executable: {}".format(exe)) @@ -172,22 +326,22 @@ def inject_openpype_environment(deadlinePlugin): export_url ] - add_args = {} - add_args['project'] = \ - job.GetJobEnvironmentKeyValue('AVALON_PROJECT') - add_args['asset'] = job.GetJobEnvironmentKeyValue('AVALON_ASSET') - add_args['task'] = job.GetJobEnvironmentKeyValue('AVALON_TASK') - add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME') - add_args["envgroup"] = "farm" + add_kwargs = { + "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"), + "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"), + "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"), + "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"), + "envgroup": "farm" + } + if all(add_kwargs.values()): + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) - if all(add_args.values()): - for key, value in add_args.items(): - args.append("--{}".format(key)) - args.append(value) else: - msg = "Required env vars: AVALON_PROJECT, AVALON_ASSET, " + \ - "AVALON_TASK, AVALON_APP_NAME" - raise RuntimeError(msg) + raise RuntimeError(( + "Missing required env vars: AVALON_PROJECT, AVALON_ASSET," + " AVALON_TASK, AVALON_APP_NAME" + )) if not os.environ.get("OPENPYPE_MONGO"): print(">>> Missing OPENPYPE_MONGO env var, process won't work") @@ -208,12 +362,12 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Loading file ...") with open(export_url) as fp: contents = json.load(fp) - for key, value in contents.items(): - deadlinePlugin.SetProcessEnvironmentVariable(key, value) + + for key, value in contents.items(): + deadlinePlugin.SetProcessEnvironmentVariable(key, value) script_url = job.GetJobPluginInfoKeyValue("ScriptFilename") if script_url: - script_url = script_url.format(**contents).replace("\\", "/") print(">>> Setting script path {}".format(script_url)) job.SetJobPluginInfoKeyValue("ScriptFilename", script_url) diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 935d1e85c9..0341c25717 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -1556,7 +1556,7 @@ class SyncEntitiesFactory: deleted_entities.append(mongo_id) av_ent = self.avalon_ents_by_id[mongo_id] - av_ent_path_items = [p for p in av_ent["data"]["parents"]] + av_ent_path_items = list(av_ent["data"]["parents"]) av_ent_path_items.append(av_ent["name"]) self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items))) @@ -1855,7 +1855,7 @@ class SyncEntitiesFactory: _vis_par = _avalon_ent["data"]["visualParent"] _name = _avalon_ent["name"] if _name in self.all_ftrack_names: - av_ent_path_items = _avalon_ent["data"]["parents"] + av_ent_path_items = list(_avalon_ent["data"]["parents"]) av_ent_path_items.append(_name) av_ent_path = "/".join(av_ent_path_items) # TODO report @@ -1997,7 +1997,7 @@ class SyncEntitiesFactory: {"_id": mongo_id}, item )) - av_ent_path_items = item["data"]["parents"] + av_ent_path_items = list(item["data"]["parents"]) av_ent_path_items.append(item["name"]) av_ent_path = "/".join(av_ent_path_items) self.log.debug( @@ -2110,6 
+2110,7 @@ class SyncEntitiesFactory: entity_dict = self.entities_dict[ftrack_id] + final_parents = entity_dict["final_entity"]["data"]["parents"] if archived_by_id: # if is changeable then unarchive (nothing to check here) if self.changeability_by_mongo_id[mongo_id]: @@ -2123,10 +2124,8 @@ class SyncEntitiesFactory: archived_name = archived_by_id["name"] if ( - archived_name != entity_dict["name"] or - archived_parents != entity_dict["final_entity"]["data"][ - "parents" - ] + archived_name != entity_dict["name"] + or archived_parents != final_parents ): return None @@ -2136,11 +2135,7 @@ class SyncEntitiesFactory: for archived in archived_by_name: mongo_id = str(archived["_id"]) archived_parents = archived.get("data", {}).get("parents") - if ( - archived_parents == entity_dict["final_entity"]["data"][ - "parents" - ] - ): + if archived_parents == final_parents: return mongo_id # Secondly try to find more close to current ftrack entity @@ -2350,8 +2345,7 @@ class SyncEntitiesFactory: continue changed = True - parents = [par for par in _parents] - hierarchy = "/".join(parents) + parents = list(_parents) self.entities_dict[ftrack_id][ "final_entity"]["data"]["parents"] = parents diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 159e60024d..0e8209866f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -36,10 +36,35 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): return context = instance.context - session = context.data["ftrackSession"] + task_entity, parent_entity = self.get_instance_entities( + instance, context) + if parent_entity is None: + self.log.info(( + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." + ).format(str(instance))) + return + session = context.data["ftrackSession"] + # Reset session operations and reconfigure locations + session.recorded_operations.clear() + session._configure_locations() + + try: + self.integrate_to_ftrack( + session, + instance, + task_entity, + parent_entity, + component_list + ) + + except Exception: + session.reset() + raise + + def get_instance_entities(self, instance, context): parent_entity = None - default_asset_name = None # If instance has set "ftrackEntity" or "ftrackTask" then use them from # instance. Even if they are set to None. If they are set to None it # has a reason. (like has different context) @@ -52,15 +77,21 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): parent_entity = context.data.get("ftrackEntity") if task_entity: - default_asset_name = task_entity["name"] parent_entity = task_entity["parent"] - if parent_entity is None: - self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." 
- ).format(str(instance))) - return + return task_entity, parent_entity + + def integrate_to_ftrack( + self, + session, + instance, + task_entity, + parent_entity, + component_list + ): + default_asset_name = None + if task_entity: + default_asset_name = task_entity["name"] if not default_asset_name: default_asset_name = parent_entity["name"] @@ -186,13 +217,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): self.log.info("Setting task status to \"{}\"".format(status_name)) task_entity["status"] = status - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _fill_component_locations(self, session, component_list): components_by_location_name = collections.defaultdict(list) @@ -495,13 +520,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session.delete(member) del(member) - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() # Reset members in memory if "members" in component_entity.keys(): @@ -617,13 +636,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ) else: # Commit changes. - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _create_components(self, session, asset_versions_data_by_id): for item in asset_versions_data_by_id.values(): diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index e7c265988e..6ed02bc8b6 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -38,7 +38,7 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): self.log.info("There are any integrated AssetVersions") return - comment = (instance.context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index ac3fa874e0..6776509dda 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -45,7 +45,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): host_name = context.data["hostName"] app_name = context.data["appName"] app_label = context.data["appLabel"] - comment = (context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index fa7a89050c..046dfd9ad8 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -7,10 +7,8 @@ import pyblish.api from openpype.client import get_asset_by_id from openpype.lib import filter_profiles +from openpype.pipeline import KnownPublishError - -# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` -CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = "openpype" 
@@ -19,7 +17,6 @@ CUST_ATTR_GROUP = "openpype" def get_pype_attr(session, split_hierarchical=True): custom_attributes = [] hier_custom_attributes = [] - # TODO remove deprecated "avalon" group from query cust_attrs_query = ( "select id, entity_type, object_type_id, is_hierarchical, default" " from CustomAttributeConfiguration" @@ -79,120 +76,284 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): create_task_status_profiles = [] def process(self, context): - self.context = context - if "hierarchyContext" not in self.context.data: + if "hierarchyContext" not in context.data: return hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - session = self.context.data["ftrackSession"] - project_name = self.context.data["projectEntity"]["name"] - query = 'Project where full_name is "{}"'.format(project_name) - project = session.query(query).one() - auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC] + session = context.data["ftrackSession"] + project_name = context.data["projectName"] + project = session.query( + 'select id, full_name from Project where full_name is "{}"'.format( + project_name + ) + ).first() + if not project: + raise KnownPublishError( + "Project \"{}\" was not found on ftrack.".format(project_name) + ) + self.context = context self.session = session self.ft_project = project self.task_types = self.get_all_task_types(project) self.task_statuses = self.get_task_statuses(project) - # disable termporarily ftrack project's autosyncing - if auto_sync_state: - self.auto_sync_off(project) + # import ftrack hierarchy + self.import_to_ftrack(project_name, hierarchy_context) - try: - # import ftrack hierarchy - self.import_to_ftrack(project_name, hierarchy_context) - except Exception: - raise - finally: - if auto_sync_state: - self.auto_sync_on(project) + def query_ftrack_entitites(self, session, ft_project): + project_id = ft_project["id"] + entities = session.query(( + "select id, name, parent_id" + " from TypedContext where project_id is \"{}\"" + ).format(project_id)).all() - def import_to_ftrack(self, project_name, input_data, parent=None): + entities_by_id = {} + entities_by_parent_id = collections.defaultdict(list) + for entity in entities: + entities_by_id[entity["id"]] = entity + parent_id = entity["parent_id"] + entities_by_parent_id[parent_id].append(entity) + + ftrack_hierarchy = [] + ftrack_id_queue = collections.deque() + ftrack_id_queue.append((project_id, ftrack_hierarchy)) + while ftrack_id_queue: + item = ftrack_id_queue.popleft() + ftrack_id, parent_list = item + if ftrack_id == project_id: + entity = ft_project + name = entity["full_name"] + else: + entity = entities_by_id[ftrack_id] + name = entity["name"] + + children = [] + parent_list.append({ + "name": name, + "low_name": name.lower(), + "entity": entity, + "children": children, + }) + for child in entities_by_parent_id[ftrack_id]: + ftrack_id_queue.append((child["id"], children)) + return ftrack_hierarchy + + def find_matching_ftrack_entities( + self, hierarchy_context, ftrack_hierarchy + ): + walk_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + walk_queue.append( + (entity_name, entity_data, ftrack_hierarchy) + ) + + matching_ftrack_entities = [] + while walk_queue: + item = walk_queue.popleft() + entity_name, entity_data, ft_children = item + matching_ft_child = None + for ft_child in ft_children: + if ft_child["low_name"] == entity_name.lower(): + matching_ft_child = 
ft_child + break + + if matching_ft_child is None: + continue + + entity = matching_ft_child["entity"] + entity_data["ft_entity"] = entity + matching_ftrack_entities.append(entity) + + hierarchy_children = entity_data.get("childs") + if not hierarchy_children: + continue + + for child_name, child_data in hierarchy_children.items(): + walk_queue.append( + (child_name, child_data, matching_ft_child["children"]) + ) + return matching_ftrack_entities + + def query_custom_attribute_values(self, session, entities, hier_attrs): + attr_ids = { + attr["id"] + for attr in hier_attrs + } + entity_ids = { + entity["id"] + for entity in entities + } + output = { + entity_id: {} + for entity_id in entity_ids + } + if not attr_ids or not entity_ids: + return {} + + joined_attr_ids = ",".join( + ['"{}"'.format(attr_id) for attr_id in attr_ids] + ) + + # Query values in chunks + chunk_size = int(5000 / len(attr_ids)) + # Make sure entity_ids is `list` for chunk selection + entity_ids = list(entity_ids) + results = [] + for idx in range(0, len(entity_ids), chunk_size): + joined_entity_ids = ",".join([ + '"{}"'.format(entity_id) + for entity_id in entity_ids[idx:idx + chunk_size] + ]) + results.extend( + session.query( + ( + "select value, entity_id, configuration_id" + " from CustomAttributeValue" + " where entity_id in ({}) and configuration_id in ({})" + ).format( + joined_entity_ids, + joined_attr_ids + ) + ).all() + ) + + for result in results: + attr_id = result["configuration_id"] + entity_id = result["entity_id"] + output[entity_id][attr_id] = result["value"] + + return output + + def import_to_ftrack(self, project_name, hierarchy_context): # Prequery hiearchical custom attributes - hier_custom_attributes = get_pype_attr(self.session)[1] + hier_attrs = get_pype_attr(self.session)[1] hier_attr_by_key = { attr["key"]: attr - for attr in hier_custom_attributes + for attr in hier_attrs } + # Query user entity (for comments) + user = self.session.query( + "User where username is \"{}\"".format(self.session.api_user) + ).first() + if not user: + self.log.warning( + "Was not able to query current User {}".format( + self.session.api_user + ) + ) + + # Query ftrack hierarchy with parenting + ftrack_hierarchy = self.query_ftrack_entitites( + self.session, self.ft_project) + + # Fill ftrack entities to hierarchy context + # - there is no need to query entities again + matching_entities = self.find_matching_ftrack_entities( + hierarchy_context, ftrack_hierarchy) + # Query custom attribute values of each entity + custom_attr_values_by_id = self.query_custom_attribute_values( + self.session, matching_entities, hier_attrs) + # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] - for entity_name in input_data: - entity_data = input_data[entity_name] + # Use queue of hierarchy items to process + import_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + import_queue.append( + (entity_name, entity_data, None) + ) + + while import_queue: + item = import_queue.popleft() + entity_name, entity_data, parent = item + entity_type = entity_data['entity_type'] self.log.debug(entity_data) - self.log.debug(entity_type) - if entity_type.lower() == 'project': - entity = self.ft_project - - elif self.ft_project is None or parent is None: + entity = entity_data.get("ft_entity") + if entity is None and entity_type.lower() == "project": raise AssertionError( "Collected items are not in right order!" 
) - # try to find if entity already exists - else: - query = ( - 'TypedContext where name is "{0}" and ' - 'project_id is "{1}"' - ).format(entity_name, self.ft_project["id"]) - try: - entity = self.session.query(query).one() - except Exception: - entity = None - # Create entity if not exists if entity is None: - entity = self.create_entity( - name=entity_name, - type=entity_type, - parent=parent - ) + entity = self.session.create(entity_type, { + "name": entity_name, + "parent": parent + }) + entity_data["ft_entity"] = entity + # self.log.info('entity: {}'.format(dict(entity))) # CUSTOM ATTRIBUTES - custom_attributes = entity_data.get('custom_attributes', []) - instances = [ - instance - for instance in self.context - if instance.data.get("asset") == entity["name"] - ] + custom_attributes = entity_data.get('custom_attributes', {}) + instances = [] + for instance in self.context: + instance_asset_name = instance.data.get("asset") + if ( + instance_asset_name + and instance_asset_name.lower() == entity["name"].lower() + ): + instances.append(instance) for instance in instances: instance.data["ftrackEntity"] = entity - for key in custom_attributes: + for key, cust_attr_value in custom_attributes.items(): + if cust_attr_value is None: + continue + hier_attr = hier_attr_by_key.get(key) # Use simple method if key is not hierarchical if not hier_attr: - assert (key in entity['custom_attributes']), ( - 'Missing custom attribute key: `{0}` in attrs: ' - '`{1}`'.format(key, entity['custom_attributes'].keys()) + if key not in entity["custom_attributes"]: + raise KnownPublishError(( + "Missing custom attribute in ftrack with name '{}'" + ).format(key)) + + entity['custom_attributes'][key] = cust_attr_value + continue + + attr_id = hier_attr["id"] + entity_values = custom_attr_values_by_id.get(entity["id"], {}) + # New value is defined by having id in values + # - it can be set to 'None' (ftrack allows that using API) + is_new_value = attr_id not in entity_values + attr_value = entity_values.get(attr_id) + + # Use ftrack operations method to set hiearchical + # attribute value. + # - this is because there may be non hiearchical custom + # attributes with different properties + entity_key = collections.OrderedDict(( + ("configuration_id", hier_attr["id"]), + ("entity_id", entity["id"]) + )) + op = None + if is_new_value: + op = ftrack_api.operation.CreateEntityOperation( + "CustomAttributeValue", + entity_key, + {"value": cust_attr_value} ) - entity['custom_attributes'][key] = custom_attributes[key] - - else: - # Use ftrack operations method to set hiearchical - # attribute value. 
- # - this is because there may be non hiearchical custom - # attributes with different properties - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = hier_attr["id"] - entity_key["entity_id"] = entity["id"] - self.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", - entity_key, - "value", - ftrack_api.symbol.NOT_SET, - custom_attributes[key] - ) + elif attr_value != cust_attr_value: + op = ftrack_api.operation.UpdateEntityOperation( + "CustomAttributeValue", + entity_key, + "value", + attr_value, + cust_attr_value ) + if op is not None: + self.session.recorded_operations.push(op) + + if self.session.recorded_operations: try: self.session.commit() except Exception: @@ -206,7 +367,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): for instance in instances: task_name = instance.data.get("task") if task_name: - instances_by_task_name[task_name].append(instance) + instances_by_task_name[task_name.lower()].append(instance) tasks = entity_data.get('tasks', []) existing_tasks = [] @@ -247,30 +408,28 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): six.reraise(tp, value, tb) # Create notes. - user = self.session.query( - "User where username is \"{}\"".format(self.session.api_user) - ).first() - if user: - for comment in entity_data.get("comments", []): + entity_comments = entity_data.get("comments") + if user and entity_comments: + for comment in entity_comments: entity.create_note(comment, user) - else: - self.log.warning( - "Was not able to query current User {}".format( - self.session.api_user - ) - ) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) + + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + self.session._configure_locations() + six.reraise(tp, value, tb) # Import children. - if 'childs' in entity_data: - self.import_to_ftrack( - project_name, entity_data['childs'], entity) + children = entity_data.get("childs") + if not children: + continue + + for entity_name, entity_data in children.items(): + import_queue.append( + (entity_name, entity_data, entity) + ) def create_links(self, project_name, entity_data, entity): # Clear existing links. @@ -366,48 +525,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return task - def create_entity(self, name, type, parent): - entity = self.session.create(type, { - 'name': name, - 'parent': parent - }) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - return entity - - def auto_sync_off(self, project): - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False - - self.log.info("Ftrack autosync swithed off") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - def auto_sync_on(self, project): - - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True - - self.log.info("Ftrack autosync swithed on") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - def _get_active_assets(self, context): """ Returns only asset dictionary. 
Usually the last part of deep dictionary which @@ -429,19 +546,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): hierarchy_context = context.data["hierarchyContext"] - active_assets = [] + active_assets = set() # filter only the active publishing insatnces for instance in context: if instance.data.get("publish") is False: continue - if not instance.data.get("asset"): - continue - - active_assets.append(instance.data["asset"]) + asset_name = instance.data.get("asset") + if asset_name: + active_assets.add(asset_name) # remove duplicity in list - active_assets = list(set(active_assets)) - self.log.debug("__ active_assets: {}".format(active_assets)) + self.log.debug("__ active_assets: {}".format(list(active_assets))) return get_pure_hierarchy_data(hierarchy_context) diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index 6c7ecb8351..eb3f63c04b 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -7,6 +7,8 @@ import signal import socket import datetime +import appdirs + import ftrack_api from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( @@ -253,6 +255,15 @@ class StatusFactory: ) }) + items.append({ + "type": "label", + "value": ( + "Local versions dir: {}
    Version repository path: {}" + ).format( + appdirs.user_data_dir("openpype", "pypeclub"), + os.environ.get("OPENPYPE_PATH") + ) + }) items.append({"type": "label", "value": "---"}) return items diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index bf80095225..e5e6439439 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -31,7 +31,6 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): continue review_path = representation.get("published_path") - self.log.debug("Found review at: {}".format(review_path)) gazu.task.add_preview( diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 39b05937dc..27e899d59a 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -18,15 +18,15 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): profiles = None def process(self, instance): - task_name = legacy_io.Session.get("AVALON_TASK") + task_data = instance.data["anatomyData"].get("task", {}) family = self.main_family_from_instance(instance) key_values = { "families": family, - "tasks": task_name, + "tasks": task_data.get("name"), + "task_types": task_data.get("type"), "hosts": instance.data["anatomyData"]["app"], "subsets": instance.data["subset"] } - profile = filter_profiles(self.profiles, key_values, logger=self.log) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 643e55915b..0cd5ec9de8 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -112,7 +112,13 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if review_path: fill_pairs.append(("review_filepath", review_path)) - task_data = fill_data.get("task") + task_data = ( + copy.deepcopy(instance.data.get("anatomyData", {})).get("task") + or fill_data.get("task") + ) + if not isinstance(task_data, dict): + # fallback for legacy - if task_data is only task name + task_data["name"] = task_data if task_data: if ( "{task}" in message_templ @@ -142,13 +148,17 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _get_thumbnail_path(self, instance): """Returns abs url for thumbnail if present in instance repres""" - published_path = None + thumbnail_path = None for repre in instance.data.get("representations", []): if repre.get('thumbnail') or "thumbnail" in repre.get('tags', []): - if os.path.exists(repre["published_path"]): - published_path = repre["published_path"] + repre_thumbnail_path = ( + repre.get("published_path") or + os.path.join(repre["stagingDir"], repre["files"]) + ) + if os.path.exists(repre_thumbnail_path): + thumbnail_path = repre_thumbnail_path break - return published_path + return thumbnail_path def _get_review_path(self, instance): """Returns abs url for review if present in instance repres""" @@ -178,10 +188,17 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): channel=channel, title=os.path.basename(p_file) ) - attachment_str += "\n<{}|{}>".format( - response["file"]["permalink"], - os.path.basename(p_file)) - file_ids.append(response["file"]["id"]) + if response.get("error"): + error_str = self._enrich_error( + str(response.get("error")), + 
channel) + self.log.warning( + "Error happened: {}".format(error_str)) + else: + attachment_str += "\n<{}|{}>".format( + response["file"]["permalink"], + os.path.basename(p_file)) + file_ids.append(response["file"]["id"]) if publish_files: message += attachment_str diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 4fd460ffea..9c468ae8fc 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -199,7 +199,7 @@ class InstanceMember: }) -class AttributeValues: +class AttributeValues(object): """Container which keep values of Attribute definitions. Goal is to have one object which hold values of attribute definitions for @@ -584,6 +584,7 @@ class CreatedInstance: if key in data: data.pop(key) + self._data["variant"] = self._data.get("variant") or "" # Stored creator specific attribute values # {key: value} creator_values = copy.deepcopy(orig_creator_attributes) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 782534d589..bb5ce00452 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -393,8 +393,9 @@ class BaseCreator: asset_doc(dict): Asset document for which subset is created. project_name(str): Project name. host_name(str): Which host creates subset. - instance(str|None): Object of 'CreatedInstance' for which is - subset name updated. Passed only on subset name update. + instance(CreatedInstance|None): Object of 'CreatedInstance' for + which is subset name updated. Passed only on subset name + update. """ dynamic_data = self.get_dynamic_data( diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 6e2be1ce2c..47dfaf6b98 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,3 +1,4 @@ +import inspect from abc import ABCMeta import pyblish.api @@ -132,6 +133,25 @@ class OpenPypePyblishPluginMixin: ) return attribute_values + @staticmethod + def get_attr_values_from_data_for_plugin(plugin, data): + """Get attribute values for attribute definitions from data. + + Args: + plugin (Union[publish.api.Plugin, Type[publish.api.Plugin]]): The + plugin for which attributes are extracted. + data(dict): Data from instance or context. + """ + + if not inspect.isclass(plugin): + plugin = plugin.__class__ + + return ( + data + .get("publish_attributes", {}) + .get(plugin.__name__, {}) + ) + def get_attr_values_from_data(self, data): """Get attribute values for attribute definitions from data. @@ -139,11 +159,7 @@ class OpenPypePyblishPluginMixin: data(dict): Data from instance or context. 
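+
+        Example:
+            With data shaped like this (plugin name illustrative only)::
+
+                {"publish_attributes": {"MyPlugin": {"active": True}}}
+
+            a plugin class named "MyPlugin" receives ``{"active": True}``.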
""" - return ( - data - .get("publish_attributes", {}) - .get(self.__class__.__name__, {}) - ) + return self.get_attr_values_from_data_for_plugin(self.__class__, data) class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py index f67d3373d9..909b49a07d 100644 --- a/openpype/plugins/publish/collect_anatomy_instance_data.py +++ b/openpype/plugins/publish/collect_anatomy_instance_data.py @@ -188,7 +188,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): for subset_doc in subset_docs: subset_id = subset_doc["_id"] last_version_doc = last_version_docs_by_subset_id.get(subset_id) - if last_version_docs_by_subset_id is None: + if last_version_doc is None: continue asset_id = subset_doc["parent"] diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 7d53b24e54..3a0ddb3281 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -1,21 +1,27 @@ +import collections import pyblish.api from openpype.client import ( - get_last_version_by_subset_name, + get_assets, + get_subsets, + get_last_versions, get_representations, ) -from openpype.pipeline import ( - legacy_io, - get_representation_path, -) +from openpype.pipeline.load import get_representation_path_with_anatomy -class CollectAudio(pyblish.api.InstancePlugin): +class CollectAudio(pyblish.api.ContextPlugin): """Collect asset's last published audio. The audio subset name searched for is defined in: project settings > Collect Audio + + Note: + The plugin was instance plugin but because of so much queries the + plugin was slowing down whole collection phase a lot thus was + converted to context plugin which requires only 4 queries top. """ + label = "Collect Asset Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] @@ -39,67 +45,134 @@ class CollectAudio(pyblish.api.InstancePlugin): audio_subset_name = "audioMain" - def process(self, instance): - if instance.data.get("audio"): - self.log.info( - "Skipping Audio collecion. It is already collected" - ) + def process(self, context): + # Fake filtering by family inside context plugin + filtered_instances = [] + for instance in pyblish.api.instances_by_plugin( + context, self.__class__ + ): + # Skip instances that already have audio filled + if instance.data.get("audio"): + self.log.info( + "Skipping Audio collecion. It is already collected" + ) + continue + filtered_instances.append(instance) + + # Skip if none of instances remained + if not filtered_instances: return # Add audio to instance if exists. 
+ instances_by_asset_name = collections.defaultdict(list) + for instance in filtered_instances: + asset_name = instance.data["asset"] + instances_by_asset_name[asset_name].append(instance) + + asset_names = set(instances_by_asset_name.keys()) self.log.info(( - "Searching for audio subset '{subset}'" - " in asset '{asset}'" + "Searching for audio subset '{subset}' in assets {assets}" ).format( subset=self.audio_subset_name, - asset=instance.data["asset"] + assets=", ".join([ + '"{}"'.format(asset_name) + for asset_name in asset_names + ]) )) - repre_doc = self._get_repre_doc(instance) + # Query all required documents + project_name = context.data["projectName"] + anatomy = context.data["anatomy"] + repre_docs_by_asset_names = self.query_representations( + project_name, asset_names) - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.info("Audio Data added to instance ...") + for asset_name, instances in instances_by_asset_name.items(): + repre_docs = repre_docs_by_asset_names[asset_name] + if not repre_docs: + continue - def _get_repre_doc(self, instance): - cache = instance.context.data.get("__cache_asset_audio") - if cache is None: - cache = {} - instance.context.data["__cache_asset_audio"] = cache - asset_name = instance.data["asset"] + repre_doc = repre_docs[0] + repre_path = get_representation_path_with_anatomy( + repre_doc, anatomy + ) + for instance in instances: + instance.data["audio"] = [{ + "offset": 0, + "filename": repre_path + }] + self.log.info("Audio Data added to instance ...") - # first try to get it from cache - if asset_name in cache: - return cache[asset_name] + def query_representations(self, project_name, asset_names): + """Query representations related to audio subsets for passed assets. - project_name = legacy_io.active_project() + Args: + project_name (str): Project in which we're looking for all + entities. + asset_names (Iterable[str]): Asset names where to look for audio + subsets and their representations. - # Find latest versions document - last_version_doc = get_last_version_by_subset_name( + Returns: + collections.defaultdict[str, List[Dict[Str, Any]]]: Representations + related to audio subsets by asset name. 
+        """
+
+        output = collections.defaultdict(list)
+        # Query asset documents
+        asset_docs = get_assets(
             project_name,
-            self.audio_subset_name,
-            asset_name=asset_name,
-            fields=["_id"]
+            asset_names=asset_names,
+            fields=["_id", "name"]
         )
 
-        repre_doc = None
-        if last_version_doc:
-            # Try to find it's representation (Expected there is only one)
-            repre_docs = list(get_representations(
-                project_name, version_ids=[last_version_doc["_id"]]
-            ))
-            if not repre_docs:
-                self.log.warning(
-                    "Version document does not contain any representations"
-                )
-            else:
-                repre_doc = repre_docs[0]
+        asset_id_by_name = {}
+        for asset_doc in asset_docs:
+            asset_id_by_name[asset_doc["name"]] = asset_doc["_id"]
+        asset_ids = set(asset_id_by_name.values())
 
-        # update cache
-        cache[asset_name] = repre_doc
+        # Query subsets with the name defined by 'audio_subset_name' attr
+        # - one or no subset with the name should be available on an asset
+        subset_docs = get_subsets(
+            project_name,
+            subset_names=[self.audio_subset_name],
+            asset_ids=asset_ids,
+            fields=["_id", "parent"]
+        )
+        subset_id_by_asset_id = {}
+        for subset_doc in subset_docs:
+            asset_id = subset_doc["parent"]
+            subset_id_by_asset_id[asset_id] = subset_doc["_id"]
 
-        return repre_doc
+        subset_ids = set(subset_id_by_asset_id.values())
+        if not subset_ids:
+            return output
+
+        # Find all latest versions for the subsets
+        version_docs_by_subset_id = get_last_versions(
+            project_name, subset_ids=subset_ids, fields=["_id", "parent"]
+        )
+        version_id_by_subset_id = {
+            subset_id: version_doc["_id"]
+            for subset_id, version_doc in version_docs_by_subset_id.items()
+        }
+        version_ids = set(version_id_by_subset_id.values())
+        if not version_ids:
+            return output
+
+        # Find representations under latest versions of audio subsets
+        repre_docs = get_representations(
+            project_name, version_ids=version_ids
+        )
+        repre_docs_by_version_id = collections.defaultdict(list)
+        for repre_doc in repre_docs:
+            version_id = repre_doc["parent"]
+            repre_docs_by_version_id[version_id].append(repre_doc)
+
+        if not repre_docs_by_version_id:
+            return output
+
+        for asset_name in asset_names:
+            asset_id = asset_id_by_name.get(asset_name)
+            subset_id = subset_id_by_asset_id.get(asset_id)
+            version_id = version_id_by_subset_id.get(subset_id)
+            output[asset_name] = repre_docs_by_version_id[version_id]
+        return output
diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py
index 062142ace9..12579cd957 100644
--- a/openpype/plugins/publish/collect_comment.py
+++ b/openpype/plugins/publish/collect_comment.py
@@ -1,19 +1,123 @@
-"""
-Requires:
-    None
-Provides:
-    context -> comment (str)
+"""Collect comment and add option to enter comment per instance.
+
+Combination of plugins: one defines an optional comment input in the
+Publisher UI (CollectInstanceCommentDef), the second ensures each instance
+has the "comment" key available in data during collection (CollectComment).
+
+Plugin 'CollectInstanceCommentDef' defines a "comment" attribute which won't
+be filled with any value if the instance does not match the families filter
+or when the plugin is disabled.
+
+Plugin 'CollectComment' makes sure that each instance in the context has the
+"comment" key available in data, which can be a 'str' or 'None' if it is not
+set.
+- If the instance already has a comment filled, the plugin's logic is skipped
+- The comment is always set and should always be a 'str', even if empty
+
+Why they are separated:
+- 'CollectInstanceCommentDef' can have specific settings to show the comment
+  attribute only for defined families in the publisher UI
+- 'CollectComment' will run all the time
+
+Todos:
+    The comment per instance is not sent via farm.
 """
 
 import pyblish.api
+from openpype.lib.attribute_definitions import TextDef
+from openpype.pipeline.publish import OpenPypePyblishPluginMixin
 
 
-class CollectComment(pyblish.api.ContextPlugin):
-    """This plug-ins displays the comment dialog box per default"""
+class CollectInstanceCommentDef(
+    pyblish.api.ContextPlugin,
+    OpenPypePyblishPluginMixin
+):
+    label = "Comment per instance"
+    targets = ["local"]
+    # Disable plugin by default
+    families = []
+    enabled = False
 
-    label = "Collect Comment"
-    order = pyblish.api.CollectorOrder
+    def process(self, instance):
+        pass
+
+    @classmethod
+    def apply_settings(cls, project_setting, _):
+        plugin_settings = project_setting["global"]["publish"].get(
+            "collect_comment_per_instance"
+        )
+        if not plugin_settings:
+            return
+
+        if plugin_settings.get("enabled") is not None:
+            cls.enabled = plugin_settings["enabled"]
+
+        if plugin_settings.get("families") is not None:
+            cls.families = plugin_settings["families"]
+
+    @classmethod
+    def get_attribute_defs(cls):
+        return [
+            TextDef("comment", label="Comment")
+        ]
+
+
+class CollectComment(
+    pyblish.api.ContextPlugin,
+    OpenPypePyblishPluginMixin
+):
+    """Collect comment per each instance.
+
+    Plugin makes sure each instance to publish has "comment" set in data so
+    any further plugin can use it directly.
+    """
+
+    label = "Collect Instance Comment"
+    order = pyblish.api.CollectorOrder + 0.49
 
     def process(self, context):
-        comment = (context.data.get("comment") or "").strip()
-        context.data["comment"] = comment
+        context_comment = self.cleanup_comment(context.data.get("comment"))
+        # Set it back
+        context.data["comment"] = context_comment
+        for instance in context:
+            instance_label = str(instance)
+            # Check if comment is already set
+            instance_comment = self.cleanup_comment(
+                instance.data.get("comment"))
+
+            # If comment on instance is not set then look for attributes
+            if not instance_comment:
+                attr_values = self.get_attr_values_from_data_for_plugin(
+                    CollectInstanceCommentDef, instance.data
+                )
+                instance_comment = self.cleanup_comment(
+                    attr_values.get("comment")
+                )
+
+            # Use the context comment if all comment options on the instance
+            # are empty
+            if not instance_comment:
+                instance_comment = context_comment
+
+            instance.data["comment"] = instance_comment
+            if instance_comment:
+                msg_end = " has comment set to: \"{}\"".format(
+                    instance_comment)
+            else:
+                msg_end = " does not have comment set"
+            self.log.debug("Instance {} {}".format(instance_label, msg_end))
+
+    def cleanup_comment(self, comment):
+        """Cleanup comment value.
+
+        Args:
+            comment (Union[str, None]): Comment value from data.
+
+        Returns:
+            str: Cleaned (stripped) comment, or an empty string if the input
+                was 'None'.
+ """ + + if comment: + return comment.strip() + return "" diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 90aa0f44bb..a2d5b95ab2 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -51,6 +51,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "source", "assembly", "fbx", + "gltf", "textures", "action", "background", diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 4179199317..fd8dfdece9 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -468,7 +468,7 @@ class ExtractBurnin(publish.Extractor): burnin_data.update({ "version": int(version), - "comment": context.data.get("comment") or "" + "comment": instance.data["comment"] }) intent_label = context.data.get("intent") or "" diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 6b4e5f48c5..b2a6adc210 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,9 +1,8 @@ +import collections from copy import deepcopy import pyblish.api from openpype.client import ( - get_project, - get_asset_by_id, - get_asset_by_name, + get_assets, get_archived_assets ) from openpype.pipeline import legacy_io @@ -17,7 +16,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): families = ["clip", "shot"] def process(self, context): - # processing starts here if "hierarchyContext" not in context.data: self.log.info("skipping IntegrateHierarchyToAvalon") return @@ -25,161 +23,236 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if not legacy_io.Session: legacy_io.install() - project_name = legacy_io.active_project() hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - self.project = None - self.import_to_avalon(context, project_name, hierarchy_context) + project_name = context.data["projectName"] + asset_names = self.extract_asset_names(hierarchy_context) + + asset_docs_by_name = {} + for asset_doc in get_assets(project_name, asset_names=asset_names): + name = asset_doc["name"] + asset_docs_by_name[name] = asset_doc + + archived_asset_docs_by_name = collections.defaultdict(list) + for asset_doc in get_archived_assets( + project_name, asset_names=asset_names + ): + name = asset_doc["name"] + archived_asset_docs_by_name[name].append(asset_doc) + + project_doc = None + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data, None)) + + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, entity_data, parent = item - def import_to_avalon( - self, - context, - project_name, - input_data, - parent=None, - ): - for name in input_data: - self.log.info("input_data[name]: {}".format(input_data[name])) - entity_data = input_data[name] entity_type = entity_data["entity_type"] - - data = {} - data["entityType"] = entity_type - - # Custom attributes. - for k, val in entity_data.get("custom_attributes", {}).items(): - data[k] = val - - if entity_type.lower() != "project": - data["inputs"] = entity_data.get("inputs", []) - - # Tasks. 
- tasks = entity_data.get("tasks", {}) - if tasks is not None or len(tasks) > 0: - data["tasks"] = tasks - parents = [] - visualParent = None - # do not store project"s id as visualParent - if self.project is not None: - if self.project["_id"] != parent["_id"]: - visualParent = parent["_id"] - parents.extend( - parent.get("data", {}).get("parents", []) - ) - parents.append(parent["name"]) - data["visualParent"] = visualParent - data["parents"] = parents - - update_data = True - # Process project if entity_type.lower() == "project": - entity = get_project(project_name) - # TODO: should be in validator? - assert (entity is not None), "Did not find project in DB" - - # get data from already existing project - cur_entity_data = entity.get("data") or {} - cur_entity_data.update(data) - data = cur_entity_data - - self.project = entity - # Raise error if project or parent are not set - elif self.project is None or parent is None: - raise AssertionError( - "Collected items are not in right order!" + new_parent = project_doc = self.sync_project( + context, + entity_data ) - # Else process assset + else: - entity = get_asset_by_name(project_name, name) - if entity: - # Do not override data, only update - cur_entity_data = entity.get("data") or {} - entity_tasks = cur_entity_data["tasks"] or {} - - # create tasks as dict by default - if not entity_tasks: - cur_entity_data["tasks"] = entity_tasks - - new_tasks = data.pop("tasks", {}) - if "tasks" not in cur_entity_data and not new_tasks: - continue - for task_name in new_tasks: - if task_name in entity_tasks.keys(): - continue - cur_entity_data["tasks"][task_name] = new_tasks[ - task_name] - cur_entity_data.update(data) - data = cur_entity_data - else: - # Skip updating data - update_data = False - - archived_entities = get_archived_assets( - project_name, - asset_names=[name] - ) - unarchive_entity = None - for archived_entity in archived_entities: - archived_parents = ( - archived_entity - .get("data", {}) - .get("parents") - ) - if data["parents"] == archived_parents: - unarchive_entity = archived_entity - break - - if unarchive_entity is None: - # Create entity if doesn"t exist - entity = self.create_avalon_asset( - name, data - ) - else: - # Unarchive if entity was archived - entity = self.unarchive_entity(unarchive_entity, data) - + new_parent = self.sync_asset( + name, + entity_data, + parent, + project_doc, + asset_docs_by_name, + archived_asset_docs_by_name + ) # make sure all relative instances have correct avalon data self._set_avalon_data_to_relative_instances( context, project_name, - entity + new_parent ) - if update_data: - # Update entity data with input data - legacy_io.update_many( - {"_id": entity["_id"]}, - {"$set": {"data": data}} + children = entity_data.get("childs") + if not children: + continue + + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data, new_parent)) + + def extract_asset_names(self, hierarchy_context): + """Extract all possible asset names from hierarchy context. + + Args: + hierarchy_context (Dict[str, Any]): Nested hierarchy structure. + + Returns: + Set[str]: All asset names from the hierarchy structure. 
+ """ + + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data)) + + asset_names = set() + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, data = item + if data["entity_type"].lower() != "project": + asset_names.add(name) + + children = data.get("childs") + if children: + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data)) + return asset_names + + def sync_project(self, context, entity_data): + project_doc = context.data["projectEntity"] + + if "data" not in project_doc: + project_doc["data"] = {} + current_data = project_doc["data"] + + changes = {} + entity_type = entity_data["entity_type"] + if current_data.get("entityType") != entity_type: + changes["entityType"] = entity_type + + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + if key not in current_data or current_data[key] != value: + update_key = "data.{}".format(key) + changes[update_key] = value + current_data[key] = value + + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": project_doc["_id"]}, + {"$set": changes} + ) + return project_doc + + def sync_asset( + self, + asset_name, + entity_data, + parent, + project, + asset_docs_by_name, + archived_asset_docs_by_name + ): + # Prepare data for new asset or for update comparison + data = { + "entityType": entity_data["entity_type"] + } + + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + data[key] = value + + data["inputs"] = entity_data.get("inputs") or [] + + # Parents and visual parent are empty if parent is project + parents = [] + parent_id = None + if project["_id"] != parent["_id"]: + parent_id = parent["_id"] + # Use parent's parents as source value + parents.extend(parent["data"]["parents"]) + # Add parent's name to parents + parents.append(parent["name"]) + + data["visualParent"] = parent_id + data["parents"] = parents + + asset_doc = asset_docs_by_name.get(asset_name) + # --- Create/Unarchive asset and end --- + if not asset_doc: + # Just use tasks from entity data as they are + # - this is different from the case when tasks are updated + data["tasks"] = entity_data.get("tasks") or {} + archived_asset_doc = None + for archived_entity in archived_asset_docs_by_name[asset_name]: + archived_parents = ( + archived_entity + .get("data", {}) + .get("parents") + ) + if data["parents"] == archived_parents: + archived_asset_doc = archived_entity + break + + # Create entity if doesn't exist + if archived_asset_doc is None: + return self.create_avalon_asset( + asset_name, data, project ) - if "childs" in entity_data: - self.import_to_avalon( - context, project_name, entity_data["childs"], entity - ) + return self.unarchive_entity( + archived_asset_doc, data, project + ) - def unarchive_entity(self, entity, data): + # --- Update existing asset --- + # Make sure current entity has "data" key + if "data" not in asset_doc: + asset_doc["data"] = {} + cur_entity_data = asset_doc["data"] + cur_entity_tasks = cur_entity_data.get("tasks") or {} + + # Tasks + data["tasks"] = {} + new_tasks = entity_data.get("tasks") or {} + for task_name, task_info in new_tasks.items(): + task_info = deepcopy(task_info) + if task_name in cur_entity_tasks: + src_task_info = deepcopy(cur_entity_tasks[task_name]) + src_task_info.update(task_info) + task_info = src_task_info + + data["tasks"][task_name] 
= task_info + + changes = {} + for key, value in data.items(): + if key not in cur_entity_data or value != cur_entity_data[key]: + update_key = "data.{}".format(key) + changes[update_key] = value + cur_entity_data[key] = value + + # Update asset in database if necessary + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": asset_doc["_id"]}, + {"$set": changes} + ) + return asset_doc + + def unarchive_entity(self, archived_doc, data, project): # Unarchived asset should not use same data - new_entity = { - "_id": entity["_id"], + asset_doc = { + "_id": archived_doc["_id"], "schema": "openpype:asset-3.0", - "name": entity["name"], - "parent": self.project["_id"], + "name": archived_doc["name"], + "parent": project["_id"], "type": "asset", "data": data } legacy_io.replace_one( - {"_id": entity["_id"]}, - new_entity + {"_id": archived_doc["_id"]}, + asset_doc ) - return new_entity + return asset_doc - def create_avalon_asset(self, name, data): + def create_avalon_asset(self, name, data, project): asset_doc = { "schema": "openpype:asset-3.0", "name": name, - "parent": self.project["_id"], + "parent": project["_id"], "type": "asset", "data": data } @@ -194,27 +267,27 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): project_name, asset_doc ): + asset_name = asset_doc["name"] + new_parents = asset_doc["data"]["parents"] + hierarchy = "/".join(new_parents) + parent_name = project_name + if new_parents: + parent_name = new_parents[-1] + for instance in context: - # Skip instance if has filled asset entity - if instance.data.get("assetEntity"): + # Skip if instance asset does not match + instance_asset_name = instance.data.get("asset") + if asset_name != instance_asset_name: continue - asset_name = asset_doc["name"] - inst_asset_name = instance.data["asset"] - if asset_name == inst_asset_name: - instance.data["assetEntity"] = asset_doc + instance_asset_doc = instance.data.get("assetEntity") + # Update asset entity with new possible changes of asset document + instance.data["assetEntity"] = asset_doc - # get parenting data - parents = asset_doc["data"].get("parents") or list() - - # equire only relative parent - parent_name = project_name - if parents: - parent_name = parents[-1] - - # update avalon data on instance + # Update anatomy data if asset was not set on instance + if not instance_asset_doc: instance.data["anatomyData"].update({ - "hierarchy": "/".join(parents), + "hierarchy": hierarchy, "task": {}, "parent": parent_name }) @@ -241,7 +314,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): hierarchy_context = context.data["hierarchyContext"] active_assets = [] - # filter only the active publishing insatnces + # filter only the active publishing instances for instance in context: if instance.data.get("publish") is False: continue diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 1f9b30fba3..9310923a9f 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -179,7 +179,7 @@ class ExtractReview(pyblish.api.InstancePlugin): single_frame_image = False if len(input_filepaths) == 1: ext = os.path.splitext(input_filepaths[0])[-1] - single_frame_image = ext in IMAGE_EXTENSIONS + single_frame_image = ext.lower() in IMAGE_EXTENSIONS filtered_defs = [] for output_def in output_defs: @@ -501,7 +501,7 @@ class ExtractReview(pyblish.api.InstancePlugin): first_sequence_frame += handle_start ext = 
os.path.splitext(repre["files"][0])[1].replace(".", "") - if ext in self.alpha_exts: + if ext.lower() in self.alpha_exts: input_allow_bg = True return { @@ -598,8 +598,12 @@ class ExtractReview(pyblish.api.InstancePlugin): if temp_data["input_is_sequence"]: # Set start frame of input sequence (just frame in filename) # - definition of input filepath + # - add handle start if output should be without handles + start_number = temp_data["first_sequence_frame"] + if temp_data["without_handles"] and temp_data["handles_are_set"]: + start_number += temp_data["handle_start"] ffmpeg_input_args.extend([ - "-start_number", str(temp_data["first_sequence_frame"]) + "-start_number", str(start_number) ]) # TODO add fps mapping `{fps: fraction}` ? @@ -609,49 +613,50 @@ class ExtractReview(pyblish.api.InstancePlugin): # "23.976": "24000/1001" # } # Add framerate to input when input is sequence - ffmpeg_input_args.append( - "-framerate {}".format(temp_data["fps"]) - ) + ffmpeg_input_args.extend([ + "-framerate", str(temp_data["fps"]) + ]) + # Add duration of an input sequence if output is video + if not temp_data["output_is_sequence"]: + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) # - this is definition of an output - ffmpeg_output_args.append( - "-start_number {}".format(temp_data["output_frame_start"]) - ) + ffmpeg_output_args.extend([ + "-start_number", str(temp_data["output_frame_start"]) + ]) # Change output's duration and start point if should not contain # handles - start_sec = 0 if temp_data["without_handles"] and temp_data["handles_are_set"]: - # Set start time without handles - # - check if handle_start is bigger than 0 to avoid zero division - if temp_data["handle_start"] > 0: - start_sec = float(temp_data["handle_start"]) / temp_data["fps"] - ffmpeg_input_args.append("-ss {:0.10f}".format(start_sec)) + # Set output duration in seconds + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) - # Set output duration inn seconds - ffmpeg_output_args.append("-t {:0.10}".format(duration_seconds)) + # Add -ss (start offset in seconds) if input is not sequence + if not temp_data["input_is_sequence"]: + start_sec = float(temp_data["handle_start"]) / temp_data["fps"] + # Set start time without handles + # - Skip if start sec is 0.0 + if start_sec > 0.0: + ffmpeg_input_args.extend([ + "-ss", "{:0.10f}".format(start_sec) + ]) # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: - ffmpeg_output_args.append("-frames:v {}".format(output_frames_len)) - - # Add duration of an input sequence if output is video - if ( - temp_data["input_is_sequence"] - and not temp_data["output_is_sequence"] - ): - ffmpeg_input_args.append("-to {:0.10f}".format( - duration_seconds + start_sec - )) + ffmpeg_output_args.extend([ + "-frames:v", str(output_frames_len) + ]) # Add video/image input path - ffmpeg_input_args.append( - "-i {}".format( - path_to_subprocess_arg(temp_data["full_input_path"]) - ) - ) + ffmpeg_input_args.extend([ + "-i", path_to_subprocess_arg(temp_data["full_input_path"]) + ]) # Add audio arguments if there are any. Skipped when output are images. 
if not temp_data["output_ext_is_image"] and temp_data["with_audio"]: @@ -934,6 +939,8 @@ class ExtractReview(pyblish.api.InstancePlugin): if output_ext.startswith("."): output_ext = output_ext[1:] + output_ext = output_ext.lower() + # Store extension to representation new_repre["ext"] = output_ext diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 8da1213807..03df1455e2 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -73,6 +73,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): "Adding thumbnail representation: {}".format(new_repre) ) instance.data["representations"].append(new_repre) + instance.data["thumbnailPath"] = dst_filepath def _create_thumbnail(self, context, thumbnail_source): if not thumbnail_source: diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 66f9a7aa59..2ce8037f5f 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -112,6 +112,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", @@ -130,7 +131,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "mvUsd", "mvUsdComposition", "mvUsdOverride", - "simpleUnrealTexture" + "simpleUnrealTexture", + "online" ] default_template_name = "publish" @@ -769,7 +771,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": instance.data.get("fps", context.data.get("fps")) } diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index d05aea1e2f..8f3b0d4220 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -107,6 +107,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", @@ -969,7 +970,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": context.data.get( "fps", instance.data.get("fps") diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index f74c3d9609..809a1782e0 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -102,8 +102,31 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): thumbnail_root ) + def _get_thumbnail_from_instance(self, instance): + # 1. Look for thumbnail in published representations + published_repres = instance.data.get("published_representations") + path = self._get_thumbnail_path_from_published(published_repres) + if path and os.path.exists(path): + return path + + if path: + self.log.warning( + "Could not find published thumbnail path {}".format(path) + ) + + # 2. Look for thumbnail in "not published" representations + thumbnail_path = self._get_thumbnail_path_from_unpublished(instance) + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path + + # 3. 
Look for thumbnail path on instance in 'thumbnailPath' + thumbnail_path = instance.data.get("thumbnailPath") + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _prepare_instances(self, context): - context_thumbnail_path = context.get("thumbnailPath") + context_thumbnail_path = context.data.get("thumbnailPath") valid_context_thumbnail = False if context_thumbnail_path and os.path.exists(context_thumbnail_path): valid_context_thumbnail = True @@ -122,8 +145,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): continue # Find thumbnail path on instance - thumbnail_path = self._get_instance_thumbnail_path( - published_repres) + thumbnail_path = self._get_thumbnail_from_instance(instance) if thumbnail_path: self.log.debug(( "Found thumbnail path for instance \"{}\"." @@ -157,7 +179,10 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): for repre_info in published_representations.values(): return repre_info["representation"]["parent"] - def _get_instance_thumbnail_path(self, published_representations): + def _get_thumbnail_path_from_published(self, published_representations): + if not published_representations: + return None + thumb_repre_doc = None for repre_info in published_representations.values(): repre_doc = repre_info["representation"] @@ -179,6 +204,38 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): return None return os.path.normpath(path) + def _get_thumbnail_path_from_unpublished(self, instance): + repres = instance.data.get("representations") + if not repres: + return None + + thumbnail_repre = next( + ( + repre + for repre in repres + if repre["name"] == "thumbnail" + ), + None + ) + if not thumbnail_repre: + return None + + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") + + filename = thumbnail_repre.get("files") + if not staging_dir or not filename: + return None + + if isinstance(filename, (list, tuple, set)): + filename = filename[0] + + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _integrate_thumbnails( self, filtered_instance_items, diff --git a/openpype/resources/app_icons/3dsmax.png b/openpype/resources/app_icons/3dsmax.png new file mode 100644 index 0000000000000000000000000000000000000000..9ebdf6099f6ac279ebaeccdf21a658a34da5e7b6 GIT binary patch literal 12804 zcmcJ0_gj-q&@M#;LPtR9O&|~uL5fK4O*#ol6fhK}S&-hOhAusTs7NnK2mxu*jUX6Q zdJ(0FA{~_8`5xZy`~&B@&iNt9l_$Hiv$M0a_uR8durxPh0`Y(-C@7eWjr6Q3C@7`R zUvw9NCqZE;e!vH%kF}u=1>q~-67c2118p;H3W}OkhGP^B@SXmN5yFRpg89Swi?Zi& zg$o6R+zVqpZQDSHwb%5ye9(7$Ef0*v-@Vz}dRIh+pr^m8B_ynRm9I+#H*QZY!Qrbc^HJ*$UXz7JZ;saBVHp>%jM>f>E zYc|vx+rm0@lHzj}DfTTI_EBxy68>u{`rUH0_XioCuRe@iGHx?f%Pn94E%_jACeV9Hac~(PZ9tUT7sGm`Z zUN=BK(nXa<{NUif(@?QFpM%>J%c-dSas4Xo^znVD#zrXA;rJpnuS096<$z0ZcADkl z+1GepKrAt!+QYH&YcEKd>61V!;_j13DOnXL1FrQ+x`+Kc6Q+rpXshf#OE`V>t%MQ* z89T?RlWP{WG_|)j)YwMZMRCfrR&b(`A%a@77u<&c<6XEXzEvJpIaZS&hdTfLqBjII zyiI`sqxGt@k#Oa1bXdx)T{KeDzbk6NvmTKA@>J2b6B z!T1Z`TI_aanyfIgZ6mC{HtE0Lt;&B$7Gbmo2xc9WSUerh zZw<-y{q2P!(l3>Tz*6o~LT6o`65D2H; z3X9ENAvi;HC$(Vv+_LIzD1^IZUj{fyG?rN#rAT*&`&_+!cBsZqor}x-d*u0OOd=bf zA9}eO<@Cwg%rnT|z6;~=?fA~7Yk=sEBY9Bh+4(~+8rUsGyji1J;C&afRFg)*yEW5&&P61w&WOS0HB7-6moQkE#Y7=SEoRoTYcN%Lk z!t|_#a}RPyT05`S_%K?X>n;t((+M>o+gqG1=m%X0YYW-JRabzQx0)X9lJOyry^!WvH; zAa^QOJ5eC2$bk;`GA7!kKg+r7g`8T#{j{Vyq1;8$ZXC79cSFyEp)$T%-B4o&N;H%f zPX_GhrUkpFf2kJ)^ckz&j3Z$w?{F76yVi#W`Sg-WVB8f;xZ>YNdE$hFU_4eRGxt&t 
zA*UM*aRlRedIj(E-r**pT+zm#pZd^FZ>N0V_Xkm+p)~2xo-(s3Wv2l%6GkvW6eKfw z^-oES71zR*KeT8l`m6cqM_BBk7A#&X>1Kio&1x_rx%O!k2}H4x@8;UHEY%=CLLy^( zGjlWV3mY=}ph?SLI@?10Ak@ma6_hT(aAC$UEX@OTP~91N-VdLbp7z4&v)26~Wu!}j3wjZcUN zm77ZrZJl7l zqJmaUZ@;P#G>yJkIrR4@R?Rb8vO#nR)KvMB=W$nBJwCaRh^25xnLR$f-SF7%J*;x5Lgw@eu8mY;75woD?W|!JrWU_YxQNX-yLY|E`VP<~5HIo~ zBEyfAFFkzyL}w6_aMKQ6Fe!r&@Wn@lEJ4Qgj2IbZA@G-F`kPr#TDmfK|(rGL8+-RPeIR__`IBC*p+`N{p_E&=wL> zmYm~&MYd+u{5B!q462pl_irMY?W94kpeT<86E}e+*p%zZb;G^5BQ%kN;pc#v1WFcX z=kU93mnWy$4O$y;jQ_hC?7Pt`l}MDX4y#IOc+$3Mymk8_&bU}+*VXj`8bcX4-rcVR zc8zIDOp_;j4yMNOP1;w3M1T(Q4a_&S#=FR-=AK{OlB{4#mT+z-n~z89HE`d#_)2PBXO60(Jst69MdX_?Y*O56B zh^yWgk5)Z@Q6U_Ud%+t6G|j+U8t>;<3dwI56zo1!j^}iTG1^3tq^`VvT{i7%$UUkMenNv#^}k=y^Orb#O+-{=3XqC8lML2unl+ZqiVlg}WshQGZS)X*Ut#=vKRzH~ zJeF{p(jvL~vt~c+!%?YGd7t~go>zi*i+ey+i6W4+5+Vg**lRfCkF4d|xZ~qz;o2Q{ z*abm-l*f|4{(Ql{`N3Fanv-YM2$mErlA3CJ7Q+U$MMRdMUA61bq~mlErv&R)DS%_X z4$59`I(jr#n-NQT%J@>=DeIs5hSge*eHqeR3Xu(`M$nha#VLwI(#p=)Y&X-*bO`oi-`spd z#Z|5tuTVhDiy&JWlq8qGzHe=ycUOm=sqi@&9yp;h{x2teS8(Cufa_?&f|a*x?%xj| zvphv}YXdsSZE|ioQ93xdpYpyI<)%y^LrlJN{5c?au8UHn2e&c=hlKGWpky*j(X~}M zx}cw)>GA@aHkgNGx*(ba936>DX_8098gu{4HUAHl&ubfq*`wDvV3#NmpPavcQI5-m zik)Wt6EWtV7l#x+fPh`4flZW_t(Kjde^1=zvcja4-)QX>SH%nH`J4H88I*ONf%cpjTY87TCFSD@==I?wFFC)AVXD4rVuoq!K=HRyK5lao%9*y(@Hxhu^cG z9LGk)oBufR?Na|)5C!MHtlBVn3CL_#)&z7aSETi_z_L9zHn>r

    {SJ~!Y9>I3PqWedKP*SW{qUGfR^((-q<{G5uF z=Swwx2i~q_{TRd`=lM#obHE}a@k(CTAJXLVUb&ivPk=~k0{+s4xPOB&oSchk#Y@-N z&Aoc}!thRVt&YgjQ9zTa~O5;S(^--a|1z*l*QC;GD78qa`hLkLR7&#Ck<|OaeJea)>07gz`5RO6% zrn5Sas=aE8I`~(fJYoD`Qo&w&JgUaZZfG-#)!W|dO>6@r|HDr!birZIJHZe&4%pWT z^xmW9Pq*tOYw=nRq|Ioz9)Dc_i((Ta?zqw{`AIj1%*qFR4`S$}9PFqUp~f{InC--> zn9vwee;FL_W9eruu98tiX^f<3!#_hib!&s~BI&jx=oNY+(sB4$rYN2k6%}4NN zElD-7M5_PMyQ}U%iia9hI)7`XVK*>7T_$HGHY2+~1;$nb+?d+ozuskbg0Y zgW1QS+^#fQsL~5Ae&hD{WpMY?ITsm7o6hrSy4x0A`S%)>8D7dy_TIKOr7eC+JdUi%a_sOLi&#Eb!%AaT34hak`qKyR$#6Z#5A3X!n zBon$`kA8vC4b9UJy<~Jyq;~-^*vZB~&FO`!I@=vNSwu-Mwm((C8FZw`W4>d;9A7(; zqmNQdZp?-r$uwR;D0;fM77~|#S#V#zW-E5P%K-2JL$BUGgFCs?H~f$Ruko9~O5{0a zRSuZ0P%Zo9^gm|hv04+rXL(H1w;m zUOEW;nK^6Q_yn?qf0`3?&GIG2But1hE6wTT-8@}fUs6{hH@C1DA~;|dgm3^?v*I`Y zu5C_7)Dpflh><6Eh&2yMiYj6Fm2USx>^jTg$_IFcov?teG$8ME0ixAmEuI36(Kv^o zT*l=1`P`c3rNK+vAgoN^*Y%cqK&_$uCSE_TaHG}uX*G(Ep*V*m?Zhk%+bLU)=mUXFP{>P-qG^ zuCY3&7=4uM^XEi!-55nXOzN9T6L$A{#{hm&X%OyW1!~HN#S_i}JKX;+Ffjkf6Mjj2 z+3(j5h#8Nvg8N0*H9VRJ3!^d2xAHRTSkCg*?>tVQ3CRMeBF8UQIv@yuD;mp%j)+o^ zZnE=-la<5UrOb2$$KnZJ_iDpm%<|zG(#W(z~p&T=~(3~trw1XT5hkyU!5gD5_pT)Xhs9e+d>PY1aV zN--Y8HU2?_OGYxCc}PS3-2uGK_x9)xk@==2ri|aYB-Tf&y1W>7gWH}G*bqW82fE8h z@46SI`Y346Sm}0Ru|&n@L`}wK5G2LqLMe@_9pC7iOCR~To_1Rq*ztfM!o*E*$qoP- z#WG$(ksL6KRj{DI;DidQ=-Wm7bly7r*1^P*!BcH347whd4=_qp?x^uss_WNDa|D$y z37YeFh+4}2vD1pQdQ*yqsf_Z?LqMZn`p95MogcG5a)?2H%R?t}{yteXz2XE3;e|5s zd+=y^zqawzgK`18u8?R=mCoh=?4wyqGi?6(hE$jtTxnkSs!PB#0j1A$Qs8;q1;@B* zl}#g2p}Ua9pr`>=O~?M%Z%%+{DKfO&;ax1{=&iqHgp;LX;5D3>WCzXJlwpT@(+#vx zS82R#6+@dOJ9TZ-90#^&gf$%rKLr49mQNcynyE(S=_7%#sb_gUp2Dx2)kkVwux-q& z=f9g@m8--TRXiip-;XL^Z^p)u$P`96M0UBm5~?uU1ugH{kj>~}hP;Ke7A}%x5RvU= z8us{&OfQy7qI)~fxNA?NgL1u4A{HL;)fC4al*Y5pW)Ib$?G;S2LWAH8w+JfbeN?upRS@TYUb*7}Cl{LsJRMqe$LH z{XE=3a(WaNL+-RJJL3{!BT3{dMhvPOc!#yJy1VRLU6F2N&7Xm*=~xAqcs35bONQ25 zufk_3v1+a+jjRwyU0YUrluOzm6ae4MT1Wz+az2W9Tt&C_r*fz(;eNFyFH^Vi!SQV9 z?fP^&v+}Ix22ieD`_VWOnI2y`Kes-p&&d^2V=Ka^r($$45ayur^QMu86;YA!K6?HT z$fJ5ny4rDvmT;B1s>;#eU(eTX=UvMndhx{4^_bt(n4=pG%hw|>7#c#4me&1C9^U$L zTLfEsy%|J{#@tBD9@tF@XsRiLv7K9u_2a8|=Rx+x6EB{j)H+@evc#bLp2-%FUsr(QWiS^<%jvAYSNs7B2BszFMrig6tGPLiuvC zgK!aW_o<-rbiMQvOv(97oFe$Hn<#x)(Xdbjc7r-Ju%`5Im_(eu^`@m^0{Q+524NwnqcWHU)k=V&0w5 zDt~4KNdy#I(6@18&>|}b*Ga9-%mO`$R-C&Ox1S=-^nBM6MWY7`iG1%(R|Cp>Lu3jZB(y-wnRE%4+gg=rSFj<-O+)B>|5}quU zsfC^kt55iVEP-e>IxCD-oBB?DJsO72d*VWr1549J))C9HTTLHalfj#HJjJ{_xSUjX z|Le%nYN`CsCbqxGtF@%{zDjdkLi|!ob7afS9yIAGhY^FPhpU*39<1VxHr(|0M_$eL zg_vJz|7}c?Ktzw=%>=l{O^Vj`@@&H(zhcxcj)H@r8+F|7lBxGAHFY>Z5`0Y4U?19+ITwyWvf-_TUkP>l|Im`lTKUA1R8i*X+T9)aR)Q^DIV zSc@F6Gy2DS-UOPb-h&Uu1w4wUdorr~NYUkFPl!s8PKd5o*JWmjJ{g5A3koq1ximz`Hd%m<1zpKo>l7S=&}=9%qzaknxIMOetkjp>2*-$MCkG=_Yrve70>9QK0!+JiS$w;CQPJ_c)d=(6*J`sA*T4tGeDgl;@* z^ypZXAAP#{=|5^Yd98xgO4}Nvg|sPK9KjYELCjD;^V}!^&(Let&&BgyPG%(7_w5kQ zSu=`oa-+LVe-=vykFq&N!km!{Dwd1%nfNJEm75Mjk~YO5E9w+hN0Na!9W!6w`d5R{ zb9S%0EOc+G{Nm~a%DNqjYqrt&F`+b{1QL;1-MB0$&t%ftgQ?VhNy<36jqEODMVH%3dxl3oDe!gP>hIuFLE z92%U*Xx)D2@!F%+n1Wi9gQ9{`MQzz{)=(tijf#jMUqT4QeQmpQziS3Sl#`9dF(1V6 zfVc*AkaXk#!a*3cl+ysduURJr#1H==Z*Qa-2T=S2PELwVL#&t>8%AKh<@AtXr6E%( zQW9#FEC0>N(D+#y->+F`F#Ng^H{%WHBpI?(!~5t zeQk@pe3$I98vB3Rq~$!AXokb@HOec#KFY#)dsjv@C)FAFO-GH-q=K?dJ9xrqWnj<% z`3yxlf{kTS&cwW|q*P#hbt6}u1ICFh4V_q;C<~1*i+Z$q|3YS2%@FmF?&@appe7y8 z6ZwVvy|Uk>4~tT?U_6BoBxtqcS(``4w)Lod03o5s(x}1R(UDqGR-|gs`Rd6fD}GRgqg(niQp(Y26>33@F!!JCSv@c9ml<-1!Z}bl-`_D3M z+zfU}tH*^F5x$FWnD3|BpzcV;N>`<*St}A|nj4 z*7G)U?vs&Dt&JkUvQcTH&|RM11ASkwlM`8nJ`jwDX;SLEe9@fTSL``yK@ci8@1|b> 
zw`L_IbIh2eb_QT!xk93d&_-P_XB@8jzQ+VO$22(pO<{gG{JMLMKlCmb@5ZEzFOvgW(m1kbFWIj-^TZxO&{24yv2FJD~0+aX+65{IR<0_yc44=6*3`jF1SF zr7tDaPRyl=_@5T0)D7iu*VcmEu_g9x=?Fj{HdO12%!f&o#6unXW{MT*1? zH}B5p-E$$g?h0#u-;%xVWNeI#Cp7gJ02~3+T>Z@_Rd^0nyFt;LaKJWSuO>9R%!>P{Db8gPE2bYD7Qxs;K8*}euhB@D)Y)c)7@T88-> zg}a!7doyk$aS#Js6rmT|`0C}|%L=J75I2+6^Yv5mv?FMv#JZ;2u;;BKb<{j!A9TMA zuQ0qgbLwyK-;WoHq#JI!BxPr-Ex#@kty8-v@d|?*iQregV*lxNPVQJmQU#Tv)nFOY8M8e`E&+M+sXM3* zU|%TzUtAaZQB>!LlbQ+(@cVr>AMkBOE%e&wcbJ9lLf?M%_v6(r-y1r|d19G`N47?S zm`{AttLfFo4TFw$C#o@5>Cp2Ro3!6&^oN<-?{-~+zSyd?s8sSo&8uFX_3@cMi~3q4 zo$a|d^Ug-oV*UWt8MLPAUok8YboK8c=JS4+1eK$w`d=PLphk6Tqo#+ddSziEHT-V< zSRnK9jPF5Sfsc$`*gV9exfK@rSbJQETL<+{Tl1aqd%HK`4x_i)cfZffUB*t*gj_p) z{_=QbGrT^fC;Y^7R9smszvt`+>iw7S&bB@5%-P!GX--c;$Ix|^(Q6BOVH%;bfrK{A z)0%+N<(@`|Ky9Y9YL*P9XV>EM+P98oLLBW+I(fAbA~j^sG@#7=qZ739w&S=ofPqFZ z{P<{Qw2(!uz@)u`>P+LesGwDN+8NF-baO%Tu@T&8Fx$v?r(=3&nP0a3xRyt(nqdhx zu;Vc(vcTAJ_H5^e`u7MG@VqnwmbmkqKPC@ z&2f_Bs#PkfpD^g(f5(nzSY~!P`DQ6Pj%;2%wz0B}@Q%0(to%QArZ$4l?wq*k!PDNK zeQUcDw!sqqm%z2YpH||>N_&CbcM1u_&<=nA+=vbQdPCzZ=){M(phL(M3^qDh6FmFd zF0v0ldj5F(X6kQ7zyYjcb}$QX?9N0M3La6j6%bF*3;A28jfaErM4^aF8VANZEK{AL!n5JyGo*O+BZ2g@(nqZ`RUD z2HFfp`^VMK=FbgHUeG9aEUyYbfep1GdrBQopRW31N`P`Bpf*}}JXVkDULl?EXErtYs=_T{@fzZqmJ z!qYh}Yo-<~TvgEqJW~eM#`mOneg8d+uL9-t!P?`HH&tO>oLrzG!_e*G<vLU}i)34wBovx*{k&O6xztsH-ar z;fI-f=_>6q?130Px_AwN%J@gmHit}?t0}wxG?8?=Hb2USeQGh;_%&srUTUEESE~T- zeQaa>(OrbyNMi92)wvgxYxLG(<)cBupOmxT5^bqbsfNEFBp*6z8rXG518x^gn%rA` zJ8k$Uf-mJqqea_kwBRbVdSSlM39 z?k5Re?B%6fd9NI0^NuTfn_Bs^ZioKW z-VpUU)-?V;KzER1=kQNsIIN3}%R?vn_eL4gO0YOKH_B$Rt}@5=gtooGf?J= zcTCotfMD<+ZYUR`+x@K&_x~E_#J;E!(y@tgaR2#`{h{qxO8)WG#)zWh33kt_Uo?ei zxuEik!SOF);Ap?Hwe0<70Cchyv8Mo4Jr$9ZvTimQ(un}CfDRB{DyLW3-v~uL@P*z3 zODk81N%P@OS2{*MbnK%6%WH@UZd8HwiZ7kW(9@fiqn`RaJ~-WLgKTSPh*sNG#|k+H zY(CKV6{fVW#g8APYl%|q64_lpA1)3iluy`UkUgP$z8%O1CrPkSw%6(Q$W4r$fw zID}r?2nLndCEi`xXDXjy{CTbD~`Y8aO=%z+R=K_)Xt-ths^QUKM3Sz_AZhs&*v_ISA7#~(q__5 zkT2R4glk;Pc;_hcqCm8kxZZweYP0Hb(1SgV0jG#lY2NzT73I+v%v%?Ii~BtBp#<=3 zh}Q4DFP9zu5|Bsxj%@3#7U4(+)Ai_1&y4@vSsgX?oWB31@_4e;n6UaKciTIKLW#sx zGHtzxI6i}depbZ~xgW_gVy{T0^FamEf0^dhSpyHvX#ur-PcvfwG@4)xqWW*bJDqR) zJy2~so{n1@!&_QIv0y6Q-6oaj6fj9=M8SWiz4M}N$ZWlnqJ{J`+LWBEog&#VV?%|} zl=5e1Av??JA-0~ky=VOjVX=9(Bf9Q%ZaFEPy&a+$fdF2^9U=u#5kmb zCoE|aP2O7x9_40p>un`nM57laJ{JXfvlvlt2^AYrSJStgj+or6AqtPBoteF^e=m@= z%~OBfykAo2e`WRS>jZ<6OY}eJn+Rrc{JkV1gDVQryn<-ExhIrEvRYOKHVFCSf$0!^u4m%*wmPO9x*K7xo_jj zEKBtFZ{Za{f928~Yq`cIQlHf=gYB=NP}}Q@uU{*+e9ydTDHu&$9?igO$yVQ>h^X+G zIN4jF^Cj!DROYO3oC$Y?&r?17FTR0Eq;b*LHNfpxz8JK@R}WD?R18hR=V$wGQ016vtDg84%TON=togFmI7ffcv#$L0TR4SJh$|L;y);LMS) z1-z};VZ7xzVLfnJ@_b-OVNvY(sJHC6+TnXzzbgD#Jc4q}ReaqPJrE})hUA~M9) ztpHIyYcmHaTs8xW6c;Gi?um{fXvj(bD15DdBed^?xpIks>VZwrK%%2`3LG#-=mSVTvCk4 zU!WNVmpJ}d5B~i#XJV+&^Nl!{bF7DE&DP@?w|#Nw%u?d${9EPkUF~rSB~noMYx|iQue7^h{!hlc0QNlE&N1rq^271^7KK9+!T!_U-6~33L&Vdnm-(UA;b+?8l!5$< z4|Ib)>?(!M@c_*qs-ipmgIP35)7r2K2AbE-@0k*N_1^qa6`akh&h(^xP5b0p+3D(<0KScwn)oZux2&o&FTa^E6zI%%?qgt?+QByfI zY`~AgK`@_Te5->wgL?C*fXGM1#yZq)Lb}!y#d~crR8OtRe?TSMW8pA?Ju>E7mE6tPv$$dP4xhIEK4l;Z#HF9vXw-|F;9CFRlXk_D@dkuaN~-|FO!yB^&0)-6>{ l$`H2yo-zHX_gp)p4J|sLUs3g50RFi_VXSYiN6@(!^*@6=RyY6v literal 0 HcmV?d00001 diff --git a/openpype/resources/app_icons/celaction.png b/openpype/resources/app_icons/celaction.png new file mode 100644 index 0000000000000000000000000000000000000000..86ac092365d009052c4e0351732ad1dd067329de GIT binary patch literal 4012 zcmc(ic|26@`^WELWY5?hQj8@eWr-Gr8Cyu|$r3_CvW#sQV@t-M>_SK(WGgL}Qg$=4 zmc}mq$dW8QWo#4EFwFPVZ~5>0@9+D^dEMuJU!VIr*SXH?T=#V+{(ZqzkWY#a03c{? 
zW^4rj47!B@o;}dugn!}z4ZHzn4nY9$i~hb~AU9tUg4_giOB3#|+>!_doj6Y2jX%i5 zo?sA6@WSCiMdb%G2ni4%Wb7W~hQ|gFusDAp$}6V;alYT&2p15J$9j4Zz;mGwry;uc z55>E?{+7xTuzv2~I75gRvhe=4SmQhhp{{s$;2gmIE$9E^92?~3>k3KnppH_T24SAx z*v=L2kM;Kil>H0w5aa&u^}%>Icd)^Z{qestBb=X~yFUSJez-@5*q+}q8!W-s9Xwec zQidW5H8(b}3CpKVTKXPfgm zWlL6rgkbP9oY;UH>}a%8?2!l&c!mQ9mk5PBbG1#y=Af9btUxhGyP~7T?6R&-x{ViY zsduSw>BACz=X0}BpYB~IOB=_IzhKKnvb;LZ1J)f;Ic(37G9$<{yRlS8?76x6ZlPo))XeR|A5ESt^l}N=n1Ps}$B(WJm#YpPA zwltjCPDmj#=NTew$?ooRiM6jvMVI_nYUTPS1t%^y&6~TG9qN_CCHf+{t=LaRTdhY$ z{~sH~FR9MZfwl3K;uo@e)eH;^p=?rhdB_N6bu&udFSL z1%4H9=@RtWeSsA3@+OkCeobW%oxDw^RLbvYk@lT<&fKqR(lD`dg_zKVjM;_q*F6rI z9s{lSdOADGYjawG{&K$C+qg5~p8m~|RqWTw?5%D8UuQq^%<;iAV%5gh*=O@3JfCux zGlSwIfL52@rs^@fQ}=JseX1|g7ljzWqlaQ3V{SG1@pajjOTMv4q%vbmJmyP;e< z$gjLB>|m1<1tIfMtu#7S>ZTHHX1aJ~pMx=&8>n8uMTRL5j^KAKqr$xN9GT*?jeBAY{n_kI48e7?y9&*5Cs002%UK!p*#VXr zCoCPNjT(A;o|4s|tjs=Cz(AS{tT?HiV)nCXS}i#X@d!}ScwlY~Gy551xj~UyGCT8R zm^Gv1a<0;H?E#*yMHY%`(<6y#yybT=0O$8n(~ii2@*Y~jM=#=aUYLfWM6B*-^~ck! zw#Z!M4gy$fcqR2uPZ8cp+sF13{IFULoU$uneCgG$3e6$<^>XA0&6$(5&9>DG9nlOR z(!tWdE}a8-10!hE(gj9#)K1cZ>5+Q48KqM;ZIu~0dRLh$l=SYy3V-AAo%)faR!@5q z(2=5|-y^C9yJk#R!10jo9~@r>O_0(dnZ^)e$Z1E(HV(-KoxiQ5(=xT$Emfz7PI(ZK<;g&DaDKI~FgZw%VVSgi%@!lye#W-0Y;v-QnE~f;f&U}8 zay^~v0>WBH^eqrlw^x@2@xm z>zg8-vO=XKEs~GbpLJaMFej-I)_k~fQ}$Y1=?t#;|i zzGPN_{y6XOmj~}3V>TUn)!$-KR;C~!A!639?C+(B_Noxot!xqNo9W!2IVBB7Uf*3)nP^ohzvh8sc)KAvlRqW`Tf^2d)s4wuDnG+n`thh)+Cre5ypgzLl= zqAX+U#W1-x@_-qwcP}-~Ne)yFzV5t1MtB{-+@hG9#R^H!4KClq60ZyQMjHu8tvMgQ zsw;kInlKw`oEybA_sxWi8HQf*`hM6zcM(F8mzs89Mx-b$xi@-d*M9muT|xO2r=?{)wgp@c+NV4% zi}7X6#MxMIIjHR|lwS(9d5XEI8t+*`-Cu8g*z8ApUgkZWk2Q7@x;nqsCMsmMSC6DG z(C_%}?%Ey+CV>RYolU{;`t^@J7}Nz(ZPa~aU2|gELyuJ_>4@OA%+cFA6_bb0&C}Uw zWimhN;Ry!eQ^_MNZcl?!rC)QGFI_=SWexFoinpgskQ`2LeveVkTS;X$eOi--qXoRD zRa026FXge9X~mG|v@%%QRpT?8ql%{Ya@pr-e<@c5&fqlnl$_|Xr56&;Mlfc73op;= z&@R$Hndb*TS6ju|9pE9U)Qq#RGW*+wORQa9!!mU@UlH9tA$Va3sLX9$6!6zyG4_rr z%wEI~lCa38`Oxm8f@27vdFmAZ_3iqTaH;Oa^?!2I&-%y2tKvFCLLr61@fj4Gn)+Fr zZEouR^`G9r-uB~g^!e|Ymh^4Ys;lgQlXv>(r|e4D>H1FL7T@b|9aa|fa8v!H0g{m2 z)ZG`z)LqG-^wPYj#cpiEl&A|iV%G%JJ)?ZiWR zV&jO-A?a&>GH<%oEC=L=Hp=3j_&;{$o+`Q_Ulo&D+uO6B!$qL+jlG26a`Jcc@z-c$ z?N2#&lS#Wq{~V%Ot#?(dC%B6&Ct{#(Zn$p_?W2f-{?;TYg86U@XLe=Jw36(qs|!bQs)Z=Yf3Y4sDfDM zGhWsH__g;>iP>7{h8h)##W!cYT?9i)rAMY&b|0b5wj+p~QuRsFt6H5_)U~fIjEAJz z&@vo%D)c1n^o{1FX2wFXWZgt%@}gc!Z6R7{zSfb(8{HCOkss>0Zs<1h__9E^B}KD- za`=i;9wzo-!9vumox(|SLc-!@_jf)&W!n8x9j{(f^A;rgwxQvau$NMvLQy|SZG zVx6Iz7A-3*3@+(Zlsg+fK^JZ~aZ0p|cWjRipjO&Cwk>1m`v3D;htG|R-@@MzTX)6I z?!-MbcoOi+?<@km@>>U#^9n#d=)ZJ#MysP}?R|X3%O~gJ3Ki@5!Ne#QOsU{{8>VLVfYI+sLKj%-l10-WB)&1Ppb7D`0j_IxU zj-eWF1}fip5v`vYGO+(pL7k3&c;Y3!4Epk1ny#Qc=pwB38XS}42)f#@vhDst=Uvy5gKiYDm`KL0@{yyLun_qMkS+(Qg2E_y&#XGj|H6_$CsE>P|Mf( z*1+QeTiqu+q`FhSXiXSs2BxuIem-Q=;XY#`Cyp7**{ZbrR$)7D*4P%kTROt1Xmby< zajQChSRB)6;PdwjNonXz)us2JE*bwU8xCVl|J0U@m;O-jS?!)_? 
zbH4lIo;z!?m>IhJsjjNtRkf>$R#B41LMKHBfk0TYG7@Sa5IE`i2NfCk2HyE@4EROj zE~)MQ-pSJ4%f!_JBx>$tY5|pXFtM^wvoJCDaT&G{1c4A3Y}B>gwG|ck&72%qO`gZF zdOJ7+vq2z15pQP`Gdl}+sHugOjiV6Facd_H)W%$hMvGgKUC~+G!rDg0*VW>^uadf% zubmm6IgN-gRM49rSir%;-303GVDIS0?=3|0pLO|x_s?Im(Lnz*#NAGa=Klz#t*8PO zcXG9Wa^;AyVr zmi%fGQvdZV;FA!IwY$4BKO38umlvzoTUIAmD>e>3K0Y>fPBufrD{qut!qEdH;&|DOxHsrxuvu&G(NIeEC60fw`reSVZPzqqS~iMx}l zx|5Ure``_2+R5F?&DzNsDlY!4H9jb#mW`vilb74;|IATTf6nOjLqbijSR#`@iQ(IGK4kSU9@>_gwS; zJD1~so%`$#4$i=nB`jQRJT1(nT%8=C|JgRb&HpM3yX616-ha(C|6gTcm-=7lvH`-d zJsbD`W$gd{3h9gu_=<$cSz{|q?b~EWGd0ZpUOGe zBBub>BaVnr0~q7>Vvk(HYLt* zM0E?EbRh&T1U)?*b=)1fT`l=lCw@b8kB|Fnu|U24PG@^Gjwv1xke`q}$r-URS}%ug zJ?G@ODcA7v?8BYQF#U&X*p)ZIk(meP+Uw=(0iFc|uf96K2i}{D}t(+GH8F(9+#+t**^s8E76%Ue!o=8$(rQid3M}-+|eqq8ip^20OD9(x)}t) z9NSI@rnTE!4&J4XTHM11E@)^o{@StwmX|{y4LEadi4$?}k9)1GjbEJO8q@l3T0TIq zH|*|OVBp)N@V@Y6ZDRBC{T9!XEn3uAmD6aG=0-of5)U}ig>=qs3Y3N{>!u_qXG?J; zZx?KebGMX)F4V8ewAx1wJD(mIyuCuq1Rt;0kF+{((_0^J_ZL0p47O*hjjE~q)CcA) z^l4q!9rd^Y1qkq1AJAY|C!acMxoh}9n3Y5x+(l3WvsV^<5=Ge`|JYSuAgG}2_>vf9 zxTFsyeX!>yffFAJc6GC!h`5Sw5eRe`J?v@Sft~+dTzp(TsQcQ+ZnzmO+)ACwi_(DAQ`F9_n6W6 zf;*JNWbUtIXyi~n4(hYM2cc*<==@^azG(9~U-)g6cR0bn&)jG}jE5iN85fc0vYD>c z!c^7NV-R?=dDOi9>1X3{ipy6G+;R-$}7xaV;pkI|Zj&oJ$4ACJz1=rZ}Ruga4(@0M%^_KsAhi7 z^{^uyIJL*aAooSZYhP7w9Psc$6!PZNs%XiY)X3wSFpw4&Mz|+hRvtP zZr-m>!`A}Il9Rvp0M*%2IV*gK4Dvih?t1FLIZP+kP@fjkq#D-GID$-n>7L}*tac7%jv{yQbl!{~^A@A^^;z|Z2058576mvvdn=+I3zNgz`NuyBS3k$Ho;8#y82{!NxZ~K_ z4jdq{YAXKZ`{o`WNDt2T-lXE?itA*DB!Ckh%%=)(<+?)4nldjy_k$vjr)mY_w1uZi zC;Uics{ykhJ8I36D)aanP5X>72nQ)L_O#0|>|?$2s+iX^nsNAD+W+)EBo=t!$Y3)% z>Ug}UzB}#1S3a2~B00wYeZi7S2^__@&g8GLjgnn3tCt@y8(Y2F)+vrQQkCDCm=4yM z(d~G3-sRr?nd{i+*vmHl;L$M;aQw zKm-NtCI{fUl-M*z030uNb-{z=2l_4JiQ#cpC)X)VmsD3a&2SV)CR*CUTeRQd0vI^W zn;Cll4%tTUj%$B&fJl_PYE>vZXxDk=$~v#wk1y7N5W(Z{E5T&Y`h@sR>rv-Z;8j0c z;E#Y^d84y08{v+rrphx+SSPn8LIf4@>Tbnj+Ej}1*O$guF|CBl;_KbU{2NjH1E@ofn z+s-MZx(m5~NL1i&$f!t@wF@>&iL(P^%T)!Pgi?`%DW#;O3dP}Q>E$9P!Pqsd25k)0 z7V&=$h~e%u-sCjcSHrvxPY+jKc@GCxi$RauL4W#)NgAx$zuYc#KHYV;K3yk1Ij5nT zI@C$6H`tnkra8v^EiozYR3!-_B+ofGIZ6F)*F_%9u0BrsOSO6)y$l^Hcd*2S@q-0r zethY3Pxz$3^}n zUlkxk3$x_;O&4W@Wm>zM|?hC2f8?EVhE!_o0@=yXR+-#SmI zXP(25*Z#Xm-9a|1r9!6^gC|XoE#alKA1IMTn4M`ocIhNoNwnG`uoPHYN#m<#fFPDT{tl2uV4vh>EXy0k!AQ*yT*^k{V~8MVt9 z8(;M5Z}g`GzM$9GRyh0Dqn-EiBBVe3pRJETiH>wZaKiWIpw!KT9?nhIRZ z?fxT;JU^!Skq6_mmU+o;74b1|=9?DKpwD)$6~3AIuV4b?pQ*tY8_TK3^m$MJibXCm zh8De+$44VA+Qc3oKabaeWrS9`2fCV{Q-r%6X_38osCF!~-T+_#;#M>0Xo`;ZCglb3 zOxH%sewAC^OGP0PY=au2o>?#3*7wpk&5w9L-dSVZ9SncwY}3pO$rEZ74d;=7P>9bCPXU z6$+(y7mIFvRB^TiIn%L-z1lQ$lH7`DqK>{+>hk6H#u=n%AI~)j4%3VhRu=wBuXeF~ zmeF|PUdJ>$#lK+r8&pKTPC*hXjm=#td>XAhUqnpe!I$|+>}g|&GD@6%-; z)U{~53M?~ZEj`>TBv^cSAo92_($eg<{d^)0H#AZ6>wv7XwezmE^QTzclu+#$JBY*b zJZ|Oug7Hpa#eP!gife+Mlhg{PHY_G9obgSAwOd+MqUaH=I)wG^) zo{N>pKjMVV$C@F83^f>IuK)UPJpV*PSHl2^D89_ezFw7`dm7cQL6=rRTR;Ugsb>{; zb=maWWsJ^hv=^zqluoZFOB5)Cztqa?f#Y2N-OhC`1?-4ZxxTd=TiHaaW#$C!(tPbw z_8^(uHKP1g10W-(EpL<|8dr*AWh`sUPyhhOzZzL%EmFWTFWCnI&zvd#gCE|Y=b>*< zXg0fw44q2}@Vn7(Qblg?1AGjavjH5>MLc> zV<6L8nEoQaSRM4R|6uyt!JbKFz5TznyXepMs8F?3ab92xO7ivj zajeBh@DStD2Ghps(`imTKh<>?O5AyTT%0a4O;d^Om#cRvv*p#xHxphF^cjyv%s(+< zQmb1XN|YHua2GzR8)bESy2*R8A$J1s@tqwVmNgfIA@6?U)uLC&Ifa9LBVC3AGSs21 z0AsV;sn5jg)gltGc$^fzpe`YHu*{>+pukcATPM{hFGAA0A717PMacmwlwHquJ%r+Q zXzlwoRA2{;gJAQoLhNHLa={7*bGJrD8Txoh?dJBN1_mhm{)1viUR zQ#ei(^v8>KplMQ2b2eJP!$tFCKvltVO}rYA2u4s8CY#ZDefgHm?%zqCbn?*|--P8v z<@PzMbO2@1=|4)JCyQLxY=Nb4dY)A#_~2T9>>^-B!}pQT+tov!!7Sr3TMV1rHTMS6 zD{l(VOPkI{ZE8 zudmt+abzK~*EugD;4%tEQ5K)UGIF2AHt7~LGwSRBW7~k#63NO{8)w5`8tzO0m64%R 
z%=4favVWvLyaB5{;enBstRLQ1adUF+Swm$zwAh@1gPas_pt9_>9N^>Q%S?s^D_ck# zK3n1O)ps4H|4+KE^gJA$N#cn}cvgORe(1AgZ2cJ8zh-SIQn-44tOc_-P);6#`<^}I zy(k@4Cf848gKk8g+1JH6JwK6ZI=5G@4Rs?I2FpMm zbun2y>^GN<#F-*I9z=CFKd8_EpxvIXJe|LC-iJFSQgqSH$;X39sUbOpq}Aa*=JSiH z5+pNoKvVH6s#p@!$n#7-;S&PKg3bPL5Js|(PPq|MVr2ru_U-FvOv1)b-*w=IXQ*Sd655Rzd zmvm9=6E)7ZU#jsENZnzhq#bUjmu=dLW?nQ{lSvyv@_* z+LTqZ!=78jr&`qrhr4GKT2yEl$=R{Ws&zMfx}Cej9hu5~0W-(LPK%8Fs_@bDIeFq@ zh0~(ZY(5$uom0c;z=aELAoDZx&esp0wKI;~iH5%1OCS2JQ9ZA=4wkV6%QsAFK~m^) zefLU_0H~p)miHnBCCdt69%S;leMNaDZVOK>Hj(b_NjI6aDMVUM4l z=0649?iP~`J|B0ol@er{%7endwdhw7nnTOS1Ry-x&70PvmU6$J*-Pg7R)BOz@FMc4 zG=z_qEVv6#E|+^ZW+vfr;YepZ_bA?NKse=ee2!O*voiEuC5SW^AtOwNr!nji_dJDx zxrNOD&TNBxya_rNpSb`kR3Hp;k}4PZ3A}M&#^fQ?d{dvf0Mm9%EtozIdh)vhBH3~~ zz*)5kstBLYTN~_KJJH=UJ^J;MEwR&Fnq+4l5CfPq;LV3D_Q|%TCAnr{YCJss9g!{f z(uP#;l-Q5)h*!DpDFWwXZ`;)Jv^u6kWf<)SvEV}+2dvmsms*}j z%>yRDDgxX|zi#zex)`PFA&I-awPO@KL_;&2TP8jLp8{w%s(t{1*1Zw;cH$%hweDR; z^CC8slVe_k&8Xfw`EF0EbI;e;H?AB26;Je)5+q6+=7f5QsmJHs|33h9bEbXi*^YDu ze)K!EGuqGYB?A^cRDh`|D-==27B1Yi9)8zr5?Vtrz=Cp8OAqzxoNtYv0~|Q;(v&&$ zmLY|f*7G2Oe%n>Id;4lKhqlf_P8aJ+=3M)5rHQ41#Xb_M6|~7Vl(jc7C*r8T0Layv z5otXEBFn$LHmyxffAhZ_0zgWVr2btd>kC~MjzVJ4RX{EC5kJNT_haW=+W?DBU0>56 z5NV2$;_LmybdeZ>PEdkYq#0vdv0zJ;|3?;eqtU;*<6lSmVLjVmcw1{ZNP_j=;>>vu zAf=7}Gek4W&7)s51)e&4hPn$76Q1YWliRzcwF*b17=2FB=2jce5J3LCu?>COD~-Z@ z+z#bSrf?$hj|-klTZjhSU6`}={0u6OP-tGs zj?8QpB^v-t;P>+d03?Uv<8;E>MHv^X#El|_#P@bzruYkUacwLCA-zS60;QUKZPuBX^7SJf!x2y4jZ$zlnL_xv@z*Zu_H3WRcVLs_Vfv65+j zN*w#5{iis|;v3rrjD1K@gu{V{p;Ap4L$2`J9Y#P*fluV`;!t8N@u1YJl;FyfR;j|q zq54BBrZhy_LZyv!TX?~-R*X|ogIIol?90~j>JD9te~kh!U>jnxyuyA!W5R?YL-nUN z3NrG-cZa`%&QtnjUwRHyJE?_jjc0nd~MZ*_;+J*<3a$)rNdPiv^pe&z?ak zXXhb8Dp$0OpuXi_L$YeIHch21qef}dXUNbqseZOOIw_?&YW3-e3u8m=QD8Ie{leqj z@v@pTqtz|eix(}5+3$SvwcC9g^nezKUW0AA-)aBL2ijvWIUF55Why&&5TNmA z4grGRaqm~D5quNWMq%-aDV(q9^d9)MgxfO`WR z*1~$C$kC_3TbnDuU2o?=XksYv(ug5yK-zJH;U&N0!&h00*$1DuZ$gkT1T#zana1MS zKgiv2y|sS0y*T>hcjN+am`)4-c7HwynYs}er)&U~zT?)7Ts9h0u&mH*v4#@29=1%O}3zBwq$#(TB$j7QIaO0W$_S51ItlVZ6bd@F$1Y&OgAkv(KJK#DwdQUCxrk~@d(IfobBeSSdhkgv3M~}DNxBwNo|F! zRVdgYTv53vOP7a#A3zGu!C>`7;oB0Fv`DJJc?uxu6#j+@?KVXVH}Ed}sUKA6R{xe< z{P@S#c=m%($HlDP18vpFGcV~lk;At{`kLoT;IIB~j+Q&(;f*}!9ez~lhRNcK@AX-_ z5U6Xd--!%nK{6J8-pT{q0f z2=1Xage9EQr2VSa2^56r@Gx{KzK+^EAQ|{)M)z~ZN6E-dBg-Sz`1;2A9GWL<89VE` zo5CK(88MbIS-nSqv4QxsjP{DlVuJn^fyvX@+eztRH2{PA z6!jH72}G_9UC5k*fT1HWdS7Egf$T?+rc4E}ntJmHmvsB5UiH1yR(f(DXF^RKW z(qf8jMo6r3R>6o&h;mthGyb7)oD9VEd9%a|n=aI<|8!t{Hq(VC@Q^oe1VjxgUUtau z%UpKMPgu{@`C&UR15GqxRT|mOyT#)+y_5}PP+wTp5ASa>WfxM_hzvHH*dV+oWX`*X zvw3y)5<;RO)2HRN{y?s`E^>%9L6_h1d_bb>b7*Q_gk8ZkR&56sZ09{9RTn9@#G6av zP+T?>Xwj6rEH(XO@IJbOwiCf`Q4ry_#3I|f-L%$KS%(sxV6DlkR!KdKNpOGg&A=D! zgeV=8*C_#mLOtBS(dJYSuWcsEmmv9-9g(>MTIn4|`#k08U>nbbH~oKgYk4Yi2-V7q zl&?S5hqd*+?X%o$Ra zJkzYz;Sq#E<%>gHGv4W_Np3>shG5O2VN^rm*9^TT`XjXsGDkfk`CA*7}R< z&fywckKyDE8b}T!=5T~qMb0Q&=fWZo{)HVs=XI_p2nbG{G`oWE&sXTQE9P7essw=s5CWN%UILQ6gYYIt6#rLHR9G$x!>31X$NuSM?Py z=3<-+?Gtp{(`F0yz?y_r^S;ONvq3-a7UV#|)v{7(H+t`vHA*50DYIt!Mi-qHvc_J! 
diff --git a/openpype/resources/app_icons/celaction_remotel.png b/openpype/resources/app_icons/celaction_remotel.png
deleted file mode 100644
index 320e8173eb90df65e0363f8784e4f034da0c76b8..0000000000000000000000000000000000000000
Binary files a/openpype/resources/app_icons/celaction_remotel.png and /dev/null differ
{udim}>.{ext}", + "path": "{@folder}/{@file}" + }, "__dynamic_keys_labels__": { "maya2unreal": "Maya to Unreal", "simpleUnrealTextureHero": "Simple Unreal Texture - Hero", - "simpleUnrealTexture": "Simple Unreal Texture" + "simpleUnrealTexture": "Simple Unreal Texture", + "online": "online" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/celaction.json b/openpype/settings/defaults/project_settings/celaction.json index a4a321fb27..dbe5625f06 100644 --- a/openpype/settings/defaults/project_settings/celaction.json +++ b/openpype/settings/defaults/project_settings/celaction.json @@ -1,13 +1,9 @@ { "publish": { - "ExtractCelactionDeadline": { - "enabled": true, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10 + "CollectRenderPath": { + "output_extension": "png", + "anatomy_template_key_render_files": "render", + "anatomy_template_key_metadata": "render" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index a6e7b4a94a..6e1c0f3540 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -70,6 +70,16 @@ "department": "", "multiprocess": true }, + "CelactionSubmitDeadline": { + "enabled": true, + "deadline_department": "", + "deadline_priority": 50, + "deadline_pool": "", + "deadline_pool_secondary": "", + "deadline_group": "", + "deadline_chunk_size": 10, + "deadline_job_delay": "00:00:00:00" + },
"ProcessSubmittedJobOnFarm": { "enabled": true, "deadline_department": "", diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 7daa4afa79..89d7cf08b7 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -24,6 +24,10 @@ ], "skip_hosts_headless_publish": [] }, + "collect_comment_per_instance": { + "enabled": false, + "families": [] + }, "ValidateEditorialAssetName": { "enabled": true, "optional": false @@ -288,6 +292,17 @@ "task_types": [], "tasks": [], "template_name": "maya2unreal" + }, + { + "families": [ + "online" + ], + "hosts": [ + "traypublisher" + ], + "task_types": [], + "tasks": [], + "template_name": "online" } ] }, diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 78126283d0..0e2f6b8b62 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -179,7 +179,21 @@ "enabled": true, "defaults": [ "Main" - ] + ], + "expandProcedurals": false, + "motionBlur": true, + "motionBlurKeys": 2, + "motionBlurLength": 0.5, + "maskOptions": false, + "maskCamera": false, + "maskLight": false, + "maskShape": false, + "maskShader": false, + "maskOverride": false, + "maskDriver": false, + "maskFilter": false, + "maskColor_manager": false, + "maskOperator": false }, "CreateAssembly": { "enabled": true, @@ -258,6 +272,9 @@ "CollectFbxCamera": { "enabled": false }, + "CollectGLTF": { + "enabled": false + }, "ValidateInstanceInContext": { "enabled": true, "optional": true, @@ -1022,4 +1039,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 88b5a598cd..e03ce32030 100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -11,6 +11,11 @@ 255, 255, 255 + ], + "families_to_review": [ + "review", + "renderlayer", + "renderscene" ] }, "ValidateProjectSettings": { diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 03499a8567..936407a49b 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -114,6 +114,35 @@ } } }, + "3dsmax": { + "enabled": true, + "label": "3ds max", + "icon": "{}/app_icons/3dsmax.png", + "host_name": "max", + "environment": { + "ADSK_3DSMAX_STARTUPSCRIPTS_ADDON_DIR": "{OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup" + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "flame": { "enabled": true, "label": "Flame", @@ -1268,12 +1297,12 @@ "CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" }, "variants": { - "local": { + "current": { "enabled": true, - "variant_label": "Local", + "variant_label": "Current", "use_python_2": false, "executables": { - "windows": [], + "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], "darwin": [], "linux": [] }, 
diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index defe4aa1f0..c0c103ea10 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -152,6 +152,7 @@ class HostsEnumEntity(BaseEnumEntity): schema_types = ["hosts-enum"] all_host_names = [ + "max", "aftereffects", "blender", "celaction", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json index 500e5b2298..15d9350c84 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json @@ -14,45 +14,24 @@ { "type": "dict", "collapsible": true, - "checkbox_key": "enabled", - "key": "ExtractCelactionDeadline", - "label": "ExtractCelactionDeadline", + "key": "CollectRenderPath", + "label": "CollectRenderPath", "is_group": true, "children": [ { - "type": "boolean", - "key": "enabled", - "label": "Enabled" + "type": "text", + "key": "output_extension", + "label": "Output render file extension" }, { "type": "text", - "key": "deadline_department", - "label": "Deadline apartment" - }, - { - "type": "number", - "key": "deadline_priority", - "label": "Deadline priority" + "key": "anatomy_template_key_render_files", + "label": "Anatomy template key: render files" }, { "type": "text", - "key": "deadline_pool", - "label": "Deadline pool" - }, - { - "type": "text", - "key": "deadline_pool_secondary", - "label": "Deadline pool (secondary)" - }, - { - "type": "text", - "key": "deadline_group", - "label": "Deadline Group" - }, - { - "type": "number", - "key": "deadline_chunk_size", - "label": "Deadline Chunk size" + "key": "anatomy_template_key_metadata", + "label": "Anatomy template key: metadata job file" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index cd1741ba8b..69f81ed682 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -387,6 +387,56 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CelactionSubmitDeadline", + "label": "Celaction Submit Deadline", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "deadline_department", + "label": "Deadline apartment" + }, + { + "type": "number", + "key": "deadline_priority", + "label": "Deadline priority" + }, + { + "type": "text", + "key": "deadline_pool", + "label": "Deadline pool" + }, + { + "type": "text", + "key": "deadline_pool_secondary", + "label": "Deadline pool (secondary)" + }, + { + "type": "text", + "key": "deadline_group", + "label": "Deadline Group" + }, + { + "type": "number", + "key": "deadline_chunk_size", + "label": "Deadline Chunk size" + }, + { + "type": "text", + "key": "deadline_job_delay", + "label": "Delay job (timecode dd:hh:mm:ss)" + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json index 20fe5b0855..61342ef738 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -56,6 +56,18 @@ "key": "review_bg", "label": "Review BG color", "use_alpha": false + }, + { + "type": "enum", + "key": "families_to_review", + "label": "Families to review", + "multiselection": true, + "enum_items": [ + {"review": "review"}, + {"renderpass": "renderPass"}, + {"renderlayer": "renderLayer"}, + {"renderscene": "renderScene"} + ] } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json index a2a566da0e..3667c9d5d8 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json @@ -16,22 +16,26 @@ { "type": "number", "key": "frameStart", - "label": "Frame Start" + "label": "Frame Start", + "maximum": 999999999 }, { "type": "number", "key": "frameEnd", - "label": "Frame End" + "label": "Frame End", + "maximum": 999999999 }, { "type": "number", "key": "clipIn", - "label": "Clip In" + "label": "Clip In", + "maximum": 999999999 }, { "type": "number", "key": "clipOut", - "label": "Clip Out" + "label": "Clip Out", + "maximum": 999999999 }, { "type": "number", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 742437fbde..f2ada5fd8d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -60,6 +60,27 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "collect_comment_per_instance", + "label": "Collect comment per instance", + "checkbox_key": "enabled", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 231554d96e..e1a3082616 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -230,7 +230,98 @@ } ] }, - + { + "type": "dict", + "collapsible": true, + "key": "CreateAss", + "label": "Create Ass", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "boolean", + "key": "expandProcedurals", + "label": "Expand Procedurals" + }, + { + "type": "boolean", + "key": "motionBlur", + "label": "Motion Blur" + }, + { + "type": "number", + "key": "motionBlurKeys", + "label": "Motion Blur Keys", + "minimum": 0 + }, + { + "type": "number", + "key": "motionBlurLength", + "label": "Motion Blur Length", + "decimal": 3 + }, + { + "type": "boolean", + "key": "maskOptions", + "label": "Mask Options" + }, + { + "type": "boolean", + "key": "maskCamera", + "label": "Mask Camera" + }, + { + "type": 
"boolean", + "key": "maskLight", + "label": "Mask Light" + }, + { + "type": "boolean", + "key": "maskShape", + "label": "Mask Shape" + }, + { + "type": "boolean", + "key": "maskShader", + "label": "Mask Shader" + }, + { + "type": "boolean", + "key": "maskOverride", + "label": "Mask Override" + }, + { + "type": "boolean", + "key": "maskDriver", + "label": "Mask Driver" + }, + { + "type": "boolean", + "key": "maskFilter", + "label": "Mask Filter" + }, + { + "type": "boolean", + "key": "maskColor_manager", + "label": "Mask Color Manager" + }, + { + "type": "boolean", + "key": "maskOperator", + "label": "Mask Operator" + } + ] + }, { "type": "schema_template", "name": "template_create_plugin", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 2c6260db30..9aaff248ab 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -35,6 +35,20 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectGLTF", + "label": "Collect Assets for GLTF/GLB export", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, { "type": "splitter" }, @@ -62,7 +76,7 @@ } ] }, - { + { "type": "dict", "collapsible": true, "key": "ValidateFrameRange", diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json new file mode 100644 index 0000000000..f7c57298af --- /dev/null +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json @@ -0,0 +1,39 @@ +{ + "type": "dict", + "key": "3dsmax", + "label": "Autodesk 3ds Max", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "schema_template", + "name": "template_host_unchangables" + }, + { + "key": "environment", + "label": "Environment", + "type": "raw-json" + }, + { + "type": "dict-modifiable", + "key": "variants", + "collapsible_key": true, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } + } + ] +} diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json index 82be15c3b0..b104e3bb82 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json @@ -28,8 +28,8 @@ "name": "template_host_variant", "template_data": [ { - "app_variant_label": "Local", - "app_variant": "local" + "app_variant_label": "Current", + "app_variant": "current" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/schema_applications.json b/openpype/settings/entities/schemas/system_schema/schema_applications.json index 20be33320d..36c5811496 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_applications.json +++ b/openpype/settings/entities/schemas/system_schema/schema_applications.json @@ -9,6 +9,10 @@ "type": "schema", "name": "schema_maya" }, + { + "type": "schema", + "name": "schema_3dsmax" + 
}, { "type": "schema", "name": "schema_flame" diff --git a/openpype/style/style.css b/openpype/style/style.css index 887c044dae..a7a48cdb9d 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1126,6 +1126,10 @@ ValidationArtistMessage QLabel { background: transparent; } +CreateNextPageOverlay { + font-size: 32pt; +} + /* Settings - NOT USED YET - we need to define font family for settings UI */ diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index dc697b08a6..1ffb3d3799 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -6,6 +6,7 @@ from Qt import QtWidgets, QtCore from openpype.lib.attribute_definitions import ( AbtractAttrDef, UnknownDef, + HiddenDef, NumberDef, TextDef, EnumDef, @@ -22,6 +23,16 @@ from .files_widget import FilesWidget def create_widget_for_attr_def(attr_def, parent=None): + widget = _create_widget_for_attr_def(attr_def, parent) + if attr_def.hidden: + widget.setVisible(False) + + if attr_def.disabled: + widget.setEnabled(False) + return widget + + +def _create_widget_for_attr_def(attr_def, parent=None): if not isinstance(attr_def, AbtractAttrDef): raise TypeError("Unexpected type \"{}\" expected \"{}\"".format( str(type(attr_def)), AbtractAttrDef @@ -42,6 +53,9 @@ def create_widget_for_attr_def(attr_def, parent=None): if isinstance(attr_def, UnknownDef): return UnknownAttrWidget(attr_def, parent) + if isinstance(attr_def, HiddenDef): + return HiddenAttrWidget(attr_def, parent) + if isinstance(attr_def, FileDef): return FileAttrWidget(attr_def, parent) @@ -115,6 +129,10 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): self._current_keys.add(attr_def.key) widget = create_widget_for_attr_def(attr_def, self) + self._widgets.append(widget) + + if attr_def.hidden: + continue expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: @@ -133,7 +151,6 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): layout.addWidget( widget, row, col_num, 1, expand_cols ) - self._widgets.append(widget) row += 1 def set_value(self, value): @@ -459,6 +476,29 @@ class UnknownAttrWidget(_BaseAttrDefWidget): self._input_widget.setText(str_value) +class HiddenAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + self.setVisible(False) + self._value = None + self._multivalue = False + + def setVisible(self, visible): + if visible: + visible = False + super(HiddenAttrWidget, self).setVisible(visible) + + def current_value(self): + if self._multivalue: + raise ValueError("{} can't output for multivalue.".format( + self.__class__.__name__ + )) + return self._value + + def set_value(self, value, multivalue=False): + self._value = copy.deepcopy(value) + self._multivalue = multivalue + + class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): input_widget = FilesWidget( diff --git a/openpype/tools/creator/model.py b/openpype/tools/creator/model.py index d3d60b96f2..307993103b 100644 --- a/openpype/tools/creator/model.py +++ b/openpype/tools/creator/model.py @@ -36,7 +36,7 @@ class CreatorsModel(QtGui.QStandardItemModel): if not items: item = QtGui.QStandardItem("No registered families") item.setEnabled(False) - item.setData(QtCore.Qt.ItemIsEnabled, False) + item.setData(False, QtCore.Qt.ItemIsEnabled) items.append(item) self.invisibleRootItem().appendRows(items) diff --git a/openpype/tools/project_manager/project_manager/view.py b/openpype/tools/project_manager/project_manager/view.py index cca892ef72..8d1fe54e83 100644 --- 
a/openpype/tools/project_manager/project_manager/view.py +++ b/openpype/tools/project_manager/project_manager/view.py @@ -28,7 +28,7 @@ class NameDef: class NumberDef: def __init__(self, minimum=None, maximum=None, decimals=None): self.minimum = 0 if minimum is None else minimum - self.maximum = 999999 if maximum is None else maximum + self.maximum = 999999999 if maximum is None else maximum self.decimals = 0 if decimals is None else decimals diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index a02c69d5e0..042985b007 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -8,6 +8,7 @@ from .widgets import ( ResetBtn, ValidateBtn, PublishBtn, + CreateNextPageOverlay, ) from .help_widget import ( HelpButton, @@ -28,6 +29,7 @@ __all__ = ( "ResetBtn", "ValidateBtn", "PublishBtn", + "CreateNextPageOverlay", "HelpButton", "HelpDialog", diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 9fd2bf0824..09635d1a15 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -674,9 +674,16 @@ class InstanceCardView(AbstractInstanceView): instances_by_group[group_name] ) - self._update_ordered_group_nameS() + self._update_ordered_group_names() - def _update_ordered_group_nameS(self): + def has_items(self): + if self._convertor_items_group is not None: + return True + if self._widgets_by_group: + return True + return False + + def _update_ordered_group_names(self): ordered_group_names = [CONTEXT_GROUP] for idx in range(self._content_layout.count()): if idx > 0: diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 32d84862f0..1cdb4cdcdb 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -912,6 +912,13 @@ class InstanceListView(AbstractInstanceView): if not self._instance_view.isExpanded(proxy_index): self._instance_view.expand(proxy_index) + def has_items(self): + if self._convertor_group_widget is not None: + return True + if self._group_items: + return True + return False + def get_selected_items(self): """Get selected instance ids and context selection. diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index be3839b90b..b1aeda9cd4 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -195,6 +195,20 @@ class OverviewWidget(QtWidgets.QFrame): self._subset_views_widget.setMaximumWidth(view_width) self._change_anim.start() + def get_subset_views_geo(self): + parent = self._subset_views_widget.parent() + global_pos = parent.mapToGlobal(self._subset_views_widget.pos()) + return QtCore.QRect( + global_pos.x(), + global_pos.y(), + self._subset_views_widget.width(), + self._subset_views_widget.height() + ) + + def has_items(self): + view = self._subset_views_layout.currentWidget() + return view.has_items() + def _on_create_clicked(self): """Pass signal to parent widget which should care about changing state. 
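The hunks above give both instance views (card and list) a `has_items()` query and let `OverviewWidget` report the global geometry of its subset views. A minimal sketch of how a caller might use these new methods follows; the `overview_widget` name and the helper functions are illustrative only, not part of the patch:

```python
# Sketch only: assumes "overview_widget" is the OverviewWidget instance
# patched above and that a Qt binding (e.g. Qt.py / PySide2) is available.
from Qt import QtGui


def pick_initial_tab(overview_widget):
    # Prefer the publish tab when the views already contain instances or
    # convertor items, otherwise start on the create tab.
    return "publish" if overview_widget.has_items() else "create"


def cursor_is_over_subset_views(overview_widget):
    # get_subset_views_geo() returns a QRect in global coordinates, so it
    # can be tested directly against the global cursor position.
    global_pos = QtGui.QCursor.pos()
    return overview_widget.get_subset_views_geo().contains(global_pos)
```

The publisher window applies the same queries later in this patch, both to choose the initial tab after a reset and to decide when the create-overlay arrow should react to the cursor.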
diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py
index 84638a002c..d8ad19cfc0 100644
--- a/openpype/tools/publisher/widgets/tabs_widget.py
+++ b/openpype/tools/publisher/widgets/tabs_widget.py
@@ -54,6 +54,9 @@ class PublisherTabsWidget(QtWidgets.QFrame):
         self._buttons_by_identifier = {}

     def is_current_tab(self, identifier):
+        if isinstance(identifier, int):
+            identifier = self.get_tab_by_index(identifier)
+
         if isinstance(identifier, PublisherTabBtn):
             identifier = identifier.identifier
         return self._current_identifier == identifier
@@ -68,7 +71,16 @@
         self.set_current_tab(identifier)
         return button

+    def get_tab_by_index(self, index):
+        if 0 <= index < self._btns_layout.count():
+            item = self._btns_layout.itemAt(index)
+            return item.widget()
+        return None
+
     def set_current_tab(self, identifier):
+        if isinstance(identifier, int):
+            identifier = self.get_tab_by_index(identifier)
+
         if isinstance(identifier, PublisherTabBtn):
             identifier = identifier.identifier
diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py
index 8c483e8088..935a12bc73 100644
--- a/openpype/tools/publisher/widgets/validations_widget.py
+++ b/openpype/tools/publisher/widgets/validations_widget.py
@@ -511,7 +511,7 @@ class ValidationsWidget(QtWidgets.QFrame):
         )
         # After success publishing
         publish_started_widget = ValidationArtistMessage(
-            "Publishing went smoothly", self
+            "So far so good", self
         )
         # After success publishing
         publish_stop_ok_widget = ValidationArtistMessage(
diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py
index 447fd7bc12..4b9626154d 100644
--- a/openpype/tools/publisher/widgets/widgets.py
+++ b/openpype/tools/publisher/widgets/widgets.py
@@ -9,6 +9,7 @@ import collections
 from Qt import QtWidgets, QtCore, QtGui
 import qtawesome

+from openpype.lib.attribute_definitions import UnknownDef
 from openpype.tools.attribute_defs import create_widget_for_attr_def
 from openpype.tools import resources
 from openpype.tools.flickcharm import FlickCharm
@@ -305,6 +306,20 @@ class AbstractInstanceView(QtWidgets.QWidget):
             "{} Method 'refresh' is not implemented."
         ).format(self.__class__.__name__))

+    def has_items(self):
+        """View has at least one item.
+
+        This is more a question for the controller but is called from a
+        widget which probably should not use the controller.
+
+        Returns:
+            bool: There is at least one instance or conversion item.
+        """
+
+        raise NotImplementedError((
+            "{} Method 'has_items' is not implemented."
+        ).format(self.__class__.__name__))
+
     def get_selected_items(self):
         """Selected instances required for callbacks.

@@ -578,6 +593,11 @@ class TasksCombobox(QtWidgets.QComboBox):

         self._text = None

+        # Make sure combobox is extended horizontally
+        size_policy = self.sizePolicy()
+        size_policy.setHorizontalPolicy(size_policy.MinimumExpanding)
+        self.setSizePolicy(size_policy)
+
     def set_invalid_empty_task(self, invalid=True):
         self._proxy_model.set_filter_empty(invalid)
         if invalid:
@@ -1180,7 +1200,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget):
         """Set currently selected instances.

         Args:
-            instances(list): List of selected instances.
+            instances(List[CreatedInstance]): List of selected instances.
                 Empty instances tells that nothing or context is selected.
""" self._set_btns_visible(False) @@ -1303,6 +1323,13 @@ class CreatorAttrsWidget(QtWidgets.QWidget): else: widget.set_value(values, True) + widget.value_changed.connect(self._input_value_changed) + self._attr_def_id_to_instances[attr_def.id] = attr_instances + self._attr_def_id_to_attr_def[attr_def.id] = attr_def + + if attr_def.hidden: + continue + expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: expand_cols = 1 @@ -1321,13 +1348,8 @@ class CreatorAttrsWidget(QtWidgets.QWidget): content_layout.addWidget( widget, row, col_num, 1, expand_cols ) - row += 1 - widget.value_changed.connect(self._input_value_changed) - self._attr_def_id_to_instances[attr_def.id] = attr_instances - self._attr_def_id_to_attr_def[attr_def.id] = attr_def - self._scroll_area.setWidget(content_widget) self._content_widget = content_widget @@ -1421,8 +1443,17 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): widget = create_widget_for_attr_def( attr_def, content_widget ) - label = attr_def.label or attr_def.key - content_layout.addRow(label, widget) + hidden_widget = attr_def.hidden + # Hide unknown values of publish plugins + # - The keys in most of cases does not represent what would + # label represent + if isinstance(attr_def, UnknownDef): + widget.setVisible(False) + hidden_widget = True + + if not hidden_widget: + label = attr_def.label or attr_def.key + content_layout.addRow(label, widget) widget.value_changed.connect(self._input_value_changed) @@ -1614,6 +1645,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): instances(List[CreatedInstance]): List of currently selected instances. context_selected(bool): Is context selected. + convertor_identifiers(List[str]): Identifiers of convert items. """ all_valid = True @@ -1708,3 +1740,159 @@ class SubsetAttributesWidget(QtWidgets.QWidget): self._thumbnail_widget.setVisible(True) self._thumbnail_widget.set_current_thumbnails(thumbnail_paths) + + +class CreateNextPageOverlay(QtWidgets.QWidget): + clicked = QtCore.Signal() + + def __init__(self, parent): + super(CreateNextPageOverlay, self).__init__(parent) + self.setCursor(QtCore.Qt.PointingHandCursor) + self._arrow_color = ( + get_objected_colors("font").get_qcolor() + ) + self._bg_color = ( + get_objected_colors("bg-buttons").get_qcolor() + ) + + change_anim = QtCore.QVariantAnimation() + change_anim.setStartValue(0.0) + change_anim.setEndValue(1.0) + change_anim.setDuration(200) + change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) + + change_anim.valueChanged.connect(self._on_anim) + + self._change_anim = change_anim + self._is_visible = None + self._anim_value = 0.0 + self._increasing = False + self._under_mouse = None + self._handle_show_on_own = True + self._mouse_pressed = False + self.set_visible(True) + + def set_increasing(self, increasing): + if self._increasing is increasing: + return + self._increasing = increasing + if increasing: + self._change_anim.setDirection(self._change_anim.Forward) + else: + self._change_anim.setDirection(self._change_anim.Backward) + + if self._change_anim.state() != self._change_anim.Running: + self._change_anim.start() + + def set_visible(self, visible): + if self._is_visible is visible: + return + + self._is_visible = visible + if not visible: + self.set_increasing(False) + if not self._is_anim_finished(): + return + + self.setVisible(visible) + self._check_anim_timer() + + def _is_anim_finished(self): + if self._increasing: + return self._anim_value == 1.0 + return self._anim_value == 0.0 + + def _on_anim(self, value): + 
self._check_anim_timer() + + self._anim_value = value + + self.update() + + if not self._is_anim_finished(): + return + + if not self._is_visible: + self.setVisible(False) + + def set_under_mouse(self, under_mouse): + if self._under_mouse is under_mouse: + return + + self._under_mouse = under_mouse + self.set_increasing(under_mouse) + + def _is_under_mouse(self): + mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) + under_mouse = self.rect().contains(mouse_pos) + return under_mouse + + def _check_anim_timer(self): + if not self.isVisible(): + return + + self.set_increasing(self._under_mouse) + + def mousePressEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self._mouse_pressed = True + super(CreateNextPageOverlay, self).mousePressEvent(event) + + def mouseReleaseEvent(self, event): + if self._mouse_pressed: + self._mouse_pressed = False + if self.rect().contains(event.pos()): + self.clicked.emit() + + super(CreateNextPageOverlay, self).mouseReleaseEvent(event) + + def paintEvent(self, event): + painter = QtGui.QPainter() + painter.begin(self) + if self._anim_value == 0.0: + painter.end() + return + + painter.setClipRect(event.rect()) + painter.setRenderHints( + painter.Antialiasing + | painter.SmoothPixmapTransform + ) + + painter.setPen(QtCore.Qt.NoPen) + + rect = QtCore.QRect(self.rect()) + rect_width = rect.width() + rect_height = rect.height() + radius = rect_width * 0.2 + + x_offset = 0 + y_offset = 0 + if self._anim_value != 1.0: + x_offset += rect_width - (rect_width * self._anim_value) + + arrow_height = rect_height * 0.4 + arrow_half_height = arrow_height * 0.5 + arrow_x_start = x_offset + ((rect_width - arrow_half_height) * 0.5) + arrow_x_end = arrow_x_start + arrow_half_height + center_y = rect.center().y() + + painter.setBrush(self._bg_color) + painter.drawRoundedRect( + x_offset, y_offset, + rect_width + radius, rect_height, + radius, radius + ) + + src_arrow_path = QtGui.QPainterPath() + src_arrow_path.moveTo(arrow_x_start, center_y - arrow_half_height) + src_arrow_path.lineTo(arrow_x_end, center_y) + src_arrow_path.lineTo(arrow_x_start, center_y + arrow_half_height) + + arrow_stroker = QtGui.QPainterPathStroker() + arrow_stroker.setWidth(min(4, arrow_half_height * 0.2)) + arrow_path = arrow_stroker.createStroke(src_arrow_path) + + painter.fillPath(arrow_path, self._arrow_color) + + painter.end() diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 5875f7aa68..0f7fd2c7e3 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -29,6 +29,8 @@ from .widgets import ( HelpButton, HelpDialog, + + CreateNextPageOverlay, ) @@ -154,7 +156,7 @@ class PublisherWindow(QtWidgets.QDialog): footer_layout.addWidget(footer_bottom_widget, 0) # Content - # - wrap stacked widget under one more widget to be able propagate + # - wrap stacked widget under one more widget to be able to propagate # margins (QStackedLayout can't have margins) content_widget = QtWidgets.QWidget(under_publish_widget) @@ -225,8 +227,8 @@ class PublisherWindow(QtWidgets.QDialog): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) - # Timer started on show -> connected to timer counter - # - helps to deffer on show logic by 3 event loops + create_overlay_button = CreateNextPageOverlay(self) + show_timer = QtCore.QTimer() show_timer.setInterval(1) show_timer.timeout.connect(self._on_show_timer) @@ -255,6 +257,9 @@ class PublisherWindow(QtWidgets.QDialog): 
publish_btn.clicked.connect(self._on_publish_clicked) publish_frame.details_page_requested.connect(self._go_to_details_tab) + create_overlay_button.clicked.connect( + self._on_create_overlay_button_click + ) controller.event_system.add_callback( "instances.refresh.finished", self._on_instances_refresh @@ -262,6 +267,9 @@ class PublisherWindow(QtWidgets.QDialog): controller.event_system.add_callback( "publish.reset.finished", self._on_publish_reset ) + controller.event_system.add_callback( + "controller.reset.finished", self._on_controller_reset + ) controller.event_system.add_callback( "publish.process.started", self._on_publish_start ) @@ -310,6 +318,7 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_overlay = publish_overlay self._publish_frame = publish_frame + self._content_widget = content_widget self._content_stacked_layout = content_stacked_layout self._overview_widget = overview_widget @@ -331,25 +340,39 @@ class PublisherWindow(QtWidgets.QDialog): self._controller = controller self._first_show = True + self._first_reset = True # This is a little bit confusing but 'reset_on_first_show' is too long - # forin init + # for init self._reset_on_first_show = reset_on_show self._reset_on_show = True self._publish_frame_visible = None + self._tab_on_reset = None self._error_messages_to_show = collections.deque() self._errors_dialog_message_timer = errors_dialog_message_timer self._set_publish_visibility(False) + self._create_overlay_button = create_overlay_button + self._app_event_listener_installed = False + self._show_timer = show_timer self._show_counter = 0 + self._window_is_visible = False @property def controller(self): return self._controller + def make_sure_is_visible(self): + if self._window_is_visible: + self.setWindowState(QtCore.Qt.ActiveWindow) + + else: + self.show() + def showEvent(self, event): + self._window_is_visible = True super(PublisherWindow, self).showEvent(event) if self._first_show: self._first_show = False @@ -360,6 +383,38 @@ class PublisherWindow(QtWidgets.QDialog): def resizeEvent(self, event): super(PublisherWindow, self).resizeEvent(event) self._update_publish_frame_rect() + self._update_create_overlay_size() + + def closeEvent(self, event): + self._window_is_visible = False + self._uninstall_app_event_listener() + self.save_changes() + self._reset_on_show = True + self._controller.clear_thumbnail_temp_dir_path() + super(PublisherWindow, self).closeEvent(event) + + def leaveEvent(self, event): + super(PublisherWindow, self).leaveEvent(event) + self._update_create_overlay_visibility() + + def eventFilter(self, obj, event): + if event.type() == QtCore.QEvent.MouseMove: + self._update_create_overlay_visibility(event.globalPos()) + return super(PublisherWindow, self).eventFilter(obj, event) + + def _install_app_event_listener(self): + if self._app_event_listener_installed: + return + self._app_event_listener_installed = True + app = QtWidgets.QApplication.instance() + app.installEventFilter(self) + + def _uninstall_app_event_listener(self): + if not self._app_event_listener_installed: + return + self._app_event_listener_installed = False + app = QtWidgets.QApplication.instance() + app.removeEventFilter(self) def keyPressEvent(self, event): # Ignore escape button to close window @@ -390,17 +445,16 @@ class PublisherWindow(QtWidgets.QDialog): # Reset counter when done for next show event self._show_counter = 0 + self._update_create_overlay_size() + self._update_create_overlay_visibility() + if self._is_on_create_tab(): + 
self._install_app_event_listener() + # Reset if requested if self._reset_on_show: self._reset_on_show = False self.reset() - def closeEvent(self, event): - self.save_changes() - self._reset_on_show = True - self._controller.clear_thumbnail_temp_dir_path() - super(PublisherWindow, self).closeEvent(event) - def save_changes(self): self._controller.save_changes() @@ -410,8 +464,21 @@ class PublisherWindow(QtWidgets.QDialog): def set_context_label(self, label): self._context_label.setText(label) + def set_tab_on_reset(self, tab): + """Define tab that will be selected on window show. + + This is single use method, when publisher window is showed the value is + unset and not used on next show. + + Args: + tab (Union[int, Literal[create, publish, details, report]]: Index + or name of tab which will be selected on show (after reset). + """ + + self._tab_on_reset = tab + def _update_publish_details_widget(self, force=False): - if not force and self._tabs_widget.current_tab() != "details": + if not force and not self._is_on_details_tab(): return report_data = self.controller.get_publish_report() @@ -441,6 +508,10 @@ class PublisherWindow(QtWidgets.QDialog): self._help_dialog.width(), self._help_dialog.height() ) + def _on_create_overlay_button_click(self): + self._create_overlay_button.set_under_mouse(False) + self._go_to_publish_tab() + def _on_tab_change(self, old_tab, new_tab): if old_tab == "details": self._publish_details_widget.close_details_popup() @@ -465,20 +536,53 @@ class PublisherWindow(QtWidgets.QDialog): self._report_widget ) + is_create = new_tab == "create" + if is_create: + self._install_app_event_listener() + else: + self._uninstall_app_event_listener() + self._create_overlay_button.set_visible(is_create) + def _on_context_or_active_change(self): self._validate_create_instances() def _on_create_request(self): self._go_to_create_tab() + def _set_current_tab(self, identifier): + self._tabs_widget.set_current_tab(identifier) + + def set_current_tab(self, tab): + self._set_current_tab(tab) + if not self._window_is_visible: + self.set_tab_on_reset(tab) + + def _is_current_tab(self, identifier): + return self._tabs_widget.is_current_tab(identifier) + def _go_to_create_tab(self): - self._tabs_widget.set_current_tab("create") + self._set_current_tab("create") + + def _go_to_publish_tab(self): + self._set_current_tab("publish") def _go_to_details_tab(self): - self._tabs_widget.set_current_tab("details") + self._set_current_tab("details") def _go_to_report_tab(self): - self._tabs_widget.set_current_tab("report") + self._set_current_tab("report") + + def _is_on_create_tab(self): + return self._is_current_tab("create") + + def _is_on_publish_tab(self): + return self._is_current_tab("publish") + + def _is_on_details_tab(self): + return self._is_current_tab("details") + + def _is_on_report_tab(self): + return self._is_current_tab("report") def _set_publish_overlay_visibility(self, visible): if visible: @@ -530,11 +634,33 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) self._set_footer_enabled(False) self._update_publish_details_widget() - if ( - not self._tabs_widget.is_current_tab("create") - and not self._tabs_widget.is_current_tab("publish") + + def _on_controller_reset(self): + self._first_reset, first_reset = False, self._first_reset + if self._tab_on_reset is not None: + self._tab_on_reset, new_tab = None, self._tab_on_reset + self._set_current_tab(new_tab) + return + + # On first reset change tab based on available items + # - if there is at least one 
instance the tab is changed to 'publish' + # otherwise 'create' is used + # - this happens only on first show + if first_reset: + if self._overview_widget.has_items(): + self._go_to_publish_tab() + else: + self._go_to_create_tab() + + elif ( + not self._is_on_create_tab() + and not self._is_on_publish_tab() ): - self._tabs_widget.set_current_tab("publish") + # If current tab is not 'Create' or 'Publish' go to 'Publish' + # - this can happen when publishing started and was reset + # at that moment it doesn't make sense to stay at publish + # specific tabs. + self._go_to_publish_tab() def _on_publish_start(self): self._create_tab.setEnabled(False) @@ -550,8 +676,8 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_details_widget.close_details_popup() - if self._tabs_widget.is_current_tab(self._create_tab): - self._tabs_widget.set_current_tab("publish") + if self._is_on_create_tab(): + self._go_to_publish_tab() def _on_publish_validated_change(self, event): if event["value"]: @@ -564,7 +690,7 @@ class PublisherWindow(QtWidgets.QDialog): publish_has_crashed = self._controller.publish_has_crashed validate_enabled = not publish_has_crashed publish_enabled = not publish_has_crashed - if self._tabs_widget.is_current_tab("publish"): + if self._is_on_publish_tab(): self._go_to_report_tab() if validate_enabled: @@ -668,6 +794,36 @@ class PublisherWindow(QtWidgets.QDialog): event["title"], new_failed_info, "Convertor:" ) + def _update_create_overlay_size(self): + metrics = self._create_overlay_button.fontMetrics() + height = int(metrics.height()) + width = int(height * 0.7) + end_pos_x = self.width() + start_pos_x = end_pos_x - width + + center = self._content_widget.parent().mapTo( + self, + self._content_widget.rect().center() + ) + pos_y = center.y() - (height * 0.5) + + self._create_overlay_button.setGeometry( + start_pos_x, pos_y, + width, height + ) + + def _update_create_overlay_visibility(self, global_pos=None): + if global_pos is None: + global_pos = QtGui.QCursor.pos() + + under_mouse = False + my_pos = self.mapFromGlobal(global_pos) + if self.rect().contains(my_pos): + widget_geo = self._overview_widget.get_subset_views_geo() + widget_x = widget_geo.left() + (widget_geo.width() * 0.5) + under_mouse = widget_x < global_pos.x() + self._create_overlay_button.set_under_mouse(under_mouse) + class ErrorsMessageBox(ErrorMessageBox): def __init__(self, error_title, failed_info, message_start, parent): diff --git a/openpype/tools/settings/settings/constants.py b/openpype/tools/settings/settings/constants.py index d98d18c8bf..23526e4de9 100644 --- a/openpype/tools/settings/settings/constants.py +++ b/openpype/tools/settings/settings/constants.py @@ -24,7 +24,6 @@ __all__ = ( "SETTINGS_PATH_KEY", "ROOT_KEY", - "SETTINGS_PATH_KEY", "VALUE_KEY", "SAVE_TIME_KEY", "PROJECT_NAME_KEY", diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index f8a8273b26..18c2b27678 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -178,7 +178,7 @@ class DropDataFrame(QtWidgets.QFrame): paths = self._get_all_paths(in_paths) collectionable_paths = [] non_collectionable_paths = [] - for path in in_paths: + for path in paths: ext = os.path.splitext(path)[1] if ext in self.image_extensions or ext in self.sequence_types: collectionable_paths.append(path) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py 
index 046dcbdf6a..e8593a8ae2 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -285,14 +285,12 @@ class HostToolsHelper: return self._publisher_tool - def show_publisher_tool(self, parent=None, controller=None): + def show_publisher_tool(self, parent=None, controller=None, tab=None): with qt_app_context(): - dialog = self.get_publisher_tool(parent, controller) - - dialog.show() - dialog.raise_() - dialog.activateWindow() - dialog.showNormal() + window = self.get_publisher_tool(parent, controller) + if tab: + window.set_current_tab(tab) + window.make_sure_is_visible() def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. @@ -446,8 +444,8 @@ def show_publish(parent=None): _SingletonPoint.show_tool_by_name("publish", parent) -def show_publisher(parent=None): - _SingletonPoint.show_tool_by_name("publisher", parent) +def show_publisher(parent=None, **kwargs): + _SingletonPoint.show_tool_by_name("publisher", parent, **kwargs) def show_experimental_tools_dialog(parent=None): diff --git a/openpype/vendor/python/python_2/secrets/LICENSE b/openpype/vendor/python/python_2/secrets/LICENSE new file mode 100644 index 0000000000..d3211e4d9f --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Scaleway + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/openpype/vendor/python/python_2/secrets/__init__.py b/openpype/vendor/python/python_2/secrets/__init__.py new file mode 100644 index 0000000000..c29ee61be1 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + + +__version__ = "1.0.6" + +# Emulates __all__ for Python2 +from .secrets import ( + choice, + randbelow, + randbits, + SystemRandom, + token_bytes, + token_hex, + token_urlsafe, + compare_digest +) diff --git a/openpype/vendor/python/python_2/secrets/secrets.py b/openpype/vendor/python/python_2/secrets/secrets.py new file mode 100644 index 0000000000..967d2862d9 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/secrets.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +"""Generate cryptographically strong pseudo-random numbers suitable for + +managing secrets such as account authentication, tokens, and similar. + + +See PEP 506 for more information. 
+ +https://www.python.org/dev/peps/pep-0506/ + + +""" + + +__all__ = ['choice', 'randbelow', 'randbits', 'SystemRandom', + + 'token_bytes', 'token_hex', 'token_urlsafe', + + 'compare_digest', + + ] + +import os +import sys +from random import SystemRandom + +import base64 + +import binascii + + +# hmac.compare_digest did appear in python 2.7.7 +if sys.version_info >= (2, 7, 7): + from hmac import compare_digest +else: + # If we use an older python version, we will define an equivalent method + def compare_digest(a, b): + """Compatibility compare_digest method for python < 2.7. + This method is NOT cryptographically secure and may be subject to + timing attacks, see https://docs.python.org/2/library/hmac.html + """ + return a == b + + +_sysrand = SystemRandom() + + +randbits = _sysrand.getrandbits + +choice = _sysrand.choice + + +def randbelow(exclusive_upper_bound): + + """Return a random int in the range [0, n).""" + + if exclusive_upper_bound <= 0: + + raise ValueError("Upper bound must be positive.") + + return _sysrand._randbelow(exclusive_upper_bound) + + +DEFAULT_ENTROPY = 32 # number of bytes to return by default + + +def token_bytes(nbytes=None): + + """Return a random byte string containing *nbytes* bytes. + + + If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_bytes(16) #doctest:+SKIP + + b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' + + + """ + + if nbytes is None: + + nbytes = DEFAULT_ENTROPY + + return os.urandom(nbytes) + + +def token_hex(nbytes=None): + + """Return a random text string, in hexadecimal. + + + The string has *nbytes* random bytes, each byte converted to two + + hex digits. If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_hex(16) #doctest:+SKIP + + 'f9bf78b9a18ce6d46a0cd2b0b86df9da' + + + """ + + return binascii.hexlify(token_bytes(nbytes)).decode('ascii') + + +def token_urlsafe(nbytes=None): + + """Return a random URL-safe text string, in Base64 encoding. + + + The string has *nbytes* random bytes. If *nbytes* is ``None`` + + or not supplied, a reasonable default is used. + + + >>> token_urlsafe(16) #doctest:+SKIP + + 'Drmhze6EPcv0fN_81Bj-nA' + + + """ + + tok = token_bytes(nbytes) + + return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii') diff --git a/openpype/version.py b/openpype/version.py index 268f33083a..443c76544b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.5" +__version__ = "3.14.9-nightly.2" diff --git a/setup.cfg b/setup.cfg index 0a9664033d..10cca3eb3f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,7 +8,8 @@ exclude = docs, */vendor, website, - openpype/vendor + openpype/vendor, + *deadline/repository/custom/plugins max-complexity = 30 diff --git a/tests/README.md b/tests/README.md index 69828cdbc2..d36b6534f8 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,5 +1,15 @@ Automatic tests for OpenPype ============================ + +Requirements: +============ +Tests are recreating fresh DB for each run, so `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and on Path. 
+ +You can find intallers here: https://www.mongodb.com/docs/database-tools/installation/installation/ + +You can test that `mongorestore` is available by running this in console, or cmd: +```mongorestore --version``` + Structure: - integration - end to end tests, slow (see README.md in the integration folder for more info) - openpype/modules/MODULE_NAME - structure follow directory structure in code base diff --git a/tests/conftest.py b/tests/conftest.py index aa850be1a6..7b58b0314d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,3 +43,15 @@ def app_variant(request): @pytest.fixture(scope="module") def timeout(request): return request.config.getoption("--timeout") + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_makereport(item, call): + # execute all other hooks to obtain the report object + outcome = yield + rep = outcome.get_result() + + # set a report attribute for each phase of a call, which can + # be "setup", "call", "teardown" + + setattr(item, "rep_" + rep.when, rep) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 9fffc6073d..ffad33d13c 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class AfterEffectsTestClass(HostFixtures): +class AEHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. @@ -15,15 +18,15 @@ class AfterEffectsTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.aep") - dest_folder = os.path.join(download_test_data, + "test_project_test_asset_test_task_v001.aep") + dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, "work", self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.aep") + "test_project_test_asset_test_task_v001.aep") shutil.copy(src_path, dest_path) yield dest_path @@ -32,3 +35,12 @@ class AfterEffectsTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + # skip folder that contain "Logs", these come only from Deadline + return ["Logs", "Auto-Save"] + + +class AELocalPublishTestClass(AEHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py similarity index 58% rename from tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py rename to tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py index 4925cbd2d7..5d0c15d63a 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -1,14 +1,16 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class 
TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects + Uses old Pyblish schema of created instances. + Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. @@ -27,15 +29,15 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): PERSIST = False TEST_FILES = [ - ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", - "test_aftereffects_publish.zip", + ("1jqI_uG2NusKFvZZF7C0ScHjxFJrlc9F-", + "test_aftereffects_publish_legacy.zip", "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -49,23 +51,37 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py deleted file mode 100644 index c882e0f9b2..0000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ /dev/null @@ -1,64 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AfterEffectsTestClass): - """Basic test case for publishing in AfterEffects - - Should publish 5 frames - """ - PERSIST = True - - TEST_FILES = [ - ("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT", - "test_aftereffects_publish_multiframe.zip", - "") - ] - - APP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - - failures.append( - DBAssert.count_of_types(dbcon, 
"subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTestTaskDefault", - "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index f3a438c065..ab402f36e0 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class MayaTestClass(HostFixtures): +class MayaHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. @@ -15,7 +18,7 @@ class MayaTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -23,7 +26,7 @@ class MayaTestClass(HostFixtures): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") shutil.copy(src_path, dest_path) yield dest_path @@ -39,3 +42,11 @@ class MayaTestClass(HostFixtures): "{}{}{}".format(startup_path, os.pathsep, original_pythonpath)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + + +class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 68b0564428..b7ee228aae 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,7 +1,8 @@ -from tests.integration.hosts.maya.lib import MayaTestClass +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass -class TestPublishInMaya(MayaTestClass): +class TestPublishInMaya(MayaLocalPublishTestClass): """Basic test case for publishing in Maya Shouldnt be running standalone only via 'runtests' pype command! (??) 
@@ -28,7 +29,7 @@ class TestPublishInMaya(MayaTestClass): ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") ] - APP = "maya" + APP_GROUP = "maya" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" @@ -37,33 +38,41 @@ class TestPublishInMaya(MayaTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) - assert 11 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" + failures.append(DBAssert.count_of_types(dbcon, "representation", 5)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index d3c3d7ba81..96daec7427 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -1,17 +1,20 @@ import os import pytest -import shutil +import re -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class NukeTestClass(HostFixtures): +class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk" + source_file_name = "test_project_test_asset_test_task_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", @@ -27,7 +30,16 @@ class NukeTestClass(HostFixtures): dest_path = os.path.join(dest_folder, source_file_name) - shutil.copy(src_path, dest_path) + # rewrite old root with temporary file + # TODO - using only C:/projects seems wrong - but where to get root ? 
+ replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE) + with open(src_path, "r") as fp: + updated = fp.read() + updated = replace_pattern.sub(output_folder_url.replace("\\", '/'), + updated) + + with open(dest_path, "w") as fp: + fp.write(updated) yield dest_path @@ -41,4 +53,12 @@ class NukeTestClass(HostFixtures): monkeypatch_session.setenv("NUKE_PATH", "{}{}{}".format(startup_path, os.pathsep, - original_nuke_path)) \ No newline at end of file + original_nuke_path)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield ["renders"] + + +class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 884160e0b5..f84f13fa20 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -1,17 +1,25 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.nuke.lib import NukeTestClass +from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass log = logging.getLogger("test_publish_in_nuke") -class TestPublishInNuke(NukeTestClass): +class TestPublishInNuke(NukeLocalPublishTestClass): """Basic test case for publishing in Nuke Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects/test_project/test_asset/test_task`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. Then checks content of DB (if subset, version, representations were @@ -20,7 +28,8 @@ class TestPublishInNuke(NukeTestClass): How to run: (in cmd with activated {OPENPYPE_ROOT}/.venv) - {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501 + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 To check log/errors from launched app's publish process keep PERSIST to True and check `test_openpype.logs` collection. 
@@ -30,14 +39,14 @@ class TestPublishInNuke(NukeTestClass): ("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "") ] - APP = "nuke" + APP_GROUP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 50 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - PERSIST = True # True - keep test_db, test_openpype, outputted test files + PERSIST = False # True - keep test_db, test_openpype, outputted test files TEST_DATA_FOLDER = None def test_db_asserts(self, dbcon, publish_finished): @@ -52,7 +61,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -61,7 +70,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 16ef2d3ae6..9d51a11c06 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest +) -class PhotoshopTestClass(HostFixtures): +class PhotoshopTestClass(HostFixtures, PublishTest): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -32,3 +35,7 @@ class PhotoshopTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 5387bbe51e..4aaf43234d 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass): ("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "") ] - APP = "photoshop" + APP_GROUP = "photoshop" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -72,7 +72,7 @@ class TestPublishInPhotoshop(PhotoshopTestClass): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 8)) + DBAssert.count_of_types(dbcon, "representation", 6)) additional_args = {"context.subset": "imageMainForeground", "context.ext": "png"} diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b181055012..82e741cc3b 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,9 +118,8 @@ class DBHandler: "Run with overwrite=True") else: if collection: - coll = self.client[db_name_out].get(collection) - if coll: - coll.drop() + if collection in self.client[db_name_out].list_collection_names(): # noqa + self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) @@ -133,7 +132,11 @@ class DBHandler: db_name=db_name, db_name_out=db_name_out, collection=collection) print("mongorestore query:: {}".format(query)) - subprocess.run(query) + try: + subprocess.run(query) + except FileNotFoundError: + raise RuntimeError("'mongorestore' utility must be on path." 
+ "Please install it.") def teardown(self, db_name): """Drops 'db_name' if exists.""" @@ -231,13 +234,15 @@ class DBHandler: # Examples # handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") -# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa + +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa +# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") -# handler.setup_from_sql("test_db", "c:\\projects\\sql", +# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql", # collection="test_project", # drop=False, mode="upsert") diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 78a9f81095..82cc321ae8 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,6 +8,7 @@ import tempfile import shutil import glob import platform +import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler @@ -36,9 +37,9 @@ class ModuleUnitTest(BaseTest): PERSIST = False # True to not purge temporary folder nor test DB TEST_OPENPYPE_MONGO = "mongodb://localhost:27017" - TEST_DB_NAME = "test_db" + TEST_DB_NAME = "avalon_tests" TEST_PROJECT_NAME = "test_project" - TEST_OPENPYPE_NAME = "test_openpype" + TEST_OPENPYPE_NAME = "openpype_tests" TEST_FILES = [] @@ -57,7 +58,7 @@ class ModuleUnitTest(BaseTest): m.undo() @pytest.fixture(scope="module") - def download_test_data(self, test_data_folder, persist=False): + def download_test_data(self, test_data_folder, persist, request): test_data_folder = test_data_folder or self.TEST_DATA_FOLDER if test_data_folder: print("Using existing folder {}".format(test_data_folder)) @@ -78,7 +79,8 @@ class ModuleUnitTest(BaseTest): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST + persist = (persist or self.PERSIST or + self.is_test_failed(request)) if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -125,7 +127,8 @@ class ModuleUnitTest(BaseTest): monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data) @pytest.fixture(scope="module") - def db_setup(self, download_test_data, env_var, monkeypatch_session): + def db_setup(self, download_test_data, env_var, monkeypatch_session, + request): """Restore prepared MongoDB dumps into selected DB.""" backup_dir = os.path.join(download_test_data, "input", "dumps") @@ -135,13 +138,14 @@ class ModuleUnitTest(BaseTest): overwrite=True, db_name_out=self.TEST_DB_NAME) - db_handler.setup_from_dump("openpype", backup_dir, + db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir, overwrite=True, db_name_out=self.TEST_OPENPYPE_NAME) yield db_handler - if not self.PERSIST: + persist = self.PERSIST or self.is_test_failed(request) + if not persist: db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -166,6 
+170,13 @@ class ModuleUnitTest(BaseTest): mongo_client = OpenPypeMongoConnection.get_mongo_client() yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"] + def is_test_failed(self, request): + # if request.node doesn't have rep_call, something failed + try: + return request.node.rep_call.failed + except AttributeError: + return True + class PublishTest(ModuleUnitTest): """Test class for publishing in hosts. @@ -188,7 +199,7 @@ class PublishTest(ModuleUnitTest): TODO: implement test on file size, file content """ - APP = "" + APP_GROUP = "" TIMEOUT = 120 # publish timeout @@ -210,10 +221,10 @@ class PublishTest(ModuleUnitTest): if not app_variant: variant = ( application_manager.find_latest_available_variant_for_group( - self.APP)) + self.APP_GROUP)) app_variant = variant.name - yield "{}/{}".format(self.APP, app_variant) + yield "{}/{}".format(self.APP_GROUP, app_variant) @pytest.fixture(scope="module") def output_folder_url(self, download_test_data): @@ -310,7 +321,8 @@ class PublishTest(ModuleUnitTest): yield True def test_folder_structure_same(self, dbcon, publish_finished, - download_test_data, output_folder_url): + download_test_data, output_folder_url, + skip_compare_folders): """Check if expected and published subfolders contain same files. Compares only presence, not size nor content! @@ -328,12 +340,33 @@ class PublishTest(ModuleUnitTest): glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format( - "\n".join(sorted(not_matched))) + filtered_published = self._filter_files(published, + skip_compare_folders) + + # filter out temp files also in expected + # could be polluted by accident by copying 'output' to zip file + filtered_expected = self._filter_files(expected, skip_compare_folders) + + not_mtched = filtered_expected.symmetric_difference(filtered_published) + if not_mtched: + raise AssertionError("Missing {} files".format( + "\n".join(sorted(not_mtched)))) + + def _filter_files(self, source_files, skip_compare_folders): + """Filter list of files according to regex pattern.""" + filtered = set() + for file_path in source_files: + if skip_compare_folders: + if not any([re.search(val, file_path) + for val in skip_compare_folders]): + filtered.add(file_path) + else: + filtered.add(file_path) + + return filtered -class HostFixtures(PublishTest): +class HostFixtures(): """Host specific fixtures. 
Should be implemented once per host.""" @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): @@ -344,3 +377,8 @@ class HostFixtures(PublishTest): def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" raise NotImplementedError + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + """Use list of regexs to filter out published folders from comparing""" + raise NotImplementedError diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip index 0faab86b37d5c7d1224e8a92cca766ed80536718..e22b9acdbdbcd7312776e33918ac2a6a9211dab3 100644 GIT binary patch delta 3519 [binary delta data omitted]
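In tests/lib/testing_classes.py above, download_test_data and db_setup now keep the temporary folder and test databases around whenever is_test_failed() reports a failure, and that helper relies on request.node.rep_call. Pytest only puts rep_call on the test item when a hookwrapper stores the phase reports there; such a hook is not visible in these hunks, so the conftest.py sketch below is an assumption about how it is typically wired up rather than code from this patch:

import pytest

@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    # let pytest build the report for the current phase, then keep it on
    # the item as rep_setup / rep_call / rep_teardown
    outcome = yield
    report = outcome.get_result()
    setattr(item, "rep_" + report.when, report)

With a hook like this in place, module-scoped fixtures can check request.node.rep_call.failed during teardown, which is what is_test_failed() guards with its AttributeError fallback.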
- v3 = OpenPypeVersion(1, 2, 3, staging=True) - assert str(v3) == "1.2.3+staging" + v3 = OpenPypeVersion(1, 2, 3) + assert str(v3) == "1.2.3" - v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1") - assert str(v4) == "1.2.3-rc.1+staging" + v4 = OpenPypeVersion(1, 2, 3, prerelease="rc.1") + assert str(v4) == "1.2.3-rc.1" assert v3 > v4 assert v1 > v4 assert v4 <
OpenPypeVersion(1, 2, 3, prerelease="rc.1") @@ -73,7 +73,7 @@ def test_openpype_version(printer): OpenPypeVersion(4, 8, 10), OpenPypeVersion(4, 8, 20), OpenPypeVersion(4, 8, 9), - OpenPypeVersion(1, 2, 3, staging=True), + OpenPypeVersion(1, 2, 3), OpenPypeVersion(1, 2, 3, build="foo") ] res = sorted(sort_versions) @@ -104,27 +104,26 @@ def test_openpype_version(printer): with pytest.raises(ValueError): _ = OpenPypeVersion(version="booobaa") - v11 = OpenPypeVersion(version="4.6.7-foo+staging") + v11 = OpenPypeVersion(version="4.6.7-foo") assert v11.major == 4 assert v11.minor == 6 assert v11.patch == 7 - assert v11.staging is True assert v11.prerelease == "foo" def test_get_main_version(): - ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo") + ver = OpenPypeVersion(1, 2, 3, prerelease="foo") assert ver.get_main_version() == "1.2.3" def test_get_version_path_from_list(): versions = [ OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')), - OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")), + OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")), OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo")) ] path = BootstrapRepos.get_version_path_from_list( - "3.4.5+staging", versions) + "3.4.5", versions) assert path == Path("/bar/baz") @@ -362,12 +361,15 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): result = fix_bootstrap.find_openpype(include_zips=True) # we should have results as file were created assert result is not None, "no OpenPype version found" - # latest item in `result` should be latest version found. + # latest item in `result` should be the latest version found. + # this will be `7.2.10-foo+staging` even with *staging* in since we've + # dropped the logic to handle staging separately and in alphabetical + # sorting it is after `strange`. expected_path = Path( d_path / "{}{}{}".format( - test_versions_2[3].prefix, - test_versions_2[3].version, - test_versions_2[3].suffix + test_versions_2[4].prefix, + test_versions_2[4].version, + test_versions_2[4].suffix ) ) assert result, "nothing found" diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index c64ff75969..85b94b0971 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -112,4 +112,6 @@ $mongoPath = Find-Mongo $preferred_version Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan +New-Item -ItemType Directory -Force -Path $($dbpath) + Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index 4e80f6e19d..9c99b26f1e 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -51,7 +51,9 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v #### Run from source -For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. +For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. 
To run the powershell scripts you may have to enable unrestricted execution as administrator: + +`Set-ExecutionPolicy -ExecutionPolicy unrestricted` To start OpenPype from source you need to diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index 1c8958d1c0..fa2d996e20 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -55,7 +55,7 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li ## Python -**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). +**Python 3.7.9** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. diff --git a/website/yarn.lock b/website/yarn.lock index 177a4a3802..220a489dfa 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4812,9 +4812,9 @@ loader-runner@^4.2.0: integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== loader-utils@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" - integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== + version "1.4.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" + integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From a7a2731f05a00a8436ea9153e1ce03c560346148 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Dec 2022 12:35:41 +0100 Subject: [PATCH 2392/2550] nuke: subset with mixed letter sizes --- openpype/hosts/nuke/api/lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index cc5e0a94a1..1e5e7fc54b 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -611,7 +611,7 @@ def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): if ( onode["subsets"] - and not any(re.search(s, subset.lower()) for s in onode["subsets"]) + and not any(re.search(s.lower(), subset.lower()) for s in onode["subsets"]) ): continue @@ -704,7 +704,7 @@ def get_imageio_node_override_setting( if ( onode["subsets"] - and not any(re.search(s, subset.lower()) for s in onode["subsets"]) + and not any(re.search(s.lower(), subset.lower()) for s in onode["subsets"]) ): continue From a9d5beecfaf17610aa9249ff88132cd768b15f5e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Dec 2022 12:46:27 +0100 Subject: [PATCH 2393/2550] flake8 --- openpype/hosts/nuke/api/lib.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 1e5e7fc54b..7ee30bf273 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -611,7 +611,10 @@ def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): if ( onode["subsets"] - and not any(re.search(s.lower(), subset.lower()) for s in onode["subsets"]) + and not any( + re.search(s.lower(), subset.lower()) + for s in onode["subsets"] + ) ): continue @@ -704,7 +707,10 @@ 
def get_imageio_node_override_setting( if ( onode["subsets"] - and not any(re.search(s.lower(), subset.lower()) for s in onode["subsets"]) + and not any( + re.search(s.lower(), subset.lower()) + for s in onode["subsets"] + ) ): continue From 1338b1372c008de526a0fda97a826adfb313c3b4 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Dec 2022 13:00:29 +0100 Subject: [PATCH 2394/2550] :bug: fix the path --- openpype/hosts/unreal/api/pipeline.py | 2 +- openpype/hosts/unreal/plugins/publish/collect_instances.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 839465881d..ca5a42cd82 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -116,7 +116,7 @@ def ls(): """ ar = unreal.AssetRegistryHelpers.get_asset_registry() # UE 5.1 changed how class name is specified - class_name = ["/Script", "AssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AssetContainer" # noqa + class_name = ["/Script/OpenPype", "AssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AssetContainer" # noqa openpype_containers = ar.get_assets_by_class(class_name, True) # get_asset_by_class returns AssetData. To get all metadata we need to diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py index db968330c6..1f25cbde7d 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_instances.py @@ -25,7 +25,7 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() - class_name = ["/Script", + class_name = ["/Script/OpenPype", "AssetContainer"] if UNREAL_VERSION.major == 5 and \ UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa instance_containers = ar.get_assets_by_class(class_name, True) From ef9f338fb15b666db999daaa8f4341371591b3a2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Dec 2022 13:23:33 +0100 Subject: [PATCH 2395/2550] :bug: remove unnecessary header --- .../UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 2f066bd94b..e9d94aecfc 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,6 +1,5 @@ #pragma once -#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" From 1a75a6a041d83a2619b280af10b8c172338c9687 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Dec 2022 16:19:28 +0100 Subject: [PATCH 2396/2550] flame: settings for layer renaming --- .../settings/defaults/project_settings/flame.json | 6 +++++- .../schemas/projects_schema/schema_project_flame.json | 11 +++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index 34baf9ba06..9966fdbd33 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -142,7 +142,11 @@ "exr16fpdwaa" ], 
"reel_name": "OP_LoadedReel", - "clip_name_template": "{batch}_{asset}_{subset}<_{output}>" + "clip_name_template": "{batch}_{asset}_{subset}<_{output}>", + "layer_rename_template": "{asset}_{subset}<_{output}>", + "layer_rename_patterns": [ + "rgba" + ] } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 73664300aa..26a2dce2f5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -554,6 +554,17 @@ "type": "text", "key": "clip_name_template", "label": "Clip name template" + }, + { + "type": "text", + "key": "layer_rename_template", + "label": "Layer name template" + }, + { + "type": "list", + "key": "layer_rename_patterns", + "label": "Layer rename patters", + "object_type": "text" } ] } From 42764559330b63c91f6b3e1678bd9f79f0aa88fb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Dec 2022 16:20:08 +0100 Subject: [PATCH 2397/2550] flame: added layer renaming to batch loader --- openpype/hosts/flame/api/plugin.py | 56 +++++++++++++++++-- .../flame/plugins/load/load_clip_batch.py | 11 ++++ 2 files changed, 63 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index ca113fd98a..6aaf0c6d80 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -8,7 +8,7 @@ import qargparse from Qt import QtCore, QtWidgets from openpype import style -from openpype.lib import Logger +from openpype.lib import Logger, StringTemplate from openpype.pipeline import LegacyCreator, LoaderPlugin from openpype.settings import get_current_project_settings @@ -775,6 +775,11 @@ class OpenClipSolver(flib.MediaInfoFile): self.feed_colorspace = feed_data.get("colorspace") self.log.debug("feed_version_name: {}".format(self.feed_version_name)) + # layer rename variables + self.layer_rename_template = feed_data["layer_rename_template"] + self.layer_rename_patterns = feed_data["layer_rename_patterns"] + self.context_data = feed_data["context_data"] + # derivate other feed variables self.feed_basename = os.path.basename(feed_path) self.feed_dir = os.path.dirname(feed_path) @@ -813,9 +818,11 @@ class OpenClipSolver(flib.MediaInfoFile): def _create_new_open_clip(self): self.log.info("Building new openClip") - self.log.debug(">> self.clip_data: {}".format(self.clip_data)) for tmp_xml_track in self.clip_data.iter("track"): + # solve track (layer) name + self._rename_track_name(tmp_xml_track) + tmp_xml_feeds = tmp_xml_track.find('feeds') tmp_xml_feeds.set('currentVersion', self.feed_version_name) @@ -850,6 +857,46 @@ class OpenClipSolver(flib.MediaInfoFile): if uid == track_uid: return xml_track + def _rename_track_name(self, xml_track_data): + name_obj = xml_track_data.find("name") + layer_name = name_obj.text + + if ( + self.layer_rename_patterns + and not any( + re.search(lp_.lower(), layer_name.lower()) + for lp_ in self.layer_rename_patterns + ) + ): + return + + formating_data = self._update_formating_data( + layer=layer_name + ) + name_obj.text = StringTemplate( + self.layer_rename_template + ).format(formating_data) + + def _update_formating_data(self, **kwargs): + """ Updating formating data for layer rename + + Attributes: + key=value (optional): will be included to formating data + as {key: value} + Returns: + dict: anatomy context data for 
formating + """ + self.log.debug(">> self.clip_data: {}".format(self.clip_data)) + clip_name_obj = self.clip_data.find("name") + data = { + "originalBasename": clip_name_obj.text + } + # include version context data + data.update(self.context_data) + # include input kwargs data + data.update(kwargs) + return data + def _update_open_clip(self): self.log.info("Updating openClip ..") @@ -857,11 +904,12 @@ class OpenClipSolver(flib.MediaInfoFile): out_xml = out_xml.getroot() self.log.debug(">> out_xml: {}".format(out_xml)) - self.log.debug(">> self.clip_data: {}".format(self.clip_data)) - # loop tmp tracks updated_any = False for tmp_xml_track in self.clip_data.iter("track"): + # solve track (layer) name + self._rename_track_name(tmp_xml_track) + # get tmp track uid tmp_track_uid = tmp_xml_track.get("uid") self.log.debug(">> tmp_track_uid: {}".format(tmp_track_uid)) diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 048ac19431..96db04f6e3 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -25,6 +25,13 @@ class LoadClipBatch(opfapi.ClipLoader): reel_name = "OP_LoadedReel" clip_name_template = "{batch}_{asset}_{subset}<_{output}>" + """ Anatomy keys from version context data and dynamically added: + - {layer} - original layer name token + - {originalBasename} - original clip name taken from file + """ + layer_rename_template = "{asset}_{subset}<_{output}>" + layer_rename_patterns = [] + def load(self, context, name, namespace, options): # get flame objects @@ -40,6 +47,7 @@ class LoadClipBatch(opfapi.ClipLoader): # in case output is not in context replace key to representation if not context["representation"]["context"].get("output"): self.clip_name_template.replace("output", "representation") + self.layer_rename_template.replace("output", "representation") formating_data = deepcopy(context["representation"]["context"]) formating_data["batch"] = self.batch.name.get_value() @@ -69,6 +77,9 @@ class LoadClipBatch(opfapi.ClipLoader): "path": self.fname.replace("\\", "/"), "colorspace": colorspace, "version": "v{:0>3}".format(version_name), + "layer_rename_template": self.layer_rename_template, + "layer_rename_patterns": self.layer_rename_patterns, + "context_data": formating_data } self.log.debug(pformat( loading_context From 99e1c91a7531b0085f34192b50ef179afe22c76b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Dec 2022 16:57:12 +0100 Subject: [PATCH 2398/2550] flame: enhancing formatting data --- openpype/hosts/flame/api/plugin.py | 4 +++- openpype/hosts/flame/plugins/load/load_clip_batch.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 6aaf0c6d80..c682d294c5 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -858,6 +858,7 @@ class OpenClipSolver(flib.MediaInfoFile): return xml_track def _rename_track_name(self, xml_track_data): + layer_uid = xml_track_data.get("uid") name_obj = xml_track_data.find("name") layer_name = name_obj.text @@ -871,7 +872,8 @@ class OpenClipSolver(flib.MediaInfoFile): return formating_data = self._update_formating_data( - layer=layer_name + layerName=layer_name, + layerUID=layer_uid ) name_obj.text = StringTemplate( self.layer_rename_template diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 
96db04f6e3..47d7da2a76 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -26,7 +26,8 @@ class LoadClipBatch(opfapi.ClipLoader): clip_name_template = "{batch}_{asset}_{subset}<_{output}>" """ Anatomy keys from version context data and dynamically added: - - {layer} - original layer name token + - {layerName} - original layer name token + - {layerUID} - original layer UID token - {originalBasename} - original clip name taken from file """ layer_rename_template = "{asset}_{subset}<_{output}>" From ec1b23898df7e748c1b819145119d265bf7e6d14 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 10 Dec 2022 03:28:34 +0000 Subject: [PATCH 2399/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 443c76544b..190f7ac401 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.2" +__version__ = "3.14.9-nightly.3" From 3acbd9bd0cc00811e10f8b48e1da60a129b2109c Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 10 Dec 2022 09:20:53 +0000 Subject: [PATCH 2400/2550] Prevent warning about already connected time attribute --- openpype/hosts/maya/api/plugin.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 66b525bad1..fe30001a96 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -273,7 +273,12 @@ class ReferenceLoader(Loader): if alembic_nodes: for attr, data in alembic_data.items(): node_attr = "{}.{}".format(alembic_nodes[0], attr) - if data["input"]: + + # Prevent warning about connecting to the time attribute + # cause Maya connects to this attribute by default. + if attr == "time": + continue + elif data["input"]: cmds.connectAttr( data["input"], node_attr, force=True ) From b833025a625ec2720da0822f76b4de24faae457f Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 10 Dec 2022 09:30:42 +0000 Subject: [PATCH 2401/2550] Improve readability --- openpype/hosts/maya/api/lib.py | 5 +++++ openpype/hosts/maya/api/plugin.py | 30 +++++++++++------------------- 2 files changed, 16 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2530021eba..04b0ad35f1 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3436,3 +3436,8 @@ def iter_visible_nodes_in_range(nodes, start, end): # If no more nodes to process break the frame iterations.. 
if not node_dependencies: break + + +def get_attribute_input(attr): + connections = cmds.listConnections(attr, plugs=True, destination=False) + return connections[0] if connections else None diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index fe30001a96..82df85a8be 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -226,11 +226,8 @@ class ReferenceLoader(Loader): if alembic_nodes: for attr in alembic_attrs: node_attr = "{}.{}".format(alembic_nodes[0], attr) - inputs = cmds.listConnections( - node_attr, plugs=True, destination=False - ) data = { - "input": None if inputs is None else inputs[0], + "input": lib.get_attribute_input(node_attr), "value": cmds.getAttr(node_attr) } @@ -271,23 +268,18 @@ class ReferenceLoader(Loader): "{}:*".format(namespace), type="AlembicNode" ) if alembic_nodes: + alembic_node = alembic_nodes[0] # assume single AlembicNode for attr, data in alembic_data.items(): - node_attr = "{}.{}".format(alembic_nodes[0], attr) - - # Prevent warning about connecting to the time attribute - # cause Maya connects to this attribute by default. - if attr == "time": - continue - elif data["input"]: - cmds.connectAttr( - data["input"], node_attr, force=True - ) + node_attr = "{}.{}".format(alembic_node, attr) + input = lib.get_attribute_input(node_attr) + if data["input"]: + if data["input"] != input: + cmds.connectAttr( + data["input"], node_attr, force=True + ) else: - inputs = cmds.listConnections( - node_attr, plugs=True, destination=False - ) - if inputs: - cmds.disconnectAttr(inputs[0], node_attr) + if input: + cmds.disconnectAttr(input, node_attr) cmds.setAttr(node_attr, data["value"]) # Fix PLN-40 for older containers created with Avalon that had the From 358f1b7d40a38f103041d92c976eeefb8e375bad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 11:45:51 +0100 Subject: [PATCH 2402/2550] Revert .toml update of Gazu Toml updates should go to next minor --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 20e676dcde..f74f40c561 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ Click = "^7" dnspython = "^2.1.0" ftrack-python-api = "^2.3.3" shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} -gazu = "^0.8.32" +gazu = "^0.8.28" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) 
jsonschema = "^2.6.0" keyring = "^22.0.1" From e026a27b9380467eed8ae2d2ecc082394f77cc2d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 12 Dec 2022 12:11:33 +0100 Subject: [PATCH 2403/2550] hiero: fixing thumbnail if multillayer exr --- openpype/hosts/hiero/plugins/publish/extract_thumbnail.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py index e64aa89b26..5ca79dc1dc 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py @@ -41,7 +41,7 @@ class ExtractThumnail(publish.Extractor): track_item_name, thumb_frame, ".png") thumb_path = os.path.join(staging_dir, thumb_file) - thumbnail = track_item.thumbnail(thumb_frame).save( + thumbnail = track_item.thumbnail(thumb_frame, "colour").save( thumb_path, format='png' ) From 9f61aa338536321b17730b175a8b603b0870a385 Mon Sep 17 00:00:00 2001 From: Joseff Date: Mon, 12 Dec 2022 14:27:26 +0100 Subject: [PATCH 2404/2550] Refactorization of folder coloring --- .../UE_4.7/Config/DefaultOpenPypeSettings.ini | 2 + .../UE_4.7/Source/OpenPype/OpenPype.Build.cs | 1 + .../Source/OpenPype/Private/OpenPype.cpp | 64 +++++++++++++++-- .../Source/OpenPype/Private/OpenPypeLib.cpp | 24 ++++--- .../Private/OpenPypePublishInstance.cpp | 51 +++++++++++++- .../OpenPype/Private/OpenPypeSettings.cpp | 21 ++++++ .../UE_4.7/Source/OpenPype/Public/OpenPype.h | 3 +- .../Source/OpenPype/Public/OpenPypeLib.h | 4 +- .../OpenPype/Public/OpenPypePublishInstance.h | 18 +++-- .../Source/OpenPype/Public/OpenPypeSettings.h | 32 +++++++++ .../UE_5.0/Config/DefaultOpenPypeSettings.ini | 2 + .../UE_5.0/Source/OpenPype/OpenPype.Build.cs | 1 + .../Source/OpenPype/Private/OpenPype.cpp | 69 +++++++++++++++++-- .../Source/OpenPype/Private/OpenPypeLib.cpp | 24 ++++--- .../Private/OpenPypePublishInstance.cpp | 69 ++++++++++++++++--- .../OpenPype/Private/OpenPypeSettings.cpp | 21 ++++++ .../UE_5.0/Source/OpenPype/Public/OpenPype.h | 2 + .../Source/OpenPype/Public/OpenPypeLib.h | 4 +- .../OpenPype/Public/OpenPypePublishInstance.h | 23 ++++--- .../Source/OpenPype/Public/OpenPypeSettings.h | 32 +++++++++ 20 files changed, 401 insertions(+), 66 deletions(-) create mode 100644 openpype/hosts/unreal/integration/UE_4.7/Config/DefaultOpenPypeSettings.ini create mode 100644 openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeSettings.cpp create mode 100644 openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeSettings.h create mode 100644 openpype/hosts/unreal/integration/UE_5.0/Config/DefaultOpenPypeSettings.ini create mode 100644 openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp create mode 100644 openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h diff --git a/openpype/hosts/unreal/integration/UE_4.7/Config/DefaultOpenPypeSettings.ini b/openpype/hosts/unreal/integration/UE_4.7/Config/DefaultOpenPypeSettings.ini new file mode 100644 index 0000000000..8a883cf1db --- /dev/null +++ b/openpype/hosts/unreal/integration/UE_4.7/Config/DefaultOpenPypeSettings.ini @@ -0,0 +1,2 @@ +[/Script/OpenPype.OpenPypeSettings] +FolderColor=(R=91,G=197,B=220,A=255) \ No newline at end of file diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/OpenPype.Build.cs b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/OpenPype.Build.cs index c30835b63d..46e5dcb2df 100644 --- 
a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/OpenPype.Build.cs +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/OpenPype.Build.cs @@ -42,6 +42,7 @@ public class OpenPype : ModuleRules "Engine", "Slate", "SlateCore", + "AssetTools" // ... add private dependencies that you statically link with here ... } ); diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp index 15c46b3862..d20abec9b1 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp @@ -1,6 +1,11 @@ #include "OpenPype.h" + +#include "ISettingsContainer.h" +#include "ISettingsModule.h" +#include "ISettingsSection.h" #include "LevelEditor.h" #include "OpenPypePythonBridge.h" +#include "OpenPypeSettings.h" #include "OpenPypeStyle.h" @@ -11,13 +16,12 @@ static const FName OpenPypeTabName("OpenPype"); // This function is triggered when the plugin is staring up void FOpenPypeModule::StartupModule() { - FOpenPypeStyle::Initialize(); FOpenPypeStyle::SetIcon("Logo", "openpype40"); // Create the Extender that will add content to the menu FLevelEditorModule& LevelEditorModule = FModuleManager::LoadModuleChecked("LevelEditor"); - + TSharedPtr MenuExtender = MakeShareable(new FExtender()); TSharedPtr ToolbarExtender = MakeShareable(new FExtender()); @@ -37,6 +41,7 @@ void FOpenPypeModule::StartupModule() LevelEditorModule.GetMenuExtensibilityManager()->AddExtender(MenuExtender); LevelEditorModule.GetToolBarExtensibilityManager()->AddExtender(ToolbarExtender); + RegisterSettings(); } void FOpenPypeModule::ShutdownModule() @@ -64,7 +69,6 @@ void FOpenPypeModule::AddMenuEntry(FMenuBuilder& MenuBuilder) FSlateIcon(FOpenPypeStyle::GetStyleSetName(), "OpenPype.Logo"), FUIAction(FExecuteAction::CreateRaw(this, &FOpenPypeModule::MenuDialog)) ); - } MenuBuilder.EndSection(); } @@ -89,13 +93,63 @@ void FOpenPypeModule::AddToobarEntry(FToolBarBuilder& ToolbarBuilder) ToolbarBuilder.EndSection(); } +void FOpenPypeModule::RegisterSettings() +{ + ISettingsModule& SettingsModule = FModuleManager::LoadModuleChecked("Settings"); -void FOpenPypeModule::MenuPopup() { + // Create the new category + // TODO: After the movement of the plugin from the game to editor, it might be necessary to move this! + ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project"); + + SettingsContainer->DescribeCategory("OpenPypeSettings", + LOCTEXT("RuntimeWDCategoryName", "OpenPypeSettings"), + LOCTEXT("RuntimeWDCategoryDescription", + "Configuration for the Open pype module")); + + UOpenPypeSettings* Settings = GetMutableDefault(); + + // Register the settings + ISettingsSectionPtr SettingsSection = SettingsModule.RegisterSettings("Project", "OpenPype", "General", + LOCTEXT("RuntimeGeneralSettingsName", + "General"), + LOCTEXT("RuntimeGeneralSettingsDescription", + "Base configuration for Open Pype Module"), + Settings + ); + + // Register the save handler to your settings, you might want to use it to + // validate those or just act to settings changes. 
+ if (SettingsSection.IsValid()) + { + SettingsSection->OnModified().BindRaw(this, &FOpenPypeModule::HandleSettingsSaved); + } +} + +bool FOpenPypeModule::HandleSettingsSaved() +{ + UOpenPypeSettings* Settings = GetMutableDefault(); + bool ResaveSettings = false; + + // You can put any validation code in here and resave the settings in case an invalid + // value has been entered + + if (ResaveSettings) + { + Settings->SaveConfig(); + } + + return true; +} + + +void FOpenPypeModule::MenuPopup() +{ UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get(); bridge->RunInPython_Popup(); } -void FOpenPypeModule::MenuDialog() { +void FOpenPypeModule::MenuDialog() +{ UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get(); bridge->RunInPython_Dialog(); } diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeLib.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeLib.cpp index 5facab7b8b..a58e921288 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeLib.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeLib.cpp @@ -1,4 +1,6 @@ #include "OpenPypeLib.h" + +#include "AssetViewUtils.h" #include "Misc/Paths.h" #include "Misc/ConfigCacheIni.h" #include "UObject/UnrealType.h" @@ -10,21 +12,23 @@ * @warning This color will appear only after Editor restart. Is there a better way? */ -void UOpenPypeLib::CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd) +bool UOpenPypeLib::SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd) { - auto SaveColorInternal = [](FString InPath, FLinearColor InFolderColor) + if (AssetViewUtils::DoesFolderExist(FolderPath)) { - // Saves the color of the folder to the config - if (FPaths::FileExists(GEditorPerProjectIni)) - { - GConfig->SetString(TEXT("PathColor"), *InPath, *InFolderColor.ToString(), GEditorPerProjectIni); - } + const TSharedPtr LinearColor = MakeShared(FolderColor); - }; - - SaveColorInternal(FolderPath, FolderColor); + AssetViewUtils::SaveColor(FolderPath, LinearColor, true); + UE_LOG(LogAssetData, Display, TEXT("A color {%s} has been set to folder \"%s\""), *LinearColor->ToString(), + *FolderPath) + return true; + } + UE_LOG(LogAssetData, Display, TEXT("Setting a color {%s} to folder \"%s\" has failed! 
Directory doesn't exist!"), + *FolderColor.ToString(), *FolderPath) + return false; } + /** * Returns all poperties on given object * @param cls - class diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index ed81104c05..38740f1cbd 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -3,6 +3,8 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" #include "NotificationManager.h" +#include "OpenPypeLib.h" +#include "OpenPypeSettings.h" #include "SNotificationList.h" //Moves all the invalid pointers to the end to prepare them for the shrinking @@ -36,6 +38,11 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + +#ifdef WITH_EDITOR + ColorOpenPypeDirs(); +#endif + } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) @@ -58,7 +65,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (AssetDataInternal.Emplace(Asset).IsValidId()) { UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + *this->GetName(), *Asset->GetName()); } } } @@ -96,6 +103,48 @@ bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const #ifdef WITH_EDITOR +void UOpenPypePublishInstance::ColorOpenPypeDirs() +{ + FString PathName = this->GetPathName(); + + //Check whether the path contains the defined OpenPype folder + if (!PathName.Contains(TEXT("OpenPype"))) return; + + //Get the base path for open pype + FString PathLeft, PathRight; + PathName.Split(FString("OpenPype"), &PathLeft, &PathRight); + + if (PathLeft.IsEmpty() || PathRight.IsEmpty()) + { + UE_LOG(LogAssetData, Error, TEXT("Failed to retrieve the base OpenPype directory!")) + return; + } + + PathName.RemoveFromEnd(PathRight, ESearchCase::CaseSensitive); + + //Get the current settings + const UOpenPypeSettings* Settings = GetMutableDefault(); + + //Color the base folder + UOpenPypeLib::SetFolderColor(PathName, Settings->GetFolderFColor(), false); + + //Get Sub paths, iterate through them and color them according to the folder color in UOpenPypeSettings + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked( + "AssetRegistry"); + + TArray PathList; + + AssetRegistryModule.Get().GetSubPaths(PathName, PathList, true); + + if (PathList.Num() > 0) + { + for (const FString& Path : PathList) + { + UOpenPypeLib::SetFolderColor(Path, Settings->GetFolderFColor(), false); + } + } +} + void UOpenPypePublishInstance::SendNotification(const FString& Text) const { FNotificationInfo Info{FText::FromString(Text)}; diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeSettings.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeSettings.cpp new file mode 100644 index 0000000000..7134614d22 --- /dev/null +++ 
b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypeSettings.cpp @@ -0,0 +1,21 @@ +// Fill out your copyright notice in the Description page of Project Settings. + +#include "OpenPypeSettings.h" + +#include "IPluginManager.h" +#include "UObjectGlobals.h" + +/** + * Mainly is used for initializing default values if the DefaultOpenPypeSettings.ini file does not exist in the saved config + */ +UOpenPypeSettings::UOpenPypeSettings(const FObjectInitializer& ObjectInitializer) +{ + + const FString ConfigFilePath = OPENPYPE_SETTINGS_FILEPATH; + + // This has to be probably in the future set using the UE Reflection system + FColor Color; + GConfig->GetColor(TEXT("/Script/OpenPype.OpenPypeSettings"), TEXT("FolderColor"), Color, ConfigFilePath); + + FolderColor = Color; +} \ No newline at end of file diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPype.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPype.h index db3f299354..9cfa60176c 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPype.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPype.h @@ -12,10 +12,11 @@ public: virtual void ShutdownModule() override; private: + void RegisterSettings(); + bool HandleSettingsSaved(); void AddMenuEntry(FMenuBuilder& MenuBuilder); void AddToobarEntry(FToolBarBuilder& ToolbarBuilder); void MenuPopup(); void MenuDialog(); - }; diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeLib.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeLib.h index 59e9c8bd76..06425c7c7d 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeLib.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeLib.h @@ -5,14 +5,14 @@ UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypeLib : public UObject +class OPENPYPE_API UOpenPypeLib : public UBlueprintFunctionLibrary { GENERATED_BODY() public: UFUNCTION(BlueprintCallable, Category = Python) - static void CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd); + static bool SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor,const bool& bForceAdd); UFUNCTION(BlueprintCallable, Category = Python) static TArray GetAllProperties(UClass* cls); diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 0e946fb039..cd414fe2cc 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -8,10 +8,8 @@ UCLASS(Blueprintable) class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { GENERATED_UCLASS_BODY() - + public: - - /** /** * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is * placed in) @@ -58,8 +56,10 @@ public: UFUNCTION(BlueprintCallable, BlueprintPure) TSet GetAllAssets() const { - const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; - + const TSet>& IteratedSet = bAddExternalAssets + ? AssetDataInternal.Union(AssetDataExternal) + : AssetDataInternal; + //Create a new TSet only with raw pointers. 
TSet ResultSet; @@ -69,12 +69,10 @@ public: return ResultSet; } - private: - UPROPERTY(VisibleAnywhere, Category="Assets") TSet> AssetDataInternal; - + /** * This property allows exposing the array to include other assets from any other directory than what it's currently * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! @@ -93,11 +91,11 @@ private: bool IsUnderSameDir(const UObject* InAsset) const; #ifdef WITH_EDITOR + + void ColorOpenPypeDirs(); void SendNotification(const FString& Text) const; virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; #endif - }; - diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeSettings.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeSettings.h new file mode 100644 index 0000000000..2df6c887cf --- /dev/null +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypeSettings.h @@ -0,0 +1,32 @@ +// Fill out your copyright notice in the Description page of Project Settings. + +#pragma once + +#include "CoreMinimal.h" +#include "Object.h" +#include "OpenPypeSettings.generated.h" + +#define OPENPYPE_SETTINGS_FILEPATH IPluginManager::Get().FindPlugin("OpenPype")->GetBaseDir() / TEXT("Config") / TEXT("DefaultOpenPypeSettings.ini") + +UCLASS(Config=OpenPypeSettings, DefaultConfig) +class OPENPYPE_API UOpenPypeSettings : public UObject +{ + GENERATED_UCLASS_BODY() + + UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings) + FColor GetFolderFColor() const + { + return FolderColor; + } + + UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings) + FLinearColor GetFolderFLinearColor() const + { + return FLinearColor(FolderColor); + } + +protected: + + UPROPERTY(config, EditAnywhere, Category = Folders) + FColor FolderColor = FColor(25,45,223); +}; \ No newline at end of file diff --git a/openpype/hosts/unreal/integration/UE_5.0/Config/DefaultOpenPypeSettings.ini b/openpype/hosts/unreal/integration/UE_5.0/Config/DefaultOpenPypeSettings.ini new file mode 100644 index 0000000000..8a883cf1db --- /dev/null +++ b/openpype/hosts/unreal/integration/UE_5.0/Config/DefaultOpenPypeSettings.ini @@ -0,0 +1,2 @@ +[/Script/OpenPype.OpenPypeSettings] +FolderColor=(R=91,G=197,B=220,A=255) \ No newline at end of file diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs index fcfd268234..2ee5d9027c 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs @@ -44,6 +44,7 @@ public class OpenPype : ModuleRules "Engine", "Slate", "SlateCore", + "AssetTools" // ... add private dependencies that you statically link with here ... 
} ); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp index b3bd9a81b3..11aae0ffc2 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp @@ -1,8 +1,12 @@ #include "OpenPype.h" + +#include "ISettingsContainer.h" +#include "ISettingsModule.h" +#include "ISettingsSection.h" #include "OpenPypeStyle.h" #include "OpenPypeCommands.h" #include "OpenPypePythonBridge.h" -#include "LevelEditor.h" +#include "OpenPypeSettings.h" #include "Misc/MessageDialog.h" #include "ToolMenus.h" @@ -29,7 +33,10 @@ void FOpenPypeModule::StartupModule() FExecuteAction::CreateRaw(this, &FOpenPypeModule::MenuDialog), FCanExecuteAction()); - UToolMenus::RegisterStartupCallback(FSimpleMulticastDelegate::FDelegate::CreateRaw(this, &FOpenPypeModule::RegisterMenus)); + UToolMenus::RegisterStartupCallback( + FSimpleMulticastDelegate::FDelegate::CreateRaw(this, &FOpenPypeModule::RegisterMenus)); + + RegisterSettings(); } void FOpenPypeModule::ShutdownModule() @@ -43,6 +50,55 @@ void FOpenPypeModule::ShutdownModule() FOpenPypeCommands::Unregister(); } + +void FOpenPypeModule::RegisterSettings() +{ + ISettingsModule& SettingsModule = FModuleManager::LoadModuleChecked("Settings"); + + // Create the new category + // TODO: After the movement of the plugin from the game to editor, it might be necessary to move this! + ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project"); + + SettingsContainer->DescribeCategory("OpenPypeSettings", + LOCTEXT("RuntimeWDCategoryName", "OpenPypeSettings"), + LOCTEXT("RuntimeWDCategoryDescription", + "Configuration for the Open pype module")); + + UOpenPypeSettings* Settings = GetMutableDefault(); + + // Register the settings + ISettingsSectionPtr SettingsSection = SettingsModule.RegisterSettings("Project", "OpenPype", "General", + LOCTEXT("RuntimeGeneralSettingsName", + "General"), + LOCTEXT("RuntimeGeneralSettingsDescription", + "Base configuration for Open Pype Module"), + Settings + ); + + // Register the save handler to your settings, you might want to use it to + // validate those or just act to settings changes. 
+ if (SettingsSection.IsValid()) + { + SettingsSection->OnModified().BindRaw(this, &FOpenPypeModule::HandleSettingsSaved); + } +} + +bool FOpenPypeModule::HandleSettingsSaved() +{ + UOpenPypeSettings* Settings = GetMutableDefault(); + bool ResaveSettings = false; + + // You can put any validation code in here and resave the settings in case an invalid + // value has been entered + + if (ResaveSettings) + { + Settings->SaveConfig(); + } + + return true; +} + void FOpenPypeModule::RegisterMenus() { // Owner will be used for cleanup in call to UToolMenus::UnregisterOwner @@ -64,7 +120,8 @@ void FOpenPypeModule::RegisterMenus() { FToolMenuSection& Section = ToolbarMenu->FindOrAddSection("PluginTools"); { - FToolMenuEntry& Entry = Section.AddEntry(FToolMenuEntry::InitToolBarButton(FOpenPypeCommands::Get().OpenPypeTools)); + FToolMenuEntry& Entry = Section.AddEntry( + FToolMenuEntry::InitToolBarButton(FOpenPypeCommands::Get().OpenPypeTools)); Entry.SetCommandList(PluginCommands); } } @@ -72,12 +129,14 @@ void FOpenPypeModule::RegisterMenus() } -void FOpenPypeModule::MenuPopup() { +void FOpenPypeModule::MenuPopup() +{ UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get(); bridge->RunInPython_Popup(); } -void FOpenPypeModule::MenuDialog() { +void FOpenPypeModule::MenuDialog() +{ UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get(); bridge->RunInPython_Dialog(); } diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeLib.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeLib.cpp index 5facab7b8b..a58e921288 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeLib.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeLib.cpp @@ -1,4 +1,6 @@ #include "OpenPypeLib.h" + +#include "AssetViewUtils.h" #include "Misc/Paths.h" #include "Misc/ConfigCacheIni.h" #include "UObject/UnrealType.h" @@ -10,21 +12,23 @@ * @warning This color will appear only after Editor restart. Is there a better way? */ -void UOpenPypeLib::CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd) +bool UOpenPypeLib::SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd) { - auto SaveColorInternal = [](FString InPath, FLinearColor InFolderColor) + if (AssetViewUtils::DoesFolderExist(FolderPath)) { - // Saves the color of the folder to the config - if (FPaths::FileExists(GEditorPerProjectIni)) - { - GConfig->SetString(TEXT("PathColor"), *InPath, *InFolderColor.ToString(), GEditorPerProjectIni); - } + const TSharedPtr LinearColor = MakeShared(FolderColor); - }; - - SaveColorInternal(FolderPath, FolderColor); + AssetViewUtils::SaveColor(FolderPath, LinearColor, true); + UE_LOG(LogAssetData, Display, TEXT("A color {%s} has been set to folder \"%s\""), *LinearColor->ToString(), + *FolderPath) + return true; + } + UE_LOG(LogAssetData, Display, TEXT("Setting a color {%s} to folder \"%s\" has failed! 
Directory doesn't exist!"), + *FolderColor.ToString(), *FolderPath) + return false; } + /** * Returns all poperties on given object * @param cls - class diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 322663eeec..6f41600bae 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,8 +2,9 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" -#include "AssetToolsModule.h" -#include "Framework/Notifications/NotificationManager.h" +#include "NotificationManager.h" +#include "OpenPypeLib.h" +#include "OpenPypeSettings.h" #include "SNotificationList.h" //Moves all the invalid pointers to the end to prepare them for the shrinking @@ -16,8 +17,11 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< FAssetRegistryModule>("AssetRegistry"); + const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked( + "PropertyEditor"); + FString Left, Right; - GetPathName().Split(GetName(), &Left, &Right); + GetPathName().Split("/" + GetName(), &Left, &Right); FARFilter Filter; Filter.PackagePaths.Emplace(FName(Left)); @@ -34,15 +38,17 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); - - + +#ifdef WITH_EDITOR + ColorOpenPypeDirs(); +#endif } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - const TObjectPtr Asset = InAssetData.GetAsset(); + UObject* Asset = InAssetData.GetAsset(); if (!IsValid(Asset)) { @@ -58,7 +64,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (AssetDataInternal.Emplace(Asset).IsValidId()) { UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + *this->GetName(), *Asset->GetName()); } } } @@ -86,7 +92,7 @@ void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) REMOVE_INVALID_ENTRIES(AssetDataExternal); } -bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr& InAsset) const +bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const { FString ThisLeft, ThisRight; this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); @@ -96,6 +102,48 @@ bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr& InAsset #ifdef WITH_EDITOR +void UOpenPypePublishInstance::ColorOpenPypeDirs() +{ + FString PathName = this->GetPathName(); + + //Check whether the path contains the defined OpenPype folder + if (!PathName.Contains(TEXT("OpenPype"))) return; + + //Get the base path for open pype + FString PathLeft, PathRight; + PathName.Split(FString("OpenPype"), &PathLeft, &PathRight); + + if (PathLeft.IsEmpty() || PathRight.IsEmpty()) + { + UE_LOG(LogAssetData, Error, TEXT("Failed to retrieve the base OpenPype directory!")) + return; + } + + PathName.RemoveFromEnd(PathRight, 
ESearchCase::CaseSensitive); + + //Get the current settings + const UOpenPypeSettings* Settings = GetMutableDefault(); + + //Color the base folder + UOpenPypeLib::SetFolderColor(PathName, Settings->GetFolderFColor(), false); + + //Get Sub paths, iterate through them and color them according to the folder color in UOpenPypeSettings + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked( + "AssetRegistry"); + + TArray PathList; + + AssetRegistryModule.Get().GetSubPaths(PathName, PathList, true); + + if (PathList.Num() > 0) + { + for (const FString& Path : PathList) + { + UOpenPypeLib::SetFolderColor(Path, Settings->GetFolderFColor(), false); + } + } +} + void UOpenPypePublishInstance::SendNotification(const FString& Text) const { FNotificationInfo Info{FText::FromString(Text)}; @@ -125,16 +173,15 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( UOpenPypePublishInstance, AssetDataExternal)) { - // Check for duplicated assets for (const auto& Asset : AssetDataInternal) { if (AssetDataExternal.Contains(Asset)) { AssetDataExternal.Remove(Asset); - return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + return SendNotification( + "You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); } - } // Check if no UOpenPypePublishInstance type assets are included diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp new file mode 100644 index 0000000000..7134614d22 --- /dev/null +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp @@ -0,0 +1,21 @@ +// Fill out your copyright notice in the Description page of Project Settings. 
+ +#include "OpenPypeSettings.h" + +#include "IPluginManager.h" +#include "UObjectGlobals.h" + +/** + * Mainly is used for initializing default values if the DefaultOpenPypeSettings.ini file does not exist in the saved config + */ +UOpenPypeSettings::UOpenPypeSettings(const FObjectInitializer& ObjectInitializer) +{ + + const FString ConfigFilePath = OPENPYPE_SETTINGS_FILEPATH; + + // This has to be probably in the future set using the UE Reflection system + FColor Color; + GConfig->GetColor(TEXT("/Script/OpenPype.OpenPypeSettings"), TEXT("FolderColor"), Color, ConfigFilePath); + + FolderColor = Color; +} \ No newline at end of file diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPype.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPype.h index 3ee5eaa65f..4261476da8 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPype.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPype.h @@ -14,6 +14,8 @@ public: private: void RegisterMenus(); + void RegisterSettings(); + bool HandleSettingsSaved(); void MenuPopup(); void MenuDialog(); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeLib.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeLib.h index 59e9c8bd76..06425c7c7d 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeLib.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeLib.h @@ -5,14 +5,14 @@ UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypeLib : public UObject +class OPENPYPE_API UOpenPypeLib : public UBlueprintFunctionLibrary { GENERATED_BODY() public: UFUNCTION(BlueprintCallable, Category = Python) - static void CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd); + static bool SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor,const bool& bForceAdd); UFUNCTION(BlueprintCallable, Category = Python) static TArray GetAllProperties(UClass* cls); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 2f066bd94b..146025bd6d 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,6 +1,5 @@ #pragma once -#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" @@ -9,7 +8,9 @@ UCLASS(Blueprintable) class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { GENERATED_UCLASS_BODY() + public: + /** /** * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is * placed in) @@ -56,8 +57,10 @@ public: UFUNCTION(BlueprintCallable, BlueprintPure) TSet GetAllAssets() const { - const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; - + const TSet>& IteratedSet = bAddExternalAssets + ? AssetDataInternal.Union(AssetDataExternal) + : AssetDataInternal; + //Create a new TSet only with raw pointers. TSet ResultSet; @@ -72,24 +75,26 @@ private: TSet> AssetDataInternal; /** - * This property allows the instance to include other assets from any other directory than what it's currently - * monitoring. - * @attention assets have to be added manually! 
They are not automatically registered or added! + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! */ - UPROPERTY(EditAnywhere, Category="Assets") + UPROPERTY(EditAnywhere, Category = "Assets") bool bAddExternalAssets = false; - UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets")) + UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets") TSet> AssetDataExternal; + void OnAssetCreated(const FAssetData& InAssetData); void OnAssetRemoved(const FAssetData& InAssetData); void OnAssetUpdated(const FAssetData& InAssetData); - bool IsUnderSameDir(const TObjectPtr& InAsset) const; + bool IsUnderSameDir(const UObject* InAsset) const; #ifdef WITH_EDITOR + void ColorOpenPypeDirs(); + void SendNotification(const FString& Text) const; virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h new file mode 100644 index 0000000000..2df6c887cf --- /dev/null +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h @@ -0,0 +1,32 @@ +// Fill out your copyright notice in the Description page of Project Settings. + +#pragma once + +#include "CoreMinimal.h" +#include "Object.h" +#include "OpenPypeSettings.generated.h" + +#define OPENPYPE_SETTINGS_FILEPATH IPluginManager::Get().FindPlugin("OpenPype")->GetBaseDir() / TEXT("Config") / TEXT("DefaultOpenPypeSettings.ini") + +UCLASS(Config=OpenPypeSettings, DefaultConfig) +class OPENPYPE_API UOpenPypeSettings : public UObject +{ + GENERATED_UCLASS_BODY() + + UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings) + FColor GetFolderFColor() const + { + return FolderColor; + } + + UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings) + FLinearColor GetFolderFLinearColor() const + { + return FLinearColor(FolderColor); + } + +protected: + + UPROPERTY(config, EditAnywhere, Category = Folders) + FColor FolderColor = FColor(25,45,223); +}; \ No newline at end of file From aecc5577400eceaa6381f40b2a26204e84227051 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 12 Dec 2022 14:45:09 +0100 Subject: [PATCH 2405/2550] events use different approach to define if event should be passed to callback --- openpype/lib/events.py | 46 ++++++++++++++++++++++++++++++++++++------ 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 747761fb3e..096201312f 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -74,22 +74,52 @@ class EventCallback(object): "Registered callback is not callable. \"{}\"" ).format(str(func))) - # Collect additional data about function - # - name - # - path - # - if expect argument or not + # Collect function name and path to file for logging func_name = func.__name__ func_path = os.path.abspath(inspect.getfile(func)) + + # Get expected arguments from function spec + # - positional arguments are always preferred + expect_args = False + expect_kwargs = False + fake_event = "fake" if hasattr(inspect, "signature"): + # Python 3 using 'Signature' object where we try to bind arg + # or kwarg. Using signature is recommended approach based on + # documentation. 
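# --- Editor's note: the sketch below is illustrative only and is not part
# of this patch. It shows the same bind-based detection as a standalone
# helper; "fake" stands in for a real Event object.
import inspect

def detect_event_argument(func):
    sig = inspect.signature(func)
    expect_args = expect_kwargs = False
    try:
        sig.bind("fake")        # callback accepts one positional argument
        expect_args = True
    except TypeError:
        pass
    try:
        sig.bind(event="fake")  # callback accepts an 'event' keyword argument
        expect_kwargs = True
    except TypeError:
        pass
    return expect_args, expect_kwargs
# --- End of editor's note.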
sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 + try: + sig.bind(fake_event) + expect_args = True + except TypeError: + pass + + try: + sig.bind(event=fake_event) + expect_kwargs = True + except TypeError: + pass + else: - expect_args = len(inspect.getargspec(func)[0]) > 0 + # In Python 2 'signature' is not available so 'getcallargs' is used + # - 'getcallargs' is marked as deprecated since Python 3.0 + try: + inspect.getcallargs(func, fake_event) + expect_args = True + except TypeError: + pass + + try: + inspect.getcallargs(func, event=fake_event) + expect_kwargs = True + except TypeError: + pass self._func_ref = func_ref self._func_name = func_name self._func_path = func_path self._expect_args = expect_args + self._expect_kwargs = expect_kwargs self._ref_valid = func_ref is not None self._enabled = True @@ -157,6 +187,10 @@ class EventCallback(object): try: if self._expect_args: callback(event) + + elif self._expect_kwargs: + callback(event=event) + else: callback() From 771d7994dcfbf76bef1547ad1fcdc799e7418697 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 12 Dec 2022 22:05:47 +0800 Subject: [PATCH 2406/2550] update the redshift render settings --- openpype/hosts/maya/api/lib_rendersettings.py | 21 +++++++++++++++++++ .../schemas/schema_maya_render_settings.json | 10 ++++----- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index d9b79e3c2f..5161141ef9 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -95,6 +95,7 @@ class RenderSettings(object): if renderer == "redshift": self._set_redshift_settings(width, height) + mel.eval("redshiftUpdateActiveAovList") def _set_arnold_settings(self, width, height): """Sets settings for Arnold.""" @@ -158,7 +159,10 @@ class RenderSettings(object): cmds.delete(aov) redshift_aovs = redshift_render_presets["aov_list"] + # list all the aovs + all_rs_aovs = cmds.ls(type='RedshiftAOV') for rs_aov in redshift_aovs: + rs_layername = rs_aov if " " in rs_aov: rs_renderlayer = rs_aov.replace(" ", "") rs_layername = "rsAov_{}".format(rs_renderlayer) @@ -170,6 +174,23 @@ class RenderSettings(object): # update the AOV list mel.eval("redshiftUpdateActiveAovList") + rs_p_engine = redshift_render_presets["primary_gi_engine"] + rs_s_engine = redshift_render_presets["secondary_gi_engine"] + + if int(rs_p_engine) or int(rs_s_engine) != 0: + cmds.setAttr("redshiftOptions.GIEnabled", 1) + if int(rs_p_engine) == 0: + # reset the primary GI Engine as default + cmds.setAttr("redshiftOptions.primaryGIEngine", 4) + if int(rs_s_engine) == 0: + # reset the secondary GI Engine as default + cmds.setAttr("redshiftOptions.secondaryGIEngine", 2) + else: + cmds.setAttr("redshiftOptions.GIEnabled", 0) + + cmds.setAttr("redshiftOptions.primaryGIEngine", int(rs_p_engine)) + cmds.setAttr("redshiftOptions.secondaryGIEngine", int(rs_s_engine)) + additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 512e45f674..e90495891b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -317,9 +317,8 @@ "defaults": "0", "enum_items": [ {"0": "None"}, - {"1": "Photon Map"}, - {"2": "Irradiance Cache"}, - {"3": "Brute Force"} + {"3": "Irradiance Cache"}, + {"4": "Brute Force"} ] }, { @@ -330,9 +329,8 @@ "defaults": "0", "enum_items": [ {"0": "None"}, - {"1": "Photon Map"}, - {"2": "Irradiance Cache"}, - {"3": "Brute Force"} + {"2": "Irradiance Point Cloud"}, + {"4": "Brute Force"} ] }, { From fefb64ed4268db7ef278fb8ad61baf8b544a31d4 Mon Sep 17 00:00:00 2001 From: Joseff Date: Mon, 12 Dec 2022 16:47:12 +0100 Subject: [PATCH 2407/2550] Removed the unnecessary DescribeCategory() --- .../integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp | 5 ----- .../integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp | 5 ----- 2 files changed, 10 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp index d20abec9b1..d06a08eb43 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPype.cpp @@ -101,11 +101,6 @@ void FOpenPypeModule::RegisterSettings() // TODO: After the movement of the plugin from the game to editor, it might be necessary to move this! ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project"); - SettingsContainer->DescribeCategory("OpenPypeSettings", - LOCTEXT("RuntimeWDCategoryName", "OpenPypeSettings"), - LOCTEXT("RuntimeWDCategoryDescription", - "Configuration for the Open pype module")); - UOpenPypeSettings* Settings = GetMutableDefault(); // Register the settings diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp index 11aae0ffc2..d23de61102 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPype.cpp @@ -59,11 +59,6 @@ void FOpenPypeModule::RegisterSettings() // TODO: After the movement of the plugin from the game to editor, it might be necessary to move this! ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project"); - SettingsContainer->DescribeCategory("OpenPypeSettings", - LOCTEXT("RuntimeWDCategoryName", "OpenPypeSettings"), - LOCTEXT("RuntimeWDCategoryDescription", - "Configuration for the Open pype module")); - UOpenPypeSettings* Settings = GetMutableDefault(); // Register the settings From d6b384f019e02b341e4c43d938daaa184c3ff4b2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 12 Dec 2022 17:18:28 +0100 Subject: [PATCH 2408/2550] added deprecation warning for 'get_creator_plugin_paths' --- openpype/modules/interfaces.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index d2c0dd5582..e3f54f1694 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -70,6 +70,13 @@ class IPluginPaths(OpenPypeInterface): host_name (str): For which host are the plugins meant. """ + if hasattr(self, "get_creator_plugin_paths"): + # TODO remove in 3.16 + self.log.warning(( + "DEPRECATION WARNING: Using method 'get_creator_plugin_paths'" + " which was renamed to 'get_create_plugin_paths'." 
+ )) + return self.get_creator_plugin_paths(host_name) return self._get_plugin_paths_by_type("create") def get_load_plugin_paths(self, host_name): From b50e60987420762d06435478c8a14706c9c4f969 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 17:37:16 +0100 Subject: [PATCH 2409/2550] OP-4465 - updated filter_profiles to handle arrays in values key_values might now contain arrays. Useful for filtering on 'families' --- openpype/lib/profiles_filtering.py | 31 +++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/openpype/lib/profiles_filtering.py b/openpype/lib/profiles_filtering.py index 370703a68b..e030b19716 100644 --- a/openpype/lib/profiles_filtering.py +++ b/openpype/lib/profiles_filtering.py @@ -79,11 +79,11 @@ def fullmatch(regex, string, flags=0): return None -def validate_value_by_regexes(value, in_list): +def validate_value_by_regexes(values, in_list): """Validates in any regex from list match entered value. Args: - value (str): String where regexes is checked. + values (str|list): String where regexes is checked. in_list (list): List with regexes. Returns: @@ -102,17 +102,21 @@ def validate_value_by_regexes(value, in_list): # If value is not set and in list has specific values then resolve value # as not matching. - if not value: + if not values: return -1 + if isinstance(values, str): + values = [values] + regexes = compile_list_of_regexes(in_list) for regex in regexes: - if hasattr(regex, "fullmatch"): - result = regex.fullmatch(value) - else: - result = fullmatch(regex, value) - if result: - return 1 + for value in values: + if hasattr(regex, "fullmatch"): + result = regex.fullmatch(value) + else: + result = fullmatch(regex, value) + if result: + return 1 return -1 @@ -136,7 +140,8 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): Args: profiles_data (list): Profile definitions as dictionaries. - key_values (dict): Mapping of Key <-> Value. Key is checked if is + key_values (dict): Mapping of Key <-> Value|[Value]. + Key is checked if is available in profile and if Value is matching it's values. keys_order (list, tuple): Order of keys from `key_values` which matters only when multiple profiles have same score. 
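Editor's note (not part of the patch): a minimal usage sketch of what this commit enables, with made-up profile data. A key in key_values may now carry a list, and a profile matches when any element satisfies that profile's regex list for the key; note that a later commit in this series reverts the list handling again.

    from openpype.lib.profiles_filtering import filter_profiles

    profiles = [
        {"hosts": ["maya"], "families": ["render.*"]},
        {"hosts": [], "families": ["review"]},
    ]
    key_values = {
        "hosts": "maya",                    # plain string still works
        "families": ["review", "burnin"],   # list: any element may match
    }
    matched_profile = filter_profiles(profiles, key_values)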
@@ -181,12 +186,12 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): profile_scores = [] for key in keys_order: - value = key_values[key] - match = validate_value_by_regexes(value, profile.get(key)) + values = key_values[key] + match = validate_value_by_regexes(values, profile.get(key)) if match == -1: profile_value = profile.get(key) or [] logger.debug( - "\"{}\" not found in \"{}\": {}".format(value, key, + "\"{}\" not found in \"{}\": {}".format(values, key, profile_value) ) profile_points = -1 From 3296ac68ef1a2469ce68651fe4e6f8c8b4c71ba3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 17:41:51 +0100 Subject: [PATCH 2410/2550] OP-4465 - extract burnin is triggered by profiles in Settings Cleaned up obsolete methods --- openpype/plugins/publish/extract_burnin.py | 233 +++------------------ 1 file changed, 28 insertions(+), 205 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index fd8dfdece9..eab7652ae2 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -1,5 +1,4 @@ import os -import re import json import copy import tempfile @@ -21,6 +20,7 @@ from openpype.lib import ( CREATE_NO_WINDOW ) +from openpype.lib.profiles_filtering import filter_profiles class ExtractBurnin(publish.Extractor): @@ -34,25 +34,7 @@ class ExtractBurnin(publish.Extractor): label = "Extract burnins" order = pyblish.api.ExtractorOrder + 0.03 - families = ["review", "burnin"] - hosts = [ - "nuke", - "maya", - "shell", - "hiero", - "premiere", - "traypublisher", - "standalonepublisher", - "harmony", - "fusion", - "aftereffects", - "tvpaint", - "webpublisher", - "aftereffects", - "photoshop", - "flame" - # "resolve" - ] + optional = True positions = [ @@ -69,11 +51,15 @@ class ExtractBurnin(publish.Extractor): "y_offset": 5 } - # Preset attributes + # Configurable by Settings profiles = None options = None def process(self, instance): + if not self.profiles: + self.log.warning("No profiles present for create burnin") + return + # QUESTION what is this for and should we raise an exception? if "representations" not in instance.data: raise RuntimeError("Burnin needs already created mov to work on.") @@ -137,18 +123,29 @@ class ExtractBurnin(publish.Extractor): return filtered_repres def main_process(self, instance): - # TODO get these data from context - host_name = instance.context.data["hostName"] - task_name = os.environ["AVALON_TASK"] - family = self.main_family_from_instance(instance) + host_name = instance.data["anatomyData"]["app"] + families = list(set(instance.data["family"]).union( + set(instance.data["families"]))) + task_data = instance.data["anatomyData"].get("task", {}) + task_name = task_data.get("name") + task_type = task_data.get("type") + subset = instance.data["subset"] + + filtering_criteria = { + "hosts": host_name, + "families": families, + "task_names": task_name, + "task_types": task_type, + "subset": subset + } + profile = filter_profiles(self.profiles, filtering_criteria, + logger=self.log) - # Find profile most matching current host, task and instance family - profile = self.find_matching_profile(host_name, task_name, family) if not profile: self.log.info(( "Skipped instance. 
None of profiles in presets are for" - " Host: \"{}\" | Family: \"{}\" | Task \"{}\"" - ).format(host_name, family, task_name)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Task type \"{}\" | Subset \"{}\" " + ).format(host_name, families, task_name, task_type, subset)) return self.log.debug("profile: {}".format(profile)) @@ -158,8 +155,8 @@ class ExtractBurnin(publish.Extractor): if not burnin_defs: self.log.info(( "Skipped instance. Burnin definitions are not set for profile" - " Host: \"{}\" | Family: \"{}\" | Task \"{}\" | Profile \"{}\"" - ).format(host_name, family, task_name, profile)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Profile \"{}\"" + ).format(host_name, families, task_name, profile)) return burnin_options = self._get_burnin_options() @@ -693,130 +690,6 @@ class ExtractBurnin(publish.Extractor): ) }) - def find_matching_profile(self, host_name, task_name, family): - """ Filter profiles by Host name, Task name and main Family. - - Filtering keys are "hosts" (list), "tasks" (list), "families" (list). - If key is not find or is empty than it's expected to match. - - Args: - profiles (list): Profiles definition from presets. - host_name (str): Current running host name. - task_name (str): Current context task name. - family (str): Main family of current Instance. - - Returns: - dict/None: Return most matching profile or None if none of profiles - match at least one criteria. - """ - - matching_profiles = None - highest_points = -1 - for profile in self.profiles or tuple(): - profile_points = 0 - profile_value = [] - - # Host filtering - host_names = profile.get("hosts") - match = self.validate_value_by_regexes(host_name, host_names) - if match == -1: - continue - profile_points += match - profile_value.append(bool(match)) - - # Task filtering - task_names = profile.get("tasks") - match = self.validate_value_by_regexes(task_name, task_names) - if match == -1: - continue - profile_points += match - profile_value.append(bool(match)) - - # Family filtering - families = profile.get("families") - match = self.validate_value_by_regexes(family, families) - if match == -1: - continue - profile_points += match - profile_value.append(bool(match)) - - if profile_points > highest_points: - matching_profiles = [] - highest_points = profile_points - - if profile_points == highest_points: - profile["__value__"] = profile_value - matching_profiles.append(profile) - - if not matching_profiles: - return - - if len(matching_profiles) == 1: - return matching_profiles[0] - - return self.profile_exclusion(matching_profiles) - - def profile_exclusion(self, matching_profiles): - """Find out most matching profile by host, task and family match. - - Profiles are selectivelly filtered. Each profile should have - "__value__" key with list of booleans. Each boolean represents - existence of filter for specific key (host, taks, family). - Profiles are looped in sequence. In each sequence are split into - true_list and false_list. For next sequence loop are used profiles in - true_list if there are any profiles else false_list is used. - - Filtering ends when only one profile left in true_list. Or when all - existence booleans loops passed, in that case first profile from left - profiles is returned. - - Args: - matching_profiles (list): Profiles with same values. - - Returns: - dict: Most matching profile. - """ - self.log.info( - "Search for first most matching profile in match order:" - " Host name -> Task name -> Family." - ) - # Filter all profiles with highest points value. 
First filter profiles - # with matching host if there are any then filter profiles by task - # name if there are any and lastly filter by family. Else use first in - # list. - idx = 0 - final_profile = None - while True: - profiles_true = [] - profiles_false = [] - for profile in matching_profiles: - value = profile["__value__"] - # Just use first profile when idx is greater than values. - if not idx < len(value): - final_profile = profile - break - - if value[idx]: - profiles_true.append(profile) - else: - profiles_false.append(profile) - - if final_profile is not None: - break - - if profiles_true: - matching_profiles = profiles_true - else: - matching_profiles = profiles_false - - if len(matching_profiles) == 1: - final_profile = matching_profiles[0] - break - idx += 1 - - final_profile.pop("__value__") - return final_profile - def filter_burnins_defs(self, profile, instance): """Filter outputs by their values from settings. @@ -909,56 +782,6 @@ class ExtractBurnin(publish.Extractor): return True return False - def compile_list_of_regexes(self, in_list): - """Convert strings in entered list to compiled regex objects.""" - regexes = [] - if not in_list: - return regexes - - for item in in_list: - if not item: - continue - - try: - regexes.append(re.compile(item)) - except TypeError: - self.log.warning(( - "Invalid type \"{}\" value \"{}\"." - " Expected string based object. Skipping." - ).format(str(type(item)), str(item))) - - return regexes - - def validate_value_by_regexes(self, value, in_list): - """Validate in any regexe from list match entered value. - - Args: - in_list (list): List with regexes. - value (str): String where regexes is checked. - - Returns: - int: Returns `0` when list is not set or is empty. Returns `1` when - any regex match value and returns `-1` when none of regexes - match value entered. - """ - if not in_list: - return 0 - - output = -1 - regexes = self.compile_list_of_regexes(in_list) - for regex in regexes: - if re.match(regex, value): - output = 1 - break - return output - - def main_family_from_instance(self, instance): - """Return main family of entered instance.""" - family = instance.data.get("family") - if not family: - family = instance.data["families"][0] - return family - def families_from_instance(self, instance): """Return all families of entered instance.""" families = [] From e82f831c5abbccd3fbe98b4483f68f3c105866d3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 18:13:21 +0100 Subject: [PATCH 2411/2550] Revert "OP-4465 - updated filter_profiles to handle arrays in values" This reverts commit b50e60987420762d06435478c8a14706c9c4f969. --- openpype/lib/profiles_filtering.py | 31 +++++++++++++----------------- 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/openpype/lib/profiles_filtering.py b/openpype/lib/profiles_filtering.py index e030b19716..370703a68b 100644 --- a/openpype/lib/profiles_filtering.py +++ b/openpype/lib/profiles_filtering.py @@ -79,11 +79,11 @@ def fullmatch(regex, string, flags=0): return None -def validate_value_by_regexes(values, in_list): +def validate_value_by_regexes(value, in_list): """Validates in any regex from list match entered value. Args: - values (str|list): String where regexes is checked. + value (str): String where regexes is checked. in_list (list): List with regexes. Returns: @@ -102,21 +102,17 @@ def validate_value_by_regexes(values, in_list): # If value is not set and in list has specific values then resolve value # as not matching. 
- if not values: + if not value: return -1 - if isinstance(values, str): - values = [values] - regexes = compile_list_of_regexes(in_list) for regex in regexes: - for value in values: - if hasattr(regex, "fullmatch"): - result = regex.fullmatch(value) - else: - result = fullmatch(regex, value) - if result: - return 1 + if hasattr(regex, "fullmatch"): + result = regex.fullmatch(value) + else: + result = fullmatch(regex, value) + if result: + return 1 return -1 @@ -140,8 +136,7 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): Args: profiles_data (list): Profile definitions as dictionaries. - key_values (dict): Mapping of Key <-> Value|[Value]. - Key is checked if is + key_values (dict): Mapping of Key <-> Value. Key is checked if is available in profile and if Value is matching it's values. keys_order (list, tuple): Order of keys from `key_values` which matters only when multiple profiles have same score. @@ -186,12 +181,12 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): profile_scores = [] for key in keys_order: - values = key_values[key] - match = validate_value_by_regexes(values, profile.get(key)) + value = key_values[key] + match = validate_value_by_regexes(value, profile.get(key)) if match == -1: profile_value = profile.get(key) or [] logger.debug( - "\"{}\" not found in \"{}\": {}".format(values, key, + "\"{}\" not found in \"{}\": {}".format(value, key, profile_value) ) profile_points = -1 From a91ae985fbf481560bd9fc28eb78edf020250bff Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Dec 2022 18:15:05 +0100 Subject: [PATCH 2412/2550] Removed unused import --- openpype/tools/settings/settings/dict_mutable_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/dict_mutable_widget.py b/openpype/tools/settings/settings/dict_mutable_widget.py index b9932da789..27c9392320 100644 --- a/openpype/tools/settings/settings/dict_mutable_widget.py +++ b/openpype/tools/settings/settings/dict_mutable_widget.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from qtpy import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore from .base import BaseWidget from .lib import ( From 0ee65517f4e34cdbda225f9c472f129375f43cc4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 18:26:54 +0100 Subject: [PATCH 2413/2550] OP-4465 - revert wrong defaults Values here were copied from Pyblish filtering, should not be used for profiles. --- openpype/settings/defaults/project_settings/global.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 89d7cf08b7..8c56500646 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -209,6 +209,9 @@ { "families": [], "hosts": [], + "task_types": [], + "task_names": [], + "subsets": [], "burnins": { "burnin": { "TOP_LEFT": "{yy}-{mm}-{dd}", From ec3e04699877d08b070ed4cfb1fbd0fda3d2a2b5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 18:30:55 +0100 Subject: [PATCH 2414/2550] OP-4465 - revert deleted filters families and hosts here are meant for Pyblish filtering, they should stay here for now. 
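Editor's note (not part of the patch): the distinction preserved by this revert is that the `hosts` and `families` class attributes drive pyblish's own plugin-to-instance matching, while the Settings-driven profiles are evaluated per instance inside `process`. A minimal sketch of the class-level side, with made-up values:

    import pyblish.api

    class ExampleBurninExtractor(pyblish.api.InstancePlugin):
        # pyblish only runs this plugin for instances whose family and
        # current host match these attributes...
        order = pyblish.api.ExtractorOrder + 0.03
        families = ["review", "burnin"]
        hosts = ["nuke", "maya"]

        def process(self, instance):
            # ...whereas profile filtering (filter_profiles) happens in
            # here, per instance, using values from the project Settings.
            pass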
--- openpype/plugins/publish/extract_burnin.py | 37 +++++++++++++++++----- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index eab7652ae2..f113e61bb0 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -35,6 +35,26 @@ class ExtractBurnin(publish.Extractor): label = "Extract burnins" order = pyblish.api.ExtractorOrder + 0.03 + families = ["review", "burnin"] + hosts = [ + "nuke", + "maya", + "shell", + "hiero", + "premiere", + "traypublisher", + "standalonepublisher", + "harmony", + "fusion", + "aftereffects", + "tvpaint", + "webpublisher", + "aftereffects", + "photoshop", + "flame" + # "resolve" + ] + optional = True positions = [ @@ -123,9 +143,8 @@ class ExtractBurnin(publish.Extractor): return filtered_repres def main_process(self, instance): - host_name = instance.data["anatomyData"]["app"] - families = list(set(instance.data["family"]).union( - set(instance.data["families"]))) + host_name = instance.context.data["hostName"] + family = instance.data["family"] task_data = instance.data["anatomyData"].get("task", {}) task_name = task_data.get("name") task_type = task_data.get("type") @@ -133,7 +152,7 @@ class ExtractBurnin(publish.Extractor): filtering_criteria = { "hosts": host_name, - "families": families, + "families": family, "task_names": task_name, "task_types": task_type, "subset": subset @@ -144,8 +163,9 @@ class ExtractBurnin(publish.Extractor): if not profile: self.log.info(( "Skipped instance. None of profiles in presets are for" - " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Task type \"{}\" | Subset \"{}\" " - ).format(host_name, families, task_name, task_type, subset)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\"" + " | Task type \"{}\" | Subset \"{}\" " + ).format(host_name, family, task_name, task_type, subset)) return self.log.debug("profile: {}".format(profile)) @@ -155,8 +175,9 @@ class ExtractBurnin(publish.Extractor): if not burnin_defs: self.log.info(( "Skipped instance. Burnin definitions are not set for profile" - " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Profile \"{}\"" - ).format(host_name, families, task_name, profile)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\"" + " | Profile \"{}\"" + ).format(host_name, family, task_name, profile)) return burnin_options = self._get_burnin_options() From 76f1cbc2e3b93df36cc7bdc140e86ec5d0051f14 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 12 Dec 2022 18:31:19 +0100 Subject: [PATCH 2415/2550] print help instead of invokinf 'interactive' command --- openpype/cli.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index 7611915d84..2c32220522 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -34,7 +34,8 @@ def main(ctx): # Default command for headless openpype is 'interactive' command # otherwise 'tray' is used. 
if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1": - ctx.invoke(interactive) + print(ctx.get_help()) + sys.exit(0) else: ctx.invoke(tray) From b05bcf94258c52578f9758205e3fb74d220e645b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 12 Dec 2022 18:34:07 +0100 Subject: [PATCH 2416/2550] Change comment --- openpype/cli.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 2c32220522..897106c35f 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -31,8 +31,7 @@ def main(ctx): """ if ctx.invoked_subcommand is None: - # Default command for headless openpype is 'interactive' command - # otherwise 'tray' is used. + # Print help if headless mode is used if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1": print(ctx.get_help()) sys.exit(0) From 7e129a8526f92ea2f2b92e959e673ff528beeb8a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 13 Dec 2022 11:39:23 +0100 Subject: [PATCH 2417/2550] OP-4465 - added missed settings schema --- .../schemas/schema_global_publish.json | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index f2ada5fd8d..5388d04bc9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -526,11 +526,28 @@ "object_type": "text" }, { - "type": "hosts-enum", "key": "hosts", - "label": "Hosts", + "label": "Host names", + "type": "hosts-enum", "multiselection": true }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "key": "subsets", + "label": "Subset names", + "type": "list", + "object_type": "text" + }, { "type": "splitter" }, From 54780b477aa1303585e2bfe19fd57ab6d36a087c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 14:00:24 +0100 Subject: [PATCH 2418/2550] use new interface class name for publish host --- openpype/hosts/houdini/api/pipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b0791fcb6c..f8e2c16d21 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -7,7 +7,7 @@ import contextlib import hou # noqa -from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher +from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost import pyblish.api @@ -40,7 +40,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "houdini" def __init__(self): From c85594c324e83bf1a8a95ee11f1caaed7c82f1d3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 13 Dec 2022 15:06:40 +0100 Subject: [PATCH 2419/2550] OP-4512 - fix normalize local name 'local' should be returned for local site only --- openpype/modules/sync_server/providers/local_drive.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 
8f55dc529b..98bdb487da 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -5,6 +5,7 @@ import threading import time from openpype.lib import Logger +from openpype.lib.local_settings import get_local_site_id from openpype.pipeline import Anatomy from .abstract_provider import AbstractProvider @@ -220,6 +221,6 @@ class LocalDriveHandler(AbstractProvider): def _normalize_site_name(self, site_name): """Transform user id to 'local' for Local settings""" - if site_name != 'studio': + if site_name == get_local_site_id(): return 'local' return site_name From 8a40489d28a4aab9af9cdf047e83afda729515ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:16:29 +0100 Subject: [PATCH 2420/2550] launcher is using qtpy import instead of Qt --- openpype/tools/launcher/actions.py | 2 +- openpype/tools/launcher/constants.py | 2 +- openpype/tools/launcher/delegates.py | 2 +- openpype/tools/launcher/lib.py | 2 +- openpype/tools/launcher/models.py | 2 +- openpype/tools/launcher/widgets.py | 2 +- openpype/tools/launcher/window.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py index 34d06f72cc..61660ee9b7 100644 --- a/openpype/tools/launcher/actions.py +++ b/openpype/tools/launcher/actions.py @@ -1,6 +1,6 @@ import os -from Qt import QtWidgets, QtGui +from qtpy import QtWidgets, QtGui from openpype import PLUGINS_DIR from openpype import style diff --git a/openpype/tools/launcher/constants.py b/openpype/tools/launcher/constants.py index 61f631759b..cb0049055c 100644 --- a/openpype/tools/launcher/constants.py +++ b/openpype/tools/launcher/constants.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore ACTION_ROLE = QtCore.Qt.UserRole diff --git a/openpype/tools/launcher/delegates.py b/openpype/tools/launcher/delegates.py index 7b53658727..02a40861d2 100644 --- a/openpype/tools/launcher/delegates.py +++ b/openpype/tools/launcher/delegates.py @@ -1,5 +1,5 @@ import time -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from .constants import ( ANIMATION_START_ROLE, ANIMATION_STATE_ROLE, diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index 68e57c6b92..2507b6eddc 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -1,5 +1,5 @@ import os -from Qt import QtGui +from qtpy import QtGui import qtawesome from openpype import resources diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 6e3b531018..6c763544a9 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -6,7 +6,7 @@ import collections import time import appdirs -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui import qtawesome from openpype.client import ( diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 774ceb659d..3eb641bdb3 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -1,7 +1,7 @@ import copy import time import collections -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from openpype.tools.flickcharm import FlickCharm diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index a9eaa932bb..f68fc4befc 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -1,7 +1,7 @@ import copy import 
logging -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype import resources From c8872b9b6e7674a079dcbbfc21ff1492f8fb32b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:18:21 +0100 Subject: [PATCH 2421/2550] tray is using qtpy instead of Qt --- openpype/tools/tray/pype_info_widget.py | 2 +- openpype/tools/tray/pype_tray.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py index 232d2024ac..c616ad4dba 100644 --- a/openpype/tools/tray/pype_info_widget.py +++ b/openpype/tools/tray/pype_info_widget.py @@ -2,7 +2,7 @@ import os import json import collections -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import style from openpype import resources diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index d4189af4d8..df18325bec 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -6,7 +6,7 @@ import subprocess import platform -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets import openpype.version from openpype import resources, style From 23e9f120cf6152228b1f6c0ee735889f0433fb5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 13 Dec 2022 15:19:13 +0100 Subject: [PATCH 2422/2550] :recycle: fix 5.1 compatibility --- .../UE_5.0/Source/OpenPype/Private/AssetContainer.cpp | 8 ++++---- .../Source/OpenPype/Private/OpenPypePublishInstance.cpp | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp index c766f87a8e..4965ae2aab 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp @@ -30,8 +30,8 @@ void UAssetContainer::OnAssetAdded(const FAssetData& AssetData) // get asset path and class FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - + FString assetFName = AssetData.AssetClassPath.ToString(); + UE_LOG(LogTemp, Log, TEXT("asset name %s"), *assetFName); // split path assetPath.ParseIntoArray(split, TEXT(" "), true); @@ -60,7 +60,7 @@ void UAssetContainer::OnAssetRemoved(const FAssetData& AssetData) // get asset path and class FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + FString assetFName = AssetData.AssetClassPath.ToString(); // split path assetPath.ParseIntoArray(split, TEXT(" "), true); @@ -93,7 +93,7 @@ void UAssetContainer::OnAssetRenamed(const FAssetData& AssetData, const FString& // get asset path and class FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + FString assetFName = AssetData.AssetClassPath.ToString(); // split path assetPath.ParseIntoArray(split, TEXT(" "), true); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 322663eeec..c432ebb7e4 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ 
b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -4,7 +4,7 @@ #include "AssetRegistryModule.h" #include "AssetToolsModule.h" #include "Framework/Notifications/NotificationManager.h" -#include "SNotificationList.h" +#include "Widgets/Notifications/SNotificationList.h" //Moves all the invalid pointers to the end to prepare them for the shrinking #define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ @@ -47,7 +47,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (!IsValid(Asset)) { UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."), - *InAssetData.ObjectPath.ToString()); + *InAssetData.GetObjectPathString()); return; } From b487b90fa0ac0943b02bc7b61575163780fb20fd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 13 Dec 2022 15:19:29 +0100 Subject: [PATCH 2423/2550] :recycle: update build options --- .../integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs index fcfd268234..67db648b2a 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs @@ -6,7 +6,11 @@ public class OpenPype : ModuleRules { public OpenPype(ReadOnlyTargetRules Target) : base(Target) { + DefaultBuildSettings = BuildSettingsVersion.V2; + bLegacyPublicIncludePaths = false; + ShadowVariableWarningLevel = WarningLevel.Error; PCHUsage = ModuleRules.PCHUsageMode.UseExplicitOrSharedPCHs; + IncludeOrderVersion = EngineIncludeOrderVersion.Unreal5_0; PublicIncludePaths.AddRange( new string[] { From 4a69f9c6668c6b79a474889fae64724da9334569 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:20:41 +0100 Subject: [PATCH 2424/2550] standalone publisher is using qtpy instead of Qt --- openpype/tools/standalonepublish/app.py | 2 +- openpype/tools/standalonepublish/widgets/__init__.py | 2 +- openpype/tools/standalonepublish/widgets/model_asset.py | 2 +- .../standalonepublish/widgets/model_filter_proxy_exact_match.py | 2 +- .../widgets/model_filter_proxy_recursive_sort.py | 2 +- .../tools/standalonepublish/widgets/model_tasks_template.py | 2 +- openpype/tools/standalonepublish/widgets/model_tree.py | 2 +- .../standalonepublish/widgets/model_tree_view_deselectable.py | 2 +- openpype/tools/standalonepublish/widgets/widget_asset.py | 2 +- .../tools/standalonepublish/widgets/widget_component_item.py | 2 +- openpype/tools/standalonepublish/widgets/widget_components.py | 2 +- .../tools/standalonepublish/widgets/widget_components_list.py | 2 +- openpype/tools/standalonepublish/widgets/widget_drop_empty.py | 2 +- openpype/tools/standalonepublish/widgets/widget_drop_frame.py | 2 +- openpype/tools/standalonepublish/widgets/widget_family.py | 2 +- openpype/tools/standalonepublish/widgets/widget_family_desc.py | 2 +- openpype/tools/standalonepublish/widgets/widget_shadow.py | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py index c93c33b2a5..d71c205c3b 100644 --- a/openpype/tools/standalonepublish/app.py +++ b/openpype/tools/standalonepublish/app.py @@ -4,7 +4,7 @@ import ctypes import signal from bson.objectid import ObjectId -from Qt import QtWidgets, QtCore, QtGui +from qtpy import 
QtWidgets, QtCore, QtGui from openpype.client import get_asset_by_id diff --git a/openpype/tools/standalonepublish/widgets/__init__.py b/openpype/tools/standalonepublish/widgets/__init__.py index e61897f807..d79654498d 100644 --- a/openpype/tools/standalonepublish/widgets/__init__.py +++ b/openpype/tools/standalonepublish/widgets/__init__.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore HelpRole = QtCore.Qt.UserRole + 2 FamilyRole = QtCore.Qt.UserRole + 3 diff --git a/openpype/tools/standalonepublish/widgets/model_asset.py b/openpype/tools/standalonepublish/widgets/model_asset.py index 9fed46b3fe..2f67036e78 100644 --- a/openpype/tools/standalonepublish/widgets/model_asset.py +++ b/openpype/tools/standalonepublish/widgets/model_asset.py @@ -1,7 +1,7 @@ import logging import collections -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui import qtawesome from openpype.client import get_assets diff --git a/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py b/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py index 604ae30934..df9c6fb35f 100644 --- a/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py +++ b/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore class ExactMatchesFilterProxyModel(QtCore.QSortFilterProxyModel): diff --git a/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py b/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py index 71ecdf41dc..727d3a97d7 100644 --- a/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py +++ b/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py @@ -1,5 +1,5 @@ -from Qt import QtCore import re +from qtpy import QtCore class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel): diff --git a/openpype/tools/standalonepublish/widgets/model_tasks_template.py b/openpype/tools/standalonepublish/widgets/model_tasks_template.py index 648f7ed479..e22a4e3bf8 100644 --- a/openpype/tools/standalonepublish/widgets/model_tasks_template.py +++ b/openpype/tools/standalonepublish/widgets/model_tasks_template.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore import qtawesome from openpype.style import get_default_entity_icon_color diff --git a/openpype/tools/standalonepublish/widgets/model_tree.py b/openpype/tools/standalonepublish/widgets/model_tree.py index efac0d6b78..040e95d944 100644 --- a/openpype/tools/standalonepublish/widgets/model_tree.py +++ b/openpype/tools/standalonepublish/widgets/model_tree.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore from . 
import Node diff --git a/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py b/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py index 6a15916981..3c8c760eca 100644 --- a/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py +++ b/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore class DeselectableTreeView(QtWidgets.QTreeView): diff --git a/openpype/tools/standalonepublish/widgets/widget_asset.py b/openpype/tools/standalonepublish/widgets/widget_asset.py index 77d756a606..01f49b79ec 100644 --- a/openpype/tools/standalonepublish/widgets/widget_asset.py +++ b/openpype/tools/standalonepublish/widgets/widget_asset.py @@ -1,5 +1,5 @@ import contextlib -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome from openpype.client import ( diff --git a/openpype/tools/standalonepublish/widgets/widget_component_item.py b/openpype/tools/standalonepublish/widgets/widget_component_item.py index de3cde50cd..523c3977e3 100644 --- a/openpype/tools/standalonepublish/widgets/widget_component_item.py +++ b/openpype/tools/standalonepublish/widgets/widget_component_item.py @@ -1,5 +1,5 @@ import os -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from .resources import get_resource diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py index 237e1da583..a86ac845f2 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components.py +++ b/openpype/tools/standalonepublish/widgets/widget_components.py @@ -4,7 +4,7 @@ import tempfile import random import string -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.pipeline import legacy_io from openpype.lib import ( diff --git a/openpype/tools/standalonepublish/widgets/widget_components_list.py b/openpype/tools/standalonepublish/widgets/widget_components_list.py index 0ee90ae4de..e29ab3c127 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components_list.py +++ b/openpype/tools/standalonepublish/widgets/widget_components_list.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets class ComponentsList(QtWidgets.QTableWidget): diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_empty.py b/openpype/tools/standalonepublish/widgets/widget_drop_empty.py index a890f38426..110e4d6353 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_empty.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_empty.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui class DropEmpty(QtWidgets.QWidget): diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index 18c2b27678..f46e31786c 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -4,7 +4,7 @@ import json import clique import subprocess import openpype.lib -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from . 
import DropEmpty, ComponentsList, ComponentItem diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index e1cbb8d397..11c5ec33b7 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -1,6 +1,6 @@ import re -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.client import ( get_asset_by_name, diff --git a/openpype/tools/standalonepublish/widgets/widget_family_desc.py b/openpype/tools/standalonepublish/widgets/widget_family_desc.py index 2095b332bd..33174a852b 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family_desc.py +++ b/openpype/tools/standalonepublish/widgets/widget_family_desc.py @@ -1,5 +1,5 @@ import six -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from . import FamilyRole, PluginRole diff --git a/openpype/tools/standalonepublish/widgets/widget_shadow.py b/openpype/tools/standalonepublish/widgets/widget_shadow.py index de5fdf6be0..64cb9544fa 100644 --- a/openpype/tools/standalonepublish/widgets/widget_shadow.py +++ b/openpype/tools/standalonepublish/widgets/widget_shadow.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui class ShadowWidget(QtWidgets.QWidget): From 69b0c23056e6c81ac41b5be9bdb75ac75505cb9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:21:00 +0100 Subject: [PATCH 2425/2550] stdbroker is using qtpy instead of Qt --- openpype/tools/stdout_broker/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/stdout_broker/window.py b/openpype/tools/stdout_broker/window.py index f5720ca05b..5825da73e2 100644 --- a/openpype/tools/stdout_broker/window.py +++ b/openpype/tools/stdout_broker/window.py @@ -1,7 +1,7 @@ import re import collections -from Qt import QtWidgets +from qtpy import QtWidgets from openpype import style From 7d2229df4ab5164c98a1b811a06eae818bd04243 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Dec 2022 15:30:59 +0100 Subject: [PATCH 2426/2550] flame: replicating multilayer rename to reel clip loader --- .../hosts/flame/plugins/load/load_clip.py | 20 ++++++++++++++++++- .../defaults/project_settings/flame.json | 6 +++++- .../projects_schema/schema_project_flame.json | 11 ++++++++++ 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index f8cb7b3e11..2c107de2b4 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -1,3 +1,4 @@ +from copy import deepcopy import os import flame from pprint import pformat @@ -25,6 +26,14 @@ class LoadClip(opfapi.ClipLoader): reel_name = "Loaded" clip_name_template = "{asset}_{subset}<_{output}>" + """ Anatomy keys from version context data and dynamically added: + - {layerName} - original layer name token + - {layerUID} - original layer UID token + - {originalBasename} - original clip name taken from file + """ + layer_rename_template = "{asset}_{subset}<_{output}>" + layer_rename_patterns = [] + def load(self, context, name, namespace, options): # get flame objects @@ -38,8 +47,14 @@ class LoadClip(opfapi.ClipLoader): version_name = version.get("name", None) colorspace = self.get_colorspace(context) + # in case output is not in context replace key to representation + if not 
context["representation"]["context"].get("output"): + self.clip_name_template.replace("output", "representation") + self.layer_rename_template.replace("output", "representation") + + formating_data = deepcopy(context["representation"]["context"]) clip_name = StringTemplate(self.clip_name_template).format( - context["representation"]["context"]) + formating_data) # convert colorspace with ocio to flame mapping # in imageio flame section @@ -62,6 +77,9 @@ class LoadClip(opfapi.ClipLoader): "path": self.fname.replace("\\", "/"), "colorspace": colorspace, "version": "v{:0>3}".format(version_name), + "layer_rename_template": self.layer_rename_template, + "layer_rename_patterns": self.layer_rename_patterns, + "context_data": formating_data } self.log.debug(pformat( loading_context diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index 9966fdbd33..337e58ac62 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -119,7 +119,11 @@ ], "reel_group_name": "OpenPype_Reels", "reel_name": "Loaded", - "clip_name_template": "{asset}_{subset}<_{output}>" + "clip_name_template": "{asset}_{subset}<_{output}>", + "layer_rename_template": "{asset}_{subset}<_{output}>", + "layer_rename_patterns": [ + "rgba" + ] }, "LoadClipBatch": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 26a2dce2f5..24726f2d07 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -512,6 +512,17 @@ "type": "text", "key": "clip_name_template", "label": "Clip name template" + }, + { + "type": "text", + "key": "layer_rename_template", + "label": "Layer name template" + }, + { + "type": "list", + "key": "layer_rename_patterns", + "label": "Layer rename patters", + "object_type": "text" } ] }, From 4d0e8180674eb0db2ba402fc8532ac9624309dcf Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 13 Dec 2022 15:33:15 +0100 Subject: [PATCH 2427/2550] :recycle: fix includes --- .../UE_5.0/Source/OpenPype/Private/AssetContainer.cpp | 2 +- .../UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp | 2 +- .../integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp index 4965ae2aab..61e563f729 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp @@ -1,7 +1,7 @@ // Fill out your copyright notice in the Description page of Project Settings. 
#include "AssetContainer.h" -#include "AssetRegistryModule.h" +#include "AssetRegistry/AssetRegistryModule.h" #include "Misc/PackageName.h" #include "Engine.h" #include "Containers/UnrealString.h" diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index c432ebb7e4..f5eb6f9e70 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -1,7 +1,7 @@ #pragma once #include "OpenPypePublishInstance.h" -#include "AssetRegistryModule.h" +#include "AssetRegistry/AssetRegistryModule.h" #include "AssetToolsModule.h" #include "Framework/Notifications/NotificationManager.h" #include "Widgets/Notifications/SNotificationList.h" diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h index 3c2a360c78..2c06e59d6f 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h @@ -5,7 +5,7 @@ #include "CoreMinimal.h" #include "UObject/NoExportTypes.h" #include "Engine/AssetUserData.h" -#include "AssetData.h" +#include "AssetRegistry/AssetData.h" #include "AssetContainer.generated.h" /** From d931198797a02d908803f92f53b6d1632e153b68 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 13 Dec 2022 23:28:15 +0800 Subject: [PATCH 2428/2550] bug fix image plane load error --- .../maya/plugins/load/load_image_plane.py | 59 +++++++++++-------- 1 file changed, 34 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index b267921bdc..82c2676982 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -23,8 +23,8 @@ class CameraWindow(QtWidgets.QDialog): self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) self.camera = None - self.static_image_plane = False - self.show_in_all_views = False + # self.static_image_plane = False + # self.show_in_all_views = False self.widgets = { "label": QtWidgets.QLabel("Select camera for image plane."), @@ -45,8 +45,8 @@ class CameraWindow(QtWidgets.QDialog): for camera in cameras: self.widgets["list"].addItem(camera) - self.widgets["staticImagePlane"].setText("Make Image Plane Static") - self.widgets["showInAllViews"].setText("Show Image Plane in All Views") + # self.widgets["staticImagePlane"].setText("Make Image Plane Static") + # self.widgets["showInAllViews"].setText("Show Image Plane in All Views") # Build buttons. 
layout = QtWidgets.QHBoxLayout(self.widgets["buttons"]) @@ -57,13 +57,13 @@ class CameraWindow(QtWidgets.QDialog): layout = QtWidgets.QVBoxLayout(self) layout.addWidget(self.widgets["label"]) layout.addWidget(self.widgets["list"]) - layout.addWidget(self.widgets["staticImagePlane"]) - layout.addWidget(self.widgets["showInAllViews"]) + # layout.addWidget(self.widgets["staticImagePlane"]) + # layout.addWidget(self.widgets["showInAllViews"]) layout.addWidget(self.widgets["buttons"]) layout.addWidget(self.widgets["warning"]) self.widgets["okButton"].pressed.connect(self.on_ok_pressed) - self.widgets["cancelButton"].pressed.connect(self.on_cancel_pressed) + self.widgets["cancelButton"].clicked.connect(self.on_cancel_pressed) self.widgets["list"].itemPressed.connect(self.on_list_itemPressed) def on_list_itemPressed(self, item): @@ -73,8 +73,8 @@ class CameraWindow(QtWidgets.QDialog): if self.camera is None: self.widgets["warning"].setVisible(True) return - self.show_in_all_views = self.widgets["showInAllViews"].isChecked() - self.static_image_plane = self.widgets["staticImagePlane"].isChecked() + # self.show_in_all_views = self.widgets["showInAllViews"].isChecked() + # self.static_image_plane = self.widgets["staticImagePlane"].isChecked() self.close() @@ -106,12 +106,12 @@ class ImagePlaneLoader(load.LoaderPlugin): # Get camera from user selection. camera = None - is_static_image_plane = None - is_in_all_views = None + # is_static_image_plane = None + # is_in_all_views = None if data: camera = pm.PyNode(data.get("camera")) - is_static_image_plane = data.get("static_image_plane") - is_in_all_views = data.get("in_all_views") + # is_static_image_plane = data.get("static_image_plane") + # is_in_all_views = data.get("in_all_views") if not camera: cameras = pm.ls(type="camera") @@ -121,8 +121,8 @@ class ImagePlaneLoader(load.LoaderPlugin): window.exec_() camera = camera_names[window.camera] - is_static_image_plane = window.static_image_plane - is_in_all_views = window.show_in_all_views + # is_static_image_plane = window.static_image_plane + # is_in_all_views = window.show_in_all_views if camera == "create_camera": camera = pm.createNode("camera") @@ -139,18 +139,20 @@ class ImagePlaneLoader(load.LoaderPlugin): # Create image plane image_plane_transform, image_plane_shape = pm.imagePlane( fileName=context["representation"]["data"]["path"], - camera=camera, showInAllViews=is_in_all_views - ) + camera=camera) image_plane_shape.depth.set(image_plane_depth) - if is_static_image_plane: - image_plane_shape.detach() - image_plane_transform.setRotation(camera.getRotation()) + # if is_static_image_plane: + # image_plane_shape.detach() + # image_plane_transform.setRotation(camera.getRotation()) start_frame = pm.playbackOptions(q=True, min=True) end_frame = pm.playbackOptions(q=True, max=True) - image_plane_shape.frameOffset.set(1 - start_frame) + if int(start_frame) > 0: + image_plane_shape.frameOffset.set(int(start_frame)- 1) + else: + image_plane_shape.frameOffset.set(int(start_frame)) image_plane_shape.frameIn.set(start_frame) image_plane_shape.frameOut.set(end_frame) image_plane_shape.frameCache.set(end_frame) @@ -180,9 +182,13 @@ class ImagePlaneLoader(load.LoaderPlugin): QtWidgets.QMessageBox.Cancel ) if reply == QtWidgets.QMessageBox.Ok: - pm.delete( - image_plane_shape.listConnections(type="expression")[0] - ) + expressions = image_plane_shape.frameExtension.inputs(type="expression") + if expressions: + pm.delete(expressions) + + if not image_plane_shape.frameExtension.isFreeToChange(): + raise 
RuntimeError("Can't set frame extension for {}".format(image_plane_shape)) + image_plane_shape.frameExtension.set(start_frame) new_nodes.extend( @@ -233,7 +239,10 @@ class ImagePlaneLoader(load.LoaderPlugin): ) start_frame = asset["data"]["frameStart"] end_frame = asset["data"]["frameEnd"] - image_plane_shape.frameOffset.set(1 - start_frame) + if int(start_frame) > 0: + image_plane_shape.frameOffset.set(int(start_frame)- 1) + else: + image_plane_shape.frameOffset.set(int(start_frame)) image_plane_shape.frameIn.set(start_frame) image_plane_shape.frameOut.set(end_frame) image_plane_shape.frameCache.set(end_frame) From e989dbecdb28b7b7bfb8b2d415d081bed919b279 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:19:06 +0100 Subject: [PATCH 2429/2550] project manager is using qtpy instead of Qt module --- openpype/tools/project_manager/project_manager/__init__.py | 2 +- openpype/tools/project_manager/project_manager/constants.py | 2 +- openpype/tools/project_manager/project_manager/delegates.py | 2 +- openpype/tools/project_manager/project_manager/model.py | 2 +- .../project_manager/project_manager/multiselection_combobox.py | 2 +- openpype/tools/project_manager/project_manager/style.py | 2 +- openpype/tools/project_manager/project_manager/view.py | 2 +- openpype/tools/project_manager/project_manager/widgets.py | 2 +- openpype/tools/project_manager/project_manager/window.py | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/__init__.py b/openpype/tools/project_manager/project_manager/__init__.py index 6e44afd841..ac4e3d5f39 100644 --- a/openpype/tools/project_manager/project_manager/__init__.py +++ b/openpype/tools/project_manager/project_manager/__init__.py @@ -44,7 +44,7 @@ from .window import ProjectManagerWindow def main(): import sys - from Qt import QtWidgets + from qtpy import QtWidgets app = QtWidgets.QApplication([]) diff --git a/openpype/tools/project_manager/project_manager/constants.py b/openpype/tools/project_manager/project_manager/constants.py index 7ca4aa9492..72512d797b 100644 --- a/openpype/tools/project_manager/project_manager/constants.py +++ b/openpype/tools/project_manager/project_manager/constants.py @@ -1,5 +1,5 @@ import re -from Qt import QtCore +from qtpy import QtCore # Item identifier (unique ID - uuid4 is used) diff --git a/openpype/tools/project_manager/project_manager/delegates.py b/openpype/tools/project_manager/project_manager/delegates.py index b066bbb159..79e9554b0f 100644 --- a/openpype/tools/project_manager/project_manager/delegates.py +++ b/openpype/tools/project_manager/project_manager/delegates.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from .widgets import ( NameTextEdit, diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 6f40140e5e..29a26f700f 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -5,7 +5,7 @@ from uuid import uuid4 from pymongo import UpdateOne, DeleteOne -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui from openpype.client import ( get_projects, diff --git a/openpype/tools/project_manager/project_manager/multiselection_combobox.py b/openpype/tools/project_manager/project_manager/multiselection_combobox.py index f776831298..f12f402d1a 100644 --- a/openpype/tools/project_manager/project_manager/multiselection_combobox.py +++ 
b/openpype/tools/project_manager/project_manager/multiselection_combobox.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtWidgets +from qtpy import QtCore, QtWidgets class ComboItemDelegate(QtWidgets.QStyledItemDelegate): diff --git a/openpype/tools/project_manager/project_manager/style.py b/openpype/tools/project_manager/project_manager/style.py index 4405d05960..6445bc341d 100644 --- a/openpype/tools/project_manager/project_manager/style.py +++ b/openpype/tools/project_manager/project_manager/style.py @@ -1,5 +1,5 @@ import os -from Qt import QtGui +from qtpy import QtGui import qtawesome from openpype.tools.utils import paint_image_with_color diff --git a/openpype/tools/project_manager/project_manager/view.py b/openpype/tools/project_manager/project_manager/view.py index 8d1fe54e83..609db30a81 100644 --- a/openpype/tools/project_manager/project_manager/view.py +++ b/openpype/tools/project_manager/project_manager/view.py @@ -1,7 +1,7 @@ import collections from queue import Queue -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.client import get_project from .delegates import ( diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 4bc968347a..06ae06e4d2 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -16,7 +16,7 @@ from openpype.tools.utils import ( get_warning_pixmap ) -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui class NameTextEdit(QtWidgets.QLineEdit): diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py index 3b2dea8ca3..e35922cf36 100644 --- a/openpype/tools/project_manager/project_manager/window.py +++ b/openpype/tools/project_manager/project_manager/window.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import resources from openpype.style import load_stylesheet From bd17e0568214475ae3ae6219d411b98d6b9263dc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:20:39 +0100 Subject: [PATCH 2430/2550] use qtpy in context dialog --- openpype/tools/context_dialog/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/context_dialog/window.py b/openpype/tools/context_dialog/window.py index 3b544bd375..86c53b55c5 100644 --- a/openpype/tools/context_dialog/window.py +++ b/openpype/tools/context_dialog/window.py @@ -1,7 +1,7 @@ import os import json -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype.pipeline import AvalonMongoDB From 4d98f540739961fb746cf010bfb7329c29fbe95c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:22:35 +0100 Subject: [PATCH 2431/2550] traypublisher is using qtpy --- openpype/tools/traypublisher/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index dfe06d149d..3007fa66a5 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -8,7 +8,7 @@ publishing plugins. 
import platform -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome import appdirs From b7afb84d6ca19df23b5bcfef7e451bfc65e749f7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 13 Dec 2022 17:28:20 +0100 Subject: [PATCH 2432/2550] Fix - join needs list --- openpype/tools/attribute_defs/files_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/attribute_defs/files_widget.py b/openpype/tools/attribute_defs/files_widget.py index 738e50ba07..2c8ed729c2 100644 --- a/openpype/tools/attribute_defs/files_widget.py +++ b/openpype/tools/attribute_defs/files_widget.py @@ -155,7 +155,7 @@ class DropEmpty(QtWidgets.QWidget): extensions_label = " or ".join(allowed_items) else: last_item = allowed_items.pop(-1) - new_last_item = " or ".join(last_item, allowed_items.pop(-1)) + new_last_item = " or ".join([last_item, allowed_items.pop(-1)]) allowed_items.append(new_last_item) extensions_label = ", ".join(allowed_items) From a66e421fad3dda9a1620d307b68f01dce9a6cc82 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:46:43 +0100 Subject: [PATCH 2433/2550] use qtpy in module tools --- openpype/modules/avalon_apps/avalon_app.py | 4 ++-- openpype/modules/clockify/clockify_module.py | 2 +- openpype/modules/clockify/widgets.py | 2 +- .../example_addons/example_addon/widgets.py | 2 +- openpype/modules/ftrack/tray/ftrack_tray.py | 4 ++-- openpype/modules/ftrack/tray/login_dialog.py | 7 +++++-- openpype/modules/interfaces.py | 10 ++++----- openpype/modules/kitsu/kitsu_widgets.py | 2 +- .../modules/log_viewer/log_view_module.py | 2 +- openpype/modules/log_viewer/tray/app.py | 2 +- openpype/modules/log_viewer/tray/models.py | 2 +- openpype/modules/log_viewer/tray/widgets.py | 2 +- openpype/modules/muster/muster.py | 2 +- openpype/modules/muster/widget_login.py | 3 +-- .../window/widgets.py | 2 +- .../shotgrid/tray/credential_dialog.py | 2 +- .../modules/shotgrid/tray/shotgrid_tray.py | 2 +- .../modules/sync_server/sync_server_module.py | 2 +- openpype/modules/sync_server/tray/app.py | 2 +- .../modules/sync_server/tray/delegates.py | 2 +- openpype/modules/sync_server/tray/models.py | 21 +++++++++---------- openpype/modules/sync_server/tray/widgets.py | 12 +++++------ .../modules/timers_manager/idle_threads.py | 2 +- .../timers_manager/widget_user_idle.py | 2 +- .../webserver/host_console_listener.py | 2 +- 25 files changed, 49 insertions(+), 48 deletions(-) diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/avalon_apps/avalon_app.py index f9085522b0..a0226ecc5c 100644 --- a/openpype/modules/avalon_apps/avalon_app.py +++ b/openpype/modules/avalon_apps/avalon_app.py @@ -57,7 +57,7 @@ class AvalonModule(OpenPypeModule, ITrayModule): if not self._library_loader_imported: return - from Qt import QtWidgets + from qtpy import QtWidgets # Actions action_library_loader = QtWidgets.QAction( "Loader", tray_menu @@ -75,7 +75,7 @@ class AvalonModule(OpenPypeModule, ITrayModule): def show_library_loader(self): if self._library_loader_window is None: - from Qt import QtCore + from qtpy import QtCore from openpype.tools.libraryloader import LibraryLoaderWindow from openpype.pipeline import install_openpype_plugins diff --git a/openpype/modules/clockify/clockify_module.py b/openpype/modules/clockify/clockify_module.py index 14fcb01f67..300d5576e2 100644 --- a/openpype/modules/clockify/clockify_module.py +++ b/openpype/modules/clockify/clockify_module.py @@ -183,7 +183,7 @@ class ClockifyModule( # Definition of Tray 
menu def tray_menu(self, parent_menu): # Menu for Tray App - from Qt import QtWidgets + from qtpy import QtWidgets menu = QtWidgets.QMenu("Clockify", parent_menu) menu.setProperty("submenu", "on") diff --git a/openpype/modules/clockify/widgets.py b/openpype/modules/clockify/widgets.py index d58df3c067..122b6212c0 100644 --- a/openpype/modules/clockify/widgets.py +++ b/openpype/modules/clockify/widgets.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import resources, style diff --git a/openpype/modules/example_addons/example_addon/widgets.py b/openpype/modules/example_addons/example_addon/widgets.py index c0a0a7e510..cd0da3ae43 100644 --- a/openpype/modules/example_addons/example_addon/widgets.py +++ b/openpype/modules/example_addons/example_addon/widgets.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.style import load_stylesheet diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index e3c6e30ead..8718dff434 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -3,9 +3,9 @@ import time import datetime import threading -from Qt import QtCore, QtWidgets, QtGui - import ftrack_api +from qtpy import QtCore, QtWidgets, QtGui + from openpype import resources from openpype.lib import Logger from openpype_modules.ftrack import resolve_ftrack_url, FTRACK_MODULE_DIR diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/ftrack/tray/login_dialog.py index 05d9226ca4..fbb3455775 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/ftrack/tray/login_dialog.py @@ -1,10 +1,13 @@ import os + import requests +from qtpy import QtCore, QtGui, QtWidgets + from openpype import style from openpype_modules.ftrack.lib import credentials -from . import login_tools from openpype import resources -from Qt import QtCore, QtGui, QtWidgets + +from . 
import login_tools class CredentialsDialog(QtWidgets.QDialog): diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index f92ec6bf2d..7cd299df67 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -222,7 +222,7 @@ class ITrayAction(ITrayModule): pass def tray_menu(self, tray_menu): - from Qt import QtWidgets + from qtpy import QtWidgets if self.admin_action: menu = self.admin_submenu(tray_menu) @@ -247,7 +247,7 @@ class ITrayAction(ITrayModule): @staticmethod def admin_submenu(tray_menu): if ITrayAction._admin_submenu is None: - from Qt import QtWidgets + from qtpy import QtWidgets admin_submenu = QtWidgets.QMenu("Admin", tray_menu) admin_submenu.menuAction().setVisible(False) @@ -279,7 +279,7 @@ class ITrayService(ITrayModule): @staticmethod def services_submenu(tray_menu): if ITrayService._services_submenu is None: - from Qt import QtWidgets + from qtpy import QtWidgets services_submenu = QtWidgets.QMenu("Services", tray_menu) services_submenu.menuAction().setVisible(False) @@ -294,7 +294,7 @@ class ITrayService(ITrayModule): @staticmethod def _load_service_icons(): - from Qt import QtGui + from qtpy import QtGui ITrayService._failed_icon = QtGui.QIcon( resources.get_resource("icons", "circle_red.png") @@ -325,7 +325,7 @@ class ITrayService(ITrayModule): return ITrayService._failed_icon def tray_menu(self, tray_menu): - from Qt import QtWidgets + from qtpy import QtWidgets action = QtWidgets.QAction( self.label, diff --git a/openpype/modules/kitsu/kitsu_widgets.py b/openpype/modules/kitsu/kitsu_widgets.py index 65baed9665..5ff3613583 100644 --- a/openpype/modules/kitsu/kitsu_widgets.py +++ b/openpype/modules/kitsu/kitsu_widgets.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype.modules.kitsu.utils.credentials import ( diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index 31e954fadd..e9dba2041c 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -22,7 +22,7 @@ class LogViewModule(OpenPypeModule, ITrayModule): # Definition of Tray menu def tray_menu(self, tray_menu): - from Qt import QtWidgets + from qtpy import QtWidgets # Menu for Tray App menu = QtWidgets.QMenu('Logging', tray_menu) diff --git a/openpype/modules/log_viewer/tray/app.py b/openpype/modules/log_viewer/tray/app.py index def319e0e3..3c49f337d4 100644 --- a/openpype/modules/log_viewer/tray/app.py +++ b/openpype/modules/log_viewer/tray/app.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from .widgets import LogsWidget, OutputWidget from openpype import style diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/log_viewer/tray/models.py index d369ffeb64..bc1f54c06c 100644 --- a/openpype/modules/log_viewer/tray/models.py +++ b/openpype/modules/log_viewer/tray/models.py @@ -1,5 +1,5 @@ import collections -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui from openpype.lib import Logger diff --git a/openpype/modules/log_viewer/tray/widgets.py b/openpype/modules/log_viewer/tray/widgets.py index c7ac64ab70..981152e6e2 100644 --- a/openpype/modules/log_viewer/tray/widgets.py +++ b/openpype/modules/log_viewer/tray/widgets.py @@ -1,5 +1,5 @@ import html -from Qt import QtCore, QtWidgets +from qtpy import QtCore, QtWidgets import qtawesome from .models import LogModel, LogsFilterProxy diff --git 
a/openpype/modules/muster/muster.py b/openpype/modules/muster/muster.py index 8d395d16e8..77b9214a5a 100644 --- a/openpype/modules/muster/muster.py +++ b/openpype/modules/muster/muster.py @@ -53,7 +53,7 @@ class MusterModule(OpenPypeModule, ITrayModule): # Definition of Tray menu def tray_menu(self, parent): """Add **change credentials** option to tray menu.""" - from Qt import QtWidgets + from qtpy import QtWidgets # Menu for Tray App menu = QtWidgets.QMenu('Muster', parent) diff --git a/openpype/modules/muster/widget_login.py b/openpype/modules/muster/widget_login.py index ae838c6cea..f38f43fb7f 100644 --- a/openpype/modules/muster/widget_login.py +++ b/openpype/modules/muster/widget_login.py @@ -1,5 +1,4 @@ -import os -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import resources, style diff --git a/openpype/modules/python_console_interpreter/window/widgets.py b/openpype/modules/python_console_interpreter/window/widgets.py index 36ce1b61a2..b670352f44 100644 --- a/openpype/modules/python_console_interpreter/window/widgets.py +++ b/openpype/modules/python_console_interpreter/window/widgets.py @@ -5,7 +5,7 @@ import collections from code import InteractiveInterpreter import appdirs -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from openpype import resources from openpype.style import load_stylesheet diff --git a/openpype/modules/shotgrid/tray/credential_dialog.py b/openpype/modules/shotgrid/tray/credential_dialog.py index 9d841d98be..7b839b63c0 100644 --- a/openpype/modules/shotgrid/tray/credential_dialog.py +++ b/openpype/modules/shotgrid/tray/credential_dialog.py @@ -1,5 +1,5 @@ import os -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from openpype import style from openpype import resources diff --git a/openpype/modules/shotgrid/tray/shotgrid_tray.py b/openpype/modules/shotgrid/tray/shotgrid_tray.py index 4038d77b03..8e363bd318 100644 --- a/openpype/modules/shotgrid/tray/shotgrid_tray.py +++ b/openpype/modules/shotgrid/tray/shotgrid_tray.py @@ -1,7 +1,7 @@ import os import webbrowser -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.modules.shotgrid.lib import credentials from openpype.modules.shotgrid.tray.credential_dialog import ( diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 653ee50541..f9b99da02b 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1244,7 +1244,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if not self.enabled: return - from Qt import QtWidgets + from qtpy import QtWidgets """Add menu or action to Tray(or parent)'s menu""" action = QtWidgets.QAction(self.label, parent_menu) action.triggered.connect(self.show_widget) diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py index 9b9768327e..c093835128 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/sync_server/tray/app.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.tools.settings import style diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py index 988eb40d28..e14b2e2f60 100644 --- a/openpype/modules/sync_server/tray/delegates.py +++ b/openpype/modules/sync_server/tray/delegates.py @@ -1,5 +1,5 @@ import os -from Qt import 
QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from openpype.lib import Logger diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index d63d046508..b52f350907 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -3,8 +3,7 @@ import attr from bson.objectid import ObjectId import datetime -from Qt import QtCore -from Qt.QtCore import Qt +from qtpy import QtCore import qtawesome from openpype.tools.utils.delegates import pretty_timestamp @@ -79,16 +78,16 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel): def columnCount(self, _index=None): return len(self._header) - def headerData(self, section, orientation, role=Qt.DisplayRole): + def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole): if section >= len(self.COLUMN_LABELS): return - if role == Qt.DisplayRole: - if orientation == Qt.Horizontal: + if role == QtCore.Qt.DisplayRole: + if orientation == QtCore.Qt.Horizontal: return self.COLUMN_LABELS[section][1] if role == HEADER_NAME_ROLE: - if orientation == Qt.Horizontal: + if orientation == QtCore.Qt.Horizontal: return self.COLUMN_LABELS[section][0] # return name def data(self, index, role): @@ -123,7 +122,7 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel): return item.status == lib.STATUS[2] and \ item.remote_progress < 1 - if role in (Qt.DisplayRole, Qt.EditRole): + if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole): # because of ImageDelegate if header_value in ['remote_site', 'local_site']: return "" @@ -146,7 +145,7 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel): if role == STATUS_ROLE: return item.status - if role == Qt.UserRole: + if role == QtCore.Qt.UserRole: return item._id @property @@ -409,7 +408,7 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel): """ for i in range(self.rowCount(None)): index = self.index(i, 0) - value = self.data(index, Qt.UserRole) + value = self.data(index, QtCore.Qt.UserRole) if value == id: return index return None @@ -917,7 +916,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): if not self.can_edit: return - repre_id = self.data(index, Qt.UserRole) + repre_id = self.data(index, QtCore.Qt.UserRole) representation = get_representation_by_id(self.project, repre_id) if representation: @@ -1353,7 +1352,7 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel): if not self.can_edit: return - file_id = self.data(index, Qt.UserRole) + file_id = self.data(index, QtCore.Qt.UserRole) updated_file = None representation = get_representation_by_id(self.project, self._id) diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py index c40aa98f24..b9ef45727a 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/sync_server/tray/widgets.py @@ -3,8 +3,7 @@ import subprocess import sys from functools import partial -from Qt import QtWidgets, QtCore, QtGui -from Qt.QtCore import Qt +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from openpype.tools.settings import style @@ -260,7 +259,7 @@ class _SyncRepresentationWidget(QtWidgets.QWidget): self._selected_ids = set() for index in idxs: - self._selected_ids.add(self.model.data(index, Qt.UserRole)) + self._selected_ids.add(self.model.data(index, QtCore.Qt.UserRole)) def _set_selection(self): """ @@ -291,7 +290,7 @@ class _SyncRepresentationWidget(QtWidgets.QWidget): self.table_view.openPersistentEditor(index) return - 
_id = self.model.data(index, Qt.UserRole) + _id = self.model.data(index, QtCore.Qt.UserRole) detail_window = SyncServerDetailWindow( self.sync_server, _id, self.model.project, parent=self) detail_window.exec() @@ -615,7 +614,7 @@ class SyncRepresentationSummaryWidget(_SyncRepresentationWidget): table_view.setSelectionBehavior( QtWidgets.QAbstractItemView.SelectRows) table_view.horizontalHeader().setSortIndicator( - -1, Qt.AscendingOrder) + -1, QtCore.Qt.AscendingOrder) table_view.setAlternatingRowColors(True) table_view.verticalHeader().hide() table_view.viewport().setAttribute(QtCore.Qt.WA_Hover, True) @@ -773,7 +772,8 @@ class SyncRepresentationDetailWidget(_SyncRepresentationWidget): QtWidgets.QAbstractItemView.ExtendedSelection) table_view.setSelectionBehavior( QtWidgets.QTableView.SelectRows) - table_view.horizontalHeader().setSortIndicator(-1, Qt.AscendingOrder) + table_view.horizontalHeader().setSortIndicator( + -1, QtCore.Qt.AscendingOrder) table_view.horizontalHeader().setSortIndicatorShown(True) table_view.setAlternatingRowColors(True) table_view.verticalHeader().hide() diff --git a/openpype/modules/timers_manager/idle_threads.py b/openpype/modules/timers_manager/idle_threads.py index 7242761143..eb11bbf117 100644 --- a/openpype/modules/timers_manager/idle_threads.py +++ b/openpype/modules/timers_manager/idle_threads.py @@ -1,5 +1,5 @@ import time -from Qt import QtCore +from qtpy import QtCore from pynput import mouse, keyboard from openpype.lib import Logger diff --git a/openpype/modules/timers_manager/widget_user_idle.py b/openpype/modules/timers_manager/widget_user_idle.py index 1ecea74440..9df328e6b2 100644 --- a/openpype/modules/timers_manager/widget_user_idle.py +++ b/openpype/modules/timers_manager/widget_user_idle.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import resources, style diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/webserver/host_console_listener.py index fdfe1ba688..e5c11af9c2 100644 --- a/openpype/modules/webserver/host_console_listener.py +++ b/openpype/modules/webserver/host_console_listener.py @@ -3,7 +3,7 @@ from aiohttp import web import json import logging from concurrent.futures import CancelledError -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.modules import ITrayService From a3db8a0539956a7cc5d2f2b199d5e4abbc18b778 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:47:52 +0100 Subject: [PATCH 2434/2550] use qtpy in hosts which have use openpype process --- openpype/hosts/aftereffects/api/launch_logic.py | 2 +- openpype/hosts/aftereffects/api/lib.py | 2 +- openpype/hosts/aftereffects/api/pipeline.py | 2 +- openpype/hosts/blender/api/ops.py | 2 +- openpype/hosts/harmony/api/lib.py | 2 +- openpype/hosts/photoshop/api/launch_logic.py | 2 +- openpype/hosts/photoshop/api/lib.py | 2 +- openpype/hosts/photoshop/api/pipeline.py | 2 +- .../hosts/photoshop/plugins/create/create_legacy_image.py | 2 +- openpype/hosts/tvpaint/api/launch_script.py | 2 +- openpype/hosts/tvpaint/plugins/create/create_render_layer.py | 4 ++-- openpype/hosts/unreal/api/tools_ui.py | 2 +- 12 files changed, 13 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py index 9c8513fe8c..50675c8482 100644 --- a/openpype/hosts/aftereffects/api/launch_logic.py +++ b/openpype/hosts/aftereffects/api/launch_logic.py @@ -10,7 +10,7 @@ from wsrpc_aiohttp import ( 
WebSocketAsync ) -from Qt import QtCore +from qtpy import QtCore from openpype.lib import Logger from openpype.pipeline import legacy_io diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py index 8cdf9c407e..c738bcba2d 100644 --- a/openpype/hosts/aftereffects/api/lib.py +++ b/openpype/hosts/aftereffects/api/lib.py @@ -7,7 +7,7 @@ import traceback import logging from functools import partial -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.pipeline import install_host from openpype.modules import ModulesManager diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 7026fe3f05..68a00e30b7 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -1,6 +1,6 @@ import os -from Qt import QtWidgets +from qtpy import QtWidgets import pyblish.api diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index e0e09277df..481c199db2 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -10,7 +10,7 @@ from pathlib import Path from types import ModuleType from typing import Dict, List, Optional, Union -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import bpy import bpy.utils.previews diff --git a/openpype/hosts/harmony/api/lib.py b/openpype/hosts/harmony/api/lib.py index e5e7ad1b7e..e1e77bfbee 100644 --- a/openpype/hosts/harmony/api/lib.py +++ b/openpype/hosts/harmony/api/lib.py @@ -14,7 +14,7 @@ import json import signal import time from uuid import uuid4 -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import collections from .server import Server diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py index 1f0203dca6..1403b6cfa1 100644 --- a/openpype/hosts/photoshop/api/launch_logic.py +++ b/openpype/hosts/photoshop/api/launch_logic.py @@ -8,7 +8,7 @@ from wsrpc_aiohttp import ( WebSocketAsync ) -from Qt import QtCore +from qtpy import QtCore from openpype.lib import Logger from openpype.pipeline import legacy_io diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 221b4314e6..e3b601d011 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -3,7 +3,7 @@ import sys import contextlib import traceback -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.lib import env_value_to_bool, Logger from openpype.modules import ModulesManager diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 9f6fc0983c..d2da8c5cb4 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -1,5 +1,5 @@ import os -from Qt import QtWidgets +from qtpy import QtWidgets import pyblish.api diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 7672458165..2d655cae32 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -1,6 +1,6 @@ import re -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop diff --git a/openpype/hosts/tvpaint/api/launch_script.py b/openpype/hosts/tvpaint/api/launch_script.py index c474a10529..614dbe8a6e 100644 --- 
a/openpype/hosts/tvpaint/api/launch_script.py +++ b/openpype/hosts/tvpaint/api/launch_script.py @@ -6,7 +6,7 @@ import ctypes import platform import logging -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype.pipeline import install_host diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py index a085830e96..009b69c4f1 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py @@ -207,8 +207,8 @@ class CreateRenderlayer(plugin.Creator): ) def _ask_user_subset_override(self, instance): - from Qt import QtCore - from Qt.QtWidgets import QMessageBox + from qtpy import QtCore + from qtpy.QtWidgets import QMessageBox title = "Subset \"{}\" already exist".format(instance["subset"]) text = ( diff --git a/openpype/hosts/unreal/api/tools_ui.py b/openpype/hosts/unreal/api/tools_ui.py index 2500f8495f..708e167a65 100644 --- a/openpype/hosts/unreal/api/tools_ui.py +++ b/openpype/hosts/unreal/api/tools_ui.py @@ -1,5 +1,5 @@ import sys -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import ( resources, From 374d4d9683ca9e8bc0aa56ca7c336385c917adc3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:48:48 +0100 Subject: [PATCH 2435/2550] use qtpy in fusion --- openpype/hosts/fusion/api/menu.py | 2 +- openpype/hosts/fusion/api/pipeline.py | 2 +- openpype/hosts/fusion/api/pulse.py | 2 +- .../hosts/fusion/deploy/MenuScripts/install_pyside2.py | 8 ++++---- .../fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py | 2 +- openpype/hosts/fusion/plugins/inventory/set_tool_color.py | 2 +- openpype/hosts/fusion/scripts/set_rendermode.py | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 39126935e6..42fbab70a6 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -1,6 +1,6 @@ import sys -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.tools.utils import host_tools from openpype.style import load_stylesheet diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index b6092f7c1b..6315fe443d 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -6,7 +6,7 @@ import sys import logging import pyblish.api -from Qt import QtCore +from qtpy import QtCore from openpype.lib import ( Logger, diff --git a/openpype/hosts/fusion/api/pulse.py b/openpype/hosts/fusion/api/pulse.py index eb7ef3785d..762f05ba7e 100644 --- a/openpype/hosts/fusion/api/pulse.py +++ b/openpype/hosts/fusion/api/pulse.py @@ -1,7 +1,7 @@ import os import sys -from Qt import QtCore +from qtpy import QtCore class PulseThread(QtCore.QThread): diff --git a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py index ab9f13ce05..e1240fd677 100644 --- a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py +++ b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py @@ -6,10 +6,10 @@ import importlib try: - from Qt import QtWidgets # noqa: F401 - from Qt import __binding__ - print(f"Qt binding: {__binding__}") - mod = importlib.import_module(__binding__) + from qtpy import API_NAME + + print(f"Qt binding: {API_NAME}") + mod = 
importlib.import_module(API_NAME) print(f"Qt path: {mod.__file__}") print("Qt library found, nothing to do..") diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py index 93f775b24b..f08dc0bf2c 100644 --- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py +++ b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py @@ -3,7 +3,7 @@ import sys import glob import logging -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome as qta diff --git a/openpype/hosts/fusion/plugins/inventory/set_tool_color.py b/openpype/hosts/fusion/plugins/inventory/set_tool_color.py index c7530ce674..a057ad1e89 100644 --- a/openpype/hosts/fusion/plugins/inventory/set_tool_color.py +++ b/openpype/hosts/fusion/plugins/inventory/set_tool_color.py @@ -1,4 +1,4 @@ -from Qt import QtGui, QtWidgets +from qtpy import QtGui, QtWidgets from openpype.pipeline import InventoryAction from openpype import style diff --git a/openpype/hosts/fusion/scripts/set_rendermode.py b/openpype/hosts/fusion/scripts/set_rendermode.py index f0638e4fe3..9d2bfef310 100644 --- a/openpype/hosts/fusion/scripts/set_rendermode.py +++ b/openpype/hosts/fusion/scripts/set_rendermode.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets import qtawesome from openpype.hosts.fusion.api import get_current_comp From 7810d425dabe7900411809e61444c9ab820205bb Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 13 Dec 2022 16:52:32 +0000 Subject: [PATCH 2436/2550] Camera is created only when the creator is not using the selection --- openpype/hosts/blender/plugins/create/create_camera.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py index 1a3c008069..ada512d7ac 100644 --- a/openpype/hosts/blender/plugins/create/create_camera.py +++ b/openpype/hosts/blender/plugins/create/create_camera.py @@ -32,11 +32,6 @@ class CreateCamera(plugin.Creator): subset = self.data["subset"] name = plugin.asset_name(asset, subset) - camera = bpy.data.cameras.new(subset) - camera_obj = bpy.data.objects.new(subset, camera) - - instances.objects.link(camera_obj) - asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) @@ -53,6 +48,11 @@ class CreateCamera(plugin.Creator): bpy.ops.object.parent_set(keep_transform=True) else: plugin.deselect_all() + camera = bpy.data.cameras.new(subset) + camera_obj = bpy.data.objects.new(subset, camera) + + instances.objects.link(camera_obj) + camera_obj.select_set(True) asset_group.select_set(True) bpy.context.view_layer.objects.active = asset_group From a97f92760086eb34b1d07b4e68b6d850355d49eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Dec 2022 21:35:01 +0100 Subject: [PATCH 2437/2550] fixing logic --- openpype/hosts/flame/plugins/load/load_clip.py | 6 ++++-- openpype/hosts/flame/plugins/load/load_clip_batch.py | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 2c107de2b4..6f47c23d57 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -49,8 +49,10 @@ class LoadClip(opfapi.ClipLoader): # in case output is not in context replace key to representation if not 
context["representation"]["context"].get("output"): - self.clip_name_template.replace("output", "representation") - self.layer_rename_template.replace("output", "representation") + self.clip_name_template = self.clip_name_template.replace( + "output", "representation") + self.layer_rename_template = self.layer_rename_template.replace( + "output", "representation") formating_data = deepcopy(context["representation"]["context"]) clip_name = StringTemplate(self.clip_name_template).format( diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 47d7da2a76..5975c6e42f 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -47,8 +47,10 @@ class LoadClipBatch(opfapi.ClipLoader): # in case output is not in context replace key to representation if not context["representation"]["context"].get("output"): - self.clip_name_template.replace("output", "representation") - self.layer_rename_template.replace("output", "representation") + self.clip_name_template = self.clip_name_template.replace( + "output", "representation") + self.layer_rename_template = self.layer_rename_template.replace( + "output", "representation") formating_data = deepcopy(context["representation"]["context"]) formating_data["batch"] = self.batch.name.get_value() From 730df2a4cd976f859f78cdf0ca60e484edd0d51f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Dec 2022 21:55:41 +0100 Subject: [PATCH 2438/2550] deadline: pr comments --- .../deadline/plugins/publish/submit_publish_job.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 5ed8c83412..5c5c54febb 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -127,10 +127,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "celaction": [r".*"]} environ_job_filter = [ - "OPENPYPE_METADATA_FILE", - "OPENPYPE_PUBLISH_JOB", - "OPENPYPE_RENDER_JOB", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_METADATA_FILE" ] environ_keys = [ @@ -238,10 +235,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "AVALON_PROJECT": legacy_io.Session["AVALON_PROJECT"], "AVALON_ASSET": legacy_io.Session["AVALON_ASSET"], "AVALON_TASK": legacy_io.Session["AVALON_TASK"], - "OPENPYPE_LOG_NO_COLORS": "1", "OPENPYPE_USERNAME": instance.context.data["user"], "OPENPYPE_PUBLISH_JOB": "1", - "OPENPYPE_RENDER_JOB": "0" + "OPENPYPE_RENDER_JOB": "0", + "OPENPYPE_REMOTE_JOB": "0", + "OPENPYPE_LOG_NO_COLORS": "1" } # add environments from self.environ_keys From c58162bb1e6246d32ef0e58f0202b17ac90afe44 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 14 Dec 2022 03:30:40 +0000 Subject: [PATCH 2439/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 190f7ac401..8d82df563d 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.3" +__version__ = "3.14.9-nightly.4" From f09ca54d962e34735b000595889c5d54c88e2fc0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 14 Dec 2022 15:04:34 +0800 Subject: [PATCH 2440/2550] bug fix image plane load error --- 
openpype/hosts/maya/plugins/load/load_image_plane.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 82c2676982..33ae61ec0d 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -23,8 +23,6 @@ class CameraWindow(QtWidgets.QDialog): self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) self.camera = None - # self.static_image_plane = False - # self.show_in_all_views = False self.widgets = { "label": QtWidgets.QLabel("Select camera for image plane."), @@ -45,8 +43,6 @@ class CameraWindow(QtWidgets.QDialog): for camera in cameras: self.widgets["list"].addItem(camera) - # self.widgets["staticImagePlane"].setText("Make Image Plane Static") - # self.widgets["showInAllViews"].setText("Show Image Plane in All Views") # Build buttons. layout = QtWidgets.QHBoxLayout(self.widgets["buttons"]) @@ -57,8 +53,6 @@ class CameraWindow(QtWidgets.QDialog): layout = QtWidgets.QVBoxLayout(self) layout.addWidget(self.widgets["label"]) layout.addWidget(self.widgets["list"]) - # layout.addWidget(self.widgets["staticImagePlane"]) - # layout.addWidget(self.widgets["showInAllViews"]) layout.addWidget(self.widgets["buttons"]) layout.addWidget(self.widgets["warning"]) @@ -73,8 +67,6 @@ class CameraWindow(QtWidgets.QDialog): if self.camera is None: self.widgets["warning"].setVisible(True) return - # self.show_in_all_views = self.widgets["showInAllViews"].isChecked() - # self.static_image_plane = self.widgets["staticImagePlane"].isChecked() self.close() From 29ee8e5edb46dc58b70dfa8b78eba3a1e5c07b68 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 14 Dec 2022 15:05:42 +0800 Subject: [PATCH 2441/2550] bug fix image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 33ae61ec0d..cc03787a6b 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -179,7 +179,7 @@ class ImagePlaneLoader(load.LoaderPlugin): pm.delete(expressions) if not image_plane_shape.frameExtension.isFreeToChange(): - raise RuntimeError("Can't set frame extension for {}".format(image_plane_shape)) + raise RuntimeError("Can't set frame extension for {}".format(image_plane_shape)) # noqa image_plane_shape.frameExtension.set(start_frame) From 1522d78873eb5c0ce34970b0eb4c48a74d8f594b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 14 Dec 2022 15:07:47 +0800 Subject: [PATCH 2442/2550] bug fix image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index cc03787a6b..4055dc50b0 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -102,8 +102,6 @@ class ImagePlaneLoader(load.LoaderPlugin): # is_in_all_views = None if data: camera = pm.PyNode(data.get("camera")) - # is_static_image_plane = data.get("static_image_plane") - # is_in_all_views = data.get("in_all_views") if not camera: cameras = pm.ls(type="camera") @@ -113,9 +111,6 @@ class ImagePlaneLoader(load.LoaderPlugin): window.exec_() camera = 
camera_names[window.camera] - # is_static_image_plane = window.static_image_plane - # is_in_all_views = window.show_in_all_views - if camera == "create_camera": camera = pm.createNode("camera") @@ -134,15 +129,12 @@ class ImagePlaneLoader(load.LoaderPlugin): camera=camera) image_plane_shape.depth.set(image_plane_depth) - # if is_static_image_plane: - # image_plane_shape.detach() - # image_plane_transform.setRotation(camera.getRotation()) start_frame = pm.playbackOptions(q=True, min=True) end_frame = pm.playbackOptions(q=True, max=True) if int(start_frame) > 0: - image_plane_shape.frameOffset.set(int(start_frame)- 1) + image_plane_shape.frameOffset.set(int(start_frame)-1) else: image_plane_shape.frameOffset.set(int(start_frame)) image_plane_shape.frameIn.set(start_frame) @@ -232,7 +224,7 @@ class ImagePlaneLoader(load.LoaderPlugin): start_frame = asset["data"]["frameStart"] end_frame = asset["data"]["frameEnd"] if int(start_frame) > 0: - image_plane_shape.frameOffset.set(int(start_frame)- 1) + image_plane_shape.frameOffset.set(int(start_frame)-1) else: image_plane_shape.frameOffset.set(int(start_frame)) image_plane_shape.frameIn.set(start_frame) From 916b5b82afd4779c3e1c099aee3fae300e8e55cb Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 14 Dec 2022 15:10:08 +0800 Subject: [PATCH 2443/2550] bug fix image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 4055dc50b0..5f1c582203 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -134,7 +134,7 @@ class ImagePlaneLoader(load.LoaderPlugin): end_frame = pm.playbackOptions(q=True, max=True) if int(start_frame) > 0: - image_plane_shape.frameOffset.set(int(start_frame)-1) + image_plane_shape.frameOffset.set(int(start_frame) - 1) else: image_plane_shape.frameOffset.set(int(start_frame)) image_plane_shape.frameIn.set(start_frame) @@ -166,7 +166,7 @@ class ImagePlaneLoader(load.LoaderPlugin): QtWidgets.QMessageBox.Cancel ) if reply == QtWidgets.QMessageBox.Ok: - expressions = image_plane_shape.frameExtension.inputs(type="expression") + expressions = image_plane_shape.frameExtension.inputs(type="expression") # noqa if expressions: pm.delete(expressions) @@ -224,7 +224,7 @@ class ImagePlaneLoader(load.LoaderPlugin): start_frame = asset["data"]["frameStart"] end_frame = asset["data"]["frameEnd"] if int(start_frame) > 0: - image_plane_shape.frameOffset.set(int(start_frame)-1) + image_plane_shape.frameOffset.set(int(start_frame) - 1) else: image_plane_shape.frameOffset.set(int(start_frame)) image_plane_shape.frameIn.set(start_frame) From bf4b70e9417407e603441f7ca0fe11b165452003 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 14 Dec 2022 10:46:31 +0000 Subject: [PATCH 2444/2550] Fixed layout update --- .../blender/plugins/load/load_layout_blend.py | 75 ++++++++++++++----- 1 file changed, 56 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index e0124053bf..9ab24b5ac1 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -6,6 +6,7 @@ from typing import Dict, List, Optional import bpy +from openpype import lib from openpype.pipeline import ( legacy_create, 
get_representation_path, @@ -63,10 +64,12 @@ class BlendLayoutLoader(plugin.AssetLoader): # If it is the last object to use that library, remove it if count == 1: library = bpy.data.libraries.get(bpy.path.basename(libpath)) - bpy.data.libraries.remove(library) + if library: + bpy.data.libraries.remove(library) def _process( - self, libpath, asset_group, group_name, asset, representation, actions + self, libpath, asset_group, group_name, asset, representation, + actions, anim_instances ): with bpy.data.libraries.load( libpath, link=True, relative=False @@ -140,12 +143,12 @@ class BlendLayoutLoader(plugin.AssetLoader): elif local_obj.type == 'ARMATURE': plugin.prepare_data(local_obj.data) - if action is not None: + if action: if local_obj.animation_data is None: local_obj.animation_data_create() local_obj.animation_data.action = action elif (local_obj.animation_data and - local_obj.animation_data.action is not None): + local_obj.animation_data.action): plugin.prepare_data( local_obj.animation_data.action) @@ -157,19 +160,26 @@ class BlendLayoutLoader(plugin.AssetLoader): t.id = local_obj elif local_obj.type == 'EMPTY': - creator_plugin = get_legacy_creator_by_name("CreateAnimation") - if not creator_plugin: - raise ValueError("Creator plugin \"CreateAnimation\" was " - "not found.") + if (not anim_instances or + (anim_instances and + local_obj.name not in anim_instances.keys())): + avalon = local_obj.get(AVALON_PROPERTY) + if avalon and avalon.get('family') == 'rig': + creator_plugin = get_legacy_creator_by_name( + "CreateAnimation") + if not creator_plugin: + raise ValueError( + "Creator plugin \"CreateAnimation\" was " + "not found.") - legacy_create( - creator_plugin, - name=local_obj.name.split(':')[-1] + "_animation", - asset=asset, - options={"useSelection": False, - "asset_group": local_obj}, - data={"dependencies": representation} - ) + legacy_create( + creator_plugin, + name=local_obj.name.split(':')[-1] + "_animation", + asset=asset, + options={"useSelection": False, + "asset_group": local_obj}, + data={"dependencies": representation} + ) if not local_obj.get(AVALON_PROPERTY): local_obj[AVALON_PROPERTY] = dict() @@ -272,7 +282,8 @@ class BlendLayoutLoader(plugin.AssetLoader): avalon_container.objects.link(asset_group) objects = self._process( - libpath, asset_group, group_name, asset, representation, None) + libpath, asset_group, group_name, asset, representation, + None, None) for child in asset_group.children: if child.get(AVALON_PROPERTY): @@ -352,10 +363,20 @@ class BlendLayoutLoader(plugin.AssetLoader): return actions = {} + anim_instances = {} for obj in asset_group.children: obj_meta = obj.get(AVALON_PROPERTY) if obj_meta.get('family') == 'rig': + # Get animation instance + collections = list(obj.users_collection) + for c in collections: + avalon = c.get(AVALON_PROPERTY) + if avalon and avalon.get('family') == 'animation': + anim_instances[obj.name] = c.name + break + + # Get armature's action rig = None for child in obj.children: if child.type == 'ARMATURE': @@ -384,9 +405,25 @@ class BlendLayoutLoader(plugin.AssetLoader): # If it is the last object to use that library, remove it if count == 1: library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) - bpy.data.libraries.remove(library) + if library: + bpy.data.libraries.remove(library) - self._process(str(libpath), asset_group, object_name, actions) + asset = container.get("asset_name").split("_")[0] + + self._process( + str(libpath), asset_group, object_name, asset, + str(representation.get("_id")), actions, 
anim_instances + ) + + for o in anim_instances.keys(): + try: + obj = bpy.data.objects[o] + bpy.data.collections[anim_instances[o]].objects.link(obj) + except KeyError: + self.log.info(f"Object {o} does not exist anymore.") + coll = bpy.data.collections.get(anim_instances[o]) + if (coll): + bpy.data.collections.remove(coll) avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) for child in asset_group.children: From f222c2b9cc36e3f3d15d6b97969f3eafc0888ad9 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 14 Dec 2022 11:06:26 +0000 Subject: [PATCH 2445/2550] Small fix and comments --- .../hosts/blender/plugins/load/load_layout_blend.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index 9ab24b5ac1..c98b5c088a 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -6,7 +6,6 @@ from typing import Dict, List, Optional import bpy -from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, @@ -415,13 +414,14 @@ class BlendLayoutLoader(plugin.AssetLoader): str(representation.get("_id")), actions, anim_instances ) - for o in anim_instances.keys(): + # Link the new objects to the animation collection + for inst in anim_instances.keys(): try: - obj = bpy.data.objects[o] - bpy.data.collections[anim_instances[o]].objects.link(obj) + obj = bpy.data.objects[inst] + bpy.data.collections[anim_instances[inst]].objects.link(obj) except KeyError: - self.log.info(f"Object {o} does not exist anymore.") - coll = bpy.data.collections.get(anim_instances[o]) + self.log.info(f"Object {inst} does not exist anymore.") + coll = bpy.data.collections.get(anim_instances[inst]) if (coll): bpy.data.collections.remove(coll) From 6f2c16fc6d92833b7f9e7747dd38e7129c2ef945 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 14 Dec 2022 11:09:08 +0000 Subject: [PATCH 2446/2550] Hound fixes --- openpype/hosts/blender/plugins/load/load_layout_blend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index c98b5c088a..f95d17a17f 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -176,7 +176,7 @@ class BlendLayoutLoader(plugin.AssetLoader): name=local_obj.name.split(':')[-1] + "_animation", asset=asset, options={"useSelection": False, - "asset_group": local_obj}, + "asset_group": local_obj}, data={"dependencies": representation} ) From 4d20f025f63d8178aece5fe58d28a8a3f6997aeb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Dec 2022 16:34:40 +0100 Subject: [PATCH 2447/2550] :bug: fix empty ue folder bug --- openpype/hosts/unreal/lib.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/lib.py b/openpype/hosts/unreal/lib.py index d02c6de357..095f5e414b 100644 --- a/openpype/hosts/unreal/lib.py +++ b/openpype/hosts/unreal/lib.py @@ -50,7 +50,10 @@ def get_engine_versions(env=None): # environment variable not set pass except OSError: - # specified directory doesn't exists + # specified directory doesn't exist + pass + except StopIteration: + # specified directory doesn't exist pass # if we've got something, terminate auto-detection process From 52e8601d44ab90dc7d6b57cfb2c4d30892340daa 
Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 15 Dec 2022 11:41:11 +0100 Subject: [PATCH 2448/2550] use string instead of custom type for signal parameter --- .../publisher/widgets/card_view_widgets.py | 26 ++++++------------- 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 09635d1a15..57336f9304 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -43,24 +43,14 @@ from ..constants import ( ) -class SelectionType: - def __init__(self, name): - self.name = name - - def __eq__(self, other): - if isinstance(other, SelectionType): - other = other.name - return self.name == other - - class SelectionTypes: - clear = SelectionType("clear") - extend = SelectionType("extend") - extend_to = SelectionType("extend_to") + clear = "clear" + extend = "extend" + extend_to = "extend_to" class BaseGroupWidget(QtWidgets.QWidget): - selected = QtCore.Signal(str, str, SelectionType) + selected = QtCore.Signal(str, str, str) removed_selected = QtCore.Signal() def __init__(self, group_name, parent): @@ -269,7 +259,7 @@ class InstanceGroupWidget(BaseGroupWidget): class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" - selected = QtCore.Signal(str, str, SelectionType) + selected = QtCore.Signal(str, str, str) # Group identifier of card # - this must be set because if send when mouse is released with card id _group_identifier = None @@ -755,11 +745,11 @@ class InstanceCardView(AbstractInstanceView): group_widget = self._widgets_by_group[group_name] new_widget = group_widget.get_widget_by_item_id(instance_id) - if selection_type is SelectionTypes.clear: + if selection_type == SelectionTypes.clear: self._select_item_clear(instance_id, group_name, new_widget) - elif selection_type is SelectionTypes.extend: + elif selection_type == SelectionTypes.extend: self._select_item_extend(instance_id, group_name, new_widget) - elif selection_type is SelectionTypes.extend_to: + elif selection_type == SelectionTypes.extend_to: self._select_item_extend_to(instance_id, group_name, new_widget) self.selection_changed.emit() From 74972bec80b523e4457627801e6bfacf8d53fb8b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 15 Dec 2022 11:41:28 +0100 Subject: [PATCH 2449/2550] added all constants to __all__ --- openpype/tools/publisher/constants.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index 74337ea1ab..96f74a5a5c 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -35,5 +35,8 @@ __all__ = ( "SORT_VALUE_ROLE", "IS_GROUP_ROLE", "CREATOR_IDENTIFIER_ROLE", - "FAMILY_ROLE" + "CREATOR_THUMBNAIL_ENABLED_ROLE", + "FAMILY_ROLE", + "GROUP_ROLE", + "CONVERTER_IDENTIFIER_ROLE", ) From 0c481f83510438c5fb6243baa3f5c83adba6dbf6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 11:55:14 +0100 Subject: [PATCH 2450/2550] OP-4512 - safer resolution of enabled If new site is added in System setting, but project settings are not saved, 'enabled' key is missing. This should be safer in this cases. 
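For reference, the change below boils down to preferring dict.get() over direct indexing, so a provider whose project settings were never saved no longer raises KeyError on the missing "enabled" key. A minimal, self-contained sketch of that pattern (the presets dict here is a made-up stand-in, not real settings data):

    presets = {"provider": "gdrive"}      # "enabled" key absent until project settings are saved
    enabled = presets.get("enabled")      # returns None instead of raising KeyError
    if not enabled:
        print("Site not enabled for this project, skipping handler setup")
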
--- openpype/modules/sync_server/providers/dropbox.py | 2 +- openpype/modules/sync_server/providers/gdrive.py | 2 +- openpype/modules/sync_server/providers/sftp.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index e026ae7ef6..3515aee93f 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -22,7 +22,7 @@ class DropboxHandler(AbstractProvider): ) return - if not self.presets["enabled"]: + if not self.presets.get("enabled"): self.log.debug("Sync Server: Site {} not enabled for {}.". format(site_name, project_name)) return diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index 9a3ce89cf5..297a5c9fec 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -74,7 +74,7 @@ class GDriveHandler(AbstractProvider): ) return - if not self.presets["enabled"]: + if not self.presets.get("enabled"): self.log.debug( "Sync Server: Site {} not enabled for {}.".format( site_name, project_name diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 40f11cb9dd..1b4f68c585 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -72,7 +72,7 @@ class SFTPHandler(AbstractProvider): Returns: (boolean) """ - return self.presets["enabled"] and self.conn is not None + return self.self.presets.get("enabled") and self.conn is not None @classmethod def get_system_settings_schema(cls): From 753c876d9fd393f5f64bbf0392a91c89380bbf78 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 11:59:01 +0100 Subject: [PATCH 2451/2550] OP-4512 - create Anatomy object only for default local sites Eg. studio and local id of machine. It doesn't make much sense for Anatomy to handle other SiteSync sites. --- openpype/pipeline/anatomy.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 908dc2b187..5b4eb67247 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -24,6 +24,7 @@ from openpype.lib.path_templates import ( FormatObject, ) from openpype.lib.log import Logger +from openpype.lib import get_local_site_id log = Logger.get_logger(__name__) @@ -60,6 +61,10 @@ class BaseAnatomy(object): project_name = project_doc["name"] self.project_name = project_name + if site_name not in ["studio", get_local_site_id()]: + raise RuntimeError("Anatomy could be created only for default " + "local sites") + self._site_name = site_name self._data = self._prepare_anatomy_data( From 19ab86499a79cab738daef25850781f1409519b4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 12:00:10 +0100 Subject: [PATCH 2452/2550] OP-4512 - update method to get all configured sites Added more details to be more useful. 
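The diff below extends get_all_site_configs() so each returned site entry carries its enabled state, provider and the full root mapping. A hedged sketch of the shape one entry might take after this change (keys inferred from the code below; the paths are purely illustrative):

    studio_config = {
        "enabled": True,
        "provider": "local_drive",
        "root": {
            "work": {
                "windows": "P:/projects",
                "linux": "/mnt/projects",
                "darwin": "/Volumes/projects",
            }
        },
    }
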
--- .../modules/sync_server/sync_server_module.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 653ee50541..034121d996 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1368,13 +1368,19 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ sync_sett = self.sync_system_settings project_enabled = True + project_settings = None if project_name: project_enabled = project_name in self.get_enabled_projects() + project_settings = self.get_sync_project_setting(project_name) sync_enabled = sync_sett["enabled"] and project_enabled system_sites = {} if sync_enabled: for site, detail in sync_sett.get("sites", {}).items(): + if project_settings: + site_settings = project_settings["sites"].get(site) + if site_settings: + detail.update(site_settings) system_sites[site] = detail system_sites.update(self._get_default_site_configs(sync_enabled, @@ -1396,14 +1402,22 @@ class SyncServerModule(OpenPypeModule, ITrayModule): exclude_locals=True) roots = {} for root, config in anatomy_sett["roots"].items(): - roots[root] = config[platform.system().lower()] + roots[root] = config studio_config = { + 'enabled': True, 'provider': 'local_drive', "root": roots } all_sites = {self.DEFAULT_SITE: studio_config} if sync_enabled: - all_sites[get_local_site_id()] = {'provider': 'local_drive'} + all_sites[get_local_site_id()] = {'enabled': True, + 'provider': 'local_drive', + "root": roots} + # duplicate values for normalized local name + all_sites["local"] = { + 'enabled': True, + 'provider': 'local_drive', + "root": roots} return all_sites def get_provider_for_site(self, project_name=None, site=None): From e29f8d445b762545b4dbe7f7227f668cdabd7bac Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 12:05:42 +0100 Subject: [PATCH 2453/2550] OP-4512 - updated way to get sites Labels for roots are now actually changing according to selected sites --- .../local_settings/projects_widget.py | 27 ++++++++++++------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/openpype/tools/settings/local_settings/projects_widget.py b/openpype/tools/settings/local_settings/projects_widget.py index 30a0d212f0..2b8c56e38c 100644 --- a/openpype/tools/settings/local_settings/projects_widget.py +++ b/openpype/tools/settings/local_settings/projects_widget.py @@ -248,6 +248,9 @@ class SitesWidget(QtWidgets.QWidget): main_layout.addWidget(comboboxes_widget, 0) main_layout.addWidget(content_widget, 1) + active_site_widget.value_changed.connect(self.refresh) + remote_site_widget.value_changed.connect(self.refresh) + self.active_site_widget = active_site_widget self.remote_site_widget = remote_site_widget @@ -268,25 +271,29 @@ class SitesWidget(QtWidgets.QWidget): self.modules_manager.modules_by_name["sync_server"] ) - # This is temporary modification - # - whole logic here should be in sync module's providers - site_names = sync_server_module.get_active_sites_from_settings( - self.project_settings["project_settings"].value - ) + site_configs = sync_server_module.get_all_site_configs( + self._project_name) roots_entity = ( self.project_settings[PROJECT_ANATOMY_KEY][LOCAL_ROOTS_KEY] ) - + site_names = [self.active_site_widget.current_text(), + self.remote_site_widget.current_text()] output = [] for site_name in site_names: + if not site_name: + continue + site_inputs = [] - for root_name, path_entity 
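Because roots are now kept as full per-platform mappings, the widget code in the next hunk resolves the value for the running OS via platform.system().lower(). A minimal runnable sketch of that lookup, using an invented root mapping rather than project data:

    import platform

    root_paths = {
        "windows": "P:/projects",
        "linux": "/mnt/projects",
        "darwin": "/Volumes/projects",
    }
    current_root = root_paths[platform.system().lower()]
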
in roots_entity.items(): - platform_entity = path_entity[platform.system().lower()] + site_config = site_configs[site_name] + for root_name, path_entity in site_config.get("root", {}).items(): + if not path_entity: + continue + platform_value = path_entity[platform.system().lower()] site_inputs.append({ "label": root_name, "key": root_name, - "value": platform_entity.value + "value": platform_value }) output.append( @@ -436,6 +443,7 @@ class SitesWidget(QtWidgets.QWidget): class _SiteCombobox(QtWidgets.QWidget): input_label = None + value_changed = QtCore.Signal() def __init__(self, modules_manager, project_settings, parent): super(_SiteCombobox, self).__init__(parent) @@ -661,6 +669,7 @@ class _SiteCombobox(QtWidgets.QWidget): self._set_local_settings_value(self.current_text()) self._update_style() + self.value_changed.emit() def _set_local_settings_value(self, value): raise NotImplementedError( From 61076ae7dbdf50541f9b465e0cfdc18ebe0740ef Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 15 Dec 2022 19:45:29 +0800 Subject: [PATCH 2454/2550] fix the setAttr error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 5f1c582203..6f8925a083 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -166,13 +166,17 @@ class ImagePlaneLoader(load.LoaderPlugin): QtWidgets.QMessageBox.Cancel ) if reply == QtWidgets.QMessageBox.Ok: - expressions = image_plane_shape.frameExtension.inputs(type="expression") # noqa + # find the input and output of frame extension + expressions = image_plane_shape.frameExtension.inputs() + frame_ext_output = image_plane_shape.frameExtension.outputs() if expressions: - pm.delete(expressions) + # the "time1" node is non-deletable attr + # in Maya, use disconnectAttr instead + pm.disconnectAttr(expressions, frame_ext_output) if not image_plane_shape.frameExtension.isFreeToChange(): raise RuntimeError("Can't set frame extension for {}".format(image_plane_shape)) # noqa - + # get the node of time instead and set the time for it. 
image_plane_shape.frameExtension.set(start_frame) new_nodes.extend( From c07bcb2b213cefaf2ae621d60a243c486d6ca80a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 15 Dec 2022 12:51:43 +0000 Subject: [PATCH 2455/2550] Fix problem when updating or removing layout with non loaded objects --- openpype/hosts/blender/plugins/load/load_layout_blend.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index f95d17a17f..7d2fd23444 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -48,8 +48,14 @@ class BlendLayoutLoader(plugin.AssetLoader): bpy.data.objects.remove(obj) def _remove_asset_and_library(self, asset_group): + if not asset_group.get(AVALON_PROPERTY): + return + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') + if not libpath: + return + # Check how many assets use the same library count = 0 for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects: From 9d76f9d50c52e0208c2bdf3be797c38eeab14dda Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 15 Dec 2022 16:53:03 +0000 Subject: [PATCH 2456/2550] Implemented workfile importer --- .../blender/plugins/load/import_workfile.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 openpype/hosts/blender/plugins/load/import_workfile.py diff --git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py new file mode 100644 index 0000000000..3b9edbc8c7 --- /dev/null +++ b/openpype/hosts/blender/plugins/load/import_workfile.py @@ -0,0 +1,38 @@ +from pathlib import Path + +import bpy + +from openpype.hosts.blender.api import plugin + + +class ImportBlendLoader(plugin.AssetLoader): + """Import action for Blender (unmanaged) + + Warning: + The loaded content will be unmanaged and is *not* visible in the + scene inventory. It's purely intended to merge content into your scene + so you could also use it as a new base. 
+ + """ + + representations = ["blend"] + families = ["*"] + + label = "Import" + order = 10 + icon = "arrow-circle-down" + color = "#775555" + + def load(self, context, name=None, namespace=None, data=None): + scene = bpy.context.scene + + with bpy.data.libraries.load(self.fname) as (data_from, data_to): + for attr in dir(data_to): + setattr(data_to, attr, getattr(data_from, attr)) + + # Add objects to current scene + # for obj in data_to.objects: + # scene.collection.objects.link(obj) + + # We do not containerize imported content, it remains unmanaged + return From d935b9b689ddff657056cf92cabb5be9a5b0be3f Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 15 Dec 2022 16:57:59 +0000 Subject: [PATCH 2457/2550] Hound fixes --- openpype/hosts/blender/plugins/load/import_workfile.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py index 3b9edbc8c7..14c970c8d9 100644 --- a/openpype/hosts/blender/plugins/load/import_workfile.py +++ b/openpype/hosts/blender/plugins/load/import_workfile.py @@ -1,5 +1,3 @@ -from pathlib import Path - import bpy from openpype.hosts.blender.api import plugin @@ -24,13 +22,12 @@ class ImportBlendLoader(plugin.AssetLoader): color = "#775555" def load(self, context, name=None, namespace=None, data=None): - scene = bpy.context.scene - with bpy.data.libraries.load(self.fname) as (data_from, data_to): for attr in dir(data_to): setattr(data_to, attr, getattr(data_from, attr)) # Add objects to current scene + # scene = bpy.context.scene # for obj in data_to.objects: # scene.collection.objects.link(obj) From e807d7e275eb3d159b61fac28073b9b081e83c18 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 16 Dec 2022 18:10:56 +0800 Subject: [PATCH 2458/2550] Update load_image_plane.py set the frame offset to zero by default --- openpype/hosts/maya/plugins/load/load_image_plane.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 6f8925a083..86816495ae 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -133,10 +133,7 @@ class ImagePlaneLoader(load.LoaderPlugin): start_frame = pm.playbackOptions(q=True, min=True) end_frame = pm.playbackOptions(q=True, max=True) - if int(start_frame) > 0: - image_plane_shape.frameOffset.set(int(start_frame) - 1) - else: - image_plane_shape.frameOffset.set(int(start_frame)) + image_plane_shape.frameOffset.set(0) image_plane_shape.frameIn.set(start_frame) image_plane_shape.frameOut.set(end_frame) image_plane_shape.frameCache.set(end_frame) @@ -227,10 +224,8 @@ class ImagePlaneLoader(load.LoaderPlugin): ) start_frame = asset["data"]["frameStart"] end_frame = asset["data"]["frameEnd"] - if int(start_frame) > 0: - image_plane_shape.frameOffset.set(int(start_frame) - 1) - else: - image_plane_shape.frameOffset.set(int(start_frame)) + + image_plane_shape.frameOffset.set(0) image_plane_shape.frameIn.set(start_frame) image_plane_shape.frameOut.set(end_frame) image_plane_shape.frameCache.set(end_frame) From 2a49265ba7c23c965754d5ed1c5ee59bfd7e058f Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 16 Dec 2022 10:59:51 +0000 Subject: [PATCH 2459/2550] Renamed to "Append workfile" --- openpype/hosts/blender/plugins/load/import_workfile.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff 
--git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py index 14c970c8d9..244eefe936 100644 --- a/openpype/hosts/blender/plugins/load/import_workfile.py +++ b/openpype/hosts/blender/plugins/load/import_workfile.py @@ -3,20 +3,19 @@ import bpy from openpype.hosts.blender.api import plugin -class ImportBlendLoader(plugin.AssetLoader): - """Import action for Blender (unmanaged) +class AppendBlendLoader(plugin.AssetLoader): + """Append workfile in Blender (unmanaged) Warning: The loaded content will be unmanaged and is *not* visible in the scene inventory. It's purely intended to merge content into your scene so you could also use it as a new base. - """ representations = ["blend"] families = ["*"] - label = "Import" + label = "Append Workfile" order = 10 icon = "arrow-circle-down" color = "#775555" From c053762fedc29289d0e1d03fd0f0a012d2923c81 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 16 Dec 2022 11:17:34 +0000 Subject: [PATCH 2460/2550] Differentiate Append and Import loaders --- .../blender/plugins/load/import_workfile.py | 35 ++++++++++++++++--- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py index 244eefe936..2849031f7e 100644 --- a/openpype/hosts/blender/plugins/load/import_workfile.py +++ b/openpype/hosts/blender/plugins/load/import_workfile.py @@ -16,7 +16,7 @@ class AppendBlendLoader(plugin.AssetLoader): families = ["*"] label = "Append Workfile" - order = 10 + order = 9 icon = "arrow-circle-down" color = "#775555" @@ -25,10 +25,37 @@ class AppendBlendLoader(plugin.AssetLoader): for attr in dir(data_to): setattr(data_to, attr, getattr(data_from, attr)) + # We do not containerize imported content, it remains unmanaged + return + +class ImportBlendLoader(plugin.AssetLoader): + """Import workfile in the current Blender scene (unmanaged) + + Warning: + The loaded content will be unmanaged and is *not* visible in the + scene inventory. It's purely intended to merge content into your scene + so you could also use it as a new base. 
+ """ + + representations = ["blend"] + families = ["*"] + + label = "Import Workfile" + order = 9 + icon = "arrow-circle-down" + color = "#775555" + + def load(self, context, name=None, namespace=None, data=None): + with bpy.data.libraries.load(self.fname) as (data_from, data_to): + for attr in dir(data_to): + if attr == "scenes": + continue + setattr(data_to, attr, getattr(data_from, attr)) + # Add objects to current scene - # scene = bpy.context.scene - # for obj in data_to.objects: - # scene.collection.objects.link(obj) + scene = bpy.context.scene + for obj in data_to.objects: + scene.collection.objects.link(obj) # We do not containerize imported content, it remains unmanaged return From 38ec7d49debc81b8b4b1536448b7fbcaad44fa18 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 16 Dec 2022 13:34:58 +0100 Subject: [PATCH 2461/2550] changelog update --- CHANGELOG.md | 69 +++++++++++++++++++++++++++++++++++++++++++++++++++- HISTORY.md | 69 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 137 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3cca692b68..f9820dec45 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,73 @@ # Changelog +## [3.14.9](https://github.com/pypeclub/OpenPype/tree/3.14.9) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.8...3.14.9) + +### 📖 Documentation + +- Documentation: Testing on Deadline [\#4185](https://github.com/pypeclub/OpenPype/pull/4185) +- Consistent Python version [\#4160](https://github.com/pypeclub/OpenPype/pull/4160) + +**🆕 New features** + +- Feature/op 4397 gl tf extractor for maya [\#4192](https://github.com/pypeclub/OpenPype/pull/4192) +- Maya: Extractor for Unreal SkeletalMesh [\#4174](https://github.com/pypeclub/OpenPype/pull/4174) +- 3dsmax: integration [\#4168](https://github.com/pypeclub/OpenPype/pull/4168) +- Blender: Extract Alembic Animations [\#4128](https://github.com/pypeclub/OpenPype/pull/4128) +- Unreal: Load Alembic Animations [\#4127](https://github.com/pypeclub/OpenPype/pull/4127) + +**🚀 Enhancements** + +- Houdini: Use new interface class name for publish host [\#4220](https://github.com/pypeclub/OpenPype/pull/4220) +- General: Default command for headless mode is interactive [\#4203](https://github.com/pypeclub/OpenPype/pull/4203) +- Maya: Enhanced ASS publishing [\#4196](https://github.com/pypeclub/OpenPype/pull/4196) +- Feature/op 3924 implement ass extractor [\#4188](https://github.com/pypeclub/OpenPype/pull/4188) +- File transactions: Source path is destination path [\#4184](https://github.com/pypeclub/OpenPype/pull/4184) +- Deadline: improve environment processing [\#4182](https://github.com/pypeclub/OpenPype/pull/4182) +- General: Comment per instance in Publisher [\#4178](https://github.com/pypeclub/OpenPype/pull/4178) +- Ensure Mongo database directory exists in Windows. [\#4166](https://github.com/pypeclub/OpenPype/pull/4166) +- Note about unrestricted execution on Windows. [\#4161](https://github.com/pypeclub/OpenPype/pull/4161) +- Maya: Enable thumbnail transparency on extraction. [\#4147](https://github.com/pypeclub/OpenPype/pull/4147) +- Maya: Disable viewport Pan/Zoom on playblast extraction. 
[\#4146](https://github.com/pypeclub/OpenPype/pull/4146) +- Maya: Optional viewport refresh on pointcache extraction [\#4144](https://github.com/pypeclub/OpenPype/pull/4144) +- CelAction: refactory integration to current openpype [\#4140](https://github.com/pypeclub/OpenPype/pull/4140) +- Maya: create and publish bounding box geometry [\#4131](https://github.com/pypeclub/OpenPype/pull/4131) +- Changed the UOpenPypePublishInstance to use the UDataAsset class [\#4124](https://github.com/pypeclub/OpenPype/pull/4124) +- General: Collection Audio speed up [\#4110](https://github.com/pypeclub/OpenPype/pull/4110) +- Maya: keep existing AOVs when creating render instance [\#4087](https://github.com/pypeclub/OpenPype/pull/4087) +- General: Oiio conversion multipart fix [\#4060](https://github.com/pypeclub/OpenPype/pull/4060) + +**🐛 Bug fixes** + +- Publisher: Signal type issues in Python 2 DCCs [\#4230](https://github.com/pypeclub/OpenPype/pull/4230) +- Blender: Fix Layout Family Versioning [\#4228](https://github.com/pypeclub/OpenPype/pull/4228) +- Blender: Fix Create Camera "Use selection" [\#4226](https://github.com/pypeclub/OpenPype/pull/4226) +- TrayPublisher - join needs list [\#4224](https://github.com/pypeclub/OpenPype/pull/4224) +- General: Event callbacks pass event to callbacks as expected [\#4210](https://github.com/pypeclub/OpenPype/pull/4210) +- Build:Revert .toml update of Gazu [\#4207](https://github.com/pypeclub/OpenPype/pull/4207) +- Nuke: fixed imageio node overrides subset filter [\#4202](https://github.com/pypeclub/OpenPype/pull/4202) +- Maya: pointcache [\#4201](https://github.com/pypeclub/OpenPype/pull/4201) +- Unreal: Support for Unreal Engine 5.1 [\#4199](https://github.com/pypeclub/OpenPype/pull/4199) +- General: Integrate thumbnail looks for thumbnail to multiple places [\#4181](https://github.com/pypeclub/OpenPype/pull/4181) +- Various minor bugfixes [\#4172](https://github.com/pypeclub/OpenPype/pull/4172) +- Nuke/Hiero: Remove tkinter library paths before launch [\#4171](https://github.com/pypeclub/OpenPype/pull/4171) +- Flame: vertical alignment of layers [\#4169](https://github.com/pypeclub/OpenPype/pull/4169) +- Nuke: correct detection of viewer and display [\#4165](https://github.com/pypeclub/OpenPype/pull/4165) +- Settings UI: Don't create QApplication if already exists [\#4156](https://github.com/pypeclub/OpenPype/pull/4156) +- General: Extract review handle start offset of sequences [\#4152](https://github.com/pypeclub/OpenPype/pull/4152) +- Maya: Maintain time connections on Alembic update. 
[\#4143](https://github.com/pypeclub/OpenPype/pull/4143) + +**🔀 Refactored code** + +- General: Use qtpy in modules and hosts UIs which are running in OpenPype process [\#4225](https://github.com/pypeclub/OpenPype/pull/4225) +- Tools: Use qtpy instead of Qt in standalone tools [\#4223](https://github.com/pypeclub/OpenPype/pull/4223) +- General: Use qtpy in settings UI [\#4215](https://github.com/pypeclub/OpenPype/pull/4215) + +**Merged pull requests:** + +- layout publish more than one container issue [\#4098](https://github.com/pypeclub/OpenPype/pull/4098) + ## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) @@ -21,7 +89,6 @@ - Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) - General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) - ## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) diff --git a/HISTORY.md b/HISTORY.md index f4e132488b..f24e95b2e1 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,74 @@ # Changelog + +## [3.14.9](https://github.com/pypeclub/OpenPype/tree/3.14.9) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.8...3.14.9) + +### 📖 Documentation + +- Documentation: Testing on Deadline [\#4185](https://github.com/pypeclub/OpenPype/pull/4185) +- Consistent Python version [\#4160](https://github.com/pypeclub/OpenPype/pull/4160) + +**🆕 New features** + +- Feature/op 4397 gl tf extractor for maya [\#4192](https://github.com/pypeclub/OpenPype/pull/4192) +- Maya: Extractor for Unreal SkeletalMesh [\#4174](https://github.com/pypeclub/OpenPype/pull/4174) +- 3dsmax: integration [\#4168](https://github.com/pypeclub/OpenPype/pull/4168) +- Blender: Extract Alembic Animations [\#4128](https://github.com/pypeclub/OpenPype/pull/4128) +- Unreal: Load Alembic Animations [\#4127](https://github.com/pypeclub/OpenPype/pull/4127) + +**🚀 Enhancements** + +- Houdini: Use new interface class name for publish host [\#4220](https://github.com/pypeclub/OpenPype/pull/4220) +- General: Default command for headless mode is interactive [\#4203](https://github.com/pypeclub/OpenPype/pull/4203) +- Maya: Enhanced ASS publishing [\#4196](https://github.com/pypeclub/OpenPype/pull/4196) +- Feature/op 3924 implement ass extractor [\#4188](https://github.com/pypeclub/OpenPype/pull/4188) +- File transactions: Source path is destination path [\#4184](https://github.com/pypeclub/OpenPype/pull/4184) +- Deadline: improve environment processing [\#4182](https://github.com/pypeclub/OpenPype/pull/4182) +- General: Comment per instance in Publisher [\#4178](https://github.com/pypeclub/OpenPype/pull/4178) +- Ensure Mongo database directory exists in Windows. [\#4166](https://github.com/pypeclub/OpenPype/pull/4166) +- Note about unrestricted execution on Windows. [\#4161](https://github.com/pypeclub/OpenPype/pull/4161) +- Maya: Enable thumbnail transparency on extraction. [\#4147](https://github.com/pypeclub/OpenPype/pull/4147) +- Maya: Disable viewport Pan/Zoom on playblast extraction. 
[\#4146](https://github.com/pypeclub/OpenPype/pull/4146) +- Maya: Optional viewport refresh on pointcache extraction [\#4144](https://github.com/pypeclub/OpenPype/pull/4144) +- CelAction: refactory integration to current openpype [\#4140](https://github.com/pypeclub/OpenPype/pull/4140) +- Maya: create and publish bounding box geometry [\#4131](https://github.com/pypeclub/OpenPype/pull/4131) +- Changed the UOpenPypePublishInstance to use the UDataAsset class [\#4124](https://github.com/pypeclub/OpenPype/pull/4124) +- General: Collection Audio speed up [\#4110](https://github.com/pypeclub/OpenPype/pull/4110) +- Maya: keep existing AOVs when creating render instance [\#4087](https://github.com/pypeclub/OpenPype/pull/4087) +- General: Oiio conversion multipart fix [\#4060](https://github.com/pypeclub/OpenPype/pull/4060) + +**🐛 Bug fixes** + +- Publisher: Signal type issues in Python 2 DCCs [\#4230](https://github.com/pypeclub/OpenPype/pull/4230) +- Blender: Fix Layout Family Versioning [\#4228](https://github.com/pypeclub/OpenPype/pull/4228) +- Blender: Fix Create Camera "Use selection" [\#4226](https://github.com/pypeclub/OpenPype/pull/4226) +- TrayPublisher - join needs list [\#4224](https://github.com/pypeclub/OpenPype/pull/4224) +- General: Event callbacks pass event to callbacks as expected [\#4210](https://github.com/pypeclub/OpenPype/pull/4210) +- Build:Revert .toml update of Gazu [\#4207](https://github.com/pypeclub/OpenPype/pull/4207) +- Nuke: fixed imageio node overrides subset filter [\#4202](https://github.com/pypeclub/OpenPype/pull/4202) +- Maya: pointcache [\#4201](https://github.com/pypeclub/OpenPype/pull/4201) +- Unreal: Support for Unreal Engine 5.1 [\#4199](https://github.com/pypeclub/OpenPype/pull/4199) +- General: Integrate thumbnail looks for thumbnail to multiple places [\#4181](https://github.com/pypeclub/OpenPype/pull/4181) +- Various minor bugfixes [\#4172](https://github.com/pypeclub/OpenPype/pull/4172) +- Nuke/Hiero: Remove tkinter library paths before launch [\#4171](https://github.com/pypeclub/OpenPype/pull/4171) +- Flame: vertical alignment of layers [\#4169](https://github.com/pypeclub/OpenPype/pull/4169) +- Nuke: correct detection of viewer and display [\#4165](https://github.com/pypeclub/OpenPype/pull/4165) +- Settings UI: Don't create QApplication if already exists [\#4156](https://github.com/pypeclub/OpenPype/pull/4156) +- General: Extract review handle start offset of sequences [\#4152](https://github.com/pypeclub/OpenPype/pull/4152) +- Maya: Maintain time connections on Alembic update. 
[\#4143](https://github.com/pypeclub/OpenPype/pull/4143) + +**🔀 Refactored code** + +- General: Use qtpy in modules and hosts UIs which are running in OpenPype process [\#4225](https://github.com/pypeclub/OpenPype/pull/4225) +- Tools: Use qtpy instead of Qt in standalone tools [\#4223](https://github.com/pypeclub/OpenPype/pull/4223) +- General: Use qtpy in settings UI [\#4215](https://github.com/pypeclub/OpenPype/pull/4215) + +**Merged pull requests:** + +- layout publish more than one container issue [\#4098](https://github.com/pypeclub/OpenPype/pull/4098) + ## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) From 86d3636004e37554e3cfc505425cf0868e98a4bb Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 16 Dec 2022 12:45:18 +0000 Subject: [PATCH 2462/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 8d82df563d..021b816b8f 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.4" +__version__ = "3.14.9-nightly.5" From a1fe5359cc7f88b80142128d09bbcd38215f3e45 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 16 Dec 2022 12:51:43 +0000 Subject: [PATCH 2463/2550] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 021b816b8f..5b5b1475c0 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.5" +__version__ = "3.14.9" From 8ceafdbc367130e2bfb26e44f7f68e4d5a84acf1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 16 Dec 2022 15:42:48 +0100 Subject: [PATCH 2464/2550] flame: adding also `rgb` layer to default rename --- openpype/settings/defaults/project_settings/flame.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index 337e58ac62..1422a76af3 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -122,6 +122,7 @@ "clip_name_template": "{asset}_{subset}<_{output}>", "layer_rename_template": "{asset}_{subset}<_{output}>", "layer_rename_patterns": [ + "rgb", "rgba" ] }, @@ -149,6 +150,7 @@ "clip_name_template": "{batch}_{asset}_{subset}<_{output}>", "layer_rename_template": "{asset}_{subset}<_{output}>", "layer_rename_patterns": [ + "rgb", "rgba" ] } From a0c60afb286bb615b67aa3e0fa156767f3c167e5 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 16 Dec 2022 14:46:29 +0000 Subject: [PATCH 2465/2550] Objects imported are stored in collections for each scene --- .../blender/plugins/load/import_workfile.py | 35 +++++++++++++++---- 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py index 2849031f7e..ed24140a59 100644 --- a/openpype/hosts/blender/plugins/load/import_workfile.py +++ b/openpype/hosts/blender/plugins/load/import_workfile.py @@ -1,8 +1,20 @@ +from pathlib import Path + import bpy from openpype.hosts.blender.api import plugin +def get_unique_number(asset, subset): + count = 1 + name = f"{asset}_{count:0>2}_{subset}" + collection_names 
= [coll.name for coll in bpy.data.collections] + while name in collection_names: + count += 1 + name = f"{asset}_{count:0>2}_{subset}" + return f"{count:0>2}" + + class AppendBlendLoader(plugin.AssetLoader): """Append workfile in Blender (unmanaged) @@ -28,6 +40,7 @@ class AppendBlendLoader(plugin.AssetLoader): # We do not containerize imported content, it remains unmanaged return + class ImportBlendLoader(plugin.AssetLoader): """Import workfile in the current Blender scene (unmanaged) @@ -46,16 +59,26 @@ class ImportBlendLoader(plugin.AssetLoader): color = "#775555" def load(self, context, name=None, namespace=None, data=None): + asset = context['asset']['name'] + subset = context['subset']['name'] + + unique_number = get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) + with bpy.data.libraries.load(self.fname) as (data_from, data_to): for attr in dir(data_to): - if attr == "scenes": - continue setattr(data_to, attr, getattr(data_from, attr)) - # Add objects to current scene - scene = bpy.context.scene - for obj in data_to.objects: - scene.collection.objects.link(obj) + current_scene = bpy.context.scene + + for scene in data_to.scenes: + # scene.name = group_name + collection = bpy.data.collections.new(name=group_name) + for obj in scene.objects: + collection.objects.link(obj) + current_scene.collection.children.link(collection) + for coll in scene.collection.children: + collection.children.link(coll) # We do not containerize imported content, it remains unmanaged return From f94a4195eb63662cdd70f17796c4c31f1265922b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 16 Dec 2022 14:47:28 +0000 Subject: [PATCH 2466/2550] The original names of the scenes are now preserved --- .../hosts/blender/plugins/load/import_workfile.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py index ed24140a59..92f5ba168c 100644 --- a/openpype/hosts/blender/plugins/load/import_workfile.py +++ b/openpype/hosts/blender/plugins/load/import_workfile.py @@ -65,15 +65,22 @@ class ImportBlendLoader(plugin.AssetLoader): unique_number = get_unique_number(asset, subset) group_name = plugin.asset_name(asset, subset, unique_number) + # We need to preserve the original names of the scenes, otherwise, + # if there are duplicate names in the current workfile, the imported + # scenes will be renamed by Blender to avoid conflicts. 
+ original_scene_names = [] + with bpy.data.libraries.load(self.fname) as (data_from, data_to): for attr in dir(data_to): + if attr == "scenes": + for scene in data_from.scenes: + original_scene_names.append(scene) setattr(data_to, attr, getattr(data_from, attr)) current_scene = bpy.context.scene - for scene in data_to.scenes: - # scene.name = group_name - collection = bpy.data.collections.new(name=group_name) + for scene, s_name in zip(data_to.scenes, original_scene_names): + collection = bpy.data.collections.new(f"{group_name}_{s_name}") for obj in scene.objects: collection.objects.link(obj) current_scene.collection.children.link(collection) From 9d57af05615ccf93943600f5af6fc1e8dfaad983 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 16 Dec 2022 14:59:39 +0000 Subject: [PATCH 2467/2550] Scenes are renamed to identify from where they have been loaded --- .../blender/plugins/load/import_workfile.py | 71 ++++++++----------- 1 file changed, 31 insertions(+), 40 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py index 92f5ba168c..618fb83e31 100644 --- a/openpype/hosts/blender/plugins/load/import_workfile.py +++ b/openpype/hosts/blender/plugins/load/import_workfile.py @@ -1,18 +1,37 @@ -from pathlib import Path - import bpy from openpype.hosts.blender.api import plugin -def get_unique_number(asset, subset): - count = 1 - name = f"{asset}_{count:0>2}_{subset}" - collection_names = [coll.name for coll in bpy.data.collections] - while name in collection_names: - count += 1 - name = f"{asset}_{count:0>2}_{subset}" - return f"{count:0>2}" +def append_workfile(context, fname, do_import): + asset = context['asset']['name'] + subset = context['subset']['name'] + + group_name = plugin.asset_name(asset, subset) + + # We need to preserve the original names of the scenes, otherwise, + # if there are duplicate names in the current workfile, the imported + # scenes will be renamed by Blender to avoid conflicts. 
+ original_scene_names = [] + + with bpy.data.libraries.load(fname) as (data_from, data_to): + for attr in dir(data_to): + if attr == "scenes": + for scene in data_from.scenes: + original_scene_names.append(scene) + setattr(data_to, attr, getattr(data_from, attr)) + + current_scene = bpy.context.scene + + for scene, s_name in zip(data_to.scenes, original_scene_names): + scene.name = f"{group_name}_{s_name}" + if do_import: + collection = bpy.data.collections.new(f"{group_name}_{s_name}") + for obj in scene.objects: + collection.objects.link(obj) + current_scene.collection.children.link(collection) + for coll in scene.collection.children: + collection.children.link(coll) class AppendBlendLoader(plugin.AssetLoader): @@ -33,9 +52,7 @@ class AppendBlendLoader(plugin.AssetLoader): color = "#775555" def load(self, context, name=None, namespace=None, data=None): - with bpy.data.libraries.load(self.fname) as (data_from, data_to): - for attr in dir(data_to): - setattr(data_to, attr, getattr(data_from, attr)) + append_workfile(context, self.fname, False) # We do not containerize imported content, it remains unmanaged return @@ -59,33 +76,7 @@ class ImportBlendLoader(plugin.AssetLoader): color = "#775555" def load(self, context, name=None, namespace=None, data=None): - asset = context['asset']['name'] - subset = context['subset']['name'] - - unique_number = get_unique_number(asset, subset) - group_name = plugin.asset_name(asset, subset, unique_number) - - # We need to preserve the original names of the scenes, otherwise, - # if there are duplicate names in the current workfile, the imported - # scenes will be renamed by Blender to avoid conflicts. - original_scene_names = [] - - with bpy.data.libraries.load(self.fname) as (data_from, data_to): - for attr in dir(data_to): - if attr == "scenes": - for scene in data_from.scenes: - original_scene_names.append(scene) - setattr(data_to, attr, getattr(data_from, attr)) - - current_scene = bpy.context.scene - - for scene, s_name in zip(data_to.scenes, original_scene_names): - collection = bpy.data.collections.new(f"{group_name}_{s_name}") - for obj in scene.objects: - collection.objects.link(obj) - current_scene.collection.children.link(collection) - for coll in scene.collection.children: - collection.children.link(coll) + append_workfile(context, self.fname, True) # We do not containerize imported content, it remains unmanaged return From e0a2f2370256212f2ea78f282a9dc3eeb156b362 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Dec 2022 16:35:14 +0100 Subject: [PATCH 2468/2550] :bug: fix node access --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_active_state.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_usd_layers.py | 3 ++- .../hosts/houdini/plugins/publish/extract_redshift_proxy.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_usd.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_usd_layered.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py | 2 +- .../houdini/plugins/publish/validate_animation_settings.py | 3 +-- openpype/hosts/houdini/plugins/publish/validate_bypass.py | 2 +- .../hosts/houdini/plugins/publish/validate_cop_output_node.py | 2 +- openpype/hosts/houdini/plugins/publish/validate_frame_token.py | 3 +-- openpype/hosts/houdini/plugins/publish/validate_no_errors.py | 2 +- .../plugins/publish/validate_usd_layer_path_backslashes.py | 2 +- 
.../houdini/plugins/publish/validate_usd_model_and_shade.py | 2 +- .../hosts/houdini/plugins/publish/validate_usd_output_node.py | 2 +- .../hosts/houdini/plugins/publish/validate_usd_setdress.py | 2 +- .../houdini/plugins/publish/validate_usd_shade_workspace.py | 2 +- .../hosts/houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 19 files changed, 20 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 4bed83c2e9..5f95b2efb4 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -34,7 +34,7 @@ class CreateHDA(plugin.HoudiniCreator): } return subset_name.lower() in existing_subset_names_low - def _create_instance_node( + def create_instance_node( self, node_name, parent, node_type="geometry"): import hou diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index cc3f2e7fae..7fda94b288 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -25,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = hou.node(instance.get("instance_node")) + node = hou.node(instance.data.get("instance_node")) if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 346bdf3421..f1d73d7523 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = hou.node(instance.get("instance_node")) + rop = hou.node(instance.data.get("instance_node")) # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index 833add854b..696560a590 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -21,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = hou.node(instance.get("instance_node")) + rop_node = hou.node(instance.data["instance_node"]) save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -56,6 +56,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] + layer_inst.data["instance_node"] = instance.data["instance_node"] # include same USD ROP layer_inst.append(rop_node) # include layer data diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index 29ede98a52..1d99ac665c 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -17,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): - ropnode = hou.node(instance.get("instance_node")) + ropnode = 
hou.node(instance.data.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index cbeb5add71..61c1b477b2 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -18,7 +18,7 @@ class ExtractUSD(publish.Extractor): def process(self, instance): - ropnode = hou.node(instance.get("instance_node")) + ropnode = hou.node(instance.data.get("instance_node")) # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 0288b7363a..8422a3bc3e 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = hou.node(instance.get("instance_node")) + node = hou.node(instance.data["instance_node"]) # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 434d6a2160..4bca758f08 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -17,7 +17,7 @@ class ExtractVDBCache(publish.Extractor): def process(self, instance): - ropnode = hou.node(instance.get("instance_node")) + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index f11f9c0c62..4878738ed3 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -37,8 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = hou.node(instance.get("instance_node")) - + node = hou.node(instance.data["instance_node"]) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") if frame_range == 0: diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 1bf51a986c..c10c5a2c05 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -37,6 +37,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - rop = hou.node(instance.get("instance_node")) + rop = hou.node(instance.data["instance_node"]) if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 1d0377c818..1fc767b309 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ 
b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -48,7 +48,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): ) if output_node is None: - node = hou.node(instance.get("instance_node")) + node = hou.node(instance.data.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index b5f6ba71e1..06d4003295 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -37,8 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = hou.node(instance.get("instance_node")) - + node = hou.node(instance.data["instance_node"]) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") if frame_range == 0: diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index f7c95aaf4e..6c48eae70a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -38,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(hou.node(instance.get("instance_node"))) + validate_nodes.append(hou.node(instance.data.get("instance_node"))) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index a0e2302495..f2c7878c4e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -28,7 +28,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = hou.node(instance.get("instance_node")) + rop = hou.node(instance.data.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index a55eb70cb2..b8faae16d7 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -40,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = hou.node(instance.get("instance_node")) + rop = hou.node(instance.data.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index af21efcafc..5cb5bd35fb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -36,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = hou.node(instance.get("instance_node")) + node = 
hou.node(instance.data.get("instance_node")) cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 01ebc0e828..b96d185482 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -24,7 +24,7 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): from pxr import UsdGeom import hou - rop = hou.node(instance.get("instance_node")) + rop = hou.node(instance.data.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index bd3366a424..cb2099437d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -20,7 +20,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): def process(self, instance): - rop = hou.node(instance.get("instance_node")) + rop = hou.node(instance.data.get("instance_node")) workspace = rop.parent() definition = workspace.type().definition() diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 61c1209fc9..f9f88b3bf9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -38,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." % instance.get("instance_node") + "ROP node '%s'." 
% instance.data.get("instance_node") ) return [instance] From c58cd1fb792fc27187fc58fb93b9eebe368c99d3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 16 Dec 2022 17:35:15 +0100 Subject: [PATCH 2469/2550] use qtpy in flame host --- openpype/hosts/flame/api/menu.py | 2 +- openpype/hosts/flame/api/plugin.py | 2 +- .../flame/startup/openpype_babypublisher/modules/panel_app.py | 2 +- .../flame/startup/openpype_babypublisher/modules/uiwidgets.py | 2 +- openpype/hosts/flame/startup/openpype_in_flame.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index 319ed7afb6..5f9dc57a61 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -1,5 +1,5 @@ import os -from Qt import QtWidgets +from qtpy import QtWidgets from copy import deepcopy from pprint import pformat from openpype.tools.utils.host_tools import HostToolsHelper diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index ca113fd98a..b53ce758f9 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -5,7 +5,7 @@ from copy import deepcopy from xml.etree import ElementTree as ET import qargparse -from Qt import QtCore, QtWidgets +from qtpy import QtCore, QtWidgets from openpype import style from openpype.lib import Logger diff --git a/openpype/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py b/openpype/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py index 1e8011efaa..5c5bb0b4a1 100644 --- a/openpype/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py +++ b/openpype/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import uiwidgets import app_utils diff --git a/openpype/hosts/flame/startup/openpype_babypublisher/modules/uiwidgets.py b/openpype/hosts/flame/startup/openpype_babypublisher/modules/uiwidgets.py index c6db875df0..5498a49197 100644 --- a/openpype/hosts/flame/startup/openpype_babypublisher/modules/uiwidgets.py +++ b/openpype/hosts/flame/startup/openpype_babypublisher/modules/uiwidgets.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore class FlameLabel(QtWidgets.QLabel): diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index d07aaa6b7d..39869333aa 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -1,6 +1,6 @@ from __future__ import print_function import sys -from Qt import QtWidgets +from qtpy import QtWidgets from pprint import pformat import atexit From b60ccfaf01dd1bb2ebf964afe33c435016956650 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 16 Dec 2022 17:44:39 +0100 Subject: [PATCH 2470/2550] use qtpy in nuke host --- openpype/hosts/nuke/api/lib.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 7ee30bf273..a066bbcdcf 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -10,7 +10,7 @@ from collections import OrderedDict import clique import nuke -from Qt import QtCore, QtWidgets +from qtpy import QtCore, QtWidgets from openpype.client import ( get_project, @@ -81,7 +81,6 @@ class Context: def get_main_window(): """Acquire Nuke's main window""" if Context.main_window is None: - from Qt import QtWidgets 
top_widgets = QtWidgets.QApplication.topLevelWidgets() name = "Foundry::UI::DockMainWindow" From 18c0a7fb3300d4e95651941b15eb4965b43e48f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 16 Dec 2022 17:49:28 +0100 Subject: [PATCH 2471/2550] use qtpy in hiero host implementation --- openpype/hosts/hiero/api/launchforhiero.py | 2 +- openpype/hosts/hiero/api/lib.py | 2 +- openpype/hosts/hiero/api/menu.py | 2 +- openpype/hosts/hiero/api/plugin.py | 2 +- openpype/hosts/hiero/plugins/publish/precollect_workfile.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/hiero/api/launchforhiero.py b/openpype/hosts/hiero/api/launchforhiero.py index 5f7dbe23c9..c2186e1d2a 100644 --- a/openpype/hosts/hiero/api/launchforhiero.py +++ b/openpype/hosts/hiero/api/launchforhiero.py @@ -1,7 +1,7 @@ import logging from scriptsmenu import scriptsmenu -from Qt import QtWidgets +from qtpy import QtWidgets log = logging.getLogger(__name__) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 7f0cf8149a..c344b35718 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -15,7 +15,7 @@ import secrets import shutil import hiero -from Qt import QtWidgets, QtCore, QtXml +from qtpy import QtWidgets, QtCore, QtXml from openpype.client import get_project from openpype.settings import get_project_settings diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py index 2a7560c6ba..6baeb38cc0 100644 --- a/openpype/hosts/hiero/api/menu.py +++ b/openpype/hosts/hiero/api/menu.py @@ -43,7 +43,7 @@ def menu_install(): """ - from Qt import QtGui + from qtpy import QtGui from . import ( publish, launch_workfiles_app, reload_config, apply_colorspace_project, apply_colorspace_clips diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index 5ec1c78aaa..38933a1e30 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -5,7 +5,7 @@ from copy import deepcopy import hiero -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qargparse from openpype.settings import get_current_project_settings diff --git a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py index c9bfb86810..08963f98fd 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py @@ -3,7 +3,7 @@ import tempfile from pprint import pformat import pyblish.api -from Qt.QtGui import QPixmap +from qtpy.QtGui import QPixmap import hiero.ui From 48d4f611f7cd60e0115e77329b464285e325431b Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 17 Dec 2022 03:27:33 +0000 Subject: [PATCH 2472/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 5b5b1475c0..454d56a5b7 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9" +__version__ = "3.14.10-nightly.1" From b22d22a65ecb7c8e750427ef4ac00fa1eaa80f6d Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 19 Dec 2022 00:03:05 +0100 Subject: [PATCH 2473/2550] :bug: fix dependencies --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3398e1725e..a92b996327 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -183,7 +183,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Adding file dependencies. if self.asset_dependencies: dependencies = instance.context.data["fileDependencies"] - dependencies.append(context.data["currentFile"]) for dependency in dependencies: job_info.AssetDependency += dependency @@ -294,7 +293,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add export job as dependency -------------------------------------- if export_job: job_info, _ = payload - job_info.JobDependency = export_job + job_info.JobDependencies = export_job if instance.data.get("tileRendering"): # Prepare tiles data @@ -431,7 +430,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): frame_assembly_job_info.ExtraInfo[0] = file_hash frame_assembly_job_info.ExtraInfo[1] = file - frame_assembly_job_info.JobDependency = tile_job_id + frame_assembly_job_info.JobDependencies = tile_job_id # write assembly job config files now = datetime.now() From 4c214cabe4890dcc2a9fd5f1f8fd4d4e5822cb4e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 19 Dec 2022 21:47:21 +0100 Subject: [PATCH 2474/2550] fix import of QtXml --- openpype/hosts/hiero/api/lib.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index c344b35718..bbd1edc14a 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -15,7 +15,11 @@ import secrets import shutil import hiero -from qtpy import QtWidgets, QtCore, QtXml +from qtpy import QtWidgets, QtCore +try: + from PySide import QtXml +except ImportError: + from PySide2 import QtXml from openpype.client import get_project from openpype.settings import get_project_settings From d439eec880c2703aba86b5500d28757a9bf00f25 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 18:36:27 +0800 Subject: [PATCH 2475/2550] maya image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 86816495ae..664cd06fc1 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -72,6 +72,8 @@ class CameraWindow(QtWidgets.QDialog): def on_cancel_pressed(self): self.camera = None + if self.camera: + return self.close() From 8055b793ee7bd1d90466bb0cd3f6334779ba1a8c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 18:45:14 +0800 Subject: [PATCH 2476/2550] maya image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 664cd06fc1..ce752f170f 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -71,8 +71,7 @@ class CameraWindow(QtWidgets.QDialog): self.close() def on_cancel_pressed(self): - self.camera = None - if self.camera: + if self.camera is not None: return 
self.close() From 9173199896975fe5d2a20e8ec99019fe1987140b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 18:46:06 +0800 Subject: [PATCH 2477/2550] maya image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index ce752f170f..081c646298 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -71,7 +71,7 @@ class CameraWindow(QtWidgets.QDialog): self.close() def on_cancel_pressed(self): - if self.camera is not None: + if self.camera: return self.close() From cd9f14a21d999a50e01d2ed9ce0f744a17af829c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 19:09:17 +0800 Subject: [PATCH 2478/2550] maya image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 081c646298..c18cb5c6c8 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -57,7 +57,7 @@ class CameraWindow(QtWidgets.QDialog): layout.addWidget(self.widgets["warning"]) self.widgets["okButton"].pressed.connect(self.on_ok_pressed) - self.widgets["cancelButton"].clicked.connect(self.on_cancel_pressed) + self.widgets["cancelButton"].pressed.connect(self.on_cancel_pressed) self.widgets["list"].itemPressed.connect(self.on_list_itemPressed) def on_list_itemPressed(self, item): @@ -71,8 +71,6 @@ class CameraWindow(QtWidgets.QDialog): self.close() def on_cancel_pressed(self): - if self.camera: - return self.close() From fac9dab4f8d099af832cdb96a107a5a1b8510e22 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Dec 2022 12:40:52 +0100 Subject: [PATCH 2479/2550] implement __ne__ in CreatorType for python 2 dccs --- openpype/tools/publisher/control.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 615f3eb8d9..50a814de5c 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -788,6 +788,10 @@ class CreatorType: def __eq__(self, other): return self.name == str(other) + def __ne__(self, other): + # This is implemented only because of Python 2 + return not self == other + class CreatorTypes: base = CreatorType("base") From 32f87d6324702dc24b86123d97bef40ec353a0de Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 19:47:07 +0800 Subject: [PATCH 2480/2550] maya image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index c18cb5c6c8..7d5d4cb12b 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -71,8 +71,10 @@ class CameraWindow(QtWidgets.QDialog): self.close() def on_cancel_pressed(self): - self.close() + if self.camera is None: + return + self.close() class ImagePlaneLoader(load.LoaderPlugin): """Specific loader of plate for image planes on selected camera.""" From ccc25796971a1c75bdda6037d75921d28ad9e2fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Dec 2022 
13:39:27 +0100 Subject: [PATCH 2481/2550] use qtpy in maya implementation instead of Qt --- openpype/hosts/maya/api/commands.py | 2 +- openpype/hosts/maya/api/lib.py | 4 ++-- openpype/hosts/maya/api/menu.py | 2 +- openpype/hosts/maya/api/pipeline.py | 2 +- openpype/hosts/maya/api/shader_definition_editor.py | 2 +- openpype/hosts/maya/plugins/load/actions.py | 2 +- openpype/hosts/maya/plugins/load/load_image_plane.py | 2 +- openpype/hosts/maya/plugins/load/load_look.py | 2 +- .../maya/plugins/publish/validate_assembly_transforms.py | 2 +- 9 files changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/api/commands.py b/openpype/hosts/maya/api/commands.py index 355edf3ae4..4a36406632 100644 --- a/openpype/hosts/maya/api/commands.py +++ b/openpype/hosts/maya/api/commands.py @@ -39,7 +39,7 @@ class ToolWindows: def edit_shader_definitions(): - from Qt import QtWidgets + from qtpy import QtWidgets from openpype.hosts.maya.api.shader_definition_editor import ( ShaderDefinitionsEditor ) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index ca826946f8..dd5da275e8 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -116,7 +116,7 @@ RENDERLIKE_INSTANCE_FAMILIES = ["rendering", "vrayscene"] def get_main_window(): """Acquire Maya's main window""" - from Qt import QtWidgets + from qtpy import QtWidgets if self._parent is None: self._parent = { @@ -3018,7 +3018,7 @@ def update_content_on_context_change(): def show_message(title, msg): - from Qt import QtWidgets + from qtpy import QtWidgets from openpype.widgets import message_window # Find maya main window diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index e20f29049b..67109e9958 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -1,7 +1,7 @@ import os import logging -from Qt import QtWidgets, QtGui +from qtpy import QtWidgets, QtGui import maya.utils import maya.cmds as cmds diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index b3bf738a2b..3798170671 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -563,7 +563,7 @@ def on_save(): def on_open(): """On scene open let's assume the containers have changed.""" - from Qt import QtWidgets + from qtpy import QtWidgets from openpype.widgets import popup cmds.evalDeferred( diff --git a/openpype/hosts/maya/api/shader_definition_editor.py b/openpype/hosts/maya/api/shader_definition_editor.py index 6ea5e1a127..6edafb1f35 100644 --- a/openpype/hosts/maya/api/shader_definition_editor.py +++ b/openpype/hosts/maya/api/shader_definition_editor.py @@ -5,7 +5,7 @@ Shader names are stored as simple text file over GridFS in mongodb. 
""" import os -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.client.mongo import OpenPypeMongoConnection from openpype import resources import gridfs diff --git a/openpype/hosts/maya/plugins/load/actions.py b/openpype/hosts/maya/plugins/load/actions.py index 9cc9180d6e..98c8192294 100644 --- a/openpype/hosts/maya/plugins/load/actions.py +++ b/openpype/hosts/maya/plugins/load/actions.py @@ -153,7 +153,7 @@ class ImportMayaLoader(load.LoaderPlugin): """ - from Qt import QtWidgets + from qtpy import QtWidgets accept = QtWidgets.QMessageBox.Ok buttons = accept | QtWidgets.QMessageBox.Cancel diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index b267921bdc..c857c9254c 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.client import ( get_asset_by_id, diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py index 3ef19ad96f..8f3e017658 100644 --- a/openpype/hosts/maya/plugins/load/load_look.py +++ b/openpype/hosts/maya/plugins/load/load_look.py @@ -3,7 +3,7 @@ import json from collections import defaultdict -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.client import get_representation_by_name from openpype.pipeline import ( diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py index 3f2c59b95b..e8087a304f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_transforms.py @@ -89,7 +89,7 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): """ - from Qt import QtWidgets + from qtpy import QtWidgets from openpype.hosts.maya.api import lib # Store namespace in variable, cosmetics thingy From cb5cbaed7d7822730deeae5baf473761ae547857 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Dec 2022 13:43:28 +0100 Subject: [PATCH 2482/2550] use qtpy in max host --- openpype/hosts/max/api/menu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py index d1913c51e0..02d8315af6 100644 --- a/openpype/hosts/max/api/menu.py +++ b/openpype/hosts/max/api/menu.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """3dsmax menu definition of OpenPype.""" -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from pymxs import runtime as rt from openpype.tools.utils import host_tools From 64d1d67c46a94aa64184443e474827bff8facd97 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Dec 2022 13:46:04 +0100 Subject: [PATCH 2483/2550] use qtpy in houdini host --- openpype/hosts/houdini/api/usd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index 4f4a3d8e6f..b935dfdf30 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -3,7 +3,7 @@ import contextlib import logging -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype.client import get_asset_by_name From 20b85799e2f1c219254a42be2232398d8650351c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 21:16:19 +0800 
Subject: [PATCH 2484/2550] maya image plane load error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 7d5d4cb12b..5075e4b8fe 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -71,6 +71,7 @@ class CameraWindow(QtWidgets.QDialog): self.close() def on_cancel_pressed(self): + self.camera = None if self.camera is None: return From 6a4261d81a001874f4ac26f72514a70f3a34dea6 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 21:58:50 +0800 Subject: [PATCH 2485/2550] maya load image plane error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 5075e4b8fe..2bfe6cb766 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -71,12 +71,10 @@ class CameraWindow(QtWidgets.QDialog): self.close() def on_cancel_pressed(self): - self.camera = None - if self.camera is None: + if self.camera or self.camera is None: + self.close() return - self.close() - class ImagePlaneLoader(load.LoaderPlugin): """Specific loader of plate for image planes on selected camera.""" From c405281c30b5d2321359ae1d9f5621aec4177ec2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 20 Dec 2022 15:08:23 +0100 Subject: [PATCH 2486/2550] fix positioning of nice checkbox for python 2 --- openpype/widgets/nice_checkbox.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/widgets/nice_checkbox.py b/openpype/widgets/nice_checkbox.py index 334a5d197b..6952cb41da 100644 --- a/openpype/widgets/nice_checkbox.py +++ b/openpype/widgets/nice_checkbox.py @@ -328,7 +328,7 @@ class NiceCheckbox(QtWidgets.QFrame): offset_ratio ) - margins_ratio = self._checker_margins_divider + margins_ratio = float(self._checker_margins_divider) if margins_ratio > 0: size_without_margins = int( (float(frame_rect.height()) / margins_ratio) @@ -351,9 +351,9 @@ class NiceCheckbox(QtWidgets.QFrame): ) if checkbox_rect.width() > checkbox_rect.height(): - radius = floor(checkbox_rect.height() / 2) + radius = floor(checkbox_rect.height() * 0.5) else: - radius = floor(checkbox_rect.width() / 2) + radius = floor(checkbox_rect.width() * 0.5) painter.setPen(QtCore.Qt.transparent) painter.setBrush(bg_color) @@ -369,7 +369,7 @@ class NiceCheckbox(QtWidgets.QFrame): if self._current_step == 0: x_offset = 0 else: - x_offset = (area_width / self._steps) * self._current_step + x_offset = (float(area_width) / self._steps) * self._current_step pos_x = checkbox_rect.x() + x_offset + margin_size_c pos_y = checkbox_rect.y() + margin_size_c From 0b8b2eb03a03eb25bb399d0b4d25c75b154792ca Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 20 Dec 2022 22:33:04 +0800 Subject: [PATCH 2487/2550] maya load image plane error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 2bfe6cb766..9eb4c7f561 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -71,9 +71,8 @@ class 
CameraWindow(QtWidgets.QDialog): self.close() def on_cancel_pressed(self): - if self.camera or self.camera is None: - self.close() - return + self.camera = None + self.close() class ImagePlaneLoader(load.LoaderPlugin): """Specific loader of plate for image planes on selected camera.""" @@ -109,7 +108,10 @@ class ImagePlaneLoader(load.LoaderPlugin): camera_names["Create new camera."] = "create_camera" window = CameraWindow(camera_names.keys()) window.exec_() - camera = camera_names[window.camera] + try: + camera = camera_names[window.camera] + except KeyError: + pass if camera == "create_camera": camera = pm.createNode("camera") From c597a12601415fb6cf90f680606915b8dc2b3bcb Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 21 Dec 2022 00:14:32 +0800 Subject: [PATCH 2488/2550] maya-image-plane-load-error --- openpype/hosts/maya/plugins/load/load_image_plane.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 9eb4c7f561..f77b755050 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -108,10 +108,10 @@ class ImagePlaneLoader(load.LoaderPlugin): camera_names["Create new camera."] = "create_camera" window = CameraWindow(camera_names.keys()) window.exec_() - try: - camera = camera_names[window.camera] - except KeyError: - pass + # Skip if no camera was selected (Dialog was closed) + if window.camera not in camera_names: + return + camera = camera_names[window.camera] if camera == "create_camera": camera = pm.createNode("camera") From 050df60c139021d1f0b15230ea90ca8a3f34bb76 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 21 Dec 2022 03:28:04 +0000 Subject: [PATCH 2489/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 454d56a5b7..904579ad55 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.1" +__version__ = "3.14.10-nightly.2" From 34f2c15d2dc2fa688026c46d1a8d72164321ee7e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 21 Dec 2022 16:28:50 +0100 Subject: [PATCH 2490/2550] hiero: creator gui with min max --- openpype/hosts/hiero/api/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index 38933a1e30..07457db1a4 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -276,8 +276,8 @@ class CreatorWidget(QtWidgets.QDialog): elif v["type"] == "QSpinBox": data[k]["value"] = self.create_row( content_layout, "QSpinBox", v["label"], - setRange=(1, 9999999), setValue=v["value"], - setToolTip=tool_tip) + setValue=v["value"], setMinimum=0, + setMaximum=100000, setToolTip=tool_tip) return data From a15d41e00b8956f10bda95f2a84d55f3b0081e16 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Dec 2022 11:19:47 +0100 Subject: [PATCH 2491/2550] OP-4512 - add 'local' to default sites It seems that local label could come too. 
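For reference, a minimal standalone sketch of the site check that this commit and the
two follow-up anatomy commits below (clearer error message, empty site_name allowed)
converge on. The function name and the local_site_id argument are illustrative only;
in OpenPype the machine-specific id comes from the get_local_site_id() helper that the
diff references.

    # Illustrative sketch, not the OpenPype API: accept only the default local
    # sites ("studio", "local", this machine's site id) or an empty value.
    def is_default_local_site(site_name, local_site_id):
        if not site_name:
            # No explicit site requested -> default behaviour applies.
            return True
        return site_name in {"studio", "local", local_site_id}

    if __name__ == "__main__":
        machine_site = "workstation-01"  # stand-in for get_local_site_id()
        for name in (None, "studio", "local", machine_site, "gdrive"):
            if is_default_local_site(name, machine_site):
                print("{!r} accepted".format(name))
            else:
                print("Anatomy could be created only for default local "
                      "sites, not for {!r}".format(name))
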
--- openpype/pipeline/anatomy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 5b4eb67247..491ea48413 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -61,7 +61,7 @@ class BaseAnatomy(object): project_name = project_doc["name"] self.project_name = project_name - if site_name not in ["studio", get_local_site_id()]: + if site_name not in ["studio", "local", get_local_site_id()]: raise RuntimeError("Anatomy could be created only for default " "local sites") From 56cdcf3445bf49f97b86db5fade7833c2bb45c17 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Dec 2022 11:21:15 +0100 Subject: [PATCH 2492/2550] OP-4512 - added better logging --- openpype/pipeline/anatomy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 491ea48413..a6750ede7c 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -63,7 +63,7 @@ class BaseAnatomy(object): if site_name not in ["studio", "local", get_local_site_id()]: raise RuntimeError("Anatomy could be created only for default " - "local sites") + "local sites not for {}".format(site_name)) self._site_name = site_name From 95b15a9f00bbcd5f7bd0e241e3818ee6b6d64c9d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:44:35 +0100 Subject: [PATCH 2493/2550] global,nuke,maya: on demand placeholder removal preset attribute --- .../maya/api/workfile_template_builder.py | 2 +- .../nuke/api/workfile_template_builder.py | 15 ++++-- .../workfile/workfile_template_builder.py | 49 +++++++++++++++---- .../schema_templated_workfile_build.json | 11 ++++- 4 files changed, 62 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index ef043ed0f4..1d3f1cf568 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -28,7 +28,7 @@ class MayaTemplateBuilder(AbstractTemplateBuilder): Args: path (str): A path to current template (usually given by - get_template_path implementation) + get_template_preset implementation) Returns: bool: Wether the template was succesfully imported or not diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 7a2e442e32..60bf906fbe 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -40,7 +40,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder): Args: path (str): A path to current template (usually given by - get_template_path implementation) + get_template_preset implementation) Returns: bool: Wether the template was succesfully imported or not @@ -273,6 +273,15 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): placeholder.data["nb_children"] += 1 reset_selection() + + # remove placeholders marked as delete + if ( + placeholder.data.get("delete") + and not placeholder.data.get("keep_placeholder") + ): + self.log.debug("Deleting node: {}".format(placeholder_node.name())) + nuke.delete(placeholder_node) + # go back to root group nuke.root().begin() @@ -454,12 +463,12 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): ) for node in placeholder_node.dependent(): for idx in range(node.inputs()): - if node.input(idx) == placeholder_node: + if 
node.input(idx) == placeholder_node and output_node: node.setInput(idx, output_node) for node in placeholder_node.dependencies(): for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node: + if placeholder_node.input(idx) == node and input_node: input_node.setInput(0, node) def _create_sib_copies(self, placeholder): diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 582657c735..f6a4ab51cb 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -401,7 +401,12 @@ class AbstractTemplateBuilder(object): key=lambda i: i.order )) - def build_template(self, template_path=None, level_limit=None): + def build_template( + self, + template_path=None, + level_limit=None, + keep_placeholders=None + ): """Main callback for building workfile from template path. Todo: @@ -410,16 +415,22 @@ class AbstractTemplateBuilder(object): Args: template_path (str): Path to a template file with placeholders. - Template from settings 'get_template_path' used when not + Template from settings 'get_template_preset' used when not passed. level_limit (int): Limit of populate loops. Related to 'populate_scene_placeholders' method. """ + template_preset = self.get_template_preset() if template_path is None: - template_path = self.get_template_path() + template_path = template_preset["path"] + + if keep_placeholders is None: + keep_placeholders = template_preset["placeholder_keep"] + self.import_template(template_path) - self.populate_scene_placeholders(level_limit) + self.populate_scene_placeholders( + level_limit, keep_placeholders) def rebuild_template(self): """Go through existing placeholders in scene and update them. @@ -489,7 +500,9 @@ class AbstractTemplateBuilder(object): plugin = plugins_by_identifier[identifier] plugin.prepare_placeholders(placeholders) - def populate_scene_placeholders(self, level_limit=None): + def populate_scene_placeholders( + self, level_limit=None, keep_placeholders=None + ): """Find placeholders in scene using plugins and process them. This should happen after 'import_template'. @@ -541,6 +554,11 @@ class AbstractTemplateBuilder(object): " is already in progress." )) continue + + # add flag for keeping placeholders in scene + # after they are processed + placeholder.data["keep_placeholder"] = keep_placeholders + filtered_placeholders.append(placeholder) self._prepare_placeholders(filtered_placeholders) @@ -599,8 +617,8 @@ class AbstractTemplateBuilder(object): ["profiles"] ) - def get_template_path(self): - """Unified way how template path is received usign settings. + def get_template_preset(self): + """Unified way how template preset is received usign settings. Method is dependent on '_get_build_profiles' which should return filter profiles to resolve path to a template. 
Default implementation looks @@ -637,6 +655,13 @@ class AbstractTemplateBuilder(object): ).format(task_name, task_type, host_name)) path = profile["path"] + + # switch to remove placeholders after they are used + placeholder_keep = profile.get("placeholder_keep") + # backward compatibility, since default is True + if placeholder_keep is not False: + placeholder_keep = True + if not path: raise TemplateLoadFailed(( "Template path is not set.\n" @@ -657,7 +682,10 @@ class AbstractTemplateBuilder(object): if path and os.path.exists(path): self.log.info("Found template at: '{}'".format(path)) - return path + return { + "path": path, + "placeholder_keep": placeholder_keep + } solved_path = None while True: @@ -683,7 +711,10 @@ class AbstractTemplateBuilder(object): self.log.info("Found template at: '{}'".format(solved_path)) - return solved_path + return { + "path": solved_path, + "placeholder_keep": placeholder_keep + } @six.add_metaclass(ABCMeta) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json index 99a29beb27..1826734291 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -25,8 +25,15 @@ { "key": "path", "label": "Path to template", - "type": "text", - "object_type": "text" + "type": "path", + "multiplatform": false, + "multipath": false + }, + { + "key": "placeholder_keep", + "label": "Keep placeholders", + "type": "boolean", + "default": true } ] } From 011cd8f2e4e9536ce59194668d91b92712484ea1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:45:38 +0100 Subject: [PATCH 2494/2550] nuke: remove update template menu item --- openpype/hosts/nuke/api/pipeline.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index fb707ca44c..918598c04f 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -217,10 +217,6 @@ def _install_menu(): "Build Workfile from template", lambda: build_workfile_template() ) - menu_template.addCommand( - "Update Workfile", - lambda: update_workfile_template() - ) menu_template.addSeparator() menu_template.addCommand( "Create Place Holder", From 90303d4137d3dfca49035ac1c878dbb830f42b42 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:48:46 +0100 Subject: [PATCH 2495/2550] global: updating docstrings --- openpype/pipeline/workfile/workfile_template_builder.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index f6a4ab51cb..2850175bc9 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -419,6 +419,9 @@ class AbstractTemplateBuilder(object): passed. level_limit (int): Limit of populate loops. Related to 'populate_scene_placeholders' method. + keep_placeholders (bool): Add flag to placeholder data for + hosts to decide if they want to remove + placeholder after it is used. """ template_preset = self.get_template_preset() @@ -518,6 +521,9 @@ class AbstractTemplateBuilder(object): Args: level_limit (int): Level of loops that can happen. Default is 1000. 
+ keep_placeholders (bool): Add flag to placeholder data for + hosts to decide if they want to remove + placeholder after it is used. """ if not self.placeholder_plugins: From 212b372c03dad76e7aa1a7b83148ea78dda5611e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:49:18 +0100 Subject: [PATCH 2496/2550] nuke: make `get_group_io_nodes` soft fail --- openpype/hosts/nuke/api/lib.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index a066bbcdcf..2fdf446357 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2865,10 +2865,11 @@ def get_group_io_nodes(nodes): break if input_node is None: - raise ValueError("No Input found") + log.warning("No Input found") if output_node is None: - raise ValueError("No Output found") + log.warning("No Output found") + return input_node, output_node From 34e2ee9e7b36fd08d559c321019754cde71c03cd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Dec 2022 12:26:02 +0100 Subject: [PATCH 2497/2550] OP-4512 - fix typo --- openpype/modules/sync_server/providers/sftp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 1b4f68c585..c41edc78bc 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -72,7 +72,7 @@ class SFTPHandler(AbstractProvider): Returns: (boolean) """ - return self.self.presets.get("enabled") and self.conn is not None + return self.presets.get("enabled") and self.conn is not None @classmethod def get_system_settings_schema(cls): From 1dc52fc3a22f3b7b29e4ce71ed1fca983498d9aa Mon Sep 17 00:00:00 2001 From: Joseff Date: Thu, 22 Dec 2022 15:08:04 +0100 Subject: [PATCH 2498/2550] Alter the UObject.h include MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- .../UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h index 2df6c887cf..aca80946bb 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypeSettings.h @@ -3,7 +3,7 @@ #pragma once #include "CoreMinimal.h" -#include "Object.h" +#include "UObject/Object.h" #include "OpenPypeSettings.generated.h" #define OPENPYPE_SETTINGS_FILEPATH IPluginManager::Get().FindPlugin("OpenPype")->GetBaseDir() / TEXT("Config") / TEXT("DefaultOpenPypeSettings.ini") From d61d88d0a57fea3511eaf65954be0fba89e580ab Mon Sep 17 00:00:00 2001 From: Joseff Date: Thu, 22 Dec 2022 15:08:31 +0100 Subject: [PATCH 2499/2550] Alter the PluginManager and UObjectGlobals include. 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- .../UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp index 7134614d22..a6b9eba749 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypeSettings.cpp @@ -2,8 +2,8 @@ #include "OpenPypeSettings.h" -#include "IPluginManager.h" -#include "UObjectGlobals.h" +#include "Interfaces/IPluginManager.h" +#include "UObject/UObjectGlobals.h" /** * Mainly is used for initializing default values if the DefaultOpenPypeSettings.ini file does not exist in the saved config From e34fee55fd5f0d00e745ab8d95663cb5be148b51 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Dec 2022 15:54:07 +0100 Subject: [PATCH 2500/2550] Fix for empty site_name --- openpype/pipeline/anatomy.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index a6750ede7c..627f7198bb 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -61,7 +61,8 @@ class BaseAnatomy(object): project_name = project_doc["name"] self.project_name = project_name - if site_name not in ["studio", "local", get_local_site_id()]: + if (site_name and + site_name not in ["studio", "local", get_local_site_id()]): raise RuntimeError("Anatomy could be created only for default " "local sites not for {}".format(site_name)) From f5cb893dc1b28b62b8132796bf5037d1236d6341 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:16:54 +0100 Subject: [PATCH 2501/2550] global: creator plugin abstraction for workfile builder template --- openpype/hosts/nuke/api/pipeline.py | 4 +- .../workfile/workfile_template_builder.py | 191 +++++++++++++++++- 2 files changed, 192 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 918598c04f..bdf12b7dc4 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -35,6 +35,7 @@ from .lib import ( ) from .workfile_template_builder import ( NukePlaceholderLoadPlugin, + NukePlaceholderCreatePlugin, build_workfile_template, update_workfile_template, create_placeholder, @@ -139,7 +140,8 @@ def _show_workfiles(): def get_workfile_build_placeholder_plugins(): return [ - NukePlaceholderLoadPlugin + NukePlaceholderLoadPlugin, + NukePlaceholderCreatePlugin ] diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 2850175bc9..d85d6b50dd 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -42,7 +42,10 @@ from openpype.pipeline.load import ( get_contexts_for_repre_docs, load_with_repre_context, ) -from openpype.pipeline.create import get_legacy_creator_by_name +from openpype.pipeline.create import ( + get_legacy_creator_by_name, + discover_legacy_creator_plugins +) class TemplateNotFound(Exception): @@ -235,7 +238,7 @@ class AbstractTemplateBuilder(object): def get_creators_by_name(self): if self._creators_by_name is None: - self._creators_by_name 
= get_legacy_creator_by_name() + self._creators_by_name = discover_legacy_creator_plugins() return self._creators_by_name def get_shared_data(self, key): @@ -1463,6 +1466,165 @@ class PlaceholderLoadMixin(object): pass +class PlaceholderCreateMixin(object): + """Mixin prepared for creating placeholder plugins. + + Implementation prepares options for placeholders with + 'get_create_plugin_options'. + + For placeholder population is implemented 'populate_create_placeholder'. + + PlaceholderItem can have implemented methods: + - 'create_failed' - called when creating of an instance failed + - 'create_succeed' - called when creating of an instance succeeded + """ + + def get_create_plugin_options(self, options=None): + """Unified attribute definitions for create placeholder. + + Common function for placeholder plugins used for creating of + publishable instances. Use it with 'get_placeholder_options'. + + Args: + plugin (PlaceholderPlugin): Plugin used for creating of + publish instances. + options (Dict[str, Any]): Already available options which are used + as defaults for attributes. + + Returns: + List[AbtractAttrDef]: Attribute definitions common for create + plugins. + """ + + creators_by_name = self.builder.get_creators_by_name() + creator_items = [ + (creator_name, creator.label or creator_name) + for creator_name, creator in creators_by_name.items() + ] + + creator_items = list(sorted(creator_items, key=lambda i: i[1])) + options = options or {} + return [ + attribute_definitions.UISeparatorDef(), + attribute_definitions.UILabelDef("Main attributes"), + attribute_definitions.UISeparatorDef(), + + attribute_definitions.EnumDef( + "creator", + label="Creator", + default=options.get("creator"), + items=creator_items, + tooltip=( + "Creator" + "\nDefines what OpenPype creator will be used to" + " create publishable instance." + "\nUseable creator depends on current host's creator list." + "\nField is case sensitive." + ) + ), + attribute_definitions.TextDef( + "create_variant", + label="Variant", + default=options.get("create_variant"), + placeholder='Main', + tooltip=( + "Creator" + "\nDefines variant name which will be use for " + "\ncompiling of subset name." + ) + ), + attribute_definitions.UISeparatorDef(), + attribute_definitions.NumberDef( + "order", + label="Order", + default=options.get("order") or 0, + decimals=0, + minimum=0, + maximum=999, + tooltip=( + "Order" + "\nOrder defines creating instance priority (0 to 999)" + "\nPriority rule is : \"lowest is first to load\"." + ) + ) + ] + + def populate_create_placeholder(self, placeholder): + """Create placeholder is going to create matching publishabe instance. + + Args: + placeholder (PlaceholderItem): Placeholder item with information + about requested publishable instance. 
+ """ + creator_name = placeholder.data["creator"] + create_variant = placeholder.data["create_variant"] + + creator_plugin = get_legacy_creator_by_name(creator_name) + + # create subset name + project_name = legacy_io.Session["AVALON_PROJECT"] + task_name = legacy_io.Session["AVALON_TASK"] + asset_name = legacy_io.Session["AVALON_ASSET"] + + # get asset id + asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) + assert asset_doc, "No current asset found in Session" + asset_id = asset_doc['_id'] + + subset_name = creator_plugin.get_subset_name( + create_variant, + task_name, + asset_id, + project_name + ) + + creator_data = { + "creator_name": creator_name, + "create_variant": create_variant, + "subset_name": subset_name, + "creator_plugin": creator_plugin + } + + # compile subset name from variant + try: + creator_instance = creator_plugin( + subset_name, + asset_name + ).process() + + except Exception: + failed = True + self.create_failed(placeholder, creator_data) + + else: + failed = False + self.create_succeed(placeholder, creator_instance) + + self.cleanup_placeholder(placeholder, failed) + + def create_failed(self, placeholder, creator_data): + if hasattr(placeholder, "create_failed"): + placeholder.create_failed(creator_data) + + def create_succeed(self, placeholder, creator_instance): + if hasattr(placeholder, "create_succeed"): + placeholder.create_succeed(creator_instance) + + def cleanup_placeholder(self, placeholder, failed): + """Cleanup placeholder after load of single representation. + + Can be called multiple times during placeholder item populating and is + called even if loading failed. + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + + pass + + class LoadPlaceholderItem(PlaceholderItem): """PlaceholderItem for plugin which is loading representations. @@ -1486,3 +1648,28 @@ class LoadPlaceholderItem(PlaceholderItem): def load_failed(self, representation): self._failed_representations.append(representation) + + +class CreatePlaceholderItem(PlaceholderItem): + """PlaceholderItem for plugin which is creating publish instance. + + Connected to 'PlaceholderCreateMixin'. 
+ """ + + def __init__(self, *args, **kwargs): + super(CreatePlaceholderItem, self).__init__(*args, **kwargs) + self._failed_created_publish_instances = [] + + def get_errors(self): + if not self._failed_representations: + return [] + message = ( + "Failed to create {} instance using Creator {}" + ).format( + len(self._failed_created_publish_instances), + self.data["creator"] + ) + return [message] + + def create_failed(self, creator_data): + self._failed_created_publish_instances.append(creator_data) From dfb3d142aa765e27a4fc2eb3c861e70ad57fb464 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:17:27 +0100 Subject: [PATCH 2502/2550] nuke: workfile builder template creator plugin implementation --- .../nuke/api/workfile_template_builder.py | 410 +++++++++++++++++- 1 file changed, 409 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 60bf906fbe..5e9e5fcdce 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -7,7 +7,9 @@ from openpype.pipeline.workfile.workfile_template_builder import ( AbstractTemplateBuilder, PlaceholderPlugin, LoadPlaceholderItem, + CreatePlaceholderItem, PlaceholderLoadMixin, + PlaceholderCreateMixin ) from openpype.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, @@ -32,7 +34,7 @@ PLACEHOLDER_SET = "PLACEHOLDERS_SET" class NukeTemplateBuilder(AbstractTemplateBuilder): - """Concrete implementation of AbstractTemplateBuilder for maya""" + """Concrete implementation of AbstractTemplateBuilder for nuke""" def import_template(self, path): """Import template into current scene. @@ -544,6 +546,412 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): siblings_input.setInput(0, copy_output) +class NukePlaceholderCreatePlugin( + NukePlaceholderPlugin, PlaceholderCreateMixin +): + identifier = "nuke.create" + label = "Nuke create" + + def _parse_placeholder_node_data(self, node): + placeholder_data = super( + NukePlaceholderCreatePlugin, self + )._parse_placeholder_node_data(node) + + node_knobs = node.knobs() + nb_children = 0 + if "nb_children" in node_knobs: + nb_children = int(node_knobs["nb_children"].getValue()) + placeholder_data["nb_children"] = nb_children + + siblings = [] + if "siblings" in node_knobs: + siblings = node_knobs["siblings"].values() + placeholder_data["siblings"] = siblings + + node_full_name = node.fullName() + placeholder_data["group_name"] = node_full_name.rpartition(".")[0] + placeholder_data["last_loaded"] = [] + placeholder_data["delete"] = False + return placeholder_data + + def collect_placeholders(self): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, node in scene_placeholders.items(): + plugin_identifier_knob = node.knob("plugin_identifier") + if ( + plugin_identifier_knob is None + or plugin_identifier_knob.getValue() != self.identifier + ): + continue + + placeholder_data = self._parse_placeholder_node_data(node) + # TODO do data validations and maybe updgrades if are invalid + output.append( + CreatePlaceholderItem(node_name, placeholder_data, self) + ) + + return output + + def populate_placeholder(self, placeholder): + self.populate_create_placeholder(placeholder) + + def repopulate_placeholder(self, placeholder): + self.populate_create_placeholder(placeholder) + + def get_placeholder_options(self, options=None): + return 
self.get_create_plugin_options(options) + + def cleanup_placeholder(self, placeholder, failed): + # deselect all selected nodes + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + # getting the latest nodes added + nodes_init = placeholder.data["nodes_init"] + nodes_created = list(set(nuke.allNodes()) - set(nodes_init)) + self.log.debug("Created nodes: {}".format(nodes_created)) + if not nodes_created: + return + + placeholder.data["delete"] = True + + nodes_created = self._move_to_placeholder_group( + placeholder, nodes_created + ) + placeholder.data["last_created"] = nodes_created + refresh_nodes(nodes_created) + + # positioning of the created nodes + min_x, min_y, _, _ = get_extreme_positions(nodes_created) + for node in nodes_created: + xpos = (node.xpos() - min_x) + placeholder_node.xpos() + ypos = (node.ypos() - min_y) + placeholder_node.ypos() + node.setXYpos(xpos, ypos) + refresh_nodes(nodes_created) + + # fix the problem of z_order for backdrops + self._fix_z_order(placeholder) + self._imprint_siblings(placeholder) + + if placeholder.data["nb_children"] == 0: + # save initial nodes postions and dimensions, update them + # and set inputs and outputs of created nodes + + self._imprint_inits() + self._update_nodes(placeholder, nuke.allNodes(), nodes_created) + self._set_created_connections(placeholder) + + elif placeholder.data["siblings"]: + # create copies of placeholder siblings for the new created nodes, + # set their inputs and outpus and update all nodes positions and + # dimensions and siblings names + + siblings = get_nodes_by_names(placeholder.data["siblings"]) + refresh_nodes(siblings) + copies = self._create_sib_copies(placeholder) + new_nodes = list(copies.values()) # copies nodes + self._update_nodes(new_nodes, nodes_created) + placeholder_node.removeKnob(placeholder_node.knob("siblings")) + new_nodes_name = get_names_from_nodes(new_nodes) + imprint(placeholder_node, {"siblings": new_nodes_name}) + self._set_copies_connections(placeholder, copies) + + self._update_nodes( + nuke.allNodes(), + new_nodes + nodes_created, + 20 + ) + + new_siblings = get_names_from_nodes(new_nodes) + placeholder.data["siblings"] = new_siblings + + else: + # if the placeholder doesn't have siblings, the created + # nodes will be placed in a free space + + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes_created, direction="bottom", offset=200 + ) + node = nuke.createNode("NoOp") + reset_selection() + nuke.delete(node) + for node in nodes_created: + xpos = (node.xpos() - min_x) + xpointer + ypos = (node.ypos() - min_y) + ypointer + node.setXYpos(xpos, ypos) + + placeholder.data["nb_children"] += 1 + reset_selection() + + # remove placeholders marked as delete + if ( + placeholder.data.get("delete") + and not placeholder.data.get("keep_placeholder") + ): + self.log.debug("Deleting node: {}".format(placeholder_node.name())) + nuke.delete(placeholder_node) + + # go back to root group + nuke.root().begin() + + def _move_to_placeholder_group(self, placeholder, nodes_created): + """ + opening the placeholder's group and copying created nodes in it. 
+ + Returns : + nodes_created (list): the new list of pasted nodes + """ + groups_name = placeholder.data["group_name"] + reset_selection() + select_nodes(nodes_created) + if groups_name: + with node_tempfile() as filepath: + nuke.nodeCopy(filepath) + for node in nuke.selectedNodes(): + nuke.delete(node) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste(filepath) + nodes_created = nuke.selectedNodes() + return nodes_created + + def _fix_z_order(self, placeholder): + """Fix the problem of z_order when a backdrop is create.""" + + nodes_created = placeholder.data["last_created"] + created_backdrops = [] + bd_orders = set() + for node in nodes_created: + if isinstance(node, nuke.BackdropNode): + created_backdrops.append(node) + bd_orders.add(node.knob("z_order").getValue()) + + if not bd_orders: + return + + sib_orders = set() + for node_name in placeholder.data["siblings"]: + node = nuke.toNode(node_name) + if isinstance(node, nuke.BackdropNode): + sib_orders.add(node.knob("z_order").getValue()) + + if not sib_orders: + return + + min_order = min(bd_orders) + max_order = max(sib_orders) + for backdrop_node in created_backdrops: + z_order = backdrop_node.knob("z_order").getValue() + backdrop_node.knob("z_order").setValue( + z_order + max_order - min_order + 1) + + def _imprint_siblings(self, placeholder): + """ + - add siblings names to placeholder attributes (nodes created with it) + - add Id to the attributes of all the other nodes + """ + + created_nodes = placeholder.data["last_created"] + created_nodes_set = set(created_nodes) + data = {"repre_id": str(placeholder.data["last_repre_id"])} + + for node in created_nodes: + node_knobs = node.knobs() + if "builder_type" not in node_knobs: + # save the id of representation for all imported nodes + imprint(node, data) + node.knob("repre_id").setVisible(False) + refresh_node(node) + continue + + if ( + "is_placeholder" not in node_knobs + or ( + "is_placeholder" in node_knobs + and node.knob("is_placeholder").value() + ) + ): + siblings = list(created_nodes_set - {node}) + siblings_name = get_names_from_nodes(siblings) + siblings = {"siblings": siblings_name} + imprint(node, siblings) + + def _imprint_inits(self): + """Add initial positions and dimensions to the attributes""" + + for node in nuke.allNodes(): + refresh_node(node) + imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) + node.knob("x_init").setVisible(False) + node.knob("y_init").setVisible(False) + width = node.screenWidth() + height = node.screenHeight() + if "bdwidth" in node.knobs(): + imprint(node, {"w_init": width, "h_init": height}) + node.knob("w_init").setVisible(False) + node.knob("h_init").setVisible(False) + refresh_node(node) + + def _update_nodes( + self, placeholder, nodes, considered_nodes, offset_y=None + ): + """Adjust backdrop nodes dimensions and positions. + + Considering some nodes sizes. 
+ + Args: + nodes (list): list of nodes to update + considered_nodes (list): list of nodes to consider while updating + positions and dimensions + offset (int): distance between copies + """ + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) + + diff_x = diff_y = 0 + contained_nodes = [] # for backdrops + + if offset_y is None: + width_ph = placeholder_node.screenWidth() + height_ph = placeholder_node.screenHeight() + diff_y = max_y - min_y - height_ph + diff_x = max_x - min_x - width_ph + contained_nodes = [placeholder_node] + min_x = placeholder_node.xpos() + min_y = placeholder_node.ypos() + else: + siblings = get_nodes_by_names(placeholder.data["siblings"]) + minX, _, maxX, _ = get_extreme_positions(siblings) + diff_y = max_y - min_y + 20 + diff_x = abs(max_x - min_x - maxX + minX) + contained_nodes = considered_nodes + + if diff_y <= 0 and diff_x <= 0: + return + + for node in nodes: + refresh_node(node) + + if ( + node == placeholder_node + or node in considered_nodes + ): + continue + + if ( + not isinstance(node, nuke.BackdropNode) + or ( + isinstance(node, nuke.BackdropNode) + and not set(contained_nodes) <= set(node.getNodes()) + ) + ): + if offset_y is None and node.xpos() >= min_x: + node.setXpos(node.xpos() + diff_x) + + if node.ypos() >= min_y: + node.setYpos(node.ypos() + diff_y) + + else: + width = node.screenWidth() + height = node.screenHeight() + node.knob("bdwidth").setValue(width + diff_x) + node.knob("bdheight").setValue(height + diff_y) + + refresh_node(node) + + def _set_created_connections(self, placeholder): + """ + set inputs and outputs of created nodes""" + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + input_node, output_node = get_group_io_nodes( + placeholder.data["last_created"] + ) + for node in placeholder_node.dependent(): + for idx in range(node.inputs()): + if node.input(idx) == placeholder_node and output_node: + node.setInput(idx, output_node) + + for node in placeholder_node.dependencies(): + for idx in range(placeholder_node.inputs()): + if placeholder_node.input(idx) == node and input_node: + input_node.setInput(0, node) + + def _create_sib_copies(self, placeholder): + """ creating copies of the palce_holder siblings (the ones who were + created with it) for the new nodes added + + Returns : + copies (dict) : with copied nodes names and their copies + """ + + copies = {} + siblings = get_nodes_by_names(placeholder.data["siblings"]) + for node in siblings: + new_node = duplicate_node(node) + + x_init = int(new_node.knob("x_init").getValue()) + y_init = int(new_node.knob("y_init").getValue()) + new_node.setXYpos(x_init, y_init) + if isinstance(new_node, nuke.BackdropNode): + w_init = new_node.knob("w_init").getValue() + h_init = new_node.knob("h_init").getValue() + new_node.knob("bdwidth").setValue(w_init) + new_node.knob("bdheight").setValue(h_init) + refresh_node(node) + + if "repre_id" in node.knobs().keys(): + node.removeKnob(node.knob("repre_id")) + copies[node.name()] = new_node + return copies + + def _set_copies_connections(self, placeholder, copies): + """Set inputs and outputs of the copies. + + Args: + copies (dict): Copied nodes by their names. 
+ """ + + last_input, last_output = get_group_io_nodes( + placeholder.data["last_created"] + ) + siblings = get_nodes_by_names(placeholder.data["siblings"]) + siblings_input, siblings_output = get_group_io_nodes(siblings) + copy_input = copies[siblings_input.name()] + copy_output = copies[siblings_output.name()] + + for node_init in siblings: + if node_init == siblings_output: + continue + + node_copy = copies[node_init.name()] + for node in node_init.dependent(): + for idx in range(node.inputs()): + if node.input(idx) != node_init: + continue + + if node in siblings: + copies[node.name()].setInput(idx, node_copy) + else: + last_input.setInput(0, node_copy) + + for node in node_init.dependencies(): + for idx in range(node_init.inputs()): + if node_init.input(idx) != node: + continue + + if node_init == siblings_input: + copy_input.setInput(idx, node) + elif node in siblings: + node_copy.setInput(idx, copies[node.name()]) + else: + node_copy.setInput(idx, last_output) + + siblings_input.setInput(0, copy_output) + + def build_workfile_template(*args): builder = NukeTemplateBuilder(registered_host()) builder.build_template() From 013ee5660af7dcb2b88499edf8d4d6dfcf3a259c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:17:39 +0100 Subject: [PATCH 2503/2550] fix typo --- openpype/hosts/nuke/plugins/load/load_backdrop.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 164ab6f9f4..d1fb763500 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -28,7 +28,7 @@ class LoadBackdropNodes(load.LoaderPlugin): representations = ["nk"] families = ["workfile", "nukenodes"] - label = "Iport Nuke Nodes" + label = "Import Nuke Nodes" order = 0 icon = "eye" color = "white" From 410ed90cb33348ee38525a3045612ef2a19f1970 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:17:54 +0100 Subject: [PATCH 2504/2550] fix typo --- openpype/pipeline/create/creator_plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index bb5ce00452..8500dd1e22 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -608,7 +608,7 @@ def discover_legacy_creator_plugins(): plugin.apply_settings(project_settings, system_settings) except Exception: log.warning( - "Failed to apply settings to loader {}".format( + "Failed to apply settings to creator {}".format( plugin.__name__ ), exc_info=True From ae709afaaf85ca6bd1d6d74476ea8c561d550eec Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Dec 2022 12:25:01 +0100 Subject: [PATCH 2505/2550] Added dynamic message to Slack notification Artist can now add additional message, specific per instance and publish, if they are using Publisher. 
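The wiring below goes through the Publisher's per-instance attribute definitions. As a minimal sketch of that pattern, with a hypothetical plugin name and "artist_note" key (the `TextDef`, `OpenPypePyblishPluginMixin` and `get_attr_values_from_data` usage follows the change itself):

```python
import pyblish.api

from openpype.lib import attribute_definitions
from openpype.pipeline import OpenPypePyblishPluginMixin


class CollectArtistNote(pyblish.api.InstancePlugin,
                        OpenPypePyblishPluginMixin):
    """Expose a free-text field per instance in the Publisher UI."""

    order = pyblish.api.CollectorOrder + 0.49
    label = "Collect Artist Note"

    @classmethod
    def get_attribute_defs(cls):
        # Rendered by the Publisher for every instance of this plugin.
        return [
            attribute_definitions.TextDef(
                "artist_note",
                label="Artist note",
                placeholder="Optional note added to the notification"
            )
        ]

    def process(self, instance):
        # Whatever the artist typed comes back keyed by the definition name.
        values = self.get_attr_values_from_data(instance.data)
        note = values.get("artist_note")
        if note:
            instance.data["artist_note"] = note
```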
--- .../plugins/publish/collect_slack_family.py | 23 +++++++++++++++++-- .../plugins/publish/integrate_slack_api.py | 11 +++++---- website/docs/module_slack.md | 6 +++++ 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 27e899d59a..b3e7bbdcec 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -1,10 +1,12 @@ import pyblish.api from openpype.lib.profiles_filtering import filter_profiles -from openpype.pipeline import legacy_io +from openpype.lib import attribute_definitions +from openpype.pipeline import OpenPypePyblishPluginMixin -class CollectSlackFamilies(pyblish.api.InstancePlugin): +class CollectSlackFamilies(pyblish.api.InstancePlugin, + OpenPypePyblishPluginMixin): """Collect family for Slack notification Expects configured profile in @@ -17,6 +19,18 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): profiles = None + @classmethod + def get_attribute_defs(cls): + return [ + attribute_definitions.TextDef( + # Key under which it will be stored + "additional_message", + # Use plugin label as label for attribute + label="Additional Slack message", + placeholder="" + ) + ] + def process(self, instance): task_data = instance.data["anatomyData"].get("task", {}) family = self.main_family_from_instance(instance) @@ -55,6 +69,11 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): ["token"]) instance.data["slack_token"] = slack_token + attribute_values = self.get_attr_values_from_data(instance.data) + additional_message = attribute_values.get("additional_message") + if additional_message: + instance.data["slack_additional_message"] = additional_message + def main_family_from_instance(self, instance): # TODO yank from integrate """Returns main family of entered instance.""" family = instance.data.get("family") diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 0cd5ec9de8..d94ecb02e4 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -31,11 +31,14 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): review_path = self._get_review_path(instance) publish_files = set() + message = '' + additional_message = instance.data.get("slack_additional_message") + if additional_message: + message = "{} \n".format(additional_message) for message_profile in instance.data["slack_channel_message_profiles"]: - message = self._get_filled_message(message_profile["message"], - instance, - review_path) - self.log.debug("message:: {}".format(message)) + message += self._get_filled_message(message_profile["message"], + instance, + review_path) if not message: return diff --git a/website/docs/module_slack.md b/website/docs/module_slack.md index 3a2842da63..2bfd7cb562 100644 --- a/website/docs/module_slack.md +++ b/website/docs/module_slack.md @@ -94,6 +94,12 @@ Few keys also have Capitalized and UPPERCASE format. 
Values will be modified acc Here you can find review {review_filepath} ``` +##### Dynamic message for artists +If artists uses host with implemented Publisher (new UI for publishing, implemented in Tray Publisher, Adobe products etc), it is possible for +them to add additional message (notification for specific users for example, artists must provide proper user id with '@'). +Additional message will be sent only if at least one profile, eg. one target channel is configured. +All available template keys (see higher) could be used here as a placeholder too. + #### Message retention Currently no purging of old messages is implemented in Openpype. Admins of Slack should set their own retention of messages and files per channel. (see https://slack.com/help/articles/203457187-Customize-message-and-file-retention-policies) From c6c08fd4ccaf779c2569139ab83f8fb16fe6c785 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:46:14 +0100 Subject: [PATCH 2506/2550] global: fix creator plugin discovery --- .../pipeline/workfile/workfile_template_builder.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index d85d6b50dd..07a1f3ec58 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -238,7 +238,14 @@ class AbstractTemplateBuilder(object): def get_creators_by_name(self): if self._creators_by_name is None: - self._creators_by_name = discover_legacy_creator_plugins() + self._creators_by_name = {} + for creator in discover_legacy_creator_plugins(): + creator_name = creator.__name__ + if creator_name in self._creators_by_name: + raise KeyError( + "Duplicated creator name {} !".format(creator_name) + ) + self._creators_by_name[creator_name] = creator return self._creators_by_name def get_shared_data(self, key): @@ -1497,6 +1504,8 @@ class PlaceholderCreateMixin(object): """ creators_by_name = self.builder.get_creators_by_name() + print(creators_by_name) + creator_items = [ (creator_name, creator.label or creator_name) for creator_name, creator in creators_by_name.items() From c16a5289e49286bb7c65be04a9629f846cab58ce Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Dec 2022 14:09:03 +0100 Subject: [PATCH 2507/2550] OP-4470 - better handle missing keys Message might contain {placeholder} which are not collected. Previously it would fail without sending message. Now missing keys are double escaped {{}}. 
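The fix relies on how `str.format` treats doubled braces: `{{key}}` is rendered as the literal text `{key}` instead of being looked up. A standalone illustration with made-up template and data (the real escaping helper is added in the diff below):

```python
template = "Published {subset} for {asset}, review at {review_filepath}"
fill_data = {"subset": "renderMain", "asset": "sh010"}

# template.format(**fill_data) would raise KeyError('review_filepath')
# because that placeholder was never collected.

# Doubling the braces of the unknown key keeps it as literal text:
escaped = template.replace("{review_filepath}", "{{review_filepath}}")
print(escaped.format(**fill_data))
# Published renderMain for sh010, review at {review_filepath}
```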
--- .../plugins/publish/integrate_slack_api.py | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 0cd5ec9de8..9122c1c5ed 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -1,4 +1,5 @@ import os +import re import six import pyblish.api import copy @@ -132,14 +133,14 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): fill_key = "task[{}]".format(key) fill_pairs.append((fill_key, value)) - self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) fill_data.update(multiple_case_variants) - - message = None + message = '' try: - message = message_templ.format(**fill_data) + message = self._escape_missing_keys(message_templ, fill_data).\ + format(**fill_data) except Exception: + # shouldn't happen self.log.warning( "Some keys are missing in {}".format(message_templ), exc_info=True) @@ -263,3 +264,22 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): msg = " - application must added to channel '{}'.".format(channel) error_str += msg + " Ask Slack admin." return error_str + + def _escape_missing_keys(self, message, fill_data): + """Double escapes placeholder which are missing in 'fill_data'""" + placeholder_keys = re.findall("\{([^}]+)\}", message) + + fill_keys = [] + for key, value in fill_data.items(): + fill_keys.append(key) + if isinstance(value, dict): + for child_key in value.keys(): + fill_keys.append("{}[{}]".format(key, child_key)) + + not_matched = set(placeholder_keys) - set(fill_keys) + + for not_matched_item in not_matched: + message = message.replace("{}".format(not_matched_item), + "{{{}}}".format(not_matched_item)) + + return message From 3a41d6a72158af0707d58f146b834076ed386b13 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Dec 2022 14:13:00 +0100 Subject: [PATCH 2508/2550] OP-4470 - safer handling of review path 'published_path' might be missing. Thumbnail path was fixed previously, this one was missed. 
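The representation may not have been integrated yet when the notification runs, so the plugin now falls back to the staging copy. Reduced to a sketch (the function name is made up, the keys mirror the representation data used below, and a single-file "files" value is assumed):

```python
import os


def resolve_review_path(repre):
    # Prefer the published location, otherwise fall back to the staging copy.
    path = repre.get("published_path") or os.path.join(
        repre["stagingDir"], repre["files"]
    )
    return path if os.path.exists(path) else None
```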
--- .../slack/plugins/publish/integrate_slack_api.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 9122c1c5ed..c4d6b27726 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -163,17 +163,21 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _get_review_path(self, instance): """Returns abs url for review if present in instance repres""" - published_path = None + review_path = None for repre in instance.data.get("representations", []): tags = repre.get('tags', []) if (repre.get("review") or "review" in tags or "burnin" in tags): - if os.path.exists(repre["published_path"]): - published_path = repre["published_path"] + repre_review_path = ( + repre.get("published_path") or + os.path.join(repre["stagingDir"], repre["files"]) + ) + if os.path.exists(repre_review_path): + review_path = repre_review_path if "burnin" in tags: # burnin has precedence if exists break - return published_path + return review_path def _python2_call(self, token, channel, message, publish_files): from slackclient import SlackClient From 81de2cf0c902671104de22f7768ed37ac8ed5c39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:25:41 +0100 Subject: [PATCH 2509/2550] global: fix _repr_ pformat printing --- openpype/pipeline/workfile/workfile_template_builder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 07a1f3ec58..630a11e4b5 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -984,7 +984,7 @@ class PlaceholderItem(object): def __init__(self, scene_identifier, data, plugin): self._log = None - self._scene_identifier = scene_identifier + self.name = scene_identifier self._data = data self._plugin = plugin @@ -1062,7 +1062,7 @@ class PlaceholderItem(object): @property def scene_identifier(self): - return self._scene_identifier + return self.name @property def finished(self): From 4f7e4fcac3ffbddc258247941b063f25746ef174 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:26:28 +0100 Subject: [PATCH 2510/2550] global: add _before_instance_create function for storing created nodes --- openpype/pipeline/workfile/workfile_template_builder.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 630a11e4b5..dce36eca82 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1594,6 +1594,8 @@ class PlaceholderCreateMixin(object): "creator_plugin": creator_plugin } + self._before_instance_create(placeholder) + # compile subset name from variant try: creator_instance = creator_plugin( @@ -1633,6 +1635,11 @@ class PlaceholderCreateMixin(object): pass + def _before_instance_create(self, placeholder): + """Can be overriden. Is called before instance is created.""" + + pass + class LoadPlaceholderItem(PlaceholderItem): """PlaceholderItem for plugin which is loading representations. 
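The hook exists so a host can snapshot its scene right before the creator runs and later diff against that snapshot to find what was created; the Nuke implementation in the next patch does exactly that with `nuke.allNodes()`. A host-agnostic, runnable sketch of the idea, where the `SCENE` list stands in for a real node graph:

```python
SCENE = ["Read1", "Write1"]              # pretend this is the host scene


class SnapshotPlaceholderMixin(object):
    def _before_instance_create(self, placeholder):
        # Remember everything that already exists.
        placeholder.data["nodes_init"] = list(SCENE)

    def collect_created(self, placeholder):
        # Whatever is new afterwards belongs to this placeholder.
        initial = set(placeholder.data["nodes_init"])
        placeholder.data["last_created"] = [
            node for node in SCENE if node not in initial
        ]


class DummyPlaceholder(object):
    def __init__(self):
        self.data = {}


mixin = SnapshotPlaceholderMixin()
placeholder = DummyPlaceholder()
mixin._before_instance_create(placeholder)
SCENE.append("Backdrop1")                # the creator adds a node
mixin.collect_created(placeholder)
print(placeholder.data["last_created"])  # ['Backdrop1']
```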
From 5924dcc1f5d80b532fa30137cf1992b89d71d092 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:27:32 +0100 Subject: [PATCH 2511/2550] nuke: implementing _before_instance_create funtion --- openpype/hosts/nuke/api/workfile_template_builder.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 5e9e5fcdce..33dcdab749 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -574,6 +574,9 @@ class NukePlaceholderCreatePlugin( placeholder_data["delete"] = False return placeholder_data + def _before_instance_create(self, placeholder): + placeholder.data["nodes_init"] = nuke.allNodes() + def collect_placeholders(self): output = [] scene_placeholders = self._collect_scene_placeholders() From c06315d5d6d2ce567e152f25600b23fc2b7c2b47 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:28:03 +0100 Subject: [PATCH 2512/2550] nuke: fixing logic for creator placeholder plugin processing --- openpype/hosts/nuke/api/workfile_template_builder.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 33dcdab749..973e15b192 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -76,8 +76,7 @@ class NukePlaceholderPlugin(PlaceholderPlugin): node_knobs = node.knobs() if ( - "builder_type" not in node_knobs - or "is_placeholder" not in node_knobs + "is_placeholder" not in node_knobs or not node.knob("is_placeholder").value() ): continue @@ -756,16 +755,9 @@ class NukePlaceholderCreatePlugin( created_nodes = placeholder.data["last_created"] created_nodes_set = set(created_nodes) - data = {"repre_id": str(placeholder.data["last_repre_id"])} for node in created_nodes: node_knobs = node.knobs() - if "builder_type" not in node_knobs: - # save the id of representation for all imported nodes - imprint(node, data) - node.knob("repre_id").setVisible(False) - refresh_node(node) - continue if ( "is_placeholder" not in node_knobs From c0157e5787a822064238847a4b2a7e5bac71970c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 17:34:47 +0100 Subject: [PATCH 2513/2550] remove todo --- openpype/hosts/nuke/api/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 973e15b192..1b81f24e86 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -588,7 +588,7 @@ class NukePlaceholderCreatePlugin( continue placeholder_data = self._parse_placeholder_node_data(node) - # TODO do data validations and maybe updgrades if are invalid + output.append( CreatePlaceholderItem(node_name, placeholder_data, self) ) From 18949a0c76ecb7033867658eacbf4f05e03ff160 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 24 Dec 2022 03:27:57 +0000 Subject: [PATCH 2514/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 904579ad55..e14bc9daab 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" 
-__version__ = "3.14.10-nightly.2" +__version__ = "3.14.10-nightly.3" From 8b6bc8444db044391bbe340e981abc404d1ffb38 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 28 Dec 2022 03:28:14 +0000 Subject: [PATCH 2515/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index e14bc9daab..40abb9e9fd 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.3" +__version__ = "3.14.10-nightly.4" From f5842d91bd49cc955f49ed53388efd565b84f0a6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Dec 2022 12:18:11 +0100 Subject: [PATCH 2516/2550] rename variable 'max_len' to 'message_len' --- openpype/widgets/message_window.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/widgets/message_window.py b/openpype/widgets/message_window.py index 94e51f5d4f..8301f5e8f2 100644 --- a/openpype/widgets/message_window.py +++ b/openpype/widgets/message_window.py @@ -105,16 +105,18 @@ class ScrollMessageBox(QtWidgets.QDialog): content_widget = QtWidgets.QWidget(self) scroll_widget.setWidget(content_widget) - max_len = 0 + message_len = 0 content_layout = QtWidgets.QVBoxLayout(content_widget) for message in messages: label_widget = QtWidgets.QLabel(message, content_widget) content_layout.addWidget(label_widget) - max_len = max(max_len, len(message)) + message_len = max(message_len, len(message)) # guess size of scrollable area max_width = QtWidgets.QApplication.desktop().availableGeometry().width - scroll_widget.setMinimumWidth(min(max_width, max_len * 6)) + scroll_widget.setMinimumWidth( + min(max_width, message_len * 6) + ) layout.addWidget(scroll_widget) if not cancelable: # if no specific buttons OK only From a941aabc049ee3ec59ef829dbd58c1abca937868 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Dec 2022 12:21:05 +0100 Subject: [PATCH 2517/2550] call the width method to get the value --- openpype/widgets/message_window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/widgets/message_window.py b/openpype/widgets/message_window.py index 8301f5e8f2..2b186475ee 100644 --- a/openpype/widgets/message_window.py +++ b/openpype/widgets/message_window.py @@ -113,7 +113,7 @@ class ScrollMessageBox(QtWidgets.QDialog): message_len = max(message_len, len(message)) # guess size of scrollable area - max_width = QtWidgets.QApplication.desktop().availableGeometry().width + max_width = QtWidgets.QApplication.desktop().availableGeometry().width() scroll_widget.setMinimumWidth( min(max_width, message_len * 6) ) From f3c13e7669c149ef4b652820a2f72f4553484ea2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Dec 2022 12:26:25 +0100 Subject: [PATCH 2518/2550] fix too long line --- openpype/widgets/message_window.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/widgets/message_window.py b/openpype/widgets/message_window.py index 2b186475ee..a44df2ec8e 100644 --- a/openpype/widgets/message_window.py +++ b/openpype/widgets/message_window.py @@ -113,7 +113,8 @@ class ScrollMessageBox(QtWidgets.QDialog): message_len = max(message_len, len(message)) # guess size of scrollable area - max_width = QtWidgets.QApplication.desktop().availableGeometry().width() + desktop = QtWidgets.QApplication.desktop() + max_width = desktop.availableGeometry().width() scroll_widget.setMinimumWidth( min(max_width, message_len * 6) ) 
From efab124e0f721e03367710f1800bb6b0a8f9ca1e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 31 Dec 2022 03:27:31 +0000 Subject: [PATCH 2519/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 40abb9e9fd..4fbe5a3608 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.4" +__version__ = "3.14.10-nightly.5" From 7a372d1b1cd481610a044a552e155db62ae8adbb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:05:47 +0100 Subject: [PATCH 2520/2550] Added possibility to mention users or groups --- .../plugins/publish/integrate_slack_api.py | 271 ++++++++++++++---- 1 file changed, 213 insertions(+), 58 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index d94ecb02e4..8d34521194 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -3,6 +3,9 @@ import six import pyblish.api import copy from datetime import datetime +import re +from abc import ABCMeta, abstractmethod +import time from openpype.client import OpenPypeMongoConnection from openpype.lib.plugin_tools import prepare_template_data @@ -33,6 +36,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): publish_files = set() message = '' additional_message = instance.data.get("slack_additional_message") + token = instance.data["slack_token"] if additional_message: message = "{} \n".format(additional_message) for message_profile in instance.data["slack_channel_message_profiles"]: @@ -52,18 +56,16 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): project = instance.context.data["anatomyData"]["project"]["code"] for channel in message_profile["channels"]: if six.PY2: - msg_id, file_ids = \ - self._python2_call(instance.data["slack_token"], - channel, - message, - publish_files) + client = SlackPython2Operations(token, self.log) else: - msg_id, file_ids = \ - self._python3_call(instance.data["slack_token"], - channel, - message, - publish_files) + client = SlackPython3Operations(token, self.log) + users, groups = client.get_users_and_groups() + message = self._translate_users(message, users, groups) + + msg_id, file_ids = client.send_message(channel, + message, + publish_files) if not msg_id: return @@ -177,15 +179,211 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): break return published_path - def _python2_call(self, token, channel, message, publish_files): - from slackclient import SlackClient + def _get_user_id(self, users, user_name): + """Returns internal slack id for user name""" + user_id = None + for user in users: + if (not user.get("deleted") and + (user_name.lower() == user["name"].lower() or + user_name.lower() == user["real_name"])): + user_id = user["id"] + break + return user_id + + def _get_group_id(self, groups, group_name): + """Returns internal group id for string name""" + group_id = None + for group in groups: + if (not group.get("date_delete") and + (group_name.lower() == group["name"].lower() or + group_name.lower() == group["handle"])): + group_id = group["id"] + break + return group_id + + def _translate_users(self, message, users, groups): + matches = re.findall("(?".format(slack_id) + else: + slack_id = self._get_group_id(groups, user_name) + if slack_id: + mention = 
"".format(slack_id) + if mention: + message = message.replace(orig_user, mention) + + return message + + +@six.add_metaclass(ABCMeta) +class AbstractSlackOperations: + + @abstractmethod + def _get_users_list(self): + """Return response with user list, different methods Python 2 vs 3""" + raise NotImplementedError + + @abstractmethod + def _get_usergroups_list(self): + """Return response with user list, different methods Python 2 vs 3""" + raise NotImplementedError + + @abstractmethod + def get_users_and_groups(self): + """Return users and groups, different retry in Python 2 vs 3""" + raise NotImplementedError + + @abstractmethod + def send_message(self, channel, message, publish_files): + """Sends message to channel, different methods in Python 2 vs 3""" + pass + + def _get_users(self): + """Parse users.list response into list of users (dicts)""" + first = True + next_page = None + users = [] + while first or next_page: + response = self._get_users_list() + first = False + next_page = response.get("response_metadata").get("next_cursor") + for user in response.get("members"): + users.append(user) + + return users + + def _get_groups(self): + """Parses usergroups.list response into list of groups (dicts)""" + response = self._get_usergroups_list() + groups = [] + for group in response.get("usergroups"): + groups.append(group) + return groups + + def _enrich_error(self, error_str, channel): + """Enhance known errors with more helpful notations.""" + if 'not_in_channel' in error_str: + # there is no file.write.public scope, app must be explicitly in + # the channel + msg = " - application must added to channel '{}'.".format(channel) + error_str += msg + " Ask Slack admin." + return error_str + + +class SlackPython3Operations(AbstractSlackOperations): + + def __init__(self, token, log): + from slack_sdk import WebClient + + self.client = WebClient(token=token) + self.log = log + + def _get_users_list(self): + return self.client.users_list() + + def _get_usergroups_list(self): + return self.client.usergroups_list() + + def get_users_and_groups(self): + from slack_sdk.errors import SlackApiError + while True: + try: + users = self._get_users() + groups = self._get_groups() + break + except SlackApiError as e: + retry_after = e.response.headers.get("Retry-After") + if retry_after: + print( + "Rate limit hit, sleeping for {}".format(retry_after)) + time.sleep(int(retry_after)) + else: + raise e + + return users, groups + + def send_message(self, channel, message, publish_files): + from slack_sdk.errors import SlackApiError + try: + attachment_str = "\n\n Attachment links: \n" + file_ids = [] + for published_file in publish_files: + response = self.client.files_upload( + file=published_file, + filename=os.path.basename(published_file)) + attachment_str += "\n<{}|{}>".format( + response["file"]["permalink"], + os.path.basename(published_file)) + file_ids.append(response["file"]["id"]) + + if publish_files: + message += attachment_str + + message = self.translate_users(message) + + response = self.client.chat_postMessage( + channel=channel, + text=message + ) + return response.data["ts"], file_ids + except SlackApiError as e: + # # You will get a SlackApiError if "ok" is False + error_str = self._enrich_error(str(e.response["error"]), channel) + self.log.warning("Error happened {}".format(error_str)) + except Exception as e: + error_str = self._enrich_error(str(e), channel) + self.log.warning("Not SlackAPI error", exc_info=True) + + return None, [] + + +class 
SlackPython2Operations(AbstractSlackOperations): + + def __init__(self, token, log): + from slackclient import SlackClient + + self.client = SlackClient(token=token) + self.log = log + + def _get_users_list(self): + return self.client.api_call("users.list") + + def _get_usergroups_list(self): + return self.client.api_call("usergroups.list") + + def get_users_and_groups(self): + while True: + try: + users = self._get_users() + groups = self._get_groups() + break + except Exception as e: + retry_after = e.response.headers.get("Retry-After") + if retry_after: + print( + "Rate limit hit, sleeping for {}".format(retry_after)) + time.sleep(int(retry_after)) + else: + raise e + + return users, groups + + def send_message(self, channel, message, publish_files): try: - client = SlackClient(token) attachment_str = "\n\n Attachment links: \n" file_ids = [] for p_file in publish_files: with open(p_file, 'rb') as pf: - response = client.api_call( + response = self.client.api_call( "files.upload", file=pf, channel=channel, @@ -206,7 +404,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if publish_files: message += attachment_str - response = client.api_call( + response = self.client.api_call( "chat.postMessage", channel=channel, text=message @@ -223,46 +421,3 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): self.log.warning("Error happened: {}".format(error_str)) return None, [] - - def _python3_call(self, token, channel, message, publish_files): - from slack_sdk import WebClient - from slack_sdk.errors import SlackApiError - try: - client = WebClient(token=token) - attachment_str = "\n\n Attachment links: \n" - file_ids = [] - for published_file in publish_files: - response = client.files_upload( - file=published_file, - filename=os.path.basename(published_file)) - attachment_str += "\n<{}|{}>".format( - response["file"]["permalink"], - os.path.basename(published_file)) - file_ids.append(response["file"]["id"]) - - if publish_files: - message += attachment_str - - response = client.chat_postMessage( - channel=channel, - text=message - ) - return response.data["ts"], file_ids - except SlackApiError as e: - # You will get a SlackApiError if "ok" is False - error_str = self._enrich_error(str(e.response["error"]), channel) - self.log.warning("Error happened {}".format(error_str)) - except Exception as e: - error_str = self._enrich_error(str(e), channel) - self.log.warning("Not SlackAPI error", exc_info=True) - - return None, [] - - def _enrich_error(self, error_str, channel): - """Enhance known errors with more helpful notations.""" - if 'not_in_channel' in error_str: - # there is no file.write.public scope, app must be explicitly in - # the channel - msg = " - application must added to channel '{}'.".format(channel) - error_str += msg + " Ask Slack admin." 
- return error_str From 1ee50975c0e61acac51f7f76b905a420e95487c6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:14:41 +0100 Subject: [PATCH 2521/2550] Fix wrong position of method --- .../plugins/publish/integrate_slack_api.py | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index a010d08a82..fc5342177d 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -228,6 +228,25 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): return message + def _escape_missing_keys(self, message, fill_data): + """Double escapes placeholder which are missing in 'fill_data'""" + placeholder_keys = re.findall("\{([^}]+)\}", message) + + fill_keys = [] + for key, value in fill_data.items(): + fill_keys.append(key) + if isinstance(value, dict): + for child_key in value.keys(): + fill_keys.append("{}[{}]".format(key, child_key)) + + not_matched = set(placeholder_keys) - set(fill_keys) + + for not_matched_item in not_matched: + message = message.replace("{}".format(not_matched_item), + "{{{}}}".format(not_matched_item)) + + return message + @six.add_metaclass(ABCMeta) class AbstractSlackOperations: @@ -283,25 +302,6 @@ class AbstractSlackOperations: error_str += msg + " Ask Slack admin." return error_str - def _escape_missing_keys(self, message, fill_data): - """Double escapes placeholder which are missing in 'fill_data'""" - placeholder_keys = re.findall("\{([^}]+)\}", message) - - fill_keys = [] - for key, value in fill_data.items(): - fill_keys.append(key) - if isinstance(value, dict): - for child_key in value.keys(): - fill_keys.append("{}[{}]".format(key, child_key)) - - not_matched = set(placeholder_keys) - set(fill_keys) - - for not_matched_item in not_matched: - message = message.replace("{}".format(not_matched_item), - "{{{}}}".format(not_matched_item)) - - return message - class SlackPython3Operations(AbstractSlackOperations): From f80fe3fb938488009974cd931b285ea8564d7b2b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:14:57 +0100 Subject: [PATCH 2522/2550] Fix obsolete call of method --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index fc5342177d..803a07f5d2 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -352,8 +352,6 @@ class SlackPython3Operations(AbstractSlackOperations): if publish_files: message += attachment_str - message = self.translate_users(message) - response = self.client.chat_postMessage( channel=channel, text=message From 394c678299f26967ba64b87a7c8684c1d0419191 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:19:50 +0100 Subject: [PATCH 2523/2550] Do not throw exception if user or group list error Skip notification, publish shouldn't fail because of this. 
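The change below swaps the hard failure for a logged warning plus empty results, so the notification still goes out, just without resolved mentions. Its core, reduced to a sketch (the function signature is illustrative and the client and logger objects are assumed to exist; the warning text matches the diff):

```python
def get_users_and_groups(client, log):
    try:
        # The real code pages through users.list / usergroups.list responses.
        return client.users_list(), client.usergroups_list()
    except Exception:
        log.warning(
            "Cannot pull user info, mentions won't work", exc_info=True
        )
        return [], []
```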
--- .../modules/slack/plugins/publish/integrate_slack_api.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 803a07f5d2..f18b927c98 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -331,7 +331,9 @@ class SlackPython3Operations(AbstractSlackOperations): "Rate limit hit, sleeping for {}".format(retry_after)) time.sleep(int(retry_after)) else: - raise e + self.log.warning("Cannot pull user info, " + "mentions won't work", exc_info=True) + return [], [] return users, groups @@ -395,7 +397,9 @@ class SlackPython2Operations(AbstractSlackOperations): "Rate limit hit, sleeping for {}".format(retry_after)) time.sleep(int(retry_after)) else: - raise e + self.log.warning("Cannot pull user info, " + "mentions won't work", exc_info=True) + return [], [] return users, groups From 61ef7479e8bbb842378229e87b2187924a368c7a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 3 Jan 2023 17:21:14 +0100 Subject: [PATCH 2524/2550] =?UTF-8?q?=F0=9F=94=A7=20pass=20mongo=20url=20a?= =?UTF-8?q?s=20default?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/settings/defaults/project_settings/deadline.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 6e1c0f3540..527f5c0d24 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -2,7 +2,7 @@ "deadline_servers": [], "publish": { "CollectDefaultDeadlineServer": { - "pass_mongo_url": false + "pass_mongo_url": true }, "CollectDeadlinePools": { "primary_pool": "", From 68fe82323883c8035f3831820681174980649802 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:53:00 +0100 Subject: [PATCH 2525/2550] Fix resolving of user_id Display name or real_name could be used also. --- .../slack/plugins/publish/integrate_slack_api.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index f18b927c98..577ead9667 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -187,10 +187,13 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _get_user_id(self, users, user_name): """Returns internal slack id for user name""" user_id = None + user_name_lower = user_name.lower() for user in users: if (not user.get("deleted") and - (user_name.lower() == user["name"].lower() or - user_name.lower() == user["real_name"])): + (user_name_lower == user["name"].lower() or + # bots dont have display_name + user_name_lower == user.get("display_name", '').lower() or + user_name_lower == user.get("real_name", '').lower())): user_id = user["id"] break return user_id @@ -208,8 +211,9 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _translate_users(self, message, users, groups): matches = re.findall("(? 
Date: Tue, 3 Jan 2023 17:58:28 +0100 Subject: [PATCH 2526/2550] Updated documentation --- website/docs/module_slack.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/website/docs/module_slack.md b/website/docs/module_slack.md index 2bfd7cb562..1999912fdc 100644 --- a/website/docs/module_slack.md +++ b/website/docs/module_slack.md @@ -100,6 +100,10 @@ them to add additional message (notification for specific users for example, art Additional message will be sent only if at least one profile, eg. one target channel is configured. All available template keys (see higher) could be used here as a placeholder too. +#### User or group notifications +Message template or dynamic data could contain user or group notification, it must be in format @artist.name, '@John Doe' or "@admin group" for display name containing space. +If value prefixed with @ is not resolved and Slack user is not found, message will contain same value (not translated by Slack into link and proper mention.) + #### Message retention Currently no purging of old messages is implemented in Openpype. Admins of Slack should set their own retention of messages and files per channel. (see https://slack.com/help/articles/203457187-Customize-message-and-file-retention-policies) From f1111a99bda950cb79ff36675f12c34146f05ddf Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 4 Jan 2023 03:28:26 +0000 Subject: [PATCH 2527/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 4fbe5a3608..ae514e371e 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.5" +__version__ = "3.14.10-nightly.6" From b2e8ea6fb80faeef67cbae6db212422c43cb0b58 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 11:36:12 +0100 Subject: [PATCH 2528/2550] Hound --- .../modules/slack/plugins/publish/integrate_slack_api.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 577ead9667..97182ffd9b 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -4,7 +4,6 @@ import six import pyblish.api import copy from datetime import datetime -import re from abc import ABCMeta, abstractmethod import time @@ -210,8 +209,9 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): return group_id def _translate_users(self, message, users, groups): - matches = re.findall("(? format.""" + matches = re.findall(r"(? 
Date: Wed, 4 Jan 2023 12:44:07 +0100 Subject: [PATCH 2529/2550] Fix - search pattern Updated to use user profile --- .../modules/slack/plugins/publish/integrate_slack_api.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 97182ffd9b..02197a6d01 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -191,8 +191,10 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if (not user.get("deleted") and (user_name_lower == user["name"].lower() or # bots dont have display_name - user_name_lower == user.get("display_name", '').lower() or - user_name_lower == user.get("real_name", '').lower())): + user_name_lower == user["profile"].get("display_name", + '').lower() or + user_name_lower == user["profile"].get("real_name", + '').lower())): user_id = user["id"] break return user_id @@ -210,7 +212,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _translate_users(self, message, users, groups): """Replace all occurences of @mentions with proper <@name> format.""" - matches = re.findall(r"(? Date: Wed, 4 Jan 2023 12:51:47 +0100 Subject: [PATCH 2530/2550] Fix - cannot pull response from ordinary exception --- .../slack/plugins/publish/integrate_slack_api.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 02197a6d01..bb5cd40936 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -397,15 +397,9 @@ class SlackPython2Operations(AbstractSlackOperations): groups = self._get_groups() break except Exception as e: - retry_after = e.response.headers.get("Retry-After") - if retry_after: - print( - "Rate limit hit, sleeping for {}".format(retry_after)) - time.sleep(int(retry_after)) - else: - self.log.warning("Cannot pull user info, " - "mentions won't work", exc_info=True) - return [], [] + self.log.warning("Cannot pull user info, " + "mentions won't work", exc_info=True) + return [], [] return users, groups From dff87d4e1cfabafb4ec40c46a1cbc48c851f661f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 12:52:32 +0100 Subject: [PATCH 2531/2550] Hound --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index bb5cd40936..21069e0b13 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -396,7 +396,7 @@ class SlackPython2Operations(AbstractSlackOperations): users = self._get_users() groups = self._get_groups() break - except Exception as e: + except Exception: self.log.warning("Cannot pull user info, " "mentions won't work", exc_info=True) return [], [] From 40cf2956fd968c8ffaf47c584c69705a852e4bc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 4 Jan 2023 14:26:15 +0100 Subject: [PATCH 2532/2550] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- 
openpype/pipeline/workfile/workfile_template_builder.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index dce36eca82..24b0cc81f1 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1504,7 +1504,6 @@ class PlaceholderCreateMixin(object): """ creators_by_name = self.builder.get_creators_by_name() - print(creators_by_name) creator_items = [ (creator_name, creator.label or creator_name) From e69f3539eaeac23d0663e71f83c9d463cbc50699 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 4 Jan 2023 14:26:26 +0100 Subject: [PATCH 2533/2550] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 24b0cc81f1..a834ca0e21 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -675,7 +675,7 @@ class AbstractTemplateBuilder(object): # switch to remove placeholders after they are used placeholder_keep = profile.get("placeholder_keep") # backward compatibility, since default is True - if placeholder_keep is not False: + if placeholder_keep is None: placeholder_keep = True if not path: From b01322645ae14e74b09d4125fcac5a8160d76c57 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 14:44:04 +0100 Subject: [PATCH 2534/2550] Correctly repair frame range with handle attributes if `handleStart` and `handleEnd` available on instance --- .../plugins/publish/validate_frame_range.py | 33 +++++++++++++++---- 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index 5e50ae72cd..dec2f00700 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -92,10 +92,31 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): """ Repair instance container to match asset data. 
""" - cmds.setAttr( - "{}.frameStart".format(instance.data["name"]), - instance.context.data.get("frameStartHandle")) - cmds.setAttr( - "{}.frameEnd".format(instance.data["name"]), - instance.context.data.get("frameEndHandle")) + node = instance.data["name"] + context = instance.context + + frame_start_handle = int(context.data.get("frameStartHandle")) + frame_end_handle = int(context.data.get("frameEndHandle")) + handle_start = int(context.data.get("handleStart")) + handle_end = int(context.data.get("handleEnd")) + frame_start = int(context.data.get("frameStart")) + frame_end = int(context.data.get("frameEnd")) + + # Start + if cmds.attributeQuery("handleStart", node=node, exists=True): + cmds.setAttr("{}.handleStart".format(node), handle_start) + cmds.setAttr("{}.frameStart".format(node), frame_start) + else: + # Include start handle in frame start if no separate handleStart + # attribute exists on the node + cmds.setAttr("{}.frameStart".format(node), frame_start_handle) + + # End + if cmds.attributeQuery("handleEnd", node=node, exists=True): + cmds.setAttr("{}.handleEnd".format(node), handle_end) + cmds.setAttr("{}.frameEnd".format(node), frame_end) + else: + # Include end handle in frame end if no separate handleEnd + # attribute exists on the node + cmds.setAttr("{}.frameEnd".format(node), frame_end_handle) From 0a4ed0988c0a359bbec6b6929d9073215af3cdea Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 14:45:00 +0100 Subject: [PATCH 2535/2550] Do not force instance handleStart and handleEnd to zero if not `handles` in data --- openpype/hosts/maya/plugins/publish/collect_instances.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_instances.py b/openpype/hosts/maya/plugins/publish/collect_instances.py index ad1f794680..75bc935143 100644 --- a/openpype/hosts/maya/plugins/publish/collect_instances.py +++ b/openpype/hosts/maya/plugins/publish/collect_instances.py @@ -174,9 +174,6 @@ class CollectInstances(pyblish.api.ContextPlugin): if "handles" in data: data["handleStart"] = data["handles"] data["handleEnd"] = data["handles"] - else: - data["handleStart"] = 0 - data["handleEnd"] = 0 data["frameStartHandle"] = data["frameStart"] - data["handleStart"] # noqa: E501 data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"] # noqa: E501 From e44f585aa63c32ebb6913426626fdb4c3fd0a008 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 15:05:36 +0100 Subject: [PATCH 2536/2550] OP-4490 - safer resolving if site is active --- openpype/modules/sync_server/providers/dropbox.py | 2 +- openpype/modules/sync_server/providers/gdrive.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index 3515aee93f..a517e7d847 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -165,7 +165,7 @@ class DropboxHandler(AbstractProvider): Returns: (boolean) """ - return self.presets["enabled"] and self.dbx is not None + return self.presets.get("enabled") and self.dbx is not None @classmethod def get_configurable_items(cls): diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index 297a5c9fec..4e24fe41d2 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -119,7 +119,7 @@ class GDriveHandler(AbstractProvider): Returns: (boolean) 
""" - return self.presets["enabled"] and self.service is not None + return self.presets.get("enabled") and self.service is not None @classmethod def get_system_settings_schema(cls): From 8527554c2d3bafd1a7eb796219fe7cefebcf749b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 15:07:36 +0100 Subject: [PATCH 2537/2550] OP-4490 - fixed unnecessary checks Configured sites were checked all the time even if they weren't used. Now it checks only sites that are set for project. --- openpype/modules/sync_server/sync_server.py | 67 ++++++--------------- 1 file changed, 17 insertions(+), 50 deletions(-) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index d0a40a60ff..d1ca69a31c 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -169,7 +169,7 @@ def resolve_paths(module, file_path, project_name, return local_file_path, remote_file_path -def site_is_working(module, project_name, site_name): +def _site_is_working(module, project_name, site_name, site_config): """ Confirm that 'site_name' is configured correctly for 'project_name'. @@ -179,54 +179,17 @@ def site_is_working(module, project_name, site_name): module (SyncServerModule) project_name(string): site_name(string): + site_config (dict): configuration for site from Settings Returns (bool) """ - if _get_configured_sites(module, project_name).get(site_name): - return True - return False + provider = module.get_provider_for_site(site=site_name) + handler = lib.factory.get_provider(provider, + project_name, + site_name, + presets=site_config) - -def _get_configured_sites(module, project_name): - """ - Loops through settings and looks for configured sites and checks - its handlers for particular 'project_name'. 
- - Args: - project_setting(dict): dictionary from Settings - only_project_name(string, optional): only interested in - particular project - Returns: - (dict of dict) - {'ProjectA': {'studio':True, 'gdrive':False}} - """ - settings = module.get_sync_project_setting(project_name) - return _get_configured_sites_from_setting(module, project_name, settings) - - -def _get_configured_sites_from_setting(module, project_name, project_setting): - if not project_setting.get("enabled"): - return {} - - initiated_handlers = {} - configured_sites = {} - all_sites = module._get_default_site_configs() - all_sites.update(project_setting.get("sites")) - for site_name, config in all_sites.items(): - provider = module.get_provider_for_site(site=site_name) - handler = initiated_handlers.get((provider, site_name)) - if not handler: - handler = lib.factory.get_provider(provider, - project_name, - site_name, - presets=config) - initiated_handlers[(provider, site_name)] = \ - handler - - if handler.is_active(): - configured_sites[site_name] = True - - return configured_sites + return handler.is_active() class SyncServerThread(threading.Thread): @@ -288,7 +251,8 @@ class SyncServerThread(threading.Thread): for project_name in enabled_projects: preset = self.module.sync_project_settings[project_name] - local_site, remote_site = self._working_sites(project_name) + local_site, remote_site = self._working_sites(project_name, + preset) if not all([local_site, remote_site]): continue @@ -464,7 +428,7 @@ class SyncServerThread(threading.Thread): self.timer.cancel() self.timer = None - def _working_sites(self, project_name): + def _working_sites(self, project_name, sync_config): if self.module.is_project_paused(project_name): self.log.debug("Both sites same, skipping") return None, None @@ -476,9 +440,12 @@ class SyncServerThread(threading.Thread): local_site, remote_site)) return None, None - configured_sites = _get_configured_sites(self.module, project_name) - if not all([local_site in configured_sites, - remote_site in configured_sites]): + local_site_config = sync_config.get('sites')[local_site] + remote_site_config = sync_config.get('sites')[remote_site] + if not all([_site_is_working(self.module, project_name, local_site, + local_site_config), + _site_is_working(self.module, project_name, remote_site, + remote_site_config)]): self.log.debug( "Some of the sites {} - {} is not working properly".format( local_site, remote_site From 307a10e123ad0c177248b144f5a2aadc2c503491 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 15:35:43 +0100 Subject: [PATCH 2538/2550] Implement validate frame range repair for Render Layers - fix #3302 --- .../plugins/publish/validate_frame_range.py | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index dec2f00700..d86925184e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -5,6 +5,11 @@ from openpype.pipeline.publish import ( RepairAction, ValidateContentsOrder, ) +from openpype.hosts.maya.api.lib_rendersetup import ( + get_attr_overrides, + get_attr_in_layer, +) +from maya.app.renderSetup.model.override import AbsOverride class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -93,6 +98,11 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): Repair instance container to match asset data. 
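The frame-range repairs in PATCH 2534/2535 above and the render-layer variant introduced in PATCH 2538 all hinge on the same handle arithmetic, which is easy to verify with made-up numbers (all values below are illustrative, not taken from any project):

    frame_start, frame_end = 1001, 1100   # comp range without handles
    handle_start = handle_end = 8         # legacy "handles" value copied to both ends

    frame_start_handle = frame_start - handle_start   # 993
    frame_end_handle = frame_end + handle_end         # 1108

    assert (frame_start_handle, frame_end_handle) == (993, 1108)
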
""" + if "renderlayer" in instance.data.get("families"): + # Special behavior for renderlayers + cls.repair_renderlayer(instance) + return + node = instance.data["name"] context = instance.context @@ -120,3 +130,53 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): # Include end handle in frame end if no separate handleEnd # attribute exists on the node cmds.setAttr("{}.frameEnd".format(node), frame_end_handle) + + @classmethod + def repair_renderlayer(cls, instance): + """Apply frame range in render settings""" + + layer = instance.data["setMembers"] + context = instance.context + + start_attr = "defaultRenderGlobals.startFrame" + end_attr = "defaultRenderGlobals.endFrame" + + frame_start_handle = int(context.data.get("frameStartHandle")) + frame_end_handle = int(context.data.get("frameEndHandle")) + + cls._set_attr_in_layer(start_attr, layer, frame_start_handle) + cls._set_attr_in_layer(end_attr, layer, frame_end_handle) + + @classmethod + def _set_attr_in_layer(cls, node_attr, layer, value): + + if get_attr_in_layer(node_attr, layer=layer) == value: + # Already ok. This can happen if you have multiple renderlayers + # validated and there are no frame range overrides. The first + # layer's repair would have fixed the global value already + return + + overrides = list(get_attr_overrides(node_attr, layer=layer)) + if overrides: + # We set the last absolute override if it is an absolute override + # otherwise we'll add an Absolute override + last_override = overrides[-1][1] + if not isinstance(last_override, AbsOverride): + collection = last_override.parent() + node, attr = node_attr.split(".", 1) + last_override = collection.createAbsoluteOverride(node, attr) + + cls.log.debug("Setting {attr} absolute override in " + "layer '{layer}': {value}".format(layer=layer, + attr=node_attr, + value=value)) + cmds.setAttr(last_override.name() + ".attrValue", value) + + else: + # Set the attribute directly + # (Note that this will set the global attribute) + cls.log.debug("Setting global {attr}: {value}".format( + attr=node_attr, + value=value + )) + cmds.setAttr(node_attr, value) From e353095c24e219914f41b323feba9214f85c38fb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 15:36:19 +0100 Subject: [PATCH 2539/2550] Only apply deadline attributes when Deadline is enabled (fixes Create Render with Deadline module disabled) --- .../maya/plugins/create/create_render.py | 42 +++++++++++-------- 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index a3e1272652..8375149442 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -72,15 +72,19 @@ class CreateRender(plugin.Creator): def __init__(self, *args, **kwargs): """Constructor.""" super(CreateRender, self).__init__(*args, **kwargs) - deadline_settings = get_system_settings()["modules"]["deadline"] - if not deadline_settings["enabled"]: - self.deadline_servers = {} - return + + # Defaults self._project_settings = get_project_settings( legacy_io.Session["AVALON_PROJECT"]) if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: # noqa lib_rendersettings.RenderSettings().set_default_renderer_settings() + + # Deadline-only manager = ModulesManager() + deadline_settings = get_system_settings()["modules"]["deadline"] + if not deadline_settings["enabled"]: + self.deadline_servers = {} + return self.deadline_module = 
manager.modules_by_name["deadline"] try: default_servers = deadline_settings["deadline_urls"] @@ -193,8 +197,6 @@ class CreateRender(plugin.Creator): pool_names = [] default_priority = 50 - self.server_aliases = list(self.deadline_servers.keys()) - self.data["deadlineServers"] = self.server_aliases self.data["suspendPublishJob"] = False self.data["review"] = True self.data["extendFrames"] = False @@ -233,6 +235,9 @@ class CreateRender(plugin.Creator): raise RuntimeError("Both Deadline and Muster are enabled") if deadline_enabled: + self.server_aliases = list(self.deadline_servers.keys()) + self.data["deadlineServers"] = self.server_aliases + try: deadline_url = self.deadline_servers["default"] except KeyError: @@ -254,6 +259,19 @@ class CreateRender(plugin.Creator): default_priority) self.data["tile_priority"] = tile_priority + pool_setting = (self._project_settings["deadline"] + ["publish"] + ["CollectDeadlinePools"]) + primary_pool = pool_setting["primary_pool"] + self.data["primaryPool"] = self._set_default_pool(pool_names, + primary_pool) + # We add a string "-" to allow the user to not + # set any secondary pools + pool_names = ["-"] + pool_names + secondary_pool = pool_setting["secondary_pool"] + self.data["secondaryPool"] = self._set_default_pool(pool_names, + secondary_pool) + if muster_enabled: self.log.info(">>> Loading Muster credentials ...") self._load_credentials() @@ -273,18 +291,6 @@ class CreateRender(plugin.Creator): self.log.info(" - pool: {}".format(pool["name"])) pool_names.append(pool["name"]) - pool_setting = (self._project_settings["deadline"] - ["publish"] - ["CollectDeadlinePools"]) - primary_pool = pool_setting["primary_pool"] - self.data["primaryPool"] = self._set_default_pool(pool_names, - primary_pool) - # We add a string "-" to allow the user to not - # set any secondary pools - pool_names = ["-"] + pool_names - secondary_pool = pool_setting["secondary_pool"] - self.data["secondaryPool"] = self._set_default_pool(pool_names, - secondary_pool) self.options = {"useSelection": False} # Force no content def _set_default_pool(self, pool_names, pool_value): From d3f09c075badd52f3faa9d4fe41678dd1abf1d9b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 15:39:13 +0100 Subject: [PATCH 2540/2550] OP-4490 - Hound --- openpype/modules/sync_server/sync_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index d1ca69a31c..85b0774e90 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -443,9 +443,9 @@ class SyncServerThread(threading.Thread): local_site_config = sync_config.get('sites')[local_site] remote_site_config = sync_config.get('sites')[remote_site] if not all([_site_is_working(self.module, project_name, local_site, - local_site_config), + local_site_config), _site_is_working(self.module, project_name, remote_site, - remote_site_config)]): + remote_site_config)]): self.log.debug( "Some of the sites {} - {} is not working properly".format( local_site, remote_site From 5c6d86e06b84c464d0bafb640d5ef428f3d60650 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Jan 2023 10:35:14 +0100 Subject: [PATCH 2541/2550] maya: fix typo in template builder --- openpype/hosts/maya/api/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 
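The pool handling above prepends "-" so artists can explicitly choose no secondary pool. The body of `_set_default_pool` is not part of this excerpt; a plausible sketch of the fallback rule it implies (keep the configured value when Deadline reports it, otherwise fall back to the first available entry) could look like this, with all names and values hypothetical:

    def set_default_pool(pool_names, configured_value):
        """Hypothetical fallback: keep the configured pool if it is a
        known pool, otherwise use the first available entry."""
        if configured_value and configured_value in pool_names:
            return configured_value
        return pool_names[0] if pool_names else ""

    pools = ["-", "gpu", "cpu"]
    print(set_default_pool(pools, "gpu"))      # -> gpu
    print(set_default_pool(pools, "missing"))  # -> "-" (first entry)
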
1d3f1cf568..3416c98793 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -240,7 +240,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): cmds.setAttr(node + ".hiddenInOutliner", True) def load_succeed(self, placeholder, container): - self._parent_in_hierarhchy(placeholder, container) + self._parent_in_hierarchy(placeholder, container) def _parent_in_hierarchy(self, placeholder, container): """Parent loaded container to placeholder's parent. From 20b2b50bac741d82b454b207d83c1ca5bda849a3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Jan 2023 10:35:54 +0100 Subject: [PATCH 2542/2550] global: adding project anatomy data for formating --- openpype/pipeline/workfile/workfile_template_builder.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index a834ca0e21..390a5759fc 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -691,7 +691,14 @@ class AbstractTemplateBuilder(object): key: value for key, value in os.environ.items() } + fill_data["root"] = anatomy.roots + fill_data["project"] = { + "name": project_name, + "code": anatomy["attributes"]["code"] + } + + result = StringTemplate.format_template(path, fill_data) if result.solved: path = result.normalized() From 673e7a735928408dcc7eae463aba62af2a53744a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 6 Jan 2023 10:38:36 +0100 Subject: [PATCH 2543/2550] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 390a5759fc..58f152591f 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1517,7 +1517,7 @@ class PlaceholderCreateMixin(object): for creator_name, creator in creators_by_name.items() ] - creator_items = list(sorted(creator_items, key=lambda i: i[1])) + creator_items.sort(key=lambda i: i[1]) options = options or {} return [ attribute_definitions.UISeparatorDef(), From d6004c26462e3f0400674ce1fbc3b6936c46b5cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 6 Jan 2023 10:39:43 +0100 Subject: [PATCH 2544/2550] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 58f152591f..4fa45cdf30 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1574,7 +1574,7 @@ class PlaceholderCreateMixin(object): creator_name = placeholder.data["creator"] create_variant = placeholder.data["create_variant"] - creator_plugin = get_legacy_creator_by_name(creator_name) + creator_plugin = self.builder.get_creators_by_name()[creator_name] # create subset name project_name = 
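The extra root/project keys added to fill_data in PATCH 2542 are what make anatomy-style tokens resolvable in the template path. A hedged illustration of the effect; the template string and the "work" root name are examples only, and StringTemplate is assumed to be the helper exported from openpype.lib that the builder already calls:

    from openpype.lib import StringTemplate

    fill_data = {
        "root": {"work": "/mnt/projects"},               # anatomy.roots in the real code
        "project": {"name": "MyProject", "code": "myp"},
    }

    path = "{root[work]}/{project[name]}/templates/{project[code]}_build.ma"
    result = StringTemplate.format_template(path, fill_data)
    if result.solved:
        print(result.normalized())
        # -> /mnt/projects/MyProject/templates/myp_build.ma
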
legacy_io.Session["AVALON_PROJECT"] From c071140a6fcc8525b78f7f6b3bce8303bc011505 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Jan 2023 10:54:06 +0100 Subject: [PATCH 2545/2550] PR comments --- .../workfile/workfile_template_builder.py | 25 +++++++++++-------- .../schema_templated_workfile_build.json | 2 +- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 4fa45cdf30..e3821bb4d7 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -43,7 +43,6 @@ from openpype.pipeline.load import ( load_with_repre_context, ) from openpype.pipeline.create import ( - get_legacy_creator_by_name, discover_legacy_creator_plugins ) @@ -439,7 +438,7 @@ class AbstractTemplateBuilder(object): template_path = template_preset["path"] if keep_placeholders is None: - keep_placeholders = template_preset["placeholder_keep"] + keep_placeholders = template_preset["keep_placeholder"] self.import_template(template_path) self.populate_scene_placeholders( @@ -673,10 +672,10 @@ class AbstractTemplateBuilder(object): path = profile["path"] # switch to remove placeholders after they are used - placeholder_keep = profile.get("placeholder_keep") + keep_placeholder = profile.get("keep_placeholder") # backward compatibility, since default is True - if placeholder_keep is None: - placeholder_keep = True + if keep_placeholder is None: + keep_placeholder = True if not path: raise TemplateLoadFailed(( @@ -707,7 +706,7 @@ class AbstractTemplateBuilder(object): self.log.info("Found template at: '{}'".format(path)) return { "path": path, - "placeholder_keep": placeholder_keep + "keep_placeholder": keep_placeholder } solved_path = None @@ -736,7 +735,7 @@ class AbstractTemplateBuilder(object): return { "path": solved_path, - "placeholder_keep": placeholder_keep + "keep_placeholder": keep_placeholder } @@ -991,7 +990,7 @@ class PlaceholderItem(object): def __init__(self, scene_identifier, data, plugin): self._log = None - self.name = scene_identifier + self._scene_identifier = scene_identifier self._data = data self._plugin = plugin @@ -1056,7 +1055,13 @@ class PlaceholderItem(object): return self._log def __repr__(self): - return "< {} {} >".format(self.__class__.__name__, self.name) + name = None + if hasattr("name", self): + name = self.name + if hasattr("_scene_identifier ", self): + name = self._scene_identifier + + return "< {} {} >".format(self.__class__.__name__, name) @property def order(self): @@ -1069,7 +1074,7 @@ class PlaceholderItem(object): @property def scene_identifier(self): - return self.name + return self._scene_identifier @property def finished(self): diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json index 1826734291..b244460bbf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -30,7 +30,7 @@ "multipath": false }, { - "key": "placeholder_keep", + "key": "keep_placeholder", "label": "Keep placeholders", "type": "boolean", "default": true From cd2324f07e48e03b36a99918d3110e6d505757f4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 6 Jan 2023 15:42:47 +0100 Subject: [PATCH 
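One detail worth flagging in the __repr__ added by PATCH 2545: the builtin signature is hasattr(obj, name), object first. A minimal corrected sketch of the intended fallback, trimmed to just what __repr__ needs:

    class PlaceholderItem(object):
        def __init__(self, scene_identifier):
            self._scene_identifier = scene_identifier

        def __repr__(self):
            # hasattr expects (object, attribute name) -- note the order
            name = None
            if hasattr(self, "name"):
                name = self.name
            elif hasattr(self, "_scene_identifier"):
                name = self._scene_identifier
            return "< {} {} >".format(self.__class__.__name__, name)

    print(PlaceholderItem("PLACEHOLDER_01"))
    # -> < PlaceholderItem PLACEHOLDER_01 >
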
2546/2550] fix how host ip is received --- .../ftrack/ftrack_server/event_server_cli.py | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 20c5ab24a8..9adc784224 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -169,6 +169,22 @@ def legacy_server(ftrack_url): time.sleep(1) +def get_host_ip(): + host_name = socket.gethostname() + try: + return socket.gethostbyname(host_name) + except Exception: + pass + + try: + import ipaddress + return socket.gethostbyname(str(ipaddress.ip_address(8888))) + + except Exception: + pass + return None + + def main_loop(ftrack_url): """ This is main loop of event handling. @@ -245,11 +261,13 @@ def main_loop(ftrack_url): ) host_name = socket.gethostname() + host_ip = get_host_ip() + main_info = [ ["created_at", datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S")], ["Username", getpass.getuser()], ["Host Name", host_name], - ["Host IP", socket.gethostbyname(host_name)], + ["Host IP", host_ip or "N/A"], ["OpenPype executable", get_openpype_execute_args()[-1]], ["OpenPype version", get_openpype_version() or "N/A"], ["OpenPype build version", get_build_version() or "N/A"] From e1edb76f731097f9887c096db0fdcb05aaed0616 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 6 Jan 2023 17:48:05 +0100 Subject: [PATCH 2547/2550] use 'get_host_ip' on more places --- .../action_where_run_ask.py | 4 ++-- .../ftrack/ftrack_server/event_server_cli.py | 17 +---------------- openpype/modules/ftrack/ftrack_server/lib.py | 19 ++++++++++++++++++- .../ftrack/scripts/sub_event_status.py | 7 ++++--- 4 files changed, 25 insertions(+), 22 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py index 0d69913996..65d1b42d82 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -3,6 +3,7 @@ import socket import getpass from openpype_modules.ftrack.lib import BaseAction +from openpype_modules.ftrack.ftrack_server.lib import get_host_ip class ActionWhereIRun(BaseAction): @@ -53,8 +54,7 @@ class ActionWhereIRun(BaseAction): try: host_name = socket.gethostname() msgs["Hostname"] = host_name - host_ip = socket.gethostbyname(host_name) - msgs["IP"] = host_ip + msgs["IP"] = get_host_ip() or "N/A" except Exception: pass diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 9adc784224..25ebad6658 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -26,6 +26,7 @@ from openpype_modules.ftrack import ( ) from openpype_modules.ftrack.lib import credentials from openpype_modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.ftrack_server.lib import get_host_ip class MongoPermissionsError(Exception): @@ -169,22 +170,6 @@ def legacy_server(ftrack_url): time.sleep(1) -def get_host_ip(): - host_name = socket.gethostname() - try: - return socket.gethostbyname(host_name) - except Exception: - pass - - try: - import ipaddress - return socket.gethostbyname(str(ipaddress.ip_address(8888))) - - except Exception: - pass - return None - - def main_loop(ftrack_url): """ 
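socket.gethostbyname(host_name) raises when the machine's host name has no resolvable entry, which is what the try/except in get_host_ip guards against. As an aside (not what the patch does), a common alternative is to read the address of the interface that outbound traffic would use; a UDP connect sends no packets:

    import socket

    def get_outbound_ip():
        """Best-effort local IP discovery via a UDP 'connect' (no traffic is sent)."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            sock.connect(("8.8.8.8", 53))   # any routable address works
            return sock.getsockname()[0]
        except OSError:
            return None
        finally:
            sock.close()

    print(get_outbound_ip() or "N/A")
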
This is main loop of event handling. diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index c8143f739c..61d3bfa259 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -9,8 +9,9 @@ import time import queue import collections import appdirs -import pymongo +import socket +import pymongo import requests import ftrack_api import ftrack_api.session @@ -32,6 +33,22 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" +def get_host_ip(): + host_name = socket.gethostname() + try: + return socket.gethostbyname(host_name) + except Exception: + pass + + try: + import ipaddress + return socket.gethostbyname(str(ipaddress.ip_address(8888))) + + except Exception: + pass + return None + + class SocketBaseEventHub(ftrack_api.event.hub.EventHub): hearbeat_msg = b"hearbeat" diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index eb3f63c04b..dc5836e7f2 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -15,7 +15,8 @@ from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StatusEventHub, TOPIC_STATUS_SERVER, - TOPIC_STATUS_SERVER_RESULT + TOPIC_STATUS_SERVER_RESULT, + get_host_ip ) from openpype.lib import ( Logger, @@ -29,10 +30,10 @@ log = Logger.get_logger("Event storer") action_identifier = ( "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"] ) -host_ip = socket.gethostbyname(socket.gethostname()) +host_ip = get_host_ip() action_data = { "label": "OpenPype Admin", - "variant": "- Event server Status ({})".format(host_ip), + "variant": "- Event server Status ({})".format(host_ip or "IP N/A"), "description": "Get Infromation about event server", "actionIdentifier": action_identifier } From 1c530c0cb4c35353b7b697905d79c5628c705ff3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 6 Jan 2023 18:09:17 +0100 Subject: [PATCH 2548/2550] skip ipaddress way to receive ip address --- openpype/modules/ftrack/ftrack_server/lib.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 61d3bfa259..eb64063fab 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -40,12 +40,6 @@ def get_host_ip(): except Exception: pass - try: - import ipaddress - return socket.gethostbyname(str(ipaddress.ip_address(8888))) - - except Exception: - pass return None From 823f661c47d254e6560dd33945a942d0e92a55c0 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 7 Jan 2023 03:27:59 +0000 Subject: [PATCH 2549/2550] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index ae514e371e..732682dd60 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.6" +__version__ = "3.14.10-nightly.7" From 4cc66395c5a60c82cd862165c849591a112180f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Jan 2023 06:31:59 +0000 Subject: [PATCH 2550/2550] Bump json5 from 1.0.1 to 1.0.2 in /website Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2. 
- [Release notes](https://github.com/json5/json5/releases) - [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md) - [Commits](https://github.com/json5/json5/compare/v1.0.1...v1.0.2) --- updated-dependencies: - dependency-name: json5 dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 220a489dfa..9af21c7500 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4740,9 +4740,9 @@ json-schema-traverse@^1.0.0: integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== json5@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" @@ -5154,16 +5154,11 @@ minimatch@^3.0.4: dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0: +minimist@^1.2.0, minimist@^1.2.5: version "1.2.7" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== -minimist@^1.2.5: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== - mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"

zG(`x+y}Afkr0~h{Jn)lNL-GO&BoqRpX_^!T7PY^JfDtOmdTHIfPd4$jh79W6y+fn; zKtH5fVs4J}Y~T(?F(gTm6r4dy5~F4ax7&rn9|72U0E(gzD-$FEF;t}M(#CmTZqJCU z)BfcleOgp1W`byt35roxT4O4>4vlL-%AVI;=!_r;G&&@T60J@>_~V?{UKu-j#O#C; z-C9=))$%Tvnr48yQD)Q(jd5<)bN|8!&zOS`J`^gTSzT^73xCj8?GD@ck;8`e>pfyn zzeNkahHm*xtr|$oo|@w>SGfk^a_6bkAR1`2nJVdYxjga*jfg{%(A$-7-pyTcg3igw z_6rOoROG=Gi>{Sz-7_{Is6oS$DLW2a);^dzgp;)U%KBO3dbVxXp+lP%eP-`WvC0qj zhgBe(js8M(lS!|R89ls5+cMaVvl${1s%VTa+me&*;xr@)2K;^5NiYV>)+rrF9V#|* z+$&S3z0#&sFe&JCI*ZM-!5)&vPy=~+CeTK)=%p0enO0$W2n4e@HYy4+7rNv>1$bPT9w@{M-`98Qg5dU{G(k3OjydKl{5EzK5XP$3lHE` z`m~E=Q?ht<;cQMCcpg3GH0WK%}ePhXsF&uc>wZdU8!l7fS4j`yPemY z1A{0rBQw>CW+XxmDVjFpDi^KRlGN?XS9PV!1aK~wou$jhMQ0~m%iZ`rtci`w<-110N?Mw8iWGMTkn-LtBg1j8h6U3^I0_NCsX&VUPx`%cSRbixBXE2%l5KKD8!}4B0RzZ+mI(;VX!lJP~+jZ>Hy<7Kz6W?EX z(x#ymdjZTF0i==1U{VGfJpjpU3XTfCG`CmBR?R!@D)-6fT?2B@Ubs`FT8#+InroLe z+OU`i+HSQzeV=_<64Yv~3yp(6TXhv4AXO%>Oz8hYP(F;LNS&IdNcbPLrzamI#eiZ) zKTzgyingBKu-njS;!9mScka@)OXp5aO6x^I0z;AtsnDTw%jRu+w5S%1-#dLJ!mN7yT*RXrL_MN(SY*9A~OTTsNcG9iW zi57LK#(g`qY|*MowUQyK!~^?n;dSe_XxE}mukNKRzwEw{x49#7rq3YH_!vfrw$!WF zZaa%M>C?J(hYr;vT`5;C+$Kxatly$jr_SAax2RyqJbxvN6EQH3Der_$Xz>&c6(O-X zH!ojKi)h@hP3!jUYL%igFQ2?C$5yV}v{lRIfY#l+mnYAiOLVC$=eGPRR_WWKW%HJ; zy0r_rwQT>j-kEQU_p?0uXttv z6EeD-4PSkJBCU)=KwR`s-Jv+B;R@Rhy{m`*1mZ;{V2lwdObL9B;#a)R9go+D=UF#NJ;dzxBJV$z*W%YWs{_NguH-Eh{XyDwV+I052`de|Ta z;Ro^uLeN}nn!u&Idp7-6V(i>0Lth>;bokId9UGR=3(ERpoFFpp+_k8+q5gpy;r@Fu zfpf(+?BA{LfFZ+2jA~!ZdVbHT9O3rR z2#|GX8*i1!yTfbLG@m+p0h!+Ub65C5wC^+!S0Pmd8igeBSFc~q%Fez&NB?nf?x*W5 z{{&?JfG|z*DrKVl!-~h3EM2<96OXv~(j}vf*_TdVxNYSnq{^v)$@n|JU3Ql&Q#!~G zDWpas^U|dpCL*>%@!a$0jm1m)G2GoVhYp{-;iNE2T85|#DiRqLY80>S+`X#zP0$V|MPl+D|-v-!}HaSz@J3{~4MZCtkY$n{KKWhmF8A$Rc<9uOQ75pA@e z{&DWIt14v2iZW(Vm*`QST=VX{Gos;v&tX|6D2-%GO}u$K+bMf|Q_3=_F>=YL&)v+$ z^=9{tBd6}TT{n(iqpMbn5A+WR3eaMXl!PnkJc)MbR=Ki;z*==nNl7;oZe(zLc@Vg@ z@n0?FNV24oRD!^6`E}#f>jm3?b@38%ShWV#!Zi=AK7Rx_!Die%dGtyqn3VbboCdlkQ$0h;wlMsEDq_2pwcm*(y2r@^6;f31zIK0x{?%%&ABBUtyNaOOM<|099nFT zR^EY?FY_L!Kj>GSWRN~83oI*<8Zgk&iy;a6uZq&pgp5{UBY#_pRtxzcWcZ0!vXAhf z2ole-$c+z_pJcS4A45HOuK80a2;}v={8xk3;^y2+FM4i}MU^OElBm2g0ds}`>4qw; z090GS1@bZ{2r8U^(Ui`#(uFF+8*qD4YEj?KURI6pv?LDmC;f@gm* zch8O;z}%kZ2a*ZHvEs2M60cod`Qy^3tv~hx(%@N#+qavQPSL($L+BAsqM&2bNWyd`g6TY zaH}u_A6o_%r~G zsLWB-DwWq{9N&2^;~6L!m;h7j%Qfk$`)$tdG_$tgQ1Y|zG5j$aO_Sy=HQGn#AcI1G z`O?L!moLMG&qGZ_es;UfUECyb%FG`xWU`>v!_9#J1DlQ+QeAH-9b^6Sz1auus&siv zA)gmS&S@x7zkX3UgN}?T(|lmftj}l6-hL$)^wYmuETHWoO|&93%0Z>s5{=lX;)2OvMr-?7%$Y-Fo?}if*QPP@U#Db|%VjE!7}cVv zhDqH!ci!IIf0&Yam!0KRlvc;ww@MVl#>t8Vt$vb?3S=DKuzv-9|FJVk+1&3@vc$Wn zBDFdW>Rsa6_n$32XTv3)c7y!}?;C4T@IgHZKNx+|Jb-xQRH0h!>8-&}ApGnMtXN99RHKNFys&;8#lD21G z-m~UJwn0UHtXg>D>SQBHp-ZJ@|UBDZr(fk{a1m;BxO>A8sT%Pp1%P^Nza#$ zzeD~qEl69FWI-s{{xX%FFNW~TovK_<$lnLj6L}Fs4%xgR|3AP~IEOhXxv);@TH*f+ zBkErSAA@Ba9u_+L{kP%d^|#);b0>+WpGi21tjbW5t=i3Atf;IKA!Sb##L5(oW$!m^ zT+FZ<)n9vM&pUF84qUaO{k5qJPOI)5*nO4tzUlZ!09;t2!<62&u>GIS-j%IY-k3;& zOzMj_9oaoLdEeTF`;s1`WX@r!Jb8Uj*QfKBAHU&vdM881v8hDc9{nogJ3gGZ!=jIG z(yn#WD&Yk5O`OO%zI?&<%NaSA*tW0rtsSf-k;cc~*!;|O z^eN-7UP~N5dHR*B*JuiDbnLz2NsvVq7QmJ3)#Hp?n=0!X!9OJlaw+x6$&dM~?A>B(oqBLmeq@3grDNSGiQ zjb2SkXa)hAR3Y)aHwDh&LYw}0Qb02oMAqh%X_8k~t%4+0>l7#ONM5^LPV^QK&0|%o z8A<}P7t({v2u7!6D8Z3c@6Fv48sO?p(`$crE)e-o7cs_Af6Vf!HSSzm|JC66=ZuUB zd4E^t*AgfP6eemyOZo}P85S08EGCnLfE+3Vp%QAnL7m3}BJVQ9*6!N1o3wMv*T-ay z)-NiySkVwYCUC5wTHns}_V>SAdqV%pmXBk%&EIg-!E@DL`J}|M7K47VnZru988^Mb zxz~p-zgJMktP36=`|v{W-1!(+CYgA~1s@e41rwC0B7MhmIV2SUyz@w4;W=I$fOp*- zw_??r@snRK78AYbyRRx$tibbVmo)DcPl6<=xUcHE@qnH6mkU zsambN4DQ*r!<41V-|bg7l20+$eR=xVE0(QZweq#*W{K5x_;BUeN-nvr;W@ngd^qx2m 
z-F?LyZ6kD)Lb8~6Ve4V4ck7xq7Y~^h35qORyjMb6`HkgT)`h_4J{^5ZeFg<2TK;OShjrGqBk2c84mA51pUVp=wB)#B>m29 z2WUd1n0x2q@jW|s?b^9>%gz(`Gy$OuO-SL9MTEqCzx=Xs!_O;@r1=+(GD)(`yRDa! zvm;_l1&G3nY!g2C@Zy2ZU$mEya)IZO0}a82?BfDFzgotmnj!5tsStaWDnP(6NfMCv z^1_Kr&clREy`cGA0I+g^SF2Pi|LKSCnat*whmSgS>MW%8w1I1Zl_Q(lYxQ;+S+uSA zlOU@N%;p}Z+XngJjW9&cBp zE;>%Bn@?NTH|PD)i%#6TrR(;|oW@z7yuFT(YuLJF#G%=*91ZC|e8Sk0R~N7Q_4tLG zcdSL4zcQqr_1BF%&k0Rl8$+!d{`JP&hT)r6_jUdJ`a4^547v(^rnk$QGxV(kRD6?x zqq>_C7XNs_#z?uzx3Ys84jIvmIkt22k=)wjh8y<1xpc$DYjr+6+VV(;vpse-`=xEK z^%k>MU9S7A>D7)exKzD^@ZG?#&zk)M0z<+AY2FdpU|^@1?A71RTSLFJVqEaHPd;0g z+4j5dUpD>r+3XEB-Okv-^F~Qu^qRQcj*MiHXAPz6cI@0)+A#L}^I9F+hR{G6O=ik&4`(elRp}~;0oIHtN_4g zAOD}=x!cFeev4ABN<^z_1m3x&VDmA2#*~Z^c$UAv!y<;`kljC;z3IaffT=(Wdw9;& zGvhC|x5x0nRD5!ZzcZyPJuMqr{)^*dfXn5gX?n%d@2gjSL?+7(b=)rT1UER(|T}p5oRI z-d}q`6phu#yw{QXWzkn_4&8Pa8@Fa@rJX}xTY(MU@N2DK>i1h{r?q5I!^vNca=zPY z0o!Bv*jDZ(BfdC{GYlrW)#YEB(kbH9hOfT=HGzv7`rZ867v7t_;&_F(Pjop}Z#enG z%|82TwEmF`FE`+eNyOLF5*p0xZu_eLH|LGC+{$$`Xk&fqJ$V=^G0KL+gm7bDvk0)sYiwFzQ#C3iB%dMx+Ub*?pbM#@E_Zty`(MnnoLL<7y4Ja?|n3omzxjT@kfw;s>{!7Z^pkkU57cw9=q2 zww*hF_R^Ujr*tZ-cjfH_qyQ8nM^3ItQ8YtKE+_H4sAQMt=;!P`=3`02+VN_DvG2cLV`r!&L}<piPzb>;wnKeI8aQr#{uHLD%wba7&ScvP4F8bp`XVt4;1 zi&n7Ubh=_=V^=KszE;iZ<0rrV!{VPf#fAoWuXqf=sFn;OD-9>fCqEyIk)5YBX_WkA zgB^@&XteMn^hAr3OW!|d`tZ)JI(F>RwMG3JZD$>G^3X?>D}mXP@7=ocZO{6x+IQ;G zzFo@}-AB&d@6a-6v6&2%3m4=#xr}|EO&!*$HKfp`MZFsBK0Lvpo#&Aw9Fz?Fc}m0a zoW<@63J7$gxnoca%NI_ubyqyh(^tuHW~TZFK2U@N4KYjjCq8Na_78e3_*}>kKKo|V`3#Z}cz27Iouso{k7Z_Coz|moFS=N_Tl-i}@x}v% z`nLw{eDL8p0>_b=fi_^kz(|H}@kj?MXpmI|5$W%LW8KHIzy4y*{5hX}x!}jWH*$+c zhsF5&nf(3zJV#)lAIrM!tN;b`#{5%|CC+8b&B^w_>f}X~5C}`bWrfgOG_Ca+A&YKD zuH}AGEP79mAaM>$c8Ji}x~+kIF36hXG`4?*8SLcN%lfhbUQDx$q#a z%?X0l`!oFHc33UI;fezlZ~hhJSR9<^A|D=sAP6zVq8BchSGsh`0YgTvS+^c45+uGt zUrz-_%Iqc0^&{#mm5er{c`z2QoRgA)*KS-ylD{*g>QRY-&{NKxx0i0)wt6s=dGpqt zq@Ja?wvhrFW0d{l|a&) zLY71D(BMO503u^^v#wlshP7odkp zo;F4~MopOX_D?H69#hv(qf(euC3sy6KuNt&7|Y9zfV@EEoO9CUI=%Vb<^u=z?mcwy z$ei9`CJHFoZAsO2`tsnBgY$a@dnnm+xfkf%8+JcLWUZRIqv!v;ZU2Eidk-Hx`ax3{ zhkgN)EH+~G;q9y5YV8k{ED0PQT&mm5wTBMv-FtBVp{1|4t?2JWl>AWgDEngGSLFi% zIW{WNV*1Z0?t%RWSG?D+b|~<#@)MiO)EM*S>J>AZhUhT2BgfG0qiuWl?As5=uJz-} zr`vp#`~-kxw_0o~R{Y$&O{bl^_93^le?-Or_?=Lz?rhSY`I+V@nW7;?(Q(1zRm%o} z-agcrj-$oLPf7`=XpL6q@imAuI?JXvKix{SU3&IbPWnAd*5xl-g{m=;)EEsK$PB3x z&c5I7Bjen&&?Yl7@0{N9QTsqA{H)cgl|{ZdqtD(j{lo1{=N0Es3b|Pq=QIzZR4Q7} zPQLU8t(q2+PHg?=i*wCaTyR;EZtj>jw6U=1{kiLNYkhtEMs5bW`}{7&O}Id4 z3|bmTi%2hS+>_~NJhE#O!>G|}E1eGRn|%4)!5jXgR^1crrzTgWvQJ-T)eKrvrBj^5 z5{%BEWsuH*s)|$_j9SV|(4Z8!QZp{xOV?DaSebRXgmUq5am8Bo_;BNytnBo=85wuB zPpcM_@!7DC4euq~$Vg4KxDJgxJGl2RIvu6fGY!j@NjS3q4oUgm)cR1kfz4m+U4INv z=5uW>(DSP$DZc!W9&cB;G~>O4c4g@#*p+$iS|0$+z}Q|N4hq@*{rYp6b`^nxxHmcR=&e7>RR{pd*A*aIdg(E|L{dV#0%Vj42`fjl$ zqqZ~{WBh37PZ>rHa^4D&@5fYx4P;W`WpoG8ZI2u9c@J{k+(o++Qgdp)dFWMn`sjI= zT0w z%xyP!oAY(4OQu3I=UnTH_+~U1`eG&%^-DEeTbP6GE-OJFBFVJ76i)GO! 
zVoG25Lo&V(c=o(QCc@pdJ3C8P5C}$vXX44{42PDgGl8TDNXmQV!w@-_i{&szuTvHT zqweUk+0jr3`W0skCOt!PwoE5(^4F7a7fE#4kW~!82#V1fv^eKtc?|fE&P%~*wY!uR z%#fJ(CAR<_w z#wN99qlT2(Tni+xlm}xB0eXrj4aIuD{&}%o{l;&$oA0k@{+HpK&-H%-=wtBtJm+&k zgU5lR!0Hmc&V&jXaFS9(RTvHV4};O9SK~5zi|oA&(In?`)||3t+45ySE?co;Obv&_ zAsa%g4w&`v$R5MLTD@ZBn#CVa>Q+I^a(*TwpO&0tb1|Wz!PhRHCA4N*V>BM0-L_`W zAN(tnL}UAqq9ju0&>kPGU%mXtWh+;H`|jw*Q6R%YYjvOe<+>FsR;~VFQu|U>!cAET zm+q-3LuhEo$s_yyLt}89*8e)OZu7C*LMxO6Mn_LotJWA)+|64_LTE^Ec3N_d&8B12 zZ1J|^KKyPaWO&8e^>4QiR0FffiC3IGzTLEX`SKr^EPbzifRiOiI*?H%UcSLXT!$qy zB`eEhGJ{sZ2RG|dDf{@5i;0;ciAF+-M~2=xdN|D-6d3I9f9U9eu<&B{>!;;R_+&QHk!$>HK;okkz5J~XUO!?yk3 zIY?G&+qqG=S`DJnfB*wQ{J!j6h)5E+6cike_D#|nK}!eINl8$aNS(Rvz{ZwOdpBy@ zym`ys<7RKV8d#}a>xMDu3x{-W)~rR3H&&c;8$)zDt#_~!WC|Y|5d%?ay%{tHZ&IK% zFboqE6W`zHTd)MCw!RRar`(}*n^TE37x`*}|dKy3z zkgaKp2G#2HZ9=HAKrD3f!H2&Ekj)^#aRR;LQUu0-Rc44hA6=_z;F%pq)fKB$j8`XL zODkWaE+&YWJuCC{zJo$om^wS{%8`qidXw2td7TbKsYFG?8TO__sk&jKK(<=GmQIBIDfXhm3O{eE1Il8O1UtOIRM` z+!xpXF#r(-73{55VvmJWv{P(bL@aYLxv3=F=oPR zU+>S=QfM67Xg0|%Cy$$hLyRPsk$E*ELtym&MxE?%NRh>&h_s{>f%oniejjLO6oW>{ z*zta^ev`i5OVu9St(ZF{ho?isgJ{9Ux;aV4?%hcug2K(f!%nAMA}*G@b}9MMR|8P@ zE~g{g>I#SmB^^$|92jhtTJ?49am`qww2^07r<;r~6>YtC zIn|p?A%t(Hj1PquXci#?s~m2g=aIzn;r|;7AEOBxo~6fyfQ``@6``TQlq5*7YoC0; zk2FIN)B^&=DJJK{rr#o8n%u2<)27W@G;7whRxzCj5CI=WHqiUC4ea&l7g zt+RJr#<&Ju8rP~>yIOp70Dtw+QNBp+Ds}4DsNJq}S$^}L(~y59Auw=B3MB;=x?oMc zmYD1=R=-p2T8*1kj&`SAy?mR9tz5NkvnEYjw5?u@%1TJI@S=*O^X*!PkrYFb5}R}5 zNeOw}2#z}S8+GQ-1(cioQP(Tqa_&b#Qm34HR{zW79ANCStMMSbMAB| z6;Z50jP=T`V%4if29RlI5AHp3#ZF;a_cHK+qVeU*m5R{Z+`i_(9Se`439j(a)&3Fq zz2j$-IZy~R;Z<5SDjQKGI>M~Fduqp?GwHU>^ejPNta8QpqD8}~+`Fl_FI-45m#k2x zbm@}e zWr{@=EfHWlvTDQG95-eR^>-y6znX;;IEK@D6Jxn{`ep`i&(0-E)uCfFdeemG% zR7|f`GXR>R)W!g#kZ~v7CSmN2i^&l+8}iZYn| z9~Wex4Mq|e=-G+}IN6~b2 z(U|fTE0-x({@}iS5|{@Cx*duf%Yz+)VIt?`zL<{u^XRG=4qJjY8IMjHZ(#aHxqUQ1*D+3oBjBSNlTze&>!a#TQg+~fSkhlnju z)*mA>mQ|wQ++2Y7KUQR}T&#ErIjVlEx>35{S?_}3gfs2xp*?5Q+#=?Am+OJRI=s%S zFbwQx#xJZ&*AX3a7L8bVUaO%noP-XdJWRBKH^(bZtq4lP&<_%UBsg5Hr3s{v@otym zu@Bvmq!|XHDlwIfV=*w&X^O-ImPHyON{T`L8a&Mr8j|2vb z$cD_bkdUX87^&7kCRDu3C6QVNEPtgXyEzr5Wk^LeM4O94Tmm@=&hHul?TafD6iJ{m zNSw>XE6czzO0A=C^!gmGDU4*4nwA0CexdtG_~64|!0)U-2GL+NnfeTPIXE=L-!Jf$ z!Tou+JHC8{u048x_}<%KH=~6vDiuifyp^m^2M9#y&a7vBIZ{AD-XIC|oZu0G4}>iA zD7i?DuEj0ejwV<`8VsnCyWyAfF0b@@5+t{cj2}2@T$%Jm-~V*rs#v;VbXZszGpJq0KM6&YBrei`S%t%=4;tp|;%dgHy>OYXY#|A=!K zg_54{{!R)XdlnxHqN~93u_`vUBoBGG@Dr$<$zAjlXlyRxnw-?9I?>$y-e zkg+kkV&%$}BTc~cm7VL1qM{>G9eA4hLJcUzFje{j+bNWLcDC zUDZho$ELs1er`%c>E`{$cQkCA^2QFA>7Q7S{@nSPXy0i7CZq9cNCB#+ppRE$vOZWX z-~z_T`0pRIq#}6{=YH=X;FEQ-X?O2jP3J@zef>W=B!U+)s3P|x;f3X3Z348!=P+I^ zNj&dHeG9a0-{Cv{_5n0wHtImn%RgQX%}W%7_MN+R?b+wX^=onQWnxQ}+PwZ3utH0g zD&4kIr{Lhwd&$W-L6nX!qgJago<9eq_T<(JX$ekev9^;(jcOQr?PeB5gXtl;bCThl zQwf~Q>2fQ^EgH8|hQ)Z670}2q%evtgK_(~y144IlDme3UBkyM2g32>;3~7Yd?lGWK z12dnJ?ZUvcVce+~5^rau-nw@2MyeyK<=C2;i+(t2*N_&Qf85I<9%!h~!^3A5XEf0?!F59qZuTBZ;GEr_BR6CELl zQc`m2ecJ};8Fa+AKKSt5=~Heu3*nZWoZUON0sD6D-oxQ=Lafhbe*moAx_u}AfPi(Y zR>HaGbWFf6TXm@(BB=;h;?DKkf4i0; zVCG648qqf_B^y-@p5 zt&eu@u{gy)B%*4Ep-rMBDeL-?-P^ZZ$P9?9S0hM8hZl{ADHg`aIVV;w*>lq&srX!~ z>c}xIi-0a5rX1db4;aEv@AG}CmJW$))_-W1oE5KpeN?4mUN}!7+Qd|~s+8ZbWt&c? 
zrx-Pbhy*0bUlb<5$9p{Qk0EhUWDD|mwWJa;7534ci?;jka((z4fT!pp1V${Z)0AhH zDXHZHo(mtlT)@ZwPOJ9ezXt*MI4b;cJ|-!8=B$rCc>9g?)D(|*gK`xrbnMpS;NHC% z8R;$CwEOyt&m>uF-nyO9U|hRu#Z&oMblJ2Wm+zT)b!hErzi9%so@;=mRenVq^&Hqb zls|d&w8g)4h0@y09Y3wyc^2#R#V6eY?j6~8I@#t3YV%5aY021LYxG}k`bsmq;qYCO zkv2uvereh({yWCLw>m@CkX}BnhHY5*#nwwKL*Y1ui;f1fPEQf0^_;$`+Ba#_uQ#ss zS#o@r3Ga6=c3{OWE~;F`X#2{~J~)IH#yDhlW_zpMU>N0h&9NKI4udNSv=&?qu-}jZFRdk1bJ#8pgDTF_@{M+yDRG2WO zf4!hv2M#8rSxj}iHy5|R_wmo?l2z4S`*BvK{hzJAt7iO4*KJyf-t@|#A2W1>p=i}U zgN6p2_~4DTxBr0`kdN>&&8SI|{5#{-Xn%6F^cemil}E??3L*&5`()qoJPxQyiF`D$ z@Ekt)P}qQ4Eu&UzG#a&9Z7>+S^z2isZv7@LS`8jPQg1MLF8X6gqU2XJM04)9Bb-ig z4ul{}BI#eWR;6OPE9*b{e&w1K-_85=wxLvwn#BSrj^njL>b}jttX#Ev#aGi-+gtRg z7nyc=&id3kLp#UiIz+8Ls!TEF{GN?B?+^6K0#6rjHth8|3l@C)?e~j59abkuFR@Pc z{uD1+lJKa;?c3W{je7sPZ@&6*^-;cb=bAAtUO?K1Y&IB;ByQ(;4)_IKt|9};8ykN5 zdHJfJSAIEZHP>rM(;$}{6lpY2s|hSyqe|?JWs^SoZtk2l2Qy9O>Xi<4xp`X1I(u-( z$`vb?eKUWTqhjSaEybDZ_3w>see>Jpt5>dEx%$@(A`nM4i{qHh?o3Kc*XneIg3BHU zHhz?S+{TA1iPqN>$ZPjAM^IysEPZx zRQaY#=b!lit>+FR&zefLeCegu;*PgJ+?i`4IZI5p4@Q(r`{mm&&s6?kQKQ7^vsWkN zaJVxgeBzN!kslO$Wk>No-!AKOen9g@+>qD5YUTcF$Q!#c6JmItwHIwUc6f{8H;-;Q zoy5~LEoPoSbRuTz-tp3#{omSJ{hQR+t~IRoMa3nj-z|OR$W5o3QtLy)47TlGjF`QP z)zhMjE7|R}P7#UwR{wN3T}N=ae%xPI;)OOg8EVu6msU%&f#QjMuyLk_Q8{PWp#+!fH_tEG+4y!_fXq2tcX zRt;*`Y<6_p0?-Cxx4oTSz>sF5aYnzc%_~pUzmH>QB2eBfHHSRVjPJ zxARU{dVf)agekMvUeDz*TXMNscfOSRSDAiZ7tw6?(lPwJS9ezXuuszTVc%RO3^ckU z@NrD*SBKW1wk(~u?TTA@e=M@OwPqfkAiVLCmya9Om=pc^wu!cPCT(>GYSG?AtUW6& zDJ54V3DISX?l5gYdG^4H@3!BRNZy9{xWmh zlMa|Pq%1Sd-dflBO258~Gxa1BUZK~(!NsnBJ!$FX!pO&9At4WwU>qw0)-OUdKVM)N zPpJ5)N-c)=>@lEQ_dz{7cB&f_M%yu50$s^>{JDdWXi{cgzyZlV`|)tx$1?e}!`mWc zw7kT7o)3z}{112q^1+8c0h8H(-u#8j*Kb+Ae$(9V79v*x$X@^h2b?^sa*RBJ?DuhPC_v@B&M z-8J?a+<`l|@hq*T2*=5Jo2{*1dbMqA^1+=K6yD7boFFOWRtNzt9`yzzAiBWi^Lx)3 zS~Lm0v}NPAojZ5$|LxrE93W#vNI0HrwW_sxqu%K8_@ENpV$H5yi}3Euq`Q9oM|807 z+IWsuW};yj=ddLvWCm0(O`hJedHb%NyAPedmTs3GzNG?CYA$cvlih6ifRL1=G)s=9 zZqw!@$0J)!7NH`w8hvhdmS=%;p#$0)9O^^arVpWjGcxE6Booti^5h=X0|G-M!oov? zN>r=gu~)-t!A=(kM$!W)*7Sc4B5TWaxc`$y`zwGJD@dZ7bD^f25Ac=a+=2k+MWO7L zKK%CpO;Hqt*C&p9ZT#3bCQq0#Y5d3U%|s4B{(KV!F->S_%a21^ESKL}nQ-d(k)y{? 
z?p(8AP^Gl9zb;#{IkU|-$1YsBwDqG#cYpqN+0Lsjl4giIznm#LbJ^D8=MwhLsl0v4 z`==oZ0vBv4Yk$~6$HkmmyXXd?CG+KlV6+CkmdPKaqIG7I4jIt2K?5g*1jUg(w=3DlG?94ap~NdGZ&66f3JUKJ%=$mJ=!HtxjCWMn+zIKt*0)3 zIDS>&v|lcsJ9+r<;bv)TGfgH+B}sxQ(63xr=&g%ql8C~&Ub-qDPFD{VbIX`YBD7{51^Gc)072yPxZjdR)@f=q;q4aE`X85WK5x^3R3dpdrc*YKg>L7v z+IYx=3?h&c91aexwWc&iGz1N$k$5-j;#4#g1e6vY#HXyHG+I({u>vr!OzI6@?SXSS zF^X_I98OJGweDlbMID*++PW;g<{?Gm#Q;6Z=R01F_C3aNR9FvT6awHX899f9hr|oK zXtif#sSS1d4V^Y^^u)=dlw;($32%JA@SP#mO={%81>$(UPCgqX-a(dbJ^72(zi!*I zWyi1E-|rk`pg2KtWhId)omZ<>cl@)bIb!_nWtD-MnSnj(tB*sS{!l9hvH; zZ?4(A88Wl=*9|`nDV=WPy{QypU^&@ow{Bi?b1d2#lO*Bf_N^<~nfDYIr9SvjPymt| z=ov&d`h-7_s>ZpS^VG^u;MN-qh;#-Z)PK(mF{>Vy)I(D?+Z- zW_L?ylO0UrownRu6yoG140*ao5eN$7?KTL8>m7&)RlF@T!)lKi+P^{Kitl$5py4Yi zD*&}X7ThfB&Ld+bjF8;uE>H@|m22Zv$kzx?h|XM;sFDRQ87o&aRQk3Zhvk+p4XjZk zTogfF;ACVFDEYNI1e`?fGcev|bMR=lHn|;=FFfbGfg?&7v$J-Sy z&3Nyi9V|^?C{UKrX8R9mBs^;`(c_&y=AGYfIG^cIAuCEI2}psnTbw*nYA_m;DVcjBnYrAwUp!vZ}v*JlC}fCtlV=YyYle)eYNZ7|G4i;xv4v6$E_UwOV#1J zkB0n|VbTziB(e@@5a^ybNvpLwhJa@00@E7oQk)V{^re1SsInGSj{%=TH||XeYr0^w#D%7RWuj3{IvU;UEti_ znQNp*PjLR-=X_gcbL+9|BxE0BV7coli3uy!vgeTCLvPI5o)%rNN4Ewgk~YuTq3iN$ zW9^Z3YqlrkFce8pw9Mv|AHQj+{k0KaUEvA2)jQi;pXoPxyUT=9FNC<2lA(isDGE-9 z4Zb2>6%GJYj3PlaT0Ken>|-DPX*|t7z7Nz^6%*O*cEdMlvYv1S=qqqGMS0$={(WKr zOVq#Dx67MdpD<(5*$fwQgAb;&+g|G33mXQRx$8?_E<5p1sJ{kC&l}@80pzuaY+K|+ z&+|sq`@{2xLJlHW(h~j$ZjzrT&~tpw>yP1)D>kx6loc|dqNDlMXhH;??|=``6X6Nv zE0ip=mbA(<_U=BofA79MJ0?_4w>VVB@ESwDm^Y@+gf&MG9z3*l!Hn)@Nh|B`=O?D# zPRz&>j7I&b6DMiCKczJqfBT?C<@R&5WlMU-5>0b07AFS?o+--lz;Zc(}^rkKvH{m?+J}6row6cmRIZ6AF?)}TuHFsQM1)) zbSBK5cqJj%XwWC!yqRUS8#R>;y?d*kYgHfNJa_??Nwx8J_jKj>d6r012G)uIX1oZymF_B3Jciy0N zTD@AmZSUUb=;DehQ8cA#bg{7+ASOhvua+=l5$`t$Ll-6BeIxo5J;a9a_ZzNkO~h!Xk@9 zR9bH~8#F-aN=mYdVVL0H0BBKY&;~wKer6p}ZE&v&xf`Z-u3x=wt*UiKEK7At7|+Fw z8b^;SRk>!}Mz5|@_nOixgqMiW&|tU&L_?cGVkVPOr;jl(L3M|{_T{>5yXOunT|_6i zc`-aJf~0A53zQj}kwYVjDrG5@kkBL%1bEV(BIUp=ufh>r)EhGK6oqPC*2u()n<%8$a4FFx*=_ijT2Ze@-B3e71 z7Zt^oaY~cC=lhw1TD5K2pmzVw{;z!cR_|i|B+q;IW+nXxr=^ujOSozS~pD4|s|1diFAPS31v_yel+%x^$- zi;|Kg{s~$SB1c(q=}OYkWco(v5QBm_6d@e+pJOG;7)h8jp*a5|;fQpLo?guA#ht#K%>tRjw4{xOMSHnkghI zrgWLOxYDKL>$Rw0x_$F5hh?1DbvU3_m-x6+ab+5}D&bte|BS|)R3QYKf9it|KKSro z0nxtG09?f(azLCcUQ4{1m7V=NmIvo%f$zLu{&~o9Kv40J3RNN*UK>^{s%Ug%^iz%! 
zr6X0z=g%ct+@cE6Cq}8+gzXoYmhIbADjQd(OkAm;+*3zW$%vxmi&_)2tG8-aCfbyJ ze&5c$=dCoJo}8(UELW>;z3Op6cegIvg|_*oAQ@Q_4Moc6QxE-i$4wHHu2`eKUFw#O zFIzf7n{sONug6ky)6#OVzzU5T)GS{i)?mML@5ZS!sbN*?*RD~$T50pSg>!b~;9!%3 z5(HXBvAdf1#!Va7uU)B3g(^k)BTHAE z$a3L<#fyrG2QH+cwR^ZGFx=msaP(>_Z_lx7tF>%XvrH6z_w=4!dvB5@YBg=%vPq4y zab+vT1>fDW;FojRBFQJ5ODWm1bG=Fx%0=S`zW(eGYhViLN2RV+56CS7_oOx{ zlN1{L@P2zbWJwZ)C#K+o4}S%cEc=-Z@M*Q#!8GuO`tY9wriu&?Nw}6s(=;-#6f+Tp z{v~+!ldwpVjF#O9?B;C+di^nc6f0grRjfsqW-+?o@$mtMqa*p^uC2e_bBSomu{T(S zR+PIv-c&KThF1H9Rp~mSW6r|iKb%J{ZNRE#7-hdPNpQQ}ynu#>30k8DqcpG3kOYTo zbPR#WnPZ)t$7=^p(#QubjvULlIS%>7!r%rpO=5z}^Pv*{EIZ?G2S2G361!Xb;M|H zxB>{xN^$u3*oOiLA0Pjp;6?B;8n0HSSWBWIIAUVxk4og_J?EZ7JyUtL37l zq6OZ~b7+TZ1dyWyrMT7b;m-k@4RX@`;wtXJ)T{eYf}YbIvr_E7`CL8}K=}Ch{{%09 zj~DI5Bo80UqQDk-K2{YkR*d8POE;_&W8g&@$9Zeom2(#ovqbMQULXDheCwS6K0fy0 z`QmAOEXlH)Wj!wE9|uX03_~l@+=s#eL`lziKG&i>xS;Scq+a0HAA^uB8P~Ypz?W+p z(o#W$VzP?VX@#sayVh(wm-*0&CCKG}uxX2|-67HjEp%IN$lrpGj{!bD_Tl;BiF^!X zOyYRQ_HF$A&1yA+_KfiYZRX>4yKmmUyKBz@XtzE}eijJMT&t+|H!$Ase;1O-gR-Qe z|BN3BM4Qi*kBN32`{OFFvp}gZ_{$6lYgDI8=!LK6Z2s-iEnrr7I3`q=qDNeke2_oloKuX@E zFq%U955t2LSV+k&VkC0Ag*qkgb~)^xTXG_X6J-9~N)h1Z##S>vZ6$6#d>i@8yk9Wn z+RJ6XA6Lc*3dNOpmlJ?OISC>_5hTdJ!-+0q-JC=qpI!gr)BJm|`}}44P#^)+`?$YM z=<;yhpnjcz0RJBrEjx7d*ojkTPnvGFQH3U;d-Q`3Lk)t?F=V@+78POrRK zF?HFoEaZIgnelx6WzJu`SDDWx%c3L{^!Z#Md!_jDLwdel{`~ZJ4%+oJCJIbsnTDO( zHVnZWJTzqQm6GJ|?{7J_=-Z7KY;F~BvLJzS`mA*%@eZ;?i~c=3*D9_-lzx8Y#QA$N z?Shz_s%iV_{6VqTR!o_(Dchj_mLyQ?Umn;mDA3<#vx6}63T+wfRHoHx zt|VMv^UEga)z8Yu(8PI%#mahzzj0EnHRzu~C_N(t7TFQ}{lRsTAjSHxZ)=x0_`Q|4 ztA2QX?5%F?=ck#qPoEF^ND8FJ1jgs1R)z=2I(4y_#-PS{H(jb}-wrj2V@v1kNzyS- zNZ*}vr{b*ixm54klg=_h0j(!~S8{XfS;x(~zv@c?lzNd0jq5h@)pBQN4_|RT;Mt61 z&qI8k>zyzO@}e-EXRVWW4o-mv)B)H%Xrn}eqJ`9JyS|9vT%&GJD zV=?j3IxUXs?0&g8drnSHHvCx}914}du(W~2Xt!kNWM^Bfc4$2U?~blfryOx(<-4N? 
z5A6Hdo}g;2O8Zl+rSg=`t9sm-yCFB&fJ@4BG6~4Q1`*L=u{v0^o3IF`YOW^)Qnr_F z*`ajiub)hQWyru@-=3@2xnGn7mUpxF_s^|&>bvbue?1||kc_cNboKa<3kxO=95i6& zl50%anq^|u0_$>EE#CBPE)lsk72(fiRZ3y8*wD);Q4<(cqk7!kMUw^&8aU(IqkK%= z%B9KNu%^BGSERPhdu_Q+ZqlOM2K78m}OP7A#gIH86g%j>4 zzlQU{hk^|BV$fjF8I1;`!Qf{yMTCcjh6ZaiY6vtL4Lr|#FZ_Lg0p-ZDM332Z`}Wm@ z1hCI8A6ox*+WTB6Jvt{17gC=1SScjA;@2Y zA&K-fKnl?wCro_l)R*IblXMi05$1^4Q0?#KJQyL%@`l7u5iPpi%TB&eR}z8=)J|Ce&nqM*S* zdZ5zbjIRCcz5DiU+qG}cny+7PTRb->%TS@?TLX&9<6N zx~Nj6{BK{`d+y|g^z^j!948M-dvr|EtYh0RvH~Z#-JACwi!By|w30#!@%cy|AAI;{ zA%m?fgRlilAtfVY@@q54Pkv*;!XKleBD-~I9}p10ah%79cq%|*xN|J6zq|WC_Raz@ zilgo0x4!3bcOfAmxVyU-+R~P~d*8aeb=S8>DMhMKXesVaa19UwarfkMe(U?q>|H{j z)alnG-5<*(H@EZb?CfR!^Nen|=hS_Xsonba?cKBggzMkjV$oAF>$Y3!6dIbG3@GqTp)sUab*0ITz9mE(?lB;mp#<<_ziHB`xvR`2jb7 z^2CrB9U^e9rg|kE_4W031{U9##0=~|z+z!>-exl3Jm>J;d0iv86JSQ>Jq~N5H>knpfg1z?J0QhPPPe1c zn}!A@4}gouS{qNFEhqZ*?#KL7f{Y??K?vX_!6G7`tX0rj<1}-=X zDrr(!L=1+*p^wU?yfQCKxFK--L({CUOzJgq^vKay-~Za$9RKc9W=`$FExT{p(4oU; zKK)&m=pU)qQ6L^L1WZX{RD2TnvMZ*1ebV5YtD$K+EF?&hVTU)0f)IZ{BR1gf8~eC6 zK6m|?VPi)P9sbCQI=7@@ndpa~3VOC%|KVdMKEFHkwx_QR;UzpWJOoq`ym2+M$!Int zB$~C>3TnhtAFexg^1!Ea;|&@Q8xSErYrn0)fB;dgHg@B^v@fPdIPcl~VAGYd72()aViD?KvTx^g8rKnQM3(Umhj>6SN556!qYa`bMDNb#*o6ulFl)Y9sFY`slNx11A0NyimUX zvpc&x-Bo4wsL79QDRsRX#K`I$HTiE3A508wN{-Lr3h4lgikztktZD@~z{QD3&K4#dEn9K$9%gljh zZRwu&0Y-ksIpXZH7#E@}LLm06#PUdSRu;<(Z}cb99g z8%f}#PD^5lDIz+7sy}}$=e*$OZ_>jcz6zr$hmhV4U5fK#ybm@|~q9mI2Iz!+4dON@S;O4Z%TaNa< z`?|hS1`Xe8i%Phe;>BrGQBnDSMkxps$rB`T-~L1A&gD*>GCnjo*y}+1&bkDF6*6Xz zjXJe%W3j)9ROAwt-vUdgO_+1bEYnvL6KF~oal`sfxBlnuiJeKtV`~by;U2#a$Mg&) z@Z?8}?%2Kh24?U5&u>VYFg@Ah;UvP(+$AZX?x(N6EGL`0ph1!61tdi&0D-&7IANC_ z_y{0LB8vBXX5$l0_jJOw36np_9`wWu{oO9UCDkDPUL!2Sj=kgd0fi^F*mxwM;=_@F zenEa(t>0Z+W-s`DOhxX8b9=wCwPD?ZDR}gtdlqDAiQqvaq8l?`>4K0^)8-MQ=Zxu= zM0rauU`+!OQ~3Rh@{IfYXWbT!>P@DgjA>7Ne=2g)lMl{_JoQMQ zXdOkzJYN6cyk{pR1Ry+PsSkbpRpX7MS!ed2`laEKPoIc$xFrPjxEV{6Ej%I$+?sL& z|KFe(TzOCi4-*cCYON3*43NQ*0cK0*``4`a;rs8G{qW`(eZ4j)7U(1RQ@}|QR7z0X z?FN-FjlbEiMc?30B~#>|WetL9gvP_4KmFL|PH%2sHUH(C2bwurs{_?kyVFI$3NMHP zGuNA|ia{l;+2qfGAHwrsS3XK;J;y(P=*3;eS+9J%@vHv~4KZR~-6d!(cxu2F9GMNI z7sdqg-6}5p9iyB_KU=c#T$vmho_G9!5SN-jkO-&?C@W3EFE#vCX%>-C*JnJB#%Qf26XEdD;zj_#6UNXiIN0{ z?smi5-|abh;@HtsIXm8(8tsQ(*mNG0AfWKDWQCP-z?>$)Ln*~|o z8Eucb?=Qz-?_|L@nubv!1c`rxRTnAoI*=smkPGmL~`TBE<7#~10Tp3g1NdeVqEvupSr?>Dm|O_A*iMDP07J%d>2Kqb)Q# z9AO#WXz~joP>Vgku#qqZhll#1k|5GHx0U5tthl&1EH04{IgS-Hsfn>nNkO^KPCEp{ zuh>vh6_tOM{KDdAo`3D3$DV!snU}V0->s#|U;BE4@#381n^S_REs7p zR;w}{6Cp70LlV8iT{JieLP0P}=*r7Wqv8^A8Q-vYL2>`_GyP4*@#6=$cV`~clPv>_ zQG#YpEWUqQ_xQx*PH~Za?*Fm0Ny1z2`n;)(zVq>1KDsQLqY1q3r`BNCLP? zzz7bnd7-TU$FQ^c1;8`MTb1v?nUbt|D_?lznL~X(Il1$jw{9OECYcQ;LFC+Sw=%T` zB?$rng`v+7sng>i6nZrTs<^}81B0H9kBsnk-u@$i$AzHErUE~A0NW~{YDB60#oQ(E-l<-HSwWf{ZB#4def5>mP(#fHoFYOAfjqAaHY z5AWJ{B&d`|Mg;ro9cPc8G9>hiOH2$4?=!x)-|pS}aRPo3yrwYVN5d`y&RUmWS}!N} z9vTtesYmB{(NL$maQ(V$gRlBX2QC4%jg>y96EB=u<*L3I;85ru4A!-rbk7Dg-7)t(*@bI=cv|a zsvSbM*M+xLR4#?UaihUtG#bDwNs)@v;rr+^U=*3Z>W4EClV^`GYMdUH1Ms3K+iX^g zLx_%#B?RE=p<2CBi%XmUTt6UaT9XkMMUHWah5@}3&z;D^2!bl!v!%4}eNPS?7GXN@ zY{l*2%$Ba1A`O-O!P&wfk7z*O5$^8vkJbyt!L-X=^5R-^%!#1y`S!GYCU}y zNV3r{P%+`BCvcQ^IflRS{jKGXr-NcRsrSfv`DU&1cMONqfO3`N`3o-iYF{(UoIG)@=O+%1nF z7-e#9dG6I6VKbk3^3g{gdFa9Wuj#30M2*zc)L7UUcE|J2Jve7DUA%AmzC6(^oZY?o zj5}fC9nZdS?@-6a^@p1*0tR&uh_SmmzpP0FIx0)3Hf+$`ryhOuu_x}nY5-QSc3ZZ! 
z`q=i32O3jm-22p%kKa3^e|!X+x%~T*z_B+y@brs!^*sOa;$x&n=?I1o++JN=ZHJ~X zLaZw}vF~VY`kd#Ve`rQ*{ka22Pu9^f{ij`j?_-ZV{`8BpLXRxln9GWyt)j5n3BS0& zF_xLe^3q0*L~ONZcketOKl_E}AHF7CDBZv3NRz*OZtcn~h1y{^JpBC2GlP#U`f6tr zPdhiy`@FvA!%sc+=(?05PTAR3I;SAFrnub9yv zeROlDd<5KIGalJ^N*J z>OAqa+tT(gUspoYxJE4a^1Ho(10NV|KCtDO#n5_%L*R45fo2ye0tz%7x)8u?B?<7M zSg>t(Y`a zY2d_Bj}+Hyc#pKC=s;SMp;zpSsH6ifT%^1MfA=_k?o@VQ*9C9{!D`GJ5E%$}s@ScDT2{Gsb zgM+ZL;&NER{u3zsJIM}Zlo4DqYD3#z4+Ol#Cnh5))8|e}HuB;xHY)rkWqYOIOLJlKf%kBLm+$owe*Tg77XDe(^*~hG zCYN0g1QcID+$K&K>u)wG<3MB}EF?wE|9lBJA9_T}N#J|fuTM{dLBDt3A@ChmMx0y@ zSr$E3*MP@YJUCDgcp!Y9+T2YGpIe@#i|ajp?!DJ{HA))Wsb%xO*^*Vsieak-Xq!Y(>tUXr7 zDGOzRlCSeEpIj|2dE?8&sgM3NxAM{Zm)FrGI`BW=-q(%eWn5~^-@oGH&kxyQkp?V8 z47zS=ihOYUx|0=r^HN(PW6_Ry@%6b~OyKUzzI}0Tj}ev^KR%)l2=6-ciHFBVI!bn} z{AOKK`lLR@>GeyGR%4>E#|?M)S67eP zd&7fA`#tmS)kz*!#yrKlfBJ6e`W%Nyb(wa{+*!k7vvI(pv|&BgBqb&D9w)>7vKV_oKqUx>;l;JHivS`f8^Zct{lHbWukK%(`$yI-?I?if z{!cy4zmBTMDs7X?>M;P6U<|`T-v@l_Br(u0;NH7$-oA4$)MKyiDterpl;ChWcI`Rv zYkCX-A9F6d$JhM|oC!(?>gJ$o&id-+&>^5H5@GG;>F<0oqVVx&7oV(igL*ki8H_Y0 z^Nfc_gZn)_@71Uu@49J4MWC+rUkeaaR?rZz6m9v_0FulgT0M;mZX1IdOY#$eJxrGy(8fBco;@m zg9JDrdgxdIrvxNGB#h8Cf-1ncJTj%z(jh&kJ#x$7+)p0+EL$}GuHA|L4n5w5iB`Df z2yl-DX4BRU`E6N`5jnO~3@AW-L4OTdk|}dS-_gB1`@cO{11fsH@P7;N%lH>O-DY3; zm%n@$&QJiS!9wC^;R=5|8I9F0RgYELAit!?tpTd6(`v_!9igRZUr;l=Amrrap3BV# zCEl;;@$UsrN}l!Mk`aYZzq<5nBST_vfdfh84H@G{CFvu^OzX7$oyWgD!<#NzowrTH zJL?l~T=;-~$;(+uV}_2qzHiBYZ+kh5HU8N;Zp_&w@9(Mc^Y&@~CVzt- zFW!ktR#}-|CJOAPtsC3${!TzWG?R5tGP2Vb^p^p(Bc2lw;-amg{v{w9RM4=P=C>8A zZL$aZg3{x9XRc%+sx}bHLTlmyxJwe{yVt2 zE*moG>4*E%*6RIVy}rIOxV^1o&$;a`9vN}}N7Ex}OZG4SU~8rSuPmkg+W<-CSVqL? z%dVJxd9J)3&tJR)m2ALM1V%)WZOi)~OBxZ;An%o)b^LN0w94-XJVg&CQy^2@CogvC zWDtW@JyvOJ{7yaoe*`w$3J3)oj+_77fh4moH!mY7LF?&uIXDu?A@6ar;y-o^S6q)# z*nkLtGoIQe}}$Tmtg>64-wXM{~)tATgUZZ)oG~)zj$bu3&rhznS*lUcHl;r?ywCv_aaQf2{Ji zaOK;p7ks%3h2y!P9*ZKgX-iwyV{r3wY^P{J-~`S)fe6--;3T(;mW@&Qy}%kyyOp8F zDs?=UN?SzL<9`WPUXMRtvJ(-l8n{F^5M*X!8`oovCNUwA@}|6d^U z7Hz{MEAWdYNtQ%LnSb1N*8`CyDXr@r*S^&C%rh@%It>&e@^o~U5i=(b^5?9q)a={? 
zhu`05vYz<)7Y-uR2*?m2~avw+#x0v)wUN zDqsJ~%bT6XOP3>PFI3gm6KZ?4O4~ziul|<+ovSO_UX4wfdJ`hEhzz%tkY%x;u%OXm z`IYYw$1=+G{vl*JIJ8&#h_Hs;`zr*R011^mwu-Z-jvhOG@$0z7_e!V^Yp^3qVcb;lsMm)3X$*#wLIuVqD9Nn4c#$;o3=OIJ;iTme& z^zDWsA+B?B7+;@TYVoihH#7whG`L34rj45%ocHzGEJhUID%l7Dl1l|e4<0wscy!*o zUv0>B_$Q@wA{$PgA_vU9ZE)b>_1`U9y=~v&bCp(*%z%M6-WGo7xi`PvcW~eS11E~D zAkFQz|Jdbn!+pDzC0#D}os@BcL@(MU_40s9o*-P-f{MzYL6T*Evk|=79nSVUK11bq zfoP&5LUZ#ANs`2H96av7qQ{`T0tGVMaS=9Kx4R+<-a3#(m&0ncU34@xSe*g_>ngr8 zAxvXM(%s8etX}ow${)VDIjOpd(HKIy&wl3l8z$YpaQ&LqtG;^up0V+=)nf_@#hXgc z=btNWvefU)JfjZ^Bz1ai?(+ZM^`H5U;0W&yCbGyQgm&BuYu2v$;iolgzI^A=vGI1R z#n5HK10SzhwR-KkZ=ai)+BGhmFFJ9$th5T4TEF#pTuQv8!Re!)-}}%@EBVj}S%fQ< z%CyetM~S7U51((eG@QuFZDfQ16V?zs{GK*(EDhOb3(9RwV1PcdQ(9O@}Dt{}WJXX{gin_AjV!JD!VY=Zy!WRKT6nD5)C#%R< z&9mpf+W*jNv#+`S);A9Kd+~{$HWw?AQ3=W25=x(+HtV+kE-8r{d+p3&p|sv)GH8M5 zqBsGZQcemDkHQF2^f>HJ9#b}7hH{MS14iFHjh}z}^c!!!>86KX{%l8{DShmeDShy5 zuibOqbvNAq?wWH*Km<)ZIgwSG&~!A$0N}nPMLkf&3AlqI^oubrv(Ty9uXES z2qLI|;0SQp^*WP2F2P@CEk^r4^v3dihxWcZJ6eZ$I3XrBCiyZO=diW<9b zt1A^16&00#8fEW4r9lOHny`mf3GR3RDU298JnitOuN|^EZ1o3MXV#4vI+zs%#93c= zbnAv%l60Lf$l7<#9vt9rhRb=k)J(iSHpm#*lF-VbL?l71=gD5v$0Su%)!CeGR?tVp zb~0KEPnHpsTBxhHy1g4O;*|FA zx1Secghu4^_8w|TNQf3&HhK2_2M<#6H|mW29vmdEU-HP{-jC!ByK8pO=pggS|IN92 z+VrcYj2!Z5#0N)rJk+_~1KrXl$Y+iHe5}J!%Yh*zK%{1O)`i8l@E(#FI5LsHVWygG=)U!8Ck* z<_8ZyxNG2dXO8^z(VYWm8>u&uC}OeLaOk7MMh95ofz=#EhrtiU(q*#QYu^V*9U~1SQW6D0GWo%cwi=sqn=Cl4 zGn#ZF%L(Dp5qe8QnJ9qb{~{fgH7KDYJoP`^l`(kM3!BX|p1&_cu6FR6pdf!y6hI9C z$YfP@waGu2CUAk{__)M4ysWtJ(vAUe8sqWUolJ0OkjQa@UqC>h#Me2TLO@u}>gR49 znKJex%}sCJ-p@=S&~b-~?v1CsaxBx(Xb+DFlR#<>CUY2pH#zbPEtDxRB-kI7B@J0; zsbLroNHM5eNN^;*gzJqI#&euvNJT|OMdhCc_;47a!GNC(<)yixZ`%Ml162pF@ctrD ztiXxBDBwm3vMO_%W$GKx4Gj;Ah>nSljt&o?VNb2rKcM@l$#Db`+%2tV*Dx->xT318 zAP+SsM8(I`+Jpf;VhYb5b9-3mjsl=F?X`lJC7U?v478Q*vqTL2f2@8N;*;2T3=Angr#=tt);_L zk`rZDX=%9-6&Ds66B-#4kFgeOorQtZnSkTn>aKuOj|pyjQ9(JE-gAJK4o*x+)(Ukc z6=%!ybEVi`U6a%ONpj$Wp2-_3_KeFK)-{u&WbERN5DyTv8e$ zFAuCJLoY(*-wQTr+D)?PhGU{M!1v`CBS@zsS(gkYJ#6KR<$l z0+V`l32>e}2}df!QEM86)Vi`(oC?10;rl0#A3bi&=;0~O{o6{hi0F)1XBB!ysI zIAfsxz`W1*R6Fp5K?C#!8xB`s1d8B(iQQ7fob9JdS%;MkpLyQ{*No{G*l=?5x(!7L zUURZoNFFe2?hUi2_tPDk_rKM7b^6gyz4pL06UUAnKVf|Tkj&TaTvi#NCH}dm`PX3F z>U<}awg}MGpU-#lYAvh`YiXTMtJBip7jV$vm8Jlo)D;8)oTGjfe-EP9%_V7FKzQ$2_e`@azH`xO9gQOxNz$~k5H{Q}lwp-k zD{u;KRfaX+H0QRlsKH2Mpdc4nx5K+u7Xln`N~D zQ-s#2BYn*fAiFunDPVd%rDz8DezD-1f&^+w1O<~Jvu=jv1m!MKLZ^olrdg*;B!Lmw zVJr!Zo6%54Eg{3bwA~(FiAz$nR#`_xP~yXRPAI8WVslOx11DI4^dcHkXQnW9{c3eR z5S1&;W!D1%?k|Dk_uq3zXh>jfT?2UQE3{<|NfH48{y8~$->p~)iiI|;$Dr;jA)qWo zfMX(Qa035Z{%AnK3dh1K17&18rli|dcKP%Zv zDquk=YXc&UlGW+}(s8jdr6KkT?M-65Q2d6edFf!ZG1n8q@-lt?ewrvw|)&2 ziW&*OI=+CP0rX5Zg!Q@l{;O|x$eQnie-G+@5&%_?Rjw?*q{ldh-G1v_KR@%A zUw`j%yOe%6AjfhVvg1+S&IPO=v3oih^-vY~NB1!-!G#aTqU(0HEI%C@Xb4OvjK6-I|iGlvv z5En^samG7V?i@UC@a%1#0Hdkj6HDI_Uhn$+{y-z;Jw;$#(GwrJXH@8^Z{Pm*ln1t3 z{xKxZt?zUDy<`0fw|?=}4rbtE3+@VB_Ti$P=Nd8cLjL{@dc0%@BHCb&2b>ecY}x|+ z<2Ji~HEiJ-ZN7_@uh^_vrJu9eqE1) z0|S)e7Q*p-ad9blW6|@%(os0L4U%Zp(#bjMIz4y%OY)&k&z=c0;<&cwg9~5b-|qdw zK7XLcN-F`vdm_f%bKB66Gb=v);Verc0`GOyQJex!G>S+(!)b6#Q<=SNa&7iWzjcMrLX!mcsywD}ZAO@*=tkTx_oq7xk?)pZ{ zC!a19Bq2E|Vb=61b@dH-1x2S$o#B5~lEHVIcSLt^BZ;*@-!xE zC_{ATUa@k~k>h17iHnRge&Qo{^g-4x`EXMX12r4NNi?*}h*TtZ|Iyr`^Q)mAe^6!C zqEQ29_YVgyzOClmk)8QYgSVj(=yAfdC-3OatX%Z*wj4VfJ?YagUNHZ7_Y14?<&+5% z(*2;ANqpX(@3ZWIJty9L%eB+BIp3^5UR9ZM&XGKM#-O0e{W~n?SUut{J9+qQX(NMp zJ%7>Fq`PhP)|AhBKDt+LAY@$I{oXJBD}DWZp*Dp?cw2Se>7!W{u7nBqdi8kO2}@At z!M)OAgS04GbA0`wG7rxh(#Q2-j+ZBm?P=r%&Rw0gV^5i+MKz>3Dt$ojIK9SQQA2d^ 
z9VHa+U-r{+8&3>-YT?z$SMPqgr^2q(a1^e%9)mIn0bdkQr~%{3vYBnYR06d;NjQTWO&z%bdn@NvFtnP=SUo59P+@7z)c@M zv#Km2G|&t%8TAy38bZ2Hyms!i(HZga@m+^ax_0)2uAzF~Nq4{Mu7~cvW=NOLNpUf~ zuKCYX_YDuX2>}xye({D3#K{4-S?ixV?v{HeXONP20I3&H&&6=K)Gw&m;u_AKa?R^c z%}k;-%ECmTS|CgQeXn}(uGu|e61(;rf9jcfe6nL_lmx zN~cbV2}x5PeD<#XVXR~bPxPZT#?ZvH$n{K5&(A+cV&0}bSw=}buK%=tiSenu2Ti;2mT5ghQP$f+Ad4ItmNDgq>!%Fp zl^oM;+U@s^=}e)F!#MoG=byT5d`4V+QcBA38y~rAYy#%uN&l3wH{5;wgs!pi=>z)p z&PWQuK-Pe#ggdIL>&yW$6s3h4{I`L8`o$Jhw1MAH41?pj+I)NUHQ2izjl&nD@)tlf zD6jG08ef-7yJMsFby{Ok{bQB3#_#u!8=I_OEc{xOMBpEf8#5v|w;(zy!eBIP*tET% zun3g2inDboGVioSKe}gA#z*Pbt-}Iz-YWp|vG zMi(^5GtRhi58OMmKVNpVs8J$FiYwgs^&0<2wm%}jc+-oU`p&O?Hh)CFkHlN&FQ0jA z_LMiyl4%p}dHUYO9gjV+#bu^}1&=d+@+0?6?d2&wR#ML)$*HN9W6M8Vchb!Yo+b;Z z1aKay>#O@WCA=Gd_rAEq8=qMaxBa?@ezF)zziw0Ryyv>Oqw`)`yt!EJaqD|84JlqQ zf7yXT=%-5_D>C!jqa)U&KeSd$P_m5CJs$n$ov^hx-S_TkE~NA1+aH-4bL8E}cg5d( z|ES;-UwwG8=d+J>&;9=G`9Gaw2EDoEHTuJQ-`s}ZzH!^t`L|EFYp2VsOTFQ<&t{!_ zaKZv%${mm19J%%JC%?&;Vn^Qm^aDdIE9ZZ<_KXMRi+n#PU$y2`BChy z+n!j{s717$C*E=OAkCfyZ)`84dGpYx-<`~V{ndAw=j{}M0X^2t_~zKiwat1A`;WhP znSHz8TPH$jkxCjmXYRE@dtRE)PI+i-$k9bBomV|}OE+g(PPxS+(w!&G3EOr3%opo! z+q5%de%$R_{7occ==H$7=h1h^e9qtf^nVf$Kl|{Hb!NT~;TqXY7{Ecz)6=rKCv z@h>MkU;AXmxhl`!r^i5B;lqRmK#2(PhPPE@gy3k5i|W*U+Q`9^h7F!PqTleYL1C1O zB18kJ9)Il6>+eux-cgox3xO zi^~Wc`?U%phNIOd3+!D74>360&CSE0=mt73OBxKtl!~hissK>dO4=T`LwX*;|UYm_RT>sLtB7V&SLrKK#gH zKD?|f$tHt=BuJmpzYVy4HMoSV+^{0|Z5vraiLe24ZkaK%YnL89yY=Xi(Qm}a2@`rI zo8jUe%3^fBqwU|zpF!f>P7mK9Z%swz-wP=EDGGR@020G7MFoBtgruGOKK!X}{6lZf zjIF9^umS8Y4{vR#FKNOO`gTv0>#FO`J$v>|@^@A@*;rKOg_OZVLuAQTTcaIw?RetQ z)>8yU;FiOS_TX3DaqEP5>;BD08nrFMqZNRlQnXH|hv=w_=P3bU2EJVO{!>UanYZkJ zAAJ1Ty!lI3Y&-@>gI%ae_$)vZS`rAW#BuSXuL{rm>f=v7{NVkKwvccf)F7ZWn%0{1 zwAE&Jvv85JUj{|6+Q#}$$tfHh^`s~sORuz zliA~T0+0T06=Y@oYNZmOR4afq|GoPE7Xd2%O{;%eyZvw(5}1(| z5v0>=4F;X}Fq!lm$9Xv42!KD1B(NTr!(mqr7u#Y7F36<>c;EgWB%X0On@zbnZ#@CL z3JeH!dN^MlPZkB%?ev`nf-f2s;RT4XZs1+dUzfqN7fZ4%iJYe;uAAjWN#HJ|Upat` z!%_4~`e6t-k@@xSy^4y;6$PpetJRd2QbEufANg z_{*<8ee2n&Ni}D8tlM~0p77Q;OTSz8#Y9w^C9KT)$99lg*3mD-c?vK?~AIp-I*7c5SU+Q}L>6iC84RBhSHm2*y858^Jo%J5ExiJNJ z`hGf{<<#mGdkS2f22{&{p#J_YW=Zg1F$1Sf7~M6BmQfVT+x+g2N1NQDqt2BWQ8r1L8&ld55;49@}J1IJ_e;>2ixOd-S z@E!g|!3LiREEkY6uu~AQ1^+hRRWM&Nstiz_*YinC%rc5k?okDw)v01yB95hNfcv5lz$P{-|b!lOlO)>^WgqcZKeFew> zZCFZtRFIJXZYLo&oIhRIfJAidZm2$-Yk_VwYKTmZMJlpNY&fY8i0+gSL?VdWX5~1$ z!|rORt*H&0IsfG$=NHXiakPxXLCTd|@jlZlKAsB|ZP2cw$6K~-Y_s!^TlDyvm%8Qr z_cfr$6binuIHfZgja2JZEr1)yCrrF!kZbkgO?eg%5M=OeQ6`P?Zo9X+7a?^fEdi&0 za||n?sKD_cYn8>H3E30S@21&(^%W=foh-GdUiEOV%4JK|94Seh`~Jhdir3`!oZxur zj&JMDIvm6YPOaBl_O{GWxX3dsY&-`c;92vrlV7+kt$63o)8&oMZg;-f^U#y8tSw`P zJhbSh_!HZ5VmsIV@A>tOT0|e!WAv2i(G{y!9k)aenAFG5dU*Y+!&M%E2cELx3*(el z!vc0Q(kR`mwuzpd*nT;z?uzulYN+Zbv zYmXZL>`c>!_dfgyow@kY$e%u5StelefID7_UUS#WCj=9;-VQ0Nu7@>rRgYEL7?;&! z04TtkSLD!0k{ICU|LA}2wOJK+$X9~Hq6zrTCr+N;wsY^V=`jG7heFH38sG~Im;V4o zBDe;uJQ#%`Ye&Vu1Z&%E+oc7q45SqWFJLuI@ zXc;R|HY#&{K&7`O4d8mB2F|~QzaVhwkc$!lFL+^g?qw|iB)B+-(&C6<&;UZ<7_cG< z0`MNdZNT<=*p&>IE&~aY;ox03|EI-@EIQrjEkB-l@XRe~FCGZc6L7^wxY`L!MROX! 
z#lbb;JGk1maz!QMfmM(LvdHr?Mnc~m-hr|JwsJSVWPtM^pw0liv-oLmyf%yY{=E-2 zoM}X%;6ghP5cSIHF*arT&8WM|m%ephB0 z2l5X=;J3l$vbSoPb=aL&OC26RG(%TPIQ_Zr z4%Zt3LOYMW^|~(Rq}yKj&zxzayZCbz*=MS?od)#`vYtDZd8DwWA)kw#(nZS3sdQQ@ z3y$pCCHK0yyJO?JY&V69K-GsnF#m}gkzV@t;erF%Y%FGJFS$5{=;gh-^(bp}GoiA_O8y1@z)bL?(pziOa(v|=v|Ihb#0>A=ZQ77fVfLna(boQC- zbEnUqJDr`Q9PpRpCr%X>mGE%Gbr{7P_cB0p-nqe$X1#*ffQqC9!3k3uP$k!Qn|WQT zV`pY|1E-;sMXp889sls!t?%D8u2bPBH@uZ8Qs9s2qH94rAh{O>dgaY~;pWS=N`!Jj z@0pOxSpYyIzVl&rC>f-ffuYK6DB09(O1YnltRN_ka`}5VZ$DGwVkN~0cxkrjiwJE1 zlxWa)b0QT}t!_ZMG&G=`*qjU{A@**-e)8x0GADX}Dkw>3+Zb`x5XYS9iyLpW+KCNq? zQv*IQD@&7M3USgSV~v?Qn{VI)ZitVaqCkHATawN#c>I|!|}+d-Cao&T^~ zrje^(c;M-0pMCbN`O6QL8afT1FuA|!_~KWec;cy7zuZ+s284r3rwA%#z!c|kYa-)g zLAJZ=ODpR=Bu&CJf?AfA)`kR{SzbWEX&O+Q1GO!@$2C{?C(kZ>|G6js`|MLse35D8 zBn{67&ABV#`}-ex^4S-Ds7Sl{?(u$tj7Nk9vT)f?f&g_=UaR#p`T2t{7Wl)HdH+$9 zhO^ZmOLLz3Dz}MkX=}v@y7JoxXJ2>IjW@o!k&%fDjj-(j6sNex zqJkRtg>L1)3s8>o_PWYCD+@=i|IL{MM{{&d(ic1T}_7ZJCm@zkD8MJ`v>$?TJdi@1o8026Gk0MA*;eE3I!BB&AsYfOqc z1xb|bn7-?<9)9O?3(BhNtsXKwzO%olJnQ(rGMn9MbufaAViyJgfS<4v8N7N2)BA$G zC^awdA0T)Ej{~u}nV>#>J0qTk{G!q-s|O|lCkg@4oDwC95~!1S zqbUa91-*s@&XqEJa&*a&!*w<{f`D52g+FmvASj7xw{+MqtIEFu@D~V+fy>eCW?d0L zxuszppfrN*x4sevfJ)AW+PeA%t6OS4BD%exxEudJ$p#q+{pu_BKz1HJp%$a5gre8c{E&;Ix1=;hh_zJK?|9!Mi@@S`!gzM%y z;HsxEF1Qx>3ICeG8=iG{?$RBrEi14)c%afCF{1Z0a1l`wV3Usbor;P|$HnVPz@nlB z@W$K3d#Pif^3Q?7T^itgUGN{s0L1r6+r^tg@Tw&DvcQj(mlnYHv7*stUwy~0F{8&$ z7(Z^}#K~7ZwWiSsA;pCyG?p)%uwO2m%*@shHSkx~if;QTJAW zzY-6_xLj;-PyojZg2~?;B#Vu1hg)kZTK?Rf6M9TtVz~4DyZW0cL~NNcebI9!@!-DR+Ps^L<>NQc~MA+ik2(Oixo@l z615yrQBmngz-kk8kAc&MA>g>E2GvFZNrc1TehcrJ&0qvkwrdAF_k$<9;TWk*P}k7} zT*t)_9NcgGTHx|e?%lPAk>g-9GG2Ewt01a#FN2O^*r=hyIvv@)1yo8w ze6K0RB&esdP4xv;HZnaeoniDb@yQ5NURK6OM4HWhgjQ#0sHt>0-HJEtbA7#lcNogM z?S%zZa<}e1IED_3>SRC~ic5-cL>5CbQWI)wa%@cw96VuytjOG#+h@c$nPWI9dE|i5 z%ssnkpQUyPPF%FegtlRwM{f6x;_cj6t6S`wS0A+#YL^xzyk_l7BO&_gUvHXXXi(a`Uviher zN9sy_^GS9C3rS`@tvi|e?I(n&E`cn*1q@N9!f(fiVDP75;z_@ z(FjHe6?upEof2lgvuWdZuZ)n2_V3Ru_v>-(lW#BiVc8FBx4bd%?3X{B@vyRnXO$>G z&q1|B1geh`Qe)-GZM&++ys>HHvbSagS`KX6Ua7-mF=cSS;Jmz3_0@JD$DsN^8ylCt z_9=GbcdJ+Z^urq?kNx+RV}9**a5dn!XELd%sHn6bkk6OwLPe_#CyGfLncui&V_rdi z+uct>BgH3nLc8Dk@Z2tbEi>7ER*NGzNz+;yu0JF39=C^8rvBnI@NO>*9RMa+)M(Vg?pu+C^O<3&clNOs zj-}=phVcN&h6~H+v=pJ?T`mu=Ot>SoWL`e!H zz9k1l#sNo}$)Gks>iuX;YQJ5`>Utn5Z4>YOV_p!;D{C-yh(dnzmrCjl$Z=;%)|V$7<)l6g4R;abIdLW&@do<7_T0K?__mN{m=CHPnOgwZ4x+~r+a$J+KroA=`jfZlZ6Q~Z_#!_ zwoA&8URm^PXzO|`$9IYsyz7YnWn@v-1*VOfHQc%By{)A>vh@;Q{{q0*20V0B^Gml4 zTM?93xZOx|h?kab052E6`~NC%8mjH89;>uLL{SP035*C2_PzH4$}a`w&*%T^k3{8f z20TcHVRtcRvXm_zk#9lEiFm!+I=fL_~wUUvk!Qp*vLN&x6Vu52m4hqusyl@ zdVFP2^jM&z_S#JUa+<2gD(wSMxGKPoLHO1is?t#af&}NuDF6SO9xvK~NKM{_o+OC{ z{&9Xmo9#cAz{pz5(oy8x&$d;zYMD#sm; z=EwS%#2DafuU5u$Ns`QMhVz};@OUmnj!B6S1WxpA*ah$|0{hFus8xOiPz`IhGSpPb zsN=a*+99eQcRa3){xs_`6k}N_v zm{H@#WW*WZ!Wpgcs&_|2;yf;f?~u2qqN1YmuLeb-TL5?Bl;A!_ZM~ca3B<{St|KBF z_pREpY0J*d8@KE`U9XMo(IwKKrfHo{>pk>(Ee9tTv@~k}QQ$Nvx6A3p&9>MPIX4V| zt8e{YB>sXWrw3>$GzDG-*5z~oarISzWKjTG>pRWeye!v605R6~zW(9qsRDPgB+HV> zG0kyZa9Od7(ysuoB*{X{Y4Aj@tv_!S6&00#4ajGUwrj*D5w5U-Xk>0P9A917X4|Vl zts$-J9oN2;ar&v3Go1z!5qMo}_fgZw_aH6RF0r{2S=PvAlCtD%T6?I(&B0xyfuR2W zE@nw!u*lwHM+{Djpdu>8N=|LHLtABr*RGMaRiNii2C+jC&=Ztrn$=e7-bY(npDQ926x z{;+iRMg=(>|yoJoD-XsOVS zrcWOmA~B4o_Z=TSJsvA(;=4wYJe-!O4Tvq^du)_fyb&v$oFQ~_$!GTpE?uMf!VpNYIT_apYSqFD*-L`Y*j;)(k zpA(bg{7_c%m{SIih_dY1v~~NA?K^fJDP=Fx`*wypTtG!dr6Yk&o^~TDbKo~+h&7*I zm~UyaVAwBuejLjne{D$jUqY6HLwj`@5mvt|vy3Bg6hUNxciXHD4VK2n3rBrTg&aMk ztD!6_za9uL3Pc$=gq*b|X3mGt-Z^Q)_%XwE`72L0>ivR}hs~LpYKoil*xfV64N3Lm 
zs?VRRr8*7j6J|emX#3tX#bvo%(xeP2E2rGyv|9^~?67sZrn_^~x@_RoN(iCtf7?5c z&K*5&;^ZOeVSMG`^Ht`~!)M;{=&iFRP8kuy7g|kydeLXM?A(94u)HK&>Un1$>ra^# z7;mdB$T}h>O#te+JI{mD{xO}>;>q)Cmha22EOD4p;{DLZyu(7bo9?{pwri)2oiuG` zXa4ZXLQU%Ut_=s>(_Vc1>ak;n4~(%L+*_jY4^QmgBdFlVZ+7QZmD#C?=ZCOP-yYSB_DnK$>R0LDzt%Nm;_3DjG!pWZMUM4@zJ0fWGy|NQ);6OT9`!O!P68S z;&1XWEJ9P1$aB0_Ys9-wn=!zzYR9tqAJ3on$;Yd*oV=tFIP;VnQg*)m{>Ps!*z4|j z(=Ee|f`*6)_V>704Y&mChNehAlfN0maFWC|g8$G-!{w^8bymA5M?^#fj(Kj$cT1Np zx~)@Pqw5cJgR7{hsHps%pvpFj8Vtoy1OrZ_(zx3Zk$5hs-feq7sRX2sCLB1UgoV?!qbxP<*(22Ka%4;N~m>dBqz1 zQoF~9M~ud2fO+eU08B|ZPJ)=wRO?0v@Ck;2 z8iZ0Ef-e}TCb?xKv|Du_yJ-6^n07v|Oo`!2bcR z%%&nLDk>^%12`N89K67>0t5S!Tf3AU2~iLdVk2Bhw=P||Y2&&zKfOL7RF{|%BPq+i za~>8UDIBpi)jK#eFd!H>zp!ee3~`3z&Vf98<*pS7s^J?CE_(Hy+_7Kp-?;GQIb9Je zVer>sVqKj@#z^3$LtR9@)5w%;1((u*5GDG>06hju3O@(PaG*O%X&uKFJo8z8=ey=@ z*!k6cJt$NZU`i0i;nWZ$PT(@n3a~*I$~fYS3$6uz`h;EJJjq>p()A_zO?DOtJ}x*d zmWKLzi>+zx>o1?ACJpZoyDHi#oU-w>ii(O##{hW6C>+8C3UCyP&Lx`DaR9niYjfE) zJ~8K(TW-DMj@xg$@rD~;-D1;IDC|DuIiSCipwsFI40E_j?RFCAp4SN2^H=n*`>b)C+Z-4dXUPcn`Q{6I_ z7)m&R=Qyj)6&f7KiK3)8=z}rL=5ksQSzt_qM-LXxAIzz*=YW$AA2JYSWg`;NX>oD{ zNwwqZSBL$ss;H>^D?t$nC^Aj}cMc^nuQIoFfP?^-UVvKpOTf>>i+p`>gT=e|5jVba zQ(#qPZ9{{_(&S)71gNkvIHdpB(L`k>6Bf~>M?6tcURG36R$&T`^EXhnHB@TP&b4_* z-3$W+4Q38dHLpn-L$QcZe~YEQp`xNDue!--PjXkkN`e-Q)9!) z>=KCp(Jp;HAVnz3yPQQO^+fm1iM2J1F*wmg+AGTnF+{_fQ#+?LHRM<=Hf5o6jmS8x zO%_0Xy)AsmkmQD{(u->b?Fw~*l8TB-M*WS=Ymt-9~khy=)uI=+@2fH|luWqO<)J)oZHfz_?=lgKS zH*Pq_`7!%fttkt;`oj$;&u)J$V)yqu>Z(~~jclN57)en$_*E8VB4EIORvkZZ;LxGX zUp(5W;fEjgRTXSmwdR!mrX|~s9@)R~otyi1a{ln~x6J4_f82Ha?3Tw}@4dVQLrAZ# zBM44X1T=-OU;m;~lfG-P*5!T#2i$7hHYv&Q7c{mk-b2hm+r85^naY|J&TV%ymswBpB7h{*Iw+$T#P||`oXYyp-m~wUY5V4(@AH7%`9ApfAgTLFsCulT zqS7|NCQq9yi>xF;M^KiA2g2RvIddlUpz!fBjI}gnAC9R zxm~$d9K|j<;a_pU#|}q{!2@JS%kSO;NOp5Ky!X|M@@4Tw^#1ZYj|-N;@$<_yAK=?~ zNyudmw~JBN15r^?=`bLlE#8g_Hc=yCplp_yHr`$h(%2__JSQ=}puY-SFOy?BlT(rxebOqkA z#BOZEdMs%~AOjy1pLO`ULsk9~sI2ke8mir{(e->ria`ug^;ktkrDFj1cfuqaD7=94 zDTy4^hTo;=rM!wr1aa0)i!x0P1R!+6_t(y)MFItv5Y|fSkdEx z{I;sc642qh*p+UL)fc4l7eF*9hqwq?>wDkcSQ>PvRyL`6tfHdQF+e_B=+$F5u16Hv zO>KO9H6ll+h6_9=ipu;U2wZdOS4L5*{1OPrO*lcS^+2umgtd}W$5*SUsB{?M{9{zI z!UdBh8I((#wzl>8PKgOgJWq!vCj?TWq(R^^7qyku4Q`plRZae*sPmlwsvfJTsI&!O z83bic0^X(!XMD7AO_L=A^G_dp)szV%29FpyWWlG3`~dF@MGPCZn;gF~fV+%}qR zyQZlo+uTjIrpdN#V=`v4ZQIz@q+~gaU9XuwTtDa$mw}`pHGaK zZub*|y@4lpqCm+)oUFB^@o_RBC=`+KyGkh1>59NIMV|nH4G-Z030*VCpCh&^QXG-M zl(1dF&iDGYIbYt5`uBWfgpWrE0+4_CzvJW%Mz-8SWy9SdxyFec|XzfQTrV9d4{CJq;s1vUbg$Qfsd$dc%~+CcIXz-!~Valu*7x_ z@0Xj3`rNLvwxYJSro9q+x?S=RXSPJMy#+7-peO*cgol>S-`ix;4ujjFEfHADNG@Y7 z=Y9X+_nB`L_<6+}K0AN6-r~l*ZG?pdQu*Bh!EE20cINYWJPulBrF8l$G8_qud(!Kg z)q)jHw$?isSLAXrzE28Nzg?`=hoX>jwCPEQDLU2V45YA|Z{NB7OXas515#K0C1B3! 
z{$e(>PaInBG+|=(yfM}8FxQ2EC9)gh^pf5yF0SQvKJH|c;3sRDEc(2vuwgBrs;Eaz$4@`(YHU!HEIGWZ?;baUw!p-9UPPb z91e-4`bWprUAj7K?AVuAv8t+)OF^5Vu)j?UHCT|9fQUlf+7l6+WXCpbGYk|O-t;!M z2*rFZ;q&F!oj;hPe(J6F1*T(=YTtPA!Y?QA*saagoIj@bPEc`Ap5-%m`TG>?m@Qb4 zze>0KlxQdZDW@+Kprf8b4>^O=c4pJ~v4UyOP5}*dZf4s;Nn6Jm0QO z5-l%-NZ+TFfUZEu=Aqc~GPd3*RH@bOIrk3HOl<`v&*f<9d0)3zK3^rIfZS#<4wf0V zxg~5`+*b&m{2Mx^=L+ag_L4Q`IY(8M*LKM>IOt*kWjur%|5x)k0H7Nls4A zS=2f#j^_+R!@5}}^1rPZ>Bmwr2`$SbJYNW3Sv=WQhTp~24$;YmZsP^;eN2^Z;-nb$ zXnPAP*o7NG!CJtX>g8}AaBZ8TUjc$WHbqdYqN(oYd z!2>=HEJW@h-Y)V=Uop6M+gjD+&S&i}?S-RN$Y>C>hm!Fw8%*K~I_;LfR$6I&qQEoa z9|@E#t?{$`<8Ap#4tdm6eD>OE1gIzSZVil;#!;pu6uk!`Yz zVQ^b}`txb%GD9Qj_^zTQs66t3iBIMf%sUtkTW$PwZoON(Q1kKB;v)EEl>_E=9|9AQ zCl!ab#yHNU4?l3H1EDLupBkpb29r)Z-|?{v*(}sWkjm`lnGD82Wd(feodMhAQis0j z1jVR=QZglkww{GPn8Yy&n`QT{T7k+zLJ`q0fKO8;R6)d6Z zE#O;S!`10ZAV%;)Tv^wskHKIL>A|NUAfo1H7{>_x{fvyb{0S(qG*4yB~nvH ztDvZ^xqz2f*3r>Y(J_LCw_`_UWfhiSOLGVPO&f`5&{gSt2ruFnY$}13G~##`6OY~r zJbauL?a1S%{94@h-{&^*^7Q0#`ONYd1t}4+qNbl4R+0lxK`UYzEsHBKs$Uf13k$|8 z6{1nAEB7GDG2n8W+TyNI$tZCp<%Rc$9eXPlAiynMWDv;YI#!glH5Hs?!pqCA@E~^N zg6qq+6GAzFSGIBgOs#qwO(AzID5UR+K{w`t?8>gHbpn2S{oc}`_LbU#E&Ju9&oc)W z96MlxDiGIBq|~v&1}%90#IP6QZOqp+{s|XFNT_?#UenH+;{b1gz_P33sxob*o8QxZ zqDp|T+2+alXINzXV_VTP!gE>lHxxU)=V|@d)tG;S_(3n5Z8k2o3-5MoTbNoKvi$cm zRcQo2OIyGs1&}4Y0;B_Ep^!XJi$%QdPgk1XZ$kOB7Akqvb!u$-nqG7Gx>h^^x~^~LhW>JOZMbgk?wkE)T3;mp5^M-7tOzPWQv_lKZfR?bZv{cWKlu5bK8}km zz|4{?HRn^?A9h}NH}W{C?N4DMeoORy*}+eSok|tPs!sVzoy(RAYI~SXB;IgH={{Q7 z8FcgUaijG!9K5%5n-5`_gXik#+y`d)NN{!H_}O1oiNjRtUXM(d95kJ6B)1-%q|4~} zQN_=T$edwLKd6AliiKbOz1#HZ0=RR~MhODkY(h@daGTcd%1vklX@23o9!9#kJeWb7 zOyBMSn0wRO&3a(z#yM*Vxakl?Lzplw9BT8L#{&$Zq+UWF(p(lAVwWW$Te7{EiFte6Ai_2_~lV%jy4U+>i)9W$_FRnL4o)@YKUJqu^KU$2H9*d zQRwzps3Dk}swN53R#J)*V7Oop^ zAQc$2`nRU%vXcPNE!~;o_fw>6P8@bb09hDBy~$uwAP4~x$SE7L9yGP`{|5mTqh^l- z0(DkLyp;%>C`Lm51AzuF`&%ZhFqIcYiEy}?a4q`%FX0Lqox%*-j93IWZ^!*+WqTbm z8!w8oPri0V{WxJWyzdh`?=iCh&2eqn!5~)cb5i#sK`f__{}M`=2YGbgbjhMzvLx8N zF$66CXaQA-SZ?_89c@#*nJ5qGyCx~z1Uw1JpUu=R5uTggm7f74Tga`eJnL3E1dlRB zn%_I@n4FdizW{wlnJNW4tyh2EYsJV^K}>+dr*GE0PzF!I=?%nl`OrJcAJ+^ypwqDh&vVVT?G3h7hLi!zhU^ie)Xdk^btNLfH zqw?(h+ZPZ~DgF*JN=|ijg^cKdCvw=ziZp8p-E`yAz_3XE4>$zBjOF?ghm56L7d*t; z?((wF$(IE(l(_q}^ACC$%r=ZlLr%Mu6un8) zJG_`;-=WsNU+Uh%w0__}2GnPTK~$trc>mVIAw~Uv>rV_>O|oLVC{&Ct<~lRax#m|P zyjwD>D7;D8;#$Ckht-cZXY$_XPY=nFv2n`$E|*3c++2+tKsQ)C6jZF;M?GN!e&o1FRe?g_sKdL00ebvpl7&E@a6Ncp*KmJ*WQgvK>sa6LyQUZ z0G26J>EQSX%JDgMH9He_dy#L6LLs~x@F12Hw8d$GK|`myvB4kOdJ1GTAt~z5X7Ia@ z?3Q9mC@98AsSL#6rC`ZA*lT_K{2$y&-v>{8Jr5dc{CJ-O%LY5HY3|3ZGMa0;!waLA zr$Ww{3msbVxCG|zz&;R^ruM5 zr@R)DpBg{Myma*NWNRG}fuf@x>y5zAr~0uBG4u_wNk0x5u9hOeFmnxR(5Z3jY4B4u zKMqpO$KCBlrpiP93d>-udRQzqQhuB!r`zbz=1?RFqUXXQUUh2BCKUlcvyq(T~;s93)$-JEQ2DL-I=IyK>a^H;X3I#>y2Cxeryaeb3%)DEAu&+VZ1*>9f@qb{c-09}Ym_!~XWXd1rqh#r%3fd0v10 z_&VB?+u0TcSH6*`F+h{fKSX3yUP?<-N=;|OLmP?cnDIr#^HJ3p1U20iPy7jDu46)K z9Aej4oi2gkZ(dU-d=o1T;@v4(MxleC=)-@VmVf<8YQ?*13t9ps30SEIGV7iGEq4wo zWbRFWbHkS3OVBVe;@0fokPzyEZFgD2=lUfDC+?d1ym=S@jEu)#;ANPjn5?5&9D5?* z_JjY_ib=lhm+*B+b2>j6XT>Xp4bmx>+M!<53btW!ivWVF%9Hxd2E zx3+XG4w*;DyLKO#dLvhqPyiTjAwsO@`3CE#S$<#*~rlFFQ1aVTh8VBm@- zo@`cIP&`PDSFO`?H>?)?Bevo9yc2M`>PgPZIBo7Wl-Vw}f@-JPYT-AZYR&0$t!OjV zbNrWwt&wnDt8Q=h$;k}ybznV9y@q~eUiW~HV{sJ=C949ej3N0&euP^%?~utiUdum` zb*m2N^HYBR0Bj9?J9MKVB!gu74}cs3wh0ljE3je+H%qGPl`D zwK;LcofI~uzR1CopfRd0Wt&+oz9-@FeSJ*5>2Gvptr*>`?VGtwJMEK*$s8}QgNFJg zO_EI5L!&>UtcEaQaTNrsvi=xx-jkhTguF>g?*Df%b_01fQ%OzUx*^S zY*ff$_c7c^!pX|OuNsKyGw4>m;1dPr&k;qg?w$Iz$e-zi`)KrS@jabANpkg zk_9uhVTY6JjEi6s#>!fZ2Bicj1_9X1CJnwsFtF7rWQ? 
zI*k#CL>+<|Y-dk++hZY(=m^)JJEo`Tf!{Kyw54P^@ibSyV6*N+(H@5=caCjlpS99y zXl1&;4!LB)8J=U`;X#FSI#lH1sSi#`9S2s_v9*bb1U(10$Kp>5kpC+x0Zrq0XKAodh zOY!+|F6RcT$0LP8=o}$21ZC_US-ZVs& zWF;f8A;kG_)_Z?tgL-Jkb*Vn&b$SCN!->qS8qA$Nm22h`xjaE2l*~l1sk%cg4OzcS zJC1{_zNIK1WPn~~zuGCx?tY%!{V^+_3K`9TvX$@t~~sJLNo zzo?%pEVb-=-Hy7z?wbBG(hC~16JYA-C@Gy))XxV6Q}ao>qf;LB_Nn~`K+S{$NTl+}1p%(crG3wX z09=q!$fTxKy|?yB48n7 z1{^rf2gZ9BrGdZKpH^|Zxc3)Vdv_hzFxc$tu<>rW{!9qK^7p2f}dgJgAODi4{U^6^p>4Uc1b3D2i7(7B)X06R}yyKiP`KALTv4aaNNwu zO=j*p9y&V0o5b>N`u?O*<#SeVEVIKkDDyw{k~+=)xouRhY7eBGC5iu3nNlRh!6WxC zW|00aYX7^awo9{imJwVPbHnbxaRMpUA5aWS$JzYZU!jm`cxR0F56XlI@ApdFD6$At zB4G)=nLgj!q40`|88fGAT5EFZsXOq1HJH0WOUo4hnpwsvQ8_U&B@eO6FU-r%Yb;GQr5;!+6AE!DWMrIliQjY+8P$<8 z=fM%yKAlwuIgGy~T(7uMkQ!N`uQO{o66e~HGPl2<96$PBx+|AdSKU8CPkvWP5*7pEG$I+k-S?H!YfXSXGNu*h+Vh|0dZ0BPsn4Q+hSf_u4kMa zRn2?XSmi@l;4JzMxe56GhB;0$Zat_|!##!4Rq?qV-I&!VA~g5V4Nv4K!ql0ZtbaF* z+0&zn^mxbiH$gDZ{D>YpP}YjBtLwpOA@io$PNUiN?Ks=l+sra2J^2yFOQrLwY>%($ zwc+qc_=mxM6Q*tD&E&vZbZz~*dXvfEh+3RN^^%IlDDd9fJZyGGoFIsykD%8M7_?!n z(QUgEVsf#TYP?o=WUjgu!{8+&Vw=@%xD$fBA?W8ap+0)tpB^4Q!KB^la1g8TM`mMf zglyeo6d*;@ISc?-0q&TLfP^!e814((#4Bok{B$+%e**->z#;AYj%G+XE7od^24C_= zeAUx;?#X!LH55FvF#%)T2GTBRRce|yM9$|R(5@Mp`M0}a0;EHHR8 zh3oE;;oI?duZcFZw7V4vKYI3QZtHp&6l!i#UBs5k2na|!?R(dTK3z>I3#y;Ifw|=| zU{1P1tAuLcKkXCf9>psmB{XXb{sb(b2&v@t_L(72z-yzctB?n76z;o8v%}dx<%;1I zQ9z{m$G2>9bS&HnBqw?KAKmG~4XD!hO=zaI3mZ~E6?|lSr1=qpf~Y=eG$@evl=H`B z)dV!c31|2J=v&8FNWdu;%D;C*QRlJ{4dy=Tpf%3t!Ecx&<#w8`@%7#R>l<}M9{H{+ zNUXA3`-H#@fdAo7)uD{6V9rxYS%y*TGM40^`43!I<&xsETjP$Y35J2jaow;pkzr4y za0HCppU{Scb)NU!WI9({{Lk|~0ITCg%SEzvcjMRpARYKZr0;)RXCtZeS-)>XiN)Pk zlO!l(eFaqeLo^eyo{~QhZ+>k_ppf6jLwQ|I0_@46)$WZ&Mg#51{6P_NV~v7xt)k@s zcX<0{u>Kns;myxCNgG0#ixt~lAngwTP;Pnt1#?KPU(B}1Lv6a(^%lmqO?H@qHg z&n|fW7kSPaZT120G6O_%J3NKV(}%)>hop%isGgfq;99cvcynM@tnPhYhdMVh7iJzQ zvt3^yP0@c`%k-`M1EZ_^hDd zq0`Q8EE2ys{uLBn1tMkffcKWNbv=oA!r)YOT20^rw)yWYM2u0AN|_>JS;_i(%@_!< zC_z|nm0vyhXyJ&kpe2GEWvsFDc)K(~zpu~#zx7ex&J1BN87LRgVI61yd>YqJSa zQD4M*qBh=iETe)*#dM5FsKTxybrTw$_a8&U)t_xY4EXb0YS$sH&jb5OZD-D4akD(Y z5$U!?4&*Q&=1$~)9YB9igpDBu4W&I!MFXG}j@e4=uzndrEg^~h2_&=sKDvzShlOuT zGUnb<$P|a zCF5pWJCLZFEYU*eqT{~sw&I)ai1V?O=&wxCB;^D}I&Gr={{0#hek)`l_XygvK zg*GV5U7d*Q2T*3sHfi>sB!Z*ZzPtKkN02J-=CbZDcTK@meA|oG`1!IHP;uR*#1A`X z!`xi_gR$0E6YD%t-NXCtiU30dYTo9M)!;{mw(tGs6>AKD68Q`_ez{$S0=WUJQO(%o zAc?cixSS8d@VNHnpGFOZ&%fQ`W+l(%1rwBDZIR<%<>$MoseqQS`u*3X3@262?!zd? 
z%=o~&5e$77o)!6|*0uA^D2L7+XCN(H{Mmuq`Ouz)GRw?+Nmb?G+`Z-J{sMuJtPpI| z4pa$@sbgx9gPe{)2SOH^N6cHAn9VI=hK|p-LB&8aW;@m}hO%|M9M*P{ClDj_baK2j z17JJB6tL5jiDW|d)MGJ5YC5Qc$b7Unw)$WS#Gfo^SPh28;5!YG2*!yyH;=Wd&a+n^ z(#pN?@jhXrhj=(VeeVz%(sQA%6-knbK+)|LW~iAcOqzdb0(`UlFtaonx4~>B*Tix5 zyw}JhJ29RA4>Vt;wNN>N3n} zJAO1T2IER0XgZ!Z4FjZ-Yeg4RU;KfrW{ZW0fJiU*f3PAM+DC!)sH44kHRIRbV>go| z2cQ>}F3Cw#Lh}9|E-scf_qAhPg>aD9vsxx?iPH^G7$z+$FK`Bf|2Af6=lS+@Dj!h; zgzAL}Wzls)P{O7|kTO}i{9+rZJN0Rpn)RhbLaCS!nyS>hJT`KzyH<8>SC8roaadWu zwz!Udv2a(geQ9H}MF_$Uk~gx=V#)K0)%BS+dKHt=70YKV8}+Z989+K+`}Q@Zy5Scx zDP~-=X@6`w8!9QcNK|L1+g+LuwxgmpuE<`Kb*n;!(Zo4Fm-zEwk*fkrw$}aCG_gb- z;Xj(;9(9#$%M95;KvI5ZmkyuQJ54F@7r9h(CRdp|EpR3k;(*xD+d*WbIZTw z3OGZ6>bKvdh16@E6INmm@5_7cU6brcKjnS7FIO&{t{c&8ooUbt4PDU?C0zHrzIAx( zC8jjF>ku@V-35IRcQ8Sz7#@5~7~=Uo%ZkNUs3;VTZE1aU-8#vYh{*oeD2_;9KoO=o zbAyYD+8QcaT6VQO0_uv6-BbCVp=3#AWod2Vu!I;yBD|Ii|D$%pPW+ln2CV)&1UAj^X zTo~TSQMN?8>Epz%TdUT0;Saf+da+<8M9b=#UbS|+?O9Tv2Qo%!Km01+4-{A9NZufL zb)aM#`4wW3BGQ~CjXknEf%g|0h(hwdJR|3%AT2c}o%^?lF^E{|uK*Pbul;A$lle-W zW>YPTp&{Qtfz(zmn(CE-AwWc7tI~l8h{)W7U33fZ;h+O;4hjyu5ze&+zu|2TiHU4J z7b3#v#TnFC2o8+B>$5o5_Q_%pNFFiqhxGDC3i?j;%gPdD2UWcAmkdPPpb?&T>pp8m zUH5ya{1}&?gDG}JMiDYjP3zk%5hM-F!Ga;NhXQeK|>Gx5U2X!H$(feUpc7e z*BX<4J#J9LCp#qg8VwXR3wI7{ z5V;P`93Ke)Mu*5wpdQ6N>4eLAvBvQ|oryt!W)iByLd>xTmFOEY*3J`5awM2%Ng;(2 z*4yh_qpeyeMGv@sr7(M)J{@<*%b*>u4gZgVBhIsdtMC@g5-6!SfIB#uTKXwx{=EM- zUP}lX7Llk1LAh`Y9gHS`oqvK2$&Qm?lio5@*et=Mc(bgMak<^|=@@(CyvDWlJWGm3 zlbaX$aT>T00;I)~Y$CgqPafQu_#4#HkaXXtXkRX`rZc=G(%xjdUSlf~5Y)sm-%bI( ziX~dUpfN$h=m-V%tKk60s2F;}Yt*0}K!8>%#N3zIf0|R9oK%BQ9J4p?1`Y{}iYhxS(B`deAYaUdc55{+HA^q5 zgx^mc2#f8ts=7LH8&73XBLO40Fc!@7vJqB%F^V3Y%+3#PPVW)84tKkNnSVF7p=&TT z>^wnU_hC)*YK;d{g2zylpmQ4qiniz6Xp>|F(C1IkoK({UAE?M{qn?wkWXDVp)$NQY z8_<&=AFHFN7)2%1?PUn<`2b6&M*n7e#HGjRVevgDH2IM#`5z9m)6~dCviEIvz;m4l zU%htl&QKC$Q;WrfU#<6^+62{s?GSyn8G1I0vIHSD8FXF2m$&D20lz1=4FxWh^28!2pJ5EW#6B2@AEh_iFP zr-eXUaSp7V@afVYPL53Se*4Eox;oi#Ih9?ua$<@B_S_sQ|0(cqAp9LGv=Q9ogGDO!_i9}zG>A6aNkH_jlaLf$%V=YZ3Z(O7k2Vy zF=cD!4_PKG*{KmK1I(azj9U2zR|nHiqgkBKj7H_IZQsUR1X~v=g1(=DlN{C~d=`-} zb0`aC=jKqe0*2QkS*Z(qr(qmG6C%+Fuz^PkD8%6uJs;xnEX`u+NziN)d)Ac-yH++ z$^PY>UvUZkOOk!z12xBh_J*$kz5Q59Sc7ni!F0YW8ybPS==weIxK+=0K|9ziAzV-{ z`QM35bni%Hs7;qd4JeaRg7v2H5_yG_IFAiHgmu?q4>oW;5z;}JU zoV*+|?8*rXPH4eEf)kFtdRGTlO^F|}J?*f=_quTF1l**rE!>PSzi^kkss9odQi}3) zlsgGOyj<{lH{XL~bXT8K31aBoou_pBXyg`+oZxthiKH_vgysA9!KqS%z@eAGcUF5$ z*dz%_;$JV{-{rtiiKcSSTvF;Pm4S+~cd?)R4$P;nI5!U2op9vR(sbqipNEZVEM;`q z&|zr*63QW#E>F(F>C2rSG_4hXN{D51eMr_xnju=y{{fI>*}k>d@fCU0U*O0iY(Vm` zPyVxQy0Qx1J>JFq;A6@z>jE(11n2ro~VC0x`PE16n0 z4^HXC2eH@Z>ch`NsJ2=c;eTG3k-vAHJ{#3ihe#l0_)?vbj8@hKRKu4try4T2kqAve zfE0oROZlr3AS)b>5?^?p)~{w{?Ts(G)aI&uvc&9bYk0$lEue8f+w*fM(-jYz!An3uGs0$& zlvgCc6aT}x$d86#PeoB67Th`}H2v3lUdFTVukHRAKFBNhghdytlWUvrv!>{`Sk_KI ziig$bf8xVZF=s%KC1gg-pfBeks0m(@UfzwsX(7S<BHT9zXfiaJY^yxfd>j>z?Qsfc{MQ3 zl2AdvlIe@*sjpMlW8LVnl!8tE?d`9ZKcd3D4O_!&@y_$xd4UM!!4^=_G-WD=)?rg_ zh@n7+8YGhSPQ79can(>ZY@wiOWK&iG)~wixlgJPAZ~8dg-5| z=1ID$y1$(u1CfFPGZMjCtf)Vwc+>tmKqZQb=21r!Nybrhz=lj_!o;4^A7nGsvyr12 zw^UJJflA(hOi*F05k}Nl?%k*!wxZ z0@)su%IcyXD;N)(-~Kbpc^axr)C?eNj{Z1F4Z}!z4a7qVT9E~nRxNRjpOToe5+LnY z#eWOUUh&Es|2KG>&EhgrLvrL3-vYTZ-tK^G8*?J?o;ED-YCM*(SZ-D)UqMG(bs%aU zrFJSqAi8}tJGBGhm~1P+L|#YM`-?9iAu@Qp`et(2XfWm!my~=VTTrFn5{lRM?<|c- zyJ#A9C5_E)NlDwy!3gb@;DaDLA50<=cu#=9U^ENShHqStq~3ZHm$iIz-H#HA%hkQE zcvkt})XyvF^1x8)&rTuGjr@ajv7pomJr!Im?)oP!?YLb-gM+~NoQbIVTh$orVGeo8 zSj%xFLjA=4{_bug(UOM5@f*woLWQ6X*k9B`7JFW$ANE>rUV3Z>lH#782lB>e;f}b4 z9KjS1MC$-+1!{qh>Q#`Fchi zMg-@cNh>)F5N?FI3~>XxA9*{q#LlSB6;2IB|!46gmorbs_(XA 
literal 0
HcmV?d00001

diff --git a/website/docs/assets/unreal_openpype_tools_load.png b/website/docs/assets/unreal_openpype_tools_load.png
new file mode 100644
index 0000000000000000000000000000000000000000..4909feac3b26adad018829cf4a26d07d9ab324aa
GIT binary patch
literal 27465
znVJeN@!p}0e^PkWS|OGvZS!h; z>q0b96u6cNJLi4tDaErdlZ+K`FmfN?H7L>&-ooVUc6BN4$>mEda~TN=n+eiBf~t1M zay{F^n<*x9?E6Mu>ee({s~{(z7R5YfpMgSrf?VIE9gfIDt9U!q`6yGL@*d)__Ue6^ zjU%GiabbAySUqh8fljMCTMC+Z>>P%!G(DZ_b!8g{)O=qr5)KT8nr6_NZU@CiWDt~X z$t&`9uG#MThe&qF(_?*gKS#$yT-C@0YTYV`GtPu;Mci2sqO81Zv#pVOCiM0lcOs>&FWkJ%BHH=yli$XJCy}%w>#8_1w`xv$ zn5yXE8hybUDGEMYz@BLdfvRG5LcQbG-ABizU2Q4yn2%c}dc>_} z81#g}VFNuVaWsT1RgpI9NZox`?A0qph)aTe2$HjjEZk_bId~-KruBg&BFJ@(_?o8MYs|e>BCDib*Ym0WdDH6k>4b?{`^J}8wRH7 zmXoBRFYFfi?P9%6bp67_Io&_6b+6lQG3owhh&(ixJrY{=x`pnK#%l_kxG|=HHRaS1 zZ5^~kmHU`EUlB5$a#W#A-fF=ud1mmk{5KhZ(iE{x-nlC?EMd2(PS|l%?|4>PVYIBu z*m4k|%-_&3x^C~Wq>l{bn-}xdTUzsdGkoI%_qg#MZkH!rjnp+rrJMy{h&CNP@{EW; z4GH*1hnSTnF%UJjuU1)UOECX@tRP44msWT1UU6$}$Z(j4aae4sjxZj>Ij0vJ z#EtoUZg&>?wnFr15hIw2#|%sM1&s(sXk2JbX!yJ09w}*WX&w^nfJpcBcGSO+^9*Jq zJZi&KGnS#Z?K587WYZrn7JnC%jp~d>?%u$cuQyL4Z#R{1Di7kx1TiziUa!w6No!|k zNdVs+3~&srZ;Q8B1jOWCV1NKXxgA{L_y(nr6fW~B{t(ZaMQ!ltR^|IrZC_x*UX>NMgX60Qxk%^jy|tqv!-_yl$fK8JxBgK6wqdhu@`i z+-S%Ho;sejouo6O++Ul_tquk`t$O-Fr%5Wq#YYNsR`V4M7#NAWvfTtbLOH`Ey2-_^ zR67@2%G*Q@lQ4#GH6Du1##1(R61CO819RAYK?$6Tlu`EUEy1^0QW>6u*hy|-zk~Q5 z$9Uh~w!(>m$z=M;$WQMxeom7t3Nn?eiu2E~7 zC$r<#>blXhHP-9YY;O}kF3y^A-8hHCUOX|dWQJ<@FurzIPFMC6FR3|kMX)!O5mHT1 zj9(+HZJYuRp`kP!v?T*#m%qc9@wrJ3xqzsXM(x6$AIqFc%C1ee}w z)>#ZSrp?FghR#A4{f6$JtZ7qTb{0Pm&^5%=wV^{?oAq~h#gX_Uhpy^n3M+;kA@|{E zmvNRwSkPgu;}3NL3ohlduOE*##dsgJv@-u-VqwI0J?zJ%c6fA|2%S%blp?>er|(b6 z<6$Ae+ley7V{amPlq!rRa}C6zbQIm^>JY|J5mp~u-EQzFY%riSyS|v=Q!7l2gm3T^ zo?UA*Ux*rU{W^lVui0wH)hZITp#=u*!wgV(!+Ym{!K|rUoIRF1WgEA(C$#k-vp}ml zz5$CtNzOas{eiidM%_8hTPfJPpjHFjPA7;OAu8}GwpaP(w{Dh>jMH+n5HiG`q`>+d zJUAS;ec8>jr4tlgJo77n<UkGuY1QGES+i1K!>z%ea~YltX}Vn|;5WNIN5wc$)3S?FFE zLd(tYBMD~w6Lolu9*T}bNEkme&NyBWr|(_3gsMK_CF$mn|6jtTBLpXb3k(5O#x@f- zIiBHOe#R|__;at}M}b8ZLL42JyIC@?VOA{fqUX%g^w-nNc6df~LTUpK2bG6vx=Lz+ zpAQYm0{1t58uNZSG_enP8#AW9juH=>L`G%Y{ThUTJAs<~RdzX@li)7M<_Omg5H1*xW?U^8?1^*# zmGC6<4gAvy6pfwX8csS@6yBIkc{y?VASRU(s%r+>bmYI5nWU{XbnNkr@Q6;crmt&_ zdk=)L)R~oww8c3zHUx2sCb`#E3h6_Em3sPB17`frp4h`#%hp>cpwl+T_AwXu&T5^w1{Dg+Q3A6`BL;DkbTP8MC zJsb^Bz1e{tkue%g{uRpUbMjBuawT}fZNqPz4hPYB-{f=MrJwTabENB$-DiUnRUDIxB4+^VP?+!uNzC8ii@R|&pGujR^!34v93+ESegNf(> zJRAa#!B9uvl$a|R`k4E2-55AHNGX-n$24T5-79q*-08Phz>r zC&MZNvWnG*hRWm`papHAOm_bPR@jw(u<#Vwg&CW)zp0aCU}`?p%ho5N4p8@(*qlRs z^KP3$og+nfqunyp$7fsw7Yk-(o9xsaJ@Fk6Dgg*IPlSp)a@YKZ9_6NqId0O#>KW}f zmUtaVJ+}UBP?AEWMCdk%y|HQly>$})vH&28hXqTr4KFYNwKZTIG8_1ZijC9fc}8nG z^WQqKhw?wbh*7BCs7mu}%*ieZlrXywl?w_}3~J9bM;G1^0-;)zOI|Sh z8D(oY|LlY_Mcj!l)ra4ByacAhj&oapBx~Pk)?vttk-J(etOxQC&hAck*5Nlb`Jmpu z0hhYqmYFcmaugoup0m z=o9bG^QnY&EnEU`LY!+5doShA6(wQ@s^fNKu+=h9b%&y+wTF&gu_C;1@MsumZaY`1 z8U&?D6+z1iN9a}teGz~(_tUk+Wk=xcAh~?&A7kIm-)ybI!a~vOD9Bo)4-lT?a*R~N zcovKtuU}>AjHI0{YL2gu_9UyG4$Me)7EL=cp=zdxM{&pCLP3m)E5ivD&jRYQRZ z-pj9Wbxsa(aWHo8+W*Hlz_7J1JE-29C1M`1T5a)og^#kNytP-W{>hil;r~r&)S;;{ z9m}4F*aYsUcYv5~($(X~R zXG3Gb!LSgXxA*uG@yi2QTs}b*R)?n%@s(YE!ZDp{ExB_$xJ^&A00*e67w*WfR zFdCBQNPiT(z2Ft{tdn-t`6T0_!2U4$c7jFCAxw>6U(!Hb_Lz24F7en<$AG(nHSwJy!V`D1;-t%Ns&G~Xv&~k_ za-x}k<(OY*wO$9G3MMypN;LMY*B%yX4t|}ka{xP<5Ok>Xg>fwF=%&T>@AQhg&rC7hWcrM}Lm%jEM{IId&@9_`)Up*Q&r9|3K)xxnu=rcvGQEe6;^b-j*J8 zv-{O?7$dRJgtkCH(~>vJ)#TjieoV=>Y0a^K*a9x&4+qYT#|LAe$f!_%jK+c}9teIf z1M%^;mB*ABMv7b?D}C6MV@fKR6m*MWxa)VB*kcT2mq} zz}X3|@1U}k3W#IKlCM+4$#@@PG$77wX5Bu3m#ga`9PJQPQ~%-77gV?A9PWK1x`o~O zT=J)(epTMtCQS)pTy#A~`kzWD3p5=_m5v-CJ6{r&r`61JXmOHB|GTNW?<5r*e!GkA z?<}@s|Im}kP+VptDUs!Y2kNNc|bjwhaEXiUPu{XU$|w_IPIvb zr&+7>vuZm0edXhwGM18ra!PAzXZ5B_gLU@XwNi9Tvs1UYp+7xZ$%hx2j&C3B%`+;h zZ`pkWwKR7rfT3eftTN4&olOt&!u6L 
zM5G?gfc=0r9#+Z@bE(=#%6WWd^-hF#fbv!FR_W9ah8wEd=hZ;Gne855X2Yv`= z#i}LE;&&ds-M!tzctTF5J6Q$_bO{ti>fS+w`$s&7pUC`?z47-w!o4|xBB2D&*Iby} zG|nV;STGI^7fEu1gRAoL*nu^_cF?)Wm4_OJh`n3QCDZr`lUj$-!#QH0zy}Y?W)u9p zj=ZG$gAdJ)>+LI{Brzy!5L~}yPl{swdrea&ymt!58vhwbAnp^{Z`aRgd9>#iyhO|7 z<&+*hLY<$MjjNh`k@WC)V-T?Sw@AqR2YWc#+&La+>!weMaSYMsNcO({XJR-l*|TN7 z=JSS!1jmc|K=6fxxc<#x&o{#kza1fghrw5t4jYY!P9}XH-=%7o=TzPfU9TAmQ<-uH zAqVG=m*@4OTarv_5lB5#YGQu&hV&J`s?@4sOcQ?bH7j|LdZ4{WkYXRP0qJR%=Gkh zWwmI6v{MBoY2uW@bOEW{$1CkAIC$a-Yxx(d&Bo|4WeayKU1M(ZVx-?tHCb zA5CJjn*Yu7z^8Hl?ey9yPBdEd8-5d$!k1k++GiRUZ`Lot#9E=uvWs&fg@`>XRm;iU zPRKg9g?&APdWls7Caa8OpT=Fv(4`<-h8l7hT4yHZdL#0lAdKTho=7J{b7yQ8bnctWLN3Gpu*;ZmRi@DmsR7rxv?9=Kyo_poD zac31H#8b>{t6Ut9(v4555|!#~qpLVBCKRP<8%(V0)@v+Y?n;bOneGtx^A9IR(ISn2 z?+gQ6N-*nsbyo*GP_vEmZApN}Z@cuZI-}aYNP7MC9$=-wkj=HHGjf*g>6uKve(DYZ zxFgxGLkpHCgZJa*?@M4da=hy_(vLJ_>NtkGp{FM`!>zd6DbT2wk0eqo<-DVPWs4r{ zC0DE8lPh}`B=X0~E)}#KPNx$>^IZ*j8*{OqVT=BP9%XWCYUFcA;SpU%xLRa~>py`? zT1S{O?}LX+|3KpgR?7AHcL6oIGafdcSOAUl->D_OmRDIajn3S-6_u|l`__1#D$-Bk zimCxC)3>G{TWS)p6>7Hn4z;k`19G%wb}>w0E+n?XxOHQ_| z(?m0K0eARE9a;aF zTM}(l+eu4Fp9Zsf6SQue9O`*i<^HMTgyoF=RY%IjCHo>b*IsR9s9Ys92LzHXc$F2XYbONF;V+*5 z_(S2+#l0gBivK+s+6Jle8o3x_Yr&y^ZHeG%nuPl+{Z#>@;GY@vebcNDN8`h+zTMo9 zG>OzNB}fmNI+_H_B2?MU|zkfkPyHyYwq>87?Rm&Ds6f z5XH*xr9ZaGrs92pl(sF0ZS81KPOS;(v9<5;q zx>zj$kvU2PgHS7n#5d_`DifqD-l-9f(f?zFhU5b#_4z&j_2CncakE@=l zTdD{ECOR_UP_AixeDPRG2foqNyQg1kLoW5bl)Eks@!yG%gQds&2r$&%Vv~CnA9+O& zZgWoWBaHc0z3^(;mSDQTfRJdVdVPJb14rbG-tyWa9G2R07~NGEuZ0U0!ydhHLdvL5 zL5$7gQf=`LH_=**Zd10Y&Y(Ix&bp+ zT_F&NkeJKGd>{8k`NF86%hp^Ngd6!yU_QcNKHnKnCRwa;AT*4kri(V;k#?VI-@@9# zDUUlXsvw8X7VeJ5cj(dKktJGZg(i|Bx84$`u)Y^Ifpf2k8CqdJ_zy#ODue%N|D0;@ zfz|u%RP2W55VAohf46=1Czw>*%sgQoj0xKz4NIz?SueEFO>7(?d4MY>%vhVJMUg9z zA@}ZGp5I>)qVkYGEnI|g=WJnzv7r6s-G|38jYNWQQ)h8HKtNUTr6{*BfsIzy5o2#> zJxL!4n#_VB##?Yw`-_kdT+Fg~gAWzD9Wqa4Q-Sw2RJ!F!diB-sSfrH)ms1783#=xN z`WBa!8?IzR3abeTc0Hjr*AQRzHoD&xy+$)*~T(s7Oo;)y_m`uCFslvV7KvAOKH=# z<`%l=&ELk1U6vt7?#TyNzZ8sSMPs!U>$=X+^&st~SP3}zG+8^9e{ll0UQRKiJhKhy ztv^!tqUA<8O!&;6A{{A>wEoC5YWXNab9|PRp!?L6K&`W zpAzvXA9KgMGYC$f6K7xt9eW1Lw5IX;#K&=87rVt5JH;~yd>J9|zqWB}nnb9??zxU- z6?C61E2sy-lBtqj-acBo)u?%mXQ?KoefS$BY7~|hIUGHv)3%w%;&#WWMTm3db(a+C zJqeF;M&q?t4~vh-KrWx zG#!FsCHg6?jsk_!jvT%Qwg*;7CLWf=<0G5?cVrI4)$1OS!Z874okTusX7>8lc9c2G z#NoI^62~P-8boEE04`~JyI9kjE4Tr%NIeey9cvW6G^7kP=%tWna*D&nd^aeOh1ozq z4aOR4VhnO!(ADD87dRe_-W1ETwLGe-9knHo@1w zbL{dwoci1cDQO9CMJSxLQUa(c`zL=Yd1Fs~1}8^Zkdk(Rc7Mn`%#=jh6wgCVV_jck zsoJ8Oesqarp*IjyMgk!v^fBo>@h%2}UB*AT9Kewx9K=TDwHhBKLd?j?pY)?W>NPyxMiWb)pRon(B@_p2jZ~ZG7Z(?ZqBU>ZF=Wo~4@}+QGW1 ztM34wkK01GvCTStO`Va6CfmT(I+JlvFD}bH@6+YiZm(~jx4zO3paM!Qp~L#r7Dnrpg7o_cS4(X zuv8Dc<^~V=IX-k8^0gdfXiGyt!FFU5ltDQJ__h-83~S+SX|#D1zWlDU+MdGQ8tG3* zb#V&t*VCfCkf9P}!=UMvji-z3jaV}RqR4n^eA~e{UbOi;WTMwYc^Dbz*fP{-ZONI;qrDb{-A7j56B5PH{wvht>=uLFw+onEZgE3FmC0UyldJ9^yP6a zqtS7Hx?K1gnbbR#0<>tv?m0C8!Z>6=CkVFSF<(JqjBU z{N@lQKk=73nJSF2{N83}p7swG?|oBtH{P5asjJoPblLpCaT;0lxs;pOE1opt0NvCU z`+HY}=70iBJw}6-(*+9DB6Fp7bqMt=2d~K?eV>Xl4;4JhKYMXR{KOK+Jv3UJEJYx^ zuN=p2+`H%q`>vC-L=7@}?+Ut@Bc;eLqcTp2Z=GUFHXd?vZqf#b2bInK0?(P;s%c@6 zul}No2^}TAjk3WPy8$vfu7|!MFTxFxAr0s;-WOk05@gT;+OD@E-ufW-4(EX4ggy0t zhxH_bF;7y|@yA>k{NG~Pspf4wonF)LUb$5*KgF;Ob?xPb0j1eQH+5QCg6NrW37LSS zOLLg6g8{B1Jx<7J%jNv8r#Tf18;>n-MPL4_)d!fm;<1k>F`Rj8cntI(fLzD8FKF~x zqOhMOu@^IN^k!kwzQC1##cWf;A~rDJ-(=Y4+jhCb{}Wc=gj)SLS;&1!d_36mge-yz z;yaRFR)1%7YREH|rt9);^;l(iT0XD_W*hMmGQB4fRDUTR_4=`w!i#W;XY_O=p|(;#E&!*0q%PJ5z7}9FD-$p z_+R`7#X4^zro=+o!%`DJxNhf>kf2orh#&t?ajH8qgmF_^Wjih@)B~&mo!R!55lZ22 z-x7|%p^+gG4YGt1tdb;|W8}ylMA@}w^86=Y68TZX_y;5TWf^jOot*RtiG9ioCE~D) 
zz)QHcKA~ruzLwu(nyIB_iStS{>pM=2^1m7$b1eH0N)zm58?f7EFs6@lEuhF~x7cQK zVNgcF4vYQySlj26;Mg@Ghl~1|xhzo|-i0_Y=mu+tfa9-V6-ZigKV^dc=FOB!~C&J=YXL% z<{4TO?C;Ov{QJ2}$^p%*cAFkOYpO6G%_$yF@aS@M#W6k?G; zr3tG7jMmddzCL&gu-4aCuvI&mLKV$HpbzRHpfrTT1R=9f+GzluC{Ky48m>Ud?d+G0 z#9>I8cgrk&*P^e;JZZ|liJyoXzqp!H{a1%Rz_z9%pw?7~}aRAvoS)u{MQ zbUuieJ~R%?0ws2N6X8f=-?-MyQ*^XKWSP{v6+9n;Zxr$`aI(1J;J@@9{9r5T7DuiI zq#6)8ED2G{Jet;vyYv2OSWHjPkd%UjJml2xrH>x|&1=5*FBXXj8g27!D#?}51WRHy z`l_sV3&01XfmYwfpC4M!cz)m2=WpH*l+}V>sYp36DE*%8tov2J|9 zlJtUIykY$st7C(9UByV zXla;8iQTvq^Z?Z-AyhJEFEbn5j_RtKb-1fMT=&3DbyOTfI#u5x$$JlwN>XV#0D}GW zJd`YGpH6LkddNB)7MxXs9@-jG($9DltwLAU-R}4bber0EZ{s1&KfqR_d0>|J$C1)p{1-NwwKB(tmIOGCNsrMkgnQ?`q*JW1@dv0_ZU!q7QdoQH-~SHI#`W z;@aU%;DlmTrZhU0orYoJ$(^bZvfw#bW=){2Gn@aS?ioM}oNxI|X^&|v9G*xRdrbFa zR-cT(zR+CdG%yc3s~)f_-xT!NacW3G@!6sL8+*oJdKaswTt7RnTsU$HX#ia^eCh(d zyyV|f5j_-KUM%0{>JZVJE?|N|+`HpeqU~3Z2UxfraMjhXmq>(Axwokk=Gzl)2VH&s zM}N6Gws=kCZo`|{?nAHud`3~+KY3eaG&eVcZ(Oiwk-At%w8-%+y<;~2O!U_bjeucz zvj1Ec$j0$=zqoV2rGYTaKVpu~9X9W<%_)VRsjJ%MpTrm~}{g2{IHEitO zUdbt%@ihBCWBEYJ`?!c*b{IoTQKvcirskD(B`Z~J}zId>B(GUkegN)N#FP?}9 zknbj6P=HF#VkbJ;4qk#(5k9aZqqkT6^2p)6NB(4VKNryu7?Gu&8}I0mHjeza=>iC^ zdC;8rzcQOwE-O0XDEuZD8^jTD_=qMom5Sa%5LE!!2GRZkypn|Y(RlW zRrz98H_d8*>V&{!k0Yv|5G9!$T|zj>heVk6Jiiem4>|l{hF7jyx&?t$^<_vU#G1y9 zw2b7kRs(6tv08y(62VAqTydBuQ!0^Zhy)qO1?N%?$H?|%mJi4c;_j`HCC5FUvSr{| zZz$HSYD*d*!toeJdhe=hDyd}>38De|AfY+7gOXab4Z6RUvzpMC2zb%=qW{qyp|sjs zE4P!|X1)^La`_ku1J85jDiezkidv(OO1`k831FC~hjawoYPEfY7s)^e++v9p#?hJ} zwum>C4_0NIvKi>=ILNy zm`5kZ<`UqZrZGJyudZ+lwz}7hvG9oAn}tFlVk&+!jQ;bWg!&T(-r8%)qPNt;K3jf$ z=7O$3luZYS!2v|-juzJhSG??jiK0#Xja>rW6jkbuXOm)OJ6Ybnl~f%ZBXESd4mkB# zbowb=7KtCn<;oE4KydoKS3Oc`+vf>~@OlB7*!GwbU4*x_@_3<|g;0WUK;-VtjHaam zF9gSx*HvLjVERlG_3bzm3}@+;IJ9%vQX=%NEKdfR`;gbRI1C5 zpFYcoF1*XSQqRMSadCWL$oq0$-#dgr@Bx*)=}okI2iayZv@owj)Aw)3bQ2hujaTJ8 zu1X_CUlR%IFMADVbeBw~K~-$n8#}yBOU8^@w_QTkkV?+6CUep%4zLbjhj)%+rA&Ot z=6{Gqqeo~OWxN1>oVW{tu=P!r7SS#H_l}+?#?~C`MNXA*u01`|9{aS+Ms(qzMm#`- zieAEM$Dxe^5W$UhWhPl;$udg^*36 zFZW=IQ8a{)9@3*qh|iCM5>Ae8_i5K4N0_s8lSU_KRIbX*C1489x6A~7syRZNJ|00> zk6I=&jzd&e1cO{Pvy|Lp@eh%M%}oK4j}ZkfegBU%LXzaZs#s-GhflxKHT!$8G;F-` zR&Q86(3l*YmAJHiag8+^E9QUAv0(h>YWunf5KJ#v*kVHqsKH zoG6azw1XnOW969Tur;&}JO7-ET1)b#AFJFeWt}^Ivg`Nkq@)Q`XwG4QT&)X$AdeU< z&FI_FRV&>gUrf}k)T}I;^a+oYGiqJ0VVcT+qk@xkQ?N}#_k(NU@E+ZbE%X=rmj66C zh%-|(i~Wh@V2l5m&L~l8E_IF3^9ln7)q!*ep!Q}x>+d9yz1txv1uXXZA{4pp*5`rI z|LKI)_WNK+J%6+%2a|Y~S#Xrn++BT$`KLf={$*+Se9>=GrLeDpi!mxPF@s77H!OXR z2hOOVL5;PoZW52$^e44?3Aw>Q{+$G&ExM18YD1#bI~$|w6tbVf`-qQUZ9WaRE8zq% zy;p{gmf&gqtkZh1$0NhAQ+>MxZOxLrM`y-fZ5a?(JL0}bsSWdQ-fk&x=PyU_-pQU! 
z=`Va7EcmJ5YmHC4*O0;jxFAJel`9-_+^E&%m(xE&PB=zwoB$&KhqP5Nv8g*t=jUEj z=hu{UKlv?O9dUYaV`71dCv+~458(5GR7}dM3IVFC_&3|e*`%tazAw%EP?D4?h76Ry zqE@T14g{Zx-kJ;)t=0803iX3f(oEd?Po(aru12;eCw}x_8&b6l1`X)3Qc%WD;?j~b6R%}GcoQ2u{5%} z7!G?`mEU0&8x$6H>aR_`#Dat&4*U%guJrF?Qx{mEcdkh#)6KYgT1H#>?8-UjMt0^RhG|Jh zmu=dHEu54D69(ZLA`zB+|21nP(R_Fs`KyG(XCp|M=g%pH$EcMfwFK3?;`^zBLl~0i zZ|lJy#>%z?5^KPeDh+A*H<#t>^qEe`)g?7jwFke8jsV4-?JVC2Yj@aMg;Mj^+D{CA z7Sr){o5eLts9u7&<;ls%vEy9cm&!2}1@BEw8rp2_HQ=g|5F07Zb&qKE86mJf?^8(Z zU{4Xp8S+nzZ1p}j=saVQc!dLc;*xsXvi*HbTb5cLhTzo^KH4n+m)>j zV`HGh{~lFX8fp0gsrcwXRg^l^;9KQrG>_Wujo&37q%Ap)^K5`aDa*@CIgh`l8*hqN z+!Qkgn3TC|qOVtcfwTYF^lxBJtkndU*nrn`a3dGNc}R^|U}yFDEcZOY)%%|5}VZ(VUQn#UGqG4q7KeOtWnpo`HJOFjJ%H-Z&57lhnacnA>?Pb zMYl{~TNcCXJw^fVF&goMw4bYBHH_y}+1IBihtS|6$kh&wPKMVCBDG zWlz2NG;~9S_+Ze#d*?XkYxqy0trk?iYX`Dvtypug00Oh-G=TBwd}a;CF`mI>fx%2anlOvZOTiLW9Pk zl7P-IVHB>{p~pS>g($tUCo zVY+Z(P9vP?J_S`d4d=-A0Gbva>bR8SF+@oFDQZo?`u1~Z?UO#pda?e5Q7WWDl4zyQ zj_mq9a&A!ff>X-!hIO%>ae%W#9fE4<#rx$3DN9mntx_)g46qLoVR9wV-!*Nq$WPhn z9pfuKhE65QwA4-T(xY==p(L}}(Q5qusFO2PE_V=Ty`OQEhe36U2|!&Z3Lh{7Q5IYPApuQ#WTi-!x;;f#Icy(vE>3H(o)Fl+-!U#xrMO|8$3rq(Rwg$ zM2X)?(06PcA^sN)Q8%+2n~%%tAdM-cKXHU(OgK}X=ql$4>o}9dIe){BZ%XL=GRNCQ zfVW6)iT;aNDO`bC%x6;or}P~i*^XPXaXu54s{xuBKrg(9dY{ndIG7zLL5lW^NkV`N z?EKybVIxPrKK|h)b?I33T03J!Q~hY~>ajtw zp`G>C(4p6U86C$vx2DR-RjG38_aJ~7$!?&`|A^w-`s|i+%$P!J<6#Cl11Qk)UqNGo z{>CWG2$57&|$X~ep#^LH_ia=E*+urtJ1l( zYX?2|WZ&mH9stdUUt)O)Y{C=2O$gjsL(cc?S+A?gbKT53_NSRAtITI>Q&LgN{H21` zz=vag>Lz&}CTjX|8X_&15}jlym&iU%yrQV6rC+PzS~R}q?><)B6d9p-TU92`Yt3tR z9p-$;g8`n*t#?)+ypB7@2)FAWC(w~G_pNvZ)x%vsMh>s8&Goe3bg_7WZ@{22+pqsi zn+|>aywAEoColP^L*rJywbmPc6O%lC)}e=W3~;ZPkm~8}vadqgii=*;ZTB)_y`Db5 zrZ3I%nR_iH@qs_eJj>2;kq`3s+WODmCTLi zcv&iy%32nB86q`*ZlY0DJ7>GhX{I1`lQJM?NkWT)IHHUuu_nTq@No1YS~$=BkEXZH zBH&*_KgPwh<46=$765oi;x;9al{HV_9@43ElLp#*MXTm_ezOBUXfviy z(Cv-!Dz)1iI5j{J_)yn+yD@z$3#1HoT*Vi!i6fiKlzUkZ(=4qT`@|FHtcTofCKn2Q zvEm zKvf>~qprInGus^rZxDu5+6S^I98D?* zm*k%446ChJ>X169es~M#DV4!vr(Mdfdd!H1 zybrb5I{L>0R$+Ms=_%WD!u^41j<-zsq=L(Jn#iLg>An!8`eg-YSQ1|jTA(|LRFHF$ z#CYH&rDh9j?5lrt z_fRsw4%)yM%9HCO{m)qm$jOk}5FEmFjavl+RYrnX1OQ$va3E`u6GUO%^dr4>_e-cu zJup&$=}QVePH*|Zypn;t%fo(E$FeWWf+khJBK%D2zBIi>OdL$k=LG!ZzGAeqe*u}r z=#=F___qObLcA6#+8Nj4biE@>zpA8ADs@a{U?LNO8DkG3&qI8E7Km=mT}%?( zCHd$8DzT0T8M517vy?0FgGu;r!Y?9&Dq}-W?~_4xE8w1XBKoJ1_GPo!7vRds_DUsQ zjnBv?7Q6xcKzRd@KrIyEH|xqoWMo3^Sj;kdct}>}Dy|Lo_L##WT8K>L8|+(Xs#Y>% z&#R=vLjuO`b3kFTtGH$%0B_hkF=roOqYgwpv?P?_3)rKo| z%~>AON(X6{?OXM)oS%-r*lU0420573XLy}9Bt_56n2!Cfd%cLy+5ac`>C39xerheP z9(39AuRjDpMFJOfow!8c<1>p}TNvElrOYZ5{qQM<^@X%ni}3o0P$lj zny|&G&wq&ZU(|0(x|;gEWmp5@aWa02+0jIvmN;_g(O~{-T&b&`>f8z=23EmZ!$t}m zR{`QJ%qlc^WQX*BXcKs%`DL%}kaPC^2Ezjd#XE6-l7l(t6 z3uO#&S4CIsK;Dtf;D+hFx`k5?O?v62Wb^m|Up;LrV!(Lc)eqUZIoCBguN@f}Fm84G z;+e9^V40|%aS>_HRA`NZitR7Vd6?JzU&9z$(EK4M%psIICfPYuq(X(=0ObXKjOyyXv4vb3tN5F(=e!vF#6}i9}>`v zq8hyBlpp4rt>>|bjWB5S*)?!;53}@qHqazn3}!tte#U`Pc3dC_^Wd-MQp{yawtU>|r7o_D+b=8h2wX zU)U6v@yeq_RBHJ*=AKd{qLW+1c=!sNt&FFF;UpUPLsP3wTP{E5S1!C}dZ7Jw+V_&B ze+|>Gss^Q!02bxa1ZYTPSa4HAkE<}>l?P*SxWSu}7VP(Ec3j;A5D1O}Qx%2hxC5QF z{kpdJqaKB|>;m?7;^S5#{s$VJgqL&1gs_43;}k`?yOJ(J=I^2umh({XCG8uibQ~>x zpb@j6TbWy&@^pbFoAz#J;==*UI>pHlz0#9cLy1X_uMiLP*!Dj`%{r8BQ0SAAF`f^L^g2Z2gqDam9vs!XSjpE&C3=C{IcH~2V zv_r%_jvk{gO&!7B=FcF-cdVzSOE@kM_NzHzP2c&9>iuWbLPU0yDp5Ia zI8ym?_3+89Mbb84%MZz26BEPJ5mRJElC5^1$TuQQB*<3@TPiC*#Qq@_dVsII5!=+A z?;nCyMR83(s>27x-*h|;v+DoqLXw%z^fOORNjXdVM=Hx-x$`*xuRb~Q?y#^(PEKam z|Lv=joSebFKV7FqZYuemPG{#~2Tr57&P6vV3`41NFY7~pCViv0sisDjK8ZjZsu%p3 zLOmggd>vy*YI$pS{8GTJy$4km%T7{K>0r&#Rsd8As8lN 
zS~p~Tn(0FhCpOGZ=6_f3aLTQp9=kq6O3F2M_?&vrGs5V*87{gy+jxq*wE7~A9_m0B z*Yi7fl?Wk*Rz7q8AK1CY*?i5Eq6@KK74-n%h3qFrl&uw2)Jqb^N|YcO*fFu{epd^j z^2@vGJDJKD^vvOBSweSR{hm|1KJX_I9eHkDPzR)2A*sCm#^;PZc_Jqi1l`$M?YAzx3-*6bVS9O^P%0&>OR#0;gZjpKziKg5#|Efs{^U%y>elpZ#`Ilh(_+ zI#3>L6Shs3UWVW4Gm9TVuA{d-NY_$*zimK(D~#p-clsbl-`l(^#kIq?jo8~ChQ_lE zO=&!Eli`)Fr%wDFN5#K3O7cRbe|(_-=KeMiAF->nPXfp`So;QUO0F?MHUCl2k0pTA zjRt}e#pBDkp??3n2THe1%Fs_cSeJ~y};%hwF%av&qt`tPSHeL%0;0^XTEJe94 z#%e&+$QFp(51Qt_pGJJ|F&x7wWgT9NX8LF4kUn7)E^nx%*@Wv+X3_Vdvg?8yh(lOJD<>UKYgsH}S@abgrdhrkjumZwb0{YQ9hxfn{C@(5_-1qr^@X$a@%_-Ta zIyM8(=n*}WGh3Ys4x}3lUc?JH-^XO7-f61BYCrLo+mQNE)v&2fTL8V58xTKi-<%US zcF6vd{{@1~)XQ?bM7af88hnUjmml6=U9V_>e4cG@i^(|NcLlBkUf*FZD9pKED{ub6 zmQA*sJDq(&LW3TG|BH!m7MjZ6e4UAo0r*`F5YY`jUjicBt07tRCQ?*n~k`HGdi)#2q0qvA*O5D z!uFirL(-eUPcz27)HO@~*Tf_*5R8N4#i(e%i^cJtzh0N+3kwgG*OvQO;u61sp_`Ku z$)6GjPFPz;zhBbhi;2fBF;Vqft?2%p@@B;&+3=B~zOD8-zTjB+Y?=Rlu-Vm6kL25h zO51gorAMjc?|WtU_D!Gl zljl0zr6{qF@ga6EphaiDyS?R@4SVXWRPqW+xo41^s*}kX4ZMI--edQi4t-r1cJn;B zZnzb>^ziiYYff|TVy*yoxQ|%^M=hQ|7o6FTiuSa{FR&+>xF0Rz{_xLc)`IbC z>=nT@jV4<8J3FLy;F`DxE-8SMUxmOsg#+GY0cafiQl`N?ka|Sr)PE8?wK4m1HZ4_G z_v)vP=BjL~0oV zV6B^(G>=4|9!kPQ)XmWpYoCO64O^eKu;3)mKQs2A<&nlmK&$1VNYv1nbFL=z=vQC5!1H8z}zTC+Ntd;0_A7tRm_+c*u zkA@`daPa=BPkPY$rCvD93Dft?y!c|D+E9nx#&V-!FTsEUH~sWPPV;EP#nMA|+f(I3 z@T`l~!b;x$kmtPv>{$Vjsq;H^PS0&}EVS|Z1@qE^R2L44*dabRr<-!^o|Rhr3el7e zdJ#my`kzRc(#hA4Zc;l1*q@oYcu&!4B< z<9BzHosL>o4pFZvMe;d*+GHEyR}$PyvC_U@rG5=D{ZZXEH*^FH$b{0}?m}>^=qv_^ zmcrPP5`NPJr-=i5jq!f)S6kaclDSP6+U~0I8tHH=|LG@f;lHEWA>jRCQ@JL<`(TpO zf?rAEjpB9S5lEYBerr)n{fM?Wy-_?J1yf#5$bVsDeJTS*?nI{MQ zOAbv%|J4~MCQ5!ZU~m%dcT4SADvu;HiP%EA->%fmvgODx3khVr#y=|I?#~}gQQy!c zsmu8e(4^Gk{&g~t2)n9!?uDRmZ^6EJyc{~y!O~gnpwI1MtX;Y0=JD|hCh^<05)88W z-`OdvfJXgs-}SWzwJA!G4o!AohSroyh-C3mSLt=zQHA9?Av^+{SFy|IZ$X zBYNE6gR{X0L2Mb_(2{EN>O6sb=qb*zloOzju`;ux`!Y%KTW!j0_d3oaOw{C?hdnfQ zP#{boiwD7Rdtlsbmbmlwjfkb6@=U4ZKmVD)f0^CTzk^$tvNuG6t~?d{Ec>goAE8;f zb6#xcVP5p4WWH{whTnN0yLV{7x@2ZE+}LQ3nhngX29pDl32o7a)S_L?oK68&g#U@+ z{3B*QNYAD(@3NN+H4W+dzmdP%;O}IQYIaroPWa_slr>S?Z`H>Ui#(sVXzt&!YCVUURLU-}xXGs=Yau6W$&N*0Jm#o^O|)d)Z$UZMEJh zQTuVr@Wq9T=S)jS-N?vDT;2ZspMzfLn*27SV*FqIw3Q;q-HO-`O=eyghVDG2{HApj zMOt4<5$@8Zw?D>8g)Gsa+4h=D`$6`%-L<}-&ad9YLZqg?({{mTX(ZY8*8qL%SMgD-#SO>XMaXzr9ZwZ z0OE!vx9+EYL~a&X?4k#jxiob#*#4X9Scg*0U}`wgQIXr=MQb_mIYq{})3~&h^>%Io zN?VkXO8}xylNns;-qG=F+0ZvfRW4lOYlze-S5-}pO!>q>%CZoC3f~XZDDTx}4;J42 z1A?wd9g{w&o+zjvcFl4#98BTJ{ihhD z26RVI8B#cvWBI5N65m53^;@W~HjAA;{JorbCq=PgtK85pv=!r#A1d-$&g}r;Fxb4v z$iWcSuw8`?F-M8Yh-pJ#ajhY&U{HEFhYbuP72-t&V}D1`9*5Mw@|IJRNsi|f&1LbS$e{NHQuq?M#e zad_H%m@p{>!Q&u~eJeG;Xu)@bcZb3BP6$&F{zms5Vaq_UxMAWp z7g|V33UE=TV)Z@tn!FS7&MM!Z5-hB!##~%qt6VZ76iJoLFf-^|Ry0cfe-%)~QFpaf zuL6i>+^j2zYvdZ6b*>?L9)Yyh%lrzT(Tqj1wOvE4mmK@3vM>MOclnQ@dUo351h0jL zh3<50(*g(NK|3GJsc-ilR+-2c(1aoWrwf}+8`MmWF_G)bnv=8P8dfI@LUgaGzN%y5 z(DM>K5#zAlVAxM0vg297j8+DO?F%#2D3SsJV3U+G`T{&vFVoZ6G>7#(B4{!U-f-w( znOjd@`zXH_qN8ePr(e=mK#|qqJ$ZvGdXVh#S!n2b zh*VnZoBL#%xQDcI&THg>NJ=RQT zF~Ae$SfQ!JP);Sn+Cw@xW%|w-=VXK5;klbKOW*4X&&=lP9D8h(-M%Au#QLC

Ae!bko@Fg!Qh}?6<2V&4sb6Yyrg>=TP5OQ(jm!_i-x(86@|IvC59uopL-h8coyn%XKztCrP^Nl=K0bhRMQHhV>NTgk_8XfqS8$%y&%U7O? zns^N~)EW2I^*H*0ZxkIpwW4M=FZ1L{2EHW_-fuoe#^eOjMA^RNsowp!j)&}69Se1A zlV!b}B-MOj?WuupIJEV|>cB~8^JwX-$0bw1$N$bo@DSJwzc7T~G~Jmd@!}Db60)QX zs1AT%qeQpiYIv1fBaM^u0=FWE&-q^0C*Di;ZOJ?{l*PPLjT9U=U(>r&o$?@AQBYQC zcDKgzN&>JIGK$H}sUGF(_$&UvJEql=kvl0&%d3AX zsE8)zBpUDWxfvgr+;)5T^5MR2Iub$a^b1)Zp~IY9QBoF%J9scECFUvew-qotIMfah z6>q`S5YLd4J#1nxM8TE;!5-XFWqJK#L{UE$nXgr6m3j z$a)-l^2ai~_h+}F;midEI^{9+0F9u*N&HR~Lyyrn%}u0JiXwDShX-l&L~;x^uOsh} z0&gVGi=khal5%%dt6yRqZ+z36DwXl}`K^nZnY2ce?R9PaG^0(;O1$Y`h=P#J57a1zVa>KHJTbwI~09ihVMVN^q99sHQeqsVWALCJ)Mx+3T|ys8t*M;Owe z5Z<}hJHybMT?2t%n$XinB5>^u_wRhtZ<&2g?396x}!oX%& ziJmB*y0QSYYXOd=?>8nL?t}yKnYgdtuziS(l!`H<8M4)Wh3dpYPliw9^!Bk$5~U|p zWYX@nK>(vGbNqGT?9Vf(Ye^4nO_#}r9dq5D%;%e&w1naq`g@M7wdh5#dLvkAr80j+ za#zU7sGN_GexWrIcY|&@5a(CuA=@;VO}5Do)G13G`aF4JOEqx3_2%533SWoTd{_HA zaexj^53|`#qp<+=zXPd=kuM~OXC}w!qbg$HpNF*I%v}`5(XJKaO3?o4^5hFg2J2y# z#V7cZjIm%yvZzeDVKSl_`}#3Y0!SrRfDir#xp~8jK#!hS5?hhS4)~^TR5E$T(83o_ zWI(oJo8ZcVWRW7&AWLzHhg<4EXZZoOL?F6N7J*d>6rx4=_xbrT^zPu_S)ypG(2`M# zIgKs#9Q}`cv$+N*ko?WKULT>k=TrnviW%Hc2bja*@I;0X_MdS2y2ZX?nij=?R8A-) z){o0~Tx{_uAX+5#*VZM|=TYO6|9c*`7Y#X9^4~iw*j=7n%w@JAq^JqiJXn{9Ju{@zhNCYhY+J|&hh+0?E(hiypd#?}LeG&PO zSOt*b!BUAoMFKSl6G8YyA>Ne)EqM(W;OdVe-I&Qw(-G&V+Oic+l~)*Fx4+_=OzZJc zO(!cy*;l}G)Ek2$^)~4r$8M*KXWoJFBntU+6!1%#ug8O3upXW9;VDeoZ@1}y{w-k?Eb6p{=fNusZ@FWdb=`Z{Lme}v5;rYGa zkPg+a$_G^yVQ=|1^HpZW;2wc*5<6X#BN1S(71oT`gc>_WShfpFxSk;fv8J6r&J+_X zfjh(`SRhfap7->sOI;lg<0`d#x zm2}by?xox-Sf}XM#FRwOrWSr(W(+FxU!24ph@eZT0m8j9DVR(^TaP&+`CpCUm}yj7 z7c74`sM<*AnPUi8!k5OT3Uf?hM6h$v9s>9+K_rKSItFS=CtZ{AgAWB_u`3N9N%cvj zfntn}@787gU9!mE5}zo_jLGl*5f=GBlY)1bW7}m^oZjO1Aew!SBjRB;`)4fHB|^kD zKAROY`72nmur_&8+(m29!W%-jRtR&cw6@zcOF zJu#jbLjD;?*4K0LeB8PDQkGVx6p#Ku1jSBF0AdW1FP9bMr!(2n2cw|O-B+9f*&u3j zZ=Kr{PHJ+^CS1dr19+f?K^E%3)}{*gtgr_|*LV%b-%mCDDaYJ51X#3336OL!o#;HwDDFYqN*>`QDTE9)j4?OMiCIu=TG3BKfSoYQ;w{d5x6^grGg4j=!`5XHSyKI4x#&VR>G3Dr0iXNesPn(`24Y zXY;Z@rGuc*uKysZ1&H(zIWhv|ERYu9!rKGwXLN&)kco{i$S3@+|5R|y*Etlpd}N2A zEQ%vZycdk=?QSSArwDN32~^NxwkYzC6)2{ezVQ1qrfcN>QmmJDLL?iJAp!v*#x}p7 zA82p8MqNK?7*Gi1;7EO2wRhpse`#p;Qxc-=#2$|BvWvBCg;05tE=~iH#BP%?34YRc zv88!G#*_+?B4yCeEA^S#9*ow%P71^nI+VmCa7YS&&TbGVjM^g7MH(IFci@?zaMd04 z=;rE$9J%x-8JaI%%K=ZrZcqlZRf$WxGcrTq<{n!0ub4vxI7s=sKjE_c>0N^_Q9!Y$ zA@3{%4=zo;0bgA z`sH7InRJRbnMwFl3oXxR-{V2M$wR(LfR-L^8b*`^vw}WW&n`6%I_54d3 zXj(TRujT4g#n%UqobPsLP2U}K1AgM+R=KoOPsWQ9JhG(1sS1{LRuu2&J{@5(SqN8U zP}ClKy7ULa7)@AZu5J=E0^CY_9ba(7E6xuW7l#!scKlOeq6{Xf@SG-`qPeLl_G&8j zUU?4Ak|EH!$_-N6)}MUQh@InlaAO$03VP+KH`6aTxijCD%zK6Awk2#`K*a!q$zEkL z##D^5mZ%HBFQCaw{I7IT(TMA6y!{Wx@XH{McZrz8X-Sl!L;C{+(2FPI6C6qdrlh?g zPc(I0P!Jt|h_%w?p}}uIa3F(j^S5c3Kl#fG)U#6#qjuTO2uT@B@h2aFXa#;1vTxWq zG4$QEM~UDYzz^Hr2x8*Ck=tz#*apQXBBqy}-ktq)BG&9qsRPM5-82|1Y|Wa6ly)PZ%?zt#_L| zWN7L|k`jgGZUa$Q5kIPX={*!h0RR;Zo zbx&*mA6U1U-^%HRutXR}KTJx=!*GkBNg>Cb9%x4(U~fN~DbixOnO~wf26ZJ^0Bv`I z!Pmqp^7D9>=F76suH9I>nAM!<_w0q2lE*nxG@!Z(rtgq&Ua(T6S^%B8FVImEET~EWIw=_AQ%HjU z*(92|Nz5~^UOP=iGtB4qv?unvLsi99$<21bZ<(Zm@83miVj^|w+lU{jF- z;=#%|aL3U>GjzlHCiEFej4t+fa&3IE=)P|W#6PmhRKf0_nhMPzcaU&Ans z^CYGBas)Qz4wGVoQ{ZkVvkV)o1LhT;3P*Zg;R32`4G&cW!dYaeojf=iIRe{nP5nN8 zLGk#I6)M30FgYULPIyxhfsaRQiQ&JSOU;Nv+RfRnRTw78n;TB>9GqSeB5=Wj7ncbB zIDe6h3sWy}-8oA60tb`F{C_kU1-`yLB^*>f!Ue0UoU974S63p1rUrE7qsZH7@_L|r~V@tAo`rTEDZt=WlZ!b6d90I3Vjfs z*r2_=HdI$v&)5Z^quE<)ePx*y5YyDt>smGq1x*ZH8iH_$R7l%vSam!TXjH|FArRgx z@7$X@V>9cSvN!2COOq5|dB$Ii$%w7{@-=$WSl)|Av|*xZIi6l!^?9yA^h*Yfm-%^vzr2DAW!3U&b?DcdEWKMMHzKAwKeZ zLd-i_lZ%A$%oyfTB^vm6_zMKf{{;1a!w^=E^V=QU?wHG)zW=xq{9oV=1ado77j=2> 
zezx_<|Vjc3P$)5s9 zIN*v>mI>1zV9~^wQt9@D?MwoD+`L2kS#IMxoJfyF%$|nZ9@Eg0!(BYEXbE3ANO@Wc z+f9ErTQSJ=wZaMeE2+Xj#a|&+_!HmYg_mW4k7KnVYar&@%25y;SPDp}*uM!qLDX0O z5PE{Bljts8Z5HG*rRbj(Zt#zLvB={h;D7d8&qrfum|jZNGAd=AbvID`UwNM;1D<`Ac ze)inuh8Z2qNR*mz#Z070*aT8ddZ|9zaXj4OZ1_E^?g+tCdZdiiUYW|KAdN?J$*B9I zk6DqP{Hg83{oVeMy5;LI)JY3E22^aArJk0Zts=sC$lMhb)a(N(#I6nmW=FCT7xVj^ zM#ZyhVN)+28`ETmQRolv)%vb5-l0q=B7C{n{|n|k66Rb>|%6=v0Z>Z)@3wC z{BRCWX#478M-kG~LIAN0pZ*Art%kraUHrS7(<^sI)n(TdZ`JIsll*XSP=iIujun7(YmXT<| zt?20J*pNQ={TbsT8uQFB?7#7iY~jG$i0A(wGXCWQZA zbh$5$E$NRWU_Jp~_>dW4WWicx$03ptc_x0NE^~ zF)~TEpCfPNmDo$QFqmW)$(k4}=H7cU&S`)V$;c`LM>Q(d1}!wBLTS1CXZB$Ye!*d^ zQs?|-6~#WpPTi565x)w?z<=JSg&<&=C6`pib_DIRuf4CBREKlec*(&Dm<8Do6Gsph z?-E=iggF^fwLVlliy4RNAo%H(VONN^^W?c@b;@nEDH_daNR)XPHWs~FW7@Z$&pv;b zd-fXX!V~Ob5>7o(pdCgOx`AzicnhY!_P3a=f9Jig#bRQG&Zj6#M12msB{FuKPncvr zl9TD6?%J28a zdD0oH2{&~`-do;s{)jQ>A+}zs^XITFqC3$0 zP@Y{!DXMS%A|56KhpiI}dH*Q+;`G@3F?w`--dr>S@`1qWGHpE0X94U7u={J~#_4YD z##qRL;`iGwHji~Mc_rJr6?wQ)A)-xR!Tt-J+O4DLHXN+?pR&ilhcxaaor!X75aj0L%_wgGL<^L9m zOFOgwa5WNOM{L{*Yws?EUMS5ZVwc)i0m|u|{hDOceQsrnH}cr@uk?M~hN(Nbb&=~_ zZ$)jP3||%yW%~w!xeZzRotH_odh3Hr7i_de0;;g4m%*)JSAV`3JU@05p$2cVYgC$R zDGfC98ce>`tz8l;BSJ?K>5ur~qqzD!<4bkY92$dTx#p-bep58jE#fT_O;TC62HR}Z zA>P8~&>tTWNV^awh<$N&*0E^ND)7rkVS4vjB7EkMkq`UPXVd9Td$zU5CDr;%bvf_~ zy6nEgxtEvt*6G4Ar&QYUH)3QIU(8$}O$!;Sp=NwvQwceA?`r7&u4~Xph_#7aLueEY zVHcVf0v4GZMw1%<+c=dOfl6{b&V9ZVGDWwBYRAO|VY-ij2t}Cmye}mGKB@-xOX~4v zFk~1w^0AUc>>MR-BISCczF!4F!kF)bLO8t*=vymK`Jz{F!O%e7qbs>0dpKRZ31ucu z#iJ_%+k@zaHw^Y^ICU0=eq4Lh=xpxxhsDr0!G`JaZg9&vkx|r8MmERIDbj~ancb8g zTE!d>8l)xvC5}Y%2Ut-m0wEiMjagL1%N~ zVQ?QTLBQ;s#encO;d=~>*+2{lv_83t#p98ryh`O&m@AVyXj|(0U6u(oV(Orc7wv?( zlt@jb?T*0Uuz&r~tXz2ZnS;z}oLL^6Y106UM#1m0!97?v6edzqtNbwTJW*yO!a$Gd zyRV%AoFVr1Q7oVz8jAgJ)>*=rvv8*kHa4jrMeTj7j-rU|VbPYb{-}dC7O2JXP-<9a z{QYd(h5!Y-Z+$gBiO{-iVlHHLV#0Dn{{r!v|FB?p9>3R$zl^KPh#985>|bt{Si4YIO_f*nc>N5K@~2pvDKx3vKQSh_;HO)BI%67eO6~X_ zUm4$4MOF(&-eEO3xX77Oi-^Y|zlY;cP9b-P8H2#)A58~fk&;-2CTF9@q7NDy|B64A zsrquFl_Kz86PGB<&3|Wain#oa&S^F+zl;K+I^LtHivB};p5w>OlC*g}c?h$^fGOF3xacx1?f&IEwJm`FhHgO5uquvpC?9TBwK z7#f%i>%X$``1b!4bEff7wtF1+czPIH#!M4avM*6tS}YSHGnObhlqD)VA^WH>Wu381 zWJ!i$DB6%TNA}8^u_RJ44o1e320iwn$GPr0bw1~O&inJ`esf=+`@UY>_jUcR|L^&~s?<^u|sKm2Mtn|KOIn#;>G{o{khjPJT$k@(o zikn4#OftV{%L{0g#y!v9IBg@-1o_4{r}JjMtB_pMc4t>P=VMX?psNgN)V0AbtbpU& zg@9dnRhTN48)ZELH$Vk7PNZ9OLH=68=2z3Y1wt>)2rpge+nnIe58GN`HFx_yY4E(B zK{~>0jyrK<^y#@fL5_2IHDfpHK}V1n5{sxcL>^D~dAq+}o!?AxLWGx1J7eU1%8t2S z?DZ!94}l4_T{}Iou8Q)EjtX*A${cLBlWH7hxe3E;PO&5w&naGQkQ^*jm2&(j`U8;y zC&AYnz!vAztXjE{@Bbwk(XC3k!{)eUwL3lrTvM_@(2-yPf_hQ6Qm&utr@(u$QIbfl zIH^DuVNYpmvdExD{@H=g z-Fp(Io2xR&^CJ5@c0Q(!az~&#b_M>x&aMCAG$HM29db>gshL(SDs6}f{pe;OL213$ zZS#;+QL?ZLpdP#{$c#1Qk*~~$r+tGECl}1%WPG*47y15!Z@*QT!RSjM+QG441ej;C zbpDnpw<&!{@t%yc{X3JhW2QY5c1jARu`=GI#&gqgJK_OWnZbSN&#amRbY_{q>#cS1 z*|ZYLewvsp*i>H4&p4|o{lKE81RplS-7&sv^%KDslVt6YiVd?@1&sR#PV62(Gd9%h zktNLZRuPNu6(tqgnKcOIfe51&{dV@JBr~?%<&^T22jk~xGdv1vnoYCgt8y<}Eb#Cr zO}OUqUvM*yqvw=g(wcoaqI_X+2F55hM<-lAjTIdhr<1Oz)t7IFDhYzKq%gHJ-YK=S z#fX28d(^KLep$hw@^MgN>@2w!$0rbAf-=zyy!m|aU2o~qqPohVM-}=oy(>AJsYOAC zn8V{<5r5oVZ5cPqQtei6;gnn;evnbvxaAOT;AI`jTccx^p7;@gd>_WsrwfJ5P=E!s zUh9TFSgBJ~^K2|1J$7ZU|LOZ|B_&k^s%1bmba7>8&6LDJQ%k>KE?e-ob({54oKK99 z&}u=5(bC*Mn1BNi=FKmoQEygbgO~Sdw*~dKDg`Rb##mNmLIDXY-1NC5R#+$kQC$Ee zIZFehb|C@R;U(f7)UoiwsBT;yFs(S|Hgz6wKmE07O8zJeumyD-Ev5=(-`ZpI4l9#T zYi=4>7;1;o)tci*#mo9430{PSTKV0dbf#UAL$(2J7-?XKdVft}O46@}eH%Z~^aCsP z)~$pRd+t3Br6jfSH>)>L{Yjzl3j2_FhHQ!io6a|58hnqEBe4{MqbhrKFX`v!%#092 zOJ_4;{})kw_c{XruzT@8AJ*(sW3w;t4Q!0YD 
z`mwX@s>-FDFn|)X_<{nqBcNg{fF+Gb!i0AM28#vq2pk6i3_>ke zdzOgLghCE#7S8puz57caHJVufi3H(pEBh~P?n%ug#S>EJ46gl$I%ih-T;nI6M7sW6 zo2!rL(K-`_H!EeEwsy`_@XEt^_Q^J((xJMZw&tcEb53VE6w|*qPA?U3kUx*!wVL2= zwm|E=abcHgG3Zuj${LNs`N=MlcOjrv-Kzf0>%L+ONf|L{Z-yd}ph?{X~oo+TDO< zdP35R_gO*MdxxrV7tDsEZ<&YZn|yyLw27IiD6s{wA5Lfhr9RHSxW+sqjbC{HU-{EbS$bbk5<_UtVC#dcta3g~3a zQ%BKdM?t5I@EqhDoOLbJTW5b+v&X-nH?k93>U3((QY>z_w4dg$qIK%u6}(KzIECp* znum~2i1lTPWvU_t-+qB)z1BKh^3Jm|IkZ3kR}WqnK6%j-T{4vK>xp6*hoFuAnyZ`d zf`~&Xa^eZj@>0>X%M4t9p^zw5>7k55iaw1!rBJd>z8AYkkDY0Qh`_=j#qp#}^krf6 z85kyZz{=Ku_n6p#TE>6|h~?Nw8UR-Bu7jx#b^6zqD#UuUMk5#L%*E&po7*6(?Sp|K z?DzMYJ4Fbl3`>s~>GULCv9{oq<3k57`D_;1FKtTWG@$2`1ESl~eZd>NxuYN?$`kzV zl`O2v^{}vA7fAfuQrCLy>H8za=}LY6tG~-}!t}pN8BPAy%z3p+W+rYSvyQ70zAf7w zJx<)B!uUq-LZS|Y8C)jI-W3!TH$Gop`&o6i_T{m>&eZIA-k|oPAfYUWT8>MM^zJRX zQ22?>&!HDPKH-fX-~yrP0D;xi?e&A>LOjwKHQr&^{+pILr>7-x!#+c<4dd59c*VIP zPTI`1^_T0_S#an5`2EZ75`Dz2Ty(&(&4pNJtj>4(%#c@vop0^IU7D#fo-ayPtE7gL z9xcjyGUA%1T8~r$2z2O86fiY2-TA->`7^hRewK9|8@ z6s4A{C_{6@c^?wUvQ~7O13;`J3!hdT%Zf0fDc(@u@fIEr37WAEhomiz#mMXi*h>5I zcB|5sj&LQHX`6R!o6YWbNQ zWufb74J;1qL+GBok$LjEEV|0V(4m!*z6Yxo{{FGbZy&!Qqvy?R?piK!A@r$?959&8RSU;h~`I!|!&_@o~;_{hF^B zlKA1rgF8L5v)$AE^(7fi1u+>-F-#Am`Co0G#{?@ayqI9(Q%)~7($bK0Vb)#8Amk<1 z(kV!?%(bsWN=tQ4_;lSpO-E>-6Z0yWNvzKX334Zg3|Ykg7*`d)M~OnU zI2&Q_C^K#$`5~8)(3MEW{qvJ+JCipd_-rYGew^IVNk$rND`=M%a!g1_$YPyi&sJSs z-O*A*g#}fvQn5+f-^m>5Pkhb}49IOlN>=BaKgNbrUv3OyQ}8*>m1wsGTph2r3A1zX zkdTm^iXI&uiHeH)r=b}%xUi~;4Qm}Ti;(XilRBE1WY^X4#_1z8x16-!Cae_Soc7?k zFRLTY=@m;rB|h=HCPYWicLWE6k4pMI^y16iRmY=s!MAVU8eG>dPfsPtdHJ0e^Yt{7 z6R3o#Mf^CpxI9i)+uGaPMH*o5zkEqtf1*sZMg&D9%xG=Io-aq3V8$udA}1yFy*$(} zNG~vHdt(}Ovs3T9)P8?y7#tj&7DFf+{1EurS0S-gjIOxJbC=HZ=6olfg5M@1w)M|< z!$cZ!G51ZGLX|APv}WzMfA~l2(#OZgJ%5f$zWMWOuF)w8d8hA6O1?4Z z{*9}v>o$+;YG7Y1@%&I<9O;h=QyRf={MixtFf<%Jq-3latjN3k+z9Ya*#$ePuTk6N z?d& zFM`5;eIp%`qBD-FZQ{;f6W@yIpmQ2Mk2Gz(8Cxf8s{SC{*20Fc0n>2c(#c; zOQR8u_D+c<;A~SiuuItYSORsu{#1KVVibu6QI|^5)6>(i)-F`3uGHcWnHwA24(v%t z;X5oQ5fP(fAa$bkd8<`i-3PJnnXF7zoZIzX3^$8Yku?L6o z#P32*%@=F!ImSju-Euc1hCZ>uW_BmOY(3i~x;j~-vGut=-AGa>KtFo&&dS=l&Sj-p zx5DJKb)A%6^Qc5GGjuK&=`ty!xVTs^ykN)d3>Aw6o(H42z1jWp@rz`^c_QlLBNs%| ze;6(jc#Z$X4Z2hK0dXi@(DP>qBJyNIWnALiX94%z#ewHjPr4lYcA{6YKQ8_p`?N8( zxwzR!jB7L*t$$h$;vJU|2b*EjM|6vHP^Kg3-tAcvE6Mj#&n;{G`ArnspjRp?{Xq<2O88L{{4ZD~D z&Q{?t!Ty-7>69BpsQPtz?vBeM$g1*TNVZ*ZzPZ?AgGpMq>)9K%2a+|1*x*H>z97h9 zptzE&#oh?f>%>tn!?4vWW%^FnREF_(f zc8@`RF0+O9v4l{0rlFN-y?`qV@FH|Uv=XiRdwby@Lh?eR(U|cG2w)B%WuKtIR2Q$5irJ)G+(Ek;naCm6FEECN$G>|s=OV3c5JWfNylE?t7=GfjXV2X7c%Vn1Ky0F^voUFl%}Sk1+R3*C2Hu?wh&+GCMegt%8+8a> zokZQUiBCs+1BFOE^V*GoLbA-S&IfP* zT|T2hk&F)C4ue9HlLGNkXbGf;kBurMLUOCk2EVV?*s89@itYPnXynPo%f^wg z>Q#pP9HD=Q6t>dn4X~rpZ3Apv(A}AQwk+f6YQV|a?8tJncih<3&feZr0)lRv&%q!c ztF1@&d|hOkB8)$yeMk&NM!)Z)M$wG} zW3+vG-h{`^&3&oC4j4|u>KiN!EUdVzwcrlNAK$9ab%+1{Ihbp=Dq2RG1L^rS*C&Ko zyJS*=Kz)4b3d#E9T+-9q&CN{kkobF+khfF-1RJ}yzCN;e-#D#Qx5CCl)82eB=i*xo zpeleGloUx#JSu4+T)fBgN@V$!+!2UB3o41v(IUhu>6<|va{w!}MEkt`LKKyp&uNfr z&H;V7duQaM_?rvB(bP>iZP|l~uD&|*grVb6>*?!PH6WfF5fTDYbu;4UFjwNe*&EZ^ zDx#{YI;ZO-VNFL*Uq#?DBH@|IVcs46gdPA|7#+)8NATlnTR_d#V$KjCr#}JQE-WmR z@Twyoaf^uDfe=^Dga?um&Lkh;rkIr53rmzS4^#@vyc z^+-=jysx!|(^w5{=9LM?SC0&#AfIu$x)zWy2s$A;9>gTSf{E9b_P~rK1_r?R715Hk>VPy@gJMq zw<62SSwG`8a$A>Exh0F;9&$fF8XO!9@q8u@vA4H(ibkoVgfY5SR^gqUcd@%sZD4(b zGt*3Di-s|FQov}`LD&NZ+~RveXG)js_vTIR9^mnSyq7Uh;?HF&iNU#mGDu8J48FVO z6k!P$3BLdHantcHaZGG%_eMw$(u)Y&RGdk6d3pIgq6X_l#E!7H^e_&Ri&VTYgqzc= zEBP?nT8Ft`y)lGt=t4e+(dnCj7eq=wAtda{4(WWcNs1z@glbk&9`i>+3`z+qGO~4J zQr9sau9Ldboq?}>?w3`zoDfToB&x_Lgwjo+9hO)9#r?Jj%A9TyW5hC3!w 
zX;&C}&xiMpQJa2IbUc@mDXR1H=tBc9k={(@$>TjEASBFyX;ql4KwXDkMC&HDpG!gp zrl&m?s;vRx8Wr)_9t5OHFBAIi2?I^)U&(V_IS}GGR6zKEY|7%c#kEIQ=Y+5CXmfb& zO@f@jB4tx}Mv%6(w>Mv;PJfekp9!zR%MFyW{12S={|9vA-{Pa>kp36IzQ=<>W&STX z%d9rmRaREk*7gK7F$aZc!|9KYkFQ6^#v)#H zd%mPp67x!DgDi#)gPJ%rG~_nt_wBHc4WvWl1*mX3>szw;QW7(MmxpBJuK@;3_El$m z8^-vTB#v#qC)5ppBpUW%@UN4&5E%yt<_RuN!0&wJ;>imbMS~yjuM)){(m#CYIr@Pk zG6X;qD2ILRV~v3XDgdG?p8i@MpFe+wLqQ}j-d})IsUhVa25A0|bv2m_Ooj(vZ=iE? z*Q~Fb@x81R+q;}>2I46Jmg^H99!6||8>r7#|KFF#D{;tYE52)-ub7Ez zooQ%j80qYm8_ygwIG%jcd1H{v( z2A>Kxy<{>w9Q6x#7H}5^Tp`7&FSX7~mz#Y+T{k)SYO~Sc;^IPh)zLR2$^oYy(oxl- zbK4;zCf?B4sK?C7$&sRd+vvX4U!q;wNc*%l;^y}*;M`Q@PN1YTLXu-%`kbzJxyeBt zUZ=4nH#lZZ()vmEXkw>L5;;3NCsGObAAJ<`{zc^T`hmobRRU? z$&l&wd$_wvNg+bvRU^@oL&7-+X8`nD*rc9;L3OIc4>qGlgd;O7sr{<;``fD>EF>PY zB6jkQxHzl?@{!r_MXHcv3G)THjazfiMkvn_GqP~x=luL5TPPPvA_G_duoCh%pBnGZ2|F{`kLD)8Oxm4AzOk!*rAq~yM(Z^7qXABIv zxDC9>tTLt2$^wwnvw3wnRa|G7aENN4zZF*^Ry%e$vNHi4th{F`sXR}h+bV3yZuJyP z4V{6j3Xf$P8IMM+*JR@*LOmaA(F$TCo?aSi{B`eN-no?WurLYiV>};ZVdUWOU4m2$ zBfi+ISbP`^)}KH%f+H$1VASAZeEkDR+b-F-Z+`(v|Hr=L!eW#_HuUM@#z-Uu!kd^A zsPnqu=9TsNBt%RR$PQhKfSx0CtpA!TwX>O&b%JCgEsD4EwX-HALOXXP2PbRoH3Mza z@=$7#IkWG?$eQ2IwG7VZ8fo(6*rP+qu1yXjDK89PE2G5%z>KsjRsO^`@7&ZPqs2Vw z=BDkrMGPx-0hYo)J%WNW7{Ge^OuXRnejx4*uNP=^~W;fir(^g~GM>jVzjI(&C9U9o3U3U1(6P zE|^`ehj$2fZs{-X*<%C`y*t<``Y|IRgEB9fo80M<+jw_R5A9dLWi~IPibN`Phx6Q- z3S>jlGjUW?n7~1gUYTt(D>>RR}5s z^;oaEVUxUNGG=*wQ-^v;^+i7DdddPp*Wm(Ony;0UK7GP^#BbM&!lg<-D0+%Okcz@X zxv@(%hFR&N%2|$QM+BT5*kc?V@jW6_Ok!e`pxe4cIAk=8BtD&@0h>j@dYI^X+pb}r z`|=$XWk6Bql50ns>stHRC;s0hDC+u8+_(4`86nP?i(N+q>kOwUX6v&P8QcSliv~yW zhWP+70{;Bc);C@;OHQ6#Yn@+Q?Dp36>hbXK=4bzbad-o-R($QsflVKvYmjxGphd%qPJGNb|2F!d_p1BBzNNSm( zJXjs4BI2#zo^&)+RszNsxydT!Ub!w?6o~M80q?RZ-3_(av?IvFqdL2X8^zp9LU{5k zoT>zmrcKV_$2T1DPN?|&4}CENqVTU+bP!hf+vXWBNe3ooW+|2dwD9%xR|OjBv0KF` z>VlGal?@HP-rlk^5_us$kU*DCg(>GlX$%>)Q<>A4yswYt_Am~g$(U}MD67t`qWSJF ze8N_I6iEaI8d$c_a|lc=;{TD4l9n_T9+@66=rD49eZ5ElF8M#1$^Kt5l*dc4HR#v( z@}&k>Q@#UVBco;D_1YwT@#b~>UIt8FJ=X6xpxjR{KM|s}+S8hJHDNww503t!VrFKh zrlzK+HxGz5pvp0yfh5}9+1VGI2>bHIR6$`d^`&m5O|g~`;B@ozdVDL-Xi?^FB#5WX zo8gMjY36?M?h}xk>nH6~Vut`FMj+j~kr9E-}IbE$6^h&DF4t4zn3HkQK%&_9U$efUJt z4EO2UykE;&KCgMCOWM{?jR89FWT8fWzgz-PdB8i4W(x5dWJ5#tn!8JzlH_ptz0ljS zRfnc8xYZ*~&Zlx^<0?8H-fVP7C|g4jHD_*3hf~UK8zz5RHn1}>C-G3+xnhuTTDgj zlj?lO@#63qhw&n*uhCqaVco4ma6@?&l}@%_jV$SQ>q0A*5phScBXYZ-cEi=v?b##3 z+5IT2mHNKA`w5ykih3Dbk>Ez;v9FKt`q~3k^|Z?blKRUc%Of=r*#f7R=tK>h%is7F z+ae=svpXn8x@#&ct2B+(giD0i9FB2W4C~TJr%ifnXpKvMufNY^KMHx0oRw96fs6l7 zHpMoYL*Vj_EKRi7y1GAr`+CnTeYRtzFhj|YoqT>4{CE{C&;`-g)X>1kq^mOPLIQO) zT_F!b?5FK*Wn*)mf@u&NZsuYvVq|npH_-fMOvkfcw|GD3>Id)FpJ`*_7GxYS1*V;czJPfapj~G9CgVT*O_v8ywdOhzyra4cQK=SI7bW!Es#fLgMEE{3kzfz zM3Rzf$}?NfPVXm=7?rQE2{SgArDV_{LQbE5<(mivvNaCO?EU8Bb)@ew`QlF1A$lO_ zMn0``$!q0}xYnB&7X_yucMkAf@#+cKzJo2ZG_saK?AWd6XIX+c3nI&jkbRNA{M6wa zC5m3mnwSX84BGlEO&C1DaHKrOYBjE~FRV5WM45hjpdcHuKO_uA$qF-gu?R4J0HQ|) z4moGIp?fSecH2O#Q}a3yQ3M8ovZbLx@bb+wWDKHs&^iIK{&0byj);T=_#65`F#rTE zE=DG{zL^*ZY!7cH;*(oF?}xr%zeg0a<%9I)*&K0_aeB!^4cScrYpWUq^rJu|ki{&6cGcJxTZl^!je%xXiOaJk8 zlr^vSeCYX5PZS;=wFsl@Hw8)oH-wDQ+0^iGlwMLrR~Z)Kf*c&a)h=f#ZVTAQj)%Wk zbg=Y@_{7M_NZ=wbx4#Xz|NDm{IfhP4f>+-=9;j=Dek?MD=m6t*hDkxvpF4iIna6^5 zk-MGy&h7J#_PSXgQ_V~#8g5=3JUO2inhX6|b;InR@l#`{Mhk1nVU=cFHeanel?>C+ z-7Ky&w-E{`-mzSkN#3su@_6wd3HQ85cX^NZ&skZDHm-aOw)bI&7Hi6dRE!Nqt$tNN zyRhs5>d!t&^_>>u%IGQRms)R;OE^~&0!Pyaj}bRP%R>e?%VoZjmY@GBj=de&SHQF~ z25ugv72XLcK{O3wSg2L=;Y>hAhxw63UW4B9rds#AqA4X3Uv<2PX< zr6|I_u6snc(%8B7y8z5*TtCahWZ5MCy+D3p;qUsF$ZV|O=`V}wF44b5&d`AI53X3vmzgk_-32_-_8y`{)}Y(c-`WI))#OVqOjkE&khW*@3Jz(4Fim)`j6($ggW4ubb0!}sgKR-Gb 
zoAxBk?>=S9$r6Q;rTUG-SzB*Op^a(IprXfoj@kRBh1Yl{QS=y{eiFg}I!H-K(io6qK$AxT6%_k{ml(*++<=Q*Utfd%D8sX7(DLiDkJ(T6 zH49Z)zhlnenxVZweL!glH%GYfYpaP*#T@4+3Z>v$SBj?;d_Pp^c6WcD;j8=-T)^I@CYe`eSP+~YFc#p@ddugeJoN_#azaLK|a|=~a&WOv)SO zvzS^5RLN7MkKw57pi^nk9`vh_aL(mW(CY{rikHO6=Q+DhmT0t(@eX*0akedH(0EE0NYCXwL;-Ov!V6WH+UL@&s{8zrls7RN^U$9~sY5`Wc;^*u@v;QD}N` zBYBD7;@QOo4>z}P(4D|#Mw_|Jc(G<-l0Rrr7Pu33S3Y&C=X^GQVS$~W92TDR{Tokh zm!^z806(-0umVx>bs|`yYrL?az}{Wvk8cRZ(e|X1W&gYJbMvBw{)>Tg|F4d6T>SXO zHqYpzD7rgQOZZM1OrHPeF&O+fXPqqHXfP%lEVu0v1h24e2#F=j;j$CT85vP-_pGh0 z4G))S7OIHoBK!OMD{j$;=fD^G-w`EC7gAt@C9);hN2zVo^KI5O<9ya(^BTYm@2hY$ zHxmcCMHra!fWXc>yzR&REaJ}vg|7GDiEs0n3)t0j(#uvveSbOaWclad#~0B6zB`8I z7gq)n72}#<*$su__l(!Ynrho=y7CXyNJj*t{m(Dn$nqi0=E)~!3;TWp4PahgZ*Y=- zWy&j1K^NgL(A?Y{v=!m+|BYIp_Lf?Ah9HVqTT}z}m`xxDjQzTc;Fy>g;J|G~vMqN6 z)8O*UTrhEy5b9NHG&JWq3fWRb!qzjStX!r^e>f`Y-1 zYE$ZXFoD3%&MqdFt&Jb`|0DF5T3$Pdi%+f?o6U1R7XS1QQQOv@H&eXauB#U($-Mvl zUWeA#L_sY2M@&@IzKp~5b zjt&VvIyu2(+UOHPdfttRK6rY5&Xq>dApSM@z?gBbpH-$;c+Iin=*uz*0}a~KTgKg* z_~P7&R`b6suZt`?7y45;4i6>e{?0O4kYp2tsHBp;8H!5&mr$}kHu?r9a_I0q_^c&cUJ}0a35JE~Juf6_w3UK;9 z?(R+gzZq_2gBQANDLn0y^j8E&x|&MLGi3Wc_rVRi#JN`MQ2UhV|5q$3mI#)Ope7(tCCD4j_9Lic?l>ll;TR%VtI;g-wf4JUC zbT0sP+{)(HW!wg)1ehQJJoM{d2dmFSln(Byaa{P;pM`x@9WxM*kxEi&X{ijIKB)hTDqEoQb{(#1W@Cd^ z0ZdJu{r!DV2Xw~F|I~&2v6OziU3+v}*!UF{7n+Sp%kP8yH{x_@agkgxM<8qg*z~m9 z(T}1cB5+=jhH0~Z@eyKGGc*uh>jPL&Hl3oh?TrMDqYQXe%Im)e6)oNem%xjnM`L2q zig8v9S*5)QDMUDxU6(8Vd0J9{h?*P~pdi8J#zddiB`IAN)q|vMQQ%QsT@5^uN?8|O z^3!trdd@9buxf_`SarjoSB%A=7P1lZ;lqbCMiQN`2Nvzt!gZEpL&0s(i^47klDIVoIJxwC5|oH~3z zx6nTMV}C>J#sQN*bCg<|El$O7F^lHIL#J!fQ)|C(*kxyI+FA)DMI| zyzBT+!c$JzQYyv8FA>?Ac(R1v6amjJHmT&8+5|)7Vm9!n^%tIMc0kZQo9r5P^?Cxj z)WBiD8o7P$`_Xy2i}66>!^aNLGzb5z@db*JMf3eG;xZNT|ov^m^BdnL~$ zQ+N+wcuG;?H&SPhi^YmJ6VuVq4TwSRU%h%odzAEV3~AGR=x8Zkj4F2N~$SaFviEe^%qwYa-m3GVLP^xpd; zlV>JB^30jD*V$*ScfwVbWwG9nya50JSn_gG>Hq*D8UTQBf`$se@_>G24F5uKQJ4J$ zC?6x+ho2x@NGM4F097#OrgMOl`-jEqgEI#1tY)s45_N7o|L?TJ+QZX;;;^?Cg@iF;hQ2I{c}G4t_h z{p>n3nVG`we^Ft>SUX$S!J!Af^?s&h;u;6CT_EkM%mubcMhEP5Fpvm;BME(VX_wHy0 z*p6I*;=m{bd#t`CZit9Pi;_3pU7NxpX$_)a8d`w`6Q7zc$FVs49jsf&a2#7rCsN;t zi8by)6gAQMp`<@{=r>k^O@OHKj2@rfTbTnD&Uw!qVcTNIN9V~ z(c7k^X*F3Em3c#NJ1iw7@8ddc$bck;`iV!!F<+6-h5Fd&yn&W|GoKkp z`v)@YD`%ah|1PJi=Aev?Z1N#DT3t9%twiA7pS zo04k8(OS#i5GX32kG)q0VHGWs%%i#{A<!6jQNj0#TpAf%+hn0@&Wz@6~+9_n*d#H!<<;=Mg0eB_j zYg1OI)pWm;L4Hz{yEx&B-EBho_*JMN!>(Qjc2z8h7UVi9{XQxC`N)EIPEFQ>-5UE7 zG-!P)qf~obZavo>Ddp2&6PP)|C2z`rRr^DC^?oXRM4tTPLd`l*FEf7MsY z!LDuK>eaMaf-yu-S_Yoeb7m%5-0{ zg1y{!_(!?bf+qh`zTU}a1{GVDmR;dgN9-wpAJiAUPupyM=SJ_KUPF=Gc1M&+UDHnZ z5jmYH*|fo))=hUDVl1TWjrm7jx5vZX|B*JG+UH1Sp6$D#U=(-Ua&{S)?|U3A;+RDHKDrn$P!?|S*(z?3Q@QM37OhW!00 zMyQE&@#0*x1j$T;@G6UH{g2{{HX1@p=%@Jqx^{$?3Sy=s3UMSMm3PT;)>Qjk`+k&3C-G32ueaTK~?S z{qHW$w(8G2Gf$!lZYd(ej2m#~-gNd$tp#p+`Jf@cJG9L3T07t|z36jOwd?x$x_@^Qv@Va%TN<^}NNc z)x|oWvyfebCcIuXhGySGnK6qGXHW?d5%K#`@IWy!9O|Y3_B?l=Bdws}y7yVa(u+0u z#V?e0s^5n{m@?!_yyTP@cO&r-d#f5?c*k^6Vf6wZ(u3)lHCI^M=?kvI>(|ugPfIRe za%>!(iNTQxp~CYvt(fy73`hTQ^16xQ6H$Wo*@bp-U36oFHuGW!svlLVvho9bXj5iVQtT+()1I)NY25CNl_cqQfX|8$*S^KoUSi#yzxl zZh8LC^ZwWE=?tRh;jrgZe}BkA4}})C&{1W&*W=W{K$u1I9N7Qa`L<=f%{fKowrSkg z97eSg7p*E=lWSM;Y)4KU5zL)MG$$9UfPSR!a=s4>QBdI!vhOH^dhq2bwf4#Ju1#49 zJZ{sz^1gqTCX-3AwKnDcHB{T^3A(k#g^YszS(#QG_rZC8?G>lqFzhUs7pSMcBSv~iyTsOywGtnqI&%Wd1K z@2LMD7DKg{OR?8!W6NQ-2G+uCKd(i<$76Dabqle_MY9VfI`>ZhL%nro|EIr{!`dAW znht)4mgc?bAEpxl%*TBaL=;vYZ+-8F!G;H^Sm!w1^!?edPrC{1EN4QGDgO5<7x&}- z?LK>wl)Bb~5$jtSYghEIqD}&+mo>r{A^fdfR;RwF-oq5$M`{|$`QGb~Yp@8G@-u@^ zI9?M$%=|o>#1%d;mNr?W!e^K4M<^u2p&bHwJ_MiM!D|ZPb%z=%umtRXA;`XyT6CK? 
z7+v>R_Z@y*yA_>3&OG(8Saj+qA3e3SR|mO0cr+h=IPjj1TlYEs0Ar4NY3NwyLkya0 zQs5leUNFskq52r5=y$c^I{Ie|z?zmH_6~{~H#BDd!Ya7pc#Q%pxqQuQ)x$|_W3_AC zM1{r0covZd0mU`~sy9&&AD@=ATUDQWlG^=}YyKtotp~eg#TLux=?dM3Kk0X{xU$(d9(12wZ7PRUshRsOIEWkX z931Q{t+`%H!y+zSpX)R3mUYs-u)8!VkN}?| z+cfd(HiKBzimTXO^n>dt?|kCFabF`s8gQ)CA%s6;)FpURFs!L$pSV1;7O#hGu(a9t&*lLDumw!#@el=W)#)w|jLe)om`4}}+$ujiEQ zhn5Yi?ltUaz*iIw&%jqO+Fw&L|EF_9uHG*CwP%$Pr2_=C91&%U3VVI=(J4gQJ2w8+ zoah%4C3o_a<2zvZI<8l0)R_U0*{0~$JIvm_o15_pvuEL#^J=~Y)A+-f5V2kRB0cOp z#!pJ{Ab*?%xN>XwunKEf4ZB0QZEKj0lN6n?&0O1o6^?J;>YLGP@={0LSTcJLbDkMF zGoHBm&gS(3$B?e-`6)ErSEz$l^NY=XquiGFNPX~4qc(VD<6G>^z+8;!)C&4=**APP zfEeyH>N;SEDlOV4JDl|XMH(_U&r9Cw7i7lDWK-58vo4WS^Gou3JG@AfGU2W4QtmMs z0}jMOGJr2P<<$T6(H*oGo0h-ed%HTUW09$;EOI@wcEy3+&EI~G6{;tCWP88YI>;YK zv~OQMNKbLS#X3=Ju3MO!hC9ERsG)prE{@%Owu4wl988G7?9|ZVPJ2^qvtesEKW_Nv*o;xR48hh%?ad`8cU1i+wm}4v7#khgCOhOa$H}k``BBJ zNP7={+IpY-cZ_Ol!qrTX&K$+j+3{QInoDT2-S-7Ga=Gc8k<~Nk30Ut ztz(U;ee(k%%=oI~=+yl0f22u;qWe`a!$)%~XAlj!;P&8rVZNQ;UmHW+wfp4xM32_{ zS=h0Imz&^$**FJK?CeG1^|awEZ#FYhaM6LHiCHLh6Euf7UR>9sFRFQjDZKc z_i+}nu=?5=$0?IVrf>PDAYx!KjFN7f{vsjeu%8^QLBH^jVXn1WoId6t5~-6*t(?}* zb-FG^hi~42Mp~@e`?yJOUBbw)Y>w#v)g@!C>&0)CiEd7um# z!`v``XNK#jv$Zv+6;b2#v$DcswpKeSXjvW!F?#MX@OnG;I@aOMCovQL>+?JOAA! z3`L|E;`v=PHkJ&_B~5Aei^8>(Wm;af%c#CoURHS}3)Ylg`P13Vs!<-)%Y~xx>$Y`Q z1Z#%?y?>)$?8WJR0|*z#E5UI{^!+x{(IEfAN4{^&*Z&q(*%ptMRWX^5xN0VN zRp4g=UhbJk>tDD`<6A&OG1wbBJ2L-?D=f;v$Hd_wJFN(HLqKVyLwEcHa@8lE(VJ_7#Q90~*iavx#9C zVbT=rB|c(VX@Ouc^Me=!^-1Px4pkl%Svr>(*SxKmY!P)mDMJ#w-9Ak!|z6Dfs zPS<7hBCJEO_J}$FDp#0wGw=M=&;hnj?r9%h^Ls7O9ysJ;r>+zOuIYozwbZP9mU5Q^Uy*&v|7K|tnYTs3u}CRHWWOBUh;f4 zEPT2wHMC_geSKW1@p(zDNOBo(>4b?K{R~ZvdT;-gz3J-W;`(og9((KERP`FjCPlmb zstM-b;r;#gbL5`?wgp@jV4-3!O~<9^ijIdp|Z;h zHp}7d2U!P-sg<`L98^!rFspzJvL5t?gdU-nY|9z?p$?x?#zpamAfe~H(gcrt=oYksc3+%&-A71+$J;6{z^?ZLm~noa(;YsL@_PIy#++xbJU|!eg~v_<4{65 zKZM^v&Bf9iGBa2-N`pB@7h8Bv|AJ~RDS+@KBd{?4U1R2I%jcqH`$n$d3E_Iab%z&K z?`CR4xGFB;nS<1>HZFymekpi*sLS9jtG82S)7g{)N8P260Wq%l*a*U~(}1DhJ*3+ITc}UW9ox-FVXwSj#y7j7^$WN8^_$qF?>aOV^Vb#5UE; zus8-$wmkR7Th?Ds-LYk6JVapOG3;W`e=3GDQ05A=sd4?gbJ=9`2OIY3NmKkE_X>xr zz(Vb04;w*4>_Vq|dO(daSRHizdWbj= zJN`osey{x(p$RH(kf6HzRe6RK^7>`1Xz!cxDom%Y|K7Jmb(R#03BuZ*_i&QI>T7W^ z>e-!c4!Q!yJS;6`Fis{SC%z zZ^)2A8TA^z(lKem?*qNt_D7b^J6<0Psl)j zSm>BziOjgVWUSJop^BP)ym82=Anz-7z&vpvUqPDC3I6i(+|WdxZ+0wIl?qslAJ z^USwT$vWn#<(PY+VoyKGk&jH|k#tU)a}>>eQa>24_nP){;VEDU%-Eio)rYjuH5cZN zDUqnBtWkLOuzs%+Upb%rBLc{V7AQdz9WgH~w^2d3xytt5zsUlXhbU&mAUc3sWg%Pb zn~u}AAwq5aeY0Q=vPZY(s9}yO_n8T(*cow_aJ^=RQOoyqd4N&EZwX$n}p8 z`TIoU`E>jSKkwX99KT+VxC|8+W*Xis+Qb+N`h%`H+%3IewpC5jsiSu+$@G%lRiajz* zi*EPmj=?e_*srhvtGc(p#)V(KD=wP#n+@~HN8;U@x;8YT4-a5Uj(L4=U%|(fOXRS>@ILKGlj3rqgUP~c z8k{}hR)@;P`myy?GTZT9X+Jr`VaLjqEcFLR+lT`0ZkE5Xj)Ormo#B@k&5IQD)}KAt z5b;I(fH5qw|Z0F>@m;$PK2Y5LMuDI^8i9h}QAswbRxxCII`VFSkSYS6ZX ztb_W7t;C+yn;viU2+|Vk<4yi>;f=|UOBkgE=%HCiMjCo1OUROs25a?7q(*~)KXZkI zHmSDnk<$3e1yslGVA{jV%XU9iDFt+wW~8jae&4VI4u4;=vv_o@=Vm7Q7o$Void<@g zS%mCciYsOLlrH>%jf&FiDH_gubfk2oaTL~GCTJcXJ#oIvoVM{~@Qv5tjz7V5C1dTS z!pzlnUn%aetfd^me?ns8M&yRIayP&%=<&G0;Xb1W=bT5hX2SPg04?UerGv@lcU8_) z$2v66nF2gK$X59CL$&eaMP#9Hd+R;{u-nb@FmAeOJ`|bSCC62~rmTJSt(u!s6|3!4z~Jy4n;t3`I<-`7ODlHWCm}Ok z@m}}#bU?uQzT{o8I-LFL9CFK%2L+Zu@setyhnixB1$SF7hp+ipRI&Xgo{vo{P0y|v z#OlvfS!%2GyZnCC%^!$C8k5}rM)41;2{Z`b1p+;jf9ZIi+%ug{1W_7g9#%Bi3!Vi$ z3fOUNrL#`dZCYQ06GnMUImcVLzx1Qnl%*WL-%)4S>&virZmN}U=$)86RxY(h{h1qm za#VZS#WB;+`~=c1tU-OxA#!+YcDIAUi*LB_mrc4(yFzdYyue%5UrJ*eNv>yeSbZP; zC@+<(bO{kB(3tJPH>Lxrn7wM5f%B^QOWwGO^N!3sPvJNm5ZGKje+cUu`7?~d@Up%8 zrYrOzLg(AwIF&dbyTpI|%cg%XJaEKXZCkFj)>2%i2K|{|A6E&6brHAoQ!syBW_h3L 
zP|yf@R@0Iqc3eJFE}~m1VE$Vdhu6GMcA~m$Iq5Rfpk~8@vmU~nHkFI?7{2{oT6%VB z)_pjnN zMZMQ8%WF7HhwMkEIBANPSlasym;I+HnZSy5Lin*LH*JzVv{k&# z%^XMZ;5_Ywl2Y#ryYTU@S=&ATBa;`sR6%e0{^WnV-TEz-W!0l6z$8e|F<)~s!{@Xp zs$S*$A75@B*u9M)kjkeImvFz{;j8;1q%hFx8sd!WSDhH-I6c~sIQ{CS)$}R%Zo)Jv zHFCbFU{Z7#MfWHMvxz>}jGu|!z#YTP95tfP&(>O3cUp>MhKo>%o}g?V>@V5mXIoZ% zeF9A0jN^TJ+!#Yi*|ngFlz+<*v;6cv1w;8+UdLmv@5KR_p5s?6%jWT~+}(KQ<=X1u zEyL8dyysam8uCsGR>Mk_zVn~DEH6GmJLYr}pVXy}y9=A)Zaa5^qLGSt^$^htFfe#2 za70G$Se;!h%L(RIP4+z>pnHS((D5{TI-F*)Yab**gPraDSkY+hm?xT(D;qU`!1?w4 z)LZSv1lW;UCbpGe#!>I(RHE=iuj}XG!as$D?FGd}I()=+(1gLWr5&eN?05EUrI|?Z zcGUTt3^^-sA~}4h^&`0BKfU7;;d#Ld6xtP>AE~3nhK-J!83e~gxyzyS-qz;ud#Yu% ztN!~OJc&pHMKb2a?Z-sSyd$x>Y+QlB;xAth8t9e9G({jJjgXtB-ISZ;Nvysr@!pFwF?b+Yn}w-C_1{QMy&Ub4>|ey425X}iYf{= zM&Vn~OZJKD~j!K95pyO~+*%1Xy|A`Il2LH`s8epHj? z&Q)@6Rl!+VF}VeK#erVx=JdYG@RD>~ediuVJ*?o+%6Ll=oAI6K&n0UhR{ct@ZR>H? zfA6`@>Gy?obvR87us)}Cg<;jp_NO1RvYNnyFZ`6YHStAXYc0o&wfvOxujdeV{8qg2 zgV-Xszh~=IM|{>?$3nHW`rrbU1O*kX?p|kfl|&k4rk{hgipmN5>Cj|`*{R*8V<+sn zb?4jaeRuE2X1b))b1!lir3=vz>!$66;O056Rw45*%7$%z?eFq-rE-@4-3olx0WGWB z1Y0O?wz$}-h%T-iA3QskH6HxtI|9DBIyGJVnjRwD%{tE!@Ot0uWmEJ)j*@V=t?tkxL@DNJPJTQ$nDReywhN;s#V+P){b zVl&Gql2_iAOi1D>YJ;UI+H<9kRLij0S*Q6Gmpz*+2c?_vHhg(_ShYK?c3c)2nm8K> z=ray{E@C$`m6wCC0zYER%)@o!;`{UvZ1<(gwyMfc$oZ`oU7-EHl>y$gVNaNH+hnXg zx3}ALDF-s-wmQY~N{l0**8q#j+ho7s=yDOY zuzL5AT8gpzUHd(2Hcd_K_J}iIwL#m&6wiQ_a{>Y%uO0NAq3$;;Ypdx&o(uITB<@L~ z?vTkpKMt;2S`&HSCy#nGd+0P?vujNsd3>F>3MU^pdbH%Q|5Tzq4^2_`eRzMiJb9ha(^Xcoiunot@Mb2-a^x*=v3&zUo`%gx0T@U7WrvubK#!k4p{_rc z<84OG>+9RMK3q6oUTY^x^RFANO$>bJ@*i~*E^9sMW*c5QbkMcHlInXCm<$a@r9${` zk7wNI9nohBUz3m{OFm8m-%GcY;v=|RCYeL zoB1R!ldKe_&!Z#n6>M&~B%fDPo!O3Yn~^^Z1b*f6E9K`}b3G=6g_N>0!h>gNosl0c z|9VIGL*9!n9E96fIR4@7)6$oAIqu>NG5Emd)BGt!WMMTNrhxAKXs4OR8Vvea4YFG* zyt_zOond;i2m#Xdi%&Vw410AXQFj66YYn;F-;-R(MVef4YSEEqWn`}wEB`H_sSO(- zD24t=@KrdVZ`A^$YzsMsbaa}fsj!=s(THXo5&hI)xN@#9U&wwdvsEWrccOegpf6r( zbY%URddjRqJY`v?DFN`9@=l)O>t9sosD(}bzjCrsqJI;EM3crNC*z$C{v#idU)zbp z$(#wd0sH{rjk`JN@qAR?`LN=^a3E%zVO=J4Pz1l#beHX7qugMj3u<4Io1Hn?u2AfN z%K5_|*4d}nS+cwdf~r!jh&kkus0fSWMx3^3gxKzL7leocn2Kfk9s5;D8IgNh@e7EQ zf8??5dJUmSfZ3b~GO$7jmQSKpWi8@`F}ORNAwW4`#285zp!%7HnUwpGhEo&j1!C!4 zRD|ILzynU2Z}lMuaiyUuWnIz~QOkUu%qnIr#7rIa21D{T-U%Cx(1@-JlUTPdJsN}7 zr8HA*rYQT!LOUnVm4bN6_-Uk3(y2wWhy_(^7;m-(J|o!ahOBeCa6S>QR2kKM({jR3 zp6XP<>dmc9d&C|bzldR3{wGT_lS6o!KG>2_p}P;LWFY4scfza_DEMd@SZ`)b^7&jVeG`DNv!yTUCVrOrP|FSqE0#*w%i+~erDHUiWs!la{lbUa8bL9(h7FF0gQQU+}9~`U-9bh?8WEM!nNC-iV z&+q_l!nH$qo7A`OR7mt?>@;69WGCQerxg7t*mNBmVt`6P?=S>DN9+- z4{cxKTk#-lrum`HkR2@)5prT_#8(9SZMSzg!&9g%vYI&PV6Ax~v0z zy?6SNzr!i5;Vet# zUWcHtOq?B8ENFQS;ff%S+;CD2`(eCMY%$rpl0|M@+{Klco6cej{3RbR?>rg!T!r+Q z!HrjKB+}r>NP5d`spZ(<7ZJ<2H1Vd#%NQ}W?igta?-Z{j?}yEwte`I6cMBv8!Opqs z%#%#dX((z-IZ=UkVmzt4P%}6i1T#JW?wDC8G31l{ciG;i>8fpBAIW8nD^F6lyYE~=qP1ENq7w52BCF;nwF{od&-h!CePQAi>g6Vq<=BC z2m^Bfzq;spzmh;Rkf)fhAat?~bSkL=eGt(rUjSEfnm(d{M%TXs+pBnE1#hWj@KT1~Taj(|w1eJg>0qT&$xIARSI4tE zclnA)-*}Eovf##g+%IGZ5&x3mo2KBChI(P=NR~h{_KWKC*{n-Eqx2b)(%J|$JL#qA zu`wX=$*0Nm8GQ~`NH=Fsu9DcFe}nA4%^q_DK~rmJwZ!NSe2G5m2oXr%3j0_OLIB4Y zwD(L7sjUUN$);28a+RsI_`NY6L0Ei_cW~mWV?4@z#i_q~lyBK{5aAya**XJ=D9^*B_C=t3;*53K0oes%4;w8 z0uoOEU?a`dKlUTQTuL|LVEQeoXE0_&n_oJUJN7;y4kyg5yPg_#Pbxgv$(m8YIL49~ zp&y~D6`iUCVtg3sve|ev@WqP^bpgwVTepuXEGvNcI?xj1La3St;iWd|`+$`4LxP$U zIpG3UJe_KYTU=3du>pCo38`*g#LUIt%3yN)Avd8&Hg&;Eq}Ff0M6q)OW>3!3#lXQX zt8zP->mqE93^26K zu>G*SCH-|zR&42!f&?LqmE5-dekWQb$${p$X*UbT<^WN+hySYka-aR$WD92$c+GU0 zcSTNhu*IIvNc58BBzJCp{f!o~FV3}-+a^)mC94#SQ;PW43aro|A=0{goE+4$02*Hl zk@!pS%c)sKzeC&^v1gpU3DzyrDm#$c&qTf;4I{m|H+E30d 
zB;B{|1mNAhn$h}9B5Yz~Y1gs7aov=XygCK*cXuL=&r-9|0>A1#-FqWD;e4asJ2u^I z3rg0nu|NULDJgNpqJ_dvdKtrmz-U5h=#LaQWU!`qTb^uJB$hg}RIhQV=Gb{()zoT=#QdX-~)Od)LC>w{D=oUSCdMS^bBY3KMt5UhjF` zbiT>Mxefo;nDCi9kE`RL+WS)Q^TC)P+)tT}N4`5LNSo3sqPP?uJ!LmIcYWNDX!buJ zHKHrGUsJ#q0PZ1tv2Hk3vl#7{n{Fml<2tYB4}3hwsz1w2aSrcC6ha-)X+TQCM~khq z3@As?2tz&~Xc^H${o&0n`j!D;mDJCu2jT+J10hbT>RNZa6~iT=Mb_^It<-!sn(4=6 z$&^hqENyM>Ez#;yAm|n{L3}4>MY2n1?xhjOE?<)cR0*VdxrNkVdFvMC|D{$%fTbt^ zD4{m-e*m`UNy@1;+!`hd53nI47?z0pBua6(|p_`yd9DEP;U~n1>tfZ5yEPO5{L@@GJ_{1rubY+WEcq+Cj7j1V# zv%*VrmkbV7TYt9woQGo(yj!lHgCpZ&fzSyU#|{$m=?Ul&z5Lj5idJhu0+6!RKPr2~ zpvKg?C-{#tDv%5b_#Z(9t>Rsq84iI_kT9Q<)`|)XT@+Vk(?MyP@RD0_&7}Nx_mylG zbHo}SftmyLh#axFW?N#P0^+%jk6l!izUKs6%UWj$BJrdo%au7|1fV1<&z~c1Z z;5;FY_tHR`feoxX5f+9JALmhQ?CV%$k{9&F$ zx08|M!?u79L$D~BN@mi0I@!C7p!&39yXfJTUB80fFLzSk61M)~g-QS7)+7SZMgY># zbk+^e`nZ_f`qK_S+>grIkBWUfs!_Rz6B{~fmsz(&WKDI>ko&I@IU^b>BC}(C|FGif zW&4)J@cTagcC|5qzme%C&Y+6{+F|2Ra;<+%l)zZ~kAEy2EY&31=_xvAhs?Hq(L8Co zuzq)A>j#HBG%cm;L&{*eJbWkNx{UvtLa^WNe|tglt0i>C<<{HtvN6A6s4Pcx@p!_HItf=4b^`D)PP_&JAjP97m30^jC?`GZ?i~Uh})Q-m4UXnm9f|tpL!(` zcHTe%q{36@bQ>(u4|zg5Uywd=Al38lQV(|BR?_v6VdLCy56ZQ`P76a~g4D=qmXRF- zD=+EOM7P@BWo^l#YB2rMD&F5u`s-EqP%ls{U=p9KfeS1d(rE7y+?|t$0e^1pZ*%#O zduB_T%XpH|aLe-L&7(5S&;P2k8qZ2!Z}8hXiY*dD{Ibd|w})HN-ihRG{@lD|LW?lo6KMoDf*`YAbI?0H|s431^q{6?rD^r&^|JQ}b8 zKQ75E+Odz}LK1E7pMK;vzI@~^Uhs@%e^3oMi|xkxvhk5Xd`9C* zCJJG~sa@t-z-VnQGnXO zHM!g|VCkI}p}-q}C`u=AYeHV%y}C6JExbh5^bf#i(E)d68m~Hv(JAkMdf|e!wkL{J zfqKMU+_}Jqua{vpcr@5XpCQx>mdED3zpX83ctdUdyg)3tHfZD|0Ln-y3af4$n@JM0 zRci$SY;BKDxr?fyHbVF}qPQejC@*DHg*n49rSYv$!4UIPDon(oA+m(YFW86;TOUa~ z@d32{H>8dhy>PM&=6WOfk2Y~fjTU>kbA}V|X)2ZEn1)4&Dd z%1esdUga9SQm**xbRUJ=Km#53Fqa%J z#-{z7qE*0=nyr;)lDRs^&SLoY_+}rXFW-I0$LJp_<_W@|Y6%r1>98*q4*| zNa==+aIjQJ<|QrWWqoYQ&90VF)yUWn46nNyDmzBrFxlfRalu+x{lK96*wq?$TziFX ziH-U}_ZA8uj^Q^K>fCFf@CHH_lPA1e-O~w}Ru&rFz;-fE+Z21f6ieAsN~sV;6F^Nd za8Jud8|=!>)j6>AW=ob8pd|C4Q|oXU7nxTnH$_DbML&QB!F>}rSQO;gu!>y-Q1!M` z+Y%HSqZc2Go~iNjKOOTxHky)m8s{VWg2>p8f#~bQ%<-{kNWQ~im7r;4jd4bY5Yg%j zYHKCIPGrEt*PxmxTx6YWXE_ghf5mi%Q;qhO^8kX$PL9yR;nAel+jpJ)=73b^4TPVT zT`~tH!Xo5K$zofCawb}6f|;!xM&aE(+kt^v+f2J!4fj+Zw8N@@C35Mu9^fq!bE@Rs zH1yCeZwUuSp)8oU*tE_fU8Eh3kp$`#yV)o#+brhwBLwC3&wmy+NGM^qE4?}6l6=9> z{^5-aN+)+xVBi3QUY{=8B0J;JJ=8h9x%S8uS6n#bnr)`kWE;ek>Vcb`vXuD_oo|eg z1W*i4s!)O>A7*+iN(5ur-6OhK0{|0(x?T950TR17jGl_R*R|IUcRDg>o=GE{#GX&{ zx%u4@01LzUlfj=qf8kp3j*%i`O`QGKGlB^_;fG1i?Fa9LWiA}fcvZGnG=Cv5;P+K8 zPqoHCYDbGE$J}C{Mrow-4`Ug{QH|irpXH--q1{4V+72S|lm+sY=-+s8DyU!H4sokd zY%-VfKy#09!nKfEq?Yv9ibD@)T{VEdtOmoDfoH~fN(m~(%>5l1%npK=Z$l?F=nQwM zRv>5=IU*^6LZI}7&a!0I8drFR;*tX20b_fOuuOvs87eYC7XhOW*58VsmDloQ+ei1@O>^-7B_TTDTM#k zprDpi$5d-q3_tA+qzI+AsspAZxJ`$1E!)0`0P~Wv+R_Iul^6tdc~St&)PF0$Jay>p z0N2BIGc0Wdn^bZsnb_yhw;9x?G?e5s%dC^kzB(GR9%Sd3KzlvY1T$Lu8sVD-lYcLU zfYOKIGyN=}pKKNhVD#TPt*JO}_6K#9*$j+3*>e7p`koxm7ib2zI#w_$d0=7^r$ad`$=2gxEc1Cc00@KB-*a%4npvsWQ-=&o* zOf9qM43=L5#{9kmcwpwe$Df3~CD^woSx7(pcsFy;DmWi*THSeTD}Hjdr=U#5;H310 z0?(Y*Gl#%JV@XIk-F5Z@&G?ck13w~JfN!^Yi0v_wkl9_t7vIre2tc;tHdTUZGCS3Z z4>PtTwFpY&tLR~)i2^|WRo8^mU!HJ;eO(X;50lcTD*^bx&T$FcSio=5FqTYigzjh| zC364ORDry)EIZV&*<(~xge+gvw)hJjSp_62z%Sb&jW4nkGkLK9lr*w$7;=qRoE$+V z^|gw@gC!2K2@-B4;eo{ufdRRDazxivAB>2(B9UzKjAO1P;?G}DRzf?!xnOs;80QmT zl#mND_D>M|_rR*2zNc=NSX;!2%lwVLqr%u_rWe#rBs#9(_TR3NPU>$cSGxi4rFWau zBO=+NaS?TL$=byb6$&rB&v@=rv>3Q{)Dl4Ty`YxWb>s3;_A3A2_LGc^kYQgnppurk zRPS4(%LGBz8M0WzCj2FWq|+VC-c?NDX=8YpUJiw6cqqXw@SDOxb6kq2M(q}Ehg{dN z;*;}_S-#?|;z+ne_{8XXbKfK8eLeyFO&xkoFrs$F2=-<)^XZ?6vl3!!4n(v40ocOf=)!AO`|{ORpsW|QT)^1W>;@>QoOdp_E--@ 
ze#=9CAuY#l#(2dw>_<<>s{Y;a;^}m_VPI3=c7}3%Z38(Mu*p7WSkBMV=?Hl)Sggj^ zH5UZCV6|ikU@e!{oPrv#r0}=DYe;w* z@!>9P1SRw39X*+0UMfzux$QZr(+SA46Y!@T392(n()GJ6!X;jtP?e^o- z#mqn@ihK-30LwW2UeH^gwqxTOA}yWZ>8=63 zhpL~zCH5drF98$EPn9xQMmwZAHqFCvt-qYm03tL9lOQH)YbvM;n!!VYi|TBc7ec76 zh&r6)r(1S=BONMP%V5V&aX)~k^b_@^zAPm3Cf_<{8fgB?RlUE<%-wdqVCviPAi zq9bq}s_*eL9(Xdp5)=XqM41JnT9dLHFD{Djf0~OH?l5i?QBD3|1)u0T4)F|V%01kP{c@koEdW^C(;}hO4hj^=Lm(Fb?BIIPaKCrt zdNVE~I2CS8OZEH0ktC9l30O^L#m27MfHeetD#pMI)u#v*!P4;7H4-PC__M=`kh6^L z&J|DXfo8-aFMy4$9+fUzAC_i8)kzm2GRnRLl4t*sgXiif|LeemOi!a@V1MKV^H3vi zcC}1_D9vku!oN6+Wb%+I;aC>AJ$i(1hIL^8#Yu?Ayj+I^$<`O)2Gxf$F-F~)Y9pSF zQcvR*cUbg^;@|XyubK%w+ZL!rxqKTIg8^|bmBQcoB5pCDj>xgvWJh6tW3egwbL2u= z1#l&u-#KC*8Qn#;o^=|Lpwar1o_ouGiv1>phRBh}l4uzaT?2VI$X8g*ku=g77Ssez z_GXTgvEo9qB~s54-cfWOHQME!$nzpV%e^m%@V&jReu^+^Mr_M zOh-5C8T?(ZG`0mAm7~Dne1+Hx?B4Ww?*GS=u$8RanH?L{dPZ>C~ibO+r5lKPA?m zzrSd6cG(dUCNB=#PUoFzvo1W=1xc-tlFN^F%l*p);T>LVr+rYz*wx}1tEH$i0yjFK zb*h)U@__qyZCtZ9H8*O2SS3(E9?>}G`>3O&+g#2C7TLhOFY2~&$agN9Al%jks(1Kv zDO5%Tf=5+t*oS9{0{c-aep3@bIdkiEuN)GcEg&Ku9ZvOv;r+J_{a!rrTdGaRvlMqH z3jZk3(c6OS5ARDKwrQ9xgPsv=TQQe6hAE4PB7xeM& z7s{WxC3l?4j07_^_;mQHBW=$KOP#Zx&sY`9La!f}?RFxUy{gdKSUO%h|?$R!4@2`<(}1dVfIPJstDgU3SK?<&_ih% zBN3t{zT*4Fp{E_TcFK)mb%xb8|L4WQ7V(+D?ELuJyFmdA1gw4e1Nn)WIr^oY!ae`_ zDJQ>KA8=iTMe#70?w&4JhL-Ye)>=>Y>OYbOa>SA#a*T4yeWsq8cMq>tI9^<_LczJR z|HIN*aJAL7-8vz-ySux)TY(mb;$FOXa1Vtdh2riMFYc70g{Qc?yF-ECe!_dcKaeq! z?6vk<_nh;ZzcnTA69|9xWO5uFn|&9i%P4n!9IP$yMe7^)r_e|g{E(H=pEp~|5pPI? z1hh5(E--iAGyULW4aE>z*-(4a``aAwoi-r6-*@EOVqWqvPAujK#E7cAiJF?QK4)^lR}UsFa;jzriWk?SJ+9n_8yCMEc|#4{A+0`NzS+dE1A&2XfuKP6K!iZVKt;s{y6%zHWzpP&gn&3asrO2 zhdkyLFg$y5(BK{$U+T=y{7A~aoeDA~l0Coan!oa+JQO(}NgDNv!$f8{1-qD?LLT7@ zkmH71gOcDK>N0Z_AMXxLh}^EeuEpVA@=Z(-bD|qOJ6HIBCRp`76)Kt@mcd(9v8s$? zzrh^q1TKO_&oN75@1!Aae|$8zpp!n|%V_idp8Qlumj zCAQdWElV!o^*9+Xoi0YGum^G^eNM&kzC1@iI(h;ID?B3lQJlyi zNB-=xWds;}T|6g7q6Yu<302t@p)k3`Ae&q0URBLmh7!Lky;FQ{P!H8}^*ddI;(p5@@G(a91;OW)JL(T#x%mMT+Wpg3_ zvPp00p3^bFE`9Qu56L)A{7c*1Y8d&t&YJB~VNhyIUw5O0J0>i5RzHOCJi7j*>iaOA zXF6x>K01W*43k?}sU8>lpKN+&6VfSHgI}MAU4kyNMD!xwrY*ZWN$Q+$I^ZsBM!ix3 z6T>PiTh{z`|B|_6U&L(lqH$sr+^z&d7?mp;Ou7HBS&Qw3Jo?xGQbjfCRKePutsJ6!IxbRVH@ybz;t%lMQnB#7KL{f*-h3S@`nwsp>MKG`^9$`c9@2u z_Z;`Ui51-XHr3Pd+k+QA>wNrovmgSyrpH;6$N!RFXdPtCn$dro<|Q169mpLh+kChJ zCQbGrQ5jP{Z6M7rILVz9=DRH0WI2)Ve=n)|kJXhnQ zTPnd{nb08(*~bN7Q*~?(&Q0_*gw4Y~F zNkE9eYPy+Qdu(bM+*Q%s`Eumcj!58?v#XFWPAdxW2B)$%fx)Nab%T(4Anfvdq z(%)=WIw+|W)e^){>?`!Wj=q<$i%vE#eoIG@TlhNW$zCOI@iJ>AD%nOu7fq~j$qR~2 zn0f){y8TYH6TKmu?CA~zJT7xy+ja~yjR9NU`a2j&WBt&pNZ1vhR%k>EFY3TG(&HEW zo!U;Fs_0KsuK#MiE!s%n|7mq!+Df=hT-xqIg7e3NZW?xBvu4BH8+%xMo7pA!aXAO8 zw1;v6J<1wYP%y{sFG*}+bnxFd#euEJ*4pl!4Yn)x%fozEDqD5y53sY*kqx$}Aw|?A zEa`H(lQA$U66D>RlzUYK9-xW1Q;UH~ncmhl9XC*RR{8ZSSa`AS1g6Zr$voavw`1O& z)Anr2Nu$)_yT4p?A~^22iFpjI;A?^=Of0fgv1gdqx`)ZI+2S~-E#>l}_-J=4+esNf z$Rr(LB>5fbQ@4l5OdHAvaZm`xIdfQoAO1Y;eG>V+kax=<1&d`Y69)CJ8oWmXPyOuo z23vae87q(-C+Hlvcx8sKJ?skxSR;H-nXBec^>w`?)L%E>mtLbj6ZtP@apACjd0X3e zRtIV_KRWqL_oR;~HfC#XnhhYN;dy1ZF=1WyY2q1zIDC0bz;c2l2C3y>DoT(?yki5t zD_QHR?98N_7;j>p^yQ99Ox*WG2R|f>s-`)uP%K}LI-J*oH!K0n_5ZXFoonHJ77)3> zx=BoHFM1m8dWsJ5D(O1`wbis8vG%jLz;s+bWcPje1{9cb-0&*m@@tn+4EK~OI{4FU zKOMlT?h!i7bq18YsA^vNneBH}z6<+t-Jd7E;)QX(ECSaUFe;!q$eK#0ZqJ`e0Y zxx$$n%u$;_f5MNeZ2XHj33VMcu3?ZHVu%PcdRUQcj2`X}V&0sgTusJpWh&MrC%X73 z^oPDgHGa!{CKfPNH|vj9+e^EU&WkZ&qlr6}%<C;~<6exsgTX(# zz>K9Z?o9s+(vo$W39GFQd_JE;Eesv9p}FC-~)F`S1(CQ~?`sPiW-0sb*2apHn4F z<{>#Mn95um73=Qn=_qfqVdmO{P|Za_asj)JRmhvk=7EhSbN&AvzUHMcJcPZWf?o3c zhK<&?B%?A^VF`RCOX`N7*`ehF)4hmD&*K$c`#I=~EB$sP@t@>bMLe$@d3UN2DHC^g 
z5wHI_>eD+dNULkvr+i6i#Jsym%smUzknRyqV&Y#Fzh1%dA=MPb%YahQ4X{A^G`IpT6@oH zrQCyR6falSt|8D+ZGC$oBxBU8GJj-fM&JkKwq>-2h31ve#HmqF`Rt(9DOdgZ)xH6m zRzk1fenf#3sY%F)sIaXT!M}dHq@6ls@w^NIjS(*zIf|rVulu&dl4I`n?tl+jiGboL zIxU6D3Fr%tE9Mw%iRC`2pr})5&{GxqlrR{T{)r^r51SOVeMxxlFdaPdafA3gF0@d3 zH-o_ro_jxNqmqUftVl^|zo5LP0CUYn9m|lpHiB(rrx%Cpr~;kN;B1E?QuX>}wiLS^ zkpwNp_|RahN-t{tNnX+W-QnVRP;=bNf!!=FDCx9$d5kX<_RiOP>KcUXUQMfW1;a=t zD_lKv-ve<+^w(0YAXVL>Q@_z@EVw_(qX;9!XnKN~@&b_{pncyXsn*^CI5E+Lx?Ev+ zI?+VLtIdx^?Q5n8o$!ptVjw>At*0F{ZX6+qQOjLo6vnULw2+MN$pALNA5_?%0>$YJ zAbwFt;o|11Uo+|QscnxxE`BTQ4OlJ&8L`@3uAcC(uVA14VGr#kkEvwCwu}fiunMzW z7Ga(o)kw-z7`x}#OPEMr9G9ceAG5&68ZY$Ke!b1oxVp$um@@rt zp#GT;DqU184n7W!&G*=SWjqaW&W1P4Y_)Ye$-cTSn$yNcP+9R@DVZW`RaG2N5xkb9 z&Rh0^g>5^xh|=QTju!G*>6cg|QXKsohT;umDzod`l(*^NByGBE&>V4AFMx+M zLWsm-KL4^-+%UIF7`_p!0a?YKmp3Fetp$W3ha#$h5lW@ovL{Nm#(Gi<;5+gcX*k6cq6?|#&RncLKQ``eON}0(VqGwLe z193+r2cpUv;6=6p@j}EnNYvDFf_L)9f?SEX1GdeyyZwxkEWf{70eQDnw#keq8FDE| zifw6m~xadnCcfBjb}0nM|${hNS=wrT-vd;C#i(}la>_mQ(9+T z9PtmlTWV91;YD zadJoOFZy8f#udzxeW1ada1`vc^uFWzxg6O9a0Ht(SJc+iWh+cCqBdD0rgEnHImHe_ zp#neLf8-pr2f-fnC7fj#j7u-$8MZ*u%0|igANqrz`ir+JzUa?oIZ|~~e1H@3lLIVD zS94QySH@;firL9iHJiVXBHrE8v2!7Rc4JS@wuKD_U*IS*zG`Gba2eP}u6inIsE1`%ZD3$r%!$nE&@rQX>kV@hkN7g!z-$KrfSiNlhbg z`q_<;naMBV`(@ZAHBDeaN(P#(PC#v33+Hi78_lvJ3FkZOkJ7dt7!yX=yMr4K3*ov} zqtT|r#~L8|h-|cMdWDN>lhN_YFPMK2g95>}NWXk=8La74NY%d4vgCfkt+Zug=_ax7 z%&;!g3Z&U-RS=akrdys$3N3I~Wg?9(rH;u&7JkBehEa3a;{tZ*Fb0xvd`-{45GAw0 z(l{Fe+Mn`(+5sxt0_SVsVGapsw*i12sG%@l#PfT?neVfdxFe!CiLbh%2YhnB9l(Ul zrM+(cB`Y_j_|WmID{?wX{!?>1fP>47ZfQUFw%yjntnYw)on%c@@e1wV!5;Hwa@Q6? zP3?A%BP|*3@>pt{ z{EpFAg*A>k----fL6~S@fj=#3B+>0AT_kNwm=W%CSQ%CEejIv$7!{32lP%!}eIu{F z3l^@z+E4jDa2S?IOw`*y`p#{$`K)MFY8qIb-r{v3(Ta91i#xq+=(g@>|z~9(^gW0aoy;q|--F zczj0EOE9LDi~8WSG=Oaya@#iFkxitC_*k0hRa*E*k{s7#^CaJ2P+GaPXBFYJyZX@4 zsV|u!jBeJB@t#_cpLg`VnESqB7HUo_{o7dXE9^e=_*gVf(%!HM!zJxh{_{v@ncRPK zL|u-MH)JV%VAa=i}Ac( zu#*Xem;|gB6ofullI;YdgjGC`uQM2<2r&e3hqTb=66@bJ^RP1T};9Qw=IOFmJJR_3zR z^{MK>DfYXM`2g%7^-0|U)cl0k#8kp#JtjpQ$G9tk0^5WCH|U!F**aRs!L~s?F{w(Q zN~!CW`o%!`hK5@sl zomN*Tv1SLHJlDq`$dE9F|Ea%2_A91HGZjcogQKQ=G{pop>QktG2Nq^4a?h^!TRjqTf`m@k=}TP+ z&7T+bbpmYAK7Gtvh#I$4|Ahu&0q+p^1nrBhv%DQ#pIfbOFrHxCm=4A65%f0`fVb)= z-w+vuF(7rL_`~n6$KRxenH%mZd*srviIFkJXfEYVZG>+#V1EPlDm4{>5%h*&M6^EW zM1j?ut!2YuTw3Y0=}#>xAf1b>U_}d4sK}$nlE@Y>Gt9{o&R? 
z`jhm;CJ8XI5&+r&X|Tklf#82)Y(Q`?0sLPRE|liGQ~9kX9kh*6=@w$jk^rT}8s-#E zc-r{a5*b6@85X;Ji_NrnMznC!3L_IGsJ=C!Nwj1H3zVHjDwIabi0pLgXBlW?5n-K0 z61fMPnQoepU!I)-+4v?xNq!cs`$5w~?T zJ|nDo6LY*x1Z714V*EByo2R>(sSOY!K~_^>&*7o_McX$;K8|T%AbQ$3y%G`l&pppQ zU5p4*;!49iUtBgfC>E@5dIzKJP@P#D67U-=leVp%AN?@H zCqsN{2(CF88{n!V2fix``{2bNJnpbndC5(iKFfsW$wZ?Bzy3kGn2N{i@HOsa`$ zmQ5VZnlY`uvL!MN$LXnT;lJ%C(mqk8$8NIXPF$4t)LQ@o>fZV=OHn*H4e<{31ny)U z6Ly1EpNs6T;-ZflBkG?#M4`gi><-<)ks_Su0o@I{73NF>t;kAWQDCO8Bfmumn}sIF zAXG%#w&ay!bn$>$%kZj@&htHIr=|+MD7}W~t_tnx9jGe_zxiF`^Kp;SYK1?;itU45M z5f_rwjpfs<2ItLa?6dimk5cAsMOH{{l5>Ni+_`7@6m*fcNZ8O*^|d;xV5YXsuQdUs zqOKud$wNHOJkCkp@%mFI$rsgfa+%{0$A;noT3XFr){2w>{eWD6+=DATfRG%M9BmEx z5DD}iGjiiJ4&w?gc?-0`Jhgssu=U6NF=OloQ3{9($p}PA)N4PruO-FvQs7ejlaW&^ z!5T0qY^|fOE!yT9q_11&T&nA8lNS;)=lGsSV%+H)KYClrdOo`RT)UM!@o$%ujLZKj zs9jKYwP9SV6q2Aw*Xr<{ksbqCrFD#zlyPq#K>et$2TgfxO@Z&=J_8V1)}du(1I#8` zCI3+C^G$Fb%a?Q;6Z;ig-jyZ@UQ{!9nU!{pZr_VJRt>*o%+bfVvTP_TDip9uUu&O$ z18BMT_`Um<`YT7~-W?+yI}y{*$q8MKhB=x{p+~o^WI{f!hyw?Y_q+hqpMX8o&w%#Q z4g^tPP!tBs2!O|tqKGE%E?q`m6hW}C&=R=$#G(S%fO>Tclnte@r)O3fi4BKPH25_D zkPD1dvN4`}TuBY0=v?~qk4l2pgjv9gKXfTk6X-Fc{rWr4EAe@vjk5lb7_?Z80nL^$ zMU)ON7h+Jz3qkpm>ApB<&eI_jlbAkij*EyBYSq?Sn|5In*4$pgUNYBU4m>rBfM^-X zOIT8DC-ShufELU8bwe4iW?tJbt@SyYMSWq=7FD(;AjK*$ zo?@^9+9Uh6l8#ib5Ux&dMH#qUViz9xSB4Lal_}rt~K!^ZA~t|A5jBr zC>F900uf@#5J!hYs!x>DYZ}%U^F7v4%jvKTzg51l$6-2uy{bw$s8vnamZQuY>^{hd zE3F&MGhzi6->e}x{~8(SkYws1g!}Zj#E{b(2nOgnVNq!r?q1naZuIw~NBpN}N|(P~ zUT8RSpX%cRu!np1{9hd$HSlR?-xoCu?x5@@?azpoaM?qn(P7m4Kh{mEU(_@Vri|#a zYg^Qt*qcb4W~IrEwly?5aW_OT$OzP$c1F%u;2M6E@7$R8&00cl~F#$G?dFXV~mf@_KFi^ZY|R zy=T+aVQ%MKLx;eP{tn%ol;lVs@dkvk+*lgux=ajk=jrDuUip6O#E)3-6XD14rA z|IXW=^06Uf{Hg0{>*QI0B~U zlJPp;W4|YlzornKWswP`x^&E+!?9KpGm@n(#gR(|dN&!soGVKP;$~vUC|~myc_m1-F(fj3aoN0+Sdwf2Axot=X5xcajZnyY(UQ>{(AXBRi#P7;7p z0D7K@dqPoEYO4h8Q)9+NvHuv5{S^P7U@>`iQo9SSZ>EVo-&6(X{MQym+&bI_C#vuC zrqi3BaLRdY@NP1=$U`-km)5|8XW>OQ_WvjF$83=Ugku4rqIKG)+%59?Jbh*HFQz$9 z!eZkMt?c5^`gW-hkBO;7pnvc{7d#TPn!dt5d7I>R z%TOb`Ht~iDz!QvutJV=%mOJ8?zs!f%h(NtAZxTKx?}*hv#?9+5<8Q8PBYm~+Hz};S zQ3g5?K)b%KDAxAQR$62WluV)q^Zg$Q57OaLyiI^TS?F+huF4}vs=Wq6f%%Rn2CZ`OsaoHIzk5*PY=8536i*rv~jf-F;R`+aS{yhj^e&;hJ8wt zXgYDz|H|ULXeh)?MhG;M&IW6>q~?O(f#H7e;a6KJ5dr*PL(4v}z4O9u;1r%=QB$Qe zW2#25wrm*k5VgS9$pWWy@Oi9e@K9HSe8-&^P&D;NKDQxL?*2)VV2xWxf%@jd5keme zR-R0=4L+nlNB;evi7s}J@|Sjvg%Tv8Q_A)M`~+u1fKGFDnrItlQV0j{d5K6mm$sjR zfTn+o@zC9p20yB^_@RDZ5E8@{!W!{Is^yW8HowWk3k z`+9d@KtpiXr-a6ubnMlWS|+tz+$!KhBFiI!ysFSa?q5wxcs3vR zo{_o5EWD_5p|Y6JfNV0VZkIOXt#+Q{_{Kf*R(&y~^YSFE-LQ|x|L4hauv*cuJ$*3- zLXu;%W7i556gmmAiOWS5YB)En9z+s$Y*RCKfgnbeK%7xODOsQ{vhlkTiak&z1f|f_ z-k3YMQ>GQT7-B4uh(*73Xf9(`7L;BP_`F)BNBa^i{rvQoMGSp`UY zO~hBY?xIU4=({Rk3TpDzPBjV1kb!VlFT02Z`%OvA5lxH(~y6rGAMH> z*a=2D*W=f3;g~lo&1&SJg(o*DcWom{9gf|be4S|HS+#*I| zzz6OFZM`@@3{FID;SM6cqafgzcoz_mY#k+xVS3xFEt5%(a41_tfymgSLk`e-zaKFr)M-(1a!5HNgF%V19|akQ&mEUi z9>6rqFpm!4ZjXZUBn1whpKtkS!rvIl|iUAJpW^a(lAvG7J8kCU_ESj94yyc zsb}G)YKd46q`xDZD`@3^I3daNO9L3tDov0C@Nakp<*-BqcwuKqT!eHOD<w%4p z`dl$rs?TLP3q-$<=I*@zCsQwmJv}pbzv`KVFLq={fL+74*R=N#41S8?gQW{zALfUh z?!kG1H;bj>50i0T8Oi+>5x@0*j!PJ3sD7(?*e^s_sl1rBnAT~TWL3w8FM}&NTmS-E z0fb{ApAeJJG0+ex@8A(QOjDIOI~9I8$cSRF5*#Rj0>u4kDCv|bt>866&4M-(5`EU; z^4okqHW!dhY{|7<%8Y;k0kgn(ry%uED?qWZ><|)GG=rYV9uOiKmE^6I&vjGGk4&lZ z@`>vZQtR}cIan>!52_WzhV5S{)izn~g)l(i{cV}oeyjq!Pb2^*#gSIDa25v&9RY$w zMX!a_IVn@z1#{i7jr@J(!yabtvVZ*zgq|j3Ou6x%Zn)&>L;`bf!+Npz~0w^cGf>XPCZNF?`$g z8Yo8d1`i(EjG!`ogK1tnw(?CAb)F+!n^h?qNxC_8!tm*{3#ZC#+tlq_o@Z5bHZp() zlz$T>r+WnbPBZBG(1XqPP9ZEku%f!uS!dp$+q*E~zG2*Kh@+Qfa;7&LbJ|pvc1X~_ z=vxYeEg__kG^vXN+Z_OR_~DoOaZwtayzqv8E 
zC!P1gG;dJmRWZ}c%=wvQv0RvMf@yZ5Ufxi7U*)&>k!Bn?z>~IQ zNt-v)&yC@aHBGMo1GzzZi*%wOm7T8$zNC3_W<|cF>-hthxak-naY2WB&S+=BWxii^ z?Rq&gaDH2(wbf#X3^@TMQ% zXZ7DXnfm%u^-k^PofRMRN>L3ik_F5j!Ub!{cn-kMt!NyZ^A@se*{eHlN`;TEf!GQO5#>wS&5sH#sC+D|Zm31kFhaSIP;7@nNAhw0E8Cy#u4$x@ zOT#f19(?G?SA}vq)PV4iZ=TM;M#x|3vwsr5zEebEaGWnXEQ?+~g(~a}o`n9HvaDy< z-b_|WpXB%Z817@a4ORfqxz#nDN%QxAt?e8*vA0A`R5@Z5s$?14yzv=)_Et=5%kG~rEykiA?aGoJbI8qO>zw7n=wfahP@1&C6W&3 z{E~9fsHTA6SyBCv9V16wk8%IC1{hiVjO8tGJuLCh4T~ylGeJ=AF#PiM)A`GBa92f4 z94D*4_#=gNmRz2(wyjb}cy>0SS+I`CnOGi+-pi(93ZDS*NKEnwp_7_TS;B;3m9r5B zcu0PeYXzFw6T(M+G73VztN9qwy4|tKA(6X6ZUq|YDAX0b@qvQW)l{rSm^m*LCy1Ql zI%bOBEiGG)jJlA%!-GD0oIkO!+jiS#V@SmsTr34z;~QCLIGYpxsXsv(Xu5Tg&L=ic zQRRixd&e`p#ayPpTMiY!$#}wbhK!*Vqc`s;QtZMtj{OnYNY#^3Z{3!zC!}L%w69~b zSmzW0r2hQ+@n=<~vY@a#GTFp(c7#{I{E(*Uj;15Yp_Atx8tAS;<;o8W`Lw@jVi7ZO z+pXvm(QVT2PPxl^vr;AXg+>e$Yyf^UAax#sm7LajRlEyhrYB>;%olQqiy0Fz!^rE~ zoZMpxl%XRdWEK?C^O(IYY5H&7&QA6Z%yr4c$)m#H zJrJC2yjRnrNn+q1824{}!BzXRwB8a%)^zR=le!Stm2`QizviLiuAlSUjaSc`nrFx; zaVX)}$$0G8jGk*569kG5F?L>uQl0rgDOyxIcPF3MZ=dV$pDA@ME0|V&w!d4idd^qX zSw~&(FFSByjUi7TKC7AkXAf(4xza))qhwmX^3hvZ=S~~&znKZS{6imid&CYT13s?= zTyva|5pN!TW2}~trXPTr4K>9JgP(tOm0l9{ZG1c!Xa8?9%hk#-Ou%RA7aEYUP~$E+ z0=1h@T11Nw@Han-0;F(Qq=?jPnF2IA#1x|<;Z4GhXB=aEkFFGC!;c;di@T#E9qO6n zAeDv&q*`XsG@wJSBF{c`0|DHV7|t|YCaCVr?`_Gd+^xX$d9o?vI1%uWSda|mQbRM2 z^)gQWOH)pv47&(gWt6@MHF{n%{kPMFBj&}NdAG3vTH7*0ykaG=*FqO+!go9_-oa1| z!uo1AkqDZjQ1LHT3EEqJ%u#-dUzI#Bd=U=cp#5TzmK3_svB*{O|Xf zGo_nVd=839@bk3*zrd)x%{zOa?~c@@zpoiVM?p2OXZ3p}%gGHd`=M=Rm&2VkcUA_L zn}n$$f{V93yFaJ`e;L0lQzd^tbJJKnEq7s!uKxWq`??7$#j|h;FjV>G#>^6{@xs&Y_Jwzc5eF?Al%KhI!q$+YK(-=Jn^vEnK```-}~ zzfYdGuu>3?U9_58U)NLoDX;CHrERaZ+j|d-lGIou$iYhAIh2v80?xC4H$Vd6r%%~h z|Fs=|E=z{h!>x8)2Ve@yRAbH@HMdQ3=QHM$Q4t}R3yp{OJcKJC9Ly|x{1)?krt-xz zL12HW0DwaA4{1RaGMJyKCls3CP$h8@+W`tLpDKAZxVTIuqPW$~kbk2i>{n zj$DEU(we>`zf$6H>nfIXd%hJe%2CjS@;%aKGnt<*6)umxbL``ZqNqz*W^HK5^D1xF zp>XcfrMzFQR`tfvrV6apgG(-}6+S4lMy!0_^D^)|08@*7d59^e{!ZDz>Yd*2N9tI~ zhg#WycY&+sZg=<+&8xNuSNqwlp=}GCIj)tCcs~*%#cju5ikzy@OG6BZ<@Lgd2f2(} zYoB0V2t}7~mv2p5i%u=8;wcYtuXk~r$^2G=OLlZ;)uxm=;31Zz>TN+<4A|BIxT;nU zv|M~PaACXkySAOhwA{_!ma?cNHFeJcU#NHKa^C;v2KU!xC`2Ddn%stDHY7&}J?Q{9 zBZgFo@^&@FssdLHf|>V&_ZxO^&w~6!PN!YCl`s=`<%b6j`@MQl%-FZ1T{x5#h*B;) z9}PrUg@$7lQ#YGRTDJe-b$%Rsf_9~L1@Tbaw0SRk+MBH3$9Rq=8}C_pu0KP-hc%s5 z>%(Of6jp)@_Ico4svu*+b)VB1#~dZS=R;d?>|CvTNQ2vKUeLeE|BjSks$zE99H#hr z_s}6;JFH};C19Iuy)2u6uGm=D;|Ki22D;Kzd4#6OXhys+|G8(+Btgtk!3dxcLmBFCl)N6mMfBs1j>p=CKVYxs z+IQC%-%1X}>*@FE?yZZzEJzULMfAL8=`OWpLpHL#>M!3S-*qadsi{#Qs`vEjb(%S` zJC$ae2p9P0aUzrK6^l^>^REdrvn-#o)+a&Kc&1>48i>KosRcYQ38@}Xpk5+-bA^Z?cC-+4#=*1ASckCo`WSiQC$(h z6QQ^tBdb%Z*pt{za>4N>RTC3}B(XhkE*T*((yvzkW1i|vW+R)oub%TuJkUp}&_ajG zmmH8vU8$^Ss#wMEh|+wFJfNV9K0D3_|Lt;5sgx}*dfPQS#-fqh&rPGXm|yTOiL}|2 zw&2V>laT}I*DR_iVMZ^d$ln*&s_^@9*w-IKYEk4~2)RV4Z}RAwmL$+N`A+_h>wQch z6gl~?4r05t)X*Xe($D<-7hc0%>f$X{kX!JyZ36RuV#=kN`julLXTZ9x4#)UN&eegu4&>n~pAm?PU~R#7(Pet^#wl<*EI@mCCFHN@=dh6 z0bg9qs~+VSLBX4WAMx?6Fs}n6(oszu<|7z7EX^~8wGpZ--Q$+h^mYQFBgrr@mQ}i|saWSP%9u302}MPd_Z5>D&bV*WdB6u34e)-nzn# zNtiF;u@92Xxd2)D;lRm8=TM{P6*zP(A>ktC-0Ak$3tdZnUaGyPjgd~_*;{Ojq{DI; zN!%~9&1FYC?WM~ljR6LE+U}szuYTHAA3DHryL)KvblhwH`Hv{qCnimf^I`Bc*{H(3 zUUa%9_}PK0t4Z?&N0HoO?I(S<^-GJ57~NzfLsSpT-Qf+v$nHM1bm+Am(xWV_V3Au4 zSs4C`&ara`v*NAMO<@1C?A6i{XKM~NZnDnCdqo%zr-+-?c1BEv7_w<0iF;)R0K9+5 zjfn-;B8qIHN#tM|5XpW^8o;v4w8pmIV8Wvnjq6llVa*a3B$KSquNdm1=^?}gO!Tp~ zzbABCw#vCGo!<+`W ziuM4enuZ7~Ir7LN>zjzsha>tST*Qz84X)VT|c4THvPc11c_N56q)?!Xo~1 z@fOZ_$cqaec(ajr1S?p@n`r(3J;7!UdzRK>y#xhD)$L_Zd#CEGLZ%)zi<@V;`$kkQ 
z1RM1kuDosM5&f`N_l1P@X6`MQf5(Q|^SIko*Ne7+$LX#Pt;Z%-7yB<*R|(VVgjt+O z+lt?{^_pF)d8n#ozgVugr5GYh8|Ud^X3X5Gm!25gsks3p12TA=`;5QR>~_yyiy*^FrCtqSeWU46 zSWnsdaQ?XOnES}%RGJUx;E^1Jr_TfxfGB;U8cFH-$J~p0!{+^0kReb&^pTxy;?OoU zhi1zC0f1~f8qyG%F!tB*F&p-h%JzZBCD)g}oqxD}; z8{~WY<<<2F{Xq2NFOEnY34m})9uSqE`O}IS}2Pm9Tp1QNxC#o z_3^U$k+D}EAWS2rEDKhBx1Kw|sG?$fNJ*tkCLH(6-i91-jP3T<><0yS{{2#2TCVn1 zdX@V&hpK((fTm5M)#nHDn<((ZFq+1af^W)KV>gWfG&9BNg0=+A#e$D*XTs&}1~{1A zu<-q^5e#!rhGgNB;|`;;vnfKB-VPCb1#cOgj%VbGOLy#r^vKKe$+(~yZ|OjoiIPn{ z@JL%C-ymn8P{H%T()wedS9i>zxr;D5>qHAu!c5LE0`->1A2n^sxEG4jX)NK|8Z`ax zNvN7XU0GwT%xEEaI~)^?ZTs*Gx&ffY)KY-wn{JrlX`@>k59&F$oiSdJx(oE`VJ>CDx>3JSIF0a_Cg zf#rPnD%alT5|YU7+;Z@}S`wd!S%q1BjfMOs`w^NWFC(JaPua=(4ov4@rXq?eFaFu# zOk$D?Q}k4!Md}YvFm9Zy?_{*8mG8gWw!1CI%8m}ZT^sDReYNGRy@S2k!EItIJc8fT(mX^WH~mXR4%$Vm(~lX5?^qviOTRNvR+89j+MU(h+(^%i)}iDosBGI*TGo^x%o^)u zS)){#OcKtUFz(fZnlDx}{7q$SrpkotNaJ2tCYx8gv+#peD)qxasB<=&jpKu0e&PQ{ z=>#|}sLlJ&>j^GcCwBH*sr`8`SEaDr>-3N?a!p`w7ipu!AA)?qhsyrn({n_j-B%BX z=Rhp)rw8IQ001o^PdM1>W8Wrak`zdvN*ElD#qA~t(Fi9GcLN5M92m7piY8HIEiOt4 z{w>VeO!~c4TZjQ@Q9LE&KDnj4!cZuz}jtrwD0<4H<;0k65t=Qkr@#{dT2!Ubr7%8SKo?F zV9!JgMOAX>`QO{}UQE}EFkP21?eWuA=_M>?ICgl(U)PwBow9CBjYk?QAPdxQ@12Em zWR!6>9748G`ZK(*0C`V4$5EVcTp`M1<9)EN#oB@$qVNLuij_r>n{I9q> z>{+ZDS*rTJ!WO^5@ZD~?h~2_t(PVws{Up@;cL!xluB|bw-&ullUk(zQ@Rt;Zq#MTV zN{wyom9m()YigWq2>S`3W=@LL3A9rLrC)DoyE?5D8%mpKL?$<_W~0*0!oH4{-4LC| z(nHS!>T??@pd+#|nM575C<^>ea4m~8XqCVBm3AU57mK3;a)bYevpBNjOnh{iTc0i9 zdl@wpINqWAfDho7MZ!K<{|^+AdOne@fVf3k34~DHr@~?Hpl6@oC3DT5E8+ zMw0uCEsB~j(yN~5D3U`4iqkDRWz+;kKq+{T{k2zNMu64a23`y1&VMbG8fmpUA5xp^ zv>s8`xTJB%1Ogy_eT8_WMNz0yicRb{&P0HhC{W2WFtj2C1@InW6Niujo2VdYnPkfM!0f?(e}WU0 zJJAFttPkL%DBDkFD5nKi2eLR$4m=mgO~-Ny1S^G4q-`W&v7&|ueGaK_wA0Rm*TDGvem;83FeWCI}>hfL;r*?QvL#152|V(tsdxM^}N%nUF9s zxBxOTmKge7Fmisvb-45q3XI0ceU8eMxvb<1?5i$!eoIU zIhBs-xf}ZO$Ct7t*)f2V(l1}p;-Mt(2^)OoaZ+~BMB8rmUot^^AuD5aaO$W`jP_Ex zAH0VpBTM6dk+H@`4FRdimrS{$(F6O^Ru5tBH2WG(7L&dwhx!}!*&lYfvlX`FSCv8| zat#?9&}O9W<2AL}ja#|n$g;u+xlARff?-Vl7AA=u8mtR3r8nH^y8kO|yPbG~wGIcf zwyc1aMPTNB3bT~HOJtwO$+Mtav9w{SyEpWwR8o<|U}NDT&R9=ze~!W5=4FPQ1E>m;tnmPcyV`^hC*?7cQ5Yl?pBJsOMxH-3UtD|zcqXIns3jZpO8FRckbsp zuJbtk-E2SPex>piVFZIR+}PT$0sO#b{9QD=z{L2i8ub;~RYU(ifZ^LhF!ErFSW_1n zM(+2o;m=+WnUq}iTJ%Fa_yi5V_y)J&38@Hd9&;`l2A$B{XfNt2E(FNMjLf}Xe=QA5 zX#25wB7Vr9%fo<2ns?IAiacVoj6c{wN(w)80M^20YGENxo7i;AB)^8FS3aC=GrNXd zUvMbHp%)^b_44GCgFN}YevWWsH26>irs9|0T%Ajx?7kaTPI)e29yo+|yxnrBOU6Ab zavON<3~rqWMcjZ5pP%-}cosh9J8215j5;+gTQQ%%&PHF;zv;;TC+Yl>;A(d!&)H7` zWAwt_G)^)bVaZsnqxUjG*|2uiJMsJfLhGt5vrbzOm?fQ`pDdhn<9$K&X8rjvUPK>WG>^zw1UDM6* zA1El%rKnPpI@Y>7Ofdr+ze-KGHfm^`pQFCY)yxf9rhgJSwcjWj{%3dVu~G3G;_Fac zd1OyBTbn9`hX-G>@((DM7^!jX^0h3iiQKiwF_?eu3(MiQN38#Wb1dhDitc8g%YKuc zdjuPp-N2x7txa_gB2VF6p)ke6bnxS~9K_kwqH#|#*`D{U)@d{K5iG{<@j{=uj?!hf zVsBr%o}E6B)fDtN%OHmG`?w>15Bczup0N^*we_#0wZ(v?DOCBL;XB0W)=J`~sAzVl zq4T#31K$8=aevUTuy}(X)pObhNe9jpv0l-^qp@gT`7==6b!Nsj*!YJ0M&5L!>_({M zXGgld02<<%;bzLr6fhbvM^kgd;ZG+Z4(~-qlv0#tF;)k}rUsK=6jpQfih$m;bqi|l zE6{tU;1lB5_ls55!k5htdB0+iS(@Wz;n{?A4qX zs-*3P#AgfH;iBAP{6u}S&?5Ocp0MK+_Kv_!1}S0}*Fc$yv?)_k7r`Zl{wI7RFk4hA zgUxgKryJwoxTLMl$;>(Ym9H;8Ex+})T-Z6xNB#KGRNr&#rPG7kzOghTd1B#r z>RlO47Tx`eBEbh%X>mOdg~f~ae;V?6!J>4(kecehoG#()}DHiPQ>8PcZlHs1T zm7O)`hiYZ0{8r_djnV$mUw>oayXHl`&7C=!6^6mT2{^a5kO3F95w-+p&{im}%Z0jn zf$f2>#puO)-Vol7`AfN?ujkMJ@N|#IGbAzq?C+2ro1@Be|11IE4sQ)c_V!*md$@}z z10DdceA~0dBy6RQ`|gniM*zzp36WtqMf8~Q6AIO$+e8d3loVUh8yY=R=3N*2>%Lq{ z?V~=SemNiB*HkA;0k5kJE9#Wv#8P)X)ng}z2TMx;I-X18A;zl3^CRVrS7=CI;sQ*A zKf^v7vog?f`*<_n9m|Njj^eZ}MC3RGgA^UAYH(drz?pREs9QpiJwM<{YE)lU-C5(0 
zNASzdIpEt4_bt7$0Lq)Q7QEm%zP*?s+mrfZL)FF=@hFf2pR$g49=1my<8PlYFLr-+ z-y?;mNrab>v}94o^idQ|hW`v%6La#ZL$uS(?eB}!I~Qe`De>2#+AkR3lM^*Dg|*gS zCkV1P=IXa6gk4uOOvci-=^e>sXuBhcKU*+b$IcFewgIct({Ru4p)ewryx zb;0z@*RmuB0Tapz0ned(7u4BNpF02k{`+jez5$b7tkl^vkx=UUfU-|UcdTEQ*>1SH_NrG_&j2Q9DOiq zStw|3KZU~X&dKHD!{(RHqal&(n|Jq(;q8e6^?}92RN<0RsmO82kBndcxN+kHr3nWeKk90Kzbvvo#+#D6}|#C<3T0 zr+QQ=y~$i698oANlVg^i^4aHJyS)6k7tYo?EGC_j%yS0&^7gT15SIkgUfWmew|G1O zJgK)Y=&3XOwc5Aw{ie~`y^S;nHM!=qPNL?zo!4NBbxWu2JZvLB*^>%BPol_=aYi&x zuyJ(NOE*2^icMW4Ux!iOcm10(Cs^4*B8;MYCi(7CQwW?m6^#X>w~w3Up+|Z&A@V-L zT-=vCR=F#^Q9oimoHySDld2n!&BELyLI=NC3@LhC{=7mon0Wr>-nF7EBUt2pWi0bO zYq5Ku(a-gw8zKn%z4l%|yWzP&pB}3dlgJI!ND)nUW8i)fi@^ix84TcX$vyg8Fv(|^ z^vwd9d{3nSTw&B}&>>Dr(oFImuMgDI+hlhkhF@pVKEvse5_mkGx1nK}iieYd%yaGF zcpw=wwa8>yJl!n-wD;_*sSs<(VjXJY&~OHi`>EB?N2X+7Tb;Wuko`z|)BCX%@`ZQ|A!c6_tr z<+T4n=2JxdzUDo6eh-Pb^8saDbc-m@3^|vUDUpau{x=s+Zoz zOO2&P$MFZQ^!bu}5|`Kgq8v><98RE+RM-OB7!Tj7LqK*U=3khzf)!Rwu~0si%C@1& z`xdq^=C^w?auU80F=A^pbS%s7p!*%x`BClk03W>+YY(GNiFKdh2{GaBH-TGXIMR0S zmR)Oo5!@S3oXp>lGrbkc!>$4f=JYNF&pM*@<1+w?@P`3dG2&uKkmIr1h%6AM_UBCq zCgrytg3YhW{)p*I{alOr8kC@x4LpMjeDCpr#sbiG>hU0UjBsxcgu_+LKziU~K){yw zGp#MDpV;-8SI z?A=xJ3r3Yk8pdcm=8}K}GwxRPB3mtekO0LMKeO1S+p+~{nZZYpfwTEojPFK0h2_;I z5^Yv1ara6D%2~y2HoKy{cD)uUnoBDUo`CL6tKR0-dPIHhP8`I`(PoQ-eXmKjnVUvB zO@`dFz+W`FlLc`IMkW`GLilST{Z+;q_3zGzsdzBclNb1yR*1QNZI4xbaz7cqR@_V^FV3NSd>7l7jiYz~h zZRDTEYGYTLN>LK7`~QOsap>^irj@R`^c&|bIZwk%EmXu$z6??ed0Bts#zCxxN4;=> zM+U(K5{l2x^`#G`@#j#A3*bU<&Bsp0{zd`5LxO7f-K950VzQkG2jKWA@z|N=I~51a zrHIAOmv-XP{4$X4!tU996uJ4;_zL5Dhslg>%;7&N7yihQlDd*vydyjah~)&xq|A6i zw1rB|M!Fw{ma`C~?@+hVLL$+xWS}DO4}#H$a#!_!VfnhBC^7u+-bbHpmpEnZQdVJ% z;WUBpMam^&F1kZBRsA`{ef`A(f+4vYu=cA@djCk}OY0Sy zvd;SY+rP^vV@W^tJ{rGyIRcO2Ex0;H4n!67`+r-Mvj_}8z#4DNID#sf_PMy|EY`H*up>OY02|G8>=$5@ z*;sb?0&JcM{u9`M(~LEHM5#sd1Hn#U`>zoYqF5$d3;*A@oE;gA&wvPuGwZhnT?dgd z!|(-^pzvN1RKQkkRc34lg_j8>;-OllDA^Lf*46Q8ZGu{vZFEgfH@xVcD#yD|PIlOia=>e z9<)LhH4ztI`Z0dlnixB8TNY>T^LX-}Z^gz0p^=Gt{*oouq4AysH`zGej+lOTR=!Q; ze;hf4f!lchaQ7W&|7KU4pL*^UrAtQ>vT5`OdX5A5>!DNEqOR*O^_Z{UVjuZ_!vXIc z7e40-w1}@`goM(w1Ous`)tVpSH!O}`an7jXDew8BfOlf>{wx4Fb(x1cGFy!yaTrqY zR-2pTq0h}|y0KJs&Mnn|amUzMx*BL9)KUDs?eFErr7pS}YSYObJgQ5Q!L*<6AkVVzeAAc+y*QIQ4vx95)r;62pP zdJ|`02J};F^#WV7=EPRVsKP>~?GJz?gagzv#1AeWR=SOJ?i)r9dJHQ;3J1e*$?;6; zW&k|;UNMXGYg&m4SBBN@g?GoshUW2axUpWfp?h%?^AqE#X5v7>qHfrTi|;F)pr=8L zBkhtIK}I*$c9MlnyiNImx((45q?lx24Xeuc8KkE)6z)X*rIciBN4sM@#zBm{N2(+q zyxooAF;_OR+=3LeV!ImkNvJ~xHyPH7pbtvS(){r4%8A~q+D-dMwyW#IbOSGd<)1r6 zix@vhGTA_D)b!`QP@*%yh)<&rLU3Cb8nbdmT=Vm5sYmypmv}k*D{q{|y(#J5Xjp-S zeo~6ff)Q1w=YA@AOUEzQDrXTt*>Y{rtPQaV&8zG$fLkHe$v?0)I(#S?!BAiYZRJyKQglnm9j{7c7|_GAzGB@rx*9`}J#LCY zh&YWcubm@w3BJo)oNIBA95H}bIbI~;oCufGk#PH>TQay847X_Vl|Zl^Wx9@a7<4Q@ zXa{X$^+1w>_9-Y}6)P2Lz>!vf& zNE0}(iyk$tRB{NqBIPwCC~|mmJX3?5LlLJD^Q7Ujtu2WOBqMs-R=i7w$*V91(Z1~Q zy)wYs*@)=I7Ml)EiB6+ns7&quZoYS!4nL!|Qvy_FXgV@91F#}8aiX2Z5s~zzidC_` zU5YCE9y^BXV0x^3lhCYmfivoZa#(&nGc&^#O$47@G}xBH7MYeT_&a+rdSw?WQ@}uclHnh}Br2mYzc?R!A2eY{$5(ol>2Ed?IyS9G&`c{ z-OMCLd}uvWzRD@3d0VFdW$}#LjnStIN5HAcY?2ek=Va%F+vtvJC*SJ#uDYp2fo;UD z+_0&+%R%vNEnRfMhd_9)^uK4yrXG%K$Xb@BFm7@)iFvF;*6DS9%AH4oH13GzF@l+| zmjUYLy1i|sbHAmE#zk9#!BTpj`e(Fd(o+%08Vqq5SHekGEd56Fl5B&_6 zf73g>U@xP18ByXWA$?+;Jd!=bl_T^Toh^*rrD(OfS?jZ+t^l0GB%O37P(pfvoXj}< zB;)bWrVi5~6+K5cesZ3gl3Bq2Zn5Mz!0)HPP2{h7?^V>yweD^vURA_<6b%abfued$ zpYK;1EOJ#)pM5bZLP~nx>XDTk`Op zr_%d+7@#~J(yS;$I+*mBrQTJqEW=G@=W61`Lr|R`l|*NUZ|XDM1HhbSzdQQ7 zpzJI_lZqhvrxCj9OLSmpiZX4ymLWnX*t06yT1_-l<|u&a`y_|tC|)J|i5Z}Obvh^tP#+*?9p#*rSe17T2W zjbf9CWWND0VeP54K|MsjkpZP^=QS#OgzSuHvbkR2#o9ORc?K#IaVXX58-@z)4R}ff 
zVYgfx1w^er$iBLzO23Un^Wy%hB4Wl?jgZc;vhee*RWvC#siovWyhgC{Mp&pwW%{eI zOz>})JoY*i*!=>{rJ!OJar zt?GdNJM&6OzW*qZc3GJj9j7FH%EJ8KhtXXN#IVee;$)mW%A*LK zEiL%?$tj3=63{S7L~Y!LqbtPKKNHFN`4wrhg^LJ=1NE*m1jBda9U)Pk{LqYluriU_ zL`+4)#9-c#-M64S2%k5KYyd+#66Sp+(q6IDwdY{>V3E>ts!Jsq;tctPE}doI&49p- zJmJo?%vf4*^W=;|n>w2!wD^X#PwmEGZAeo~ONW-X19<>NRc~N4Ii%nQZ+w#Vm3_ZMXG|_zxO_)BMX_=V6V77ICMg({~LtDeF(gz4?9ZrcH z|HD%02J{nV3yVCF*cQt>_flAD6a>QEg!(>#_d?lWojk;l%n_a&`5rQldV&8Rn0hBthn2P z`}N3kz|o8$pS+WNbm%a<&K$mfs*Vs@p~|)Gm9TI9x0A{Xjwa=0#A{XQyBJga-8ZUz zoyrOEwV9QDg&)ba@5Wgui}@v}p!dY!xIS1!f}22IM&_SK{vG1+0dZcLb;x!MFQueItjg8D%~ zZdS_h^spe;TMKdNRw3@Z$7%P8y% zrt)m^^QV<-Te(l?tG&9`nL57eQ&D!#+Z0x?o|M~k_MaaEf-_fn>zT&3D$#$Pom*IK zo9ewll~Gs<3K4XZsHJ}taB*Vr*Q?mJ>*@Br*AMI|`nw)XKUI;d)TsnqW&T{B+q&!< z>oj_AyPnZ$y-T?;%yw}8Ej}$Y#{GTVADdu8$9)mEDY!*TEHsWd0*i}X@#o*dk%~wv zWjzjyJ_+yuKzIf-A_KXq%_a_2gmbqAcRZr&@LU!6%S8p)wy%=m z`tu{VyHmbR>^NC#B7R!?Kv&OUjRT>ru+^`ZCu@w>QBPdRC^X~yfMsq>vzN+_ctz0Z z$1g80=Xt2P_5@dX=%%AX7i5Y;1&$@cCOH7!Tr8{0Ozui%Z6G0y;iAmZEqRoJh(>$; zmM4?`fht1a*Twx{nOLm)QQ)mmP*pRA+`V2eTLu#)a{8=vGAPNn~@vh=Z2j4}@ zCvN*Tocxm{vodqMyLz58A6B)HnEQle5La38&H=(ZPc5L>DcZd;QtK4(?QDAz1{~%4 zFVu+eN4)CPTCS}uD({QSnb^8f?w~?JBEaO5?w|zJDbu?s_(YIHsI{Ji!6+HgC>f3$ z-UVs@4t&O7rHG-nd6n`!l1V-ugvA!9hR)>yUjeWYn^weTU9>}|G4ST8y&6%ehN&7- zYMZzX2a5Is&Rm7+)Kx8$t8hCRqAWh)d#o>*%ry zhSQNf%ekuB_g>bDTcr9>ujke@C&$cA{wVsq$5Dy*SsK&-&}Tz=T-LY~MMvHyye8QrD{1x~ zcWxp7#`PpJWXc8X&9p5{eQIril-aKr`JqThcZAKA2+H7tV$X_af%+EU+*K~O*Bnkk z@nZ(;g<)bcFil)b%MHL1tL>kBw+kqHE~FKOP&r5?iw^Itwz7bbl_4pf7$9A|51GU1 z^f*}9jlyBGY+2|zq|Vbd%@4E}OFEDY4C*w^Lz~UrQ!{kKWXB}A{<)#_CxB*hg``tb ztg~^%kD;7JsFv>p^`;$)`R9dRF6_OL2)8(z=yk`e%3($M24FX>nD<9z^MJ|>w5C|& zge)#$RE?FuHj33Go4fL(Q`8G&3L;+*Q$E+)v)!%0)C|}$oU`q*@h$_Nly$O(b*W^h z>j8Ac$8G-qs5bxTww}XNJ;iUM=>DI8 zWdxc>&@pUpYl1f(Zd*dYUP$G8Qqk1-_#&W2N_~Vin!CtMpq4qrd1dXtb(t1NN`#dz z2C$vH_{FOxKGJ!>>}E%?ypu@jnG8rzxML#>pRKI!JxpE3nqXe>+tc(>#KY=74DRfc zR+s@al`oO(aOa&?`$d?g+wv|fr~>^Xa2Lxq74;+14puB*sDC9C+iV7-HwGE8iLMeY z%KUEbd-RNxwA)MY*K6(jd>@NMtWh%CASjS)Jpcg9XA%yn^@et--wZ1O&JQ~Sdx7x% zUwam~?j6}IAs4yvR<#*%>~|87S6@=O=#fJ%DD$3i|JVc$ds5l?!Rmg56ieBM69kk& zLbKD%B(cS!n<9Qp8vxAi0{P)c26iw|b$#~`mP1^Dl<6>gA3+rnU2dOwW+@*egMz|s zcD8gKzn5%O8MXmjGbq&S;#Mtt)pzaFvYT1|@>N&&;r-N?P8m~0 zTFG{&RGF3Sw->ka-FTnVNUJTR2&eV!J4VJV`s2{3E%nE*Up(11uo>j~br5=$*N^X` zjzeTxFoWfRb`k;UKj)%RpEFGAZfaxE-^NqD^}^#kk2SKb)?FSJBPSHfiMRM)JlW4f znH5jYi4J}ku!3_~Ju-&K-KK!MIq(m1(TWzwot2xuXVlzG)Y1Cm;*6~roFVv+%QZ$B zwN{fYz4)|dwjyB;jtccCZ-)rH12Zdt{b0UtwAq3-wQ-yKs=i;)>p**gI5K|0dArYzjnUsz5T&4kgcoFe$%oTn@=p^}k zOQ#AL`}INB_eM(K-(w{h5hfwjtNTKC6MMv;S6<;%)$27o~73^PeRFCKBS8S_SCM?#+Phph=M% zhjF)cS=V(TyMTz!p97|?ZpVs$Wr2juWRiZHjl+CiEGkCQzjI zTQ?tn;_6&9AC1%y7pQ^5<+|$5ZG}3nlRoP6OZ%X!y=e(Fla6>BEd-SXeaV7qqD8IP zt$gt&vj4i`b2ULG9i(Ihi~ivLnw|&`NY_RBZ)i-%*ngohZyo!y*wqyrzOJCzXO}F7<*3T) zK(r&nKds=KyVUlIkv=-ANF*?0W0ED>l%^p#=1S!+ClNSv+ucnEn>4Z!S%@BuklszrpL_!&4eCgeKqH!V0gu9l!w=1DO7HS-@ z&8|gr-$FfWvXlM(dau#auEtbcjc9{sXz~~=n1hCom`+y zY3{HLy)C-`sc+EO{;QW5*5xHiq((0mfAn73oRt3^ZYU9A5}X5G$_Mka4s0W_*TQqy zViN039cQ-d?M+K&=`H!4`)SsvG;iT-fM1uAxb-?qZBn9=%#lPbZk0Gdy&~VF>l(hW z&Cw?7s(0R0*t7mH3yc@#Ec_9h+S+~DH z`FA*PP79H)Mm~v;IQFE@rV7YrMc_Ctg1BrR$8s zCZ~^tv4FL!(po@mqpCCJ?EFS%&$BxQMe^BGJcsWTrTD783^RsN%BCPelIcP5SQaFv zLK(uvxe?$+Aq;i>4E@rA$S9dFKg!4sSB;Lj%>`?CBmKE=_kHz5+|5mOo-S&J40N%@ z;b>-Pjr1ZmcM_|Xhu`_L29e?4Q*+U_rr5}d(Be$Dw4b|6?L*N~Z%q#GMx1447paz< z7g|ii>r~iuGM#Iu`nOc1ZK^@d9(*F`;}ynjzgn%8CY0@^XN3gL!#o zIX}PZBfsUS5^hRODh*jm;_En4S_h0sy7E)nY4ki)tDrU+*~t3M;1(ipM105(Blk+7RHLFO-B7l{pe?OU%?6_b?nF9uaN&t z_w{c@;@WjcpWqyfMGqWMjY-tVJQIMfm3uv 
zP>#|J^p6mUMX~@WP2zQHx=*NpDFBMtPU46RCjbDfP6AD82C1(}9;aU9L3Y#^Vy2YTf99Z9}`6ccb=Y`--!}%TbAC= zHiTr`m+EpHB`LPBvfQ4k@ zs{^uK=e^m+#$+pxiv7;5G%w>b>`uWO&CfMFiO@?#*!tM2&ek@0P?`ZkZc8PGwGU@2 z1+HfO4C#2SH~?C=8O)zEQ-N!HT#~(m*-=)CG?2?iB5#zIqD02I_6zqA;v6GP= zti&NEgU-XI#8Wg1?Bv%P;Eo@DOif#x8UQwt387hl?o>;!ZgGmIzq2#mWuJfx3nQ3t z^^AuAc>r=)``yBTg*udJ%I6?~dYdb+ZSQheQ@&v2E27d~)7GR4d7-?SNeY4Z<*)^i z(3Imd9j4(PcDYHRSto#}_UtTrm>Sp9W*-vlEVgmD_iCiE7OfdkW;s%a0mT@#XiL?Q z+lz)>VDtug8LZyqX2(8~M@kq-t35R%>H7pW*B>q5@RF;J&v(mV^}*M}#FxIGlqJcf z_JHr!s64~&YI^+2_9e8cwJ>WQ+qXbLGz)UiBnM*a`VRE1#C1vtRnBy6p6G8f&UGH8 zjv^ao_`9aHt%r81{N;ZFH=EfF`B8|W#mZ33MPzy-(-7=Gik9!6=u&z5EUn)z#S>{Y z!m`a-ZZr4CfTad|3PY6}#76l+ zCO)sH1P^2FM3LToJURFv2TINf(+-W<9U@Q=%h^WbrvT*UMU<{Z>b$t@v{TE!;{SvV1og!SM3~7 zkPw^{F)Da~67!hhR4pHt5eko=DaLTWiX$V^mgIt5RcyyLd<9U;7c81@5}_1o)3=uy z?9pSY86xFC`EiXd-v{CRF_(;yS@XM#yIbdmVED(%lrcyuQybisBDsEV5*tkt5(|wC z14{=bvQL8Y4bRn7v&LVWr1@BcD8(#xUlFAwim|&@*Uyl{^&h5TbGJ^XE+M%PbNvYF zuR*zJJi{x~?54QMGK$rYo{WwE?Bvnd{V5O0uPZXKYEr_)4Dwc&vW4@CK$FN!p`SW> zg{7SeDfkJY1tYFUM*+bus1d3yYmsaD zI9jh>Rtydb}+VQP(qp3ukT+dSz?o0|n1n-qCU@+(T43v$-P=oZwaG(Ac3jCl@ zI3>`_4d~!SP5AkU0|}r8)=TtHhj4iY!|n|7(9wO;dL$aRJ= zm(~WpEzpr7%+w^TE~b+;ebFivjLc4`-v?!+$C40`HTBg2TeV_((Das{@iz}SQy5G~Wif3FD4UWfMsc152bp;~Mda!TQU zE2J-t%)HI3nc@?rgV9#{vwFjvvuiTTVTHbqNr-Ti{k+UHZpzj_qU_}Dk=&mWpV=CN z4j_XE!s?saIl6`ZXU0s9TUqnaXSWbpID>AHB`M1To$fZMf({wJ$3nS^SCI5>Ic_jWNCB20r4(Ptvx}AYM=DfVY_?wR|7S z;1-Qmuv4`crlXnz3gkvnl&!1kiJpclIz9VSDk{^=V&DmIyL)WE%EV z1h96Lo}5Q~(Dv)Z8kz(wX(x19j+PbP^SwOx-$iRW(UZ6s?Q503kxfLsj)+W%)_sCH z_2)g2_$bLmTz9r@{!JvXx$bm2;SfAXE=o)nze^CbSLu?3qtxIDtz0+mGs_DK?2jbi zh^6n!gfIS>Z1{dl%`tumBg8dc&8dNO;h3nR@5({LGaV%f@dlnQ@%;^;x%@}43~z4w zcn&$QB3DMLWw%hw>iDjV7}}{d2}vm7V0T8cT@?CWCF`7UbfjH>>odW z5Zv1VeJ!-kaVbB{AvX~L)CY&=^{ph$P#nN9#lE%Cfi7fD@WOKi&6r2U`U%_L3q(gv zZv9P(Z>b}G{?;sDxptytH8+B7jM_`zo78(a>Tk}#B8*B(?TcaVF&DR^#!&)AkFYA0 zx7i2g7ee8%z|zX)dpV(KNH8I+p03Ne#RX2qdwK3rG47UT4hGfuk<9hIY?e%IbXreF zGt*+0JZ;jUc_b8CBDJ?#V=2;k0sR3zW)ecwCRD^x#BSXXcL4h7ud&Jh|YAxThK9~xuHuQ8ny?oi_C!Eoqik^KW>xMb|nYg5Z zFG@y1+dpSbRkPotJVapd=K|DqupNl2d;u!;P`WaiV*Nb_Hk^b<1 zFkCnbtf_Li(*E}t@;tB5#kevv{g9p(@=eA-x{%l&4LAtdgAlhEIwwYT9L5OLF;(3N zQg!PQP;?UeNMrgy5&G(@lN(ME*=sqv8;LA;5$xZZe>jv8jFK3p#k~g*O*&##oVJjr z9Sd-Z^o%JKXjv6gfq#~78|#>AeWjGJp^9=34dxk zBCKLI|LVFi{4UAO*PZutT7BsLIKNa#AMGk<=vgH#+>a4Idbp6+4?F zDg?J>Ey@ItS;rGA**Yw)y*+%N1L5U1)E1$n-SaSeXLQ7=&@OW5JSYH1b3j5I&K9t2&vC z7?1`kBaE;x-;fkaUff3Nu_&y$aN+mu%|-{Ofme*!sW_TPYxW~<@X$_^&i@bf2^NI55f-LrlEH#C zBNnSUDH!X*?i{m_9=x|FBgdL=!}!37p8eV@GSWSbf`IP4J_72W2{R($3&_CfZ6J~? 
zNe;_Sd0wM#F323Xrb1Hb0Mozg`)JeR|sP4U<~ z!K;es>rV!wmCF8ojcP^%D?ClchMvb_Yc$bi_=V;$uK(-bfFDY4nYnHB1!jUO3QBt^ z(r1%E?u#(3_Fi@lFW47;u12IWf>ouvKhwyM-rj-3@hyc8iG>)usOP7SJ(P>MhNe$8 zjexu3zmMGPA;|o5BraZL90-{ckq}?nq1)VzsQ9U8Je=*Jx`tS%=2@M@t47u3*om%{eZ+(6hb9{57Wu$Fz z)at9!7H~Y{de_jGlUuO-^oS_`bj}HO(+?%C7vtC);=J&Ke{ER)1ojd}rJ0x-T+7=B%I#H=_k z6IM#_ke-P}8Qiy1Jz3D=vzMcb^v;dcWjEP|Fh>@V##>(=%eL(FmJV8-^{rKL!65FS1NpUjzwQC5mW_kLj{i7r3G znNd@cMw}XKf^Wv^e7C)mawbI<$urLYjP@oqD1Y!92k|NOE!!NnB%-Z|{x<|#Tg8(n z<|47JR}*-==wo%4!A9pGQX1HRttkBvd*qNA_4{*nz;{zqw0`)1A|TmJbIC{KLjYk5 zuz#E|Cwq@rKY6_Fa_ef@L_bkz1(urPOGJe~{f~^9Lky55v7{6!JRiF>mgD&HpZyBM9s6>d45U{`(zh6~eZPxn9bdqY3+k%f`ApFIBIP)@0?ZqR9oWA#QkvLoz*OuOK6 z49@cTeLX|FXD%*{(Sn!=0Mbl#B$GI81#5MHAeKU7YRKZGY`ngBoMHmPoOz8Wxuk-z zGu6XPU(dCmBgY#IAZ1(8BtMpXviAD4tm9%{YZMy0Y}!rZQs{V2EvbLkmYar^86CjF zjA^?h#@q)*Wxb%n=rE>KD1{MH$#1iusOh=VUmoGXZ)qnwD-iqsCamrqjgS?sMqUta zgYFvMvDcsg>?|1unIcePa2Lk9e=|Z`J`Zfyyfg3g@ zVrIlr&)J%OqcK-TkQs{KYw_91{5OB?f1K|y9DYe{Qv8f`_Z7Ei7U_dY5GCbFA?8X5 zL5xVD_OXPCq<$XFVq#?iuFXiCypeJ3~nKoch^r(Z;K|DG^;W1Zh2@ z{b!%jte3+%yfljt?*}IhSMz-*JO;VT(Y7;x6X-hl_hJ1uZ2D6j`%*UlODJG2 zsfll*>-M2`&y`@|V+br05(2BAQmo6xbLyU?H9)EAC!?*@AzIh+YF-Ag`hkKZvqpnJW2l0s~%APErA#l&R@BNL`qO zZZLBcF!e;5xV`X7pNTlo(SYqhSq~N?jr87 z2$3DBY=-8y@q->wSYu`emEpB|fK3p9Ie_-I&0?3oI#nFW{Pfdl>wP8S(S+pJ-Pac! z)NmIz?yQ+^*d%1e#(WH!NN6n*v-#`|tPuO0fstzC7`K`_)swhA#EYD-EQO$6H*yB& zc`wRcao6!skvb*iS^#)u9n$}#l;u_86cwLc>xf|}sZSi!$5qBlE$B^9O@OjZUZTdn zG?9tfzeaVaw&<#wmaLxVwBKLE)&2a^-wVX~{GUM{M{HTipr~vicbZ7nIAQZlokspY zyR|I6f{`an7XW>ZqK-;CIkR92I>rXvvD>pChxcw1NEnUZN?%Ui?Dw)4GzI@51 zcxli(YniB5d^g_Mexzg%MU*8k>jOHD5i9q=$|ns@yhk;%K2H~tj%IztpcI5%Y*xdT zp+L07VDwX>REs{Bw~1sES9`g_LPycJ#$;JUgxAWjjIbh0SM}pU5+I6y8~N%3Ehlcz z--S^-ul}vfqt35w+s;_~ID3C#YbZ;9@_S{$zLBzWpi_86zu$lmV*otEk2Ml8*=XbG z=dR^cOPk$xMM|*{XE8?n2nKPWSUxLzpn?GDO!WAsx-@hlVp1~AMk1PG6VKsVF~sJ& zjj+svl<){}8M8GJmT?nGy;*Yg=4TR9^~xAq9OoMbI0j3CdAZViN17659E%U&d=JsK zH@+qD!+}ppKP^bxbiGEY+0O>eq>n8~92gR!1C|g*-`<9uvMge0f*t{!-BEQ5W=0Hc z0z|VXM4(DcSc)POP!P`S|qD-@1WQPcp!pAG%L zknERJZegv$FnQ&NE6qt@;F=;jNPlsP zPQq@=x?dl)Xd8I`($8wOOS>N5&Gv;C$M126;&*MoNgW36(TkLD_{_D+JM+qM=3^bA z;?mn;Sv(O1@>9>84482LiCK5j<-Xb9nbLDS+%8iiUh#l#0#aarh-3+dYYJz6X_g zNp~mUjWi71 zrF3_QG}7JODk0q<9RecWaX)uIXPCdrwC97j>35R+;I!8UWV`=9t90SxOyz3<+^0h>PKEae$3|5DM4 zS4n&BhwRUB%_*bt1rtEhc!^!b36Fz%Osi7#|FhTU!2Cw)EDPcExteG;=I9a^(u+*k zYl*%&^ZMH%_KuSyx7j@CN25joSeqrw^EI3VZ)`NzTuxf9*aC{y8``Nl%EcfJ;JO^I zF1AHOWZ=f#t2ENYkpHX?6^@dx6Gbb-s9XBJz$vnaYlwrnb8Lo|#O&aCyn5d9;+rol zq}XCL2_JWJI!ltFgk*Uy@ej*FOq-=1uQmIWMX1NV>)aF=g~pcN5qa36r=P>AC@gF) z-&iZT_}#(k7>u%6C}rI2=T$b*TS$NALjFV)4$2Fe`^rd6Pcnk!IALMJrN}1>zspk5 zJ>LS}t%~?}VJ0~{X`i6B>2;7VniUULQf{~7(+MsBM2!fumKGnwG$IQ!cS;0Nq_Udb zn){xSU#~3_&ugmb4;u6zS-#jVU7jD`&5b)OzviX%{FNtxh6a4UW419?Ql$9iFK_5^cEe?XqiUUI9c3b>!>P6-+EV*0i`UC8SfFmE7ET}=lY*9el z_Z8UwI&-}Vv(clt|*bp0kg=Q^B3(Z0~3`=~?)k*HEi~zbPupDPmKEk^Q>9`<2+2 z4wi-ynMdW1@S#Gpm$q#4X-_yywcBpHQEh7}!yHL(_FR@RgxM(tfoo8H3V&mo?3cFS zts2Gu7}2ghRCawN+Vts`%>8mwR66e%Y0vF=O_9 ziqMHttXIWQ;Eg|o+>|e7=9RYx*bsjcb5bX8EF`m1$D=_UVDXxWdhH#N1JYu(o)1bh zXP^0efz&leMO~`u3X&_)XQHCb7~N^J8UHpM3~eVUU=fr+RDds@4cx<%Pow+I1@)4= zDz2Z_)^{^E^E?1*&u=|>Sga+b!knB17T%eWyw)9;2J<7F+4CE2#ge`}4qQCiI!0aS9n0ibJsSW%RT@ zY>rU!a}>Vqxq=cOqu>fp*KzEcjuK&%G0Li~WIttf9&#_}=5NO)smGsGF#($EQ*p*a7H%7T_lTo;ndRvnQ{ zMQy!XBe!*dl1OC>9>l7HdlO%szmjUsX<19Z!7K75Q-Gh~$N=okCeBT4mWyDa`hTY6ejk*eDDQXgQ$PcAz?s`;kG3 zU!!qsMKLM|nD~?%X8&?VzCxUf&6PQyi|v6Z3gyGgZie?M^b1q4e}$ZsYww$Q`Owb2 z50t(JFI2M80zih|F#IkNt%Cu47dvM>G|21}MaF}>;y|WUw4A7c{2TEGKl&3-A8B@4 z|9X{`87&wqH^wYl4ZkSPP;-iGO2h0BnweeMiYFt!g+?2Om{=p)6uR4)_RO~=>s@*Q~dzjAeA 
z3@DmnOy_-(ie{uFEKCwM5SmpTceqLk=2W)>by+v_`UWtw8|4#IyHg}Zw$BRc>CkRms8Aa z^h3Wu@J7&IWKEK3qwLHr2OB}8y-&rC7jd$j@zL!nAGaR{fvQ>Ofrw{=wNP4zRefQ$ z4B~i9Wy(wKbLa6+74R4)vrGAx9<}$wea7>t8J5S__pUBQ#pmNY)}L?Q^EX?RnEXjT zNLNt$nV#n^svFs{+0ny7(Y0Cp`u^MNbjV95@D|;1#SWTu+>yAZLHSvtpQorl+{E-Z zfm611Es`KxSEiFszOVce%q~xl4KV<(0BFqL>;c4!bG89Bd|6mEoFI{IAiX&7Ac4#3 z(hTN9pWd2!Z0y7EbCpSj1Xu?CZR;}Sj0Bk~#;yobOI*JoBEnuF`h?z`{SxKyw;=8c z$z~V=sz;VO7FM$-avxc>qqda6N3Bs|82lo z@H48LD* zM>ubk(!NN!nUEc8bVPb{FR4e_g{haw;r=!bxQ694v*;kBeN7OTQxJ8hs>ZBP9q!dU zz{qMs3wlP<=sHWX8YyZZUF%Hx`Q*7;`k)=T=dSvimQ=- znmv0~V~xY;FFCHwYZjD0ss>cHIsgY-=7yPqW{!sIhNk2@WK87`*kf_p%&J;No zF2!fj3&oFu%hS-N(=3+${`-_?S8bYo{AullAE}4HpRSJXKh+Z*h~}r;(^oScoExvy zI@|P~#$vf!FZED)bT<{Q)7Fu<^RrJQL>@kHL~by@8uj;n8|wAjI4f89^9drE#hJ77 zLKbv;JtK9F`3r^AWV00!3exPt1+ACrqxAYQXX1DIWh&Qeec8lo%~P_>rz`*yT!`(; zG?X8>$Nz^eHO*bJgkQ4*PSlI!eLZgeH6{YeOSGiCc`GTc+KXh{7AypXyaBME+c00X z!`)CL-%+$0nAjjWl2{!@G0Q&#(=iYc_*;JOZ+`pzmZ2OuVlyNL3`EHg$*gCLUGhLb zp!&|Uc+yf}m@HSI?;jFn(rZf|A2H%c43_g6p+*u*50*Dc=3vl%S`3z{_y1Ir!hk9I zabUGPoT{2q1MrYudJ$h(xyUG64vtjn;}+b}I?2WEP2W_@B( z)fqX2zp68)WSStDtU33wq>vElDqdJ?%$o0!MhJKy!9grsY~%MGCzK7g6uIgZjv{?%{2$VO6y2kwHTI%`Oc;8s^G z7h$kJdravT;Lk_i$HrHQ}9gVU28k=hv&;4jR zs>dyQYB+vlu>i~MBhxMmqpW^xzdCSopT5jAV?!_{w=N5mi;ATme;8IjJf?}tcmybFnOU|6ipvX1_bOVW4R+^ z?<1@EHn4QSG9Rsr1$l^3tL+b)ZjyV7f2|*ySJ0qlOpg3$^ElLJB0QUWn}iTM@i_9g z6B(DW)3MldIQ{#NyMz74PHjVS6FD=zJWHv<$o4d5$@J7V)g1XpP0b$HrEQ}-BNKV} zYfZDhpWj-oo|?`-h^E?u)A_jUu z2&hO3Fko|^%8$Tcz&k)q0xRCkDhj;C?-$rh9xo!M7@LBFfjvFgj7(j>m*C6pX z;H#@eA(QD1d94?gy7saUFM9WcKH`I1YqBoh$lT#dd+EApbqgC{E{UG~P(O4qJCm=F zi0-r20&Mt0a8yooFnrJ7{}yv?cN2X*AH{;Mp3*3cJ$h)~>>e?r;8n4!f0|^povdfG zr-+mnZc|C69IWr*a#q5&z+Y0^OC6Y}H7<{~Ks1o^{^;h5Q*?SZu76lz`Z%Z4h#1G# zt|cu(>aB$9ty~n#f2fggORVPqboa=MCu|cL@i&St;o<>gm*-P9!@k+C0dc+P?WjSw zwy+vP&9akfeS1CG-0_&ulb&2>HH7iYt+xB|c>iWW>sMtwR-1br_TIKO>vjtwsqxym zW$pd*Nc-RC-)q|L(!`3ocI$Ph|8hpBhz(4(IyFW1QZ#dUG+d5K)7#7AFZ-;Ru4JBX z{2{)_3Idc$ir+nQlC^KMHBb)=LUG)LXFMY*-@7b1`g)|NdyM+*r5dP?uG|`KL^XI) zUyJ8@+d6-}JWYjCTg1D%l}P3R#OPZW`rFyvknu z+ZOp(g?ao21*2aK#a{s)qEB*J;2b#7UX`O~S(0d6N|UVYpD>L;GT9MC-jKE=IUdq`~$ZCNB!X-JnBPjE*%+fHsok`VtY+{h!-8D$^2 zOY`tS;j}$|5TIMP!43b)OUw0+IU>}ZNP)4h+xkfuS0Ncp_FlDlp-QllyN$4T17w9r zI?VGfqL6HUVS5dOV+lnHoFe!e&HyfM=SkM^Mw`lYu?A>2^@326&=M;L(XJ@jd)Ty~ z>0TRGh_H*KFRzdk2QRQ8;?gXyoM404Z=4&(CVlj+R3q~EQTcLRhsmZ8Jp-o-&WA2% zZ3lua>%MQ@k1NZw(m@H2Q{@Jt9{)PnzRwmC*u5`Mm=Hc(m$!?3CcQ{rLrV^r@hfl8tLWvLX9mHImab-BtXR@#SSBYIW&yg8uTnk!f5 z*c6TH4v!1!(y4&lk2p?4DIeye*$c&(U&6ssaWEd@UVdQiUv&mNI-$gg24xC5b~^S8 z1;XnzwjM-2S|W;U-ZgDPlWDVKrr#IB1}vJ0s3RlZlyL{AFhbNB#FT#>>VMIaZh=8&r6!Vj;F05jLgC*Iga=3=%c+-;=+DGmvQF$W*C z%V&ChHNoZQ?BDO)d8tU9`omeWYksOsuGH_WZDi)-PV7n4*vgGXJ+y1eH_D(i@Yeiu z7;vO<#qYdJZIlRq_;7SZb2uj+u;T6eOi%1w2L83@13Y;%}wvImqs6yynjsG9m3^^r&Z#2@wx9F z!4Gq2l$51mJc?1Ynhwl(Mm1{P(cdL>$H=@(;y%d9wEZR#a!pR8pcTJOYgg+GCe6Ry z%@q5Ngq;v}wfAy4$SwX4ZXH`~2&YCC;yvt;%W1Z| zJdM~PYk(!}XUj%lH#1=|?E*T$?jG{|1l!l*>bm*x-4W`&87wKA)!vV^f$+UKX6XAq z9+gY5Z~WVp!U06=XLAJ3g1xMwZ0^n(Ln1z!Cd$*WP`8dOFUJs`Vb3Gi%k76o?v%f= zXO-X|Vff$Ai9p3K+zZ>TZfTn%Y^P+@V&gLM4+NX=&E4kCK1ng-Vc7zed7-VyRAjsd z1AkuuSCLdnc549{J0Se`bk1)M-)-t9Yfwxxm_d;`pvcG-nq!+1{-K;tA3fiJpmFrb z$UfNf7AzuUS9WW`{i#A-y8xyuD80`Y1Dt}blrmzODf(VLzipUSdckir^i#oT?2Q!l z_qps&-9&k!{gjzM9`G`S$4!y(Up*nWX+mCCxakXdo&q+)#NzW&{KxowPd-T9M$kwL zud=9NCNeV@=6m_P_dV;pOwtqM^BwG$KTCUYTvVf??7d4``&wHu&#a$dT*z?BN{LGH z`Ln%C{A4b~xAUkhI}71Jr%{a(zJ9JO&ugIAru_LxB`7{Z9n`ttfot5o+;n5dC2$^a zd9;nW#pKUeqz-2ssJfYM<&G&MEEv%TL?C77I3nzy3}y8cNeY@=NXp87mqVm}m+NW9 zPDc@s2vAm+tc?!BgdGayn;qFM6q4S4h;;2m!z}j=yo@z>Tz|f6@xw+rfj)i>;#v47 
zN&A7ornOakKMZ5F^A2#a_=;;~zj z*zC(y3@S!ax6ig;H>j==W5sYVB!$Ise@kyltAv+;W`{sY-p4fRKq1rpK2sUC3!t=* zjoj`zJ?JfcEfPcQ0fr$DwrmUW4yuLcCkS$&lDYlWIO9=)@aP7F@ek7Io(5QY>illN zhJ!NQW_tEl3XQnS@=?JWJ>^+CC>&G5T&i{e+Aylk*x` z5u80{wuHgL42(2bgkUTPyX`T%jkW7`aiZk0hz|)uFEH3AH=vc}sq)c!{s2`=@Z9IA za6dpxd{->6->wLejoks}geRDB)h2lMl#Qr5`_BK&`Gxp(?!zEC@HfS$w(IWd&#WBo z?GJZK`^W^c zy@#>_tQf5^=v3ap$(5cct;t$`R%n+kef8vQX-qwPqg+}f5*Rf})+IDAlB>i{$N>R1 zAaUS(n@t>xWPBe$+#S(+(k^rUC+GVyr635odL>a(7;SM`IEVZBqZ)I}W@Q6CWciB| zR&U)_nc3c5Rd`7we34J|{zk2w%L`?-H#yWw_dL#tNSU$z-jGjwoF$VbLGOKUu1iQK zCwwlEw(s$g%7}%cnC>-wxOj%@g}ZCgt!$yX&{AdC)yK@MUExCrs){so3Uprug3SNe z`F5sLprhr|moAow+GiI(aw`&beMlMtl9TeOw-0?MtSvX_`9U*MrawmjHdQ3!TNK8( z*O!HQ)%J8XageK3>v7l{GWl<-gk%$MGc$0Cyf&~qkre1gMU+~tStmnY)hiW zM#m{ReERVUKof`&b9pPSVJoh^!3(C-7o158_s(8mSir#{QM4DvcU)_Kk zq!2;LB%>=hfYdDLnv&{3#Sn{_mR-)$C>NI*MIHMM63;w+k3*RL4w!TgZC1_mnlNU> zM^w=(RufRFWoWfEVJqvYA6$E=E&OE18ca!7Se$M13TNP+9lb={AC*^6(DzbR5hbZx z*=ws3^(i6olHiZ3@W!;#nH4iESc1uA>9j?~q7=uIFGH>bT`O7}?dokeFBbiXUi*B! zeKh`-k^nQTiY>>){1W-&j9ntYQTZ1z?RLF}>fJb6|)JAQ75JAF3~3Yhsd@Lj1W zGJXqyw@7WR#__`H`^$UDbPXWJrA8CVk zem9S0OQ!BS%<}vZ%;ju9oNzd^-vug@`v|NxZSk*HFw6D*y6UL9Ji_9&>aFu60Av8_ z{o>)df6GD@;Y*xr4;nmJlz0P)oPgrqQ7g>N;%f~rJu?DOEji0?$MTZ!XW_;_Ohr#{oJb!LV?!z-cj3E3CxqS06-tka$! zL|Y`8WraBpRJ@;V^ZZ@;u6W5p@ulvDw5Pkc^8+Vt(Qg$XfJr_+@u?(XQm6iPhhZ5t zj;`gwlcs<7=~TG)+p-#^%@w>~GMk&Ff%ZvFd^){I2GRx7w}ffne5ybc)CVAo?3D30 zC^M^#jFfvtG+b_)f3S8BhjfQ^e2LM?Ek!Ari9RMY&b(LdwTQ(jk!6dnS0$8YS zj^+!MC-+FgirrOPSr%3%2k_u*V!+j%TYIy9Vb%3nlH8yvK;1l;aM!w$>hUZ-&o9qP zb2{FvL~og(A2|PsX6f=7qEQtJTCRg(DsYlvjO^~dou0V-F!ehBEH1&KpuV0cn!WFJ zeBZ9!AftgV$#g$dTM@;+VqM<~td7xU z>%mvq8+{k*s+z#-{&V6hxcxJh=OGj?3{@z3yEH25s-;9Z0z2XRPk0&tK zQ7_yMb>k%d*;s!98yX8NDy+<;>p3Pckx$V_Z;u%w+|F;}_7&C!kO-4+>?N8>9A1$+ zVXHAIQM!!fEPW7-K*e#%nGAWJFjb9grm&pkLshb<=Gbz!0o9Kah#vC9 z#i20lp)55ucSbW$8}BayIIpszyu@4h3zSoRN>jrj8s%ZaxX=;-q$s2iB9NJH ztvnAeFuzzGC?`EGDVP zttXKoKxZYuDAuplx?}?z*Z0EVOCN%m*3;dr%*=jiO5jhYkM5;pA0bn>XtZ)-VM{Mw z^&dGOQ|2Ym6jeDD@C&@acgy=^bDbhuaark}=gLwZCqVlSOBZ#QgVggxhvdPkllxZA zV?1+1W*lIEq{hg=r$#H)VQHkB5H}qe^&YvfH0_nc5q3$iA|B^s1BDZ_g!jt3U{nAf za<2>kRT_=Zf-(tUQ2>ZS>W6kCwGx{gzUI5h#|Q8LZ~!qMUw7w=XBbmoa*JQ8id6Db zB}BR&YoLQ(jU^gi$xt-_1rD~KSTj-KSK1O!DsQO|53cY+x>@Kft3>Ek&)}8)n=B82 z=1jo?GbjtKv%RtrW>9ris|p?2AkBvFki|A0{dC)fK6pmjwUXtx zeAM#3(v6+RXm8xvE_c(-Y~Z8p0F2g9bAyYhIb`Ua$M&|8wGTZeWztlX5{k1Vm_g4w zhr#(xRAiI`%pvem3lpm!xKDMLDfR{QTmdni2?9G>$L)!u1B=-&QUu z0yl^<>z=lU{p@U}eE<1pp^*2h?}U25YI(at3q}2DkHtEkl`j!mv?&^E>(ZmVTzHIg zVuE1)y8re5m#v$c4*MkhFs`TU+KSsem#R~<7I^CLny{MtbQF*riO6aF1iLyTMtSDD zg$1{artczN!K>ipf*bWMp!(smsxOdk7ZQeLQgR@R%CEBi{EbvP+HWjLTGt=YA(J`r z={UB-r$|Z%S+S=MNjI3|W-=<;ZvpJPJ*2ZBg1u3aNn7hPn?H~vWVt2Nz0p~~Uey=~ zkkxFiQWR3e%CL=+v5ilW`f88*-{Nh-@-gd`G^JtHNI5e(-2&7@grOr;Lh{uln)%%_)snnOktl4q- zU`hZKpV)|LpoaKxVuhDt_>>U^$Tj=_)OTmyznvl*gTTLy#1YZTcU<)K)WB?r#6>S? zIz8xYCqZo5@55rQy76%$Hx88^WuW{o{;n?cJlxn{HwI?*FUKJn$QSKV@qPQUKk<2U z@BS{0px^)pa7HWe?Iijgf;|#`<&NXp!3}XP4PXjtS}L1@7-2}S5{C5V33T^QA2wT{ z?jqARfmbZzlWib_DENllZy0VT+&q(j#XPMX(^FaQ%ZnYD8$tdNc0c5m5*UwiAfgMX zY%JhziYMIyp}b$*9h^Arc+J*jrOISn@@OSTHxK$*sd#v1NH3+r9g26U&RcqXcK5@W}$k6c14Z;A=pFPf4bF1~@! 
z?^^i9Tn$OfUz&Mug@ca|@`Kh%&oDV#dtDK^Kkv58gsp-?#Lij=%3mX@^Kz#UlC1;l zEW4E!3%nIPFWskct)LZy0cG-ePWra`2uCQu8vw>fQr-Z*?ZvQ`M)b|n0rYznw>ld} zYD(ibthDit=movJ+1JgN>zE~Ub_pD9N7Cmom~0(_=>h3+#K4W+-3*zThC!7aQZnDB zLQPCWEOg&E`(aZS4u-|V5f#^W(O{^r&<}lK6 zJb#Gp_uW`m`dNkUtB?(!W$)m058xRD-PKR0sC&zzIs|D7b4Y=zL?sU%aGa@2Wb~dU z^uToC znj zQsngSw2+kj=aLp2QK9!G>6Ll6c+wbAAU7TeHy(f($BTC7+n+`&7Szy-Yt`G#8)`Hk!{mUTI5jsaJPvh1>)h3>CU zq1TJ%sLu?wo^ zu&)-xbIW5|o-}ZX!o)~fnfvKIjgSw$UpUKjwwMT|I4VR!-y=yL)cN8pf7`PVwv49P)vOX(Huq}3i&bC%Z|{~fqcy8hq5g>=#X8C-lB z+bO7&x(}7Qs1q^}z$1B;=&9k@vG!JvdwbL9jn$HG5wyh&{jW)6qaEEenWd>**BfR* ze)19>XoXB}aK#@J6#kpkAt!G@m^1})ft)yBC_XGWe-Vssg^8Bb)VXh`&65P;9GsJT zeNPq`)d}Qc;1}avDooY^O5+p**{#pAT-=D6K~9f0Vs_FU80?RvwEV@WNS`ckKV7Qf zyMN5zfv6;PST8f1iv$o`)V!-w;>Z~;CFFV68-=<>3!)*0(}bUGv-U>Iz_$jn$)eN} zd-iosDm3K^z}zPmYp1lcq~fVFbP|RyE`+g8N3;1CKRjI!n#x{K%toj3dbamD^Yp%D z8BdKox(Od?k07P0T}uje!ym4U-(4rmCFl{_3{jG_60`5;mg--zXe{`bb$)=GgGl6- zY=mP9Sq}DnEUflagMbLxBxl8c_nBtfHLrd3dg_V`BW8Qqn(lY|)!_FgjpjR>(cI8? z%%qXJ_&r}=8fOL}TqPfG#g=~r{r%%qAyR-rSNSTSD@qTiwsjlhz!M=_^Ei&P4^iXl zT9kMfhf5-sjC)mn5PG{F>mZsCS9A1!Bj8s;VB-;h65(sXIcPgW#F61-Yb3HJTKC$C z##+4d{p@@*&Q{xbmT_=2$miEc^4ls7%OT7y0Yi_N;z~0nHu{Nv4TRH5_VUKxzVk@L zE<;p8t7rX?2C3gOb)6?In8L3ua%kdjR$pRbKkyMiv?&X1127LVaP>v z8Kw7Y9HOP5;M>DTA^9^L&3pDElDhHCosr%}>RKki~V6y>&sy)hId z*g54`I)N(p7Y5Rw+jX22={rroNWy6&Waew!vX4da{pNSg*064ko3C8Dh)~e8v)Yya z$fR*-DJppb=B2^O?n+Ke>Lv2!tONF{uM<o8nsEl;R{|8g*kq;*v)T@|XrAr`5%@ zss7-IuM{T?kLsoulbf;Bawg_5<3>(@{MVi) z<(vDoG~ci1Vu?|4_~&5*-}D zi5wUdL38BIxNrFvBQJ5BB6&J=Bm8<^Vgc`({y%ZX4d8##3=CPQ_zgX-)cZ^OE9E!mv2F=>T<{gvuv?AP9 zD9Otl4@~cLzpq!zSIy>KC6V4j0Q_PB0E|9~v;6`^grPHs@nlZewi-!T2{`6@`;HBn z%CNiAfjI!ZblBmJ(k`Tlenm-0hcuD605%vM<76bQ*(DDeFRwI~w1HW<-Y|~ma|$Z_ z6x=Lp1zT4#R3p*DL4cd7?VZl33k4-_!jL!!jegoBa^w~h^?|)hO1^Hfb%xp+iFFij zQOPs6q_UkNPn%OTZTDCfPQPjGcWQmAB*kdt}PP6|l?VufaqhpD)m{rJyuQR-fguUn%uTXo`4HdHf&WbhPBFU)^RE zvlqktgY{%nf_fpKVo3^myMX@1YhJ^NzSOp$C%xYICRQQCn41ei#kRTCLgwMogt!l* ziG@3KJ$u(gNHa!GS_ok*UQk~pksIwq(QX;^N9H!1O&q5uDN zU3{EzMMr~ZAUz-i!_lqba+o4LW7sLsd-O;FfNsy#eQ(9JzUnr?v8LT~(T^!ez-nm$ zMEZMitLRB?&j&vvFyQ3P&&C-+1jzb*a*Xv0q5Iiy4KG5V<^B`% zR$SSI@A#XwZ*g9|GC>BMP>*<#dy!wU>tf>h1cU%d3gE=r#otxvX8OgYv)8!>o*>Ah zeELbRT-yh;?ZNkGpjm=Q@DE3CfE&07PI(FVb`lrZhV3t3LsI((Zxdxx=CB4jN3mvs ztu{|AWmU~Z2YP=!B=kC-JyDi9UWr51hmxj$3N6MbHw7Pjv_FTrP4~T+u$xb)Cwb3j z2JHLC?d7Ml!%+tN+(#>kVDWJ_Pu#|B&&FXl?G_!*z;Se)NF6xEmQOPTX`j^jIJ-T~ z{~La9ReLn#TV!13S5JHEkP_cAWJAS}rGl{tvd0Xqc0*?}(EPw~oUDXEX?x@NH4RBB z@Qx|hL)VUmB9@Rk;6`U=tkFA>8q4)!_TrHKcS%|qYEDL<;93ZD%^+=qnxi_F80 zjXyQPv5l9N{AQG^2sotX_|kXbqwPT_qIq?i?UL-dd=jCjNJWrFl*AjPU!`h^IX`*;G;~#z?Yvmb!m*H+^01g1^`P{f zbM%Gbnm!*%{V7ivFuWgRlHAIDPFhv_H#iTpe5}?4TEOs|lLnQM{~CvK`VMa9p0tNx zyHbY@$$K;{h~*wnA=QD^R(wN4o{)kfl0@p0H_$dw#gQy7F8-pt^hW8PH`WDGlAHj+ z%ar(ax!*WLWTnGXlI4-0SjkjMv(_XT-uva>`FYj1gvDI*$5uG&bZyA2eop?S zIfq5rYoD8_)BRx+!F!5khcaa7QJjKcfUH818df~O@tuQcBsV&i1U9YL6`_du?GK=P zB)$oNJP@gx(2Xxd7MjvN#YD93v;zq3^`Tt=?vZUW_BJT&D!AEU;ohp4*}APUIFI#> zYG-T+Ugo;wagA|@Ku}&H{Uu3KT3N(S!N)bZhRRfg$y^*))jpc~?N={%#eb(TD!E5s zUSTXL@e59HKLqkP7OQc+LmH4jB@Cay#^_pF5^;e&bY35& z;!qkUiI_&3H3>?1(}f5hCn|h4|8s0WxnBDmoBMy$4Zf}+9<=%z#C-j~;Eh;7(5t`v z7oD7kBx~>50McFNxN{3^oH{?q-1b`t@Is9MWzH?XX+hbw0JSFo2E3w!%LZBacw1o} z3&gINv|`QlceqxVgh=kw0$*yMc>BRMy1V*R{*|&|AWM88Sj}`ddY!}$=ho?Hjbe?( zpf}swL%CU@eNDluR!v#LqQu*FfR3Pq0ryJwdkUWqEuf9J*^x+Q+wx`*eKUGpuj^@% zEFwoGIj=cM^JOgHN`=LvRaVmv!M=yWNBDU2j9j$$^0;kD z8hHb%p_kixo$6qSVhOGa3o--aWE;Rge_Qa|fV-!Bhgt|9!4 zk9p^jwTbe$6azrmMPfy1T6>f!(}Hq1HvyQ&_osVl_@f4|L&V%m`?6dzRF3af<bDoBT=W?44VQ4}>4*;*jK#Gj)yE-lBidqYpO-B!!W^ 
zejPx0OYf9g!i^+5(^{fWX5G-6EZztPNVMOS{aB2Ca0pD1u&lF*5AKH|MOT$futn4S z{Yy*qa3ywL3De6%0jN=`P%HGKdQ~$8iSZ<`MzeXQc3N8~e7;E=0He-Kh*lB}dgYam z>ah+<>$Kj6d)O_G77NE%I}G)pCp?hEmUD9Bq)66|sTzF671I+{{Iu*{q<(1OBmNPH z2HHf3NUksV<+e>)(fz6rYxCvCmaukVI(&azh;+L;vz;pVt=tdP;L!BPQiRS>{;V(- zOBWCajUcDM;NNZMCLnUCzEEW*)zYC^O0z4ulR1-0p%O26~ zJYobKAq@{ZZfH;J4B|%4(kAy6L~)=&-xPr2Sb6um!_o7Cqh0CajGQ7qPNjmPwZh{m z!cf(=(Ky&ua0#ZF7PdvJ3d{GN&Pq8xy;5SJskD_q9rTgHnxqShRY@oNI3 zyiPQ|HjXtn;yrS`1o@oa_Z}{Od@|*MxOcBt5M7NEKQ=zG!zCX;C9BwBEyA zV0@~1G^Qxx2$RYJWgmgIi zC~rtXry5r~!SiUD58FAdVK~IYg<+`sVEWYpA3ICI7^YOOnIhutoI z8GMUXl1N*Pn)KKAMHyewG0I=Znp4$n$zya(AbH%^E{ZIh zcbvgX^bV?X9GVQeQN;7q?DkoIN^7NEC5hju+`&($=w^ryBpn<$4^N*ra+*N8N!B9I z>!4pY)HjR+p?I&*)RwG7U5&=HDy@G@fHnQ&16|4dvuORKJwLZ6HB!5XlWD zkcB*`7oOKH4HYK%=3TSG0%U5-%>V#YoOaQaAlPVcK2pFKf-l?_m^LGm`%gZEZJ|0^ zRM|&GJuy@PS&&N53z%3%vhzD%3h=T!*5N6#@6nvF2z9RM4|?5;b-fQjnozM8K~gSH zXF9rP9VU6q7CI`@v+O%>-^`;5;HWPO}3+=%jSymzElwkd;3u&7aOk%ekI&hu+@ z__QEo3AONt7p)@THkxVN0D#$As*^(_{g9C@ z9G7ZD2OMrhCBoLTt059KZq<9JZsIX;YWDaE?(93LLeV`4N~#ywtiJT-Ljs^4O-A;p zNfxG^UyRC?oXC^ zT`dCKaQ}`w2H$im)iIrK*yzA$IUXz7N%*6Z8(jZOHW;5E9^Fw#E}7tz!ML%j!G7HP8-5botU@VU;!>;=wFB zUQh5z`NIXILpy&a;>{8k02TMKy)ij&HJ(4w>`kD$RCz_$e@4^`>{3zDnmlSG5bAGN z@{w_!RS%G}&|snM6}u5Cv0o|MY0hI#9?yz544G_iYEgBcSS6c#qbdDjHGWzwjqZH= zOnd3_gI79RC#^&%G3;f4>U+&hU(8Y2Et$aGA_60KDx}_bYtRf~3XVq#@?~aElty$7 zpwFW1+DJy;(R_cc?`}2mkG#hu*vER?c*{{USc_a_?Bk2FGr|bf@q@upe+p^2BT8Bbn|0C ztETekJ((8L9_ivjN1pFi_US$vS1z-2*yDyA3~WpMNe%k;*?**ZYB$Mv4X2)G$s|fb zwrruB`gd492mI{L)Sx%yWnQ!YAmB*ZlzmrihD;FyiI8>_Q*p?LDlZ&Jce zSi)qR>?JvfE+k48qyEVC_aDD8SYC7z4z;)&$b_s7PntHplfE`Dj5!gWa2jY`;2kr# z7Y)ws#4?{_TVRSl=x5*=uUSl%)wR8$eJ7l3N%!eq*}9yi0sV7>X!2t8iOHj(lXt*a zeA+&}2EK#idvW}&dOadaz7RQIsaVZd%3eRXMK=~}PtpvTbd2_t1B!5;4po*t8|%Z> zCk^IV{?g8uJl%7^Ft(>F(Df(KxW3HJXG=eYI@b@8tn)|lp|Ag+EgeGTh6F8$@0H{% zobHYZp%NC$LiFr4w_Q(*WuMe^Opox-G&%RdnrO~S?o;yv$t&2p+X(HIe=Fk2b+fDP zba|5hZaQ?>T_ZH8HKg_}x*@_wmRF;@U?@gc6RTRp%u%*fgeqcv8%5Gijont}@bT}2 z^MF+&hJer_lviQN>42eu^G+HW2Ln@&CNn{Oq$DucT2=B#v(9~L^UK3L{5J?SpqiSwA_)N`T~qk+vr7y!HLzf&|fx9kZx{OqdZJT z;L8Zlm5j*zDCy61&f|t<+%Gfxx)mir5l7*KF~;i{U?$Gabcis34@Rkt+e8)JPZ3HOe!Bk;pmdBb2J>^WwKwy!y+F&m{t+^mIE;~iW7G3cT1)jIl%^siEUMa zyAM4{%O3y^$M&p{WgaXKBW*W&>c|r}Q3}|W&{*Q<$8(4^nh4LEhCq3rZ$~t9C-dnY z6Q$x8BJ$#ayeLl}gE==UxiVakp7-K$PO$fubx0CnOYuAq32vz4Po2P27Ohe(D<%a^ zS{vin37vcY%m%U15lS!6&(kg&_R6|<%$2Q##u}$ZtJ?O`jIrYWCu_%-F)>6X)YLU1 z%i)_M;J{cKvwEbRv3c$63k$-&z|PO(D4~;JTpJctGqBu`K%u8M={}_gsk5J<~${ zusz;f^@nqwRqAo&c6yUo$EZ;XXO4xZs^>A;pSnZ5q^M1)WL(i+D`CyV+I{BLt5se7 z({o%&ZKt0HeA>3&9RY`W7Jc3w;wwhxX_(B`{@c8-0c{wtl~IDahRsk5Ur+LN9Y`Xi zP*I_%>z*3_M@k}unDMo1{U;}pu>M~@kLHH+p1*-Yi%{+G_Z>0gi|!rK%WJ#k_Lwm#P)@m`E7RB7b^v4lChoI;Jf zAp(FL?54r7o){Lihh!8SqdY$Lh_Hi2V4DB#<=$>d%csXE)aPksaQ{=SGs6vffiC1s zdF#y3hn;vp=Ar#ZNjl(WdQ?mdLpA7=29I{!=f&c9v>R4pof#Uq(U(;I!&mp8>a|tO zSfm3yP*epm<=)}7n(^!vEJnVDtzlB;rebchkmsa~SONl~iKI5DjC3Fmmp{9v2R_UB zHq!-mrx1q%1}YPZZ1h87!YK+53lj6?(3>&;T&-Rm${4g+$ntud+7!;7Lbh z4`jXn;i~i>f=o}1o%0_h4|8R}E7g1S{AfVyKUFd+B=`M_{aGqc3YP0GukorCzl$$} z1qP2gj-h`4|4(j@6Nbc{B4E(n&B24anNqIrvBBRV)qkUZ_g)|>(MDDi!U@FH{@+?Y znl5$=eR|2@z_7R`B&wEEhkj4ox&g`p^t-f({Cc0?nE4RvH#XknbiKSE`Y*dxqb&O^ z-ME1MtcUdHllC9^+LMu~g?t9Bl5`sOaunA-D-9SHsC-=1|`P0u&<&QvmF!b?M9D@L&#R3nH`{ZyQ$45x0J9^-_Im z2`AO~y#SO!sCN_IPVP~AE#3qRk^jr7G{&||puUQv`-kcWZe;cye8TlQotn^=Vr3dW z&yIPW`Y@MUc_{?gFK{*0sPD;#FO$c9P`pkp1{1-|UfvIuzKOgig%7V^R)ToN!-`p^ zA?E%(hWC?cIa#C;+$55ZZP(y{4*#vvX#Fa+lXCpG3}T_`c?$vo6xmlgn-Vz1)EEX; zo%X`~k_GLV^XOdzUGZGY|H*y;|Ij)Ako~awuk44-D;o?$(rA5WcekS8M}2VD<(XD2 zQ6^@tMJuJlkr*X4u%ylC9H9DQ1y9zk-1Z{r=>5*0Uyw`~Rm@4)r0gdY27J8Av$1M& 
z;TJLPwCJ0isQH5vkjmaiTV)VSr{x$!&IMEs#a8hR>eZM#?sZY?SMxLKf2k@Gx8gO~ z>o52%f3dMTUY~-*kt8bVjlpq?`YQ5raH8HCC;(3&0vhF_9?LSMEHPOp5-AYgRU|DrlO$!LfBBkkm!^a|4iSXtNo13@g!p65&lqWtwV9vHml{B9OyHclZ@e}@>Zw)OjL!7p_hJHE?Dr)e_B^ulq1(e ziA(3J@L@sV9@l&3pbKz_qCV24Na7ENg#}5;AOa zr)6n)#L>xzC57@UXOND8{?iSX*%;j7l2~t^uFTSC^|l(8!&i-n5T2pI`-`Wn{~#Zk zHdXOlUoBeW7;P&Z65KtFvOy)5ITH2XVJ+NxKorLv=zIQuyfDy-@=gyIsQ1~StS0CMQ{BP>8&i= zmmxow<}Ooyo&^IwTil&K=62D=K^8Je)jQa=5p_6;$)?11N4YHz#m1^WyLJGKSp3f* zk0cTWqh5YjpcP<82frK}!J0MBBKpCTNsoKnR$$Fn6!;pAZ@>bn7bU4*V8CNA&+=Km+@4W@>i56ke|C$Pw)6xmXgVjK0o??NCYY7yQN`PzT^ss{I9ARN&LP}6 zCKLsvl%&~QxgxU}UJLLy&7mgvAMoHT(bD1vM#4Sqts~``lrB!`;97-Og9}7tJUzvgOrNS3RlQnAqTTjBUKPnBbW}Jq&a_Oq z8CND1-?-XH5HM{zelCeeF6~h=YAxYdleAy$Rfo>y2e>|Q|9Y-Q#U+OcivI#g6T*>@ zn9viO>FW#C6-Qwr_?d$f>_{n$igXH5+xL(l2BXi);4!`mZ(t9P*x;_cmp5|eYs)u|3xkQfwgTK34x-MgM z-oI`el0!cF3JVHiy!QVdjrrCMsd|ZT^8Kx%%Uy z3_E;3;=JG<#lqV+1xgn%VCu2qIWvcvU|b(xFHgD*^e7(6jFYt4Xtfkd?Rm!mCkSGM)gXv7fASyWdy zLSzc+Lj|sT+n~g76nwE3%kXZ5|Gj_Fm-9KxmD8J}^VA({TL}PG1FNZv^HcxVji<@% zJsPm6orduYsy*?cN*XoOvg{`g^i5_7d7&2{#b=!KWCt!uzm|qX1LD+$Amo1W7Z9M=cUM?F^xZT3z~M4(?3`iIwr%WrrJ{+e4IJ?^g8XXajD_udsqMED4+*sJv6}o zC9^00VpPW`3@7t2urYqld0WYQGl$@KR>wRU{g(M465XwRGX-~cEM6l$|AB=xxv5<) zNCYc$QcaBY#zXG>R=d;>NF1EBq1- z3(Ipz0YFPBw8$n)vnp!45@nm1f=aIf6rviYZGaFlAqhefIKT~yLS-n=2o$h{tizr? zvj(W(vT8VEb2ml%Ho>h|ANyzu$e;dHpi0<-9i!3la$X5IvO;BsF{iGf8EwN~TlQ4r zmh|jGgq0$428klOgkPbOueshsN%ijLCN7g^2=&LIhyiRd-bCzn6IT8t#5)dcF*WU| zyT0n&zbBV|M!tGz@P6jrMh@#7LSdb)bWbI)n)u%R&0cRszouOo!zB;W`-d>y9r zNp-kv@>H^S${&t=%Us(|x+_aC%%x9gswj@_R0Q zpO#xD#VJ@X)7SChasBn-xxyNdyqFeG_=m2E{hqjR%Ww$PeLTR z5Ans*OS)zJUK$9!^@PMnghuWlQMFVphNak^+grcUfNZyUX z$n_q$!yI5RHtIQDFGZZl#t|?r`-;{*WB**o)-(YG9q`fAhYRZ_?Bzj00j6kU3)ZwU zhH@BrdiG0=7txC1Q?alNw8r`+Yr)x%&GmrYua4WPSW%@lB!Pf~Q2Z@@3BTMK6WPhe z@@fR3uU+(cg^sROuys~vXZb^yDhw?1P$Zo%*7*pV?9Afalh`at9I$Btm&ST2E{o>H zPm*T;2_$H~HQ z-xly743^tIV}<3rt{bfkFBXod>&al2K?zcdqC zxIVMx$!T~*tXcgzVJ797-0^pZ)A#aA(7MpqSbCER?t;N<)evcY@I=A1<;x&AaDqT% zzCwXE>~y-NTHG}ZTd>8&)e?JEM7}UQ5!6sRu-Swh-ln{uTn8QH60aHy8tN31DS@7* z>1Fz2rYwVpUbw*_LM0Ji*k+y_hl7GG(hN}r7lWJRy^uV5R$!|-Zn_hcKE(ey@kpHRFBY9cxsB&opg26{*U`fnYgO@ zypm-Ji706`Xq(Y?dem9Qin}UW|5l8}2q)3X;6R6q6Ybm2qIQ9lVZpQYwqZ%2di}PO zq$)}^qfE%6Kj%+E5|V$B9mjC==MGx_p9Cavjr@a|F`Jl4{^@@NBwez%4GvzX$t|Q7 zJn@cRr^MoQ-$DaVpZ)H0F+5wTF=om*h+dFx|$Zbe63bRehJ3)Y4e>NF@thkJQJ&Z#OG6*4NsuQ#knZXV@ zb8TbEU(Z04Th=z6-PmA#+R(@Z zTfpN)019iBIjZFb{2sDA z%}5TMiIz^_tioxjl+M6Ipo>U$2cwiy-r*+_MRkwnki;8*Hcve}0NU9Za0J{RES8f2 zS=-`5cJKA@5`S)MTuleJ4vxjIp7VL#fg~b}cMP(%uL$rFpnoKeVkc(QKWwJ*0C8W(TH-9dB4(}Bl3foN8wJ&i<+KpUy`ZkuK&Qf zlNS#FzMo!wv=-L?w&tN=O$jHLSrvUdw%F7SLEeuXH)zCVw95Ut-5N=XI%x?^Gr3pi zb9R@{SjHlm`- zfpSwCotTA2$_b#qjfN|t8v#K%&N`B88xn3EePBW`J69V5t@I2rB3>^)Mp2+=TaX>I z8PSu^n*g7iz|wX`T@QCzUrJN0%a-CbYt~psc_BRgtQwZN=GfajM?n`AxoBbHVf)kA zZRfQV@B5q(6}2iWj+QOu4(1%yWRFw&K@O=t`abjhKBN0yHhuGzUQPx704;gtvds6@ zendB9pev>CahhYE?|ttPzGi=?>{A*E$a}I_IA0%6+D2r}+fCuT*%yyHXSA1@ZYG%m zTHaf@(TpOc=P*99TD+2qxpJ$%d%aG~@z(oz=Pd94o-TolOK=nVSc+&Y(8O6=GYYS| z!AT(j4Rou(7BKJQt%FfGhScOE)9Q`{LkueTuAg9aKm8iByfJd7h;yWw zuCZbmPWtTZfNO5*j@Si2SXB}(S3C^CP$^e#KtQJ_#X_hz25}Zlou~o%e`Se71zoMFsjZSyU zo}gW2kom9Ne9}UxRP(erW)-az2C3ybJ+@6|IHUz^_BF%^q}JmQ?E|;jj`_R71T7xq zYV~JHx25;(A}; z{s(BXyQ#X35u_Ad=B->Z)H$2vFz(ZJoM4C(>8$`M!&`CJ27&&;LN(6qO60u;=EKsK zEZj)cfF@Xgck;sws07dDXjim4FS)^htcFMe_=AxTp0v(ENa7g@pwV3AN1#UJv(Y4I z91$S#82KeDKfS}IX~3Eu(*iSAJYWuFRTe*5{cMei256=&fxCar8aoJDW)*>hr?7~N z5D-Z3)mn1{96%(&-5~rHcL}I9b5phF<`~S+C<%KRLd(4Pt&-s|E&Q@-> zqd7ey_cc-Y((F&M;*WkYu}LxUm)26wn4B zuo_-fJa%>g=m%#2oX4A$>pBK$>n>Iv~uvi 
z%w4^30mM%N&y*Ta*~07m$w6Cs5e8*UdgHWmP{6SEl7wuS%@0-la5f4HV7U-Wu5Rgr zpMa!iUL>^MXFGZ_kf++LfKdv}<2Q2&x7S(HOQMzpMHn!R5c0k0t_DdMhMTue3L;n? zIGwquH#hUmI9&-VYBqADSk0VMirSt$5re5{*u+}yNmINN11%4TNdA=0#PK&eP_HhTK1yi#PJgTVk^Pk#^eGqg0)k zb%*yoJF15b1B`N2?tj{d8g#ue$X37y!P61mJ0*#?);|;ZryeRCG{0g4;W z2Jwd;0Mgb4rDg)v1omU{OA&YqeeXImn8MR|%mEA{f&#;wa*os~u?o+w2hpCXj&)Pr zMiJ7~RkPPiG1Mo=(}TRSf%~>>t!Es7iWzSx#vWae$9@c!WuOSJ||zO?x88sSU&7&kSY z2}mezUfj?nbpTg17u906nnJR5BILyHVNYdJ0Ez4u%E6l-U&g8JZSLAh0au|+y~Kk*l>bez1t_Iw)-@Nr z*MKQf|BS3U!w3n39(4Y>M9Q(4S(zIlL-}+RZJyMCEVV2k9|Mj3@RgnVV?FCTb+qRE zO}%79*n)?sUEzYLRc~P?KKrU}grhGbUrZs&8K2SyL6)U+L74QjsP!_PHV*ZKtuHcGc@+dHhxoh(V?=3h)h#zYCGWIe1hB#% zm3SH4WIfWA1VrJyS{mBVXgIFJY->XIX3+on2#Iu$K_-?<|8s0f4IkJJy!>vk- zU^u1sSkxFaq#cWkx-f1nnF_Plv}5gI7|q|e7=N7iow0=Dn60csktO)C!}L@^$4vXr z4;2Xlm>rhvI3eImXx)_b*B;K7dWlFRC8`L`&Ozti?iug=4Puu%od6)4vIsu@VA0-; z+2m;ZhY^eSWaK$y`X}H|>ZJoy{O1hHhC?2t>tgBK)Xw`$?EV(uyI`T46vj>~<mTxSmQH^V~1AYkq?s2L~+>kCZqNXYWQsTJsHR~(c<>Oqv!Q%a|znp#^L4XO%` z8$kvgLD`|z+Nr;9P7NAN8(gqxUi<65h_Zfc`ex>iDrR@>7{ub_V0WF0~i4 zY1%AK;6DLrh<#d%@V6%{u^N~C+0_20fb%m(I+#Zspg)e2@*Sh;tSg@N0f4WHz^-cL z`x6Nc(2=pFl;TVZ$hyR1)#x5^?N1|%wBiB+ z`WL-!S3tvwym#vp>1k-&ci|Hsm?r7~L=(0O67R{>WaxtdUeK70XtNDzX+u#6#Tt)P z8ujI!VX`u;b4KK7eZZ=4@RO7K5r=rXIzu(e_+bAJj?17H=0rd(ZjoF-bkhOx+7(+X zAuK-}`+}Iu|8?AFNhEN9RR}R@v$}6o($=ld-lBkk#EY5TW>yv&cmJZ3+DzxIYFozNwWx zo&C1lzn{c=Cm543)5kC|`cs3gq0k!@tMmUmcJdqW;5qugmuL+r+l^uB?I+=~hCSIQ zR-@ocv(U@U)3HB#PAkQ&KY)?w^v-s{$+}_Zh$# zNZk7A)wDNjaXv%?t3M((8JnovQXw`K7OV4M)Q z#N9+1usl|shUG?YW6^;N=q!={B(qJ)@@!4=xvU9fCc!$=Z3*a6d&K%!WxlZDQ6YzW zD0hu;y{S<71~NM3E#y*iQ$#;A`z;>XOX+ikmZ?xDR3ps)_4l2UWHEm^Og9?Q7mT41 zivM3kTRx#EzPEQV?8#-t>!?&D?xbx258sS?nxSywJ1ybwA0?bxsibASdx)n&Kb2jM zR{XxtyySJNg)mIJPw#%T{QWarPIM1VMA)%bzvFmRPq5z9>E`iug}LzS2UjiNg|sdj zw_X$myuU>_c&uun!3Hyt)HT@s%!B6T)#KocqNiGmFK}lY=1JO6mm*7fN0j*-BDYzc zOvuDbdTFi(T$?y@n(F2y7Eq`Ts=j?-g+*Xx;$PC z14XGfy8%=mE>U!^W4@N=IPO!KXyp90S;BmJd>d}^NL5<`GtCViLasz~7!NV}t!lRu z?_Xt5G(_g%kq64XD1pLJ9%m4c|D}iMiCmT@p?nbZuE*<+evxY`X0mzCp$S|#Xt066 zC=nc(vK(!&c4UPG@KSy{Z~<^ze4Z=;P6^0b7CAyb1=Q+U?xg(IzR;F8WL`v$}hz8`?s03kv65M4* z<&ff0cg$uUzS;z}O--%;ezGA%IC><{Jr)tj5 zOvME@R@?a|l7fkK*cy^Z53UM{53JR8H&Jt-8{Gv|iqReR9yWU4r0fq&43}gT*+6NV zijz0YR_1r8RUYWUE~cp&70ksYtId?mv!9ME0D4>nWPP0(=bSr6!i#JzTJ50*Oc~hE zaUPLnlIYuN;8Yu?d*8Cj*{?@TAkajcwP`~V`|VTG0i|x(-|m|x$-A7P`S zf@(upO$knE8J*X_1}*FX2!#%tse$>`v596@W(@$OA6R!lI%3{EI1M}j*TaPOQeyE* z%H@Qc50~46&WvJ`)afb2L_w+88Hwp zSntybkU8iq8<`0JESA-45X~ZD-8;4k_n*Q zJ6~1|@>h&Agv9IRakOBb4D_bgf8$WZih*;R9fSZo?AGIhA9OlEhgB^m8k{|*$Vjvtot^KzetT zBeyB){?Uo_)sdtbbvBz;Y15VRK>_+ zJalme#&rnIQ%9E66di{B{xv!(zOpETadH3XB5lI>k1jT$^>LHQuFEB^fs*J%$iVHp&j$$Ja-MKm+bI%?L7=Gr zylr{8TO2i+%VV`3W0viWBe5fzg8;6BR8ZIo!1@%!HER%VVU#H=P1FJ=cY<%=@oJ2F zyaguz+>-T0`48RosVK#sSp8}qc(B_Ik?6onzS+(5VwT3i<;q2#u;(4_BtC#5?69tJ zOXOa4Lh0ressg1u9;ay<<`K7X=O$b3isWS9Au7|YjKD|tuPX2dSGmv8Z1OJd4yms! 
z9#bRd>_aZImh{WhO-pk=YuHM*Xd3xu6a;P?O|i=KnIPZ_cwf_di1ltn0*y-Y776O z@T<%1f}hd9A_(|B7BRGxX$$qU`GVDF(g#kpnDfWhnDn7r?fU~MrfHe!PVC5uKiVkw zkj;wzs7o?ujrS>i#j-cXbc2bXq1Ugd_l$oIgXF|Gd+6)GJW;|#K*u_DDXrh}EcDY8 zJVw&!3d`iV>v}`tJKFFn463h(nG*zdC3z@YN=9~}%)=ch9aI&Hv>ilS-b=U&xj6zU zwd*kY!89Us2VvBIesguGHcvP{AhlNl}c*p>GOi}}qP zeyr(ZZ6b)%5XQSVc#;ghNpgn$RSCU*1thyCtZ~wokn_7y(+kORdem~3f%mHhcJ_=- zTaCm^Grn=lZ|h5>_Vx~lM;#36go-h)-eX9Wm>AFSDIcyTU?>bG*7(-mKig`ye_~MX zs|!81@ET%Ura8*>9y^}w&+wQ1GsQhScn-LL&-^iNoGW*1O!jV)a03!wmwq;HocgD= zA`$;qD-w2ce7m9o!$2oktDE1!9^=2jI|uiEDRFxc(VTekI1qD3Pid>;cTV-kQ&+L1o@OB~D7@ zetHHBK22Y7#1xYWm80W7T>(mr=Xn@Ejxdr)zRSAS_}c2rv4vlG2ShgU{s2Y>V@{*A zjX%*J9Iqq_&4?$n@K9Cg?oJcmp~Hmbup>v1WUmQ4a@?eNu#z6*GeO2jg{_ zR%9PChp<>kc^+oYYm$;!{XK@#@DDK*nWm4IQ2I#oc;ytTbxO?>#o2|EY&}h+!j$@}t>fUUOh!E&UyeB4R;DJ30G9 zJF+w{-l@gJlq3=$PQ7NPECxek0{}+_Sym7I`#NsphZ;ann=TV&cJ#D{z#FnB26nG) ztV`&t?>80_lTZ@J#H>PYuDez>t7S0LI{D z_Yfb!7oaAi?YPYv|Ms{ja65nlCfh=(R=oj>4PaIAMZSFz?h(Q~@NsdZF<$@(aHI0+ zBUQh+4i_GmnRcY|SYuC=@Lx5bixNaOtb<1znHJv$ID9b=8CV(7c+g-I9n@-U8RF}W zaq0gu1I8>$&c}Fpa#9|fK6;yRDcMS*eS@(w`6-VA@4+?J4X&S$4^EMnW%{jrUik7EqQtdr$#12jWW`NN0f)gSr8l5f`dfWu zufsGrAQ89y=DWN5?YHsUkG?%cs%|SI^4$w(4_0GL25@}%pt4J|4wA58^#}+)z_rb59Tc9G=x682U@@Ca`jLFUuxo5A$Z}*cX1g(DaF!j zTHDs%O6lx%>YxMsR9-rAYz=icdNflZ10(b8qbH`!9|Fd41Mp5~lz^Ijg0hicGH;=g z>$?Eic8Ec-5x<(u3*I3{ETrLYe+d*`*ZyKgL=JpmJzZQVV|kN1RBp$#DoPq+tJTh5 z5bY^BWv8qVO81DH;0+RuRD+)lXrSD^DEnC+J8IcTT=TXX$&GjfzEWI!VZNteZT@g9 z@9%|PM6L^-0}Kp*CLeR6NnPI0x72eWgFJr>{XH-V%vB0OO=2o8wk&9rhe*`<8{hJ& zfh&wbN6}wtzVPbJ!AY+;k{%1~A&1|CBO4J@?4Yzv`x2Jv;&m@<8=7 zMk;WDpf!qFW=lHhb2o9q4>%b)c*)PPG=^;DyL-Qdp9e3SI>oz;Sx8MePTa^j$f7Vq$J!fW^-rNWKDE5X8qO zd?;M?Q1w|-*}gbBz&fpq-Gqkzd`i-o+;2s}7Ow2e3t1toXQ6}SarSTJ6z*?GngFuf ztZ)kqA>+TR+X0+jYmHF5G~U@~Lsy!_!yMQbQ%pvb!*kYL?Kf9wQou8d-PcBHJ!^5i zzvWY@1v|-!CwMlI`$BP!cbT1yRzNP;$}en*U5VrBVi>F#Lqp=hj8iPp3ymUSzv7(t zpFyD=E?I@@eze0yB3`U)p6Ryv5Rj~HU^GCUm>NXi{MljeObBgtd9wMW7i0gL6Zz8w zDP`Wi^E;Bp>754@4vqRRiA%w7zI?~ol(t2q=W0046|VE%Q|n8NM(pF?1Yo@={^VscJXCx0SPAcohLw__fs$P9`l$*X@W4bL$PAk{@^J^m;FNiL>w)D{# zU1d*f39W#^UcV$5&8VYp5Ec11(-ac;Vgi z^pstR66+iHH8KuhvY-l3eiv~74Cr}qh{mqpvVjsq6@gw|(IV^euI0T?nC(Mj;T$@1~Rgk; zIN?K+sQdAL_4`5dNAzLW$9mJUZkPX`Gna!$?ihUbaiyjI8a5qX2C^Oy@i!_g&`=Ce`hwZec63FJIFD02^U_)s?@JM1w1$w!JJ5Abp zFns{CjIETK^pP#FUl6WkF17d(ui0$VI!GZjRP9yr&MS%EQt-3Ng|Wy7cOmnQQXTyN zxy~e2@PA|gyTsY})bPD|rL)hWOkCFa$rz@LX}g`QU@dI89n*e&8M;#x8+h=UDwG%` z5kQ!No#OVY7yEdaR;?TJzvmnXjpBVlHqZzeOeZ32SoGIf3{+jmP zQEl7}mjNXDLUleFgy{%cA6faIPuYgBC&J5D_9)QIvGnWp(X47L7b~D-G`;*V~%=ylBV4}C$_b(rYXvSTl$e8MSU`egiz0t$(15psU7>;F%m$x{biQ1!Km zhX1E~#sOI&<%SqL?YwPr>vlhh%r4;WRt?aZig1dYEq5(oRwiY9Gt*M;@J||zLuIEo z_oqc@U=SFyj>nO^irm}Pw!BLF&HQI$fP7lO0`R5YjH(mFP~@;Q%CVY zr|)<1_SZ|{)itbATj<2gUli(+gjr8%NL~e&)1rfYLprfjp#YuBAqVytF?_~bUgpykRv40g4MPJ=&%J6qak$rB~k-*Fan+>cd z_Y71F`ZwY`^Qn%V)T;yrK9^m9`UX#I^p80Ot)rDU_44MpaOkL<&>bg6b%S#m(N$gu zBeG%~6N7>_?@L3~T0%z47XK}-*nRCcHy=~(CaPUrW0KI|SBJ!e9)HLehLE#K$T(&{ zbKd&>w>JsYZRTM&=dvrrmQ3-CVliL^G}x)C?6J+9Cb22ie~Z2_Uabi?WrvUwey{z~ zUU#A7ExMBxnxp4-0fVO;T(<+{M2|*O6i^n+2D4ab+y_>7(c^MBh#D@ElQ-2Y zLE!pnrszh+ReF%v#iKZdgFI?Sp!yy-{|&aHX0-_0q8}aIH+2PiV08ym1m-cQY2vp! 
z8m}_QUUNGRFb-fqUIvlF7fau|?I~=sP6<8N%U~Ak`4BnDQWDVw)D(OOY`kQwFU2yu z%RNmn{N=}y?;MGl}=66g+V=RgH0dW(f;P)G(u2@ecyDE z|9KOoz&NFCk@7kh*YoGH!OYW>RQAQZufFtgPsH5a8<;k>@uBbDN|uoy4*l1I7#JwU z9q@u4UpN%E89i}tugEJ(hz`==2dH^`VQr0@r^Xb8vqUhc%3X*qOD=}ev)I=2v3|f4 z%+2(`lhJn1YeA89m@U?Dc#K;(cYHy#Kec_>T>?50jzBR#uYwFcufe+}($7+d=HfV0 z5ZAp~J=aNQffF2gm zJC_nE3cdNa8Q3|R1#JX!jP3E^yXpD9H2lbuHC@SK;y`OR)@MdM0cx|v*=30yLA2T1 zN-fxI^m~ae8nq?71PIoHPcMer(tAooE=tIqxi~IK3xvp5^0y?2UH;5jcVFwD*R$Jh z$Y?xrJ3alv=ic8B>dAo6DqfrmynhY6a6a0tnm&P6cZ+KbB^5Aoc_=@KS^G4hT?CLd zOB8`eG9u81>l#=FaRWkbgq#ojwoXP3ctEoF`k(N;Y8Dq?C(xfgVJHdqKc5&ao7e22 zz)`*$vfP!i0fwEZ;*0lSfR^{cQIgtIhM)1ld-Qe;&tD{n(+Mpoz!Y<*3+w%Rl4Z6D z3aRA=c1>+y*@|+jH0G@1*E{r#0VKuJ_oCkqiCxO2N(LaF_sqS~5ERS22nQ=$q3Fc8$wx6BI0E zqo<>k#*?0;{1>kB1rYa%5G!HuRkbE7sV#h;1|-wu7(qEO^!f*3e&Btj#N=-Wu_Pmt z_90JK(Riz5UUk$~_u7NXI{{)Ap3n(UDT+$g`$=G_pQ$<-nVP;>$D<7{^??8^C6;Td zsC2!$*1=32f8jI~`;J}+?&`fUom!PJCPO}t8Vm;+C4A!3x-F0`vvD`2XGq=i{A+0o ztQEz)xi@HFKKsUKOE2)f(X;fTD__di^txveS+N@7lAj)jb>t|9wHaGjszOR=DQFqU z^gGz)U81*BC*jwFhOCSKj=HF0P?y}0?!(Hw^6e_d-Hj+>A&?b7Q%@PI%MJ62it`R@ z%mRD@zIHjA&cSjz33tKtfR2GxeuE zghHw@(`3m$Ljr+R7;+An+eb|R-~hKC%h^0Dcp4G(tqnl&}d2KRXY zYi$r^rr~{2(wPKb%{&uIANsV^z3p*MXuHZ+qAo4P0>IQKmNPQObRInuJNJ zj~KpPp@hybbLLrkH-gSfnx+OoyZTxuHI45jM)yXNT!DKRzr{1?F3O^zc5E=*X5TDG zjqi;{>QSY#!|>=ZIqG0i{^2US(&d%3uc3a~OeP38g^~jym3ZUcJUrcrZKA&)9qHsl znLNE#A;U%IJgA@cfw*y_$21FXdzE*iSZP>Hlm);PI{UC5p2n3^!#v27U(NpJnN=`L zT}?^CbS5d?oNQ>ag@(c`o;eH0r{H?Kmg4)I$TD{DS5#J?9_A-Ivu%le#A^2`UDU(2zOg2`O~ z`Ee{=-Hy7bEtN43$&Hr62%B&`9mHHu!qe$i?ICJVPS^F(sy9YN(s-w*N)CzC71YW% zBHBVM;Lp5Pz>?|iYtxROS)Qc03`{3dez4gJyRk5K@8$5?~p zL1Kb|m=kKQ0AW97*0e3Fxu;i-(XzpC!h-f~M>lZf(q8yIV$C1=8P*c8xig*Pw^;v% z#!KJx={(GhzmJi5dY36IQF$GU2nKO!16qJn7J=za$7d1KBXw{h1uvXjin@MA3)-8r ztu<^Ximg;^w7ts>$V2HRo39U$aimZVFvG@Jgp@=bvIzK7 z7LeiqQvw$!c`gBW`|OicKJnONwNDaMc_{G?&X9XstcxW-N>qXb+eumD$gl5j_JXyk%CY)EF|@wVrK*_rL3Nv4{CIhe8J`|~RDJ$p*<+o_Hi7Tm z(NHGcqb5TbO!4$|Ml~1fi zIJy{czoOdER&nwr3@|)Rrsf{LWYxq^aJrO#xq0O)h0c=~cIv8uQ@JLF&X`movrlmK zn@7AMG7lH=r(1%DnLu5~x&BZ?e2qg!_C<8jU%l~`7yh231(+Ul^6&s7Sw&h8(Zv`- z(H)ngNCglA^ITFujg(42@zJ=U;FiTiC9Iv@p>3V)M0&Kc9;yr#;I9(O1#cDOPmdXE z_DX^3>Gf5@!^ur>q+eO~2;E6(H~4hY`eI@~*^{KBN4f?H0~fbTl6Ej~SlAWi*aPFz zu-^g4Bwsh^ywyk)n0VG!%ucUn7PC;L9rLSEA1Im;Sa$vKvr$m`P(X{>({6pa;U@lE z6@gz|2vMu0HC+Z3wG9=h8C9l;7g7~05Cd;8Da3o>yISQYRaSArKVx z>f%0plu`8Sd(FLO$o(~>TIZ|BeM9kTA>`6rUS!N)xV9}BI@-tVAhae82 zX%>UBid3&$#ey-VkoJ*w!uh};Yox7S}Yo}vAy66a8lgEOOPSf z(`Tac4Ah-?J|8K;HCtd|d;LaWqrts;5ZMV0o+>%69Ui;V_-MRwrs^s74Em8IPe%*N zK%?Jm`|_1n>S(KyNy>t@d2k^k6DgTpN^Vvf$HcxK?8)8QzjtqgfP%GX_eF3U+_X zb*99)!W*8y5&ex;kZr5WoG(Xo(LW@r>(LxhviJOgUg$31Z*1!cF7_GWEtYjGSee!l z_BWtkkM7ssh`!#vv>Q9G_-}$8hUvaF`SDFVqRy>;V&DIPoW-RgxOGX6tdR;e%zUwW z6Sg**)`H@qU)^{uMS!~ukV*Jn2VyOd<&;REz&t!zH6*hnVm>p*ai7d~Nx^vw>6F!7 zV+wFAnY8h#8+#%@Jb^6Qgkl;NPIV+sofsP%(Dxn>W}Nj2tE_ZUn;m%G`}2$t za$I4p%;k=w;OVJecB~!|;HVpHtu$NS=O|yM|I=ipC&7*xvX?EZe315~ZuT9S><3wy ziP_X%%B?*$)}93Rkzu=gAiuW@LfA+0)O+PLc#7obUPPVbWTRAg zc!;+a19MY>E5Rd!vzb*dGS|-dlSbq;3hP{tjRd2{MV4Mq=!)2&Pq=?qxii=gW_-8Yu8)HcBZH(ds`$ zXh0xeO_SXcc)#ym6;!vmwPndqQdUEeLGT#MQWeD}kYi*;nGzPs5d*Y@r?5pR70NEd zV?s#4!^khn-qcsFbr7=Gp4p_qdRh<94{l>^7^t_|oZ__ReA3`EkuNG)ujY2zOn-JRjh`9-#L;H5&fAeZr(2tw6Yzm+5SKl6RVMZW?iB zYLhpZfZ^ZE5W4x}5BrM7OTGbZad{V`OKOjN^6b0QhtD|B1IY0Keh;FLa5;3(zs4WS z^Y!-eJyzIoTqKm`{ewsW4{I7Qg66$#sX0KBxc>TuDWTdD7>4-xtRj%ODu(;_4>F?E zZllm3(6;o&OfDurpC5?9Hn$0ceZI>@ zP+RmHW;4rvrWJxjvhF68@NvhbA~}LGoc9pNfhEN4MwTNK!>f0tM|SkBeN#Jk;EU9Y zmv=f|&z4s0*KqfIiNe_9^&RP>T*qM`Z+mq?-evZxvHWnSz@kq(2+qew9wEZimS9@K 
zq(GN(t3%UNfc+nWmlq{;V3g!nH%F;HEw9mB;a2*RM~!c5WcKPRKklxnGcEl&M+ga| z2d$*oP8Jp66Dgv#^It^#_oF|!h=}?aJl+-8@R;n9Er93E;y)4H-dJ4f%o$5LTkdSAMZYNm<)3Z zRoPS(zFy!RDL;pp$a;VPOH$@Dx|*h=>nO#NTx5fpmiBWkS@`GLd%@O)X~Hvn z;f_h#ocMo^f2O{iUW|5K>vUZW9!eBmm^jv=FHbQAbOJwyjwx>2WqZTW)l?ae@*VVT ze|SZG5IuS7k9o2GS1}6xcIGUrj39sgwQX+nF1RE({F>8-+8+3Lf%)DVCIH8Rt}}@t z1ZCDSU?|dX*sww{tZBFcu9}1(ASv`prSC!*MIp+){iz6$PJA@1;a0Fzn;~pnZtme^ zreKimqHU&8BnaVc*K5bXh|%UqRM_$YNDDbt_NJjCQhnl-ao)Cp(Wom`h7t0>R5|;u zM`S7>wlYk8#A=hz;GOU8B`pZp?vpL1D>J5bjZ_g#VjH%WKM0>o#v@hb*>xA^(er0- zpp#jQo2Vj;nT^+wr1;^SpeX5L7*Su6Ex?FU4n-LL-X==?Xn`|*L=jG%v|pq;1v zUy7LE!}keOImtrpL-;J_lY{>@A^M&0Un*i|kJ&QY9b*z-llz}m<}&SziQM#@S{7qS zZ=|U*{AGvnpZ}4d#!0aBwvm7SZ^^^SmJ%3+9%Cs%`#jD)z|Ggcb?qp4m<}=Lf%#9E z4rHu`#j{wGcxoV#;le!N1Mm@XKV%Z)Er!`J^9rr^Em5-Q>eMGPT8be|%&EELtA%8u z2qYycJJbmiiM!D6xQ@DlR%c1F^lA)92hlQbLzrO&DCy0J`>im-J^Ay1dJo( zixiJ;I;1n?3~NKc7hGI}m@$00Ng4K6D3&Hq&g075QNC6}>BL_(vTAPKv@W>WwVs;R3 zZi_M590uwjXhXTmk(uzY0GiFn#MzVZl656Hgi_etvcxw}>0g0qFMW!k*N3QgG&qiS zJKVyOW2v^MVUp{Bif3FLNchy5l(lL5uETRI)g#3DNH+hCqy3)@3H{Ff;b`ITj|3^; z_HtW}kvK`_5v<@Zg^cq^v;^^n@8xFhgxnuNT_jfipS17M40t_Jin;TmUs!Va*V4HF z^B>}>J~D0c-4BQjyn-sj>{Q1f@}DF!iFrSzJnFh37$Dx&^x+hlw6Y6;Lv=a{TNiK* zwktOkdZt};07%Z9ipT<101XQP%Kws=g4vThgchxXoI-YYQmhte2oFau0J|ycmkTBtq9q4-QuiOQ!MlYX}X266s0)_W5p?#Ix61osXIEWA875 zHlqx}W#av@jWgiEoQan?JPUE;%FJ{t0uZY5|1MEVi^$8dC}0aHewKpw@9RA$5jTc{ zj<#Xk5(^Cl=ZvDSx$yr^QLg9+Es8%k3JO1d z$o7jm4Li72z|PkB93IarE3md&cwI#g2vuTr3pM#s_DQ;3#3=6Hx zJE}QJOLvG`_p`jFGv1yOh#Z%-@Ng0ZRlt^d(r7EwI;=tQuVHZ>v(-$+fw@&=1S61L zjFH3qT$7A%g$H;XAKp?jwV|u>fZYApUhP$p%!#-B?HO>XLT*OuM|Mtj)n&{RgCXZS zGyr6$TXt`~jA}CGi}aJ;Sl0UsJ?k=$I8I%m|Pag?VAy zZPGH?|8l^fvB?43O=83J4D zrXi~c;p2@Vcp+yHER!|dw6=P$?Qz{gJ~;P@PVP%wl9hy#s&%}{`ibve!YKA1(~sYf zvQ+i7!NLyl{pBi2k9}Y4WRNE82#24cU;i5U9V|1u+NsQ~LZ8yv99(!Iv$n#NZSl{$xyH9nNHeS0L{`hpH{ z<_%GoxL(yM>Q;*ok=&q&!jR`srE-CII)MbD#3Sih@C`Tt3a>~Kb-s$X980+~NvY-D zai{KWFebtm(YnuUxg!~{zI@Y=nyzI)ygB9oA-JfJ#Q7X3k5hKM*K8J22ui8lf2G_U;}coJf#P?=}l` ze(tp~fM643*cqq;!uNC|i(7W0jOsdqxiG2h^_fAOI8t52I5IIcOl8*rNiV9E_Lrv@ z68=rZl)7|2HcitM+b4Dv5uxL%*b?Ur<-szXuB_x8v>3tx0A8T{SECY?Ou%lQfdyk{ z`dzA$w-r_kuCFW$>-*4=Zk$N<3}I24bdpz>Z?(T5O;FUW%dc}I`q&|TyBL=d!dik4N_IzmA<#r?vrlPJ;?oOx``9mFIyyq?ET`)c& zY248$V|W4$qYBZcauqFYh%%E}&|mQax*9exh3a;vJ&w|Aihk?5w-)Vf=K^J`2jcDQ zGdITD+%#D;1M$^rn1Q{9%A7yMdF&n(uw^m1C*BR9Q9U+NtoBY_#$$`MizUZu?h%%n z;d`ccsGkTie#MnYfG;+j0o1*ZQx@7|HRkUi)092-5+x;yF_apPCszHfuWFsR{965e zk6@SeRj-$0HwT+zsCuZ;Xs{}cu22nO(X;9m_a=*+wvO zbSzBuiuUOje@T1Epvy);DJQqF4EJvRBj9=6GL#Uv#6JfTSe#y&xd3Nj@I@LcT*Qy6 zZotGtJx(cw0DCacQEbg#jX6V+A>U1D(`ow)k`yITR=7mo7naZ-dl_Oj&lo2P zOp8q^ap-J?Vs5QR!l!6nH?EUJQg+00hE&+vf%xY~Z2O!W@wHjYk4V3e#^hMg70vTU zo65Upj~za`-)&0$=Q_HIwY|IFv{Z%Sw5!-ESUNhq1E$8`oc&^{ftcX%-(2VN zKoCr_ca-72H%0;>EfB9LQ26ExK6vcAjGRorl4VJhGAyO@1xerb-Al5<61O0Ri6O`d z-8envJ4A!@Q*FknST~-X@f{eP_vI9E=qy$s>`6!BP?Qp)w4UYzO~@yJC;cwbIRZK` z$nir(RyLS*wvyC4E(wgHw$g^EJe0mH$DhC=;i8F@ZwM9mZ2vaf$)}E?x}?Eg!(@Y( z@iCg_PROK?$kDvqc;IYre>rN(UH8zsrMrN|B58}vz{D%eE2|~Y7`>|*M=<&a$F@}e zfaT4!8#>htA;-4#h74N=F3v4@^%9Pux5U4D{hY8Gj!&$pcv?IC*TPG$1)O;!%egkP$+ytSxD|=v+-$E5=0pK9x`LL@@#4Wt4Tm*>i z_Q-Kk%K-W)BJhCEPMD=+2!<>|`g-03Ay#I6Oh(+hIgXpgVYYfG(t}dqza%HR5Igm- z%PiGHU^Mm(<{c$~!sqDYz~O7;a5}i;VY2Ytik!CzFEqRH(?eij8?$t6?NqTSE=85tvo0V>M4IVq|zZtjDR1s<-vqft(C1KKc zGcrX#4+7ElhY!zW%kVHh55wI0rFl2f@HD zuCD!7Qquz{McacK@YWa+9QNgSD{9ssKv@Y2vdaVqpZxky|M;uKUx9 zc6ilDf|?Trh6y{Te1@Zbss+yJf-$2v7Y$^G8VGQRzDvM@eL-~CtJNlTx!PMHn~iP# z){UYp{@|nXuv~j#Z69^`h^jo3N9TTKf6+9Kqptl{f2rEX#m7?f)O^F=6OMC|7j=e$iCD}d4r$)SYDhe&<*{O4b~ 
zq0;ktW=IasB9&0H~YV+Y~R-3QT>g|Mk-B!03~(HeuA^wKNn<_rRvh3Xi0WahMM@F)yHNh{i;m%-Aza}T2{K6!ORN%;dhV=BR5cL4~ zY=E;D%TWc?sA0Qps3jhN_^0sp@u+@ui6QRo3w)P8d{W+VNEWv+ zt3#CmDRL*#3>?vnPrCe#`-3j=sPE0}w}<(U-{e0S-Kx2K9QI;Pbus5kVo)z+-!;I! zg<<=JNocTKv@{%NK070f4GQT9JL7m~HG(tcyLH@{iswwILNukl*9h>>Rai&!|*t8UfEG!jgT-|rC%0G|H@;6DT zpijc>-Qpu-=-P_8ONFUycS(yix=QqZ?sI}K-0YK~FkKhn6HJV-88o1nCAt_DXJfZO zgc*owk1$*lp&~Fj9ZAeAjCGSCqh%2)dtG&{NU(x6d!bp?-^e5tggrm3`s}nK-Og~3 zk{yGZOB-v62Hy%`C3?#K5@cGLvMxflv4R0JGF$%m9&o4n&C6xf4z?4@TNoN-(d>|l z>Y0o*qP{IK5AWh_7^R?!l$0fW?CnJLa7#j$9;aF!N|e9tE(u9nKVxa5w4RWpxf9H* zx|iu3<@O@2R2nKelx8D)`Pd!vCB?fKQ^Kh(gcQm{6%?&3;CNN~z=-v$b{7{rs1g-# z`q>MWXSU|c-=X)q$4#KlJfSe?P4QME6p1nXVK@uVm<2*LSJ2*GKlHDnbW*GNfw2x=h z-uX7GGx&P8wR&d(Mww1M(L>Ndwc=db{vyy z<6X%;7`li@NbE+W0EJ=ByC;f>8cA_KVn5}fmO27)6Pb%_p)za_ax(Kc^5x!b%nVup zGv<}Ea<;t?oIdUTdT#(sPawY|j72qWmuRD`bZtLN1xfhR7su-@w#~6`s+H)Td|0tD zHRec4Ip}INP>XS!sdG_8d~)pUVewrEt$_&L9i0Z#R2k+a%vBS$tO~1*>?yK6f%|@o z`&;(m$hsbl^4urQ%_ag=Pr13n_qAGH+dou>DGoFwfqZIkfS{huyEF=+NL--QD8@UL z$xa@b{NwIH62~~UEu|``*}!a1I1cm@N>+!xDcOV^3c+-#%yZbG@kzdUf6`@$GLgk#yO`$Eq82Ksoi^ z9F>UlSLJTp4}@<>JCEgKl`z1B4Eoe2MK+clktJW>eOn$DYVSqThn+A`Oy zTBiZ|Met+Z+%FiTPJ=2TpV+|jeEQ{>K_Kn80nmE{mj>LC0O>oK)wB5PE=MH;d1h1n zCtaC4aqSlqXQ1Xx;dUdL3f*$(UR1eVY;wu0#BPFy^Q(8?gbIbM(6iY^BcDsIu#g=Z zMHT-O&ZCUE4wXd5Avl04#W_lzYf|CEab(vp^1UC^y-%9pdnv-h6LDN+v$q49!IfFi z_MXMRUq7}=lnOA6$Q7zfmpmZwyq`bczI3JX{uM?S-h)YW7VILAe z$N7!H=vmo&DaBQ(rJ%}G?E@+F)a=1b`Lx3M!us~`QNMp*74_Lj!=I$k9(DBW=hEXJ zWTNUZBC;>vWQsVmquS_)-U;J?$)rM0>ObXwr0ke08MTZDBvn1m;?Si^tHllO-YcYJ zLV3#`g25vWrR4&!SlG)SxE+#r)*C^v6ZL%hJ%cgesSFfTx6e`Bh(sqLG)a`pj#32R zMGQr$ovK!)8Q6a~A@^j&s=2)3tqz)I$1Y1(hE}(76gi`7LKLrcf_lK%(=@`3_!AW6_yc!Ddv4j`%p($lYj@DVaQ|A__vi4Y*m zkby-23@D%}$A=-SToO7VFYjF%_fA&^fNCEexr8H(`%O^?=H3-QkVWbKb z4VA;+9{{bGKH*@>#;lwl)4L6dQ8J`|2elEG!4T_`Uv-OZ2L6jsssE1BpVOj&(cqv} z9;Lz~eRV^A7${oBxKizd|2tGN>3>k@{-C#N8lCcgx4-<;NFibrLN9kq_~q%7i;``L z4leRH{TzE~}xSW_3pfQKfV(;*dT zOvxw_f~2UG{HhR4Pf7IA7A4b+p>#AvR#fL)dV>kDMarf{ed@5olwI$=JG<4`d@%$# zV%nhcHiidXG+LZ8OS|uq(y)%}goyTel<}8IOL8d<04F{LxA@$&QY?XA7K1gYO^w7)l=|mlcW{sGAz0J}hhCpe5d!f1K zp2uVPG$9>mR9gX>^4U#Hxc1>!ZAr+8N5cP0b7*TY@n5ndiKc%0|Lnub4cVMShkW@Z zVLv}_{ru_~VO_l1iFQgdv>Ns`WIl^nRm9P=)j)JLq76mqq+`#73Laa0170CYAq*tB z%wC2OlPM83Y&(=-48V>6V%tD_M*}iLn15bNJFx>)Vy5NuUUBm6B>7fBK_WOqMqYuA-q7!Yf?9aVu{=2r%$F1S*#ex7|CvLazHJf~oc zo5UHhbt1?g-19`JZ|YSm`Y#6!C5@nb^tZIc(0#?*^R4bW$J0nxJYZptjc9qJb&*R# zb7#9}x3H1_;3|_~zvhAlJ8LnKom?rzf_ag%N4A?o3Q{ua9Kn^nr7pk+{uYeR(f_$U zbOdH0VxkM5ex_5vkg2E?^fGv2omk?><2BahBXULZqa|5Xzue|E&Hu!Z^Mq4MPUMTv zlsa`Vh@)`NOS$7^oKZ76X17=p8DyCG_U=dLczj3gHhAbKdLPEr=#eY}bTb(#!Hy0T zazl3KbyCmr^TO?~p6i1ce1=$wEHxcCqJBSDrdDt`9QU^d{bHFF_13d(nw+NBeK$1Oh7TKD?Ft zcRaU^{+i39zs*LLXW%;v36H8b8LGPBJIm7a#i_K_Jf zu@qU|+Ce6?gSPc#LTP#T;z-PyQ1g{bS_#kU^`ZF*OflCs{IcV(o6;;z#yURyZ~r^y z6L(oc&KFUmnzJfo$>FX7Z@bcG6&Tylwvy&N9F(5{2^3JHgbNERSV?2Dt&!@uZYX%@ zsa#?S0ut2r9mhKq0Ciama||pFLk&_Dg@>6=2uD=QWg^9$(%J5)NIHxxCUXjM9GM3i z22u18(k@ow_>V+-MzazbgS6<$j;R#gFn?g%!)mUzb!2}bq+scu{k+V3LZ1^=SAm~x zc+@2Kniw5n1T@%49K`l#1Aq`1y(kYEOqd_4bFV8?6~C-p9`+K;xuHF4M=GuROnIF`K`suo3=T%9dg1D#`A!FzV}6ue?4Yj)V10pV7w+ zTu?wCyWGw4Q+gAB<&xv(rJbQ>`TJ|BQbK9*M8=mH8XBoVH-zYA$ne-a1DX2kXQOs~ z-M6Hd?tYqaiqBH)cC7ll zOxlw|8V*N!yUEFQjq&C*O{v{pKMerNX_pd-p_#WJ(49MK)J?-Q-Q3_fEhawkap)|2 zIMgYFfZuk|6wDhJo5H{7E&wBwf4e7fIp|-mvj?WNY7!%wRHyvGi;Y2c7YFLB1To<- zn2QJKdj(~imDK$FUnfqylN|YO&5+wu`ih&R2S@cdhagE{6a|S~{2bkl)!R@wiVc?U z4s^J4FD$R~5JP_$%^`M3A1_YHf~J|}cFPhq4X$uQ(W37-_+R~y_hl`ti)^MPeDO&h zy$UO3EVCHA9w5T_0~4?8*;GEGAorR4N(w3YlI&upte~vO)(>fX5wEXMRFfJFvo5V# 
zlIFDz99}I7&$)JBPqiqp;Lb^tNz^xc&W$8a)#WK7tPC}^D)H0tW&5Mdk;wjfN2iJV zKUIiQ!%rgWj*5GKw+F$y&Xfu^nJs3dX|iN*wm5YN?+eh2nbmu5A16J2t+5mVdzSxb zz<>=PGSd_*6^|rrwLkP-oh|g@8C3ob%ZY9Q@V%_r%R^eTVLjU z4U9y>rg>s>V zbXFrs9h$r&%}(^$S6L!viuL_Mrt?N*Q2bySU52ixXd8b~G27=6&`Mm6Xf~{xKLXoa zSb>|O`nH-_P9)^0DzD>nrq!XZInm1VD>dR;19iB~4P5x`(f#KdBZ$z9{CK}>QjL75 zfMD_zejX2YQT-!=(z}^?0Z)#^4L@=ZeR{&u@+aPS?=6o0D%Ddzz>KsJ!vv;#u>l1O zz-6y@@=g2=NH)S{kDQney&w+5TE|GlV`nI5>nrRJ3|Ua7hYjtXs>@!0g>qKZKpZHU z_LT`MrT5!FiLG&rEj7#}GEIK<#guXP6s8xaFjTS~?uiDC5I!bIFm=U=ezE1QzquV7 z@sXJ5ShBVnwzQc}N`@sGN1BkO;d)KV?(YWNr8uvOHQj7B${`sX-WC0v$`H$H2|Q8M zbf&uW6@s5Rs@B*zGkf3X9>e`vuv*;n7&6k&?F=7qAjUj7 zYmZmca)`TRq_6?9nGugj;C? z#MbthPHJx?0O;Yr&~j8f7>Y_}J#C|OlzNFFG?WKTT6wiVgyQ_rk+E;nkpk{k8b}t> zfh8Qd~{9g02pqU6keqs1?sZqx!yI6PD1(gd&9*d8~a zm@v$Ut14@7s|eaB*&$}9baeOfG|1a{4D@|t;-@f?0GQ8STj@iE5$E7Uh5lX~DP|4W zETTO8{pkHU${(R`Ee$${%rBd5x;61^W zc#Y#!!dl+=<1ct06#|{#go#Lx&|Tf0cP%f3o{WuU8YsH{rdNLV{Y{~5G^fFFp{tR- z&dt=&hKuh<15-(6MmaUT`v?a~46ff_cUlWarGEQ~iCkOY7FQ_4N%Y%S_`n(pdXuQl z)dPUk&hX3S^w3O;0cpxD1+dafEX`BjXObp*h|2-ei~E{BI>^F1faLLO@~)dM>IlWg zd5aMMc@T#sNUYA>lDv(teV6jG*!jfcLXXJ+vr3}2S$JKZ&_xGo8N4Z%1o!mK4 z_eqUx#8h*V1tcw024n7eJ$uBcQeCuTcuix)D0M$ae<}YZ9ltu$v3nb?OMNTRAwQF9 z%DY+?Lw#ybVG>4g*=(g%PJgw*6qhSQ;;R@fjojzU{p)%4KTmF^>lgwTyq7Yo4xGN+iHPw&jJW;oI3}6N z>>b@98Rfh`Of1!PJ+eC%5k5HbV2%U@Rd*whq;^P6+im&P8P5v_S}-VJ&mmpn2QA-S zuV>c4tRB~TfQI~nm~0soMZ{I%$Eo1GQCY-*JvqIeOJ0KKa>-|pQH6nGoxTSZ7>IWY zKtdui0!DV&m~A3sXak>nPkR}3#?0&!?XZouR^Z=%Ib$!A#`_ZJLdugc-H%DnV+=~6L9>R+y9kLAiFb^PLwJ6J?U2|dHK1;^5N2(1Da z{S~vDsgDMFLzf&qU2iyu_GQt8+`>Q0g|M6JlZjtFVcl5N(jMr&LBR!ljc*2CgN%XV zXu5&BFM2oRN@`eSOzgWQ6vsV5j<@7&rs;G}JR`0JUrYSJoTbj@sZZI_IY}p}U0uE* zfapX6bZSV@Y&TzJ6(U&?mWB@$r@u6vW;O)9CcVIdaeZeh3>pjmwV@R4njgXw zyip?WTR-0Ctg3e9?cBD^k}`f(l_`{~p*%_SmLS4__U>I9MgsJocg z;oaJz@u4^P$;=9y<)}HNnwKfX#63ZTP8jaF3tP;vJy_eAqwmZ?PPb z@AG#zRBEiHZ5?V>QI{n2)3d8PIUjBRvnpo~O9(1b-QCH)h$gaMzY-?+^AU&peOvFB zh@j*6u8T=P>-RlV8C5Qep@;a=a4Evn*R%AyVfOGbR=YGe*4EOD?GfZ5=MJKsW`UTQ z?lmh3OR8>k<#A+gD+_}7$4bH!%tFMA)IA&+3S5zn8INK83WW?M6G$QQRY9EWB|xJ6 z?P&>X?Ns(sxZXO;VtOnsd(#Y0#uQOJilf{?lzx{%(=jtyKF-xr1^9V5}(8K#W>sp9?R)ceF{nQ8v`b7sJb_p5ML zb|M-I5xN9RKK#g5PP0)!X)eyF$|ZKtg*7*wl$L6Z_3N|8hSE|>CZzWbrRQ`M)TCTB z!el59vP$PW6N@Qi64TtXWsY<3(TmA~Ty#ux#0UQo$DpGmyo+AuTd$+nEuCavJGy`noV2Ds2Q7c@&Y7m2)!l;rS8DwP|+fk1A? 
zhx&?}tNIOT9IKSQt5#KP&mMKNSF1{LLEs=3JQ%;qTu$0HdHBwco5;M~ixpRj9gmB5twWB<52koo9(c$5V>BT} zEXe@-Q+B40>fXW6$w_@Ixr6%Gp+1`O5X7ZBfmhl+IJE=MkTl6oZBcN)kKb;c22Tfc zI#}Sv5yHs6f0QxIEi0|{nyL6OK@CZK{KQ)g*U3%v>Du-OkJS3oVDk8H&E}J&8Vsu^ z*yHjix~1l*ShTo$B{I;Od;ry#L@YiL?)7Oa-#ze*F+vHBVN8G&n&pjS445y!oX+i> zwF*0%8&k$+oDE&4U-k+@Z(UaEuf&eaY!^K^Ukp4i!TVs`+HyP1-_&v3?pRVw-j(}z zkIgSXuP1oMl=R2?Ny&*&0s{GmhoE`!rGYXBlB}hUwfDiPp^AI+_W3qI5#U94gLex^ zH@ldzjHA>;fzGJT_$A8tbW&9iSjA%+FAs4|2hYvX=W})7@w(rRp?JIwWr_FC?xptR z3{q2$%?4qT5`iu6jI_6&{L4ncI_ z-_xY@EPY>_S@tid|54CMD1mzXSIo7}@OD6HHJaq(KH5mWgjVA&yXh;O&2FKE0j<65 ziN3rE@6Z`XA)4CtkvMXZhB@?5d1BYc&PZop_=tg6dIhm&nfY@$;*3^4h`D1b%+J0yl}@1`i`+uvLrYNU|xW{ z5e4Z8krN-lh%qtY9UWE-=rnz-4g>+d{`>-kGFK9d>S@!}NZPR51QHO{=F{YI(Xg+c zTPd}t>7VnmsMGuTu?XdY(1S!1tcwxt$1>jZ$|68p!|80gVxGQ;9tHm-JtM6|Sq=A- zZ@A!k+ecXw$|XrK0YEckRgdx3>J&Lo|HK{>uK{H zcd7|>;82qko#HmqB3_Yu-UEcLDLiG3#)}xCjAq+)gs!A2Q}psQ-FGmfP{kO!1E(RO zfh-Yyxt{W~`5Pha1UHoYe!|J0cFUsN1D@=;1&V~f3ND^kYQs%>Ee8LwHN21=>!c zW2#3|p{|EpaIxq7c?MTGOY)*Y^;5OAR?yM&Pu8I#XJv(i;7GPICj?m(tsSMJeV`V* zA!sF6fR=J-)QqM0c?YV~@~$wh z)7+0O7rwN^3~Is{9-qZ7V+y;JVbfS+a0d5yYb8e-2B&L1@!vpR{+`X3Z~xUQXs}^e z8iNnS!x@dT)0wbqpPNqgCc+R=<%BV!1-DdrRi}Docf%Sz5FqC>{ zfaK|!K<-&m`YL-3<%2uY&os4|h6Igbj&&6ZFtWIx7-2gF-$bsZ(_O0DE}BaZx8S?x zK@wSw2*9-ybv+#fm{crG!5it2a6Ei_!0pwTWPoZBW^3rB&%4#PTLq5_Dxb(cEEck9 ziIoNa`P3`E38*zFeXtRK0i5&-3Mx8WAxK$oh1+ZC&_*w-E@Of}SmopfseeL}xn-3+ zK*Vjy#sSR^EI!e~N{=WR#wk&d+57@C37H2{WU|IoSpGXx!n789@2_#o%u|iJcT_G` zxEWscdYbSE96OQ(;VIku``KGb=bzys3j8>9b9<}oo|rezP-hZq2o4e^khfZh?p$K? zxTj^w`*I8W>-Xyf)9@Pk?AzIVB40r+#iYmikgA9tct0*sITFFcNfbdP05j{2V!2i# zF*A!f;NWICrd}9>!10Oiu^4D)7gotGKRt>M_tr3*P0N}KK8e1z#yI;{zSDqjGkkV3 z&|inW+Ai;7gj7Y9eiPYq6HbdGrGOJqY)fF{*|*rAWq8Nlr(k6#up0j4u`yDdCPVfY zu*}#O4LQ}M&5%+&xkV?XFX`79zg;m&%0Ii{);n+BEY#D9_<84KiMmv%lO((co0aa? 
z=43+9o#yo0s|m@!6fXePxqIv4R$FWZcS%ssen=a!%FHLym8L)NHz&-Cs(MQFuACyk zSnwSKiowW$z*vm|shC&Q#5~-g6$DHO8u1+40jdO7Y{a8G@EEx;3|!zVpMTF=dqe+ zOxSG~c4%j~jfu|0`@IdyQ3hvf0c%sl!By%8A#@m~?Y{-y2gRlbjqKZW z?okQcy)%4&^wCFr&8~+$W$Cr5hphbP&-hV{ju3;jRpE)5EZ2vCe9VU{R%UI6Uc^h5 z=oWoX+ARL#=ec}6XZs-}(S0E|SmD9-+}FI!2U!!;Yg?+zS07smesUN4-Eju7C6hCw z8Aw}Azpt7Kmv0uA{o#GxC+L=|%vgKXu*mL7GCRkyP00aLe8P8S9X2a0q%x1xl93DQXV52Fya9W->`UKkR_%hI8E zqHBfveyj$B3}q>o#WPU~8O6v03O%BW-7eAB&1R;Z zzW7!Zv&Srm_{m7wpKQvp!A9j5qJv6RlEubSw2_8wdqb~xHP)J{`Uxk=S%VG5`2tH4 zPj%bnd;5S}?Xu83I-f8vN|CBc0?E9#)lXvs$ua5C`)ZLQe2m}P`$<(JlQX$b9u-Rc zt=}<5-Fb-B6!PF)(#F#%eGRRvTKm_IhtNQGb*{~qP_)YOht=;L=9@|~@`_N~%MsIp zKRxlkHbd0&eK|k!qrdfvW3`HB^<>l2SGp1Bff3tFVN!>p?damok7USw#jaQ3b&qAe z{&oi5ATl$oxR(c`W4{b*u#hAW2wkuPhPe2J_$|o_=MZ@zU6^bD0y9)!Xi{16eJ}#o zY-P!t-}Z>s1v3Ysb4;$O6&HWRIZ^gZuK6K!i_Pj>SJjRJ`r*mD^i_ZxM%`KD#nYu+ zaw^KzHYz4EydRiu;4M~6ia@^odCa$4{5 zG0)H)RTj!rJzF0dbWK-ldwt|9It)ehUfqGU74sg|M{u(yEJp^f_Mi+)qKo=bHrN`> zVJ(j*h?TCJd(l8i>ZXOR+pGP@oy@oQswnyIiI@Ib*zRz7RZWPVe9!%EiG`9uSVO+~ zaw5s;B9#-TpdU%^7PvYOD389L-&ImLQI&Y=ny{@S{U()v1>~^Px zRSzWGQr;olp&gU?r;2>YdKyFVa@mhHjD*b^XB-1ep2!H{Loqk~P!RFLRZs%GB%-JO z6jTLJz(yaysz8Ff{Hh#D0e793t2DOWQr02`hy&DL)}n@R(3V4PdM6R06)b;s?4Wcz zM@`n9ge9hc+^ND`A7^gbElY-EIciADq7^E6zpsZ^m7^sE*aJm@OTppQ>DCKq zh2ljo4KPCJ+*v2woGB_S-`(+wPW!6)Ie^ZRB|?qL^`P6 z_RrH&4P2~nJJ;MmIdPZXi|!1OVvmAFHjsq71kJk2UzI^UYodF5aIn3O_-BV1*@AJS zC*Iq!^5Or85lp(zhcS*9nBk~|iNl5;B&8dNeRzjKIuY%dk%z%4V0SOFh1WUS%$2$i zrIKmTCe^$f?dmMAylelfibh);7?IrgBX+-99!WAh;!PEym7V@fyn?(TbDGyowBPd# zYt}Q@jNY0{=VyxFeK_3!AbF7d0+sABoWi55a(fsyer1gmTDJ`{LwGVi{3D^W#1r*T z>ZFV0LXyC=S(f8uHfw&oubYnUIx^dth{p2O2=HZ6hhasJZA0qGCV7}|~ zPig$_`UFfy&+47D{B`N4q`_-w^)Xucm_3gkU%G}>hM3KSYLL2krCM}ai;2kJQ&B-v zq8NU^ZNihvX^)h`oGo|6S)oD%1OOa#DAV49_vgE-q9z1(WVk~*8Z>C*d900iEwu&T z5Qh1a&O=#ZfhZ)E$+2b>DajTY&MLP(Eh|dTV|UdN2|PeB+zxsDun7L8x#Kd;YL*ry z08kOlMnzOHBy7k|rFhvKRa#+YQj7BbPPO(e%rdxYW{iK$IEQ8}4yqdL!bcdIgwmIrypPY@Uax!vhbudOY7$t)X z>S<3$U(NNO9QzIle$|u9yhB_sybWqVVjL40$z5|D>so-z(Ojs6pu1^|YOG5Om%_g{ zyXF_CuQZZ3+qTKab0a6shpa-90UG@Cg+p*0E?5a5{1UQ-qTG=_*X`Gm*@>l_-v-!Gm7jf!{WVwlX2~f{=vIX7QCLrdEY0&gRFlmO`k{{-C^F zeZ?e>&dm6cd9XKuw;9|Utk8Ly8o;pouSuwXo4l_3a<Q~qjvr25ZQPjaXL@pS-3pz9uhpJMDulnR#iFz zQvq*9X-#ZHAFr-5ce4)jg0k@9X&{xZ1!Y)uZs}Of!c3zvS=v5_kBo%KBRV#bN-VKm zJ>2?7fO+_m^ie}J+mRZzk@u(5jqg)xwn=c$5B!HZ9(%EvnEx26)=7`&k8zw##U_|?bbFbvxL%FQe4 zRZy^`<-l>d*Q?+uZBp>%s5F5)5_aj%;P9zS!tS^5SI15IWcq02a-cvl4a$_aPex7z z(iLA`qZ6~Ri6*rn$?y+LBh6=FP)l>sp|&AIu?9A)I`F}6t32Mdt~`G0xhVSyjf`>% zv)GpE^2xb|oBbf!Mq(-ZSJJwv_VR?Js9nYn$v+g?4ab!4h?S1DE(y9nTLjmpfE(;* zPyX;Lhr6@bUNWxmOpR2zh_nU@z8b9JYmLnk9#15Qb;zUe>I&6K1qOtkYY`c{FC#?)kA(YQOr$s*x(gMBwyBIzj0~QNk#TOo3U&?sAII&|4?Y*uaPAH3)mf7(e zeIVwX4QYk`cW)zxO+O^_YNB7sz~|0firq5`Fcq{h`CP-0d1gjgH?-97qbMm9@qXw? 
zc1_T|BjMoTb;CnjS+^h#%|mIV(obfQ88Yd5XXo!}e1tc7(MUs3P&8>3Uo5T>#fPxB z8Q-g_^y!0zpyWnN{J-%yQ%tGOwqgGZl*JBDc>=g(nYw) znW@-u5uKrnwHI!so%`Az81T)+XW90zSQQjSW1A>Iu!x?rGl225H8|fMz4k=P_HmI8 z%t9@l>w}HQ5?SMDihR94m8y+yiqoan*@PlRb_3- zMNPXK!j$tNe`- z>|R3nl;}9cBH&}3U$u{82wC%clf6EAAZO)2xNV5!e_87I?&bFNF4^Vg9g)^?J3cOR z@c{zinj*aq0nuUE=q!224 z1ajvoFmn?iRpgA0CdBi)Xav{TMBsV=jZMy*yc!J$g$N}W6`P6UJ#8aWw<-3-rfwC8 z+?=BX+7;|+PhhK@xDykfmuW@t-M%B(9Enh4if9L`mh(;qxg?qavgmxc-HL1>P+aVt6qB?Xv}Xz4N8$ABcMA0FfjZZ1B&N8!hR@B{Jf0X@SUF|P*lpY%i#P~p&|nZ^E%uX zRfe>Xv;zH90O&XnA3dN+ef5&)P`67b0e2;L+Q!4S-Sn6WuF83hy>&xCA1EwA{PXhT z3T92h#f-&s`p$dq`zgWEm-~fPJKu+W3ZMVyZK2uA^J`Od?maP_S-z*?`r`XE^YVA= zcB7eD@Y`{Rh4WC=W8HR}PVCozFCCnj2EHd#+jY#8{~n*Wa-ECqJIyn<-CvGGG~Cy1 z;=cTQZiwNqFFB23JSj9fx28M49XELx-PU`)3mE+}_q?=C@$&3E>UQF1a?w2Q{rzs+ z^`)};a!%tI4z&kvMxU;BtkqjbjjrxFSU!fW-U~{_y?9-b{_MwW7>Zx!pQK@li&=ec z*!Wyd+Wt87V^mBy{YkJPcj@+Mbok}s@5VQaz2}!z6HmxWjD2qX=(kkim#esw&mGTU zFR4{6B?VKV)g5NM+Fl{jAN*+u(yJH7>6N1)I&J91B0_-1$)VPh>vCFz+M zW0hZea?|i=`xX^y7wR44{M^W@y5-RIUVi%48)NJrgYf!@g3s4~{88LU-IpnZ6DcJd zR4zXJK54rf9&I;$-@XpJ`&}qb_KruBJh?0TvX61~{*1=o7OqNZcDiM$*qr?JRc%x2 zGvlZi`uuuWg`gvfQ|a7q2~sCZq=}1Er2Fl?BYK_nL=_8tcvOY>YD+793m=uK2xBZO z$?^8w@2bGI`7`b0<@I3$lD^(d8}n?J{(*ow4A4kHS^f@KF&kxLfp+w-fQjgVp45Wf zV{D^fsgZ-{h4A=e)50D7l90*sqfO@Oov@8S>(M1`7y%}*?SO(;-RWPaXiM{&-%I;; z*>vZGUF+z#cF>V?>XY$>4N!8*`$+0}Zo9F=t=$vZAM<7ZVYGDY(fpzzM(7il$cc&2 z>g~AS45jZ-$nhwZ=lnCqX?hPgtnEce{hwO>f|#(a@A)I{==#fk&-P&HOT*ncspQh* zppW0=<2h^RWSgo&7qjHGwt=tB%P@Ura;Km3#khuOr{~$$%bw@H3j4p8f7>;ko+TpZ z{3ai%pZ{%+R^?u^KbyHsOZ~f`5io%q1aE(tN?o6SIiY=N>Z=!^KpyJ!=z1wdw0oE= z`&Dg^(`5Msn;A`5?c5mao%Td=lj==px$n)1UfdaS1RBzmr5tfW{}~J8i<%lDr(RQ4 zs~H-T7CB;i-on7`A2YnB$l_c|7xcUaYX-YCVi^B9&ts*iJtlL6*<$(dF!1$&=i|Hj z2x0@#n19CLtl-b=mbIFQTpsu)VO!5iz=ABbg?cm2o%M@U+jK0uXaA)RtFwOp7sA3! zK}5@p>>ZEM>|f&YmQ2~-o?odrnki4p|0LR+BYYJD?M{n&iWZanTodUQAZv`Bim;Pd zHkJMU;_xRtff+*%X=M*P*_;d~`Yi-`?`hrmNlSr(Oy9JRR&?mcPQbA^P-UZ)=(K_? 
zWPpVp`c8AM(WQqY&)ClHtZhUDh$8Us*J1W(s_zLC&{pPUNjcZLota)Y=8 zfs8OGL_Lwxm~{I8dG$T4?&@2k-Jd^yGMzhblegneo}vg_nS6FStvc`h>W38{$9q2Z zO1=zNjyn6@)LFZetJQ`4$kE5>a)Wys3ytCRl``GN|vgEVo^PErV zbKhzEe4ejpRTeGecQvQ!0nZbQle!hWSoYLmvdBMZuOj*y4h^sR+Qj+(yz(l!f*MJnbFd>uT1lQRT#!jx7wA{Gbv%c z#Z8(vUn=ASoaMw1!15)J^rJw@yS;H?O^FAh);VUrw4bksFRtjF>W+c`-t7~hSpNwV zO+qy2dpNStNkH%54OrN+)Z^k&1n9N?DYtq4{AWcfuPKiS6lR5swuz3s9oT$~8RcwBoxV|?lFYLg&2o{R zH;rua3?@q*ehhYaKTt(RjgzOr(Y+T{Ap?)$z(bA(++Ls&0)B(E$rRE}vR}pJ>PQY) z3ef4irgKRf{~UW+@e;?*W#@a zfA&s!*m2IFd8uF1a9I1kXDiU|?=rpC^CHco9x9*gz2Du$O~%XR=r_Esxu@&9QDntZ zf(^ksA}P<07rQs5fN{@jX1}?|wXpFKvghK}*2$PhRCxsBOd{?{vL#c`GS@+4?9?V{ z?a6@C-uEK1O1PBzVq9V>U~6#9aqb@_L8#KgsX za>J}8#?}l~31^Jvo2GuFdCC?$N#!!R<%8arp-ouFA}RX&=a0xf?5bl#_ciHOQJ1;X zbI3SGTr`X6s+l)s0F8UF;t4Abmre=YsPCnAL`r3wj1c}mh6o) z5dP?Q(@L07G-r7Ic@(qJvub7h@yL0U*T(n|r(nVaP3!M)Ha-$5{g-=Z3gKa)r6SLb za+@RT(Jl+Wi~8tb1FzRu(SXMvCU>PdUw2Cudh$c~KpmcQPoq2I;vM`)7g3mpe<^EM z6>S7SDY4N4tM~Wzg8jM9%Kfk(m&aC_f>L*_q!~Z|T~u!G*^q8eu;K-$ni%^<_ojUK zaCqq~b>qwjk~Dhly*c#K*?xgk{p0)WOHQ-Sn_ZBMKT;rZGSlzCz&w;laD&Y^abi3F zJgfpg?*bN_@Hdxc>t^zAF9Y7&27)8Y1dyS{kelkCNn}lrG%%I ztZ#1mh!*pk>@52ur!mJS!;y9aqxd}BSQ&-^Y1P-PY&0l4Zc@%^Sl#@D|9+ZKqKD9G z5n;mz$_#Cu4i4$mm8bd_1zl^xkf*S-S(5Pt6aMrDRST;Loj0HaoD6)lQcd9z*O^lL zC|c*qtwa!=(z^|oh)gVyeq<+EpILS0~OKCv^m@@|-OuZ7|{C01ll z-~|Gwy?|MgGVdT&NcwDDhV-uH&XUxO9Un~d-lL@STi+CuPwQS|`Fu6JDy5iq(kb=$ zlzwB!|4==sa4^PCHIft8fsX9g6%?d?BH<5r1Z@8qHm;Z7-#b=$xm%=?2y)z6;l-T+ zWT94Mn*tEvK?izDNIw4jjv5$pz5Aoj|0yjepXyifQDrY7TUK zNCWgGPr6)&!mE|SYAeasGJjkjQq5n|V%XcL!?#f2x@_{ij>6CX-z zEwCVq+29k=DE@+D6xM$4zEnfB_d&+E;5&NM#u_mhK)88e;E+uT|1*Ycg3+)Hjh zF8@iG9C(OXM;uC>FC-XBtX!yJ?tZ%yzvRT?>b$Pn`y^t^flPJQ?LJ`E zD#F=1Lj1yufGEQ+tGuV`K)Lt1I&He|w6$wIdV+WOXuOh^lQxr(vV!aYu7rQt{E{v2hgfF_djh*`&zI!~$y;g-S=~VYbBBqy z)U+Eaz`=>P7Bsq6ic+^S?rdM0(3L#2io$`5Sh{&Zqq8dB=0wI zAGY-|)v{&X)LMf$g%>6=_cZRuG~0Sx%^z>~=3?KPT*Ke54VOMcvOSNhobA@T_c z1Ldg9Fos4(%HyVxHGF$|1^}Zb$dK=40JL!OKr$Rz|9ecw_>&VlRbr7rX+u%|Pe%j6 z=&_uAgc1wiBTq^%0gA-xEBG**BBEYlcujd)@mQg?t zFRnC%1+SLhij8x~l)fQp8_hNpnjclSFW5|%%<1IS$GRirh4xc2kEXmg{5kTq0@RaL ziv??*L^L+2NXR(^SEtJ7$*YcR{-qN)!Z$|sLtAC)qU244{m})& zTK9^IIlsHm^v2x021b4ODt}ar#9~TlWJLeERd{->{06BK^l-aT^${ek1#nDc4F2wRS0&Co94WuIbUe?K|K|W0#iX{ zOqHxzg6tEyj~(upHPZ$Za;i$yOeb=e*0y@Kzz|Akt%iE`?&t%JH(b0kgrJ?xzBJHf zPSf>_&31}Hr4ev1^QF{JG>ukT#?}{yHgp2`dR=zA23Rx?Qv7TmewZp0;;4qD%J&lL ze=Snvg=F?Uu9dYmDS66WWPA*Lazfc4e6RpEN7`jAv_L)1ky_Lpl02 zZz97~h@1GQS>}JKc_59bx4d40XX#naM!K)y|FnwM`F2Nw$5V$ekcr~lpe-S7P`EPF zYC^U9_Z87tizi|phk@RoE6 zqP|`|VX_V>PaB6W?-uwT7RMC2^`FQnzy5A7CU5yCVl>QAH&<0C3%5s<;Rj2zXw;#^ zXzkzU@Oz118(O`LzcRe{MiB9xzM`^H?rHFfLW&BLB3C-)%N0|Sp0h4LgcM>&8f$AU9Qm$`0ZN`8IQz`gX-aMT9Zf7p%Im&c%x&goavukWkTK=y)23{)%&JiDM|_nLg0| zo|NYeZ`>!z)3I~WWHTKkFvPhJhg7-5mf{u07?Aer*7p9JLb%hE9=Y;qn(V{FSmQH? 
z(LNy0(VRhua74CvdV`$fdD-M&!Zc*fb{1yexF1v%7a)eYBg*)}&C^4ck0)@0)tjL4 zJc8$aZYYSHkd4TRN7*JckHN%DD0K2E?XjYv*0`Kr`?N>hIiEK zjZ>@?g&P0@W?rJWfnm`ic7?0xWAZchU}KkvQCd9(_-@P`2|r;fSF%;4l){CmMXH*d zo-DF8Xks#^ZMs@->!)nX3}+wf-1J{6CyrO9M0fSFisV=BgF;i^-sRg+SLke6slvCM z=8GJJUCX^2iEE{Ly{d^smMG6AWIyaWzsMYZqP4A0Ue(u+ou)aqfF~(*KFI3-J!$gg zSIV)X7?*B}zT{sDl>Do@ytNYd$l8ZT3;?|hKx_YR3ef1!DTxJdRmK^O#jB#ET8|vT zp*t8hnLUc<%F|L#XdPRFk;BYN1W2>L5qF6rwVjK|kgj6#K<^0M5 zw3C*|H8jS|rco*Ld_`sVc;E5KEh9;j0%ntoD%AfR_(-8dYU|yz=U8@SkiYbxsO3*z zM$}0|6|Mf5jg%8TXoL$1abw9Uv41MG`M*`p&tuR36U^PA+Z~b==Oa59 zXgQT79FV1h5S9Sq!{0->}X3*DvyBM50ylt5}?&xHJk`B09Z zfR#^~!x+l4=fRF7Eos_$YRn9p*}D-!_Z$yiEj^yFCivv2$bNSU*A=kni0>19EJ{Og zJI-|6ko`j5v$<}FZ;THG>QG|PB&^V1jf=vypm7?2CMr_&-6~h3lK!6VKI`?$+{|7> zaCRGAYto5%D5e+#X)NK@pH(bT_WuPs#a(lNiL-}d3yLqQsTt&o5~9;6P*oUbJF zPMR>mc1>KmgGqxR4hw4oiRo1w8~M-3uA=mMux2<@_<)s*G|z5K{I`Lml?NChH;U8y z1gIBVsb%=AjlD*2xb2APj6GF;7=c7^bR}3P`|6A%7%SEVUMzDB3(}Z^@JBLJ!WDhO zGs=>!Z4|NBT`beRZ31n$K8pDSL?4WZVzJBRUVgmPq~a|M1Po!awtih?%C5l+P@gH~ zrw;X{xuhlaq0qnqVAq-@o?ptZBpq>mDWDAN2)_`MH-RavXahS1eF`nN6cIl;71GBz8%N3Yc84yb~Qb5C@MH%FP6DZ#=@A+bXFP=EIbwuHSELJ z+JOkrw!=t_Li%~|!|bJSNxB`l6w)G>$E5cveD5BtIq>gHd@3ZZ zhaCF}^aHr}n=4V2-t`?>3nMp`yyOONF}1G0gYl4ADTaZam>jv>heZYQMbv=r?@~?@ z+AC!vcy)LaydbKug;2P*$QS{fW)D`IjkBM}cg0EpmH3c4>c-^q=gzScRO$;CD+f~S zF&mSGSZFPlt&$tjd5HIhjy5EB|| zd>*?JT7UQb+rZvF#&a`#2^Xj&G>cZ24)GC0?{2|jH?-;jKH%0#ahE{_+cloM1Eflo zuA*C<{ydmz1gSwGpvQ%9VP2XCE7jQ!#K2jy6eKRY+!C*yrY&@DTiRFaJwFm1Hl3}I z4f&}2RJbbd%E#V~i{E;e-NwuSFE5?sg>Rt`g*Zza%nyTn;`qC`EtvZ?(6-7_YZpAV zC4H2NC;cF#yq|;k3lgWrgG_lBBJ48TzI?DPP6p_GRKo-O*<@sA%fTyA&rHFHtI7 z6uu{6g4nL|mWn=!aeN0C#kRBUpfOuh<@aTxL#mD2|nXjE| zQJwLVP6XT0`~hDLjJ0cS{iRNo6PfP9rLHY)5D!}J25=xIyBg0B9QHJ=;JfIV05v|i zEDoBMtZVw+CvtPWdZj5WZFMN(pE|Dto~=&ovhbj_Quy__m4U>P=L~8Yc)cA^mPPY< zd*Hqk^ZVmE-U3>{xgYZw^mKO1*3*S>JcK4}qEFZ@qv#dCE&C)UE8}7KIS#o>)xoS; z!eyr+i9tMfo)^`ll$(Slodr{jZ%xw}eQ!{iyJRDfFhW0y%(kC1MC~Q)XO?aG*3)lc zzB7uxqr!mzh=uI3#(+J~_o`wYOxLrIpW2WY2ds7Abtbd~@X1n_t*}CALbpHmpC6LP z?PE9*n>@lLjn%M35`HG#HTq|!q^eHR`(Kyw4^5KFHRc61d75u5@BQvC_n3CvV3N+; zYA-vO!mHlSswW8dN7jE{=~=a&pN;?aFWQDiegLW$Km90vr)Tu{Mf>G$+|Mi2+zbIC zG=OW#|9*7bO4i$+APQn;{d%|W<9)Ov18}@8sH~eik@z%&o+D16SU986t=nGI zsuc^PJ{tVV`I3c$^-$qYMK6K@(VmBJpq!{J{|Z1OX+%WaK+ZMG(|g-&awB$hf+5As z0?E?7aD`y2%jj_if|@Z8t2PV8qSSa zk3tQ9ttQCFln)v&>)W#JzgU=CW zP}iD`Vok*gX;jwX+4m1P4V6^(fJ|6s_!YPi$T!5$9gw{%ruWozph(eWs zE>1y~Uaw?UKhU291*8;@o&7i}>wsvWL{p-dD*1~$nTGbxkHa1un`y30MCY{Iz~&+X zY@INm13~0z>p0jY0P?>CbWDgRUi7SaSSnohFHsAz*xl1!hOb#fXLOsbeSB> z96Z7eBv{4Zk7W@;O=(j#VX^PX@c|X7*ZQ?cy}QE3b_`*RuIzDfmp^LHHwt$R?C_qB z3-N~G;!{tp1K0Wa1-EAk)l3eU%D@}T=-8;Az*VaO_xrJC~Nk~1X5#AWz-8c z{wmQlLHagC!%NA3?&%fJooowH%2%&mG3EqZ@?%Qrex$hwBPI?XfZ1}UhKX+%wm zJ64Hp;t_lLsHv&!@Q&#RAbSEM93L$el+zWRy59@ydAt?RJ0H{bO7g$-_-rv_+j1>} zLpa9CW0x5T)kWHH6Zy@$c{0|7ibU7IgVK=cq9k&ePAOu*rSKdtyht)fr91>QvN~Nu z`7V4fwlS#*!Kt;9GuxA;*iPwnZAP*xlmMS=$#VCNV1HpbiqntVoul1j{96Lw$7g)+ zt{h&#r);&Q)VD&p|7FiSg3;!~KezLTA1@A~1%VFV#GSQM$=GsmW*S9w-*S9d>-&1P z??Wl%3MWS^3!gNd;Dh55U^Gi(Gm-sn!sY$Q*CkTM!qWl(m=P#cb=>i#60bxO0)Jx! 
[... base85-encoded GIT binary patch data omitted ...]

literal 0
HcmV?d00001

diff --git a/website/docs/assets/unreal_level_streaming_method_no_sequences.png b/website/docs/assets/unreal_level_streaming_method_no_sequences.png
new file mode 100644
index 0000000000000000000000000000000000000000..77a2754ded51a563ad3426d13f65edff7c2ce83f
GIT binary patch
literal 82416
[... base85-encoded GIT binary patch data omitted ...]